Mirror of https://github.com/dawidd6/action-ansible-playbook.git, synced 2024-11-21 23:19:24 +00:00

commit 00765f79cf (parent 9ff0bc8d99)

    node_modules: upgrade

320 changed files with 31840 additions and 1039 deletions
48 node_modules/.package-lock.json (generated, vendored)

@@ -1,12 +1,12 @@
 {
   "name": "action-ansible-playbook",
-  "lockfileVersion": 2,
+  "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "node_modules/@actions/core": {
-      "version": "1.10.0",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
-      "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+      "version": "1.10.1",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz",
+      "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==",
       "dependencies": {
         "@actions/http-client": "^2.0.1",
         "uuid": "^8.3.2"
@@ -21,17 +21,26 @@
       }
     },
     "node_modules/@actions/http-client": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz",
-      "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==",
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz",
+      "integrity": "sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg==",
       "dependencies": {
-        "tunnel": "^0.0.6"
+        "tunnel": "^0.0.6",
+        "undici": "^5.25.4"
       }
     },
     "node_modules/@actions/io": {
-      "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz",
-      "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw=="
+      "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
+      "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
+    },
+    "node_modules/@fastify/busboy": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.0.tgz",
+      "integrity": "sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==",
+      "engines": {
+        "node": ">=14"
+      }
     },
     "node_modules/tunnel": {
       "version": "0.0.6",
@@ -41,6 +50,17 @@
         "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
       }
     },
+    "node_modules/undici": {
+      "version": "5.28.2",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz",
+      "integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==",
+      "dependencies": {
+        "@fastify/busboy": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=14.0"
+      }
+    },
     "node_modules/uuid": {
       "version": "8.3.2",
       "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
@@ -50,9 +70,9 @@
       }
     },
     "node_modules/yaml": {
-      "version": "2.1.3",
-      "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.3.tgz",
-      "integrity": "sha512-AacA8nRULjKMX2DvWvOAdBZMOfQlypSFkjcOcu9FalllIDJ1kvlREzcdIZmidQUqqeMv7jorHjq2HlLv/+c2lg==",
+      "version": "2.3.4",
+      "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
+      "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
       "engines": {
         "node": ">= 14"
       }
4 node_modules/@actions/core/README.md (generated, vendored)

@@ -121,7 +121,7 @@ const result = await core.group('Do something async', async () => {

 This library has 3 methods that will produce [annotations](https://docs.github.com/en/rest/reference/checks#create-a-check-run).
 ```js
-core.error('This is a bad error. This will also fail the build.')
+core.error('This is a bad error, action may still succeed though.')

 core.warning('Something went wrong, but it\'s not bad enough to fail the build.')

@@ -163,7 +163,7 @@ export interface AnnotationProperties {
   startColumn?: number

   /**
-   * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
+   * The end column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
    * Defaults to `startColumn` when `startColumn` is provided.
    */
   endColumn?: number
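For context on the API touched by this hunk, here is a hedged, illustrative use of `core.error` with `AnnotationProperties`, assuming the vendored `@actions/core` 1.10.1; the file name and positions below are made up for illustration and are not taken from this repository:

```js
// Hypothetical annotation example; values are placeholders.
const core = require('@actions/core');

core.error('Unexpected value', {
  title: 'Validation failed',   // annotation title
  file: 'playbook.yml',         // illustrative path only
  startLine: 12,
  endLine: 12,
  startColumn: 3,
  endColumn: 9                  // per the corrected doc: only valid when startLine === endLine
});
```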
4 node_modules/@actions/core/lib/core.d.ts (generated, vendored)

@@ -21,7 +21,7 @@ export declare enum ExitCode {
     Failure = 1
 }
 /**
- * Optional properties that can be sent with annotatation commands (notice, error, and warning)
+ * Optional properties that can be sent with annotation commands (notice, error, and warning)
  * See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations.
  */
 export interface AnnotationProperties {
@@ -46,7 +46,7 @@ export interface AnnotationProperties {
      */
     startColumn?: number;
     /**
-     * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
+     * The end column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
      * Defaults to `startColumn` when `startColumn` is provided.
      */
     endColumn?: number;
2 node_modules/@actions/core/lib/oidc-utils.js (generated, vendored)

@@ -44,7 +44,7 @@ class OidcClient {
                 .catch(error => {
                 throw new Error(`Failed to get ID Token. \n
         Error Code : ${error.statusCode}\n
-        Error Message: ${error.result.message}`);
+        Error Message: ${error.message}`);
             });
             const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
             if (!id_token) {
2 node_modules/@actions/core/lib/oidc-utils.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"}
{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,OAAO,EAAE,CAC/B,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"}
6 node_modules/@actions/core/package.json (generated, vendored)

@@ -1,6 +1,6 @@
 {
   "name": "@actions/core",
-  "version": "1.10.0",
+  "version": "1.10.1",
   "description": "Actions core lib",
   "keywords": [
     "github",
@@ -30,7 +30,7 @@
   "scripts": {
     "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
     "test": "echo \"Error: run tests from root\" && exit 1",
-    "tsc": "tsc"
+    "tsc": "tsc -p tsconfig.json"
   },
   "bugs": {
     "url": "https://github.com/actions/toolkit/issues"
@@ -43,4 +43,4 @@
     "@types/node": "^12.0.2",
     "@types/uuid": "^8.3.4"
   }
 }
2 node_modules/@actions/http-client/lib/auth.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"}
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAK/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA5BD,oFA4BC"}
7 node_modules/@actions/http-client/lib/index.d.ts (generated, vendored)

@@ -1,6 +1,9 @@
 /// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
 import * as http from 'http';
 import * as ifm from './interfaces';
+import { ProxyAgent } from 'undici';
 export declare enum HttpCodes {
     OK = 200,
     MultipleChoices = 300,
@@ -51,6 +54,7 @@ export declare class HttpClientResponse {
     constructor(message: http.IncomingMessage);
     message: http.IncomingMessage;
     readBody(): Promise<string>;
+    readBodyBuffer?(): Promise<Buffer>;
 }
 export declare function isHttps(requestUrl: string): boolean;
 export declare class HttpClient {
@@ -66,6 +70,7 @@ export declare class HttpClient {
     private _maxRetries;
     private _agent;
     private _proxyAgent;
+    private _proxyAgentDispatcher;
     private _keepAlive;
     private _disposed;
     constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions);
@@ -114,10 +119,12 @@ export declare class HttpClient {
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl: string): http.Agent;
+    getAgentDispatcher(serverUrl: string): ProxyAgent | undefined;
    private _prepareRequest;
    private _mergeHeaders;
    private _getExistingOrDefaultHeader;
    private _getAgent;
+    private _getProxyAgentDispatcher;
    private _performExponentialBackoff;
    private _processResponse;
 }
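One addition declared above is the optional `readBodyBuffer?()` method. A minimal sketch of how a caller might use it, assuming the vendored `@actions/http-client` 2.2.0 and a placeholder URL:

```js
// Sketch only: reading a response body as a Buffer via the new readBodyBuffer() helper.
const { HttpClient } = require('@actions/http-client');

async function fetchBuffer(url) {
  const client = new HttpClient('example-user-agent');
  const res = await client.get(url);
  // readBodyBuffer() is declared optional, so guard before calling it.
  return res.readBodyBuffer ? res.readBodyBuffer() : Buffer.from(await res.readBody());
}

fetchBuffer('https://example.com').then(buf => console.log(buf.length));
```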
61 node_modules/@actions/http-client/lib/index.js (generated, vendored)

@@ -2,7 +2,11 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
@@ -15,7 +19,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
     __setModuleDefault(result, mod);
     return result;
 };
@@ -34,6 +38,7 @@ const http = __importStar(require("http"));
 const https = __importStar(require("https"));
 const pm = __importStar(require("./proxy"));
 const tunnel = __importStar(require("tunnel"));
+const undici_1 = require("undici");
 var HttpCodes;
 (function (HttpCodes) {
     HttpCodes[HttpCodes["OK"] = 200] = "OK";
@@ -63,16 +68,16 @@ var HttpCodes;
     HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
     HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
     HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
 var Headers;
 (function (Headers) {
     Headers["Accept"] = "accept";
     Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
+})(Headers || (exports.Headers = Headers = {}));
 var MediaTypes;
 (function (MediaTypes) {
     MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
 /**
  * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
  * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -123,6 +128,19 @@ class HttpClientResponse {
             }));
         });
     }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -428,6 +446,15 @@ class HttpClient {
         const parsedUrl = new URL(serverUrl);
         return this._getAgent(parsedUrl);
     }
+    getAgentDispatcher(serverUrl) {
+        const parsedUrl = new URL(serverUrl);
+        const proxyUrl = pm.getProxyUrl(parsedUrl);
+        const useProxy = proxyUrl && proxyUrl.hostname;
+        if (!useProxy) {
+            return;
+        }
+        return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+    }
     _prepareRequest(method, requestUrl, headers) {
         const info = {};
         info.parsedUrl = requestUrl;
@@ -527,6 +554,30 @@ class HttpClient {
         }
         return agent;
     }
+    _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
+        let proxyAgent;
+        if (this._keepAlive) {
+            proxyAgent = this._proxyAgentDispatcher;
+        }
+        // if agent is already assigned use that agent.
+        if (proxyAgent) {
+            return proxyAgent;
+        }
+        const usingSsl = parsedUrl.protocol === 'https:';
+        proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
+            token: `${proxyUrl.username}:${proxyUrl.password}`
+        })));
+        this._proxyAgentDispatcher = proxyAgent;
+        if (usingSsl && this._ignoreSslError) {
+            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+            // we have to cast it to any and change it directly
+            proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
+                rejectUnauthorized: false
+            });
+        }
+        return proxyAgent;
+    }
     _performExponentialBackoff(retryNumber) {
         return __awaiter(this, void 0, void 0, function* () {
             retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
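The new `_getProxyAgentDispatcher` above constructs an undici `ProxyAgent` rather than a `tunnel` agent. A rough standalone approximation of that construction, for illustration only; the helper name and proxy URL are assumptions and this is not the library's public API:

```js
// Approximation of the dispatcher construction added in this hunk.
const { ProxyAgent } = require('undici');

function makeProxyDispatcher(proxyUrl, keepAlive = false) {
  return new ProxyAgent(Object.assign(
    { uri: proxyUrl.href, pipelining: keepAlive ? 1 : 0 },
    (proxyUrl.username || proxyUrl.password) && {
      token: `${proxyUrl.username}:${proxyUrl.password}`
    }
  ));
}

const dispatcher = makeProxyDispatcher(new URL('http://user:pass@proxy.internal:8080'));
console.log(dispatcher instanceof ProxyAgent); // true
```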
2 node_modules/@actions/http-client/lib/index.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/@actions/http-client/lib/interfaces.d.ts (generated, vendored)

@@ -1,4 +1,6 @@
 /// <reference types="node" />
+/// <reference types="node" />
+/// <reference types="node" />
 import * as http from 'http';
 import * as https from 'https';
 import { HttpClientResponse } from './index';
25 node_modules/@actions/http-client/lib/proxy.js (generated, vendored)

@@ -15,7 +15,13 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
-        return new URL(proxyVar);
+        try {
+            return new URL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new URL(`http://${proxyVar}`);
+        }
     }
     else {
         return undefined;
@@ -26,6 +32,10 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -51,11 +61,22 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map
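The `checkBypass` changes above add loopback detection and wildcard/subdomain matching for `NO_PROXY`. A hedged example exercising that behaviour; it assumes the vendored module can be required by its internal path, and the hostnames are placeholders:

```js
// Illustrative only; relies on the vendored @actions/http-client 2.2.0 from this commit.
const { checkBypass } = require('@actions/http-client/lib/proxy');

process.env.NO_PROXY = 'example.com';
console.log(checkBypass(new URL('https://api.example.com'))); // true: subdomains now match
console.log(checkBypass(new URL('http://127.0.0.1:8080')));   // true: loopback addresses always bypass
console.log(checkBypass(new URL('https://other.test')));      // false: still proxied
```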
2 node_modules/@actions/http-client/lib/proxy.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,gBAAgB,CAAC,EAAE;YACnD,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AArCD,kCAqCC"}
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,IAAI;YACF,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;SACzB;QAAC,WAAM;YACN,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;gBACrE,OAAO,IAAI,GAAG,CAAC,UAAU,QAAQ,EAAE,CAAC,CAAA;SACvC;KACF;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AAzBD,kCAyBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"}
9 node_modules/@actions/http-client/package.json (generated, vendored)

@@ -1,6 +1,6 @@
 {
   "name": "@actions/http-client",
-  "version": "2.0.1",
+  "version": "2.2.0",
   "description": "Actions Http Client",
   "keywords": [
     "github",
@@ -39,10 +39,13 @@
     "url": "https://github.com/actions/toolkit/issues"
   },
   "devDependencies": {
+    "@types/node": "20.7.1",
     "@types/tunnel": "0.0.3",
-    "proxy": "^1.0.1"
+    "proxy": "^2.1.1",
+    "@types/proxy": "^1.0.1"
   },
   "dependencies": {
-    "tunnel": "^0.0.6"
+    "tunnel": "^0.0.6",
+    "undici": "^5.25.4"
   }
 }
4 node_modules/@actions/io/lib/io-util.d.ts (generated, vendored)

@@ -1,7 +1,9 @@
 /// <reference types="node" />
 import * as fs from 'fs';
-export declare const chmod: typeof fs.promises.chmod, copyFile: typeof fs.promises.copyFile, lstat: typeof fs.promises.lstat, mkdir: typeof fs.promises.mkdir, readdir: typeof fs.promises.readdir, readlink: typeof fs.promises.readlink, rename: typeof fs.promises.rename, rmdir: typeof fs.promises.rmdir, stat: typeof fs.promises.stat, symlink: typeof fs.promises.symlink, unlink: typeof fs.promises.unlink;
+export declare const chmod: typeof fs.promises.chmod, copyFile: typeof fs.promises.copyFile, lstat: typeof fs.promises.lstat, mkdir: typeof fs.promises.mkdir, open: typeof fs.promises.open, readdir: typeof fs.promises.readdir, readlink: typeof fs.promises.readlink, rename: typeof fs.promises.rename, rm: typeof fs.promises.rm, rmdir: typeof fs.promises.rmdir, stat: typeof fs.promises.stat, symlink: typeof fs.promises.symlink, unlink: typeof fs.promises.unlink;
 export declare const IS_WINDOWS: boolean;
+export declare const UV_FS_O_EXLOCK = 268435456;
+export declare const READONLY: number;
 export declare function exists(fsPath: string): Promise<boolean>;
 export declare function isDirectory(fsPath: string, useStat?: boolean): Promise<boolean>;
 /**
10 node_modules/@actions/io/lib/io-util.js (generated, vendored)

@@ -29,11 +29,17 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 var _a;
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rename = exports.readlink = exports.readdir = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
+exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
-_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
+_a = fs.promises
+// export const {open} = 'fs'
+, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
+// export const {open} = 'fs'
 exports.IS_WINDOWS = process.platform === 'win32';
+// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
+exports.UV_FS_O_EXLOCK = 0x10000000;
+exports.READONLY = fs.constants.O_RDONLY;
 function exists(fsPath) {
     return __awaiter(this, void 0, void 0, function* () {
         try {
2 node_modules/@actions/io/lib/io-util.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"io-util.js","sourceRoot":"","sources":["../src/io-util.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAEf,KAYT,EAAE,CAAC,QAAQ,EAXb,aAAK,aACL,gBAAQ,gBACR,aAAK,aACL,aAAK,aACL,eAAO,eACP,gBAAQ,gBACR,cAAM,cACN,aAAK,aACL,YAAI,YACJ,eAAO,eACP,cAAM,aACO;AAEF,QAAA,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAEtD,SAAsB,MAAM,CAAC,MAAc;;QACzC,IAAI;YACF,MAAM,YAAI,CAAC,MAAM,CAAC,CAAA;SACnB;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,OAAO,KAAK,CAAA;aACb;YAED,MAAM,GAAG,CAAA;SACV;QAED,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAZD,wBAYC;AAED,SAAsB,WAAW,CAC/B,MAAc,EACd,OAAO,GAAG,KAAK;;QAEf,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,MAAM,YAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,aAAK,CAAC,MAAM,CAAC,CAAA;QAChE,OAAO,KAAK,CAAC,WAAW,EAAE,CAAA;IAC5B,CAAC;CAAA;AAND,kCAMC;AAED;;;GAGG;AACH,SAAgB,QAAQ,CAAC,CAAS;IAChC,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAC1B,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IAED,IAAI,kBAAU,EAAE;QACd,OAAO,CACL,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,8BAA8B;SACxE,CAAA,CAAC,sBAAsB;KACzB;IAED,OAAO,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AAC1B,CAAC;AAbD,4BAaC;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,QAAgB,EAChB,UAAoB;;QAEpB,IAAI,KAAK,GAAyB,SAAS,CAAA;QAC3C,IAAI;YACF,mBAAmB;YACnB,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;aACF;SACF;QACD,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;YAC3B,IAAI,kBAAU,EAAE;gBACd,uCAAuC;gBACvC,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;gBACrD,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,QAAQ,CAAC,EAAE;oBACpE,OAAO,QAAQ,CAAA;iBAChB;aACF;iBAAM;gBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;oBAC3B,OAAO,QAAQ,CAAA;iBAChB;aACF;SACF;QAED,qBAAqB;QACrB,MAAM,gBAAgB,GAAG,QAAQ,CAAA;QACjC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;YAClC,QAAQ,GAAG,gBAAgB,GAAG,SAAS,CAAA;YAEvC,KAAK,GAAG,SAAS,CAAA;YACjB,IAAI;gBACF,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;aAC7B;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;oBACzB,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;iBACF;aACF;YAED,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;gBAC3B,IAAI,kBAAU,EAAE;oBACd,yEAAyE;oBACzE,IAAI;wBACF,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBACxC,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;wBACvD,KAAK,MAAM,UAAU,IAAI,MAAM,eAAO,CAAC,SAAS,CAAC,EAAE;4BACjD,IAAI,SAAS,KAAK,UAAU,CAAC,WAAW,EAAE,EAAE;gCAC1C,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,UAAU,CAAC,CAAA;gCAC3C,MAAK;6BACN;yBACF;qBACF;oBAAC,OAAO,GAAG,EAAE;wBACZ,sCAAsC;wBACtC,OAAO,CAAC,GAAG,CACT,yEAAyE,QAAQ,MAAM,GAAG,EAAE,CAC7F,CAAA;qBACF;oBAED,OAAO,QAAQ,CAAA;iBAChB;qBAAM;oBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;wBAC3B,OAAO,QAAQ,CAAA;qBAChB;iBACF;aACF;SACF;QAED,OAAO,EAAE,CAAA;IACX,CAAC;CAAA;AA5ED,oDA4EC;AAED,SAAS,mBAAmB,CAAC,CAAS;IACpC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IACX,IAAI,kBAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA;KACjC;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAED,qCAAqC;AACrC,6BAA6B;AAC7B,6BAA6B;AAC7B,SAAS,gBAAgB,CAAC,KAAe;IACvC,OAAO,CACL,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC;QACpB,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC;QACxD,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,EAAE,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAC1D,C
AAA;AACH,CAAC;AAED,qCAAqC;AACrC,SAAgB,UAAU;;IACxB,aAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,mCAAI,SAAS,CAAA;AAC5C,CAAC;AAFD,gCAEC"}
{"version":3,"file":"io-util.js","sourceRoot":"","sources":["../src/io-util.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAEf,KAcT,EAAE,CAAC,QAAQ;AACf,6BAA6B;EAd3B,aAAK,aACL,gBAAQ,gBACR,aAAK,aACL,aAAK,aACL,YAAI,YACJ,eAAO,eACP,gBAAQ,gBACR,cAAM,cACN,UAAE,UACF,aAAK,aACL,YAAI,YACJ,eAAO,eACP,cAAM,aACO;AACf,6BAA6B;AAChB,QAAA,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AACtD,iHAAiH;AACpG,QAAA,cAAc,GAAG,UAAU,CAAA;AAC3B,QAAA,QAAQ,GAAG,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAA;AAE7C,SAAsB,MAAM,CAAC,MAAc;;QACzC,IAAI;YACF,MAAM,YAAI,CAAC,MAAM,CAAC,CAAA;SACnB;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,OAAO,KAAK,CAAA;aACb;YAED,MAAM,GAAG,CAAA;SACV;QAED,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAZD,wBAYC;AAED,SAAsB,WAAW,CAC/B,MAAc,EACd,OAAO,GAAG,KAAK;;QAEf,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,MAAM,YAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,aAAK,CAAC,MAAM,CAAC,CAAA;QAChE,OAAO,KAAK,CAAC,WAAW,EAAE,CAAA;IAC5B,CAAC;CAAA;AAND,kCAMC;AAED;;;GAGG;AACH,SAAgB,QAAQ,CAAC,CAAS;IAChC,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAC1B,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IAED,IAAI,kBAAU,EAAE;QACd,OAAO,CACL,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,8BAA8B;SACxE,CAAA,CAAC,sBAAsB;KACzB;IAED,OAAO,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AAC1B,CAAC;AAbD,4BAaC;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,QAAgB,EAChB,UAAoB;;QAEpB,IAAI,KAAK,GAAyB,SAAS,CAAA;QAC3C,IAAI;YACF,mBAAmB;YACnB,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;aACF;SACF;QACD,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;YAC3B,IAAI,kBAAU,EAAE;gBACd,uCAAuC;gBACvC,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;gBACrD,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,QAAQ,CAAC,EAAE;oBACpE,OAAO,QAAQ,CAAA;iBAChB;aACF;iBAAM;gBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;oBAC3B,OAAO,QAAQ,CAAA;iBAChB;aACF;SACF;QAED,qBAAqB;QACrB,MAAM,gBAAgB,GAAG,QAAQ,CAAA;QACjC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;YAClC,QAAQ,GAAG,gBAAgB,GAAG,SAAS,CAAA;YAEvC,KAAK,GAAG,SAAS,CAAA;YACjB,IAAI;gBACF,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;aAC7B;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;oBACzB,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;iBACF;aACF;YAED,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;gBAC3B,IAAI,kBAAU,EAAE;oBACd,yEAAyE;oBACzE,IAAI;wBACF,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBACxC,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;wBACvD,KAAK,MAAM,UAAU,IAAI,MAAM,eAAO,CAAC,SAAS,CAAC,EAAE;4BACjD,IAAI,SAAS,KAAK,UAAU,CAAC,WAAW,EAAE,EAAE;gCAC1C,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,UAAU,CAAC,CAAA;gCAC3C,MAAK;6BACN;yBACF;qBACF;oBAAC,OAAO,GAAG,EAAE;wBACZ,sCAAsC;wBACtC,OAAO,CAAC,GAAG,CACT,yEAAyE,QAAQ,MAAM,GAAG,EAAE,CAC7F,CAAA;qBACF;oBAED,OAAO,QAAQ,CAAA;iBAChB;qBAAM;oBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;wBAC3B,OAAO,QAAQ,CAAA;qBAChB;iBACF;aACF;SACF;QAED,OAAO,EAAE,CAAA;IACX,CAAC;CAAA;AA5ED,oDA4EC;AAED,SAAS,mBAAmB,CAAC,CAAS;IACpC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IACX,IAAI,kBAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA;KACjC;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAED,qCAAqC;AACrC,6BAA6B;AAC7B,6BAA6B;AAC7B,SAAS,gBAAgB,CAAC,KAAe;IACvC,OAAO,CACL,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC;QACpB,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KA
AK,OAAO,CAAC,MAAM,EAAE,CAAC;QACxD,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,EAAE,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAC1D,CAAA;AACH,CAAC;AAED,qCAAqC;AACrC,SAAgB,UAAU;;IACxB,aAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,mCAAI,SAAS,CAAA;AAC5C,CAAC;AAFD,gCAEC"}
64 node_modules/@actions/io/lib/io.js (generated, vendored)

@@ -30,12 +30,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;
 const assert_1 = require("assert");
-const childProcess = __importStar(require("child_process"));
 const path = __importStar(require("path"));
-const util_1 = require("util");
 const ioUtil = __importStar(require("./io-util"));
-const exec = util_1.promisify(childProcess.exec);
-const execFile = util_1.promisify(childProcess.execFile);
 /**
  * Copies a file or folder.
  * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
@@ -116,61 +112,23 @@ exports.mv = mv;
 function rmRF(inputPath) {
     return __awaiter(this, void 0, void 0, function* () {
         if (ioUtil.IS_WINDOWS) {
-            // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
-            // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
             // Check for invalid characters
             // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
             if (/[*"<>|]/.test(inputPath)) {
                 throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows');
             }
-            try {
-                const cmdPath = ioUtil.getCmdPath();
-                if (yield ioUtil.isDirectory(inputPath, true)) {
-                    yield exec(`${cmdPath} /s /c "rd /s /q "%inputPath%""`, {
-                        env: { inputPath }
-                    });
-                }
-                else {
-                    yield exec(`${cmdPath} /s /c "del /f /a "%inputPath%""`, {
-                        env: { inputPath }
-                    });
-                }
-            }
-            catch (err) {
-                // if you try to delete a file that doesn't exist, desired result is achieved
-                // other errors are valid
-                if (err.code !== 'ENOENT')
-                    throw err;
-            }
-            // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
-            try {
-                yield ioUtil.unlink(inputPath);
-            }
-            catch (err) {
-                // if you try to delete a file that doesn't exist, desired result is achieved
-                // other errors are valid
-                if (err.code !== 'ENOENT')
-                    throw err;
-            }
         }
-        else {
-            let isDir = false;
-            try {
-                isDir = yield ioUtil.isDirectory(inputPath);
-            }
-            catch (err) {
-                // if you try to delete a file that doesn't exist, desired result is achieved
-                // other errors are valid
-                if (err.code !== 'ENOENT')
-                    throw err;
-                return;
-            }
-            if (isDir) {
-                yield execFile(`rm`, [`-rf`, `${inputPath}`]);
-            }
-            else {
-                yield ioUtil.unlink(inputPath);
-            }
+        try {
+            // note if path does not exist, error is silent
+            yield ioUtil.rm(inputPath, {
+                force: true,
+                maxRetries: 3,
+                recursive: true,
+                retryDelay: 300
+            });
+        }
+        catch (err) {
+            throw new Error(`File was unable to be removed ${err}`);
         }
     });
 }
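The rewritten `rmRF` above replaces the `rd`/`del`/`rm -rf` shell-outs with Node's built-in recursive removal. A minimal sketch of the same pattern using `fs.promises.rm` directly; the path is a placeholder and Node.js 14.14 or later is assumed:

```js
// Sketch of the fs.promises.rm call the new rmRF delegates to.
const fs = require('fs').promises;

async function rmRF(inputPath) {
  try {
    // force: true makes a missing path a no-op, mirroring the old ENOENT handling.
    await fs.rm(inputPath, { force: true, maxRetries: 3, recursive: true, retryDelay: 300 });
  }
  catch (err) {
    throw new Error(`File was unable to be removed ${err}`);
  }
}

rmRF('/tmp/example-scratch-dir').catch(console.error);
```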
2 node_modules/@actions/io/lib/io.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
2 node_modules/@actions/io/package.json (generated, vendored)

@@ -1,6 +1,6 @@
 {
   "name": "@actions/io",
-  "version": "1.1.2",
+  "version": "1.1.3",
   "description": "Actions io lib",
   "keywords": [
     "github",
19 node_modules/@fastify/busboy/LICENSE (generated, vendored, Normal file)

@@ -0,0 +1,19 @@
+Copyright Brian White. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
271
node_modules/@fastify/busboy/README.md
generated
vendored
Normal file
271
node_modules/@fastify/busboy/README.md
generated
vendored
Normal file
|
@ -0,0 +1,271 @@
|
||||||
|
# busboy
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
|
||||||
|
[![Build Status](https://github.com/fastify/busboy/workflows/ci/badge.svg)](https://github.com/fastify/busboy/actions)
|
||||||
|
[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master)
|
||||||
|
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
|
||||||
|
[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/nodejs/security-wg/blob/HEAD/processes/responsible_disclosure_template.md)
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
|
||||||
|
[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy)
|
||||||
|
[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy)
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
Description
|
||||||
|
===========
|
||||||
|
|
||||||
|
A Node.js module for parsing incoming HTML form data.
|
||||||
|
|
||||||
|
This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White,
|
||||||
|
aimed at addressing long-standing issues with it.
|
||||||
|
|
||||||
|
Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup):
|
||||||
|
|
||||||
|
| Library | Version | Mean time in nanoseconds (less is better) |
|
||||||
|
|-----------------------|---------|-------------------------------------------|
|
||||||
|
| busboy | 0.3.1 | `340114` |
|
||||||
|
| @fastify/busboy | 1.0.0 | `270984` |
|
||||||
|
|
||||||
|
[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31.
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
============
|
||||||
|
|
||||||
|
* [Node.js](http://nodejs.org/) 10+
|
||||||
|
|
||||||
|
|
||||||
|
Install
|
||||||
|
=======
|
||||||
|
|
||||||
|
npm i @fastify/busboy
|
||||||
|
|
||||||
|
|
||||||
|
Examples
|
||||||
|
========
|
||||||
|
|
||||||
|
* Parsing (multipart) with default options:
|
||||||
|
|
||||||
|
```javascript
const http = require('node:http');
const { inspect } = require('node:util');
const Busboy = require('busboy');

http.createServer((req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
      file.on('data', data => {
        console.log(`File [${fieldname}] got ${data.length} bytes`);
      });
      file.on('end', () => {
        console.log(`File [${fieldname}] Finished`);
      });
    });
    busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
      console.log(`Field [${fieldname}]: value: ${inspect(val)}`);
    });
    busboy.on('finish', () => {
      console.log('Done parsing form!');
      res.writeHead(303, { Connection: 'close', Location: '/' });
      res.end();
    });
    req.pipe(busboy);
  } else if (req.method === 'GET') {
    res.writeHead(200, { Connection: 'close' });
    res.end(`<html><head></head><body>
               <form method="POST" enctype="multipart/form-data">
                 <input type="text" name="textfield"><br>
                 <input type="file" name="filefield"><br>
                 <input type="submit">
               </form>
             </body></html>`);
  }
}).listen(8000, () => {
  console.log('Listening for requests');
});

// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file:
//
// Listening for requests
// File [filefield]: filename: ryan-speaker.jpg, encoding: binary
// File [filefield] got 11971 bytes
// Field [textfield]: value: 'testing! :-)'
// File [filefield] Finished
// Done parsing form!
```

* Save all incoming files to disk:

```javascript
const http = require('node:http');
const path = require('node:path');
const os = require('node:os');
const fs = require('node:fs');

const Busboy = require('busboy');

http.createServer(function(req, res) {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      var saveTo = path.join(os.tmpdir(), path.basename(fieldname));
      file.pipe(fs.createWriteStream(saveTo));
    });
    busboy.on('finish', function() {
      res.writeHead(200, { 'Connection': 'close' });
      res.end("That's all folks!");
    });
    return req.pipe(busboy);
  }
  res.writeHead(404);
  res.end();
}).listen(8000, function() {
  console.log('Listening for requests');
});
```

* Parsing (urlencoded) with default options:

```javascript
const http = require('node:http');
const { inspect } = require('node:util');

const Busboy = require('busboy');

http.createServer(function(req, res) {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      console.log('File [' + fieldname + ']: filename: ' + filename);
      file.on('data', function(data) {
        console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
      });
      file.on('end', function() {
        console.log('File [' + fieldname + '] Finished');
      });
    });
    busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
      console.log('Field [' + fieldname + ']: value: ' + inspect(val));
    });
    busboy.on('finish', function() {
      console.log('Done parsing form!');
      res.writeHead(303, { Connection: 'close', Location: '/' });
      res.end();
    });
    req.pipe(busboy);
  } else if (req.method === 'GET') {
    res.writeHead(200, { Connection: 'close' });
    res.end('<html><head></head><body>\
               <form method="POST">\
                 <input type="text" name="textfield"><br />\
                 <select name="selectfield">\
                   <option value="1">1</option>\
                   <option value="10">10</option>\
                   <option value="100">100</option>\
                   <option value="9001">9001</option>\
                 </select><br />\
                 <input type="checkbox" name="checkfield">Node.js rules!<br />\
                 <input type="submit">\
               </form>\
             </body></html>');
  }
}).listen(8000, function() {
  console.log('Listening for requests');
});

// Example output:
//
// Listening for requests
// Field [textfield]: value: 'testing! :-)'
// Field [selectfield]: value: '9001'
// Field [checkfield]: value: 'on'
// Done parsing form!
```

API
===

_Busboy_ is a _Writable_ stream

Busboy (special) events
-----------------------

* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream.
  * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits).
  * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
  * The property `bytesRead` informs about the number of bytes that have been read so far.

* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.

* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.

* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.

* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.

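To make the `file` event contract above concrete, here is a minimal sketch (not part of the original README) that discards file contents while still consuming each stream, and reacts to the 'limit' event and the `truncated`/`bytesRead` properties. It uses the same `require('busboy')` style as the examples above; the `fileSize` value is an arbitrary illustration.

```javascript
const http = require('node:http');
const Busboy = require('busboy');

http.createServer((req, res) => {
  if (req.method !== 'POST') { res.writeHead(404); return res.end(); }
  const busboy = new Busboy({
    headers: req.headers,
    limits: { fileSize: 1024 * 1024 } // example: cap each file at 1 MiB
  });
  busboy.on('file', (fieldname, file) => {
    // React to truncation caused by the fileSize limit.
    file.on('limit', () => console.log(`File [${fieldname}] hit the fileSize limit`));
    file.on('end', () => {
      console.log(`File [${fieldname}]: ${file.bytesRead} bytes read, truncated: ${file.truncated}`);
    });
    // Always consume the stream, even when discarding the contents,
    // otherwise 'finish' never fires on the Busboy instance.
    file.resume();
  });
  busboy.on('finish', () => { res.writeHead(200, { Connection: 'close' }); res.end('ok'); });
  req.pipe(busboy);
}).listen(8000);
```
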
Busboy methods
--------------

* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance.

  * The constructor takes the following valid `config` settings:

    * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.

    * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false).

    * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).

    * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default).

    * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8').

    * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false).

    * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has the following parameters:

      * fieldName - __string__ The name of the field.

      * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream`.

      * fileName - __string__ The name of a file supplied by the part.

      (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`)

    * **limits** - _object_ - Various limits on incoming data. Valid properties are:

      * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes).

      * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes).

      * **fields** - _integer_ - Max number of non-file fields (Default: Infinity).

      * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity).

      * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity).

      * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity).

      * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse (Default: 2000).

      * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header (Default: 81920).

  * The constructor can throw errors:

    * **Busboy expected an options-Object.** - Busboy expected an Object as the first parameter.

    * **Busboy expected an options-Object with headers-attribute.** - The first parameter is missing a headers-attribute.

    * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue that could arise from silently falling back to the Busboy defaults. A common source of this Error is using environment variables directly without converting them to the type number.

    * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse.

    * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all.
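The configuration options above can be combined; the following illustrative sketch (not from the original README) overrides `isPartAFile`, sets several `limits`, and registers listeners for the corresponding limit events. All concrete values are arbitrary, and `headers` would normally come from an incoming HTTP request rather than being hard-coded.

```javascript
const Busboy = require('busboy');

// Hypothetical headers; in practice use req.headers from an incoming request.
const headers = { 'content-type': 'multipart/form-data; boundary=----example' };

const busboy = new Busboy({
  headers,
  defCharset: 'utf8',
  preservePath: false,
  // Treat every part that carries a filename as a file, regardless of content type.
  isPartAFile: (fieldName, contentType, fileName) => fileName !== undefined,
  limits: {
    fields: 10,                // at most 10 non-file fields
    files: 2,                  // at most 2 file fields
    fileSize: 5 * 1024 * 1024, // 5 MiB per file
    parts: 12                  // fields + files combined
  }
});

busboy.on('fieldsLimit', () => console.log('fields limit reached'));
busboy.on('filesLimit', () => console.log('files limit reached'));
busboy.on('partsLimit', () => console.log('parts limit reached'));
```
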
19
node_modules/@fastify/busboy/deps/dicer/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright Brian White. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
207
node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
generated
vendored
Normal file
|
@ -0,0 +1,207 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const WritableStream = require('node:stream').Writable
|
||||||
|
const inherits = require('node:util').inherits
|
||||||
|
|
||||||
|
const StreamSearch = require('../../streamsearch/sbmh')
|
||||||
|
|
||||||
|
const PartStream = require('./PartStream')
|
||||||
|
const HeaderParser = require('./HeaderParser')
|
||||||
|
|
||||||
|
const DASH = 45
|
||||||
|
const B_ONEDASH = Buffer.from('-')
|
||||||
|
const B_CRLF = Buffer.from('\r\n')
|
||||||
|
const EMPTY_FN = function () {}
|
||||||
|
|
||||||
|
function Dicer (cfg) {
|
||||||
|
if (!(this instanceof Dicer)) { return new Dicer(cfg) }
|
||||||
|
WritableStream.call(this, cfg)
|
||||||
|
|
||||||
|
if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
|
||||||
|
|
||||||
|
if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
|
||||||
|
|
||||||
|
this._headerFirst = cfg.headerFirst
|
||||||
|
|
||||||
|
this._dashes = 0
|
||||||
|
this._parts = 0
|
||||||
|
this._finished = false
|
||||||
|
this._realFinish = false
|
||||||
|
this._isPreamble = true
|
||||||
|
this._justMatched = false
|
||||||
|
this._firstWrite = true
|
||||||
|
this._inHeader = true
|
||||||
|
this._part = undefined
|
||||||
|
this._cb = undefined
|
||||||
|
this._ignoreData = false
|
||||||
|
this._partOpts = { highWaterMark: cfg.partHwm }
|
||||||
|
this._pause = false
|
||||||
|
|
||||||
|
const self = this
|
||||||
|
this._hparser = new HeaderParser(cfg)
|
||||||
|
this._hparser.on('header', function (header) {
|
||||||
|
self._inHeader = false
|
||||||
|
self._part.emit('header', header)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
inherits(Dicer, WritableStream)
|
||||||
|
|
||||||
|
Dicer.prototype.emit = function (ev) {
|
||||||
|
if (ev === 'finish' && !this._realFinish) {
|
||||||
|
if (!this._finished) {
|
||||||
|
const self = this
|
||||||
|
process.nextTick(function () {
|
||||||
|
self.emit('error', new Error('Unexpected end of multipart data'))
|
||||||
|
if (self._part && !self._ignoreData) {
|
||||||
|
const type = (self._isPreamble ? 'Preamble' : 'Part')
|
||||||
|
self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
|
||||||
|
self._part.push(null)
|
||||||
|
process.nextTick(function () {
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} else { WritableStream.prototype.emit.apply(this, arguments) }
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._write = function (data, encoding, cb) {
|
||||||
|
// ignore unexpected data (e.g. extra trailer data after finished)
|
||||||
|
if (!this._hparser && !this._bparser) { return cb() }
|
||||||
|
|
||||||
|
if (this._headerFirst && this._isPreamble) {
|
||||||
|
if (!this._part) {
|
||||||
|
this._part = new PartStream(this._partOpts)
|
||||||
|
if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
|
||||||
|
}
|
||||||
|
const r = this._hparser.push(data)
|
||||||
|
if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
|
||||||
|
}
|
||||||
|
|
||||||
|
// allows for "easier" testing
|
||||||
|
if (this._firstWrite) {
|
||||||
|
this._bparser.push(B_CRLF)
|
||||||
|
this._firstWrite = false
|
||||||
|
}
|
||||||
|
|
||||||
|
this._bparser.push(data)
|
||||||
|
|
||||||
|
if (this._pause) { this._cb = cb } else { cb() }
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype.reset = function () {
|
||||||
|
this._part = undefined
|
||||||
|
this._bparser = undefined
|
||||||
|
this._hparser = undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype.setBoundary = function (boundary) {
|
||||||
|
const self = this
|
||||||
|
this._bparser = new StreamSearch('\r\n--' + boundary)
|
||||||
|
this._bparser.on('info', function (isMatch, data, start, end) {
|
||||||
|
self._oninfo(isMatch, data, start, end)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._ignore = function () {
|
||||||
|
if (this._part && !this._ignoreData) {
|
||||||
|
this._ignoreData = true
|
||||||
|
this._part.on('error', EMPTY_FN)
|
||||||
|
// we must perform some kind of read on the stream even though we are
|
||||||
|
// ignoring the data, otherwise node's Readable stream will not emit 'end'
|
||||||
|
// after pushing null to the stream
|
||||||
|
this._part.resume()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._oninfo = function (isMatch, data, start, end) {
|
||||||
|
let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
|
||||||
|
|
||||||
|
if (!this._part && this._justMatched && data) {
|
||||||
|
while (this._dashes < 2 && (start + i) < end) {
|
||||||
|
if (data[start + i] === DASH) {
|
||||||
|
++i
|
||||||
|
++this._dashes
|
||||||
|
} else {
|
||||||
|
if (this._dashes) { buf = B_ONEDASH }
|
||||||
|
this._dashes = 0
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this._dashes === 2) {
|
||||||
|
if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
|
||||||
|
this.reset()
|
||||||
|
this._finished = true
|
||||||
|
// no more parts will be added
|
||||||
|
if (self._parts === 0) {
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this._dashes) { return }
|
||||||
|
}
|
||||||
|
if (this._justMatched) { this._justMatched = false }
|
||||||
|
if (!this._part) {
|
||||||
|
this._part = new PartStream(this._partOpts)
|
||||||
|
this._part._read = function (n) {
|
||||||
|
self._unpause()
|
||||||
|
}
|
||||||
|
if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
|
||||||
|
if (!this._isPreamble) { this._inHeader = true }
|
||||||
|
}
|
||||||
|
if (data && start < end && !this._ignoreData) {
|
||||||
|
if (this._isPreamble || !this._inHeader) {
|
||||||
|
if (buf) { shouldWriteMore = this._part.push(buf) }
|
||||||
|
shouldWriteMore = this._part.push(data.slice(start, end))
|
||||||
|
if (!shouldWriteMore) { this._pause = true }
|
||||||
|
} else if (!this._isPreamble && this._inHeader) {
|
||||||
|
if (buf) { this._hparser.push(buf) }
|
||||||
|
r = this._hparser.push(data.slice(start, end))
|
||||||
|
if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (isMatch) {
|
||||||
|
this._hparser.reset()
|
||||||
|
if (this._isPreamble) { this._isPreamble = false } else {
|
||||||
|
if (start !== end) {
|
||||||
|
++this._parts
|
||||||
|
this._part.on('end', function () {
|
||||||
|
if (--self._parts === 0) {
|
||||||
|
if (self._finished) {
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
} else {
|
||||||
|
self._unpause()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this._part.push(null)
|
||||||
|
this._part = undefined
|
||||||
|
this._ignoreData = false
|
||||||
|
this._justMatched = true
|
||||||
|
this._dashes = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._unpause = function () {
|
||||||
|
if (!this._pause) { return }
|
||||||
|
|
||||||
|
this._pause = false
|
||||||
|
if (this._cb) {
|
||||||
|
const cb = this._cb
|
||||||
|
this._cb = undefined
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Dicer
|
100
node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
generated
vendored
Normal file
|
@ -0,0 +1,100 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const EventEmitter = require('node:events').EventEmitter
|
||||||
|
const inherits = require('node:util').inherits
|
||||||
|
const getLimit = require('../../../lib/utils/getLimit')
|
||||||
|
|
||||||
|
const StreamSearch = require('../../streamsearch/sbmh')
|
||||||
|
|
||||||
|
const B_DCRLF = Buffer.from('\r\n\r\n')
|
||||||
|
const RE_CRLF = /\r\n/g
|
||||||
|
const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
|
||||||
|
|
||||||
|
function HeaderParser (cfg) {
|
||||||
|
EventEmitter.call(this)
|
||||||
|
|
||||||
|
cfg = cfg || {}
|
||||||
|
const self = this
|
||||||
|
this.nread = 0
|
||||||
|
this.maxed = false
|
||||||
|
this.npairs = 0
|
||||||
|
this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
|
||||||
|
this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
|
||||||
|
this.buffer = ''
|
||||||
|
this.header = {}
|
||||||
|
this.finished = false
|
||||||
|
this.ss = new StreamSearch(B_DCRLF)
|
||||||
|
this.ss.on('info', function (isMatch, data, start, end) {
|
||||||
|
if (data && !self.maxed) {
|
||||||
|
if (self.nread + end - start >= self.maxHeaderSize) {
|
||||||
|
end = self.maxHeaderSize - self.nread + start
|
||||||
|
self.nread = self.maxHeaderSize
|
||||||
|
self.maxed = true
|
||||||
|
} else { self.nread += (end - start) }
|
||||||
|
|
||||||
|
self.buffer += data.toString('binary', start, end)
|
||||||
|
}
|
||||||
|
if (isMatch) { self._finish() }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
inherits(HeaderParser, EventEmitter)
|
||||||
|
|
||||||
|
HeaderParser.prototype.push = function (data) {
|
||||||
|
const r = this.ss.push(data)
|
||||||
|
if (this.finished) { return r }
|
||||||
|
}
|
||||||
|
|
||||||
|
HeaderParser.prototype.reset = function () {
|
||||||
|
this.finished = false
|
||||||
|
this.buffer = ''
|
||||||
|
this.header = {}
|
||||||
|
this.ss.reset()
|
||||||
|
}
|
||||||
|
|
||||||
|
HeaderParser.prototype._finish = function () {
|
||||||
|
if (this.buffer) { this._parseHeader() }
|
||||||
|
this.ss.matches = this.ss.maxMatches
|
||||||
|
const header = this.header
|
||||||
|
this.header = {}
|
||||||
|
this.buffer = ''
|
||||||
|
this.finished = true
|
||||||
|
this.nread = this.npairs = 0
|
||||||
|
this.maxed = false
|
||||||
|
this.emit('header', header)
|
||||||
|
}
|
||||||
|
|
||||||
|
HeaderParser.prototype._parseHeader = function () {
|
||||||
|
if (this.npairs === this.maxHeaderPairs) { return }
|
||||||
|
|
||||||
|
const lines = this.buffer.split(RE_CRLF)
|
||||||
|
const len = lines.length
|
||||||
|
let m, h
|
||||||
|
|
||||||
|
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
|
||||||
|
if (lines[i].length === 0) { continue }
|
||||||
|
if (lines[i][0] === '\t' || lines[i][0] === ' ') {
|
||||||
|
// folded header content
|
||||||
|
// RFC2822 says to just remove the CRLF and not the whitespace following
|
||||||
|
// it, so we follow the RFC and include the leading whitespace ...
|
||||||
|
if (h) {
|
||||||
|
this.header[h][this.header[h].length - 1] += lines[i]
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const posColon = lines[i].indexOf(':')
|
||||||
|
if (
|
||||||
|
posColon === -1 ||
|
||||||
|
posColon === 0
|
||||||
|
) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
m = RE_HDR.exec(lines[i])
|
||||||
|
h = m[1].toLowerCase()
|
||||||
|
this.header[h] = this.header[h] || []
|
||||||
|
this.header[h].push((m[2] || ''))
|
||||||
|
if (++this.npairs === this.maxHeaderPairs) { break }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = HeaderParser
|
13
node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
'use strict'

const inherits = require('node:util').inherits
const ReadableStream = require('node:stream').Readable

function PartStream (opts) {
  ReadableStream.call(this, opts)
}
inherits(PartStream, ReadableStream)

PartStream.prototype._read = function (n) {}

module.exports = PartStream
164
node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,164 @@
|
||||||
|
// Type definitions for dicer 0.2
|
||||||
|
// Project: https://github.com/mscdex/dicer
|
||||||
|
// Definitions by: BendingBender <https://github.com/BendingBender>
|
||||||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||||
|
// TypeScript Version: 2.2
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import stream = require("stream");
|
||||||
|
|
||||||
|
// tslint:disable:unified-signatures
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A very fast streaming multipart parser for node.js.
|
||||||
|
* Dicer is a WritableStream
|
||||||
|
*
|
||||||
|
* Dicer (special) events:
|
||||||
|
* - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended.
|
||||||
|
* - on('part', (stream: PartStream)) - Emitted when a new part has been found.
|
||||||
|
* - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored).
|
||||||
|
* - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too).
|
||||||
|
*/
|
||||||
|
export class Dicer extends stream.Writable {
|
||||||
|
/**
|
||||||
|
* Creates and returns a new Dicer instance with the following valid config settings:
|
||||||
|
*
|
||||||
|
* @param config The configuration to use
|
||||||
|
*/
|
||||||
|
constructor(config: Dicer.Config);
|
||||||
|
/**
|
||||||
|
* Sets the boundary to use for parsing and performs some initialization needed for parsing.
|
||||||
|
* You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header.
|
||||||
|
*
|
||||||
|
* @param boundary The boundary to use
|
||||||
|
*/
|
||||||
|
setBoundary(boundary: string): void;
|
||||||
|
addListener(event: "finish", listener: () => void): this;
|
||||||
|
addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
addListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
addListener(event: "close", listener: () => void): this;
|
||||||
|
addListener(event: "drain", listener: () => void): this;
|
||||||
|
addListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
addListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
addListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
on(event: "finish", listener: () => void): this;
|
||||||
|
on(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
on(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
on(event: "close", listener: () => void): this;
|
||||||
|
on(event: "drain", listener: () => void): this;
|
||||||
|
on(event: "error", listener: (err: Error) => void): this;
|
||||||
|
on(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
on(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
on(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
once(event: "finish", listener: () => void): this;
|
||||||
|
once(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
once(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
once(event: "close", listener: () => void): this;
|
||||||
|
once(event: "drain", listener: () => void): this;
|
||||||
|
once(event: "error", listener: (err: Error) => void): this;
|
||||||
|
once(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
once(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
once(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependListener(event: "finish", listener: () => void): this;
|
||||||
|
prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
prependListener(event: "close", listener: () => void): this;
|
||||||
|
prependListener(event: "drain", listener: () => void): this;
|
||||||
|
prependListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependOnceListener(event: "finish", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
prependOnceListener(event: "close", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "drain", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
removeListener(event: "finish", listener: () => void): this;
|
||||||
|
removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
removeListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
removeListener(event: "close", listener: () => void): this;
|
||||||
|
removeListener(event: "drain", listener: () => void): this;
|
||||||
|
removeListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
removeListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
removeListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare namespace Dicer {
|
||||||
|
interface Config {
|
||||||
|
/**
|
||||||
|
* This is the boundary used to detect the beginning of a new part.
|
||||||
|
*/
|
||||||
|
boundary?: string | undefined;
|
||||||
|
/**
|
||||||
|
* If true, preamble header parsing will be performed first.
|
||||||
|
*/
|
||||||
|
headerFirst?: boolean | undefined;
|
||||||
|
/**
|
||||||
|
* The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http).
|
||||||
|
*/
|
||||||
|
maxHeaderPairs?: number | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PartStream is a _ReadableStream_
|
||||||
|
*
|
||||||
|
* PartStream (special) events:
|
||||||
|
* - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values.
|
||||||
|
*/
|
||||||
|
interface PartStream extends stream.Readable {
|
||||||
|
addListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
addListener(event: "close", listener: () => void): this;
|
||||||
|
addListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
addListener(event: "end", listener: () => void): this;
|
||||||
|
addListener(event: "readable", listener: () => void): this;
|
||||||
|
addListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
on(event: "header", listener: (header: object) => void): this;
|
||||||
|
on(event: "close", listener: () => void): this;
|
||||||
|
on(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
on(event: "end", listener: () => void): this;
|
||||||
|
on(event: "readable", listener: () => void): this;
|
||||||
|
on(event: "error", listener: (err: Error) => void): this;
|
||||||
|
on(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
once(event: "header", listener: (header: object) => void): this;
|
||||||
|
once(event: "close", listener: () => void): this;
|
||||||
|
once(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
once(event: "end", listener: () => void): this;
|
||||||
|
once(event: "readable", listener: () => void): this;
|
||||||
|
once(event: "error", listener: (err: Error) => void): this;
|
||||||
|
once(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
prependListener(event: "close", listener: () => void): this;
|
||||||
|
prependListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
prependListener(event: "end", listener: () => void): this;
|
||||||
|
prependListener(event: "readable", listener: () => void): this;
|
||||||
|
prependListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependOnceListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
prependOnceListener(event: "close", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
prependOnceListener(event: "end", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "readable", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
removeListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
removeListener(event: "close", listener: () => void): this;
|
||||||
|
removeListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
removeListener(event: "end", listener: () => void): this;
|
||||||
|
removeListener(event: "readable", listener: () => void): this;
|
||||||
|
removeListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
removeListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
}
|
||||||
|
}
|
228
node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
generated
vendored
Normal file
|
@ -0,0 +1,228 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copyright Brian White. All rights reserved.
|
||||||
|
*
|
||||||
|
* @see https://github.com/mscdex/streamsearch
|
||||||
|
*
|
||||||
|
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
* of this software and associated documentation files (the "Software"), to
|
||||||
|
* deal in the Software without restriction, including without limitation the
|
||||||
|
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||||
|
* sell copies of the Software, and to permit persons to whom the Software is
|
||||||
|
* furnished to do so, subject to the following conditions:
|
||||||
|
*
|
||||||
|
* The above copyright notice and this permission notice shall be included in
|
||||||
|
* all copies or substantial portions of the Software.
|
||||||
|
*
|
||||||
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||||
|
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||||
|
* IN THE SOFTWARE.
|
||||||
|
*
|
||||||
|
* Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
|
||||||
|
* by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
|
||||||
|
*/
|
||||||
|
const EventEmitter = require('node:events').EventEmitter
|
||||||
|
const inherits = require('node:util').inherits
|
||||||
|
|
||||||
|
function SBMH (needle) {
|
||||||
|
if (typeof needle === 'string') {
|
||||||
|
needle = Buffer.from(needle)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Buffer.isBuffer(needle)) {
|
||||||
|
throw new TypeError('The needle has to be a String or a Buffer.')
|
||||||
|
}
|
||||||
|
|
||||||
|
const needleLength = needle.length
|
||||||
|
|
||||||
|
if (needleLength === 0) {
|
||||||
|
throw new Error('The needle cannot be an empty String/Buffer.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (needleLength > 256) {
|
||||||
|
throw new Error('The needle cannot have a length bigger than 256.')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.maxMatches = Infinity
|
||||||
|
this.matches = 0
|
||||||
|
|
||||||
|
this._occ = new Array(256)
|
||||||
|
.fill(needleLength) // Initialize occurrence table.
|
||||||
|
this._lookbehind_size = 0
|
||||||
|
this._needle = needle
|
||||||
|
this._bufpos = 0
|
||||||
|
|
||||||
|
this._lookbehind = Buffer.alloc(needleLength)
|
||||||
|
|
||||||
|
// Populate occurrence table with analysis of the needle,
|
||||||
|
// ignoring last letter.
|
||||||
|
for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
|
||||||
|
this._occ[needle[i]] = needleLength - 1 - i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
inherits(SBMH, EventEmitter)
|
||||||
|
|
||||||
|
SBMH.prototype.reset = function () {
|
||||||
|
this._lookbehind_size = 0
|
||||||
|
this.matches = 0
|
||||||
|
this._bufpos = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
SBMH.prototype.push = function (chunk, pos) {
|
||||||
|
if (!Buffer.isBuffer(chunk)) {
|
||||||
|
chunk = Buffer.from(chunk, 'binary')
|
||||||
|
}
|
||||||
|
const chlen = chunk.length
|
||||||
|
this._bufpos = pos || 0
|
||||||
|
let r
|
||||||
|
while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
SBMH.prototype._sbmh_feed = function (data) {
|
||||||
|
const len = data.length
|
||||||
|
const needle = this._needle
|
||||||
|
const needleLength = needle.length
|
||||||
|
const lastNeedleChar = needle[needleLength - 1]
|
||||||
|
|
||||||
|
// Positive: points to a position in `data`
|
||||||
|
// pos == 3 points to data[3]
|
||||||
|
// Negative: points to a position in the lookbehind buffer
|
||||||
|
// pos == -2 points to lookbehind[lookbehind_size - 2]
|
||||||
|
let pos = -this._lookbehind_size
|
||||||
|
let ch
|
||||||
|
|
||||||
|
if (pos < 0) {
|
||||||
|
// Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
|
||||||
|
// search with character lookup code that considers both the
|
||||||
|
// lookbehind buffer and the current round's haystack data.
|
||||||
|
//
|
||||||
|
// Loop until
|
||||||
|
// there is a match.
|
||||||
|
// or until
|
||||||
|
// we've moved past the position that requires the
|
||||||
|
// lookbehind buffer. In this case we switch to the
|
||||||
|
// optimized loop.
|
||||||
|
// or until
|
||||||
|
// the character to look at lies outside the haystack.
|
||||||
|
while (pos < 0 && pos <= len - needleLength) {
|
||||||
|
ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
|
||||||
|
|
||||||
|
if (
|
||||||
|
ch === lastNeedleChar &&
|
||||||
|
this._sbmh_memcmp(data, pos, needleLength - 1)
|
||||||
|
) {
|
||||||
|
this._lookbehind_size = 0
|
||||||
|
++this.matches
|
||||||
|
this.emit('info', true)
|
||||||
|
|
||||||
|
return (this._bufpos = pos + needleLength)
|
||||||
|
}
|
||||||
|
pos += this._occ[ch]
|
||||||
|
}
|
||||||
|
|
||||||
|
// No match.
|
||||||
|
|
||||||
|
if (pos < 0) {
|
||||||
|
// There's too few data for Boyer-Moore-Horspool to run,
|
||||||
|
// so let's use a different algorithm to skip as much as
|
||||||
|
// we can.
|
||||||
|
// Forward pos until
|
||||||
|
// the trailing part of lookbehind + data
|
||||||
|
// looks like the beginning of the needle
|
||||||
|
// or until
|
||||||
|
// pos == 0
|
||||||
|
while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (pos >= 0) {
|
||||||
|
// Discard lookbehind buffer.
|
||||||
|
this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
|
||||||
|
this._lookbehind_size = 0
|
||||||
|
} else {
|
||||||
|
// Cut off part of the lookbehind buffer that has
|
||||||
|
// been processed and append the entire haystack
|
||||||
|
// into it.
|
||||||
|
const bytesToCutOff = this._lookbehind_size + pos
|
||||||
|
if (bytesToCutOff > 0) {
|
||||||
|
// The cut off data is guaranteed not to contain the needle.
|
||||||
|
this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
|
||||||
|
}
|
||||||
|
|
||||||
|
this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
|
||||||
|
this._lookbehind_size - bytesToCutOff)
|
||||||
|
this._lookbehind_size -= bytesToCutOff
|
||||||
|
|
||||||
|
data.copy(this._lookbehind, this._lookbehind_size)
|
||||||
|
this._lookbehind_size += len
|
||||||
|
|
||||||
|
this._bufpos = len
|
||||||
|
return len
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pos += (pos >= 0) * this._bufpos
|
||||||
|
|
||||||
|
// Lookbehind buffer is now empty. We only need to check if the
|
||||||
|
// needle is in the haystack.
|
||||||
|
if (data.indexOf(needle, pos) !== -1) {
|
||||||
|
pos = data.indexOf(needle, pos)
|
||||||
|
++this.matches
|
||||||
|
if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
|
||||||
|
|
||||||
|
return (this._bufpos = pos + needleLength)
|
||||||
|
} else {
|
||||||
|
pos = len - needleLength
|
||||||
|
}
|
||||||
|
|
||||||
|
// There was no match. If there's trailing haystack data that we cannot
|
||||||
|
// match yet using the Boyer-Moore-Horspool algorithm (because the trailing
|
||||||
|
// data is less than the needle size) then match using a modified
|
||||||
|
// algorithm that starts matching from the beginning instead of the end.
|
||||||
|
// Whatever trailing data is left after running this algorithm is added to
|
||||||
|
// the lookbehind buffer.
|
||||||
|
while (
|
||||||
|
pos < len &&
|
||||||
|
(
|
||||||
|
data[pos] !== needle[0] ||
|
||||||
|
(
|
||||||
|
(Buffer.compare(
|
||||||
|
data.subarray(pos, pos + len - pos),
|
||||||
|
needle.subarray(0, len - pos)
|
||||||
|
) !== 0)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
) {
|
||||||
|
++pos
|
||||||
|
}
|
||||||
|
if (pos < len) {
|
||||||
|
data.copy(this._lookbehind, 0, pos, pos + (len - pos))
|
||||||
|
this._lookbehind_size = len - pos
|
||||||
|
}
|
||||||
|
|
||||||
|
// Everything until pos is guaranteed not to contain needle data.
|
||||||
|
if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
|
||||||
|
|
||||||
|
this._bufpos = len
|
||||||
|
return len
|
||||||
|
}
|
||||||
|
|
||||||
|
SBMH.prototype._sbmh_lookup_char = function (data, pos) {
|
||||||
|
return (pos < 0)
|
||||||
|
? this._lookbehind[this._lookbehind_size + pos]
|
||||||
|
: data[pos]
|
||||||
|
}
|
||||||
|
|
||||||
|
SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
|
||||||
|
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
|
||||||
|
if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = SBMH
|
196
node_modules/@fastify/busboy/lib/main.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,196 @@
|
||||||
|
// Definitions by: Jacob Baskin <https://github.com/jacobbaskin>
|
||||||
|
// BendingBender <https://github.com/BendingBender>
|
||||||
|
// Igor Savin <https://github.com/kibertoad>
|
||||||
|
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import * as http from 'http';
|
||||||
|
import { Readable, Writable } from 'stream';
|
||||||
|
export { Dicer } from "../deps/dicer/lib/dicer";
|
||||||
|
|
||||||
|
export const Busboy: BusboyConstructor;
|
||||||
|
export default Busboy;
|
||||||
|
|
||||||
|
export interface BusboyConfig {
|
||||||
|
/**
|
||||||
|
* These are the HTTP headers of the incoming request, which are used by individual parsers.
|
||||||
|
*/
|
||||||
|
headers: BusboyHeaders;
|
||||||
|
/**
|
||||||
|
* `highWaterMark` to use for this Busboy instance.
|
||||||
|
* @default WritableStream default.
|
||||||
|
*/
|
||||||
|
highWaterMark?: number | undefined;
|
||||||
|
/**
|
||||||
|
* highWaterMark to use for file streams.
|
||||||
|
* @default ReadableStream default.
|
||||||
|
*/
|
||||||
|
fileHwm?: number | undefined;
|
||||||
|
/**
|
||||||
|
* Default character set to use when one isn't defined.
|
||||||
|
* @default 'utf8'
|
||||||
|
*/
|
||||||
|
defCharset?: string | undefined;
|
||||||
|
/**
|
||||||
|
* Detect if a Part is a file.
|
||||||
|
*
|
||||||
|
* By default a file is detected if contentType
|
||||||
|
* is application/octet-stream or fileName is not
|
||||||
|
* undefined.
|
||||||
|
*
|
||||||
|
* Modify this to handle e.g. Blobs.
|
||||||
|
*/
|
||||||
|
isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean;
|
||||||
|
/**
|
||||||
|
* If paths in the multipart 'filename' field shall be preserved.
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
preservePath?: boolean | undefined;
|
||||||
|
/**
|
||||||
|
* Various limits on incoming data.
|
||||||
|
*/
|
||||||
|
limits?:
|
||||||
|
| {
|
||||||
|
/**
|
||||||
|
* Max field name size (in bytes)
|
||||||
|
* @default 100 bytes
|
||||||
|
*/
|
||||||
|
fieldNameSize?: number | undefined;
|
||||||
|
/**
|
||||||
|
* Max field value size (in bytes)
|
||||||
|
* @default 1MB
|
||||||
|
*/
|
||||||
|
fieldSize?: number | undefined;
|
||||||
|
/**
|
||||||
|
* Max number of non-file fields
|
||||||
|
* @default Infinity
|
||||||
|
*/
|
||||||
|
fields?: number | undefined;
|
||||||
|
/**
|
||||||
|
* For multipart forms, the max file size (in bytes)
|
||||||
|
* @default Infinity
|
||||||
|
*/
|
||||||
|
fileSize?: number | undefined;
|
||||||
|
/**
|
||||||
|
* For multipart forms, the max number of file fields
|
||||||
|
* @default Infinity
|
||||||
|
*/
|
||||||
|
files?: number | undefined;
|
||||||
|
/**
|
||||||
|
* For multipart forms, the max number of parts (fields + files)
|
||||||
|
* @default Infinity
|
||||||
|
*/
|
||||||
|
parts?: number | undefined;
|
||||||
|
/**
|
||||||
|
* For multipart forms, the max number of header key=>value pairs to parse
|
||||||
|
* @default 2000
|
||||||
|
*/
|
||||||
|
headerPairs?: number | undefined;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For multipart forms, the max size of a header part
|
||||||
|
* @default 81920
|
||||||
|
*/
|
||||||
|
headerSize?: number | undefined;
|
||||||
|
}
|
||||||
|
| undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders;
|
||||||
|
|
||||||
|
export interface BusboyFileStream extends
|
||||||
|
Readable {
|
||||||
|
|
||||||
|
truncated: boolean;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The number of bytes that have been read so far.
|
||||||
|
*/
|
||||||
|
bytesRead: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Busboy extends Writable {
|
||||||
|
addListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
|
||||||
|
on<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
on(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
|
||||||
|
once<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
once(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
|
||||||
|
removeListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
|
||||||
|
off<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
off(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
|
||||||
|
prependListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
|
||||||
|
prependOnceListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
|
||||||
|
|
||||||
|
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BusboyEvents {
|
||||||
|
/**
|
||||||
|
* Emitted for each new file form field found.
|
||||||
|
*
|
||||||
|
* * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the
|
||||||
|
* file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents),
|
||||||
|
* otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any**
|
||||||
|
* incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically
|
||||||
|
* and safely discarded (these discarded files do still count towards `files` and `parts` limits).
|
||||||
|
* * If a configured file size limit was reached, `stream` will both have a boolean property `truncated`
|
||||||
|
* (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
|
||||||
|
*
|
||||||
|
* @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream.
|
||||||
|
* @param listener.mimeType Contains the 'Content-Type' value for the file stream.
|
||||||
|
*/
|
||||||
|
file: (
|
||||||
|
fieldname: string,
|
||||||
|
stream: BusboyFileStream,
|
||||||
|
filename: string,
|
||||||
|
transferEncoding: string,
|
||||||
|
mimeType: string,
|
||||||
|
) => void;
|
||||||
|
/**
|
||||||
|
* Emitted for each new non-file field found.
|
||||||
|
*/
|
||||||
|
field: (
|
||||||
|
fieldname: string,
|
||||||
|
value: string,
|
||||||
|
fieldnameTruncated: boolean,
|
||||||
|
valueTruncated: boolean,
|
||||||
|
transferEncoding: string,
|
||||||
|
mimeType: string,
|
||||||
|
) => void;
|
||||||
|
finish: () => void;
|
||||||
|
/**
|
||||||
|
* Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
|
||||||
|
*/
|
||||||
|
partsLimit: () => void;
|
||||||
|
/**
|
||||||
|
* Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
|
||||||
|
*/
|
||||||
|
filesLimit: () => void;
|
||||||
|
/**
|
||||||
|
* Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
|
||||||
|
*/
|
||||||
|
fieldsLimit: () => void;
|
||||||
|
error: (error: unknown) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BusboyConstructor {
|
||||||
|
(options: BusboyConfig): Busboy;
|
||||||
|
|
||||||
|
new(options: BusboyConfig): Busboy;
|
||||||
|
}
|
||||||
|
|
85
node_modules/@fastify/busboy/lib/main.js
generated
vendored
Normal file
|
@ -0,0 +1,85 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const WritableStream = require('node:stream').Writable
|
||||||
|
const { inherits } = require('node:util')
|
||||||
|
const Dicer = require('../deps/dicer/lib/Dicer')
|
||||||
|
|
||||||
|
const MultipartParser = require('./types/multipart')
|
||||||
|
const UrlencodedParser = require('./types/urlencoded')
|
||||||
|
const parseParams = require('./utils/parseParams')
|
||||||
|
|
||||||
|
function Busboy (opts) {
|
||||||
|
if (!(this instanceof Busboy)) { return new Busboy(opts) }
|
||||||
|
|
||||||
|
if (typeof opts !== 'object') {
|
||||||
|
throw new TypeError('Busboy expected an options-Object.')
|
||||||
|
}
|
||||||
|
if (typeof opts.headers !== 'object') {
|
||||||
|
throw new TypeError('Busboy expected an options-Object with headers-attribute.')
|
||||||
|
}
|
||||||
|
if (typeof opts.headers['content-type'] !== 'string') {
|
||||||
|
throw new TypeError('Missing Content-Type-header.')
|
||||||
|
}
|
||||||
|
|
||||||
|
const {
|
||||||
|
headers,
|
||||||
|
...streamOptions
|
||||||
|
} = opts
|
||||||
|
|
||||||
|
this.opts = {
|
||||||
|
autoDestroy: false,
|
||||||
|
...streamOptions
|
||||||
|
}
|
||||||
|
WritableStream.call(this, this.opts)
|
||||||
|
|
||||||
|
this._done = false
|
||||||
|
this._parser = this.getParserByHeaders(headers)
|
||||||
|
this._finished = false
|
||||||
|
}
|
||||||
|
inherits(Busboy, WritableStream)
|
||||||
|
|
||||||
|
Busboy.prototype.emit = function (ev) {
|
||||||
|
if (ev === 'finish') {
|
||||||
|
if (!this._done) {
|
||||||
|
this._parser?.end()
|
||||||
|
return
|
||||||
|
} else if (this._finished) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
this._finished = true
|
||||||
|
}
|
||||||
|
WritableStream.prototype.emit.apply(this, arguments)
|
||||||
|
}
|
||||||
|
|
||||||
|
Busboy.prototype.getParserByHeaders = function (headers) {
|
||||||
|
const parsed = parseParams(headers['content-type'])
|
||||||
|
|
||||||
|
const cfg = {
|
||||||
|
defCharset: this.opts.defCharset,
|
||||||
|
fileHwm: this.opts.fileHwm,
|
||||||
|
headers,
|
||||||
|
highWaterMark: this.opts.highWaterMark,
|
||||||
|
isPartAFile: this.opts.isPartAFile,
|
||||||
|
limits: this.opts.limits,
|
||||||
|
parsedConType: parsed,
|
||||||
|
preservePath: this.opts.preservePath
|
||||||
|
}
|
||||||
|
|
||||||
|
if (MultipartParser.detect.test(parsed[0])) {
|
||||||
|
return new MultipartParser(this, cfg)
|
||||||
|
}
|
||||||
|
if (UrlencodedParser.detect.test(parsed[0])) {
|
||||||
|
return new UrlencodedParser(this, cfg)
|
||||||
|
}
|
||||||
|
throw new Error('Unsupported Content-Type.')
|
||||||
|
}
|
||||||
|
|
||||||
|
Busboy.prototype._write = function (chunk, encoding, cb) {
|
||||||
|
this._parser.write(chunk, cb)
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Busboy
|
||||||
|
module.exports.default = Busboy
|
||||||
|
module.exports.Busboy = Busboy
|
||||||
|
|
||||||
|
module.exports.Dicer = Dicer
|
306
node_modules/@fastify/busboy/lib/types/multipart.js
generated
vendored
Normal file
|
@ -0,0 +1,306 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
// TODO:
|
||||||
|
// * support 1 nested multipart level
|
||||||
|
// (see second multipart example here:
|
||||||
|
// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
|
||||||
|
// * support limits.fieldNameSize
|
||||||
|
// -- this will require modifications to utils.parseParams
|
||||||
|
|
||||||
|
const { Readable } = require('node:stream')
|
||||||
|
const { inherits } = require('node:util')
|
||||||
|
|
||||||
|
const Dicer = require('../../deps/dicer/lib/Dicer')
|
||||||
|
|
||||||
|
const parseParams = require('../utils/parseParams')
|
||||||
|
const decodeText = require('../utils/decodeText')
|
||||||
|
const basename = require('../utils/basename')
|
||||||
|
const getLimit = require('../utils/getLimit')
|
||||||
|
|
||||||
|
const RE_BOUNDARY = /^boundary$/i
|
||||||
|
const RE_FIELD = /^form-data$/i
|
||||||
|
const RE_CHARSET = /^charset$/i
|
||||||
|
const RE_FILENAME = /^filename$/i
|
||||||
|
const RE_NAME = /^name$/i
|
||||||
|
|
||||||
|
Multipart.detect = /^multipart\/form-data/i
|
||||||
|
function Multipart (boy, cfg) {
|
||||||
|
let i
|
||||||
|
let len
|
||||||
|
const self = this
|
||||||
|
let boundary
|
||||||
|
const limits = cfg.limits
|
||||||
|
const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
|
||||||
|
const parsedConType = cfg.parsedConType || []
|
||||||
|
const defCharset = cfg.defCharset || 'utf8'
|
||||||
|
const preservePath = cfg.preservePath
|
||||||
|
const fileOpts = { highWaterMark: cfg.fileHwm }
|
||||||
|
|
||||||
|
for (i = 0, len = parsedConType.length; i < len; ++i) {
|
||||||
|
if (Array.isArray(parsedConType[i]) &&
|
||||||
|
RE_BOUNDARY.test(parsedConType[i][0])) {
|
||||||
|
boundary = parsedConType[i][1]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkFinished () {
|
||||||
|
if (nends === 0 && finished && !boy._done) {
|
||||||
|
finished = false
|
||||||
|
self.end()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
|
||||||
|
|
||||||
|
const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
|
||||||
|
const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
|
||||||
|
const filesLimit = getLimit(limits, 'files', Infinity)
|
||||||
|
const fieldsLimit = getLimit(limits, 'fields', Infinity)
|
||||||
|
const partsLimit = getLimit(limits, 'parts', Infinity)
|
||||||
|
const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
|
||||||
|
const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
|
||||||
|
|
||||||
|
let nfiles = 0
|
||||||
|
let nfields = 0
|
||||||
|
let nends = 0
|
||||||
|
let curFile
|
||||||
|
let curField
|
||||||
|
let finished = false
|
||||||
|
|
||||||
|
this._needDrain = false
|
||||||
|
this._pause = false
|
||||||
|
this._cb = undefined
|
||||||
|
this._nparts = 0
|
||||||
|
this._boy = boy
|
||||||
|
|
||||||
|
const parserCfg = {
|
||||||
|
boundary,
|
||||||
|
maxHeaderPairs: headerPairsLimit,
|
||||||
|
maxHeaderSize: headerSizeLimit,
|
||||||
|
partHwm: fileOpts.highWaterMark,
|
||||||
|
highWaterMark: cfg.highWaterMark
|
||||||
|
}
|
||||||
|
|
||||||
|
this.parser = new Dicer(parserCfg)
|
||||||
|
this.parser.on('drain', function () {
|
||||||
|
self._needDrain = false
|
||||||
|
if (self._cb && !self._pause) {
|
||||||
|
const cb = self._cb
|
||||||
|
self._cb = undefined
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
}).on('part', function onPart (part) {
|
||||||
|
if (++self._nparts > partsLimit) {
|
||||||
|
self.parser.removeListener('part', onPart)
|
||||||
|
self.parser.on('part', skipPart)
|
||||||
|
boy.hitPartsLimit = true
|
||||||
|
boy.emit('partsLimit')
|
||||||
|
return skipPart(part)
|
||||||
|
}
|
||||||
|
|
||||||
|
// hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
|
||||||
|
// us emit 'end' early since we know the part has ended if we are already
|
||||||
|
// seeing the next part
|
||||||
|
if (curField) {
|
||||||
|
const field = curField
|
||||||
|
field.emit('end')
|
||||||
|
field.removeAllListeners('end')
|
||||||
|
}
|
||||||
|
|
||||||
|
part.on('header', function (header) {
|
||||||
|
let contype
|
||||||
|
let fieldname
|
||||||
|
let parsed
|
||||||
|
let charset
|
||||||
|
let encoding
|
||||||
|
let filename
|
||||||
|
let nsize = 0
|
||||||
|
|
||||||
|
if (header['content-type']) {
|
||||||
|
parsed = parseParams(header['content-type'][0])
|
||||||
|
if (parsed[0]) {
|
||||||
|
contype = parsed[0].toLowerCase()
|
||||||
|
for (i = 0, len = parsed.length; i < len; ++i) {
|
||||||
|
if (RE_CHARSET.test(parsed[i][0])) {
|
||||||
|
charset = parsed[i][1].toLowerCase()
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (contype === undefined) { contype = 'text/plain' }
|
||||||
|
if (charset === undefined) { charset = defCharset }
|
||||||
|
|
||||||
|
if (header['content-disposition']) {
|
||||||
|
parsed = parseParams(header['content-disposition'][0])
|
||||||
|
if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
|
||||||
|
for (i = 0, len = parsed.length; i < len; ++i) {
|
||||||
|
if (RE_NAME.test(parsed[i][0])) {
|
||||||
|
fieldname = parsed[i][1]
|
||||||
|
} else if (RE_FILENAME.test(parsed[i][0])) {
|
||||||
|
filename = parsed[i][1]
|
||||||
|
if (!preservePath) { filename = basename(filename) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else { return skipPart(part) }
|
||||||
|
|
||||||
|
if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
|
||||||
|
|
||||||
|
let onData,
|
||||||
|
onEnd
|
||||||
|
|
||||||
|
if (isPartAFile(fieldname, contype, filename)) {
|
||||||
|
// file/binary field
|
||||||
|
if (nfiles === filesLimit) {
|
||||||
|
if (!boy.hitFilesLimit) {
|
||||||
|
boy.hitFilesLimit = true
|
||||||
|
boy.emit('filesLimit')
|
||||||
|
}
|
||||||
|
return skipPart(part)
|
||||||
|
}
|
||||||
|
|
||||||
|
++nfiles
|
||||||
|
|
||||||
|
if (!boy._events.file) {
|
||||||
|
self.parser._ignore()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
++nends
|
||||||
|
const file = new FileStream(fileOpts)
|
||||||
|
curFile = file
|
||||||
|
file.on('end', function () {
|
||||||
|
--nends
|
||||||
|
self._pause = false
|
||||||
|
checkFinished()
|
||||||
|
if (self._cb && !self._needDrain) {
|
||||||
|
const cb = self._cb
|
||||||
|
self._cb = undefined
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
file._read = function (n) {
|
||||||
|
if (!self._pause) { return }
|
||||||
|
self._pause = false
|
||||||
|
if (self._cb && !self._needDrain) {
|
||||||
|
const cb = self._cb
|
||||||
|
self._cb = undefined
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
boy.emit('file', fieldname, file, filename, encoding, contype)
|
||||||
|
|
||||||
|
onData = function (data) {
|
||||||
|
if ((nsize += data.length) > fileSizeLimit) {
|
||||||
|
const extralen = fileSizeLimit - nsize + data.length
|
||||||
|
if (extralen > 0) { file.push(data.slice(0, extralen)) }
|
||||||
|
file.truncated = true
|
||||||
|
file.bytesRead = fileSizeLimit
|
||||||
|
part.removeAllListeners('data')
|
||||||
|
file.emit('limit')
|
||||||
|
return
|
||||||
|
} else if (!file.push(data)) { self._pause = true }
|
||||||
|
|
||||||
|
file.bytesRead = nsize
|
||||||
|
}
|
||||||
|
|
||||||
|
onEnd = function () {
|
||||||
|
curFile = undefined
|
||||||
|
file.push(null)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// non-file field
|
||||||
|
if (nfields === fieldsLimit) {
|
||||||
|
if (!boy.hitFieldsLimit) {
|
||||||
|
boy.hitFieldsLimit = true
|
||||||
|
boy.emit('fieldsLimit')
|
||||||
|
}
|
||||||
|
return skipPart(part)
|
||||||
|
}
|
||||||
|
|
||||||
|
++nfields
|
||||||
|
++nends
|
||||||
|
let buffer = ''
|
||||||
|
let truncated = false
|
||||||
|
curField = part
|
||||||
|
|
||||||
|
onData = function (data) {
|
||||||
|
if ((nsize += data.length) > fieldSizeLimit) {
|
||||||
|
const extralen = (fieldSizeLimit - (nsize - data.length))
|
||||||
|
buffer += data.toString('binary', 0, extralen)
|
||||||
|
truncated = true
|
||||||
|
part.removeAllListeners('data')
|
||||||
|
} else { buffer += data.toString('binary') }
|
||||||
|
}
|
||||||
|
|
||||||
|
onEnd = function () {
|
||||||
|
curField = undefined
|
||||||
|
if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
|
||||||
|
boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
|
||||||
|
--nends
|
||||||
|
checkFinished()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
|
||||||
|
broken. Streams2/streams3 is a huge black box of confusion, but
|
||||||
|
somehow overriding the sync state seems to fix things again (and still
|
||||||
|
seems to work for previous node versions).
|
||||||
|
*/
|
||||||
|
part._readableState.sync = false
|
||||||
|
|
||||||
|
part.on('data', onData)
|
||||||
|
part.on('end', onEnd)
|
||||||
|
}).on('error', function (err) {
|
||||||
|
if (curFile) { curFile.emit('error', err) }
|
||||||
|
})
|
||||||
|
}).on('error', function (err) {
|
||||||
|
boy.emit('error', err)
|
||||||
|
}).on('finish', function () {
|
||||||
|
finished = true
|
||||||
|
checkFinished()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
Multipart.prototype.write = function (chunk, cb) {
|
||||||
|
const r = this.parser.write(chunk)
|
||||||
|
if (r && !this._pause) {
|
||||||
|
cb()
|
||||||
|
} else {
|
||||||
|
this._needDrain = !r
|
||||||
|
this._cb = cb
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Multipart.prototype.end = function () {
|
||||||
|
const self = this
|
||||||
|
|
||||||
|
if (self.parser.writable) {
|
||||||
|
self.parser.end()
|
||||||
|
} else if (!self._boy._done) {
|
||||||
|
process.nextTick(function () {
|
||||||
|
self._boy._done = true
|
||||||
|
self._boy.emit('finish')
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipPart (part) {
|
||||||
|
part.resume()
|
||||||
|
}
|
||||||
|
|
||||||
|
function FileStream (opts) {
|
||||||
|
Readable.call(this, opts)
|
||||||
|
|
||||||
|
this.bytesRead = 0
|
||||||
|
|
||||||
|
this.truncated = false
|
||||||
|
}
|
||||||
|
|
||||||
|
inherits(FileStream, Readable)
|
||||||
|
|
||||||
|
FileStream.prototype._read = function (n) {}
|
||||||
|
|
||||||
|
module.exports = Multipart
node_modules/@fastify/busboy/lib/types/urlencoded.js (generated, vendored, new file, 190 lines)
@@ -0,0 +1,190 @@
'use strict'
|
||||||
|
|
||||||
|
const Decoder = require('../utils/Decoder')
|
||||||
|
const decodeText = require('../utils/decodeText')
|
||||||
|
const getLimit = require('../utils/getLimit')
|
||||||
|
|
||||||
|
const RE_CHARSET = /^charset$/i
|
||||||
|
|
||||||
|
UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
|
||||||
|
function UrlEncoded (boy, cfg) {
|
||||||
|
const limits = cfg.limits
|
||||||
|
const parsedConType = cfg.parsedConType
|
||||||
|
this.boy = boy
|
||||||
|
|
||||||
|
this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
|
||||||
|
this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
|
||||||
|
this.fieldsLimit = getLimit(limits, 'fields', Infinity)
|
||||||
|
|
||||||
|
let charset
|
||||||
|
for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
|
||||||
|
if (Array.isArray(parsedConType[i]) &&
|
||||||
|
RE_CHARSET.test(parsedConType[i][0])) {
|
||||||
|
charset = parsedConType[i][1].toLowerCase()
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
|
||||||
|
|
||||||
|
this.decoder = new Decoder()
|
||||||
|
this.charset = charset
|
||||||
|
this._fields = 0
|
||||||
|
this._state = 'key'
|
||||||
|
this._checkingBytes = true
|
||||||
|
this._bytesKey = 0
|
||||||
|
this._bytesVal = 0
|
||||||
|
this._key = ''
|
||||||
|
this._val = ''
|
||||||
|
this._keyTrunc = false
|
||||||
|
this._valTrunc = false
|
||||||
|
this._hitLimit = false
|
||||||
|
}
|
||||||
|
|
||||||
|
UrlEncoded.prototype.write = function (data, cb) {
|
||||||
|
if (this._fields === this.fieldsLimit) {
|
||||||
|
if (!this.boy.hitFieldsLimit) {
|
||||||
|
this.boy.hitFieldsLimit = true
|
||||||
|
this.boy.emit('fieldsLimit')
|
||||||
|
}
|
||||||
|
return cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
let idxeq; let idxamp; let i; let p = 0; const len = data.length
|
||||||
|
|
||||||
|
while (p < len) {
|
||||||
|
if (this._state === 'key') {
|
||||||
|
idxeq = idxamp = undefined
|
||||||
|
for (i = p; i < len; ++i) {
|
||||||
|
if (!this._checkingBytes) { ++p }
|
||||||
|
if (data[i] === 0x3D/* = */) {
|
||||||
|
idxeq = i
|
||||||
|
break
|
||||||
|
} else if (data[i] === 0x26/* & */) {
|
||||||
|
idxamp = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
|
||||||
|
this._hitLimit = true
|
||||||
|
break
|
||||||
|
} else if (this._checkingBytes) { ++this._bytesKey }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (idxeq !== undefined) {
|
||||||
|
// key with assignment
|
||||||
|
if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
|
||||||
|
this._state = 'val'
|
||||||
|
|
||||||
|
this._hitLimit = false
|
||||||
|
this._checkingBytes = true
|
||||||
|
this._val = ''
|
||||||
|
this._bytesVal = 0
|
||||||
|
this._valTrunc = false
|
||||||
|
this.decoder.reset()
|
||||||
|
|
||||||
|
p = idxeq + 1
|
||||||
|
} else if (idxamp !== undefined) {
|
||||||
|
// key with no assignment
|
||||||
|
++this._fields
|
||||||
|
let key; const keyTrunc = this._keyTrunc
|
||||||
|
if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
|
||||||
|
|
||||||
|
this._hitLimit = false
|
||||||
|
this._checkingBytes = true
|
||||||
|
this._key = ''
|
||||||
|
this._bytesKey = 0
|
||||||
|
this._keyTrunc = false
|
||||||
|
this.decoder.reset()
|
||||||
|
|
||||||
|
if (key.length) {
|
||||||
|
this.boy.emit('field', decodeText(key, 'binary', this.charset),
|
||||||
|
'',
|
||||||
|
keyTrunc,
|
||||||
|
false)
|
||||||
|
}
|
||||||
|
|
||||||
|
p = idxamp + 1
|
||||||
|
if (this._fields === this.fieldsLimit) { return cb() }
|
||||||
|
} else if (this._hitLimit) {
|
||||||
|
// we may not have hit the actual limit if there are encoded bytes...
|
||||||
|
if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
|
||||||
|
p = i
|
||||||
|
if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
|
||||||
|
// yep, we actually did hit the limit
|
||||||
|
this._checkingBytes = false
|
||||||
|
this._keyTrunc = true
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
|
||||||
|
p = len
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
idxamp = undefined
|
||||||
|
for (i = p; i < len; ++i) {
|
||||||
|
if (!this._checkingBytes) { ++p }
|
||||||
|
if (data[i] === 0x26/* & */) {
|
||||||
|
idxamp = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
|
||||||
|
this._hitLimit = true
|
||||||
|
break
|
||||||
|
} else if (this._checkingBytes) { ++this._bytesVal }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (idxamp !== undefined) {
|
||||||
|
++this._fields
|
||||||
|
if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
|
||||||
|
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
|
||||||
|
decodeText(this._val, 'binary', this.charset),
|
||||||
|
this._keyTrunc,
|
||||||
|
this._valTrunc)
|
||||||
|
this._state = 'key'
|
||||||
|
|
||||||
|
this._hitLimit = false
|
||||||
|
this._checkingBytes = true
|
||||||
|
this._key = ''
|
||||||
|
this._bytesKey = 0
|
||||||
|
this._keyTrunc = false
|
||||||
|
this.decoder.reset()
|
||||||
|
|
||||||
|
p = idxamp + 1
|
||||||
|
if (this._fields === this.fieldsLimit) { return cb() }
|
||||||
|
} else if (this._hitLimit) {
|
||||||
|
// we may not have hit the actual limit if there are encoded bytes...
|
||||||
|
if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
|
||||||
|
p = i
|
||||||
|
if ((this._val === '' && this.fieldSizeLimit === 0) ||
|
||||||
|
(this._bytesVal = this._val.length) === this.fieldSizeLimit) {
|
||||||
|
// yep, we actually did hit the limit
|
||||||
|
this._checkingBytes = false
|
||||||
|
this._valTrunc = true
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
|
||||||
|
p = len
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
UrlEncoded.prototype.end = function () {
|
||||||
|
if (this.boy._done) { return }
|
||||||
|
|
||||||
|
if (this._state === 'key' && this._key.length > 0) {
|
||||||
|
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
|
||||||
|
'',
|
||||||
|
this._keyTrunc,
|
||||||
|
false)
|
||||||
|
} else if (this._state === 'val') {
|
||||||
|
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
|
||||||
|
decodeText(this._val, 'binary', this.charset),
|
||||||
|
this._keyTrunc,
|
||||||
|
this._valTrunc)
|
||||||
|
}
|
||||||
|
this.boy._done = true
|
||||||
|
this.boy.emit('finish')
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = UrlEncoded
node_modules/@fastify/busboy/lib/utils/Decoder.js (generated, vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
'use strict'

const RE_PLUS = /\+/g

const HEX = [
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]

function Decoder () {
  this.buffer = undefined
}
Decoder.prototype.write = function (str) {
  // Replace '+' with ' ' before decoding
  str = str.replace(RE_PLUS, ' ')
  let res = ''
  let i = 0; let p = 0; const len = str.length
  for (; i < len; ++i) {
    if (this.buffer !== undefined) {
      if (!HEX[str.charCodeAt(i)]) {
        res += '%' + this.buffer
        this.buffer = undefined
        --i // retry character
      } else {
        this.buffer += str[i]
        ++p
        if (this.buffer.length === 2) {
          res += String.fromCharCode(parseInt(this.buffer, 16))
          this.buffer = undefined
        }
      }
    } else if (str[i] === '%') {
      if (i > p) {
        res += str.substring(p, i)
        p = i
      }
      this.buffer = ''
      ++p
    }
  }
  if (p < len && this.buffer === undefined) { res += str.substring(p) }
  return res
}
Decoder.prototype.reset = function () {
  this.buffer = undefined
}

module.exports = Decoder
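A small illustrative session with this incremental percent-decoder; the input strings are made up, and the relative require path is assumed:

```js
const Decoder = require('./Decoder') // assumed relative path

const dec = new Decoder()
// '+' becomes a space; %XX pairs are decoded even when split across writes.
console.log(dec.write('a+b%2')) // 'a b'  (the incomplete '%2' stays buffered)
console.log(dec.write('0c'))    // ' c'   (buffer completes to %20, i.e. a space)
dec.reset()                     // drops any partially buffered escape
```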
node_modules/@fastify/busboy/lib/utils/basename.js (generated, vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
'use strict'

module.exports = function basename (path) {
  if (typeof path !== 'string') { return '' }
  for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
    switch (path.charCodeAt(i)) {
      case 0x2F: // '/'
      case 0x5C: // '\'
        path = path.slice(i + 1)
        return (path === '..' || path === '.' ? '' : path)
    }
  }
  return (path === '..' || path === '.' ? '' : path)
}
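Illustrative calls (inputs are made up) showing that the helper keeps only the last path segment and rejects `.` and `..`:

```js
const basename = require('./basename') // assumed relative path

basename('C:\\uploads\\photo.png')  // 'photo.png'
basename('/tmp/uploads/report.pdf') // 'report.pdf'
basename('..')                      // ''  ('.' and '..' are rejected)
```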
node_modules/@fastify/busboy/lib/utils/decodeText.js (generated, vendored, new file, 114 lines)
@@ -0,0 +1,114 @@
'use strict'
|
||||||
|
|
||||||
|
// Node has always utf-8
|
||||||
|
const utf8Decoder = new TextDecoder('utf-8')
|
||||||
|
const textDecoders = new Map([
|
||||||
|
['utf-8', utf8Decoder],
|
||||||
|
['utf8', utf8Decoder]
|
||||||
|
])
|
||||||
|
|
||||||
|
function getDecoder (charset) {
|
||||||
|
let lc
|
||||||
|
while (true) {
|
||||||
|
switch (charset) {
|
||||||
|
case 'utf-8':
|
||||||
|
case 'utf8':
|
||||||
|
return decoders.utf8
|
||||||
|
case 'latin1':
|
||||||
|
case 'ascii': // TODO: Make these a separate, strict decoder?
|
||||||
|
case 'us-ascii':
|
||||||
|
case 'iso-8859-1':
|
||||||
|
case 'iso8859-1':
|
||||||
|
case 'iso88591':
|
||||||
|
case 'iso_8859-1':
|
||||||
|
case 'windows-1252':
|
||||||
|
case 'iso_8859-1:1987':
|
||||||
|
case 'cp1252':
|
||||||
|
case 'x-cp1252':
|
||||||
|
return decoders.latin1
|
||||||
|
case 'utf16le':
|
||||||
|
case 'utf-16le':
|
||||||
|
case 'ucs2':
|
||||||
|
case 'ucs-2':
|
||||||
|
return decoders.utf16le
|
||||||
|
case 'base64':
|
||||||
|
return decoders.base64
|
||||||
|
default:
|
||||||
|
if (lc === undefined) {
|
||||||
|
lc = true
|
||||||
|
charset = charset.toLowerCase()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return decoders.other.bind(charset)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const decoders = {
|
||||||
|
utf8: (data, sourceEncoding) => {
|
||||||
|
if (data.length === 0) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
data = Buffer.from(data, sourceEncoding)
|
||||||
|
}
|
||||||
|
return data.utf8Slice(0, data.length)
|
||||||
|
},
|
||||||
|
|
||||||
|
latin1: (data, sourceEncoding) => {
|
||||||
|
if (data.length === 0) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
return data.latin1Slice(0, data.length)
|
||||||
|
},
|
||||||
|
|
||||||
|
utf16le: (data, sourceEncoding) => {
|
||||||
|
if (data.length === 0) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
data = Buffer.from(data, sourceEncoding)
|
||||||
|
}
|
||||||
|
return data.ucs2Slice(0, data.length)
|
||||||
|
},
|
||||||
|
|
||||||
|
base64: (data, sourceEncoding) => {
|
||||||
|
if (data.length === 0) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
data = Buffer.from(data, sourceEncoding)
|
||||||
|
}
|
||||||
|
return data.base64Slice(0, data.length)
|
||||||
|
},
|
||||||
|
|
||||||
|
other: (data, sourceEncoding) => {
|
||||||
|
if (data.length === 0) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
data = Buffer.from(data, sourceEncoding)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (textDecoders.has(this.toString())) {
|
||||||
|
try {
|
||||||
|
return textDecoders.get(this).decode(data)
|
||||||
|
} catch (e) { }
|
||||||
|
}
|
||||||
|
return typeof data === 'string'
|
||||||
|
? data
|
||||||
|
: data.toString()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function decodeText (text, sourceEncoding, destEncoding) {
|
||||||
|
if (text) {
|
||||||
|
return getDecoder(destEncoding)(text, sourceEncoding)
|
||||||
|
}
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = decodeText
node_modules/@fastify/busboy/lib/utils/getLimit.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
'use strict'

module.exports = function getLimit (limits, name, defaultLimit) {
  if (
    !limits ||
    limits[name] === undefined ||
    limits[name] === null
  ) { return defaultLimit }

  if (
    typeof limits[name] !== 'number' ||
    isNaN(limits[name])
  ) { throw new TypeError('Limit ' + name + ' is not a valid number') }

  return limits[name]
}
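Illustrative calls (the limit values are made up):

```js
const getLimit = require('./getLimit') // assumed relative path

getLimit({ fileSize: 1024 }, 'fileSize', Infinity) // 1024
getLimit(undefined, 'files', Infinity)             // Infinity (falls back to the default)
getLimit({ files: 'ten' }, 'files', Infinity)      // throws TypeError: Limit files is not a valid number
```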
node_modules/@fastify/busboy/lib/utils/parseParams.js (generated, vendored, new file, 196 lines)
@@ -0,0 +1,196 @@
/* eslint-disable object-property-newline */
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const decodeText = require('./decodeText')
|
||||||
|
|
||||||
|
const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g
|
||||||
|
|
||||||
|
const EncodedLookup = {
|
||||||
|
'%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04',
|
||||||
|
'%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09',
|
||||||
|
'%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c',
|
||||||
|
'%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e',
|
||||||
|
'%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12',
|
||||||
|
'%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17',
|
||||||
|
'%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b',
|
||||||
|
'%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d',
|
||||||
|
'%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20',
|
||||||
|
'%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25',
|
||||||
|
'%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a',
|
||||||
|
'%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c',
|
||||||
|
'%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f',
|
||||||
|
'%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33',
|
||||||
|
'%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38',
|
||||||
|
'%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b',
|
||||||
|
'%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e',
|
||||||
|
'%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41',
|
||||||
|
'%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46',
|
||||||
|
'%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a',
|
||||||
|
'%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d',
|
||||||
|
'%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f',
|
||||||
|
'%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54',
|
||||||
|
'%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59',
|
||||||
|
'%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c',
|
||||||
|
'%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e',
|
||||||
|
'%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62',
|
||||||
|
'%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67',
|
||||||
|
'%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b',
|
||||||
|
'%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d',
|
||||||
|
'%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70',
|
||||||
|
'%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75',
|
||||||
|
'%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a',
|
||||||
|
'%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c',
|
||||||
|
'%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f',
|
||||||
|
'%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83',
|
||||||
|
'%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88',
|
||||||
|
'%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b',
|
||||||
|
'%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e',
|
||||||
|
'%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91',
|
||||||
|
'%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96',
|
||||||
|
'%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a',
|
||||||
|
'%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d',
|
||||||
|
'%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f',
|
||||||
|
'%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2',
|
||||||
|
'%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4',
|
||||||
|
'%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7',
|
||||||
|
'%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9',
|
||||||
|
'%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab',
|
||||||
|
'%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac',
|
||||||
|
'%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad',
|
||||||
|
'%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae',
|
||||||
|
'%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0',
|
||||||
|
'%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2',
|
||||||
|
'%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5',
|
||||||
|
'%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7',
|
||||||
|
'%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba',
|
||||||
|
'%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb',
|
||||||
|
'%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc',
|
||||||
|
'%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd',
|
||||||
|
'%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf',
|
||||||
|
'%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0',
|
||||||
|
'%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3',
|
||||||
|
'%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5',
|
||||||
|
'%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8',
|
||||||
|
'%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca',
|
||||||
|
'%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb',
|
||||||
|
'%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc',
|
||||||
|
'%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce',
|
||||||
|
'%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf',
|
||||||
|
'%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1',
|
||||||
|
'%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3',
|
||||||
|
'%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6',
|
||||||
|
'%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8',
|
||||||
|
'%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda',
|
||||||
|
'%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb',
|
||||||
|
'%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd',
|
||||||
|
'%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde',
|
||||||
|
'%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf',
|
||||||
|
'%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1',
|
||||||
|
'%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4',
|
||||||
|
'%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6',
|
||||||
|
'%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9',
|
||||||
|
'%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea',
|
||||||
|
'%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec',
|
||||||
|
'%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed',
|
||||||
|
'%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee',
|
||||||
|
'%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef',
|
||||||
|
'%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2',
|
||||||
|
'%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4',
|
||||||
|
'%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7',
|
||||||
|
'%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9',
|
||||||
|
'%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb',
|
||||||
|
'%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc',
|
||||||
|
'%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd',
|
||||||
|
'%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe',
|
||||||
|
'%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff'
|
||||||
|
}
|
||||||
|
|
||||||
|
function encodedReplacer (match) {
|
||||||
|
return EncodedLookup[match]
|
||||||
|
}
|
||||||
|
|
||||||
|
const STATE_KEY = 0
|
||||||
|
const STATE_VALUE = 1
|
||||||
|
const STATE_CHARSET = 2
|
||||||
|
const STATE_LANG = 3
|
||||||
|
|
||||||
|
function parseParams (str) {
|
||||||
|
const res = []
|
||||||
|
let state = STATE_KEY
|
||||||
|
let charset = ''
|
||||||
|
let inquote = false
|
||||||
|
let escaping = false
|
||||||
|
let p = 0
|
||||||
|
let tmp = ''
|
||||||
|
const len = str.length
|
||||||
|
|
||||||
|
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
|
||||||
|
const char = str[i]
|
||||||
|
if (char === '\\' && inquote) {
|
||||||
|
if (escaping) { escaping = false } else {
|
||||||
|
escaping = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
} else if (char === '"') {
|
||||||
|
if (!escaping) {
|
||||||
|
if (inquote) {
|
||||||
|
inquote = false
|
||||||
|
state = STATE_KEY
|
||||||
|
} else { inquote = true }
|
||||||
|
continue
|
||||||
|
} else { escaping = false }
|
||||||
|
} else {
|
||||||
|
if (escaping && inquote) { tmp += '\\' }
|
||||||
|
escaping = false
|
||||||
|
if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
|
||||||
|
if (state === STATE_CHARSET) {
|
||||||
|
state = STATE_LANG
|
||||||
|
charset = tmp.substring(1)
|
||||||
|
} else { state = STATE_VALUE }
|
||||||
|
tmp = ''
|
||||||
|
continue
|
||||||
|
} else if (state === STATE_KEY &&
|
||||||
|
(char === '*' || char === '=') &&
|
||||||
|
res.length) {
|
||||||
|
state = char === '*'
|
||||||
|
? STATE_CHARSET
|
||||||
|
: STATE_VALUE
|
||||||
|
res[p] = [tmp, undefined]
|
||||||
|
tmp = ''
|
||||||
|
continue
|
||||||
|
} else if (!inquote && char === ';') {
|
||||||
|
state = STATE_KEY
|
||||||
|
if (charset) {
|
||||||
|
if (tmp.length) {
|
||||||
|
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
|
||||||
|
'binary',
|
||||||
|
charset)
|
||||||
|
}
|
||||||
|
charset = ''
|
||||||
|
} else if (tmp.length) {
|
||||||
|
tmp = decodeText(tmp, 'binary', 'utf8')
|
||||||
|
}
|
||||||
|
if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
|
||||||
|
tmp = ''
|
||||||
|
++p
|
||||||
|
continue
|
||||||
|
} else if (!inquote && (char === ' ' || char === '\t')) { continue }
|
||||||
|
}
|
||||||
|
tmp += char
|
||||||
|
}
|
||||||
|
if (charset && tmp.length) {
|
||||||
|
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
|
||||||
|
'binary',
|
||||||
|
charset)
|
||||||
|
} else if (tmp) {
|
||||||
|
tmp = decodeText(tmp, 'binary', 'utf8')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (res[p] === undefined) {
|
||||||
|
if (tmp) { res[p] = tmp }
|
||||||
|
} else { res[p][1] = tmp }
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = parseParams
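To make the return shape concrete, two illustrative calls; the header values are made up, and the shape matches how `getParserByHeaders` in lib/main.js consumes the result (`parsed[0]` is the media type, later entries are `[key, value]` pairs):

```js
const parseParams = require('./parseParams') // assumed relative path

parseParams('multipart/form-data; boundary=----abc')
// => [ 'multipart/form-data', [ 'boundary', '----abc' ] ]

parseParams('text/plain; charset=utf-8')
// => [ 'text/plain', [ 'charset', 'utf-8' ] ]
```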
node_modules/@fastify/busboy/package.json (generated, vendored, new file, 86 lines)
@@ -0,0 +1,86 @@
{
|
||||||
|
"name": "@fastify/busboy",
|
||||||
|
"version": "2.1.0",
|
||||||
|
"private": false,
|
||||||
|
"author": "Brian White <mscdex@mscdex.net>",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Igor Savin",
|
||||||
|
"email": "kibertoad@gmail.com",
|
||||||
|
"url": "https://github.com/kibertoad"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Aras Abbasi",
|
||||||
|
"email": "aras.abbasi@gmail.com",
|
||||||
|
"url": "https://github.com/uzlopak"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "A streaming parser for HTML form data for node.js",
|
||||||
|
"main": "lib/main",
|
||||||
|
"type": "commonjs",
|
||||||
|
"types": "lib/main.d.ts",
|
||||||
|
"scripts": {
|
||||||
|
"bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify",
|
||||||
|
"bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js",
|
||||||
|
"coveralls": "nyc report --reporter=lcov",
|
||||||
|
"lint": "npm run lint:standard",
|
||||||
|
"lint:everything": "npm run lint && npm run test:types",
|
||||||
|
"lint:fix": "standard --fix",
|
||||||
|
"lint:standard": "standard --verbose | snazzy",
|
||||||
|
"test:mocha": "tap",
|
||||||
|
"test:types": "tsd",
|
||||||
|
"test:coverage": "nyc npm run test",
|
||||||
|
"test": "npm run test:mocha"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^20.1.0",
|
||||||
|
"busboy": "^1.0.0",
|
||||||
|
"photofinish": "^1.8.0",
|
||||||
|
"snazzy": "^9.0.0",
|
||||||
|
"standard": "^17.0.0",
|
||||||
|
"tap": "^16.3.8",
|
||||||
|
"tinybench": "^2.5.1",
|
||||||
|
"tsd": "^0.29.0",
|
||||||
|
"typescript": "^5.0.2"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"uploads",
|
||||||
|
"forms",
|
||||||
|
"multipart",
|
||||||
|
"form-data"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/fastify/busboy.git"
|
||||||
|
},
|
||||||
|
"tsd": {
|
||||||
|
"directory": "test/types",
|
||||||
|
"compilerOptions": {
|
||||||
|
"esModuleInterop": false,
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "ES2017"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"standard": {
|
||||||
|
"globals": [
|
||||||
|
"describe",
|
||||||
|
"it"
|
||||||
|
],
|
||||||
|
"ignore": [
|
||||||
|
"bench"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"README.md",
|
||||||
|
"LICENSE",
|
||||||
|
"lib/*",
|
||||||
|
"deps/encoding/*",
|
||||||
|
"deps/dicer/lib",
|
||||||
|
"deps/streamsearch/",
|
||||||
|
"deps/dicer/LICENSE"
|
||||||
|
]
|
||||||
|
}
node_modules/undici/LICENSE (generated, vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Matteo Collina and Undici contributors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
node_modules/undici/README.md (generated, vendored, new file, 443 lines)
@@ -0,0 +1,443 @@
# undici
|
||||||
|
|
||||||
|
[![Node CI](https://github.com/nodejs/undici/actions/workflows/nodejs.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici)
|
||||||
|
|
||||||
|
An HTTP/1.1 client, written from scratch for Node.js.
|
||||||
|
|
||||||
|
> Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici.
|
||||||
|
It is also a Stranger Things reference.
|
||||||
|
|
||||||
|
Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
npm i undici
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
The benchmark is a simple `hello world` [example](benchmarks/benchmark.js) using a
|
||||||
|
number of unix sockets (connections) with a pipelining depth of 10 running on Node 20.6.0.
|
||||||
|
|
||||||
|
### Connections 1
|
||||||
|
|
||||||
|
|
||||||
|
| Tests | Samples | Result | Tolerance | Difference with slowest |
|
||||||
|
|---------------------|---------|---------------|-----------|-------------------------|
|
||||||
|
| http - no keepalive | 15 | 5.32 req/sec | ± 2.61 % | - |
|
||||||
|
| http - keepalive | 10 | 5.35 req/sec | ± 2.47 % | + 0.44 % |
|
||||||
|
| undici - fetch | 15 | 41.85 req/sec | ± 2.49 % | + 686.04 % |
|
||||||
|
| undici - pipeline | 40 | 50.36 req/sec | ± 2.77 % | + 845.92 % |
|
||||||
|
| undici - stream | 15 | 60.58 req/sec | ± 2.75 % | + 1037.72 % |
|
||||||
|
| undici - request | 10 | 61.19 req/sec | ± 2.60 % | + 1049.24 % |
|
||||||
|
| undici - dispatch | 20 | 64.84 req/sec | ± 2.81 % | + 1117.81 % |
|
||||||
|
|
||||||
|
|
||||||
|
### Connections 50
|
||||||
|
|
||||||
|
| Tests | Samples | Result | Tolerance | Difference with slowest |
|
||||||
|
|---------------------|---------|------------------|-----------|-------------------------|
|
||||||
|
| undici - fetch | 30 | 2107.19 req/sec | ± 2.69 % | - |
|
||||||
|
| http - no keepalive | 10 | 2698.90 req/sec | ± 2.68 % | + 28.08 % |
|
||||||
|
| http - keepalive | 10 | 4639.49 req/sec | ± 2.55 % | + 120.17 % |
|
||||||
|
| undici - pipeline | 40 | 6123.33 req/sec | ± 2.97 % | + 190.59 % |
|
||||||
|
| undici - stream | 50 | 9426.51 req/sec | ± 2.92 % | + 347.35 % |
|
||||||
|
| undici - request | 10 | 10162.88 req/sec | ± 2.13 % | + 382.29 % |
|
||||||
|
| undici - dispatch | 50 | 11191.11 req/sec | ± 2.98 % | + 431.09 % |
|
||||||
|
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { request } from 'undici'
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
trailers,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode)
|
||||||
|
console.log('headers', headers)
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data)
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('trailers', trailers)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Body Mixins
|
||||||
|
|
||||||
|
The `body` mixins are the most common way to format the request/response body. Mixins include:
|
||||||
|
|
||||||
|
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
|
||||||
|
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
|
||||||
|
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { request } from 'undici'
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
trailers,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode)
|
||||||
|
console.log('headers', headers)
|
||||||
|
console.log('data', await body.json())
|
||||||
|
console.log('trailers', trailers)
|
||||||
|
```
|
||||||
|
|
||||||
|
_Note: Once a mixin has been called then the body cannot be reused, thus calling additional mixins on `.body`, e.g. `.body.json(); .body.text()` will result in an error `TypeError: unusable` being thrown and returned through the `Promise` rejection._
|
||||||
|
|
||||||
|
Should you need to access the `body` in plain-text after using a mixin, the best practice is to use the `.text()` mixin first and then manually parse the text to the desired format.
|
||||||
|
|
||||||
|
For more information about their behavior, please reference the body mixin from the [Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin).
|
||||||
|
|
||||||
|
## Common API Methods
|
||||||
|
|
||||||
|
This section documents our most commonly used API methods. Additional APIs are documented in their own files within the [docs](./docs/) folder and are accessible via the navigation list on the left side of the docs site.
|
||||||
|
|
||||||
|
### `undici.request([url, options]): Promise`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `string | URL | UrlObject`
|
||||||
|
* **options** [`RequestOptions`](./docs/api/Dispatcher.md#parameter-requestoptions)
|
||||||
|
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||||
|
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
|
||||||
|
* **maxRedirections** `Integer` - Default: `0`
|
||||||
|
|
||||||
|
Returns a promise with the result of the `Dispatcher.request` method.
|
||||||
|
|
||||||
|
Calls `options.dispatcher.request(options)`.
|
||||||
|
|
||||||
|
See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details.
|
||||||
|
|
||||||
|
### `undici.stream([url, options, ]factory): Promise`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `string | URL | UrlObject`
|
||||||
|
* **options** [`StreamOptions`](./docs/api/Dispatcher.md#parameter-streamoptions)
|
||||||
|
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||||
|
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
|
||||||
|
* **maxRedirections** `Integer` - Default: `0`
|
||||||
|
* **factory** `Dispatcher.stream.factory`
|
||||||
|
|
||||||
|
Returns a promise with the result of the `Dispatcher.stream` method.
|
||||||
|
|
||||||
|
Calls `options.dispatcher.stream(options, factory)`.
|
||||||
|
|
||||||
|
See [Dispatcher.stream](docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
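A hedged usage sketch: the factory receives the response head and returns a Writable that the body is piped into (the URL and file path below are placeholders):

```js
import { stream } from 'undici'
import { createWriteStream } from 'node:fs'

await stream(
  'http://localhost:3000/foo',
  { opaque: { path: './response.txt' } },
  ({ statusCode, headers, opaque: { path } }) => createWriteStream(path)
)
```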
|
||||||
|
|
||||||
|
### `undici.pipeline([url, options, ]handler): Duplex`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `string | URL | UrlObject`
|
||||||
|
* **options** [`PipelineOptions`](docs/api/Dispatcher.md#parameter-pipelineoptions)
|
||||||
|
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||||
|
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
|
||||||
|
* **maxRedirections** `Integer` - Default: `0`
|
||||||
|
* **handler** `Dispatcher.pipeline.handler`
|
||||||
|
|
||||||
|
Returns: `stream.Duplex`
|
||||||
|
|
||||||
|
Calls `options.dispatch.pipeline(options, handler)`.
|
||||||
|
|
||||||
|
See [Dispatcher.pipeline](docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
|
||||||
|
|
||||||
|
### `undici.connect([url, options]): Promise`
|
||||||
|
|
||||||
|
Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT).
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `string | URL | UrlObject`
|
||||||
|
* **options** [`ConnectOptions`](docs/api/Dispatcher.md#parameter-connectoptions)
|
||||||
|
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||||
|
* **maxRedirections** `Integer` - Default: `0`
|
||||||
|
* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional)
|
||||||
|
|
||||||
|
Returns a promise with the result of the `Dispatcher.connect` method.
|
||||||
|
|
||||||
|
Calls `options.dispatch.connect(options)`.
|
||||||
|
|
||||||
|
See [Dispatcher.connect](docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
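A brief sketch of the CONNECT flow (the target URL is a placeholder):

```js
import { connect } from 'undici'

const { statusCode, socket } = await connect('http://localhost:3000')
console.log('connected with status', statusCode)
socket.end() // the returned duplex socket carries the tunnelled traffic
```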
|
||||||
|
|
||||||
|
### `undici.fetch(input[, init]): Promise`
|
||||||
|
|
||||||
|
Implements [fetch](https://fetch.spec.whatwg.org/#fetch-method).
|
||||||
|
|
||||||
|
* https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch
|
||||||
|
* https://fetch.spec.whatwg.org/#fetch-method
|
||||||
|
|
||||||
|
Only supported on Node 16.8+.
|
||||||
|
|
||||||
|
Basic usage example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { fetch } from 'undici'
|
||||||
|
|
||||||
|
|
||||||
|
const res = await fetch('https://example.com')
|
||||||
|
const json = await res.json()
|
||||||
|
console.log(json)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can pass an optional dispatcher to `fetch` as:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { fetch, Agent } from 'undici'
|
||||||
|
|
||||||
|
const res = await fetch('https://example.com', {
|
||||||
|
// Mocks are also supported
|
||||||
|
dispatcher: new Agent({
|
||||||
|
keepAliveTimeout: 10,
|
||||||
|
keepAliveMaxTimeout: 10
|
||||||
|
})
|
||||||
|
})
|
||||||
|
const json = await res.json()
|
||||||
|
console.log(json)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `request.body`
|
||||||
|
|
||||||
|
A body can be of the following types:
|
||||||
|
|
||||||
|
- ArrayBuffer
|
||||||
|
- ArrayBufferView
|
||||||
|
- AsyncIterables
|
||||||
|
- Blob
|
||||||
|
- Iterables
|
||||||
|
- String
|
||||||
|
- URLSearchParams
|
||||||
|
- FormData
|
||||||
|
|
||||||
|
In this implementation of fetch, ```request.body``` also accepts ```Async Iterables```, which are not part of the [Fetch Standard.](https://fetch.spec.whatwg.org)
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { fetch } from 'undici'
|
||||||
|
|
||||||
|
const data = {
|
||||||
|
async *[Symbol.asyncIterator]() {
|
||||||
|
yield 'hello'
|
||||||
|
yield 'world'
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half' })
|
||||||
|
```
|
||||||
|
|
||||||
|
#### `request.duplex`
|
||||||
|
|
||||||
|
- half
|
||||||
|
|
||||||
|
In this implementation of fetch, `request.duplex` must be set if `request.body` is a `ReadableStream` or an `Async Iterable`. Fetch requests are currently always full duplex. For more detail, refer to the [Fetch Standard.](https://fetch.spec.whatwg.org/#dom-requestinit-duplex)
|
||||||
|
|
||||||
|
#### `response.body`
|
||||||
|
|
||||||
|
Nodejs has two kinds of streams: [web streams](https://nodejs.org/dist/latest-v16.x/docs/api/webstreams.html), which follow the API of the WHATWG web standard found in browsers, and an older Node-specific [streams API](https://nodejs.org/api/stream.html). `response.body` returns a readable web stream. If you would prefer to work with a Node stream you can convert a web stream using `.fromWeb()`.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { fetch } from 'undici'
|
||||||
|
import { Readable } from 'node:stream'
|
||||||
|
|
||||||
|
const response = await fetch('https://example.com')
|
||||||
|
const readableWebStream = response.body
|
||||||
|
const readableNodeStream = Readable.fromWeb(readableWebStream)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Specification Compliance
|
||||||
|
|
||||||
|
This section documents parts of the [Fetch Standard](https://fetch.spec.whatwg.org) that Undici does
|
||||||
|
not support or does not fully implement.
|
||||||
|
|
||||||
|
##### Garbage Collection
|
||||||
|
|
||||||
|
* https://fetch.spec.whatwg.org/#garbage-collection
|
||||||
|
|
||||||
|
The [Fetch Standard](https://fetch.spec.whatwg.org) allows users to skip consuming the response body by relying on
|
||||||
|
[garbage collection](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Memory_Management#garbage_collection) to release connection resources. Undici does not do the same. Therefore, it is important to always either consume or cancel the response body.
|
||||||
|
|
||||||
|
Garbage collection in Node is less aggressive and deterministic
|
||||||
|
(due to the lack of clear idle periods that browsers have through the rendering refresh rate)
|
||||||
|
which means that leaving the release of connection resources to the garbage collector can lead
|
||||||
|
to excessive connection usage, reduced performance (due to less connection re-use), and even
|
||||||
|
stalls or deadlocks when running out of connections.
|
||||||
|
|
||||||
|
```js
|
||||||
|
// Do
|
||||||
|
const headers = await fetch(url)
|
||||||
|
.then(async res => {
|
||||||
|
for await (const chunk of res.body) {
|
||||||
|
// force consumption of body
|
||||||
|
}
|
||||||
|
return res.headers
|
||||||
|
})
|
||||||
|
|
||||||
|
// Do not
|
||||||
|
const headers = await fetch(url)
|
||||||
|
.then(res => res.headers)
|
||||||
|
```
|
||||||
|
|
||||||
|
However, if you only need the headers, it might be better to use the `HEAD` request method, which obviates the need to consume or cancel the response body. See [MDN - HTTP - HTTP request methods - HEAD](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) for more details.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const headers = await fetch(url, { method: 'HEAD' })
|
||||||
|
.then(res => res.headers)
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Forbidden and Safelisted Header Names
|
||||||
|
|
||||||
|
* https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name
|
||||||
|
* https://fetch.spec.whatwg.org/#forbidden-header-name
|
||||||
|
* https://fetch.spec.whatwg.org/#forbidden-response-header-name
|
||||||
|
* https://github.com/wintercg/fetch/issues/6
|
||||||
|
|
||||||
|
The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user.
|
||||||
|
|
||||||
|
### `undici.upgrade([url, options]): Promise`
|
||||||
|
|
||||||
|
Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `string | URL | UrlObject`
|
||||||
|
* **options** [`UpgradeOptions`](docs/api/Dispatcher.md#parameter-upgradeoptions)
|
||||||
|
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
|
||||||
|
* **maxRedirections** `Integer` - Default: `0`
|
||||||
|
* **callback** `(error: Error | null, data: UpgradeData) => void` (optional)
|
||||||
|
|
||||||
|
Returns a promise with the result of the `Dispatcher.upgrade` method.
|
||||||
|
|
||||||
|
Calls `options.dispatcher.upgrade(options)`.
|
||||||
|
|
||||||
|
See [Dispatcher.upgrade](docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
|
||||||
|
|
||||||
|
### `undici.setGlobalDispatcher(dispatcher)`
|
||||||
|
|
||||||
|
* dispatcher `Dispatcher`
|
||||||
|
|
||||||
|
Sets the global dispatcher used by Common API Methods.
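For example (a sketch; the Agent options shown are arbitrary):

```js
import { setGlobalDispatcher, Agent } from 'undici'

setGlobalDispatcher(new Agent({ keepAliveTimeout: 10_000, keepAliveMaxTimeout: 10_000 }))
// Subsequent undici request/stream/fetch calls use this Agent by default.
```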
|
||||||
|
|
||||||
|
### `undici.getGlobalDispatcher()`
|
||||||
|
|
||||||
|
Gets the global dispatcher used by Common API Methods.
|
||||||
|
|
||||||
|
Returns: `Dispatcher`
|
||||||
|
|
||||||
|
### `undici.setGlobalOrigin(origin)`
|
||||||
|
|
||||||
|
* origin `string | URL | undefined`
|
||||||
|
|
||||||
|
Sets the global origin used in `fetch`.
|
||||||
|
|
||||||
|
If `undefined` is passed, the global origin will be reset. This will cause `Response.redirect`, `new Request()`, and `fetch` to throw an error when a relative path is passed.
|
||||||
|
|
||||||
|
```js
|
||||||
|
setGlobalOrigin('http://localhost:3000')
|
||||||
|
|
||||||
|
const response = await fetch('/api/ping')
|
||||||
|
|
||||||
|
console.log(response.url) // http://localhost:3000/api/ping
|
||||||
|
```
|
||||||
|
|
||||||
|
### `undici.getGlobalOrigin()`
|
||||||
|
|
||||||
|
Gets the global origin used in `fetch`.
|
||||||
|
|
||||||
|
Returns: `URL`
|
||||||
|
|
||||||
|
### `UrlObject`
|
||||||
|
|
||||||
|
* **port** `string | number` (optional)
|
||||||
|
* **path** `string` (optional)
|
||||||
|
* **pathname** `string` (optional)
|
||||||
|
* **hostname** `string` (optional)
|
||||||
|
* **origin** `string` (optional)
|
||||||
|
* **protocol** `string` (optional)
|
||||||
|
* **search** `string` (optional)
|
||||||
|
|
||||||
|
## Specification Compliance
|
||||||
|
|
||||||
|
This section documents parts of the HTTP/1.1 specification that Undici does
|
||||||
|
not support or does not fully implement.
|
||||||
|
|
||||||
|
### Expect
|
||||||
|
|
||||||
|
Undici does not support the `Expect` request header field. The request
|
||||||
|
body is always immediately sent and the `100 Continue` response will be
|
||||||
|
ignored.
|
||||||
|
|
||||||
|
Refs: https://tools.ietf.org/html/rfc7231#section-5.1.1
|
||||||
|
|
||||||
|
### Pipelining
|
||||||
|
|
||||||
|
Undici will only use pipelining if configured with a `pipelining` factor
|
||||||
|
greater than `1`.
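For example, a minimal sketch (the origin is a placeholder) that enables a pipelining factor of 4 on a single connection:

```js
import { Client } from 'undici'

// Up to 4 requests may be in flight on the one TCP/TLS connection.
const client = new Client('http://localhost:3000', { pipelining: 4 })
```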
|
||||||
|
|
||||||
|
Undici always assumes that connections are persistent and will immediately
|
||||||
|
pipeline requests, without checking whether the connection is persistent.
|
||||||
|
Hence, automatic fallback to HTTP/1.0 or HTTP/1.1 without pipelining is
|
||||||
|
not supported.
|
||||||
|
|
||||||
|
Undici will immediately pipeline when retrying requests after a failed
|
||||||
|
connection. However, Undici will not retry the first remaining requests in
|
||||||
|
the prior pipeline and instead error the corresponding callback/promise/stream.
|
||||||
|
|
||||||
|
Undici will abort all running requests in the pipeline when any of them are
|
||||||
|
aborted.
|
||||||
|
|
||||||
|
* Refs: https://tools.ietf.org/html/rfc2616#section-8.1.2.2
|
||||||
|
* Refs: https://tools.ietf.org/html/rfc7230#section-6.3.2
|
||||||
|
|
||||||
|
### Manual Redirect
|
||||||
|
|
||||||
|
Since it is not possible to manually follow an HTTP redirect on the server-side,
|
||||||
|
Undici returns the actual response instead of an `opaqueredirect` filtered one
|
||||||
|
when invoked with a `manual` redirect. This aligns `fetch()` with the other
|
||||||
|
implementations in Deno and Cloudflare Workers.
|
||||||
|
|
||||||
|
Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
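A short, hedged sketch (placeholder URL) of what `manual` returns under undici:

```js
import { fetch } from 'undici'

const response = await fetch('http://localhost:3000/redirecting-path', {
  redirect: 'manual'
})

// The actual 3xx response is available rather than an opaqueredirect filtered one.
console.log(response.status, response.headers.get('location'))
```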
|
||||||
|
|
||||||
|
## Workarounds
|
||||||
|
|
||||||
|
### Network address family autoselection.
|
||||||
|
|
||||||
|
If you experience problems when connecting to a remote server that your DNS servers resolve to an IPv6 (AAAA record) address first, there is a chance that your local router or ISP has problems connecting to IPv6 networks. In that case undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`.
|
||||||
|
|
||||||
|
If the target server resolves to both an IPv6 and an IPv4 (A records) address and you are using a compatible Node version (18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (supported by both `undici.request` and `undici.Agent`), which will enable the family autoselection algorithm when establishing the connection.
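A hedged sketch of applying the option through an `Agent` (the target URL is a placeholder):

```js
import { Agent, request } from 'undici'

// Enables the address family autoselection algorithm (Node.js 18.3.0 and above).
const dispatcher = new Agent({ autoSelectFamily: true })

const { statusCode } = await request('https://example.com', { dispatcher })
console.log(statusCode)
```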
|
||||||
|
|
||||||
|
## Collaborators
|
||||||
|
|
||||||
|
* [__Daniele Belardi__](https://github.com/dnlup), <https://www.npmjs.com/~dnlup>
|
||||||
|
* [__Ethan Arrowood__](https://github.com/ethan-arrowood), <https://www.npmjs.com/~ethan_arrowood>
|
||||||
|
* [__Matteo Collina__](https://github.com/mcollina), <https://www.npmjs.com/~matteo.collina>
|
||||||
|
* [__Matthew Aitken__](https://github.com/KhafraDev), <https://www.npmjs.com/~khaf>
|
||||||
|
* [__Robert Nagy__](https://github.com/ronag), <https://www.npmjs.com/~ronag>
|
||||||
|
* [__Szymon Marczak__](https://github.com/szmarczak), <https://www.npmjs.com/~szmarczak>
|
||||||
|
* [__Tomas Della Vedova__](https://github.com/delvedor), <https://www.npmjs.com/~delvedor>
|
||||||
|
|
||||||
|
### Releasers
|
||||||
|
|
||||||
|
* [__Ethan Arrowood__](https://github.com/ethan-arrowood), <https://www.npmjs.com/~ethan_arrowood>
|
||||||
|
* [__Matteo Collina__](https://github.com/mcollina), <https://www.npmjs.com/~matteo.collina>
|
||||||
|
* [__Robert Nagy__](https://github.com/ronag), <https://www.npmjs.com/~ronag>
|
||||||
|
* [__Matthew Aitken__](https://github.com/KhafraDev), <https://www.npmjs.com/~khaf>
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
80
node_modules/undici/docs/api/Agent.md
generated
vendored
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
# Agent
|
||||||
|
|
||||||
|
Extends: `undici.Dispatcher`
|
||||||
|
|
||||||
|
Agent allows dispatching requests against multiple different origins.
|
||||||
|
|
||||||
|
Requests are not guaranteed to be dispatched in order of invocation.
|
||||||
|
|
||||||
|
## `new undici.Agent([options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `AgentOptions` (optional)
|
||||||
|
|
||||||
|
Returns: `Agent`
|
||||||
|
|
||||||
|
### Parameter: `AgentOptions`
|
||||||
|
|
||||||
|
Extends: [`PoolOptions`](Pool.md#parameter-pooloptions)
|
||||||
|
|
||||||
|
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
|
||||||
|
* **maxRedirections** `Integer` - Default: `0`. The number of HTTP redirections to follow unless otherwise specified in `DispatchOptions`.
|
||||||
|
* **interceptors** `{ Agent: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time.
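A minimal usage sketch (placeholder origin) that builds an `Agent` and dispatches a request through it:

```js
import { Agent, request } from 'undici'

// Follow up to 3 redirects for every request dispatched through this agent.
const agent = new Agent({ maxRedirections: 3 })

const { statusCode, body } = await request('http://localhost:3000', { dispatcher: agent })
console.log(statusCode)
console.log(await body.text())
```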
|
||||||
|
|
||||||
|
## Instance Properties
|
||||||
|
|
||||||
|
### `Agent.closed`
|
||||||
|
|
||||||
|
Implements [Client.closed](Client.md#clientclosed)
|
||||||
|
|
||||||
|
### `Agent.destroyed`
|
||||||
|
|
||||||
|
Implements [Client.destroyed](Client.md#clientdestroyed)
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `Agent.close([callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
|
||||||
|
|
||||||
|
### `Agent.destroy([error, callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
|
||||||
|
|
||||||
|
### `Agent.dispatch(options, handler: AgentDispatchOptions)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
#### Parameter: `AgentDispatchOptions`
|
||||||
|
|
||||||
|
Extends: [`DispatchOptions`](Dispatcher.md#parameter-dispatchoptions)
|
||||||
|
|
||||||
|
* **origin** `string | URL`
|
||||||
|
* **maxRedirections** `Integer`.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### `Agent.connect(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
|
||||||
|
|
||||||
|
### `Agent.dispatch(options, handler)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
### `Agent.pipeline(options, handler)`
|
||||||
|
|
||||||
|
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
|
||||||
|
|
||||||
|
### `Agent.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
### `Agent.stream(options, factory[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
|
||||||
|
|
||||||
|
### `Agent.upgrade(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
|
99
node_modules/undici/docs/api/BalancedPool.md
generated
vendored
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
# Class: BalancedPool
|
||||||
|
|
||||||
|
Extends: `undici.Dispatcher`
|
||||||
|
|
||||||
|
A pool of [Pool](Pool.md) instances connected to multiple upstreams.
|
||||||
|
|
||||||
|
Requests are not guaranteed to be dispatched in order of invocation.
|
||||||
|
|
||||||
|
## `new BalancedPool(upstreams [, options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **upstreams** `URL | string | string[]` - It should only include the **protocol, hostname, and port**.
|
||||||
|
* **options** `BalancedPoolOptions` (optional)
|
||||||
|
|
||||||
|
### Parameter: `BalancedPoolOptions`
|
||||||
|
|
||||||
|
Extends: [`PoolOptions`](Pool.md#parameter-pooloptions)
|
||||||
|
|
||||||
|
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
|
||||||
|
|
||||||
|
The `PoolOptions` are passed to each of the `Pool` instances being created.
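A brief sketch (the upstream addresses are placeholders for servers you run) spreading requests across two upstreams:

```js
import { BalancedPool } from 'undici'

const pool = new BalancedPool([
  'http://localhost:3001',
  'http://localhost:3002'
])

const { statusCode } = await pool.request({ path: '/', method: 'GET' })
console.log(statusCode)
await pool.close()
```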
|
||||||
|
## Instance Properties
|
||||||
|
|
||||||
|
### `BalancedPool.upstreams`
|
||||||
|
|
||||||
|
Returns an array of upstreams that were previously added.
|
||||||
|
|
||||||
|
### `BalancedPool.closed`
|
||||||
|
|
||||||
|
Implements [Client.closed](Client.md#clientclosed)
|
||||||
|
|
||||||
|
### `BalancedPool.destroyed`
|
||||||
|
|
||||||
|
Implements [Client.destroyed](Client.md#clientdestroyed)
|
||||||
|
|
||||||
|
### `BalancedPool.stats`
|
||||||
|
|
||||||
|
Returns [`PoolStats`](PoolStats.md) instance for this pool.
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `BalancedPool.addUpstream(upstream)`
|
||||||
|
|
||||||
|
Add an upstream.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **upstream** `string` - It should only include the **protocol, hostname, and port**.
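For instance (assuming `pool` is a `BalancedPool` instance such as the one in the sketch above):

```js
pool.addUpstream('http://localhost:3003')
```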
|
||||||
|
|
||||||
|
### `BalancedPool.removeUpstream(upstream)`
|
||||||
|
|
||||||
|
Removes an upstream that was previously added.
|
||||||
|
|
||||||
|
### `BalancedPool.close([callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
|
||||||
|
|
||||||
|
### `BalancedPool.destroy([error, callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
|
||||||
|
|
||||||
|
### `BalancedPool.connect(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
|
||||||
|
|
||||||
|
### `BalancedPool.dispatch(options, handlers)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
### `BalancedPool.pipeline(options, handler)`
|
||||||
|
|
||||||
|
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
|
||||||
|
|
||||||
|
### `BalancedPool.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
### `BalancedPool.stream(options, factory[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
|
||||||
|
|
||||||
|
### `BalancedPool.upgrade(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
|
||||||
|
|
||||||
|
## Instance Events
|
||||||
|
|
||||||
|
### Event: `'connect'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect).
|
||||||
|
|
||||||
|
### Event: `'disconnect'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect).
|
||||||
|
|
||||||
|
### Event: `'drain'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain).
|
30
node_modules/undici/docs/api/CacheStorage.md
generated
vendored
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
# CacheStorage
|
||||||
|
|
||||||
|
Undici exposes a W3C spec-compliant implementation of [CacheStorage](https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage) and [Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache).
|
||||||
|
|
||||||
|
## Opening a Cache
|
||||||
|
|
||||||
|
Undici exports a top-level CacheStorage instance. You can open a new Cache, or duplicate a Cache with an existing name, by using `CacheStorage.prototype.open`. If you open a Cache with the same name as an already-existing Cache, its list of cached Responses will be shared between both instances.
|
||||||
|
|
||||||
|
```mjs
|
||||||
|
import { caches } from 'undici'
|
||||||
|
|
||||||
|
const cache_1 = await caches.open('v1')
|
||||||
|
const cache_2 = await caches.open('v1')
|
||||||
|
|
||||||
|
// Although .open() creates a new instance,
|
||||||
|
assert(cache_1 !== cache_2)
|
||||||
|
// The same Response is matched in both.
|
||||||
|
assert.deepStrictEqual(await cache_1.match('/req'), await cache_2.match('/req'))
|
||||||
|
```
|
||||||
|
|
||||||
|
## Deleting a Cache
|
||||||
|
|
||||||
|
If a Cache is deleted, the cached Responses/Requests can still be used.
|
||||||
|
|
||||||
|
```mjs
|
||||||
|
const response = await cache_1.match('/req')
|
||||||
|
await caches.delete('v1')
|
||||||
|
|
||||||
|
await response.text() // the Response's body
|
||||||
|
```
|
273
node_modules/undici/docs/api/Client.md
generated
vendored
Normal file
|
@ -0,0 +1,273 @@
|
||||||
|
# Class: Client
|
||||||
|
|
||||||
|
Extends: `undici.Dispatcher`
|
||||||
|
|
||||||
|
A basic HTTP/1.1 client, mapped on top of a single TCP/TLS connection. Pipelining is disabled by default.
|
||||||
|
|
||||||
|
Requests are not guaranteed to be dispatched in order of invocation.
|
||||||
|
|
||||||
|
## `new Client(url[, options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `URL | string` - Should only include the **protocol, hostname, and port**.
|
||||||
|
* **options** `ClientOptions` (optional)
|
||||||
|
|
||||||
|
Returns: `Client`
|
||||||
|
|
||||||
|
### Parameter: `ClientOptions`
|
||||||
|
|
||||||
|
> ⚠️ Warning: The `H2` support is experimental.
|
||||||
|
|
||||||
|
* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds.
|
||||||
|
* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
|
||||||
|
* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
|
||||||
|
* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
|
||||||
|
* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second.
|
||||||
|
* **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB.
|
||||||
|
* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable.
|
||||||
|
* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections.
|
||||||
|
* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`.
|
||||||
|
* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body.
|
||||||
|
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time.
|
||||||
|
* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
|
||||||
|
* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
|
||||||
|
* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server selects it via ALPN negotiation.
|
||||||
|
* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
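A short sketch constructing a `Client` with a few of these options (the origin and values are illustrative, not recommendations):

```js
import { Client } from 'undici'

const client = new Client('http://localhost:3000', {
  headersTimeout: 30e3,  // give up if headers take longer than 30 s
  bodyTimeout: 30e3,     // give up if the body stalls for 30 s
  keepAliveTimeout: 4e3, // idle socket timeout (the default)
  pipelining: 1          // one in-flight request per connection (the default)
})
```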
|
||||||
|
|
||||||
|
#### Parameter: `ConnectOptions`
|
||||||
|
|
||||||
|
Every TLS option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
|
||||||
|
Furthermore, the following options can be passed:
|
||||||
|
|
||||||
|
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
|
||||||
|
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100.
|
||||||
|
* **timeout** `number | null` (optional) - In milliseconds, Default `10e3`.
|
||||||
|
* **servername** `string | null` (optional)
|
||||||
|
* **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled
|
||||||
|
* **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds
|
||||||
|
|
||||||
|
### Example - Basic Client instantiation
|
||||||
|
|
||||||
|
This will instantiate the undici Client, but it will not connect to the origin until something is queued. Consider using `client.connect` to prematurely connect to the origin, or just call `client.request`.
|
||||||
|
|
||||||
|
```js
|
||||||
|
'use strict'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
|
||||||
|
const client = new Client('http://localhost:3000')
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example - Custom connector
|
||||||
|
|
||||||
|
This allows you to perform additional checks on the socket that will be used for the next request.
|
||||||
|
|
||||||
|
```js
|
||||||
|
'use strict'
|
||||||
|
import { Client, buildConnector } from 'undici'
|
||||||
|
|
||||||
|
const connector = buildConnector({ rejectUnauthorized: false })
|
||||||
|
const client = new Client('https://localhost:3000', {
|
||||||
|
connect (opts, cb) {
|
||||||
|
connector(opts, (err, socket) => {
|
||||||
|
if (err) {
|
||||||
|
cb(err)
|
||||||
|
} else if (false /* replace with your own check on the socket */) {
|
||||||
|
socket.destroy()
|
||||||
|
cb(new Error('kaboom'))
|
||||||
|
} else {
|
||||||
|
cb(null, socket)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `Client.close([callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
|
||||||
|
|
||||||
|
### `Client.destroy([error, callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
|
||||||
|
|
||||||
|
Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided).
|
||||||
|
|
||||||
|
### `Client.connect(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
|
||||||
|
|
||||||
|
### `Client.dispatch(options, handlers)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
### `Client.pipeline(options, handler)`
|
||||||
|
|
||||||
|
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
|
||||||
|
|
||||||
|
### `Client.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
### `Client.stream(options, factory[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
|
||||||
|
|
||||||
|
### `Client.upgrade(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
|
||||||
|
|
||||||
|
## Instance Properties
|
||||||
|
|
||||||
|
### `Client.closed`
|
||||||
|
|
||||||
|
* `boolean`
|
||||||
|
|
||||||
|
`true` after `client.close()` has been called.
|
||||||
|
|
||||||
|
### `Client.destroyed`
|
||||||
|
|
||||||
|
* `boolean`
|
||||||
|
|
||||||
|
`true` after `client.destroy()` has been called, or after `client.close()` has been called and the client shutdown has completed.
|
||||||
|
|
||||||
|
### `Client.pipelining`
|
||||||
|
|
||||||
|
* `number`
|
||||||
|
|
||||||
|
Property to get and set the pipelining factor.
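A small sketch based on the description above (placeholder origin):

```js
import { Client } from 'undici'

const client = new Client('http://localhost:3000', { pipelining: 1 })
client.pipelining = 4          // raise the factor at runtime
console.log(client.pipelining) // 4
```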
|
||||||
|
|
||||||
|
## Instance Events
|
||||||
|
|
||||||
|
### Event: `'connect'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect).
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
* **origin** `URL`
|
||||||
|
* **targets** `Array<Dispatcher>`
|
||||||
|
|
||||||
|
Emitted when a socket has been created and connected. The client will connect once `client.size > 0`.
|
||||||
|
|
||||||
|
#### Example - Client connect event
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
client.on('connect', (origin) => {
|
||||||
|
console.log(`Connected to ${origin}`) // should print before the request body statement
|
||||||
|
})
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { body } = await client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
body.setEncoding('utf-8')
|
||||||
|
body.on('data', console.log)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Event: `'disconnect'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect).
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
* **origin** `URL`
|
||||||
|
* **targets** `Array<Dispatcher>`
|
||||||
|
* **error** `Error`
|
||||||
|
|
||||||
|
Emitted when a socket has disconnected. The error argument of the event is the error which caused the socket to disconnect. The client will reconnect if or once `client.size > 0`.
|
||||||
|
|
||||||
|
#### Example - Client disconnect event
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.destroy()
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
client.on('disconnect', (origin) => {
|
||||||
|
console.log(`Disconnected from ${origin}`)
|
||||||
|
})
|
||||||
|
|
||||||
|
try {
|
||||||
|
await client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error.message)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Event: `'drain'`
|
||||||
|
|
||||||
|
Emitted when pipeline is no longer busy.
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain).
|
||||||
|
|
||||||
|
#### Example - Client drain event
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
client.on('drain', () => {
|
||||||
|
console.log('drain event')
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
})
|
||||||
|
|
||||||
|
const requests = [
|
||||||
|
client.request({ path: '/', method: 'GET' }),
|
||||||
|
client.request({ path: '/', method: 'GET' }),
|
||||||
|
client.request({ path: '/', method: 'GET' })
|
||||||
|
]
|
||||||
|
|
||||||
|
await Promise.all(requests)
|
||||||
|
|
||||||
|
console.log('requests completed')
|
||||||
|
```
|
||||||
|
|
||||||
|
### Event: `'error'`
|
||||||
|
|
||||||
|
Invoked for user errors, such as throwing in the `onError` handler.
|
115
node_modules/undici/docs/api/Connector.md
generated
vendored
Normal file
|
@ -0,0 +1,115 @@
|
||||||
|
# Connector
|
||||||
|
|
||||||
|
Undici creates the underlying socket via the connector builder. Normally, this happens automatically and you don't need to care about it, but if you need to perform additional checks over the socket in use, this is the right place to do so.
|
||||||
|
|
||||||
|
If you want to create a custom connector, you must import the `buildConnector` utility.
|
||||||
|
|
||||||
|
#### Parameter: `buildConnector.BuildOptions`
|
||||||
|
|
||||||
|
Every TLS option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
|
||||||
|
Furthermore, the following options can be passed:
|
||||||
|
|
||||||
|
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
|
||||||
|
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: `100`.
|
||||||
|
* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`.
|
||||||
|
* **servername** `string | null` (optional)
|
||||||
|
|
||||||
|
Once you call `buildConnector`, it will return a connector function, which takes the following parameters.
|
||||||
|
|
||||||
|
#### Parameter: `connector.Options`
|
||||||
|
|
||||||
|
* **hostname** `string` (required)
|
||||||
|
* **host** `string` (optional)
|
||||||
|
* **protocol** `string` (required)
|
||||||
|
* **port** `string` (required)
|
||||||
|
* **servername** `string` (optional)
|
||||||
|
* **localAddress** `string | null` (optional) Local address the socket should connect from.
|
||||||
|
* **httpSocket** `Socket` (optional) Establish secure connection on a given socket rather than creating a new socket. It can only be sent on TLS update.
|
||||||
|
|
||||||
|
### Basic example
|
||||||
|
|
||||||
|
```js
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
import { Client, buildConnector } from 'undici'
|
||||||
|
|
||||||
|
const connector = buildConnector({ rejectUnauthorized: false })
|
||||||
|
const client = new Client('https://localhost:3000', {
|
||||||
|
connect (opts, cb) {
|
||||||
|
connector(opts, (err, socket) => {
|
||||||
|
if (err) {
|
||||||
|
cb(err)
|
||||||
|
} else if (false /* replace with your own check on the socket */) {
|
||||||
|
socket.destroy()
|
||||||
|
cb(new Error('kaboom'))
|
||||||
|
} else {
|
||||||
|
cb(null, socket)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example: validate the CA fingerprint
|
||||||
|
|
||||||
|
```js
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
import { Client, buildConnector } from 'undici'
|
||||||
|
|
||||||
|
const caFingerprint = 'FO:OB:AR'
|
||||||
|
const connector = buildConnector({ rejectUnauthorized: false })
|
||||||
|
const client = new Client('https://localhost:3000', {
|
||||||
|
connect (opts, cb) {
|
||||||
|
connector(opts, (err, socket) => {
|
||||||
|
if (err) {
|
||||||
|
cb(err)
|
||||||
|
} else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) {
|
||||||
|
socket.destroy()
|
||||||
|
cb(new Error('Fingerprint does not match or malformed certificate'))
|
||||||
|
} else {
|
||||||
|
cb(null, socket)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
}, (err, data) => {
|
||||||
|
if (err) throw err
|
||||||
|
|
||||||
|
const bufs = []
|
||||||
|
data.body.on('data', (buf) => {
|
||||||
|
bufs.push(buf)
|
||||||
|
})
|
||||||
|
data.body.on('end', () => {
|
||||||
|
console.log(Buffer.concat(bufs).toString('utf8'))
|
||||||
|
client.close()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
function getIssuerCertificate (socket) {
|
||||||
|
let certificate = socket.getPeerCertificate(true)
|
||||||
|
while (certificate && Object.keys(certificate).length > 0) {
|
||||||
|
// invalid certificate
|
||||||
|
if (certificate.issuerCertificate == null) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// We have reached the root certificate.
|
||||||
|
// In case of self-signed certificates, `issuerCertificate` may be a circular reference.
|
||||||
|
if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
// continue the loop
|
||||||
|
certificate = certificate.issuerCertificate
|
||||||
|
}
|
||||||
|
return certificate
|
||||||
|
}
|
||||||
|
```
|
57
node_modules/undici/docs/api/ContentType.md
generated
vendored
Normal file
|
@ -0,0 +1,57 @@
|
||||||
|
# MIME Type Parsing
|
||||||
|
|
||||||
|
## `MIMEType` interface
|
||||||
|
|
||||||
|
* **type** `string`
|
||||||
|
* **subtype** `string`
|
||||||
|
* **parameters** `Map<string, string>`
|
||||||
|
* **essence** `string`
|
||||||
|
|
||||||
|
## `parseMIMEType(input)`
|
||||||
|
|
||||||
|
Implements [parse a MIME type](https://mimesniff.spec.whatwg.org/#parse-a-mime-type).
|
||||||
|
|
||||||
|
Parses a MIME type, returning its type, subtype, and any associated parameters. If the parser can't parse an input it returns the string literal `'failure'`.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { parseMIMEType } from 'undici'
|
||||||
|
|
||||||
|
parseMIMEType('text/html; charset=gbk')
|
||||||
|
// {
|
||||||
|
// type: 'text',
|
||||||
|
// subtype: 'html',
|
||||||
|
// parameters: Map(1) { 'charset' => 'gbk' },
|
||||||
|
// essence: 'text/html'
|
||||||
|
// }
|
||||||
|
```
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **input** `string`
|
||||||
|
|
||||||
|
Returns: `MIMEType|'failure'`
|
||||||
|
|
||||||
|
## `serializeAMimeType(input)`
|
||||||
|
|
||||||
|
Implements [serialize a MIME type](https://mimesniff.spec.whatwg.org/#serialize-a-mime-type).
|
||||||
|
|
||||||
|
Serializes a MIMEType object.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { serializeAMimeType } from 'undici'
|
||||||
|
|
||||||
|
serializeAMimeType({
|
||||||
|
type: 'text',
|
||||||
|
subtype: 'html',
|
||||||
|
parameters: new Map([['charset', 'gbk']]),
|
||||||
|
essence: 'text/html'
|
||||||
|
})
|
||||||
|
// text/html;charset=gbk
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **mimeType** `MIMEType`
|
||||||
|
|
||||||
|
Returns: `string`
|
101
node_modules/undici/docs/api/Cookies.md
generated
vendored
Normal file
|
@ -0,0 +1,101 @@
|
||||||
|
# Cookie Handling
|
||||||
|
|
||||||
|
## `Cookie` interface
|
||||||
|
|
||||||
|
* **name** `string`
|
||||||
|
* **value** `string`
|
||||||
|
* **expires** `Date|number` (optional)
|
||||||
|
* **maxAge** `number` (optional)
|
||||||
|
* **domain** `string` (optional)
|
||||||
|
* **path** `string` (optional)
|
||||||
|
* **secure** `boolean` (optional)
|
||||||
|
* **httpOnly** `boolean` (optional)
|
||||||
|
* **sameSite** `'Strict'|'Lax'|'None'` (optional)
|
||||||
|
* **unparsed** `string[]` (optional) Leftover attributes that weren't parsed.
|
||||||
|
|
||||||
|
## `deleteCookie(headers, name[, attributes])`
|
||||||
|
|
||||||
|
Sets the expiry time of the cookie to the unix epoch, causing browsers to delete it when received.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { deleteCookie, Headers } from 'undici'
|
||||||
|
|
||||||
|
const headers = new Headers()
|
||||||
|
deleteCookie(headers, 'name')
|
||||||
|
|
||||||
|
console.log(headers.get('set-cookie')) // name=; Expires=Thu, 01 Jan 1970 00:00:00 GMT
|
||||||
|
```
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **headers** `Headers`
|
||||||
|
* **name** `string`
|
||||||
|
* **attributes** `{ path?: string, domain?: string }` (optional)
|
||||||
|
|
||||||
|
Returns: `void`
|
||||||
|
|
||||||
|
## `getCookies(headers)`
|
||||||
|
|
||||||
|
Parses the `Cookie` header and returns a list of attributes and values.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { getCookies, Headers } from 'undici'
|
||||||
|
|
||||||
|
const headers = new Headers({
|
||||||
|
cookie: 'get=cookies; and=attributes'
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log(getCookies(headers)) // { get: 'cookies', and: 'attributes' }
|
||||||
|
```
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **headers** `Headers`
|
||||||
|
|
||||||
|
Returns: `Record<string, string>`
|
||||||
|
|
||||||
|
## `getSetCookies(headers)`
|
||||||
|
|
||||||
|
Parses all `Set-Cookie` headers.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { getSetCookies, Headers } from 'undici'
|
||||||
|
|
||||||
|
const headers = new Headers({ 'set-cookie': 'undici=getSetCookies; Secure' })
|
||||||
|
|
||||||
|
console.log(getSetCookies(headers))
|
||||||
|
// [
|
||||||
|
// {
|
||||||
|
// name: 'undici',
|
||||||
|
// value: 'getSetCookies',
|
||||||
|
// secure: true
|
||||||
|
// }
|
||||||
|
// ]
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **headers** `Headers`
|
||||||
|
|
||||||
|
Returns: `Cookie[]`
|
||||||
|
|
||||||
|
## `setCookie(headers, cookie)`
|
||||||
|
|
||||||
|
Appends a cookie to the `Set-Cookie` header.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { setCookie, Headers } from 'undici'
|
||||||
|
|
||||||
|
const headers = new Headers()
|
||||||
|
setCookie(headers, { name: 'undici', value: 'setCookie' })
|
||||||
|
|
||||||
|
console.log(headers.get('Set-Cookie')) // undici=setCookie
|
||||||
|
```
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **headers** `Headers`
|
||||||
|
* **cookie** `Cookie`
|
||||||
|
|
||||||
|
Returns: `void`
|
204
node_modules/undici/docs/api/DiagnosticsChannel.md
generated
vendored
Normal file
|
@ -0,0 +1,204 @@
|
||||||
|
# Diagnostics Channel Support
|
||||||
|
|
||||||
|
Stability: Experimental.
|
||||||
|
|
||||||
|
Undici supports the [`diagnostics_channel`](https://nodejs.org/api/diagnostics_channel.html) (currently available only on Node.js v16+).
|
||||||
|
It is the preferred way to instrument Undici and retrieve internal information.
|
||||||
|
|
||||||
|
The channels available are the following.
|
||||||
|
|
||||||
|
## `undici:request:create`
|
||||||
|
|
||||||
|
This message is published when a new outgoing request is created.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => {
|
||||||
|
console.log('origin', request.origin)
|
||||||
|
console.log('completed', request.completed)
|
||||||
|
console.log('method', request.method)
|
||||||
|
console.log('path', request.path)
|
||||||
|
console.log('headers', request.headers) // raw text, e.g.: 'bar: bar\r\n'
|
||||||
|
request.addHeader('hello', 'world')
|
||||||
|
console.log('headers', request.headers) // e.g. 'bar: bar\r\nhello: world\r\n'
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: a request is only loosely coupled to a given socket.
|
||||||
|
|
||||||
|
|
||||||
|
## `undici:request:bodySent`
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => {
|
||||||
|
// request is the same object as in undici:request:create
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:request:headers`
|
||||||
|
|
||||||
|
This message is published after the response headers have been received.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => {
|
||||||
|
// request is the same object as in undici:request:create
|
||||||
|
console.log('statusCode', response.statusCode)
|
||||||
|
console.log(response.statusText)
|
||||||
|
// response.headers are buffers.
|
||||||
|
console.log(response.headers.map((x) => x.toString()))
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:request:trailers`
|
||||||
|
|
||||||
|
This message is published after the response body and trailers have been received, i.e. the response has been completed.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => {
|
||||||
|
// request is the same object as in undici:request:create
|
||||||
|
console.log('completed', request.completed)
|
||||||
|
// trailers are buffers.
|
||||||
|
console.log(trailers.map((x) => x.toString()))
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:request:error`
|
||||||
|
|
||||||
|
This message is published if the request is going to error, but it has not errored yet.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:request:error').subscribe(({ request, error }) => {
|
||||||
|
// request is the same object as in undici:request:create
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:client:sendHeaders`
|
||||||
|
|
||||||
|
This message is published right before the first byte of the request is written to the socket.
|
||||||
|
|
||||||
|
*Note*: It will publish the exact headers that will be sent to the server in raw format.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => {
|
||||||
|
// request is the same object as in undici:request:create
|
||||||
|
console.log(`Full headers list ${headers.split('\r\n')}`);
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:client:beforeConnect`
|
||||||
|
|
||||||
|
This message is published before creating a new connection for **any** request.
|
||||||
|
You can not assume that this event is related to any specific request.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => {
|
||||||
|
// const { host, hostname, protocol, port, servername } = connectParams
|
||||||
|
// connector is a function that creates the socket
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:client:connected`
|
||||||
|
|
||||||
|
This message is published after a connection is established.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => {
|
||||||
|
// const { host, hostname, protocol, port, servername } = connectParams
|
||||||
|
// connector is a function that creates the socket
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:client:connectError`
|
||||||
|
|
||||||
|
This message is published when creating a new connection fails.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => {
|
||||||
|
// const { host, hostname, protocol, port, servername } = connectParams
|
||||||
|
// connector is a function that creates the socket
|
||||||
|
console.log(`Connect failed with ${error.message}`)
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:websocket:open`
|
||||||
|
|
||||||
|
This message is published after the client has successfully connected to a server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:websocket:open').subscribe(({ address, protocol, extensions }) => {
|
||||||
|
console.log(address) // address, family, and port
|
||||||
|
console.log(protocol) // negotiated subprotocols
|
||||||
|
console.log(extensions) // negotiated extensions
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:websocket:close`
|
||||||
|
|
||||||
|
This message is published after the connection has closed.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:websocket:close').subscribe(({ websocket, code, reason }) => {
|
||||||
|
console.log(websocket) // the WebSocket object
|
||||||
|
console.log(code) // the closing status code
|
||||||
|
console.log(reason) // the closing reason
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:websocket:socket_error`
|
||||||
|
|
||||||
|
This message is published if the socket experiences an error.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:websocket:socket_error').subscribe((error) => {
|
||||||
|
console.log(error)
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:websocket:ping`
|
||||||
|
|
||||||
|
This message is published after the client receives a ping frame, if the connection is not closing.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:websocket:ping').subscribe(({ payload }) => {
|
||||||
|
// a Buffer or undefined, containing the optional application data of the frame
|
||||||
|
console.log(payload)
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## `undici:websocket:pong`
|
||||||
|
|
||||||
|
This message is published after the client receives a pong frame.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import diagnosticsChannel from 'diagnostics_channel'
|
||||||
|
|
||||||
|
diagnosticsChannel.channel('undici:websocket:pong').subscribe(({ payload }) => {
|
||||||
|
// a Buffer or undefined, containing the optional application data of the frame
|
||||||
|
console.log(payload)
|
||||||
|
})
|
||||||
|
```
|
60
node_modules/undici/docs/api/DispatchInterceptor.md
generated
vendored
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
# Interface: DispatchInterceptor
|
||||||
|
|
||||||
|
Extends: `Function`
|
||||||
|
|
||||||
|
A function that can be applied to the `Dispatcher.Dispatch` function before it is invoked with a dispatch request.
|
||||||
|
|
||||||
|
This allows one to write logic to intercept both the outgoing request and the incoming response.
|
||||||
|
|
||||||
|
### Parameter: `Dispatcher.Dispatch`
|
||||||
|
|
||||||
|
The base dispatch function you are decorating.
|
||||||
|
|
||||||
|
### ReturnType: `Dispatcher.Dispatch`
|
||||||
|
|
||||||
|
A dispatch function that has been altered to provide additional logic.
|
||||||
|
|
||||||
|
### Basic Example
|
||||||
|
|
||||||
|
Here is an example of an interceptor being used to provide a JWT bearer token.
|
||||||
|
|
||||||
|
```js
|
||||||
|
'use strict'

const { Client } = require('undici')
|
||||||
|
|
||||||
|
const insertHeaderInterceptor = dispatch => {
|
||||||
|
return function InterceptedDispatch(opts, handler){
|
||||||
|
opts.headers.push('Authorization', 'Bearer [Some token]')
|
||||||
|
return dispatch(opts, handler)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const client = new Client('https://localhost:3000', {
|
||||||
|
interceptors: { Client: [insertHeaderInterceptor] }
|
||||||
|
})
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
### Basic Example 2
|
||||||
|
|
||||||
|
Here is a contrived example of an interceptor stripping the headers from a response.
|
||||||
|
|
||||||
|
```js
|
||||||
|
'use strict'

const { Client } = require('undici')
|
||||||
|
|
||||||
|
const clearHeadersInterceptor = dispatch => {
|
||||||
|
const { DecoratorHandler } = require('undici')
|
||||||
|
class ResultInterceptor extends DecoratorHandler {
|
||||||
|
onHeaders (statusCode, headers, resume) {
|
||||||
|
return super.onHeaders(statusCode, [], resume)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return function InterceptedDispatch(opts, handler){
|
||||||
|
return dispatch(opts, new ResultInterceptor(handler))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const client = new Client('https://localhost:3000', {
|
||||||
|
interceptors: { Client: [clearHeadersInterceptor] }
|
||||||
|
})
|
||||||
|
|
||||||
|
```
|
887
node_modules/undici/docs/api/Dispatcher.md
generated
vendored
Normal file
|
@ -0,0 +1,887 @@
|
||||||
|
# Dispatcher
|
||||||
|
|
||||||
|
Extends: `events.EventEmitter`
|
||||||
|
|
||||||
|
Dispatcher is the core API used to dispatch requests.
|
||||||
|
|
||||||
|
Requests are not guaranteed to be dispatched in order of invocation.
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `Dispatcher.close([callback]): Promise`
|
||||||
|
|
||||||
|
Closes the dispatcher and gracefully waits for enqueued requests to complete before resolving.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **callback** `(error: Error | null, data: null) => void` (optional)
|
||||||
|
|
||||||
|
Returns: `void | Promise<null>` - Only returns a `Promise` if no `callback` argument was passed
|
||||||
|
|
||||||
|
```js
|
||||||
|
dispatcher.close() // -> Promise
|
||||||
|
dispatcher.close(() => {}) // -> void
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Request resolves before Client closes
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('undici')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { body } = await client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
body.setEncoding('utf8')
|
||||||
|
body.on('data', console.log)
|
||||||
|
} catch (error) {}
|
||||||
|
|
||||||
|
await client.close()
|
||||||
|
|
||||||
|
console.log('Client closed')
|
||||||
|
server.close()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.connect(options[, callback])`
|
||||||
|
|
||||||
|
Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT).
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `ConnectOptions`
|
||||||
|
* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional)
|
||||||
|
|
||||||
|
Returns: `void | Promise<ConnectData>` - Only returns a `Promise` if no `callback` argument was passed
|
||||||
|
|
||||||
|
#### Parameter: `ConnectOptions`
|
||||||
|
|
||||||
|
* **path** `string`
|
||||||
|
* **headers** `UndiciHeaders` (optional) - Default: `null`
|
||||||
|
* **signal** `AbortSignal | events.EventEmitter | null` (optional) - Default: `null`
|
||||||
|
* **opaque** `unknown` (optional) - This argument is passed through to `ConnectData`
|
||||||
|
|
||||||
|
#### Parameter: `ConnectData`
|
||||||
|
|
||||||
|
* **statusCode** `number`
|
||||||
|
* **headers** `Record<string, string | string[] | undefined>`
|
||||||
|
* **socket** `stream.Duplex`
|
||||||
|
* **opaque** `unknown`
|
||||||
|
|
||||||
|
#### Example - Connect request with echo
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
throw Error('should never get here')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
server.on('connect', (req, socket, head) => {
|
||||||
|
socket.write('HTTP/1.1 200 Connection established\r\n\r\n')
|
||||||
|
|
||||||
|
let data = head.toString()
|
||||||
|
socket.on('data', (buf) => {
|
||||||
|
data += buf.toString()
|
||||||
|
})
|
||||||
|
|
||||||
|
socket.on('end', () => {
|
||||||
|
socket.end(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { socket } = await client.connect({
|
||||||
|
path: '/'
|
||||||
|
})
|
||||||
|
const wanted = 'Body'
|
||||||
|
let data = ''
|
||||||
|
socket.on('data', d => { data += d })
|
||||||
|
socket.on('end', () => {
|
||||||
|
console.log(`Data received: ${data.toString()} | Data wanted: ${wanted}`)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
})
|
||||||
|
socket.write(wanted)
|
||||||
|
socket.end()
|
||||||
|
} catch (error) { }
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.destroy([error, callback]): Promise`
|
||||||
|
|
||||||
|
Destroy the dispatcher abruptly with the given error. All pending and running requests will be asynchronously aborted and will error. Since this operation is dispatched asynchronously, there might still be some progress on dispatched requests.
|
||||||
|
|
||||||
|
Both arguments are optional; the method can be called in four different ways:
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **error** `Error | null` (optional)
|
||||||
|
* **callback** `(error: Error | null, data: null) => void` (optional)
|
||||||
|
|
||||||
|
Returns: `void | Promise<void>` - Only returns a `Promise` if no `callback` argument was passed
|
||||||
|
|
||||||
|
```js
|
||||||
|
dispatcher.destroy() // -> Promise
|
||||||
|
dispatcher.destroy(new Error()) // -> Promise
|
||||||
|
dispatcher.destroy(() => {}) // -> void
|
||||||
|
dispatcher.destroy(new Error(), () => {}) // -> void
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Request is aborted when Client is destroyed
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end()
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const request = client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
client.destroy()
|
||||||
|
.then(() => {
|
||||||
|
console.log('Client destroyed')
|
||||||
|
server.close()
|
||||||
|
})
|
||||||
|
await request
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.dispatch(options, handler)`
|
||||||
|
|
||||||
|
This is the low level API which all the preceding APIs are implemented on top of.
|
||||||
|
This API is expected to evolve through semver-major versions and is less stable than the preceding higher level APIs.
|
||||||
|
It is primarily intended for library developers who implement higher level APIs on top of this.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `DispatchOptions`
|
||||||
|
* **handler** `DispatchHandler`
|
||||||
|
|
||||||
|
Returns: `Boolean` - `false` if dispatcher is busy and further dispatch calls won't make any progress until the `'drain'` event has been emitted.
|
||||||
|
|
||||||
|
#### Parameter: `DispatchOptions`
|
||||||
|
|
||||||
|
* **origin** `string | URL`
|
||||||
|
* **path** `string`
|
||||||
|
* **method** `string`
|
||||||
|
* **reset** `boolean` (optional) - Default: `false` - If `false`, the request will attempt to create a long-living connection by sending the `connection: keep-alive` header, otherwise it will attempt to close the connection immediately after the response by sending `connection: close` within the request and closing the socket afterwards.
|
||||||
|
* **body** `string | Buffer | Uint8Array | stream.Readable | Iterable | AsyncIterable | null` (optional) - Default: `null`
|
||||||
|
* **headers** `UndiciHeaders | string[]` (optional) - Default: `null`.
|
||||||
|
* **query** `Record<string, any> | null` (optional) - Default: `null` - Query string params to be embedded in the request URL. Note that both keys and values of query are encoded using `encodeURIComponent`. If for some reason you need to send them unencoded, embed query params into path directly instead.
|
||||||
|
* **idempotent** `boolean` (optional) - Default: `true` if `method` is `'HEAD'` or `'GET'` - Whether the requests can be safely retried or not. If `false` the request won't be sent until all preceding requests in the pipeline have completed.
|
||||||
|
* **blocking** `boolean` (optional) - Default: `false` - Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received.
|
||||||
|
* **upgrade** `string | null` (optional) - Default: `null` - Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`.
|
||||||
|
* **bodyTimeout** `number | null` (optional) - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds.
|
||||||
|
* **headersTimeout** `number | null` (optional) - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
|
||||||
|
* **throwOnError** `boolean` (optional) - Default: `false` - Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server.
|
||||||
|
* **expectContinue** `boolean` (optional) - Default: `false` - For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server
|
||||||
|
|
||||||
|
#### Parameter: `DispatchHandler`
|
||||||
|
|
||||||
|
* **onConnect** `(abort: () => void, context: object) => void` - Invoked before a request is dispatched on the socket. May be invoked multiple times if the request is retried after the request at the head of the pipeline fails.
|
||||||
|
* **onError** `(error: Error) => void` - Invoked when an error has occurred. May not throw.
|
||||||
|
* **onUpgrade** `(statusCode: number, headers: Buffer[], socket: Duplex) => void` (optional) - Invoked when request is upgraded. Required if `DispatchOptions.upgrade` is defined or `DispatchOptions.method === 'CONNECT'`.
|
||||||
|
* **onHeaders** `(statusCode: number, headers: Buffer[], resume: () => void, statusText: string) => boolean` - Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. Not required for `upgrade` requests.
|
||||||
|
* **onData** `(chunk: Buffer) => boolean` - Invoked when response payload data is received. Not required for `upgrade` requests.
|
||||||
|
* **onComplete** `(trailers: Buffer[]) => void` - Invoked when response payload and trailers have been received and the request has completed. Not required for `upgrade` requests.
|
||||||
|
* **onBodySent** `(chunk: string | Buffer | Uint8Array) => void` - Invoked when a body chunk is sent to the server. Not required. For a stream or iterable body this will be invoked for every chunk. For other body types, it will be invoked once after the body is sent.
|
||||||
|
|
||||||
|
#### Example 1 - Dispatch GET request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
const data = []
|
||||||
|
|
||||||
|
client.dispatch({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'x-foo': 'bar'
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
onConnect: () => {
|
||||||
|
console.log('Connected!')
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
console.error(error)
|
||||||
|
},
|
||||||
|
onHeaders: (statusCode, headers) => {
|
||||||
|
console.log(`onHeaders | statusCode: ${statusCode} | headers: ${headers}`)
|
||||||
|
},
|
||||||
|
onData: (chunk) => {
|
||||||
|
console.log('onData: chunk received')
|
||||||
|
data.push(chunk)
|
||||||
|
},
|
||||||
|
onComplete: (trailers) => {
|
||||||
|
console.log(`onComplete | trailers: ${trailers}`)
|
||||||
|
const res = Buffer.concat(data).toString('utf8')
|
||||||
|
console.log(`Data: ${res}`)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example 2 - Dispatch Upgrade Request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end()
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
server.on('upgrade', (request, socket, head) => {
|
||||||
|
console.log('Node.js Server - upgrade event')
|
||||||
|
socket.write('HTTP/1.1 101 Web Socket Protocol Handshake\r\n')
|
||||||
|
socket.write('Upgrade: WebSocket\r\n')
|
||||||
|
socket.write('Connection: Upgrade\r\n')
|
||||||
|
socket.write('\r\n')
|
||||||
|
socket.end()
|
||||||
|
})
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
client.dispatch({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET',
|
||||||
|
upgrade: 'websocket'
|
||||||
|
}, {
|
||||||
|
onConnect: () => {
|
||||||
|
console.log('Undici Client - onConnect')
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
console.log('onError') // shouldn't print
|
||||||
|
},
|
||||||
|
onUpgrade: (statusCode, headers, socket) => {
|
||||||
|
console.log('Undici Client - onUpgrade')
|
||||||
|
console.log(`onUpgrade Headers: ${headers}`)
|
||||||
|
socket.on('data', buffer => {
|
||||||
|
console.log(buffer.toString('utf8'))
|
||||||
|
})
|
||||||
|
socket.on('end', () => {
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
})
|
||||||
|
socket.end()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example 3 - Dispatch POST request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
request.on('data', (data) => {
|
||||||
|
console.log(`Request Data: ${data.toString('utf8')}`)
|
||||||
|
const body = JSON.parse(data)
|
||||||
|
body.message = 'World'
|
||||||
|
response.end(JSON.stringify(body))
|
||||||
|
})
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
const data = []
|
||||||
|
|
||||||
|
client.dispatch({
|
||||||
|
path: '/',
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'content-type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ message: 'Hello' })
|
||||||
|
}, {
|
||||||
|
onConnect: () => {
|
||||||
|
console.log('Connected!')
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
console.error(error)
|
||||||
|
},
|
||||||
|
onHeaders: (statusCode, headers) => {
|
||||||
|
console.log(`onHeaders | statusCode: ${statusCode} | headers: ${headers}`)
|
||||||
|
},
|
||||||
|
onData: (chunk) => {
|
||||||
|
console.log('onData: chunk received')
|
||||||
|
data.push(chunk)
|
||||||
|
},
|
||||||
|
onComplete: (trailers) => {
|
||||||
|
console.log(`onComplete | trailers: ${trailers}`)
|
||||||
|
const res = Buffer.concat(data).toString('utf8')
|
||||||
|
console.log(`Response Data: ${res}`)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.pipeline(options, handler)`
|
||||||
|
|
||||||
|
For easy use with [stream.pipeline](https://nodejs.org/api/stream.html#stream_stream_pipeline_source_transforms_destination_callback). The `handler` argument should return a `Readable` from which the result will be read. Usually it should just return the `body` argument unless some kind of transformation needs to be performed based on e.g. `headers` or `statusCode`. The `handler` should validate the response and save any required state. If there is an error, it should be thrown. The function returns a `Duplex` which writes to the request and reads from the response.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `PipelineOptions`
|
||||||
|
* **handler** `(data: PipelineHandlerData) => stream.Readable`
|
||||||
|
|
||||||
|
Returns: `stream.Duplex`
|
||||||
|
|
||||||
|
#### Parameter: PipelineOptions
|
||||||
|
|
||||||
|
Extends: [`RequestOptions`](#parameter-requestoptions)
|
||||||
|
|
||||||
|
* **objectMode** `boolean` (optional) - Default: `false` - Set to `true` if the `handler` will return an object stream.
|
||||||
|
|
||||||
|
#### Parameter: PipelineHandlerData
|
||||||
|
|
||||||
|
* **statusCode** `number`
|
||||||
|
* **headers** `Record<string, string | string[] | undefined>`
|
||||||
|
* **opaque** `unknown`
|
||||||
|
* **body** `stream.Readable`
|
||||||
|
* **context** `object`
|
||||||
|
* **onInfo** `({statusCode: number, headers: Record<string, string | string[]>}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received.
|
||||||
|
|
||||||
|
#### Example 1 - Pipeline Echo
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Readable, Writable, PassThrough, pipeline } from 'stream'
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
request.pipe(response)
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
let res = ''
|
||||||
|
|
||||||
|
pipeline(
|
||||||
|
new Readable({
|
||||||
|
read () {
|
||||||
|
this.push(Buffer.from('undici'))
|
||||||
|
this.push(null)
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
client.pipeline({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
}, ({ statusCode, headers, body }) => {
|
||||||
|
console.log(`response received ${statusCode}`)
|
||||||
|
console.log('headers', headers)
|
||||||
|
return pipeline(body, new PassThrough(), () => {})
|
||||||
|
}),
|
||||||
|
new Writable({
|
||||||
|
write (chunk, _, callback) {
|
||||||
|
res += chunk.toString()
|
||||||
|
callback()
|
||||||
|
},
|
||||||
|
final (callback) {
|
||||||
|
console.log(`Response pipelined to writable: ${res}`)
|
||||||
|
callback()
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
error => {
|
||||||
|
if (error) {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.request(options[, callback])`
|
||||||
|
|
||||||
|
Performs an HTTP request.
|
||||||
|
|
||||||
|
Non-idempotent requests will not be pipelined in order
|
||||||
|
to avoid indirect failures.
|
||||||
|
|
||||||
|
Idempotent requests will be automatically retried if
|
||||||
|
they fail due to indirect failure from the request
|
||||||
|
at the head of the pipeline. This does not apply to
|
||||||
|
idempotent requests with a stream request body.
|
||||||
|
|
||||||
|
All response bodies must always be fully consumed or destroyed.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `RequestOptions`
|
||||||
|
* **callback** `(error: Error | null, data: ResponseData) => void` (optional)
|
||||||
|
|
||||||
|
Returns: `void | Promise<ResponseData>` - Only returns a `Promise` if no `callback` argument was passed.
|
||||||
|
|
||||||
|
#### Parameter: `RequestOptions`
|
||||||
|
|
||||||
|
Extends: [`DispatchOptions`](#parameter-dispatchoptions)
|
||||||
|
|
||||||
|
* **opaque** `unknown` (optional) - Default: `null` - Used for passing through context to `ResponseData`.
|
||||||
|
* **signal** `AbortSignal | events.EventEmitter | null` (optional) - Default: `null`.
|
||||||
|
* **onInfo** `({statusCode: number, headers: Record<string, string | string[]>}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received.
|
||||||
|
|
||||||
|
The `RequestOptions.method` property should not be `'CONNECT'`.
|
||||||
|
|
||||||
|
#### Parameter: `ResponseData`
|
||||||
|
|
||||||
|
* **statusCode** `number`
|
||||||
|
* **headers** `Record<string, string | string[]>` - Note that all header keys are lower-cased, e.g. `content-type`.
|
||||||
|
* **body** `stream.Readable` which also implements [the body mixin from the Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin).
|
||||||
|
* **trailers** `Record<string, string>` - This object starts out
|
||||||
|
as empty and will be mutated to contain trailers after `body` has emitted `'end'`.
|
||||||
|
* **opaque** `unknown`
|
||||||
|
* **context** `object`
|
||||||
|
|
||||||
|
`body` contains the following additional [body mixin](https://fetch.spec.whatwg.org/#body-mixin) methods and properties:
|
||||||
|
|
||||||
|
- `text()`
|
||||||
|
- `json()`
|
||||||
|
- `arrayBuffer()`
|
||||||
|
- `body`
|
||||||
|
- `bodyUsed`
|
||||||
|
|
||||||
|
`body` can not be consumed twice. For example, calling `text()` after `json()` throws `TypeError`.
|
||||||
|
|
||||||
|
`body` contains the following additional extensions:
|
||||||
|
|
||||||
|
- `dump({ limit: Integer })` - dumps the response by reading up to `limit` bytes without killing the socket; the `limit` option is optional - Default: `262144`.
|
||||||
|
|
||||||
|
Note that `body` will still be a `Readable` even if it is empty, but attempting to deserialize it with `json()` will result in an exception. The recommended way to ensure there is a body to deserialize is to check that the status code is not 204 and that the `content-type` header starts with `application/json`, as in the sketch below.
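
A minimal sketch of that check; the server address and the `/resource` path are placeholders:

```js
import { Client } from 'undici'

// Assumes a server is already listening on http://localhost:3000.
const client = new Client('http://localhost:3000')

const { statusCode, headers, body } = await client.request({
  path: '/resource',
  method: 'GET'
})

if (statusCode !== 204 && headers['content-type']?.startsWith('application/json')) {
  console.log(await body.json()) // safe to deserialize
} else {
  await body.dump() // consume the body so the connection can be reused
}

client.close()
```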
|
||||||
|
|
||||||
|
#### Example 1 - Basic GET Request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { body, headers, statusCode, trailers } = await client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
console.log(`response received ${statusCode}`)
|
||||||
|
console.log('headers', headers)
|
||||||
|
body.setEncoding('utf8')
|
||||||
|
body.on('data', console.log)
|
||||||
|
body.on('end', () => {
|
||||||
|
console.log('trailers', trailers)
|
||||||
|
})
|
||||||
|
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example 2 - Aborting a request
|
||||||
|
|
||||||
|
> Node.js v15+ is required to run this example
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
const abortController = new AbortController()
|
||||||
|
|
||||||
|
try {
|
||||||
|
client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET',
|
||||||
|
signal: abortController.signal
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error) // should print a RequestAbortedError
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
abortController.abort()
|
||||||
|
```
|
||||||
|
|
||||||
|
Alternatively, any `EventEmitter` that emits an `'abort'` event may be used as an abort controller:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import EventEmitter, { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
const ee = new EventEmitter()
|
||||||
|
|
||||||
|
try {
|
||||||
|
client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET',
|
||||||
|
signal: ee
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error) // should print a RequestAbortedError
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
ee.emit('abort')
|
||||||
|
```
|
||||||
|
|
||||||
|
Destroying the request or response body will have the same effect.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { body } = await client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
body.destroy()
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error) // should print a RequestAbortedError
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.stream(options, factory[, callback])`
|
||||||
|
|
||||||
|
A faster version of `Dispatcher.request`. This method expects the second argument `factory` to return a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable) stream which the response will be written to. This improves performance by avoiding creating an intermediate [`stream.Readable`](https://nodejs.org/api/stream.html#stream_readable_streams) stream when the user expects to directly pipe the response body to a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable) stream.
|
||||||
|
|
||||||
|
As demonstrated in [Example 1 - Basic GET stream request](#example-1---basic-get-stream-request), it is recommended to use the `option.opaque` property to avoid creating a closure for the `factory` method. This pattern works well with Node.js Web Frameworks such as [Fastify](https://fastify.io). See [Example 2 - Stream to Fastify Response](#example-2---stream-to-fastify-response) for more details.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `RequestOptions`
|
||||||
|
* **factory** `(data: StreamFactoryData) => stream.Writable`
|
||||||
|
* **callback** `(error: Error | null, data: StreamData) => void` (optional)
|
||||||
|
|
||||||
|
Returns: `void | Promise<StreamData>` - Only returns a `Promise` if no `callback` argument was passed
|
||||||
|
|
||||||
|
#### Parameter: `StreamFactoryData`
|
||||||
|
|
||||||
|
* **statusCode** `number`
|
||||||
|
* **headers** `Record<string, string | string[] | undefined>`
|
||||||
|
* **opaque** `unknown`
|
||||||
|
* **onInfo** `({statusCode: number, headers: Record<string, string | string[]>}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received.
|
||||||
|
|
||||||
|
#### Parameter: `StreamData`
|
||||||
|
|
||||||
|
* **opaque** `unknown`
|
||||||
|
* **trailers** `Record<string, string>`
|
||||||
|
* **context** `object`
|
||||||
|
|
||||||
|
#### Example 1 - Basic GET stream request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
import { Writable } from 'stream'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.end('Hello, World!')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
const bufs = []
|
||||||
|
|
||||||
|
try {
|
||||||
|
await client.stream({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET',
|
||||||
|
opaque: { bufs }
|
||||||
|
}, ({ statusCode, headers, opaque: { bufs } }) => {
|
||||||
|
console.log(`response received ${statusCode}`)
|
||||||
|
console.log('headers', headers)
|
||||||
|
return new Writable({
|
||||||
|
write (chunk, encoding, callback) {
|
||||||
|
bufs.push(chunk)
|
||||||
|
callback()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log(Buffer.concat(bufs).toString('utf-8'))
|
||||||
|
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example 2 - Stream to Fastify Response
|
||||||
|
|
||||||
|
In this example, a request is made to the Fastify server with an undici `Client`. This request then executes the Fastify route handler, which makes a subsequent request to the raw Node.js HTTP server using `Dispatcher.stream()`. The Fastify response is passed to the `opaque` option so that undici can tap into the underlying writable stream using `response.raw`. This methodology demonstrates how one could use undici and Fastify together to create fast-as-possible requests from one backend server to another.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
import fastify from 'fastify'
|
||||||
|
|
||||||
|
const nodeServer = createServer((request, response) => {
|
||||||
|
response.end('Hello, World! From Node.js HTTP Server')
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(nodeServer, 'listening')
|
||||||
|
|
||||||
|
console.log('Node Server listening')
|
||||||
|
|
||||||
|
const nodeServerUndiciClient = new Client(`http://localhost:${nodeServer.address().port}`)
|
||||||
|
|
||||||
|
const fastifyServer = fastify()
|
||||||
|
|
||||||
|
fastifyServer.route({
|
||||||
|
url: '/',
|
||||||
|
method: 'GET',
|
||||||
|
handler: (request, response) => {
|
||||||
|
nodeServerUndiciClient.stream({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET',
|
||||||
|
opaque: response
|
||||||
|
}, ({ opaque }) => opaque.raw)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
await fastifyServer.listen()
|
||||||
|
|
||||||
|
console.log('Fastify Server listening')
|
||||||
|
|
||||||
|
const fastifyServerUndiciClient = new Client(`http://localhost:${fastifyServer.server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { statusCode, body } = await fastifyServerUndiciClient.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log(`response received ${statusCode}`)
|
||||||
|
body.setEncoding('utf8')
|
||||||
|
body.on('data', console.log)
|
||||||
|
|
||||||
|
nodeServerUndiciClient.close()
|
||||||
|
fastifyServerUndiciClient.close()
|
||||||
|
fastifyServer.close()
|
||||||
|
nodeServer.close()
|
||||||
|
} catch (error) { }
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Dispatcher.upgrade(options[, callback])`
|
||||||
|
|
||||||
|
Upgrade to a different protocol. Visit [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `UpgradeOptions`
|
||||||
|
|
||||||
|
* **callback** `(error: Error | null, data: UpgradeData) => void` (optional)
|
||||||
|
|
||||||
|
Returns: `void | Promise<UpgradeData>` - Only returns a `Promise` if no `callback` argument was passed
|
||||||
|
|
||||||
|
#### Parameter: `UpgradeOptions`
|
||||||
|
|
||||||
|
* **path** `string`
|
||||||
|
* **method** `string` (optional) - Default: `'GET'`
|
||||||
|
* **headers** `UndiciHeaders` (optional) - Default: `null`
|
||||||
|
* **protocol** `string` (optional) - Default: `'Websocket'` - A string of comma-separated protocols, in descending preference order.
|
||||||
|
* **signal** `AbortSignal | EventEmitter | null` (optional) - Default: `null`
|
||||||
|
|
||||||
|
#### Parameter: `UpgradeData`
|
||||||
|
|
||||||
|
* **headers** `http.IncomingHeaders`
|
||||||
|
* **socket** `stream.Duplex`
|
||||||
|
* **opaque** `unknown`
|
||||||
|
|
||||||
|
#### Example 1 - Basic Upgrade Request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { once } from 'events'
|
||||||
|
|
||||||
|
const server = createServer((request, response) => {
|
||||||
|
response.statusCode = 101
|
||||||
|
response.setHeader('connection', 'upgrade')
|
||||||
|
response.setHeader('upgrade', request.headers.upgrade)
|
||||||
|
response.end()
|
||||||
|
}).listen()
|
||||||
|
|
||||||
|
await once(server, 'listening')
|
||||||
|
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { headers, socket } = await client.upgrade({
|
||||||
|
path: '/',
|
||||||
|
})
|
||||||
|
socket.on('end', () => {
|
||||||
|
console.log(`upgrade: ${headers.upgrade}`) // upgrade: Websocket
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
})
|
||||||
|
socket.end()
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Instance Events
|
||||||
|
|
||||||
|
### Event: `'connect'`
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
* **origin** `URL`
|
||||||
|
* **targets** `Array<Dispatcher>`
|
||||||
|
|
||||||
|
### Event: `'disconnect'`
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
* **origin** `URL`
|
||||||
|
* **targets** `Array<Dispatcher>`
|
||||||
|
* **error** `Error`
|
||||||
|
|
||||||
|
### Event: `'connectionError'`
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
* **origin** `URL`
|
||||||
|
* **targets** `Array<Dispatcher>`
|
||||||
|
* **error** `Error`
|
||||||
|
|
||||||
|
Emitted when the dispatcher fails to connect to the
|
||||||
|
origin.
|
||||||
|
|
||||||
|
### Event: `'drain'`
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
|
||||||
|
* **origin** `URL`
|
||||||
|
|
||||||
|
Emitted when the dispatcher is no longer busy.
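
A minimal sketch of listening for the events above; the server address is a placeholder:

```js
import { Client } from 'undici'

const client = new Client('http://localhost:3000')

client.on('connect', (origin) => {
  console.log(`connected to ${origin}`)
})

client.on('disconnect', (origin, targets, error) => {
  console.log(`disconnected from ${origin}: ${error.message}`)
})

client.on('connectionError', (origin, targets, error) => {
  console.error(`failed to connect to ${origin}: ${error.message}`)
})

client.on('drain', (origin) => {
  console.log(`${origin} is no longer busy`)
})
```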
|
||||||
|
|
||||||
|
## Parameter: `UndiciHeaders`
|
||||||
|
|
||||||
|
* `Record<string, string | string[] | undefined> | string[] | null`
|
||||||
|
|
||||||
|
Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in two forms; either as an object specified by the `Record<string, string | string[] | undefined>` (`IncomingHttpHeaders`) type, or an array of strings. An array representation of a header list must have an even length or an `InvalidArgumentError` will be thrown.
|
||||||
|
|
||||||
|
Keys are lowercase and values are not modified.
|
||||||
|
|
||||||
|
Request headers will derive a `host` from the `url` of the [Client](Client.md#class-client) instance if no `host` header was previously specified.
|
||||||
|
|
||||||
|
### Example 1 - Object
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
'content-length': '123',
|
||||||
|
'content-type': 'text/plain',
|
||||||
|
connection: 'keep-alive',
|
||||||
|
host: 'mysite.com',
|
||||||
|
accept: '*/*'
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 2 - Array
|
||||||
|
|
||||||
|
```js
|
||||||
|
[
|
||||||
|
'content-length', '123',
|
||||||
|
'content-type', 'text/plain',
|
||||||
|
'connection', 'keep-alive',
|
||||||
|
'host', 'mysite.com',
|
||||||
|
'accept', '*/*'
|
||||||
|
]
|
||||||
|
```
|
47
node_modules/undici/docs/api/Errors.md
generated
vendored
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
# Errors
|
||||||
|
|
||||||
|
Undici exposes a variety of error objects that you can use to enhance your error handling.
|
||||||
|
You can find all the error objects inside the `errors` key.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { errors } from 'undici'
|
||||||
|
```
|
||||||
|
|
||||||
|
| Error | Error Codes | Description |
|
||||||
|
| ------------------------------------ | ------------------------------------- | ------------------------------------------------------------------------- |
|
||||||
|
| `UndiciError` | `UND_ERR` | all errors below are extended from `UndiciError`. |
|
||||||
|
| `ConnectTimeoutError` | `UND_ERR_CONNECT_TIMEOUT` | socket is destroyed due to connect timeout. |
|
||||||
|
| `HeadersTimeoutError` | `UND_ERR_HEADERS_TIMEOUT` | socket is destroyed due to headers timeout. |
|
||||||
|
| `HeadersOverflowError` | `UND_ERR_HEADERS_OVERFLOW` | socket is destroyed due to headers' max size being exceeded. |
|
||||||
|
| `BodyTimeoutError` | `UND_ERR_BODY_TIMEOUT` | socket is destroyed due to body timeout. |
|
||||||
|
| `ResponseStatusCodeError` | `UND_ERR_RESPONSE_STATUS_CODE` | an error is thrown when `throwOnError` is `true` for status codes >= 400. |
|
||||||
|
| `InvalidArgumentError` | `UND_ERR_INVALID_ARG` | passed an invalid argument. |
|
||||||
|
| `InvalidReturnValueError` | `UND_ERR_INVALID_RETURN_VALUE` | returned an invalid value. |
|
||||||
|
| `RequestAbortedError` | `UND_ERR_ABORTED` | the request has been aborted by the user |
|
||||||
|
| `ClientDestroyedError` | `UND_ERR_DESTROYED` | trying to use a destroyed client. |
|
||||||
|
| `ClientClosedError` | `UND_ERR_CLOSED` | trying to use a closed client. |
|
||||||
|
| `SocketError` | `UND_ERR_SOCKET` | there is an error with the socket. |
|
||||||
|
| `NotSupportedError` | `UND_ERR_NOT_SUPPORTED` | encountered unsupported functionality. |
|
||||||
|
| `RequestContentLengthMismatchError` | `UND_ERR_REQ_CONTENT_LENGTH_MISMATCH` | request body does not match content-length header |
|
||||||
|
| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header |
|
||||||
|
| `InformationalError` | `UND_ERR_INFO` | expected error with reason |
|
||||||
|
| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed |
|
||||||
|
|
||||||
|
### `SocketError`
|
||||||
|
|
||||||
|
The `SocketError` has a `.socket` property which holds socket metadata:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
interface SocketInfo {
|
||||||
|
localAddress?: string
|
||||||
|
localPort?: number
|
||||||
|
remoteAddress?: string
|
||||||
|
remotePort?: number
|
||||||
|
remoteFamily?: string
|
||||||
|
timeout?: number
|
||||||
|
bytesWritten?: number
|
||||||
|
bytesRead?: number
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Be aware that in some cases the `.socket` property can be `null`.
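
A minimal sketch of using these error classes; the server address and path are placeholders, and `throwOnError` is the dispatch option described in the Dispatcher documentation:

```js
import { request, errors } from 'undici'

try {
  await request('http://localhost:3000/resource', { throwOnError: true })
} catch (error) {
  if (error instanceof errors.UndiciError) {
    // Every undici error carries a stable code, e.g. UND_ERR_RESPONSE_STATUS_CODE.
    console.error(`undici error ${error.code}: ${error.message}`)
    if (error instanceof errors.SocketError) {
      console.error('socket info', error.socket) // can be null in some cases
    }
  } else {
    throw error
  }
}
```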
|
27
node_modules/undici/docs/api/Fetch.md
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
# Fetch
|
||||||
|
|
||||||
|
Undici exposes a fetch() method that starts the process of fetching a resource from the network.
|
||||||
|
|
||||||
|
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
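
A minimal sketch, assuming a server on `http://localhost:3000` that responds with JSON:

```js
import { fetch } from 'undici'

const response = await fetch('http://localhost:3000/foo')

console.log(response.status)
console.log(await response.json())
```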
|
||||||
|
|
||||||
|
## File
|
||||||
|
|
||||||
|
This API is implemented as per the standard; you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File)
|
||||||
|
|
||||||
|
In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one.
|
||||||
|
|
||||||
|
## FormData
|
||||||
|
|
||||||
|
This API is implemented as per the standard; you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData)
|
||||||
|
|
||||||
|
## Response
|
||||||
|
|
||||||
|
This API is implemented as per the standard; you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
|
||||||
|
|
||||||
|
## Request
|
||||||
|
|
||||||
|
This API is implemented as per the standard; you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
|
||||||
|
|
||||||
|
## Header
|
||||||
|
|
||||||
|
This API is implemented as per the standard; you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
|
540
node_modules/undici/docs/api/MockAgent.md
generated
vendored
Normal file
|
@ -0,0 +1,540 @@
|
||||||
|
# Class: MockAgent
|
||||||
|
|
||||||
|
Extends: `undici.Dispatcher`
|
||||||
|
|
||||||
|
A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead.
|
||||||
|
|
||||||
|
## `new MockAgent([options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `MockAgentOptions` (optional) - It extends the `Agent` options.
|
||||||
|
|
||||||
|
Returns: `MockAgent`
|
||||||
|
|
||||||
|
### Parameter: `MockAgentOptions`
|
||||||
|
|
||||||
|
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
|
||||||
|
|
||||||
|
* **agent** `Agent` (optional) - Default: `new Agent([options])` - a custom agent encapsulated by the MockAgent.
|
||||||
|
|
||||||
|
### Example - Basic MockAgent instantiation
|
||||||
|
|
||||||
|
This will instantiate the MockAgent. It will not do anything until it is registered as the agent to use for requests and mock interceptions are added.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example - Basic MockAgent instantiation with custom agent
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Agent, MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const agent = new Agent()
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent({ agent })
|
||||||
|
```
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `MockAgent.get(origin)`
|
||||||
|
|
||||||
|
This method creates and retrieves MockPool or MockClient instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned.
|
||||||
|
|
||||||
|
For subsequent `MockAgent.get` calls on the same origin, the same mock instance will be returned.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **origin** `string | RegExp | (value) => boolean` - a matcher for the pool origin to be retrieved from the MockAgent.
|
||||||
|
|
||||||
|
| Matcher type | Condition to pass |
|
||||||
|
|:------------:| -------------------------- |
|
||||||
|
| `string` | Exact match against string |
|
||||||
|
| `RegExp` | Regex must pass |
|
||||||
|
| `Function` | Function must return true |
|
||||||
|
|
||||||
|
Returns: `MockClient | MockPool`.
|
||||||
|
|
||||||
|
| `MockAgentOptions` | Mock instance returned |
|
||||||
|
| -------------------- | ---------------------- |
|
||||||
|
| `connections === 1` | `MockClient` |
|
||||||
|
| `connections` > `1` | `MockPool` |
|
||||||
|
|
||||||
|
#### Example - Basic Mocked Request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const { statusCode, body } = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Mocked Request with local mock agent dispatcher
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo', { dispatcher: mockAgent })
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Mocked Request with local mock pool dispatcher
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo', { dispatcher: mockPool })
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Mocked Request with local mock client dispatcher
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent({ connections: 1 })
|
||||||
|
|
||||||
|
const mockClient = mockAgent.get('http://localhost:3000')
|
||||||
|
mockClient.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo', { dispatcher: mockClient })
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Mocked requests with multiple intercepts
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
mockPool.intercept({ path: '/hello'}).reply(200, 'hello')
|
||||||
|
|
||||||
|
const result1 = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', result1.statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of result1.body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
|
||||||
|
const result2 = await request('http://localhost:3000/hello')
|
||||||
|
|
||||||
|
console.log('response received', result2.statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of result2.body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data hello
|
||||||
|
}
|
||||||
|
```
|
||||||
|
#### Example - Mock different requests within the same file
|
||||||
|
```js
|
||||||
|
const { MockAgent, setGlobalDispatcher } = require('undici');
|
||||||
|
const agent = new MockAgent();
|
||||||
|
agent.disableNetConnect();
|
||||||
|
setGlobalDispatcher(agent);
|
||||||
|
describe('Test', () => {
|
||||||
|
it('200', async () => {
|
||||||
|
const mockAgent = agent.get('http://test.com');
|
||||||
|
// your test
|
||||||
|
});
|
||||||
|
it('200', async () => {
|
||||||
|
const mockAgent = agent.get('http://testing.com');
|
||||||
|
// your test
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with query body, headers and trailers
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo?hello=there&see=ya',
|
||||||
|
method: 'POST',
|
||||||
|
body: 'form1=data1&form2=data2'
|
||||||
|
}).reply(200, { foo: 'bar' }, {
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
trailers: { 'Content-MD5': 'test' }
|
||||||
|
})
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
trailers,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo?hello=there&see=ya', {
|
||||||
|
method: 'POST',
|
||||||
|
body: 'form1=data1&form2=data2'
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
console.log('headers', headers) // { 'content-type': 'application/json' }
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // '{"foo":"bar"}'
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('trailers', trailers) // { 'content-md5': 'test' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with origin regex
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get(new RegExp('http://localhost:3000'))
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with origin function
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get((origin) => origin === 'http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.close()`
|
||||||
|
|
||||||
|
Closes the mock agent and waits for registered mock pools and clients to also close before resolving.
|
||||||
|
|
||||||
|
Returns: `Promise<void>`
|
||||||
|
|
||||||
|
#### Example - clean up after tests are complete
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
await mockAgent.close()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.dispatch(options, handlers)`
|
||||||
|
|
||||||
|
Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions).
|
||||||
|
|
||||||
|
### `MockAgent.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
#### Example - MockAgent request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await mockAgent.request({
|
||||||
|
origin: 'http://localhost:3000',
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.deactivate()`
|
||||||
|
|
||||||
|
This method disables mocking in MockAgent.
|
||||||
|
|
||||||
|
Returns: `void`
|
||||||
|
|
||||||
|
#### Example - Deactivate Mocking
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
mockAgent.deactivate()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.activate()`
|
||||||
|
|
||||||
|
This method enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called.
|
||||||
|
|
||||||
|
Returns: `void`
|
||||||
|
|
||||||
|
#### Example - Activate Mocking
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
mockAgent.deactivate()
|
||||||
|
// No mocking will occur
|
||||||
|
|
||||||
|
// Later
|
||||||
|
mockAgent.activate()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.enableNetConnect([host])`
|
||||||
|
|
||||||
|
When requests are not matched in a MockAgent intercept, a real HTTP request is attempted. We can control this further through the use of `enableNetConnect`. This is achieved by defining host matchers so only matching requests will be attempted.
|
||||||
|
|
||||||
|
When using a string, it should only include the **hostname and optionally, the port**. In addition, calling this method multiple times with a string will allow all HTTP requests that match these values.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **host** `string | RegExp | (value) => boolean` - (optional)
|
||||||
|
|
||||||
|
Returns: `void`
|
||||||
|
|
||||||
|
#### Example - Allow all non-matching urls to be dispatched in a real HTTP request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
mockAgent.enableNetConnect()
|
||||||
|
|
||||||
|
await request('http://example.com')
|
||||||
|
// A real request is made
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Allow requests matching a host string to make real requests
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
mockAgent.enableNetConnect('example-1.com')
|
||||||
|
mockAgent.enableNetConnect('example-2.com:8080')
|
||||||
|
|
||||||
|
await request('http://example-1.com')
|
||||||
|
// A real request is made
|
||||||
|
|
||||||
|
await request('http://example-2.com:8080')
|
||||||
|
// A real request is made
|
||||||
|
|
||||||
|
await request('http://example-3.com')
|
||||||
|
// Will throw
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Allow requests matching a host regex to make real requests
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
mockAgent.enableNetConnect(new RegExp('example.com'))
|
||||||
|
|
||||||
|
await request('http://example.com')
|
||||||
|
// A real request is made
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Allow requests matching a host function to make real requests
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
mockAgent.enableNetConnect((value) => value === 'example.com')
|
||||||
|
|
||||||
|
await request('http://example.com')
|
||||||
|
// A real request is made
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.disableNetConnect()`
|
||||||
|
|
||||||
|
This method causes all requests to throw when they are not matched by a MockAgent intercept.
|
||||||
|
|
||||||
|
Returns: `void`
|
||||||
|
|
||||||
|
#### Example - Disable all non-matching requests by throwing an error for each
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
|
||||||
|
mockAgent.disableNetConnect()
|
||||||
|
|
||||||
|
await request('http://example.com')
|
||||||
|
// Will throw
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.pendingInterceptors()`
|
||||||
|
|
||||||
|
This method returns any pending interceptors registered on a mock agent. A pending interceptor meets one of the following criteria:
|
||||||
|
|
||||||
|
- Is registered with neither `.times(<number>)` nor `.persist()`, and has not been invoked;
|
||||||
|
- Is persistent (i.e., registered with `.persist()`) and has not been invoked;
|
||||||
|
- Is registered with `.times(<number>)` and has not been invoked `<number>` of times.
|
||||||
|
|
||||||
|
Returns: `PendingInterceptor[]` (where `PendingInterceptor` is a `MockDispatch` with an additional `origin: string`)
|
||||||
|
|
||||||
|
#### Example - List all pending interceptors
|
||||||
|
|
||||||
|
```js
|
||||||
|
const agent = new MockAgent()
|
||||||
|
agent.disableNetConnect()
|
||||||
|
|
||||||
|
agent
|
||||||
|
.get('https://example.com')
|
||||||
|
.intercept({ method: 'GET', path: '/' })
|
||||||
|
.reply(200)
|
||||||
|
|
||||||
|
const pendingInterceptors = agent.pendingInterceptors()
|
||||||
|
// Returns [
|
||||||
|
// {
|
||||||
|
// timesInvoked: 0,
|
||||||
|
// times: 1,
|
||||||
|
// persist: false,
|
||||||
|
// consumed: false,
|
||||||
|
// pending: true,
|
||||||
|
// path: '/',
|
||||||
|
// method: 'GET',
|
||||||
|
// body: undefined,
|
||||||
|
// headers: undefined,
|
||||||
|
// data: {
|
||||||
|
// error: null,
|
||||||
|
// statusCode: 200,
|
||||||
|
// data: '',
|
||||||
|
// headers: {},
|
||||||
|
// trailers: {}
|
||||||
|
// },
|
||||||
|
// origin: 'https://example.com'
|
||||||
|
// }
|
||||||
|
// ]
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockAgent.assertNoPendingInterceptors([options])`
|
||||||
|
|
||||||
|
This method throws if the mock agent has any pending interceptors. A pending interceptor meets one of the following criteria:
|
||||||
|
|
||||||
|
- Is registered with neither `.times(<number>)` nor `.persist()`, and has not been invoked;
|
||||||
|
- Is persistent (i.e., registered with `.persist()`) and has not been invoked;
|
||||||
|
- Is registered with `.times(<number>)` and has not been invoked `<number>` of times.
|
||||||
|
|
||||||
|
#### Example - Check that there are no pending interceptors
|
||||||
|
|
||||||
|
```js
|
||||||
|
const agent = new MockAgent()
|
||||||
|
agent.disableNetConnect()
|
||||||
|
|
||||||
|
agent
|
||||||
|
.get('https://example.com')
|
||||||
|
.intercept({ method: 'GET', path: '/' })
|
||||||
|
.reply(200)
|
||||||
|
|
||||||
|
agent.assertNoPendingInterceptors()
|
||||||
|
// Throws an UndiciError with the following message:
|
||||||
|
//
|
||||||
|
// 1 interceptor is pending:
|
||||||
|
//
|
||||||
|
// ┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┐
|
||||||
|
// │ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │
|
||||||
|
// ├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤
|
||||||
|
// │ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │
|
||||||
|
// └─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘
|
||||||
|
```
|
77
node_modules/undici/docs/api/MockClient.md
generated
vendored
Normal file
|
@ -0,0 +1,77 @@
|
||||||
|
# Class: MockClient
|
||||||
|
|
||||||
|
Extends: `undici.Client`
|
||||||
|
|
||||||
|
A mock client class that implements the same api as [MockPool](MockPool.md).
|
||||||
|
|
||||||
|
## `new MockClient(origin, [options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **origin** `string` - It should only include the **protocol, hostname, and port**.
|
||||||
|
* **options** `MockClientOptions` - It extends the `Client` options.
|
||||||
|
|
||||||
|
Returns: `MockClient`
|
||||||
|
|
||||||
|
### Parameter: `MockClientOptions`
|
||||||
|
|
||||||
|
Extends: `ClientOptions`
|
||||||
|
|
||||||
|
* **agent** `Agent` - the agent to associate this MockClient with.
|
||||||
|
|
||||||
|
### Example - Basic MockClient instantiation
|
||||||
|
|
||||||
|
We can use MockAgent to instantiate a MockClient ready to be used to intercept specified requests. It will not do anything until it is registered as the agent to use and mock requests are registered.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
// Connections must be set to 1 to return a MockClient instance
|
||||||
|
const mockAgent = new MockAgent({ connections: 1 })
|
||||||
|
|
||||||
|
const mockClient = mockAgent.get('http://localhost:3000')
|
||||||
|
```
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `MockClient.intercept(options)`
|
||||||
|
|
||||||
|
Implements: [`MockPool.intercept(options)`](MockPool.md#mockpoolinterceptoptions)
|
||||||
|
|
||||||
|
### `MockClient.close()`
|
||||||
|
|
||||||
|
Implements: [`MockPool.close()`](MockPool.md#mockpoolclose)
|
||||||
|
|
||||||
|
### `MockClient.dispatch(options, handlers)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
### `MockClient.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
#### Example - MockClient request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent({ connections: 1 })
|
||||||
|
|
||||||
|
const mockClient = mockAgent.get('http://localhost:3000')
|
||||||
|
mockClient.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await mockClient.request({
|
||||||
|
origin: 'http://localhost:3000',
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
12
node_modules/undici/docs/api/MockErrors.md
generated
vendored
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
# MockErrors
|
||||||
|
|
||||||
|
Undici exposes a variety of mock error objects that you can use to enhance your mock error handling.
|
||||||
|
You can find all the mock error objects inside the `mockErrors` key.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { mockErrors } from 'undici'
|
||||||
|
```
|
||||||
|
|
||||||
|
| Mock Error | Mock Error Codes | Description |
|
||||||
|
| --------------------- | ------------------------------- | ---------------------------------------------------------- |
|
||||||
|
| `MockNotMatchedError` | `UND_MOCK_ERR_MOCK_NOT_MATCHED` | The request does not match any registered mock dispatches. |
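
A minimal sketch of handling this error; the origin and path are placeholders:

```js
import { MockAgent, mockErrors, request } from 'undici'

const mockAgent = new MockAgent()
mockAgent.disableNetConnect()

try {
  // No interceptor was registered for this path, so the request cannot be matched.
  await request('http://localhost:3000/foo', { dispatcher: mockAgent })
} catch (error) {
  if (error instanceof mockErrors.MockNotMatchedError) {
    console.error(error.code) // UND_MOCK_ERR_MOCK_NOT_MATCHED
  }
}
```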
|
547
node_modules/undici/docs/api/MockPool.md
generated
vendored
Normal file
|
@ -0,0 +1,547 @@
|
||||||
|
# Class: MockPool
|
||||||
|
|
||||||
|
Extends: `undici.Pool`
|
||||||
|
|
||||||
|
A mock Pool class that implements the Pool API and is used by MockAgent to intercept real requests and return mocked responses.
|
||||||
|
|
||||||
|
## `new MockPool(origin, [options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **origin** `string` - It should only include the **protocol, hostname, and port**.
|
||||||
|
* **options** `MockPoolOptions` - It extends the `Pool` options.
|
||||||
|
|
||||||
|
Returns: `MockPool`
|
||||||
|
|
||||||
|
### Parameter: `MockPoolOptions`
|
||||||
|
|
||||||
|
Extends: `PoolOptions`
|
||||||
|
|
||||||
|
* **agent** `Agent` - the agent to associate this MockPool with.
|
||||||
|
|
||||||
|
### Example - Basic MockPool instantiation
|
||||||
|
|
||||||
|
We can use MockAgent to instantiate a MockPool ready to be used to intercept specified requests. It will not do anything until it is registered as the agent to use and mock requests are registered.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
```
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `MockPool.intercept(options)`
|
||||||
|
|
||||||
|
This method defines the interception rules for matching against requests for a MockPool. We can intercept multiple times on a single instance, but each intercept is only used once. For example, if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()`, you will get a `MockNotMatchedError` on the second request if you only call `intercept()` once.
|
||||||
|
|
||||||
|
When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted.
|
||||||
|
|
||||||
|
| Matcher type | Condition to pass |
|
||||||
|
|:------------:| -------------------------- |
|
||||||
|
| `string` | Exact match against string |
|
||||||
|
| `RegExp` | Regex must pass |
|
||||||
|
| `Function` | Function must return true |
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `MockPoolInterceptOptions` - Interception options.
|
||||||
|
|
||||||
|
Returns: `MockInterceptor` corresponding to the input options.
|
||||||
|
|
||||||
|
### Parameter: `MockPoolInterceptOptions`
|
||||||
|
|
||||||
|
* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path. When a `RegExp` or callback is used, it will match against the request path including all query parameters in alphabetical order. When a `string` is provided, the query parameters can be conveniently specified through the `MockPoolInterceptOptions.query` setting.
|
||||||
|
* **method** `string | RegExp | (method: string) => boolean` - (optional) - a matcher for the HTTP request method. Defaults to `GET`.
|
||||||
|
* **body** `string | RegExp | (body: string) => boolean` - (optional) - a matcher for the HTTP request body.
|
||||||
|
* **headers** `Record<string, string | RegExp | (body: string) => boolean>` - (optional) - a matcher for the HTTP request headers. To be intercepted, a request must match all defined headers. Extra headers not defined here may (or may not) be included in the request and do not affect the interception in any way.
|
||||||
|
* **query** `Record<string, any> | null` - (optional) - a matcher for the HTTP request query string params. Only applies when a `string` was provided for `MockPoolInterceptOptions.path`. A sketch using this option follows the list.
|
||||||
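
A minimal sketch of the `query` option described above (the origin and values are assumptions):

```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'

const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)

const mockPool = mockAgent.get('http://localhost:3000')

// With a string `path`, query params can be matched separately via `query`.
mockPool.intercept({
  path: '/foo',
  method: 'GET',
  query: { hello: 'there' }
}).reply(200, 'foo')

const { statusCode } = await request('http://localhost:3000/foo?hello=there')

console.log('response received', statusCode) // response received 200
```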
|
|
||||||
|
### Return: `MockInterceptor`
|
||||||
|
|
||||||
|
We can define the behaviour of an intercepted request with the following options.
|
||||||
|
|
||||||
|
* **reply** `(statusCode: number, replyData: string | Buffer | object | MockInterceptor.MockResponseDataHandler, responseOptions?: MockResponseOptions) => MockScope` - define a reply for a matching request. You can define the replyData as a callback to read incoming request data. Default for `responseOptions` is `{}`.
|
||||||
|
* **reply** `(callback: MockInterceptor.MockReplyOptionsCallback) => MockScope` - define a reply for a matching request, allowing dynamic mocking of all reply options rather than just the data.
|
||||||
|
* **replyWithError** `(error: Error) => MockScope` - define an error for a matching request to throw.
|
||||||
|
* **defaultReplyHeaders** `(headers: Record<string, string>) => MockInterceptor` - define default headers to be included in subsequent replies. These are in addition to headers on a specific reply.
|
||||||
|
* **defaultReplyTrailers** `(trailers: Record<string, string>) => MockInterceptor` - define default trailers to be included in subsequent replies. These are in addition to trailers on a specific reply.
|
||||||
|
* **replyContentLength** `() => MockInterceptor` - define automatically calculated `content-length` headers to be included in subsequent replies.
|
||||||
|
|
||||||
|
The reply data of an intercepted request may either be a string, buffer, or JavaScript object. Objects are converted to JSON while strings and buffers are sent as-is.
|
||||||
|
|
||||||
|
By default, `reply` and `replyWithError` define the behaviour for the first matching request only. Subsequent requests will not be affected (this can be changed using the returned `MockScope`).
|
||||||
|
|
||||||
|
### Parameter: `MockResponseOptions`
|
||||||
|
|
||||||
|
* **headers** `Record<string, string>` - headers to be included on the mocked reply.
|
||||||
|
* **trailers** `Record<string, string>` - trailers to be included on the mocked reply.
|
||||||
|
|
||||||
|
### Return: `MockScope`
|
||||||
|
|
||||||
|
A `MockScope` is associated with a single `MockInterceptor`. With this, we can configure the default behaviour of an intercepted reply.
|
||||||
|
|
||||||
|
* **delay** `(waitInMs: number) => MockScope` - delay the associated reply by a set amount in ms. A sketch using this option follows the list.
|
||||||
|
* **persist** `() => MockScope` - any matching request will always reply with the defined response indefinitely.
|
||||||
|
* **times** `(repeatTimes: number) => MockScope` - any matching request will reply with the defined response a fixed number of times. This is overridden by **persist**.
|
||||||
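
#### Example - Mocked request with delayed reply

A minimal sketch of the `delay` option described above; the origin, path, and 100 ms value are assumptions:

```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'

const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)

const mockPool = mockAgent.get('http://localhost:3000')

// Delay delivery of the mocked reply by 100 ms.
mockPool.intercept({
  path: '/foo',
  method: 'GET'
}).reply(200, 'foo').delay(100)

const { statusCode } = await request('http://localhost:3000/foo')

console.log('response received', statusCode) // response received 200 (after ~100 ms)
```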
|
|
||||||
|
#### Example - Basic Mocked Request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
// MockPool
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request using reply data callbacks
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/echo',
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'User-Agent': 'undici',
|
||||||
|
Host: 'example.com'
|
||||||
|
}
|
||||||
|
}).reply(200, ({ headers }) => ({ message: headers.get('message') }))
|
||||||
|
|
||||||
|
const { statusCode, body, headers } = await request('http://localhost:3000', {
|
||||||
|
headers: {
|
||||||
|
message: 'hello world!'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
console.log('headers', headers) // { 'content-type': 'application/json' }
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // { "message":"hello world!" }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request using reply options callback
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/echo',
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'User-Agent': 'undici',
|
||||||
|
Host: 'example.com'
|
||||||
|
}
|
||||||
|
}).reply(({ headers }) => ({ statusCode: 200, data: { message: headers.get('message') } }))
|
||||||
|
|
||||||
|
const { statusCode, body, headers } = await request('http://localhost:3000', {
|
||||||
|
headers: {
|
||||||
|
message: 'hello world!'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
console.log('headers', headers) // { 'content-type': 'application/json' }
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // { "message":"hello world!" }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Mocked requests with multiple intercepts
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).reply(200, 'foo')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/hello',
|
||||||
|
method: 'GET',
|
||||||
|
}).reply(200, 'hello')
|
||||||
|
|
||||||
|
const result1 = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', result1.statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of result1.body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
|
||||||
|
const result2 = await request('http://localhost:3000/hello')
|
||||||
|
|
||||||
|
console.log('response received', result2.statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of result2.body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data hello
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with query body, request headers and response headers and trailers
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo?hello=there&see=ya',
|
||||||
|
method: 'POST',
|
||||||
|
body: 'form1=data1&form2=data2',
|
||||||
|
headers: {
|
||||||
|
'User-Agent': 'undici',
|
||||||
|
Host: 'example.com'
|
||||||
|
}
|
||||||
|
}).reply(200, { foo: 'bar' }, {
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
trailers: { 'Content-MD5': 'test' }
|
||||||
|
})
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
trailers,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo?hello=there&see=ya', {
|
||||||
|
method: 'POST',
|
||||||
|
body: 'form1=data1&form2=data2',
|
||||||
|
headers: {
|
||||||
|
foo: 'bar',
|
||||||
|
'User-Agent': 'undici',
|
||||||
|
Host: 'example.com'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
console.log('headers', headers) // { 'content-type': 'application/json' }
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // '{"foo":"bar"}'
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('trailers', trailers) // { 'content-md5': 'test' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request using different matchers
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: /^GET$/,
|
||||||
|
body: (value) => value === 'form=data',
|
||||||
|
headers: {
|
||||||
|
'User-Agent': 'undici',
|
||||||
|
Host: /^example.com$/
|
||||||
|
}
|
||||||
|
}).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo', {
|
||||||
|
method: 'GET',
|
||||||
|
body: 'form=data',
|
||||||
|
headers: {
|
||||||
|
foo: 'bar',
|
||||||
|
'User-Agent': 'undici',
|
||||||
|
Host: 'example.com'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with reply with a defined error
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).replyWithError(new Error('kaboom'))
|
||||||
|
|
||||||
|
try {
|
||||||
|
await request('http://localhost:3000/foo', {
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error) // Error: kaboom
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with defaultReplyHeaders
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).defaultReplyHeaders({ foo: 'bar' })
|
||||||
|
.reply(200, 'foo')
|
||||||
|
|
||||||
|
const { headers } = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('headers', headers) // headers { foo: 'bar' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with defaultReplyTrailers
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).defaultReplyTrailers({ foo: 'bar' })
|
||||||
|
.reply(200, 'foo')
|
||||||
|
|
||||||
|
const { trailers } = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('trailers', trailers) // trailers { foo: 'bar' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with automatic content-length calculation
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).replyContentLength().reply(200, 'foo')
|
||||||
|
|
||||||
|
const { headers } = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('headers', headers) // headers { 'content-length': '3' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with automatic content-length calculation on an object
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).replyContentLength().reply(200, { foo: 'bar' })
|
||||||
|
|
||||||
|
const { headers } = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('headers', headers) // headers { 'content-length': '13' }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with persist enabled
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).reply(200, 'foo').persist()
|
||||||
|
|
||||||
|
const result1 = await request('http://localhost:3000/foo')
|
||||||
|
// Will match and return mocked data
|
||||||
|
|
||||||
|
const result2 = await request('http://localhost:3000/foo')
|
||||||
|
// Will match and return mocked data
|
||||||
|
|
||||||
|
// Etc
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with times enabled
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
}).reply(200, 'foo').times(2)
|
||||||
|
|
||||||
|
const result1 = await request('http://localhost:3000/foo')
|
||||||
|
// Will match and return mocked data
|
||||||
|
|
||||||
|
const result2 = await request('http://localhost:3000/foo')
|
||||||
|
// Will match and return mocked data
|
||||||
|
|
||||||
|
const result3 = await request('http://localhost:3000/foo')
|
||||||
|
// Will not match and will attempt a real request
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Mocked request with path callback
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent, setGlobalDispatcher, request } from 'undici'
|
||||||
|
import querystring from 'querystring'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
setGlobalDispatcher(mockAgent)
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
const matchPath = requestPath => {
|
||||||
|
const [pathname, search] = requestPath.split('?')
|
||||||
|
const requestQuery = querystring.parse(search)
|
||||||
|
|
||||||
|
if (!pathname.startsWith('/foo')) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Object.keys(requestQuery).includes('foo') || requestQuery.foo !== 'bar') {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: matchPath,
|
||||||
|
method: 'GET'
|
||||||
|
}).reply(200, 'foo')
|
||||||
|
|
||||||
|
const result = await request('http://localhost:3000/foo?foo=bar')
|
||||||
|
// Will match and return mocked data
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockPool.close()`
|
||||||
|
|
||||||
|
Closes the mock pool and de-registers from associated MockAgent.
|
||||||
|
|
||||||
|
Returns: `Promise<void>`
|
||||||
|
|
||||||
|
#### Example - clean up after tests are complete
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
|
||||||
|
await mockPool.close()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MockPool.dispatch(options, handlers)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
### `MockPool.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
#### Example - MockPool request
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { MockAgent } from 'undici'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent()
|
||||||
|
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000')
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET',
|
||||||
|
}).reply(200, 'foo')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await mockPool.request({
|
||||||
|
origin: 'http://localhost:3000',
|
||||||
|
path: '/foo',
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
84
node_modules/undici/docs/api/Pool.md
generated
vendored
Normal file
|
@ -0,0 +1,84 @@
|
||||||
|
# Class: Pool
|
||||||
|
|
||||||
|
Extends: `undici.Dispatcher`
|
||||||
|
|
||||||
|
A pool of [Client](Client.md) instances connected to the same upstream target.
|
||||||
|
|
||||||
|
Requests are not guaranteed to be dispatched in order of invocation.
|
||||||
|
|
||||||
|
## `new Pool(url[, options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `URL | string` - It should only include the **protocol, hostname, and port**.
|
||||||
|
* **options** `PoolOptions` (optional)
|
||||||
|
|
||||||
|
### Parameter: `PoolOptions`
|
||||||
|
|
||||||
|
Extends: [`ClientOptions`](Client.md#parameter-clientoptions)
|
||||||
|
|
||||||
|
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Client(origin, opts)`
|
||||||
|
* **connections** `number | null` (optional) - Default: `null` - The number of `Client` instances to create. When set to `null`, the `Pool` instance will create an unlimited number of `Client` instances.
|
||||||
|
* **interceptors** `{ Pool: DispatchInterceptor[] }` - Default: `{ Pool: [] }` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching).
|
||||||
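
### Example - Basic Pool request

A minimal sketch of constructing a `Pool` with the options above and issuing a request through it (the origin, path, and connection count are assumptions):

```js
import { Pool } from 'undici'

// Limit the pool to 4 underlying Client connections.
const pool = new Pool('http://localhost:3000', { connections: 4 })

const { statusCode, body } = await pool.request({ path: '/foo', method: 'GET' })

console.log('response received', statusCode)

for await (const data of body) {
  console.log('data', data.toString('utf8'))
}

await pool.close()
```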
|
|
||||||
|
## Instance Properties
|
||||||
|
|
||||||
|
### `Pool.closed`
|
||||||
|
|
||||||
|
Implements [Client.closed](Client.md#clientclosed)
|
||||||
|
|
||||||
|
### `Pool.destroyed`
|
||||||
|
|
||||||
|
Implements [Client.destroyed](Client.md#clientdestroyed)
|
||||||
|
|
||||||
|
### `Pool.stats`
|
||||||
|
|
||||||
|
Returns [`PoolStats`](PoolStats.md) instance for this pool.
|
||||||
|
|
||||||
|
## Instance Methods
|
||||||
|
|
||||||
|
### `Pool.close([callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise).
|
||||||
|
|
||||||
|
### `Pool.destroy([error, callback])`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise).
|
||||||
|
|
||||||
|
### `Pool.connect(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback).
|
||||||
|
|
||||||
|
### `Pool.dispatch(options, handler)`
|
||||||
|
|
||||||
|
Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler).
|
||||||
|
|
||||||
|
### `Pool.pipeline(options, handler)`
|
||||||
|
|
||||||
|
See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler).
|
||||||
|
|
||||||
|
### `Pool.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
||||||
|
|
||||||
|
### `Pool.stream(options, factory[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback).
|
||||||
|
|
||||||
|
### `Pool.upgrade(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback).
|
||||||
|
|
||||||
|
## Instance Events
|
||||||
|
|
||||||
|
### Event: `'connect'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect).
|
||||||
|
|
||||||
|
### Event: `'disconnect'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect).
|
||||||
|
|
||||||
|
### Event: `'drain'`
|
||||||
|
|
||||||
|
See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain).
|
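
A minimal sketch of listening for these events (the origin is an assumption; each event receives the arguments documented for the corresponding Dispatcher event):

```js
import { Pool } from 'undici'

const pool = new Pool('http://localhost:3000')

// Log connection lifecycle activity for the pool's underlying clients.
pool.on('connect', (origin) => console.log('connected to', origin.href))
pool.on('disconnect', (origin) => console.log('disconnected from', origin.href))
pool.on('drain', (origin) => console.log('pool drained for', origin.href))
```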
35
node_modules/undici/docs/api/PoolStats.md
generated
vendored
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
# Class: PoolStats
|
||||||
|
|
||||||
|
Aggregate stats for a [Pool](Pool.md) or [BalancedPool](BalancedPool.md).
|
||||||
|
|
||||||
|
## `new PoolStats(pool)`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **pool** `Pool` - Pool or BalancedPool from which to return stats.
|
||||||
|
|
||||||
|
## Instance Properties
|
||||||
|
|
||||||
|
### `PoolStats.connected`
|
||||||
|
|
||||||
|
Number of open socket connections in this pool.
|
||||||
|
|
||||||
|
### `PoolStats.free`
|
||||||
|
|
||||||
|
Number of open socket connections in this pool that do not have an active request.
|
||||||
|
|
||||||
|
### `PoolStats.pending`
|
||||||
|
|
||||||
|
Number of pending requests across all clients in this pool.
|
||||||
|
|
||||||
|
### `PoolStats.queued`
|
||||||
|
|
||||||
|
Number of queued requests across all clients in this pool.
|
||||||
|
|
||||||
|
### `PoolStats.running`
|
||||||
|
|
||||||
|
Number of currently active requests across all clients in this pool.
|
||||||
|
|
||||||
|
### `PoolStats.size`
|
||||||
|
|
||||||
|
Number of active, pending, or queued requests across all clients in this pool.
|
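
A minimal sketch of reading these counters from a live pool (the origin is an assumption):

```js
import { Pool } from 'undici'

const pool = new Pool('http://localhost:3000')

// `Pool.stats` returns a PoolStats instance exposing the properties above.
const { connected, free, pending, queued, running, size } = pool.stats

console.log({ connected, free, pending, queued, running, size })

await pool.close()
```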
126
node_modules/undici/docs/api/ProxyAgent.md
generated
vendored
Normal file
|
@ -0,0 +1,126 @@
|
||||||
|
# Class: ProxyAgent
|
||||||
|
|
||||||
|
Extends: `undici.Dispatcher`
|
||||||
|
|
||||||
|
A ProxyAgent class that implements the Agent API. It allows connecting through a proxy in a simple way.
|
||||||
|
|
||||||
|
## `new ProxyAgent([options])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **options** `ProxyAgentOptions` (required) - It extends the `Agent` options.
|
||||||
|
|
||||||
|
Returns: `ProxyAgent`
|
||||||
|
|
||||||
|
### Parameter: `ProxyAgentOptions`
|
||||||
|
|
||||||
|
Extends: [`AgentOptions`](Agent.md#parameter-agentoptions)
|
||||||
|
|
||||||
|
* **uri** `string` (required) - The proxy URI. It can be passed either as a string or as an object containing `uri` as a string.
|
||||||
|
* **token** `string` (optional) - A token string used for proxy authentication.
|
||||||
|
* **auth** `string` (**deprecated**) - Use `token` instead.
|
||||||
|
* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)`
|
||||||
|
* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
|
||||||
|
* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback).
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { ProxyAgent } from 'undici'
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||||
|
// or
|
||||||
|
const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' })
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic ProxyAgent instantiation
|
||||||
|
|
||||||
|
This will instantiate the ProxyAgent. It will not do anything until registered as the agent to use with requests.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { ProxyAgent } from 'undici'
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Proxy Request with global agent dispatcher
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { setGlobalDispatcher, request, ProxyAgent } from 'undici'
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||||
|
setGlobalDispatcher(proxyAgent)
|
||||||
|
|
||||||
|
const { statusCode, body } = await request('http://localhost:3000/foo')
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Proxy Request with local agent dispatcher
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { ProxyAgent, request } from 'undici'
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||||
|
|
||||||
|
const {
|
||||||
|
statusCode,
|
||||||
|
body
|
||||||
|
} = await request('http://localhost:3000/foo', { dispatcher: proxyAgent })
|
||||||
|
|
||||||
|
console.log('response received', statusCode) // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')) // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic Proxy Request with authentication
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { setGlobalDispatcher, request, ProxyAgent } from 'undici';
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent({
|
||||||
|
uri: 'my.proxy.server',
|
||||||
|
// token: 'Bearer xxxx'
|
||||||
|
token: `Basic ${Buffer.from('username:password').toString('base64')}`
|
||||||
|
});
|
||||||
|
setGlobalDispatcher(proxyAgent);
|
||||||
|
|
||||||
|
const { statusCode, body } = await request('http://localhost:3000/foo');
|
||||||
|
|
||||||
|
console.log('response received', statusCode); // response received 200
|
||||||
|
|
||||||
|
for await (const data of body) {
|
||||||
|
console.log('data', data.toString('utf8')); // data foo
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `ProxyAgent.close()`
|
||||||
|
|
||||||
|
Closes the proxy agent and waits for registered pools and clients to also close before resolving.
|
||||||
|
|
||||||
|
Returns: `Promise<void>`
|
||||||
|
|
||||||
|
#### Example - clean up after tests are complete
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { ProxyAgent, setGlobalDispatcher } from 'undici'
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||||
|
setGlobalDispatcher(proxyAgent)
|
||||||
|
|
||||||
|
await proxyAgent.close()
|
||||||
|
```
|
||||||
|
|
||||||
|
### `ProxyAgent.dispatch(options, handlers)`
|
||||||
|
|
||||||
|
Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions).
|
||||||
|
|
||||||
|
### `ProxyAgent.request(options[, callback])`
|
||||||
|
|
||||||
|
See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback).
|
108
node_modules/undici/docs/api/RetryHandler.md
generated
vendored
Normal file
|
@ -0,0 +1,108 @@
|
||||||
|
# Class: RetryHandler
|
||||||
|
|
||||||
|
Extends: `undici.DispatcherHandlers`
|
||||||
|
|
||||||
|
A handler class that implements the retry logic for a request.
|
||||||
|
|
||||||
|
## `new RetryHandler(dispatchOptions, retryHandlers)`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
- **options** `Dispatch.DispatchOptions & RetryOptions` (required) - It is an intersection of `Dispatcher.DispatchOptions` and `RetryOptions`.
|
||||||
|
- **retryHandlers** `RetryHandlers` (required) - Object containing the `dispatch` to be used on every retry, and `handler` for handling the `dispatch` lifecycle.
|
||||||
|
|
||||||
|
Returns: `RetryHandler`
|
||||||
|
|
||||||
|
### Parameter: `Dispatch.DispatchOptions & RetryOptions`
|
||||||
|
|
||||||
|
Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
|
||||||
|
|
||||||
|
#### `RetryOptions`
|
||||||
|
|
||||||
|
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass an error to the callback if no more retries should be performed.
|
||||||
|
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
|
||||||
|
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
|
||||||
|
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
|
||||||
|
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
|
||||||
|
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
|
||||||
|
|
||||||
|
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
|
||||||
|
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
|
||||||
|
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN', 'ENETUNREACH', 'EHOSTDOWN', 'EHOSTUNREACH', 'EPIPE']`
|
||||||
|
|
||||||
|
**`RetryContext`**
|
||||||
|
|
||||||
|
- `state`: `RetryState` - Current retry state. It can be mutated.
|
||||||
|
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
|
||||||
|
|
||||||
|
### Parameter `RetryHandlers`
|
||||||
|
|
||||||
|
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every retry.
|
||||||
|
- **handler** Extends [`Dispatch.DispatchHandlers`](Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`);
|
||||||
|
const chunks = [];
|
||||||
|
const handler = new RetryHandler(
|
||||||
|
{
|
||||||
|
...dispatchOptions,
|
||||||
|
retryOptions: {
|
||||||
|
// custom retry function
|
||||||
|
retry: function (err, state, callback) {
|
||||||
|
counter++;
|
||||||
|
|
||||||
|
if (err.code && err.code === "UND_ERR_DESTROYED") {
|
||||||
|
callback(err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (err.statusCode === 206) {
|
||||||
|
callback(err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
setTimeout(() => callback(null), 1000);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
dispatch: (...args) => {
|
||||||
|
return client.dispatch(...args);
|
||||||
|
},
|
||||||
|
handler: {
|
||||||
|
onConnect() {},
|
||||||
|
onBodySent() {},
|
||||||
|
onHeaders(status, _rawHeaders, resume, _statusMessage) {
|
||||||
|
// do something with headers
|
||||||
|
},
|
||||||
|
onData(chunk) {
|
||||||
|
chunks.push(chunk);
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
onComplete() {},
|
||||||
|
onError() {
|
||||||
|
// handle error properly
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Example - Basic RetryHandler with defaults
|
||||||
|
|
||||||
|
```js
|
||||||
|
const client = new Client(`http://localhost:${server.address().port}`);
|
||||||
|
const handler = new RetryHandler(dispatchOptions, {
|
||||||
|
dispatch: client.dispatch.bind(client),
|
||||||
|
handler: {
|
||||||
|
onConnect() {},
|
||||||
|
onBodySent() {},
|
||||||
|
onHeaders(status, _rawHeaders, resume, _statusMessage) {},
|
||||||
|
onData(chunk) {},
|
||||||
|
onComplete() {},
|
||||||
|
onError(err) {},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
```
|
43
node_modules/undici/docs/api/WebSocket.md
generated
vendored
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
# Class: WebSocket
|
||||||
|
|
||||||
|
> ⚠️ Warning: the WebSocket API is experimental.
|
||||||
|
|
||||||
|
Extends: [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
|
||||||
|
|
||||||
|
The WebSocket object provides a way to manage a WebSocket connection to a server, allowing bidirectional communication. The API follows the [WebSocket spec](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) and [RFC 6455](https://datatracker.ietf.org/doc/html/rfc6455).
|
||||||
|
|
||||||
|
## `new WebSocket(url[, protocol])`
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
|
||||||
|
* **url** `URL | string` - The url's protocol *must* be `ws` or `wss`.
|
||||||
|
* **protocol** `string | string[] | WebSocketInit` (optional) - Subprotocol(s) to request the server use, or a [`Dispatcher`](./Dispatcher.md).
|
||||||
|
|
||||||
|
### Example:
|
||||||
|
|
||||||
|
This example will not work in browsers or other platforms that don't allow passing an object.
|
||||||
|
|
||||||
|
```mjs
|
||||||
|
import { WebSocket, ProxyAgent } from 'undici'
|
||||||
|
|
||||||
|
const proxyAgent = new ProxyAgent('my.proxy.server')
|
||||||
|
|
||||||
|
const ws = new WebSocket('wss://echo.websocket.events', {
|
||||||
|
dispatcher: proxyAgent,
|
||||||
|
protocols: ['echo', 'chat']
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
If you do not need a custom Dispatcher, it's recommended to use the following pattern:
|
||||||
|
|
||||||
|
```mjs
|
||||||
|
import { WebSocket } from 'undici'
|
||||||
|
|
||||||
|
const ws = new WebSocket('wss://echo.websocket.events', ['echo', 'chat'])
|
||||||
|
```
|
||||||
|
|
||||||
|
## Read More
|
||||||
|
|
||||||
|
- [MDN - WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket)
|
||||||
|
- [The WebSocket Specification](https://www.rfc-editor.org/rfc/rfc6455)
|
||||||
|
- [The WHATWG WebSocket Specification](https://websockets.spec.whatwg.org/)
|
62
node_modules/undici/docs/api/api-lifecycle.md
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
||||||
|
# Client Lifecycle
|
||||||
|
|
||||||
|
An Undici [Client](Client.md) can be best described as a state machine. The following list is a summary of the various state transitions the `Client` will go through in its lifecycle. This document also contains detailed breakdowns of each state.
|
||||||
|
|
||||||
|
> This diagram is not a perfect representation of the undici Client. Since the Client class is not actually implemented as a state-machine, actual execution may deviate slightly from what is described below. Consider this as a general resource for understanding the inner workings of the Undici client rather than some kind of formal specification.
|
||||||
|
|
||||||
|
## State Transition Overview
|
||||||
|
|
||||||
|
* A `Client` begins in the **idle** state with no socket connection and no requests in queue.
|
||||||
|
* The *connect* event transitions the `Client` to the **pending** state where requests can be queued prior to processing.
|
||||||
|
* The *close* and *destroy* events transition the `Client` to the **destroyed** state. Since there are no requests in the queue, the *close* event immediately transitions to the **destroyed** state.
|
||||||
|
* The **pending** state indicates the underlying socket connection has been successfully established and requests are queueing.
|
||||||
|
* The *process* event transitions the `Client` to the **processing** state where requests are processed.
|
||||||
|
* If requests are queued, the *close* event transitions to the **processing** state; otherwise, it transitions to the **destroyed** state.
|
||||||
|
* The *destroy* event transitions to the **destroyed** state.
|
||||||
|
* The **processing** state initializes to the **processing.running** state.
|
||||||
|
* If the current request requires draining, the *needDrain* event transitions the `Client` into the **processing.busy** state which will return to the **processing.running** state with the *drainComplete* event.
|
||||||
|
* After all queued requests are completed, the *keepalive* event transitions the `Client` back to the **pending** state. If no requests are queued during the timeout, the **close** event transitions the `Client` to the **destroyed** state.
|
||||||
|
* If the *close* event is fired while the `Client` still has queued requests, the `Client` transitions to the **process.closing** state where it will complete all existing requests before firing the *done* event.
|
||||||
|
* The *done* event gracefully transitions the `Client` to the **destroyed** state.
|
||||||
|
* At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests.
|
||||||
|
* The **destroyed** state is a final state and the `Client` is no longer functional.
|
||||||
|
|
||||||
|
![A state diagram representing an Undici Client instance](../assets/lifecycle-diagram.png)
|
||||||
|
|
||||||
|
> The diagram was generated using Mermaid.js Live Editor. Modify the state diagram [here](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoic3RhdGVEaWFncmFtLXYyXG4gICAgWypdIC0tPiBpZGxlXG4gICAgaWRsZSAtLT4gcGVuZGluZyA6IGNvbm5lY3RcbiAgICBpZGxlIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95L2Nsb3NlXG4gICAgXG4gICAgcGVuZGluZyAtLT4gaWRsZSA6IHRpbWVvdXRcbiAgICBwZW5kaW5nIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95XG5cbiAgICBzdGF0ZSBjbG9zZV9mb3JrIDw8Zm9yaz4-XG4gICAgcGVuZGluZyAtLT4gY2xvc2VfZm9yayA6IGNsb3NlXG4gICAgY2xvc2VfZm9yayAtLT4gcHJvY2Vzc2luZ1xuICAgIGNsb3NlX2ZvcmsgLS0-IGRlc3Ryb3llZFxuXG4gICAgcGVuZGluZyAtLT4gcHJvY2Vzc2luZyA6IHByb2Nlc3NcblxuICAgIHByb2Nlc3NpbmcgLS0-IHBlbmRpbmcgOiBrZWVwYWxpdmVcbiAgICBwcm9jZXNzaW5nIC0tPiBkZXN0cm95ZWQgOiBkb25lXG4gICAgcHJvY2Vzc2luZyAtLT4gZGVzdHJveWVkIDogZGVzdHJveVxuXG4gICAgc3RhdGUgcHJvY2Vzc2luZyB7XG4gICAgICAgIHJ1bm5pbmcgLS0-IGJ1c3kgOiBuZWVkRHJhaW5cbiAgICAgICAgYnVzeSAtLT4gcnVubmluZyA6IGRyYWluQ29tcGxldGVcbiAgICAgICAgcnVubmluZyAtLT4gWypdIDoga2VlcGFsaXZlXG4gICAgICAgIHJ1bm5pbmcgLS0-IGNsb3NpbmcgOiBjbG9zZVxuICAgICAgICBjbG9zaW5nIC0tPiBbKl0gOiBkb25lXG4gICAgICAgIFsqXSAtLT4gcnVubmluZ1xuICAgIH1cbiAgICAiLCJtZXJtYWlkIjp7InRoZW1lIjoiYmFzZSJ9LCJ1cGRhdGVFZGl0b3IiOmZhbHNlfQ)
|
||||||
|
|
||||||
|
## State details
|
||||||
|
|
||||||
|
### idle
|
||||||
|
|
||||||
|
The **idle** state is the initial state of a `Client` instance. While an `origin` is required for instantiating a `Client` instance, the underlying socket connection will not be established until a request is queued using [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers). By calling `Client.dispatch()` directly or using one of the multiple implementations ([`Client.connect()`](Client.md#clientconnectoptions-callback), [`Client.pipeline()`](Client.md#clientpipelineoptions-handler), [`Client.request()`](Client.md#clientrequestoptions-callback), [`Client.stream()`](Client.md#clientstreamoptions-factory-callback), and [`Client.upgrade()`](Client.md#clientupgradeoptions-callback)), the `Client` instance will transition from **idle** to [**pending**](#pending) and then most likely directly to [**processing**](#processing).
|
||||||
|
|
||||||
|
Calling [`Client.close()`](Client.md#clientclosecallback) or [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state since the `Client` instance will have no queued requests in this state.
|
||||||
|
|
||||||
|
### pending
|
||||||
|
|
||||||
|
The **pending** state signifies a non-processing `Client`. Upon entering this state, the `Client` establishes a socket connection and emits the [`'connect'`](Client.md#event-connect) event signalling a connection was successfully established with the `origin` provided during `Client` instantiation. The internal queue is initially empty, and requests can start queueing.
|
||||||
|
|
||||||
|
Calling [`Client.close()`](Client.md#clientclosecallback) with queued requests transitions the `Client` to the [**processing**](#processing) state. Without queued requests, it transitions to the [**destroyed**](#destroyed) state.
|
||||||
|
|
||||||
|
Calling [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state regardless of existing requests.
|
||||||
|
|
||||||
|
### processing
|
||||||
|
|
||||||
|
The **processing** state is a state machine within itself. It initializes to the [**processing.running**](#running) state. The [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers), [`Client.close()`](Client.md#clientclosecallback), and [`Client.destroy()`](Client.md#clientdestroyerror-callback) can be called at any time while the `Client` is in this state. `Client.dispatch()` will add more requests to the queue while existing requests continue to be processed. `Client.close()` will transition to the [**processing.closing**](#closing) state. And `Client.destroy()` will transition to [**destroyed**](#destroyed).
|
||||||
|
|
||||||
|
#### running
|
||||||
|
|
||||||
|
In the **processing.running** sub-state, queued requests are being processed in a FIFO order. If a request body requires draining, the *needDrain* event transitions to the [**processing.busy**](#busy) sub-state. The *close* event transitions the Client to the [**process.closing**](#closing) sub-state. If all queued requests are processed and neither [`Client.close()`](Client.md#clientclosecallback) nor [`Client.destroy()`](Client.md#clientdestroyerror-callback) are called, then the [**processing**](#processing) machine will trigger a *keepalive* event transitioning the `Client` back to the [**pending**](#pending) state. During this time, the `Client` is waiting for the socket connection to timeout, and once it does, it triggers the *timeout* event and transitions to the [**idle**](#idle) state.
|
||||||
|
|
||||||
|
#### busy
|
||||||
|
|
||||||
|
This sub-state is only entered when a request body is an instance of [Stream](https://nodejs.org/api/stream.html) and requires draining. The `Client` cannot process additional requests while in this state and must wait until the currently processing request body is completely drained before transitioning back to [**processing.running**](#running).
|
||||||
|
|
||||||
|
#### closing
|
||||||
|
|
||||||
|
This sub-state is only entered when a `Client` instance has queued requests and the [`Client.close()`](Client.md#clientclosecallback) method is called. In this state, the `Client` instance continues to process requests as usual, with the one exception that no additional requests can be queued. Once all of the queued requests are processed, the `Client` will trigger the *done* event gracefully entering the [**destroyed**](#destroyed) state without an error.
|
||||||
|
|
||||||
|
### destroyed
|
||||||
|
|
||||||
|
The **destroyed** state is a final state for the `Client` instance. Once in this state, a `Client` is nonfunctional. Calling any other `Client` methods will result in a `ClientDestroyedError`.
|
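
A minimal sketch of the final state described above (the origin is an assumption; `errors.ClientDestroyedError` is exported by undici):

```js
import { Client, errors } from 'undici'

const client = new Client('http://localhost:3000')
await client.destroy()

try {
  // The client is in the destroyed state, so this dispatch is rejected.
  await client.request({ path: '/', method: 'GET' })
} catch (err) {
  console.log(err instanceof errors.ClientDestroyedError) // true
}
```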
BIN
node_modules/undici/docs/assets/lifecycle-diagram.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 46 KiB |
64
node_modules/undici/docs/best-practices/client-certificate.md
generated
vendored
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
# Client certificate
|
||||||
|
|
||||||
|
Client certificate authentication can be configured with the `Client`; the required options are passed along through the `connect` option.
|
||||||
|
|
||||||
|
The client certificates must be signed by a trusted CA. The Node.js default is to trust the well-known CAs curated by Mozilla.
|
||||||
|
|
||||||
|
Setting the server option `requestCert: true` tells the server to request the client certificate.
|
||||||
|
|
||||||
|
The server option `rejectUnauthorized: false` allows us to handle any invalid certificate errors in client code. The `authorized` property on the socket of the incoming request will show if the client certificate was valid. The `authorizationError` property will give the reason if the certificate was not valid.
|
||||||
|
|
||||||
|
### Client Certificate Authentication
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { readFileSync } = require('fs')
|
||||||
|
const { join } = require('path')
|
||||||
|
const { createServer } = require('https')
|
||||||
|
const { Client } = require('undici')
|
||||||
|
|
||||||
|
const serverOptions = {
|
||||||
|
ca: [
|
||||||
|
readFileSync(join(__dirname, 'client-ca-crt.pem'), 'utf8')
|
||||||
|
],
|
||||||
|
key: readFileSync(join(__dirname, 'server-key.pem'), 'utf8'),
|
||||||
|
cert: readFileSync(join(__dirname, 'server-crt.pem'), 'utf8'),
|
||||||
|
requestCert: true,
|
||||||
|
rejectUnauthorized: false
|
||||||
|
}
|
||||||
|
|
||||||
|
const server = createServer(serverOptions, (req, res) => {
|
||||||
|
// true if client cert is valid
|
||||||
|
if(req.client.authorized === true) {
|
||||||
|
console.log('valid')
|
||||||
|
} else {
|
||||||
|
console.error(req.client.authorizationError)
|
||||||
|
}
|
||||||
|
res.end()
|
||||||
|
})
|
||||||
|
|
||||||
|
server.listen(0, function () {
|
||||||
|
const tls = {
|
||||||
|
ca: [
|
||||||
|
readFileSync(join(__dirname, 'server-ca-crt.pem'), 'utf8')
|
||||||
|
],
|
||||||
|
key: readFileSync(join(__dirname, 'client-key.pem'), 'utf8'),
|
||||||
|
cert: readFileSync(join(__dirname, 'client-crt.pem'), 'utf8'),
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
servername: 'agent1'
|
||||||
|
}
|
||||||
|
const client = new Client(`https://localhost:${server.address().port}`, {
|
||||||
|
connect: tls
|
||||||
|
})
|
||||||
|
|
||||||
|
client.request({
|
||||||
|
path: '/',
|
||||||
|
method: 'GET'
|
||||||
|
}, (err, { body }) => {
|
||||||
|
body.on('data', (buf) => {})
|
||||||
|
body.on('end', () => {
|
||||||
|
client.close()
|
||||||
|
server.close()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
```
|
136
node_modules/undici/docs/best-practices/mocking-request.md
generated
vendored
Normal file
|
@ -0,0 +1,136 @@
|
||||||
|
# Mocking Request
|
||||||
|
|
||||||
|
Undici has its own mocking [utility](../api/MockAgent.md). It allows us to intercept undici HTTP requests and return mocked values instead, which is useful for testing purposes.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
// bank.mjs
|
||||||
|
import { request } from 'undici'
|
||||||
|
|
||||||
|
export async function bankTransfer(recipient, amount) {
|
||||||
|
const { body } = await request('http://localhost:3000/bank-transfer',
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'X-TOKEN-SECRET': 'SuperSecretToken',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
recipient,
|
||||||
|
amount
|
||||||
|
})
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return await body.json()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
And this is what the test file looks like:
|
||||||
|
|
||||||
|
```js
|
||||||
|
// index.test.mjs
|
||||||
|
import { strict as assert } from 'assert'
|
||||||
|
import { MockAgent, setGlobalDispatcher, } from 'undici'
|
||||||
|
import { bankTransfer } from './bank.mjs'
|
||||||
|
|
||||||
|
const mockAgent = new MockAgent();
|
||||||
|
|
||||||
|
setGlobalDispatcher(mockAgent);
|
||||||
|
|
||||||
|
// Provide the base url to the request
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000');
|
||||||
|
|
||||||
|
// intercept the request
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/bank-transfer',
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'X-TOKEN-SECRET': 'SuperSecretToken',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
recipient: '1234567890',
|
||||||
|
amount: '100'
|
||||||
|
})
|
||||||
|
}).reply(200, {
|
||||||
|
message: 'transaction processed'
|
||||||
|
})
|
||||||
|
|
||||||
|
const success = await bankTransfer('1234567890', '100')
|
||||||
|
|
||||||
|
assert.deepEqual(success, { message: 'transaction processed' })
|
||||||
|
|
||||||
|
// if you don't want to check whether the body or the headers contain the same values
|
||||||
|
// just remove it from interceptor
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/bank-transfer',
|
||||||
|
method: 'POST',
|
||||||
|
}).reply(400, {
|
||||||
|
message: 'bank account not found'
|
||||||
|
})
|
||||||
|
|
||||||
|
const badRequest = await bankTransfer('1234567890', '100')
|
||||||
|
|
||||||
|
assert.deepEqual(badRequest, { message: 'bank account not found' })
|
||||||
|
```
|
||||||
|
|
||||||
|
Explore other MockAgent functionality [here](../api/MockAgent.md)
|
||||||
|
|
||||||
|
## Debug Mock Value
|
||||||
|
|
||||||
|
When the interceptor and the request options are not the same, undici will automatically make a real HTTP request. To prevent real requests from being made, use `mockAgent.disableNetConnect()`:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const mockAgent = new MockAgent();
|
||||||
|
|
||||||
|
setGlobalDispatcher(mockAgent);
|
||||||
|
mockAgent.disableNetConnect()
|
||||||
|
|
||||||
|
// Provide the base url to the request
|
||||||
|
const mockPool = mockAgent.get('http://localhost:3000');
|
||||||
|
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/bank-transfer',
|
||||||
|
method: 'POST',
|
||||||
|
}).reply(200, {
|
||||||
|
message: 'transaction processed'
|
||||||
|
})
|
||||||
|
|
||||||
|
const badRequest = await bankTransfer('1234567890', '100')
|
||||||
|
// Will throw an error
|
||||||
|
// MockNotMatchedError: Mock dispatch not matched for path '/bank-transfer':
|
||||||
|
// subsequent request to origin http://localhost:3000 was not allowed (net.connect disabled)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Reply with data based on request
|
||||||
|
|
||||||
|
If the mocked response needs to be dynamically derived from the request parameters, you can provide a function instead of an object to `reply`:
|
||||||
|
|
||||||
|
```js
|
||||||
|
mockPool.intercept({
|
||||||
|
path: '/bank-transfer',
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'X-TOKEN-SECRET': 'SuperSecretToken',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
recipient: '1234567890',
|
||||||
|
amount: '100'
|
||||||
|
})
|
||||||
|
}).reply(200, (opts) => {
|
||||||
|
// do something with opts
|
||||||
|
|
||||||
|
return { message: 'transaction processed' }
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
In this case, `opts` will be:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'X-TOKEN-SECRET': 'SuperSecretToken' },
|
||||||
|
body: '{"recipient":"1234567890","amount":"100"}',
|
||||||
|
origin: 'http://localhost:3000',
|
||||||
|
path: '/bank-transfer'
|
||||||
|
}
|
||||||
|
```
|
127
node_modules/undici/docs/best-practices/proxy.md
generated
vendored
Normal file
|
@ -0,0 +1,127 @@
|
||||||
|
# Connecting through a proxy
|
||||||
|
|
||||||
|
Connecting through a proxy is possible by:
|
||||||
|
|
||||||
|
- Using [ProxyAgent](../api/ProxyAgent.md).
|
||||||
|
- Configuring `Client` or `Pool` constructor.
|
||||||
|
|
||||||
|
The proxy url should be passed to the `Client` or `Pool` constructor, while the upstream server url
|
||||||
|
should be added to every request call in the `path`.
|
||||||
|
For instance, if you need to send a request to the `/hello` route of your upstream server,
|
||||||
|
the `path` should be `path: 'http://upstream.server:port/hello?foo=bar'`.
|
||||||
|
|
||||||
|
If your proxy requires basic authentication, you can send the credentials via the `proxy-authorization` header.
|
||||||
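
The examples below use the `Client` constructor approach. A minimal sketch of the [ProxyAgent](../api/ProxyAgent.md) approach (the proxy and upstream URLs are assumptions):

```js
import { ProxyAgent, request } from 'undici'

const proxyAgent = new ProxyAgent('http://localhost:8000')

// With a dispatcher, the upstream URL is passed directly; the path needs no proxy prefix.
const { statusCode, body } = await request('http://localhost:3000/hello?foo=bar', {
  dispatcher: proxyAgent
})

console.log(statusCode) // 200
```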
|
|
||||||
|
### Connect without authentication
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import proxy from 'proxy'
|
||||||
|
|
||||||
|
const server = await buildServer()
|
||||||
|
const proxyServer = await buildProxy()
|
||||||
|
|
||||||
|
const serverUrl = `http://localhost:${server.address().port}`
|
||||||
|
const proxyUrl = `http://localhost:${proxyServer.address().port}`
|
||||||
|
|
||||||
|
server.on('request', (req, res) => {
|
||||||
|
console.log(req.url) // '/hello?foo=bar'
|
||||||
|
res.setHeader('content-type', 'application/json')
|
||||||
|
res.end(JSON.stringify({ hello: 'world' }))
|
||||||
|
})
|
||||||
|
|
||||||
|
const client = new Client(proxyUrl)
|
||||||
|
|
||||||
|
const response = await client.request({
|
||||||
|
method: 'GET',
|
||||||
|
path: serverUrl + '/hello?foo=bar'
|
||||||
|
})
|
||||||
|
|
||||||
|
response.body.setEncoding('utf8')
|
||||||
|
let data = ''
|
||||||
|
for await (const chunk of response.body) {
|
||||||
|
data += chunk
|
||||||
|
}
|
||||||
|
console.log(response.statusCode) // 200
|
||||||
|
console.log(JSON.parse(data)) // { hello: 'world' }
|
||||||
|
|
||||||
|
server.close()
|
||||||
|
proxyServer.close()
|
||||||
|
client.close()
|
||||||
|
|
||||||
|
function buildServer () {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const server = createServer()
|
||||||
|
server.listen(0, () => resolve(server))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildProxy () {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const server = proxy(createServer())
|
||||||
|
server.listen(0, () => resolve(server))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Connect with authentication
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { Client } from 'undici'
|
||||||
|
import { createServer } from 'http'
|
||||||
|
import proxy from 'proxy'
|
||||||
|
|
||||||
|
const server = await buildServer()
|
||||||
|
const proxyServer = await buildProxy()
|
||||||
|
|
||||||
|
const serverUrl = `http://localhost:${server.address().port}`
|
||||||
|
const proxyUrl = `http://localhost:${proxyServer.address().port}`
|
||||||
|
|
||||||
|
proxyServer.authenticate = function (req, fn) {
|
||||||
|
fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
server.on('request', (req, res) => {
|
||||||
|
console.log(req.url) // '/hello?foo=bar'
|
||||||
|
res.setHeader('content-type', 'application/json')
|
||||||
|
res.end(JSON.stringify({ hello: 'world' }))
|
||||||
|
})
|
||||||
|
|
||||||
|
const client = new Client(proxyUrl)
|
||||||
|
|
||||||
|
const response = await client.request({
|
||||||
|
method: 'GET',
|
||||||
|
path: serverUrl + '/hello?foo=bar',
|
||||||
|
headers: {
|
||||||
|
'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}`
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
response.body.setEncoding('utf8')
|
||||||
|
let data = ''
|
||||||
|
for await (const chunk of response.body) {
|
||||||
|
data += chunk
|
||||||
|
}
|
||||||
|
console.log(response.statusCode) // 200
|
||||||
|
console.log(JSON.parse(data)) // { hello: 'world' }
|
||||||
|
|
||||||
|
server.close()
|
||||||
|
proxyServer.close()
|
||||||
|
client.close()
|
||||||
|
|
||||||
|
function buildServer () {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const server = createServer()
|
||||||
|
server.listen(0, () => resolve(server))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildProxy () {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const server = proxy(createServer())
|
||||||
|
server.listen(0, () => resolve(server))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
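As a complement to the `Client`/`Pool` approach above, here is a minimal sketch of the `ProxyAgent` option mentioned at the top of this page; the proxy and upstream URLs are placeholders, and the full `ProxyAgent` API is documented in ../api/ProxyAgent.md:

```js
// Sketch only: URLs are placeholders, not part of these docs.
import { ProxyAgent, request } from 'undici'

const proxyAgent = new ProxyAgent('http://localhost:8000')

// Per-request dispatcher; alternatively setGlobalDispatcher(proxyAgent)
// routes every request through the proxy.
const { statusCode, body } = await request('http://upstream.server:3000/hello?foo=bar', {
  dispatcher: proxyAgent
})
console.log(statusCode, await body.text())
```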
20
node_modules/undici/docs/best-practices/writing-tests.md
generated
vendored
Normal file
@ -0,0 +1,20 @@
# Writing tests

Undici is tuned for a production use case and its defaults will keep
a socket open for a few seconds after an HTTP request is completed to
remove the overhead of opening up a new socket. The settings that make
Undici shine in production are not a good fit for using Undici in automated
tests, as they result in longer execution times.

The following are good defaults that will keep the socket open for only 10ms:

```js
import { request, setGlobalDispatcher, Agent } from 'undici'

const agent = new Agent({
  keepAliveTimeout: 10, // milliseconds
  keepAliveMaxTimeout: 10 // milliseconds
})

setGlobalDispatcher(agent)
```
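As a usage sketch (the test runner and URL are placeholders, not part of the undici docs), a test can then issue requests as usual and the short keep-alive timeouts apply to every connection made through the global dispatcher:

```js
import { request, setGlobalDispatcher, Agent } from 'undici'

// Short keep-alive timeouts, as recommended above, so sockets close quickly
// between tests instead of lingering for seconds.
setGlobalDispatcher(new Agent({ keepAliveTimeout: 10, keepAliveMaxTimeout: 10 }))

// Placeholder request: any URL your tests target works the same way.
const { statusCode, body } = await request('http://localhost:3000/health')
console.log(statusCode, await body.text())
```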
15
node_modules/undici/index-fetch.js
generated
vendored
Normal file
@ -0,0 +1,15 @@
'use strict'

const fetchImpl = require('./lib/fetch').fetch

module.exports.fetch = function fetch (resource, init = undefined) {
  return fetchImpl(resource, init).catch((err) => {
    Error.captureStackTrace(err, this)
    throw err
  })
}
module.exports.FormData = require('./lib/fetch/formdata').FormData
module.exports.Headers = require('./lib/fetch/headers').Headers
module.exports.Response = require('./lib/fetch/response').Response
module.exports.Request = require('./lib/fetch/request').Request
module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket
3
node_modules/undici/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
export * from './types/index'
import Undici from './types/index'
export default Undici
167
node_modules/undici/index.js
generated
vendored
Normal file
@ -0,0 +1,167 @@
'use strict'

const Client = require('./lib/client')
const Dispatcher = require('./lib/dispatcher')
const errors = require('./lib/core/errors')
const Pool = require('./lib/pool')
const BalancedPool = require('./lib/balanced-pool')
const Agent = require('./lib/agent')
const util = require('./lib/core/util')
const { InvalidArgumentError } = errors
const api = require('./lib/api')
const buildConnector = require('./lib/core/connect')
const MockClient = require('./lib/mock/mock-client')
const MockAgent = require('./lib/mock/mock-agent')
const MockPool = require('./lib/mock/mock-pool')
const mockErrors = require('./lib/mock/mock-errors')
const ProxyAgent = require('./lib/proxy-agent')
const RetryHandler = require('./lib/handler/RetryHandler')
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const DecoratorHandler = require('./lib/handler/DecoratorHandler')
const RedirectHandler = require('./lib/handler/RedirectHandler')
const createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor')

let hasCrypto
try {
  require('crypto')
  hasCrypto = true
} catch {
  hasCrypto = false
}

Object.assign(Dispatcher.prototype, api)

module.exports.Dispatcher = Dispatcher
module.exports.Client = Client
module.exports.Pool = Pool
module.exports.BalancedPool = BalancedPool
module.exports.Agent = Agent
module.exports.ProxyAgent = ProxyAgent
module.exports.RetryHandler = RetryHandler

module.exports.DecoratorHandler = DecoratorHandler
module.exports.RedirectHandler = RedirectHandler
module.exports.createRedirectInterceptor = createRedirectInterceptor

module.exports.buildConnector = buildConnector
module.exports.errors = errors

function makeDispatcher (fn) {
  return (url, opts, handler) => {
    if (typeof opts === 'function') {
      handler = opts
      opts = null
    }

    if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {
      throw new InvalidArgumentError('invalid url')
    }

    if (opts != null && typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (opts && opts.path != null) {
      if (typeof opts.path !== 'string') {
        throw new InvalidArgumentError('invalid opts.path')
      }

      let path = opts.path
      if (!opts.path.startsWith('/')) {
        path = `/${path}`
      }

      url = new URL(util.parseOrigin(url).origin + path)
    } else {
      if (!opts) {
        opts = typeof url === 'object' ? url : {}
      }

      url = util.parseURL(url)
    }

    const { agent, dispatcher = getGlobalDispatcher() } = opts

    if (agent) {
      throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
    }

    return fn.call(dispatcher, {
      ...opts,
      origin: url.origin,
      path: url.search ? `${url.pathname}${url.search}` : url.pathname,
      method: opts.method || (opts.body ? 'PUT' : 'GET')
    }, handler)
  }
}

module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher

if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
  let fetchImpl = null
  module.exports.fetch = async function fetch (resource) {
    if (!fetchImpl) {
      fetchImpl = require('./lib/fetch').fetch
    }

    try {
      return await fetchImpl(...arguments)
    } catch (err) {
      if (typeof err === 'object') {
        Error.captureStackTrace(err, this)
      }

      throw err
    }
  }
  module.exports.Headers = require('./lib/fetch/headers').Headers
  module.exports.Response = require('./lib/fetch/response').Response
  module.exports.Request = require('./lib/fetch/request').Request
  module.exports.FormData = require('./lib/fetch/formdata').FormData
  module.exports.File = require('./lib/fetch/file').File
  module.exports.FileReader = require('./lib/fileapi/filereader').FileReader

  const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global')

  module.exports.setGlobalOrigin = setGlobalOrigin
  module.exports.getGlobalOrigin = getGlobalOrigin

  const { CacheStorage } = require('./lib/cache/cachestorage')
  const { kConstruct } = require('./lib/cache/symbols')

  // Cache & CacheStorage are tightly coupled with fetch. Even if it may run
  // in an older version of Node, it doesn't have any use without fetch.
  module.exports.caches = new CacheStorage(kConstruct)
}

if (util.nodeMajor >= 16) {
  const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies')

  module.exports.deleteCookie = deleteCookie
  module.exports.getCookies = getCookies
  module.exports.getSetCookies = getSetCookies
  module.exports.setCookie = setCookie

  const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL')

  module.exports.parseMIMEType = parseMIMEType
  module.exports.serializeAMimeType = serializeAMimeType
}

if (util.nodeMajor >= 18 && hasCrypto) {
  const { WebSocket } = require('./lib/websocket/websocket')

  module.exports.WebSocket = WebSocket
}

module.exports.request = makeDispatcher(api.request)
module.exports.stream = makeDispatcher(api.stream)
module.exports.pipeline = makeDispatcher(api.pipeline)
module.exports.connect = makeDispatcher(api.connect)
module.exports.upgrade = makeDispatcher(api.upgrade)

module.exports.MockClient = MockClient
module.exports.MockPool = MockPool
module.exports.MockAgent = MockAgent
module.exports.mockErrors = mockErrors
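For orientation (a sketch, not part of the vendored file): `index.js` above binds the API functions (`request`, `stream`, `pipeline`, `connect`, `upgrade`) to the global dispatcher through `makeDispatcher`, so a bare top-level call like the one below resolves the origin and path and dispatches on the default global `Agent`; the URL is a placeholder:

```js
// Sketch only: the URL is a placeholder.
import { request, getGlobalDispatcher } from 'undici'

console.log(getGlobalDispatcher().constructor.name) // 'Agent' by default

const { statusCode, body } = await request('http://localhost:3000/hello?foo=bar')
console.log(statusCode, await body.text())
```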
148
node_modules/undici/lib/agent.js
generated
vendored
Normal file
@ -0,0 +1,148 @@
'use strict'
|
||||||
|
|
||||||
|
const { InvalidArgumentError } = require('./core/errors')
|
||||||
|
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')
|
||||||
|
const DispatcherBase = require('./dispatcher-base')
|
||||||
|
const Pool = require('./pool')
|
||||||
|
const Client = require('./client')
|
||||||
|
const util = require('./core/util')
|
||||||
|
const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
|
||||||
|
const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()
|
||||||
|
|
||||||
|
const kOnConnect = Symbol('onConnect')
|
||||||
|
const kOnDisconnect = Symbol('onDisconnect')
|
||||||
|
const kOnConnectionError = Symbol('onConnectionError')
|
||||||
|
const kMaxRedirections = Symbol('maxRedirections')
|
||||||
|
const kOnDrain = Symbol('onDrain')
|
||||||
|
const kFactory = Symbol('factory')
|
||||||
|
const kFinalizer = Symbol('finalizer')
|
||||||
|
const kOptions = Symbol('options')
|
||||||
|
|
||||||
|
function defaultFactory (origin, opts) {
|
||||||
|
return opts && opts.connections === 1
|
||||||
|
? new Client(origin, opts)
|
||||||
|
: new Pool(origin, opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
class Agent extends DispatcherBase {
|
||||||
|
constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
|
||||||
|
super()
|
||||||
|
|
||||||
|
if (typeof factory !== 'function') {
|
||||||
|
throw new InvalidArgumentError('factory must be a function.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
|
||||||
|
throw new InvalidArgumentError('connect must be a function or an object')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
|
||||||
|
throw new InvalidArgumentError('maxRedirections must be a positive number')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (connect && typeof connect !== 'function') {
|
||||||
|
connect = { ...connect }
|
||||||
|
}
|
||||||
|
|
||||||
|
this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
|
||||||
|
? options.interceptors.Agent
|
||||||
|
: [createRedirectInterceptor({ maxRedirections })]
|
||||||
|
|
||||||
|
this[kOptions] = { ...util.deepClone(options), connect }
|
||||||
|
this[kOptions].interceptors = options.interceptors
|
||||||
|
? { ...options.interceptors }
|
||||||
|
: undefined
|
||||||
|
this[kMaxRedirections] = maxRedirections
|
||||||
|
this[kFactory] = factory
|
||||||
|
this[kClients] = new Map()
|
||||||
|
this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {
|
||||||
|
const ref = this[kClients].get(key)
|
||||||
|
if (ref !== undefined && ref.deref() === undefined) {
|
||||||
|
this[kClients].delete(key)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const agent = this
|
||||||
|
|
||||||
|
this[kOnDrain] = (origin, targets) => {
|
||||||
|
agent.emit('drain', origin, [agent, ...targets])
|
||||||
|
}
|
||||||
|
|
||||||
|
this[kOnConnect] = (origin, targets) => {
|
||||||
|
agent.emit('connect', origin, [agent, ...targets])
|
||||||
|
}
|
||||||
|
|
||||||
|
this[kOnDisconnect] = (origin, targets, err) => {
|
||||||
|
agent.emit('disconnect', origin, [agent, ...targets], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
this[kOnConnectionError] = (origin, targets, err) => {
|
||||||
|
agent.emit('connectionError', origin, [agent, ...targets], err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
get [kRunning] () {
|
||||||
|
let ret = 0
|
||||||
|
for (const ref of this[kClients].values()) {
|
||||||
|
const client = ref.deref()
|
||||||
|
/* istanbul ignore next: gc is undeterministic */
|
||||||
|
if (client) {
|
||||||
|
ret += client[kRunning]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
[kDispatch] (opts, handler) {
|
||||||
|
let key
|
||||||
|
if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
|
||||||
|
key = String(opts.origin)
|
||||||
|
} else {
|
||||||
|
throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
|
||||||
|
}
|
||||||
|
|
||||||
|
const ref = this[kClients].get(key)
|
||||||
|
|
||||||
|
let dispatcher = ref ? ref.deref() : null
|
||||||
|
if (!dispatcher) {
|
||||||
|
dispatcher = this[kFactory](opts.origin, this[kOptions])
|
||||||
|
.on('drain', this[kOnDrain])
|
||||||
|
.on('connect', this[kOnConnect])
|
||||||
|
.on('disconnect', this[kOnDisconnect])
|
||||||
|
.on('connectionError', this[kOnConnectionError])
|
||||||
|
|
||||||
|
this[kClients].set(key, new WeakRef(dispatcher))
|
||||||
|
this[kFinalizer].register(dispatcher, key)
|
||||||
|
}
|
||||||
|
|
||||||
|
return dispatcher.dispatch(opts, handler)
|
||||||
|
}
|
||||||
|
|
||||||
|
async [kClose] () {
|
||||||
|
const closePromises = []
|
||||||
|
for (const ref of this[kClients].values()) {
|
||||||
|
const client = ref.deref()
|
||||||
|
/* istanbul ignore else: gc is undeterministic */
|
||||||
|
if (client) {
|
||||||
|
closePromises.push(client.close())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await Promise.all(closePromises)
|
||||||
|
}
|
||||||
|
|
||||||
|
async [kDestroy] (err) {
|
||||||
|
const destroyPromises = []
|
||||||
|
for (const ref of this[kClients].values()) {
|
||||||
|
const client = ref.deref()
|
||||||
|
/* istanbul ignore else: gc is undeterministic */
|
||||||
|
if (client) {
|
||||||
|
destroyPromises.push(client.destroy(err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await Promise.all(destroyPromises)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Agent
|
54
node_modules/undici/lib/api/abort-signal.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
const { addAbortListener } = require('../core/util')
const { RequestAbortedError } = require('../core/errors')

const kListener = Symbol('kListener')
const kSignal = Symbol('kSignal')

function abort (self) {
  if (self.abort) {
    self.abort()
  } else {
    self.onError(new RequestAbortedError())
  }
}

function addSignal (self, signal) {
  self[kSignal] = null
  self[kListener] = null

  if (!signal) {
    return
  }

  if (signal.aborted) {
    abort(self)
    return
  }

  self[kSignal] = signal
  self[kListener] = () => {
    abort(self)
  }

  addAbortListener(self[kSignal], self[kListener])
}

function removeSignal (self) {
  if (!self[kSignal]) {
    return
  }

  if ('removeEventListener' in self[kSignal]) {
    self[kSignal].removeEventListener('abort', self[kListener])
  } else {
    self[kSignal].removeListener('abort', self[kListener])
  }

  self[kSignal] = null
  self[kListener] = null
}

module.exports = {
  addSignal,
  removeSignal
}
104
node_modules/undici/lib/api/api-connect.js
generated
vendored
Normal file
@ -0,0 +1,104 @@
'use strict'
|
||||||
|
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
|
||||||
|
class ConnectHandler extends AsyncResource {
|
||||||
|
constructor (opts, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, opaque, responseHeaders } = opts
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_CONNECT')
|
||||||
|
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.callback = callback
|
||||||
|
this.abort = null
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders () {
|
||||||
|
throw new SocketError('bad connect', null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onUpgrade (statusCode, rawHeaders, socket) {
|
||||||
|
const { callback, opaque, context } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
|
||||||
|
let headers = rawHeaders
|
||||||
|
// Indicates is an HTTP2Session
|
||||||
|
if (headers != null) {
|
||||||
|
headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.runInAsyncScope(callback, null, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
socket,
|
||||||
|
opaque,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { callback, opaque } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function connect (opts, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
connect.call(this, opts, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const connectHandler = new ConnectHandler(opts, callback)
|
||||||
|
this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = connect
|
249
node_modules/undici/lib/api/api-pipeline.js
generated
vendored
Normal file
@ -0,0 +1,249 @@
'use strict'
|
||||||
|
|
||||||
|
const {
|
||||||
|
Readable,
|
||||||
|
Duplex,
|
||||||
|
PassThrough
|
||||||
|
} = require('stream')
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
InvalidReturnValueError,
|
||||||
|
RequestAbortedError
|
||||||
|
} = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
const assert = require('assert')
|
||||||
|
|
||||||
|
const kResume = Symbol('resume')
|
||||||
|
|
||||||
|
class PipelineRequest extends Readable {
|
||||||
|
constructor () {
|
||||||
|
super({ autoDestroy: true })
|
||||||
|
|
||||||
|
this[kResume] = null
|
||||||
|
}
|
||||||
|
|
||||||
|
_read () {
|
||||||
|
const { [kResume]: resume } = this
|
||||||
|
|
||||||
|
if (resume) {
|
||||||
|
this[kResume] = null
|
||||||
|
resume()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_destroy (err, callback) {
|
||||||
|
this._read()
|
||||||
|
|
||||||
|
callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PipelineResponse extends Readable {
|
||||||
|
constructor (resume) {
|
||||||
|
super({ autoDestroy: true })
|
||||||
|
this[kResume] = resume
|
||||||
|
}
|
||||||
|
|
||||||
|
_read () {
|
||||||
|
this[kResume]()
|
||||||
|
}
|
||||||
|
|
||||||
|
_destroy (err, callback) {
|
||||||
|
if (!err && !this._readableState.endEmitted) {
|
||||||
|
err = new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PipelineHandler extends AsyncResource {
|
||||||
|
constructor (opts, handler) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof handler !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid handler')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, method, opaque, onInfo, responseHeaders } = opts
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (method === 'CONNECT') {
|
||||||
|
throw new InvalidArgumentError('invalid method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onInfo && typeof onInfo !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid onInfo callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_PIPELINE')
|
||||||
|
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.handler = handler
|
||||||
|
this.abort = null
|
||||||
|
this.context = null
|
||||||
|
this.onInfo = onInfo || null
|
||||||
|
|
||||||
|
this.req = new PipelineRequest().on('error', util.nop)
|
||||||
|
|
||||||
|
this.ret = new Duplex({
|
||||||
|
readableObjectMode: opts.objectMode,
|
||||||
|
autoDestroy: true,
|
||||||
|
read: () => {
|
||||||
|
const { body } = this
|
||||||
|
|
||||||
|
if (body && body.resume) {
|
||||||
|
body.resume()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
write: (chunk, encoding, callback) => {
|
||||||
|
const { req } = this
|
||||||
|
|
||||||
|
if (req.push(chunk, encoding) || req._readableState.destroyed) {
|
||||||
|
callback()
|
||||||
|
} else {
|
||||||
|
req[kResume] = callback
|
||||||
|
}
|
||||||
|
},
|
||||||
|
destroy: (err, callback) => {
|
||||||
|
const { body, req, res, ret, abort } = this
|
||||||
|
|
||||||
|
if (!err && !ret._readableState.endEmitted) {
|
||||||
|
err = new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (abort && err) {
|
||||||
|
abort()
|
||||||
|
}
|
||||||
|
|
||||||
|
util.destroy(body, err)
|
||||||
|
util.destroy(req, err)
|
||||||
|
util.destroy(res, err)
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
callback(err)
|
||||||
|
}
|
||||||
|
}).on('prefinish', () => {
|
||||||
|
const { req } = this
|
||||||
|
|
||||||
|
// Node < 15 does not call _final in same tick.
|
||||||
|
req.push(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
this.res = null
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
const { ret, res } = this
|
||||||
|
|
||||||
|
assert(!res, 'pipeline cannot be retried')
|
||||||
|
|
||||||
|
if (ret.destroyed) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, rawHeaders, resume) {
|
||||||
|
const { opaque, handler, context } = this
|
||||||
|
|
||||||
|
if (statusCode < 200) {
|
||||||
|
if (this.onInfo) {
|
||||||
|
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
this.onInfo({ statusCode, headers })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.res = new PipelineResponse(resume)
|
||||||
|
|
||||||
|
let body
|
||||||
|
try {
|
||||||
|
this.handler = null
|
||||||
|
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
body = this.runInAsyncScope(handler, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
opaque,
|
||||||
|
body: this.res,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
} catch (err) {
|
||||||
|
this.res.on('error', util.nop)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!body || typeof body.on !== 'function') {
|
||||||
|
throw new InvalidReturnValueError('expected Readable')
|
||||||
|
}
|
||||||
|
|
||||||
|
body
|
||||||
|
.on('data', (chunk) => {
|
||||||
|
const { ret, body } = this
|
||||||
|
|
||||||
|
if (!ret.push(chunk) && body.pause) {
|
||||||
|
body.pause()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.on('error', (err) => {
|
||||||
|
const { ret } = this
|
||||||
|
|
||||||
|
util.destroy(ret, err)
|
||||||
|
})
|
||||||
|
.on('end', () => {
|
||||||
|
const { ret } = this
|
||||||
|
|
||||||
|
ret.push(null)
|
||||||
|
})
|
||||||
|
.on('close', () => {
|
||||||
|
const { ret } = this
|
||||||
|
|
||||||
|
if (!ret._readableState.ended) {
|
||||||
|
util.destroy(ret, new RequestAbortedError())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
this.body = body
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
const { res } = this
|
||||||
|
return res.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
const { res } = this
|
||||||
|
res.push(null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { ret } = this
|
||||||
|
this.handler = null
|
||||||
|
util.destroy(ret, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function pipeline (opts, handler) {
|
||||||
|
try {
|
||||||
|
const pipelineHandler = new PipelineHandler(opts, handler)
|
||||||
|
this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
|
||||||
|
return pipelineHandler.ret
|
||||||
|
} catch (err) {
|
||||||
|
return new PassThrough().destroy(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = pipeline
|
180
node_modules/undici/lib/api/api-request.js
generated
vendored
Normal file
@ -0,0 +1,180 @@
'use strict'
|
||||||
|
|
||||||
|
const Readable = require('./readable')
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
RequestAbortedError
|
||||||
|
} = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { getResolveErrorBodyCallback } = require('./util')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
|
||||||
|
class RequestHandler extends AsyncResource {
|
||||||
|
constructor (opts, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
|
||||||
|
throw new InvalidArgumentError('invalid highWaterMark')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (method === 'CONNECT') {
|
||||||
|
throw new InvalidArgumentError('invalid method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onInfo && typeof onInfo !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid onInfo callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_REQUEST')
|
||||||
|
} catch (err) {
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
util.destroy(body.on('error', util.nop), err)
|
||||||
|
}
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.callback = callback
|
||||||
|
this.res = null
|
||||||
|
this.abort = null
|
||||||
|
this.body = body
|
||||||
|
this.trailers = {}
|
||||||
|
this.context = null
|
||||||
|
this.onInfo = onInfo || null
|
||||||
|
this.throwOnError = throwOnError
|
||||||
|
this.highWaterMark = highWaterMark
|
||||||
|
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
body.on('error', (err) => {
|
||||||
|
this.onError(err)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
|
||||||
|
const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
|
||||||
|
|
||||||
|
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
|
||||||
|
if (statusCode < 200) {
|
||||||
|
if (this.onInfo) {
|
||||||
|
this.onInfo({ statusCode, headers })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
|
||||||
|
const contentType = parsedHeaders['content-type']
|
||||||
|
const body = new Readable({ resume, abort, contentType, highWaterMark })
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
this.res = body
|
||||||
|
if (callback !== null) {
|
||||||
|
if (this.throwOnError && statusCode >= 400) {
|
||||||
|
this.runInAsyncScope(getResolveErrorBodyCallback, null,
|
||||||
|
{ callback, body, contentType, statusCode, statusMessage, headers }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
this.runInAsyncScope(callback, null, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
trailers: this.trailers,
|
||||||
|
opaque,
|
||||||
|
body,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
const { res } = this
|
||||||
|
return res.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
const { res } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
util.parseHeaders(trailers, this.trailers)
|
||||||
|
|
||||||
|
res.push(null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { res, callback, body, opaque } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
// TODO: Does this need queueMicrotask?
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (res) {
|
||||||
|
this.res = null
|
||||||
|
// Ensure all queued handlers are invoked before destroying res.
|
||||||
|
queueMicrotask(() => {
|
||||||
|
util.destroy(res, err)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body) {
|
||||||
|
this.body = null
|
||||||
|
util.destroy(body, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function request (opts, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
request.call(this, opts, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.dispatch(opts, new RequestHandler(opts, callback))
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = request
|
||||||
|
module.exports.RequestHandler = RequestHandler
|
220
node_modules/undici/lib/api/api-stream.js
generated
vendored
Normal file
@ -0,0 +1,220 @@
'use strict'
|
||||||
|
|
||||||
|
const { finished, PassThrough } = require('stream')
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
InvalidReturnValueError,
|
||||||
|
RequestAbortedError
|
||||||
|
} = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { getResolveErrorBodyCallback } = require('./util')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
|
||||||
|
class StreamHandler extends AsyncResource {
|
||||||
|
constructor (opts, factory, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof factory !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid factory')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (method === 'CONNECT') {
|
||||||
|
throw new InvalidArgumentError('invalid method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onInfo && typeof onInfo !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid onInfo callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_STREAM')
|
||||||
|
} catch (err) {
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
util.destroy(body.on('error', util.nop), err)
|
||||||
|
}
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.factory = factory
|
||||||
|
this.callback = callback
|
||||||
|
this.res = null
|
||||||
|
this.abort = null
|
||||||
|
this.context = null
|
||||||
|
this.trailers = null
|
||||||
|
this.body = body
|
||||||
|
this.onInfo = onInfo || null
|
||||||
|
this.throwOnError = throwOnError || false
|
||||||
|
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
body.on('error', (err) => {
|
||||||
|
this.onError(err)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
|
||||||
|
const { factory, opaque, context, callback, responseHeaders } = this
|
||||||
|
|
||||||
|
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
|
||||||
|
if (statusCode < 200) {
|
||||||
|
if (this.onInfo) {
|
||||||
|
this.onInfo({ statusCode, headers })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.factory = null
|
||||||
|
|
||||||
|
let res
|
||||||
|
|
||||||
|
if (this.throwOnError && statusCode >= 400) {
|
||||||
|
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
|
||||||
|
const contentType = parsedHeaders['content-type']
|
||||||
|
res = new PassThrough()
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
this.runInAsyncScope(getResolveErrorBodyCallback, null,
|
||||||
|
{ callback, body: res, contentType, statusCode, statusMessage, headers }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
if (factory === null) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
res = this.runInAsyncScope(factory, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
opaque,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
|
||||||
|
if (
|
||||||
|
!res ||
|
||||||
|
typeof res.write !== 'function' ||
|
||||||
|
typeof res.end !== 'function' ||
|
||||||
|
typeof res.on !== 'function'
|
||||||
|
) {
|
||||||
|
throw new InvalidReturnValueError('expected Writable')
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
|
||||||
|
finished(res, { readable: false }, (err) => {
|
||||||
|
const { callback, res, opaque, trailers, abort } = this
|
||||||
|
|
||||||
|
this.res = null
|
||||||
|
if (err || !res.readable) {
|
||||||
|
util.destroy(res, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
|
||||||
|
|
||||||
|
if (err) {
|
||||||
|
abort()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
res.on('drain', resume)
|
||||||
|
|
||||||
|
this.res = res
|
||||||
|
|
||||||
|
const needDrain = res.writableNeedDrain !== undefined
|
||||||
|
? res.writableNeedDrain
|
||||||
|
: res._writableState && res._writableState.needDrain
|
||||||
|
|
||||||
|
return needDrain !== true
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
const { res } = this
|
||||||
|
|
||||||
|
return res ? res.write(chunk) : true
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
const { res } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (!res) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.trailers = util.parseHeaders(trailers)
|
||||||
|
|
||||||
|
res.end()
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { res, callback, opaque, body } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
this.factory = null
|
||||||
|
|
||||||
|
if (res) {
|
||||||
|
this.res = null
|
||||||
|
util.destroy(res, err)
|
||||||
|
} else if (callback) {
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body) {
|
||||||
|
this.body = null
|
||||||
|
util.destroy(body, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function stream (opts, factory, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
stream.call(this, opts, factory, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.dispatch(opts, new StreamHandler(opts, factory, callback))
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = stream
|
105
node_modules/undici/lib/api/api-upgrade.js
generated
vendored
Normal file
@ -0,0 +1,105 @@
'use strict'
|
||||||
|
|
||||||
|
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
const assert = require('assert')
|
||||||
|
|
||||||
|
class UpgradeHandler extends AsyncResource {
|
||||||
|
constructor (opts, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, opaque, responseHeaders } = opts
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_UPGRADE')
|
||||||
|
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.callback = callback
|
||||||
|
this.abort = null
|
||||||
|
this.context = null
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = null
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders () {
|
||||||
|
throw new SocketError('bad upgrade', null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onUpgrade (statusCode, rawHeaders, socket) {
|
||||||
|
const { callback, opaque, context } = this
|
||||||
|
|
||||||
|
assert.strictEqual(statusCode, 101)
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
this.runInAsyncScope(callback, null, null, {
|
||||||
|
headers,
|
||||||
|
socket,
|
||||||
|
opaque,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { callback, opaque } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function upgrade (opts, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
upgrade.call(this, opts, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const upgradeHandler = new UpgradeHandler(opts, callback)
|
||||||
|
this.dispatch({
|
||||||
|
...opts,
|
||||||
|
method: opts.method || 'GET',
|
||||||
|
upgrade: opts.protocol || 'Websocket'
|
||||||
|
}, upgradeHandler)
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = upgrade
|
7
node_modules/undici/lib/api/index.js
generated
vendored
Normal file
@ -0,0 +1,7 @@
'use strict'

module.exports.request = require('./api-request')
module.exports.stream = require('./api-stream')
module.exports.pipeline = require('./api-pipeline')
module.exports.upgrade = require('./api-upgrade')
module.exports.connect = require('./api-connect')
322
node_modules/undici/lib/api/readable.js
generated
vendored
Normal file
@ -0,0 +1,322 @@
// Ported from https://github.com/nodejs/undici/pull/907
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const assert = require('assert')
|
||||||
|
const { Readable } = require('stream')
|
||||||
|
const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { ReadableStreamFrom, toUSVString } = require('../core/util')
|
||||||
|
|
||||||
|
let Blob
|
||||||
|
|
||||||
|
const kConsume = Symbol('kConsume')
|
||||||
|
const kReading = Symbol('kReading')
|
||||||
|
const kBody = Symbol('kBody')
|
||||||
|
const kAbort = Symbol('abort')
|
||||||
|
const kContentType = Symbol('kContentType')
|
||||||
|
|
||||||
|
const noop = () => {}
|
||||||
|
|
||||||
|
module.exports = class BodyReadable extends Readable {
|
||||||
|
constructor ({
|
||||||
|
resume,
|
||||||
|
abort,
|
||||||
|
contentType = '',
|
||||||
|
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
|
||||||
|
}) {
|
||||||
|
super({
|
||||||
|
autoDestroy: true,
|
||||||
|
read: resume,
|
||||||
|
highWaterMark
|
||||||
|
})
|
||||||
|
|
||||||
|
this._readableState.dataEmitted = false
|
||||||
|
|
||||||
|
this[kAbort] = abort
|
||||||
|
this[kConsume] = null
|
||||||
|
this[kBody] = null
|
||||||
|
this[kContentType] = contentType
|
||||||
|
|
||||||
|
// Is stream being consumed through Readable API?
|
||||||
|
// This is an optimization so that we avoid checking
|
||||||
|
// for 'data' and 'readable' listeners in the hot path
|
||||||
|
// inside push().
|
||||||
|
this[kReading] = false
|
||||||
|
}
|
||||||
|
|
||||||
|
destroy (err) {
|
||||||
|
if (this.destroyed) {
|
||||||
|
// Node < 16
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!err && !this._readableState.endEmitted) {
|
||||||
|
err = new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (err) {
|
||||||
|
this[kAbort]()
|
||||||
|
}
|
||||||
|
|
||||||
|
return super.destroy(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
emit (ev, ...args) {
|
||||||
|
if (ev === 'data') {
|
||||||
|
// Node < 16.7
|
||||||
|
this._readableState.dataEmitted = true
|
||||||
|
} else if (ev === 'error') {
|
||||||
|
// Node < 16
|
||||||
|
this._readableState.errorEmitted = true
|
||||||
|
}
|
||||||
|
return super.emit(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
on (ev, ...args) {
|
||||||
|
if (ev === 'data' || ev === 'readable') {
|
||||||
|
this[kReading] = true
|
||||||
|
}
|
||||||
|
return super.on(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
addListener (ev, ...args) {
|
||||||
|
return this.on(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
off (ev, ...args) {
|
||||||
|
const ret = super.off(ev, ...args)
|
||||||
|
if (ev === 'data' || ev === 'readable') {
|
||||||
|
this[kReading] = (
|
||||||
|
this.listenerCount('data') > 0 ||
|
||||||
|
this.listenerCount('readable') > 0
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
removeListener (ev, ...args) {
|
||||||
|
return this.off(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
push (chunk) {
|
||||||
|
if (this[kConsume] && chunk !== null && this.readableLength === 0) {
|
||||||
|
consumePush(this[kConsume], chunk)
|
||||||
|
return this[kReading] ? super.push(chunk) : true
|
||||||
|
}
|
||||||
|
return super.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-text
|
||||||
|
async text () {
|
||||||
|
return consume(this, 'text')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-json
|
||||||
|
async json () {
|
||||||
|
return consume(this, 'json')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-blob
|
||||||
|
async blob () {
|
||||||
|
return consume(this, 'blob')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-arraybuffer
|
||||||
|
async arrayBuffer () {
|
||||||
|
return consume(this, 'arrayBuffer')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-formdata
|
||||||
|
async formData () {
|
||||||
|
// TODO: Implement.
|
||||||
|
throw new NotSupportedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-bodyused
|
||||||
|
get bodyUsed () {
|
||||||
|
return util.isDisturbed(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-body
|
||||||
|
get body () {
|
||||||
|
if (!this[kBody]) {
|
||||||
|
this[kBody] = ReadableStreamFrom(this)
|
||||||
|
if (this[kConsume]) {
|
||||||
|
// TODO: Is this the best way to force a lock?
|
||||||
|
this[kBody].getReader() // Ensure stream is locked.
|
||||||
|
assert(this[kBody].locked)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return this[kBody]
|
||||||
|
}
|
||||||
|
|
||||||
|
dump (opts) {
|
||||||
|
let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
|
||||||
|
const signal = opts && opts.signal
|
||||||
|
|
||||||
|
if (signal) {
|
||||||
|
try {
|
||||||
|
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||||
|
          throw new InvalidArgumentError('signal must be an AbortSignal')
        }
        util.throwIfAborted(signal)
      } catch (err) {
        return Promise.reject(err)
      }
    }

    if (this.closed) {
      return Promise.resolve(null)
    }

    return new Promise((resolve, reject) => {
      const signalListenerCleanup = signal
        ? util.addAbortListener(signal, () => {
          this.destroy()
        })
        : noop

      this
        .on('close', function () {
          signalListenerCleanup()
          if (signal && signal.aborted) {
            reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
          } else {
            resolve(null)
          }
        })
        .on('error', noop)
        .on('data', function (chunk) {
          limit -= chunk.length
          if (limit <= 0) {
            this.destroy()
          }
        })
        .resume()
    })
  }
}

// https://streams.spec.whatwg.org/#readablestream-locked
function isLocked (self) {
  // Consume is an implicit lock.
  return (self[kBody] && self[kBody].locked === true) || self[kConsume]
}

// https://fetch.spec.whatwg.org/#body-unusable
function isUnusable (self) {
  return util.isDisturbed(self) || isLocked(self)
}

async function consume (stream, type) {
  if (isUnusable(stream)) {
    throw new TypeError('unusable')
  }

  assert(!stream[kConsume])

  return new Promise((resolve, reject) => {
    stream[kConsume] = {
      type,
      stream,
      resolve,
      reject,
      length: 0,
      body: []
    }

    stream
      .on('error', function (err) {
        consumeFinish(this[kConsume], err)
      })
      .on('close', function () {
        if (this[kConsume].body !== null) {
          consumeFinish(this[kConsume], new RequestAbortedError())
        }
      })

    process.nextTick(consumeStart, stream[kConsume])
  })
}

function consumeStart (consume) {
  if (consume.body === null) {
    return
  }

  const { _readableState: state } = consume.stream

  for (const chunk of state.buffer) {
    consumePush(consume, chunk)
  }

  if (state.endEmitted) {
    consumeEnd(this[kConsume])
  } else {
    consume.stream.on('end', function () {
      consumeEnd(this[kConsume])
    })
  }

  consume.stream.resume()

  while (consume.stream.read() != null) {
    // Loop
  }
}

function consumeEnd (consume) {
  const { type, body, resolve, stream, length } = consume

  try {
    if (type === 'text') {
      resolve(toUSVString(Buffer.concat(body)))
    } else if (type === 'json') {
      resolve(JSON.parse(Buffer.concat(body)))
    } else if (type === 'arrayBuffer') {
      const dst = new Uint8Array(length)

      let pos = 0
      for (const buf of body) {
        dst.set(buf, pos)
        pos += buf.byteLength
      }

      resolve(dst.buffer)
    } else if (type === 'blob') {
      if (!Blob) {
        Blob = require('buffer').Blob
      }
      resolve(new Blob(body, { type: stream[kContentType] }))
    }

    consumeFinish(consume)
  } catch (err) {
    stream.destroy(err)
  }
}

function consumePush (consume, chunk) {
  consume.length += chunk.length
  consume.body.push(chunk)
}

function consumeFinish (consume, err) {
  if (consume.body === null) {
    return
  }

  if (err) {
    consume.reject(err)
  } else {
    consume.resolve()
  }

  consume.type = null
  consume.stream = null
  consume.resolve = null
  consume.reject = null
  consume.length = 0
  consume.body = null
}
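Editor's note: the consume()/consumeEnd() helpers above implement a simple accumulate-then-decode pattern. A minimal standalone sketch of the same idea, assuming a plain Node.js Readable; collectBody is a hypothetical name, not an undici export:

const { Buffer } = require('buffer')

// Gather all chunks of a Node.js Readable, then decode once at the end,
// mirroring how consume() buffers chunks and consumeEnd() resolves them.
async function collectBody (stream, type = 'text') {
  const chunks = []
  for await (const chunk of stream) {
    chunks.push(chunk)
  }
  const buf = Buffer.concat(chunks)
  if (type === 'json') return JSON.parse(buf.toString('utf8'))
  if (type === 'arrayBuffer') return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
  return buf.toString('utf8')
}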
46
node_modules/undici/lib/api/util.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
const assert = require('assert')
const {
  ResponseStatusCodeError
} = require('../core/errors')
const { toUSVString } = require('../core/util')

async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
  assert(body)

  let chunks = []
  let limit = 0

  for await (const chunk of body) {
    chunks.push(chunk)
    limit += chunk.length
    if (limit > 128 * 1024) {
      chunks = null
      break
    }
  }

  if (statusCode === 204 || !contentType || !chunks) {
    process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
    return
  }

  try {
    if (contentType.startsWith('application/json')) {
      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }

    if (contentType.startsWith('text/')) {
      const payload = toUSVString(Buffer.concat(chunks))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }
  } catch (err) {
    // Process in a fallback if error
  }

  process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
}

module.exports = { getResolveErrorBodyCallback }
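Editor's note: getResolveErrorBodyCallback backs undici's `throwOnError` request option; it reads at most 128 KiB of a failed response and attaches the decoded payload to the ResponseStatusCodeError. A hedged usage sketch, where the URL is a placeholder and the error fields shown are assumptions about the error class:

const { request, errors } = require('undici')

async function demo () {
  try {
    await request('https://example.com/missing', { throwOnError: true })
  } catch (err) {
    if (err instanceof errors.ResponseStatusCodeError) {
      // statusCode, headers and (for text/JSON responses) body come from the helper above
      console.log(err.statusCode, err.body)
    }
  }
}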
190
node_modules/undici/lib/balanced-pool.js
generated
vendored
Normal file
@ -0,0 +1,190 @@
'use strict'

const {
  BalancedPoolMissingUpstreamError,
  InvalidArgumentError
} = require('./core/errors')
const {
  PoolBase,
  kClients,
  kNeedDrain,
  kAddClient,
  kRemoveClient,
  kGetDispatcher
} = require('./pool-base')
const Pool = require('./pool')
const { kUrl, kInterceptors } = require('./core/symbols')
const { parseOrigin } = require('./core/util')
const kFactory = Symbol('factory')

const kOptions = Symbol('options')
const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
const kCurrentWeight = Symbol('kCurrentWeight')
const kIndex = Symbol('kIndex')
const kWeight = Symbol('kWeight')
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
const kErrorPenalty = Symbol('kErrorPenalty')

function getGreatestCommonDivisor (a, b) {
  if (b === 0) return a
  return getGreatestCommonDivisor(b, a % b)
}

function defaultFactory (origin, opts) {
  return new Pool(origin, opts)
}

class BalancedPool extends PoolBase {
  constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
    super()

    this[kOptions] = opts
    this[kIndex] = -1
    this[kCurrentWeight] = 0

    this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
    this[kErrorPenalty] = this[kOptions].errorPenalty || 15

    if (!Array.isArray(upstreams)) {
      upstreams = [upstreams]
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
      ? opts.interceptors.BalancedPool
      : []
    this[kFactory] = factory

    for (const upstream of upstreams) {
      this.addUpstream(upstream)
    }
    this._updateBalancedPoolStats()
  }

  addUpstream (upstream) {
    const upstreamOrigin = parseOrigin(upstream).origin

    if (this[kClients].find((pool) => (
      pool[kUrl].origin === upstreamOrigin &&
      pool.closed !== true &&
      pool.destroyed !== true
    ))) {
      return this
    }
    const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))

    this[kAddClient](pool)
    pool.on('connect', () => {
      pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
    })

    pool.on('connectionError', () => {
      pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
      this._updateBalancedPoolStats()
    })

    pool.on('disconnect', (...args) => {
      const err = args[2]
      if (err && err.code === 'UND_ERR_SOCKET') {
        // decrease the weight of the pool.
        pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
        this._updateBalancedPoolStats()
      }
    })

    for (const client of this[kClients]) {
      client[kWeight] = this[kMaxWeightPerServer]
    }

    this._updateBalancedPoolStats()

    return this
  }

  _updateBalancedPoolStats () {
    this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
  }

  removeUpstream (upstream) {
    const upstreamOrigin = parseOrigin(upstream).origin

    const pool = this[kClients].find((pool) => (
      pool[kUrl].origin === upstreamOrigin &&
      pool.closed !== true &&
      pool.destroyed !== true
    ))

    if (pool) {
      this[kRemoveClient](pool)
    }

    return this
  }

  get upstreams () {
    return this[kClients]
      .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
      .map((p) => p[kUrl].origin)
  }

  [kGetDispatcher] () {
    // We validate that pools is greater than 0,
    // otherwise we would have to wait until an upstream
    // is added, which might never happen.
    if (this[kClients].length === 0) {
      throw new BalancedPoolMissingUpstreamError()
    }

    const dispatcher = this[kClients].find(dispatcher => (
      !dispatcher[kNeedDrain] &&
      dispatcher.closed !== true &&
      dispatcher.destroyed !== true
    ))

    if (!dispatcher) {
      return
    }

    const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)

    if (allClientsBusy) {
      return
    }

    let counter = 0

    let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])

    while (counter++ < this[kClients].length) {
      this[kIndex] = (this[kIndex] + 1) % this[kClients].length
      const pool = this[kClients][this[kIndex]]

      // find pool index with the largest weight
      if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
        maxWeightIndex = this[kIndex]
      }

      // decrease the current weight every `this[kClients].length`.
      if (this[kIndex] === 0) {
        // Set the current weight to the next lower weight.
        this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]

        if (this[kCurrentWeight] <= 0) {
          this[kCurrentWeight] = this[kMaxWeightPerServer]
        }
      }
      if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
        return pool
      }
    }

    this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
    this[kIndex] = maxWeightIndex
    return this[kClients][maxWeightIndex]
  }
}

module.exports = BalancedPool
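Editor's note: a hedged sketch of the documented BalancedPool API implemented above; upstream URLs are placeholders. Requests are spread with the interleaved weighted round-robin from [kGetDispatcher], and weights drop on connection errors and recover on successful connects.

const { BalancedPool } = require('undici')

async function demo () {
  const pool = new BalancedPool(['http://127.0.0.1:3000', 'http://127.0.0.1:3001'])

  const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
  console.log(statusCode, await body.text())

  console.log(pool.upstreams) // origins currently participating in the rotation
  await pool.close()
}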
838
node_modules/undici/lib/cache/cache.js
generated
vendored
Normal file
@ -0,0 +1,838 @@
'use strict'

const { kConstruct } = require('./symbols')
const { urlEquals, fieldValues: getFieldValues } = require('./util')
const { kEnumerableProperty, isDisturbed } = require('../core/util')
const { kHeadersList } = require('../core/symbols')
const { webidl } = require('../fetch/webidl')
const { Response, cloneResponse } = require('../fetch/response')
const { Request } = require('../fetch/request')
const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')
const { fetching } = require('../fetch/index')
const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
const assert = require('assert')
const { getGlobalDispatcher } = require('../global')

/**
 * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
 * @typedef {Object} CacheBatchOperation
 * @property {'delete' | 'put'} type
 * @property {any} request
 * @property {any} response
 * @property {import('../../types/cache').CacheQueryOptions} options
 */

/**
 * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
 * @typedef {[any, any][]} requestResponseList
 */

class Cache {
  /**
   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
   * @type {requestResponseList}
   */
  #relevantRequestResponseList

  constructor () {
    if (arguments[0] !== kConstruct) {
      webidl.illegalConstructor()
    }

    this.#relevantRequestResponseList = arguments[1]
  }

  async match (request, options = {}) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    const p = await this.matchAll(request, options)

    if (p.length === 0) {
      return
    }

    return p[0]
  }

  async matchAll (request = undefined, options = {}) {
    webidl.brandCheck(this, Cache)

    if (request !== undefined) request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    // 1.
    let r = null

    // 2.
    if (request !== undefined) {
      if (request instanceof Request) {
        // 2.1.1
        r = request[kState]

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') {
        // 2.2.1
        r = new Request(request)[kState]
      }
    }

    // 5.
    // 5.1
    const responses = []

    // 5.2
    if (request === undefined) {
      // 5.2.1
      for (const requestResponse of this.#relevantRequestResponseList) {
        responses.push(requestResponse[1])
      }
    } else { // 5.3
      // 5.3.1
      const requestResponses = this.#queryCache(r, options)

      // 5.3.2
      for (const requestResponse of requestResponses) {
        responses.push(requestResponse[1])
      }
    }

    // 5.4
    // We don't implement CORs so we don't need to loop over the responses, yay!

    // 5.5.1
    const responseList = []

    // 5.5.2
    for (const response of responses) {
      // 5.5.2.1
      const responseObject = new Response(response.body?.source ?? null)
      const body = responseObject[kState].body
      responseObject[kState] = response
      responseObject[kState].body = body
      responseObject[kHeaders][kHeadersList] = response.headersList
      responseObject[kHeaders][kGuard] = 'immutable'

      responseList.push(responseObject)
    }

    // 6.
    return Object.freeze(responseList)
  }

  async add (request) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })

    request = webidl.converters.RequestInfo(request)

    // 1.
    const requests = [request]

    // 2.
    const responseArrayPromise = this.addAll(requests)

    // 3.
    return await responseArrayPromise
  }

  async addAll (requests) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })

    requests = webidl.converters['sequence<RequestInfo>'](requests)

    // 1.
    const responsePromises = []

    // 2.
    const requestList = []

    // 3.
    for (const request of requests) {
      if (typeof request === 'string') {
        continue
      }

      // 3.1
      const r = request[kState]

      // 3.2
      if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
        throw webidl.errors.exception({
          header: 'Cache.addAll',
          message: 'Expected http/s scheme when method is not GET.'
        })
      }
    }

    // 4.
    /** @type {ReturnType<typeof fetching>[]} */
    const fetchControllers = []

    // 5.
    for (const request of requests) {
      // 5.1
      const r = new Request(request)[kState]

      // 5.2
      if (!urlIsHttpHttpsScheme(r.url)) {
        throw webidl.errors.exception({
          header: 'Cache.addAll',
          message: 'Expected http/s scheme.'
        })
      }

      // 5.4
      r.initiator = 'fetch'
      r.destination = 'subresource'

      // 5.5
      requestList.push(r)

      // 5.6
      const responsePromise = createDeferredPromise()

      // 5.7
      fetchControllers.push(fetching({
        request: r,
        dispatcher: getGlobalDispatcher(),
        processResponse (response) {
          // 1.
          if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
            responsePromise.reject(webidl.errors.exception({
              header: 'Cache.addAll',
              message: 'Received an invalid status code or the request failed.'
            }))
          } else if (response.headersList.contains('vary')) { // 2.
            // 2.1
            const fieldValues = getFieldValues(response.headersList.get('vary'))

            // 2.2
            for (const fieldValue of fieldValues) {
              // 2.2.1
              if (fieldValue === '*') {
                responsePromise.reject(webidl.errors.exception({
                  header: 'Cache.addAll',
                  message: 'invalid vary field value'
                }))

                for (const controller of fetchControllers) {
                  controller.abort()
                }

                return
              }
            }
          }
        },
        processResponseEndOfBody (response) {
          // 1.
          if (response.aborted) {
            responsePromise.reject(new DOMException('aborted', 'AbortError'))
            return
          }

          // 2.
          responsePromise.resolve(response)
        }
      }))

      // 5.8
      responsePromises.push(responsePromise.promise)
    }

    // 6.
    const p = Promise.all(responsePromises)

    // 7.
    const responses = await p

    // 7.1
    const operations = []

    // 7.2
    let index = 0

    // 7.3
    for (const response of responses) {
      // 7.3.1
      /** @type {CacheBatchOperation} */
      const operation = {
        type: 'put', // 7.3.2
        request: requestList[index], // 7.3.3
        response // 7.3.4
      }

      operations.push(operation) // 7.3.5

      index++ // 7.3.6
    }

    // 7.5
    const cacheJobPromise = createDeferredPromise()

    // 7.6.1
    let errorData = null

    // 7.6.2
    try {
      this.#batchCacheOperations(operations)
    } catch (e) {
      errorData = e
    }

    // 7.6.3
    queueMicrotask(() => {
      // 7.6.3.1
      if (errorData === null) {
        cacheJobPromise.resolve(undefined)
      } else {
        // 7.6.3.2
        cacheJobPromise.reject(errorData)
      }
    })

    // 7.7
    return cacheJobPromise.promise
  }

  async put (request, response) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })

    request = webidl.converters.RequestInfo(request)
    response = webidl.converters.Response(response)

    // 1.
    let innerRequest = null

    // 2.
    if (request instanceof Request) {
      innerRequest = request[kState]
    } else { // 3.
      innerRequest = new Request(request)[kState]
    }

    // 4.
    if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
      throw webidl.errors.exception({
        header: 'Cache.put',
        message: 'Expected an http/s scheme when method is not GET'
      })
    }

    // 5.
    const innerResponse = response[kState]

    // 6.
    if (innerResponse.status === 206) {
      throw webidl.errors.exception({
        header: 'Cache.put',
        message: 'Got 206 status'
      })
    }

    // 7.
    if (innerResponse.headersList.contains('vary')) {
      // 7.1.
      const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))

      // 7.2.
      for (const fieldValue of fieldValues) {
        // 7.2.1
        if (fieldValue === '*') {
          throw webidl.errors.exception({
            header: 'Cache.put',
            message: 'Got * vary field value'
          })
        }
      }
    }

    // 8.
    if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
      throw webidl.errors.exception({
        header: 'Cache.put',
        message: 'Response body is locked or disturbed'
      })
    }

    // 9.
    const clonedResponse = cloneResponse(innerResponse)

    // 10.
    const bodyReadPromise = createDeferredPromise()

    // 11.
    if (innerResponse.body != null) {
      // 11.1
      const stream = innerResponse.body.stream

      // 11.2
      const reader = stream.getReader()

      // 11.3
      readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
    } else {
      bodyReadPromise.resolve(undefined)
    }

    // 12.
    /** @type {CacheBatchOperation[]} */
    const operations = []

    // 13.
    /** @type {CacheBatchOperation} */
    const operation = {
      type: 'put', // 14.
      request: innerRequest, // 15.
      response: clonedResponse // 16.
    }

    // 17.
    operations.push(operation)

    // 19.
    const bytes = await bodyReadPromise.promise

    if (clonedResponse.body != null) {
      clonedResponse.body.source = bytes
    }

    // 19.1
    const cacheJobPromise = createDeferredPromise()

    // 19.2.1
    let errorData = null

    // 19.2.2
    try {
      this.#batchCacheOperations(operations)
    } catch (e) {
      errorData = e
    }

    // 19.2.3
    queueMicrotask(() => {
      // 19.2.3.1
      if (errorData === null) {
        cacheJobPromise.resolve()
      } else { // 19.2.3.2
        cacheJobPromise.reject(errorData)
      }
    })

    return cacheJobPromise.promise
  }

  async delete (request, options = {}) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    /**
     * @type {Request}
     */
    let r = null

    if (request instanceof Request) {
      r = request[kState]

      if (r.method !== 'GET' && !options.ignoreMethod) {
        return false
      }
    } else {
      assert(typeof request === 'string')

      r = new Request(request)[kState]
    }

    /** @type {CacheBatchOperation[]} */
    const operations = []

    /** @type {CacheBatchOperation} */
    const operation = {
      type: 'delete',
      request: r,
      options
    }

    operations.push(operation)

    const cacheJobPromise = createDeferredPromise()

    let errorData = null
    let requestResponses

    try {
      requestResponses = this.#batchCacheOperations(operations)
    } catch (e) {
      errorData = e
    }

    queueMicrotask(() => {
      if (errorData === null) {
        cacheJobPromise.resolve(!!requestResponses?.length)
      } else {
        cacheJobPromise.reject(errorData)
      }
    })

    return cacheJobPromise.promise
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
   * @param {any} request
   * @param {import('../../types/cache').CacheQueryOptions} options
   * @returns {readonly Request[]}
   */
  async keys (request = undefined, options = {}) {
    webidl.brandCheck(this, Cache)

    if (request !== undefined) request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    // 1.
    let r = null

    // 2.
    if (request !== undefined) {
      // 2.1
      if (request instanceof Request) {
        // 2.1.1
        r = request[kState]

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') { // 2.2
        r = new Request(request)[kState]
      }
    }

    // 4.
    const promise = createDeferredPromise()

    // 5.
    // 5.1
    const requests = []

    // 5.2
    if (request === undefined) {
      // 5.2.1
      for (const requestResponse of this.#relevantRequestResponseList) {
        // 5.2.1.1
        requests.push(requestResponse[0])
      }
    } else { // 5.3
      // 5.3.1
      const requestResponses = this.#queryCache(r, options)

      // 5.3.2
      for (const requestResponse of requestResponses) {
        // 5.3.2.1
        requests.push(requestResponse[0])
      }
    }

    // 5.4
    queueMicrotask(() => {
      // 5.4.1
      const requestList = []

      // 5.4.2
      for (const request of requests) {
        const requestObject = new Request('https://a')
        requestObject[kState] = request
        requestObject[kHeaders][kHeadersList] = request.headersList
        requestObject[kHeaders][kGuard] = 'immutable'
        requestObject[kRealm] = request.client

        // 5.4.2.1
        requestList.push(requestObject)
      }

      // 5.4.3
      promise.resolve(Object.freeze(requestList))
    })

    return promise.promise
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
   * @param {CacheBatchOperation[]} operations
   * @returns {requestResponseList}
   */
  #batchCacheOperations (operations) {
    // 1.
    const cache = this.#relevantRequestResponseList

    // 2.
    const backupCache = [...cache]

    // 3.
    const addedItems = []

    // 4.1
    const resultList = []

    try {
      // 4.2
      for (const operation of operations) {
        // 4.2.1
        if (operation.type !== 'delete' && operation.type !== 'put') {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'operation type does not match "delete" or "put"'
          })
        }

        // 4.2.2
        if (operation.type === 'delete' && operation.response != null) {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'delete operation should not have an associated response'
          })
        }

        // 4.2.3
        if (this.#queryCache(operation.request, operation.options, addedItems).length) {
          throw new DOMException('???', 'InvalidStateError')
        }

        // 4.2.4
        let requestResponses

        // 4.2.5
        if (operation.type === 'delete') {
          // 4.2.5.1
          requestResponses = this.#queryCache(operation.request, operation.options)

          // TODO: the spec is wrong, this is needed to pass WPTs
          if (requestResponses.length === 0) {
            return []
          }

          // 4.2.5.2
          for (const requestResponse of requestResponses) {
            const idx = cache.indexOf(requestResponse)
            assert(idx !== -1)

            // 4.2.5.2.1
            cache.splice(idx, 1)
          }
        } else if (operation.type === 'put') { // 4.2.6
          // 4.2.6.1
          if (operation.response == null) {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'put operation should have an associated response'
            })
          }

          // 4.2.6.2
          const r = operation.request

          // 4.2.6.3
          if (!urlIsHttpHttpsScheme(r.url)) {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'expected http or https scheme'
            })
          }

          // 4.2.6.4
          if (r.method !== 'GET') {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'not get method'
            })
          }

          // 4.2.6.5
          if (operation.options != null) {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'options must not be defined'
            })
          }

          // 4.2.6.6
          requestResponses = this.#queryCache(operation.request)

          // 4.2.6.7
          for (const requestResponse of requestResponses) {
            const idx = cache.indexOf(requestResponse)
            assert(idx !== -1)

            // 4.2.6.7.1
            cache.splice(idx, 1)
          }

          // 4.2.6.8
          cache.push([operation.request, operation.response])

          // 4.2.6.10
          addedItems.push([operation.request, operation.response])
        }

        // 4.2.7
        resultList.push([operation.request, operation.response])
      }

      // 4.3
      return resultList
    } catch (e) { // 5.
      // 5.1
      this.#relevantRequestResponseList.length = 0

      // 5.2
      this.#relevantRequestResponseList = backupCache

      // 5.3
      throw e
    }
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#query-cache
   * @param {any} requestQuery
   * @param {import('../../types/cache').CacheQueryOptions} options
   * @param {requestResponseList} targetStorage
   * @returns {requestResponseList}
   */
  #queryCache (requestQuery, options, targetStorage) {
    /** @type {requestResponseList} */
    const resultList = []

    const storage = targetStorage ?? this.#relevantRequestResponseList

    for (const requestResponse of storage) {
      const [cachedRequest, cachedResponse] = requestResponse
      if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
        resultList.push(requestResponse)
      }
    }

    return resultList
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
   * @param {any} requestQuery
   * @param {any} request
   * @param {any | null} response
   * @param {import('../../types/cache').CacheQueryOptions | undefined} options
   * @returns {boolean}
   */
  #requestMatchesCachedItem (requestQuery, request, response = null, options) {
    // if (options?.ignoreMethod === false && request.method === 'GET') {
    //   return false
    // }

    const queryURL = new URL(requestQuery.url)

    const cachedURL = new URL(request.url)

    if (options?.ignoreSearch) {
      cachedURL.search = ''

      queryURL.search = ''
    }

    if (!urlEquals(queryURL, cachedURL, true)) {
      return false
    }

    if (
      response == null ||
      options?.ignoreVary ||
      !response.headersList.contains('vary')
    ) {
      return true
    }

    const fieldValues = getFieldValues(response.headersList.get('vary'))

    for (const fieldValue of fieldValues) {
      if (fieldValue === '*') {
        return false
      }

      const requestValue = request.headersList.get(fieldValue)
      const queryValue = requestQuery.headersList.get(fieldValue)

      // If one has the header and the other doesn't, or one has
      // a different value than the other, return false
      if (requestValue !== queryValue) {
        return false
      }
    }

    return true
  }
}

Object.defineProperties(Cache.prototype, {
  [Symbol.toStringTag]: {
    value: 'Cache',
    configurable: true
  },
  match: kEnumerableProperty,
  matchAll: kEnumerableProperty,
  add: kEnumerableProperty,
  addAll: kEnumerableProperty,
  put: kEnumerableProperty,
  delete: kEnumerableProperty,
  keys: kEnumerableProperty
})

const cacheQueryOptionConverters = [
  {
    key: 'ignoreSearch',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'ignoreMethod',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'ignoreVary',
    converter: webidl.converters.boolean,
    defaultValue: false
  }
]

webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)

webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
  ...cacheQueryOptionConverters,
  {
    key: 'cacheName',
    converter: webidl.converters.DOMString
  }
])

webidl.converters.Response = webidl.interfaceConverter(Response)

webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
  webidl.converters.RequestInfo
)

module.exports = {
  Cache
}
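Editor's note: a hedged sketch of how CacheQueryOptions feed #requestMatchesCachedItem above. It assumes the library exposes its CacheStorage instance as `caches`, as recent undici 5.x releases do; URLs are placeholders.

const { caches, Response } = require('undici')

async function demo () {
  const cache = await caches.open('v1')
  await cache.put('https://example.com/data?page=1', new Response('hello'))

  // The query string differs, so the default match fails...
  console.log(await cache.match('https://example.com/data?page=2')) // undefined
  // ...but ignoreSearch strips the search component on both sides before comparing.
  console.log(await cache.match('https://example.com/data?page=2', { ignoreSearch: true })) // Response
}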
144
node_modules/undici/lib/cache/cachestorage.js
generated
vendored
Normal file
@ -0,0 +1,144 @@
'use strict'

const { kConstruct } = require('./symbols')
const { Cache } = require('./cache')
const { webidl } = require('../fetch/webidl')
const { kEnumerableProperty } = require('../core/util')

class CacheStorage {
  /**
   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
   * @type {Map<string, import('./cache').requestResponseList}
   */
  #caches = new Map()

  constructor () {
    if (arguments[0] !== kConstruct) {
      webidl.illegalConstructor()
    }
  }

  async match (request, options = {}) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.MultiCacheQueryOptions(options)

    // 1.
    if (options.cacheName != null) {
      // 1.1.1.1
      if (this.#caches.has(options.cacheName)) {
        // 1.1.1.1.1
        const cacheList = this.#caches.get(options.cacheName)
        const cache = new Cache(kConstruct, cacheList)

        return await cache.match(request, options)
      }
    } else { // 2.
      // 2.2
      for (const cacheList of this.#caches.values()) {
        const cache = new Cache(kConstruct, cacheList)

        // 2.2.1.2
        const response = await cache.match(request, options)

        if (response !== undefined) {
          return response
        }
      }
    }
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-has
   * @param {string} cacheName
   * @returns {Promise<boolean>}
   */
  async has (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })

    cacheName = webidl.converters.DOMString(cacheName)

    // 2.1.1
    // 2.2
    return this.#caches.has(cacheName)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
   * @param {string} cacheName
   * @returns {Promise<Cache>}
   */
  async open (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })

    cacheName = webidl.converters.DOMString(cacheName)

    // 2.1
    if (this.#caches.has(cacheName)) {
      // await caches.open('v1') !== await caches.open('v1')

      // 2.1.1
      const cache = this.#caches.get(cacheName)

      // 2.1.1.1
      return new Cache(kConstruct, cache)
    }

    // 2.2
    const cache = []

    // 2.3
    this.#caches.set(cacheName, cache)

    // 2.4
    return new Cache(kConstruct, cache)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
   * @param {string} cacheName
   * @returns {Promise<boolean>}
   */
  async delete (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })

    cacheName = webidl.converters.DOMString(cacheName)

    return this.#caches.delete(cacheName)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
   * @returns {string[]}
   */
  async keys () {
    webidl.brandCheck(this, CacheStorage)

    // 2.1
    const keys = this.#caches.keys()

    // 2.2
    return [...keys]
  }
}

Object.defineProperties(CacheStorage.prototype, {
  [Symbol.toStringTag]: {
    value: 'CacheStorage',
    configurable: true
  },
  match: kEnumerableProperty,
  has: kEnumerableProperty,
  open: kEnumerableProperty,
  delete: kEnumerableProperty,
  keys: kEnumerableProperty
})

module.exports = {
  CacheStorage
}
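Editor's note: a hedged sketch of the CacheStorage bookkeeping above, again assuming the `caches` export. open() always returns a fresh Cache wrapper, but wrappers opened under the same name share one underlying request/response list.

const { caches } = require('undici')

async function demo () {
  const a = await caches.open('v1')
  const b = await caches.open('v1')
  console.log(a === b)                // false: distinct wrappers over the same list
  console.log(await caches.has('v1')) // true
  console.log(await caches.keys())    // ['v1']
  await caches.delete('v1')
}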
5
node_modules/undici/lib/cache/symbols.js
generated
vendored
Normal file
@ -0,0 +1,5 @@
'use strict'

module.exports = {
  kConstruct: require('../core/symbols').kConstruct
}
49
node_modules/undici/lib/cache/util.js
generated
vendored
Normal file
@ -0,0 +1,49 @@
'use strict'

const assert = require('assert')
const { URLSerializer } = require('../fetch/dataURL')
const { isValidHeaderName } = require('../fetch/util')

/**
 * @see https://url.spec.whatwg.org/#concept-url-equals
 * @param {URL} A
 * @param {URL} B
 * @param {boolean | undefined} excludeFragment
 * @returns {boolean}
 */
function urlEquals (A, B, excludeFragment = false) {
  const serializedA = URLSerializer(A, excludeFragment)

  const serializedB = URLSerializer(B, excludeFragment)

  return serializedA === serializedB
}

/**
 * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
 * @param {string} header
 */
function fieldValues (header) {
  assert(header !== null)

  const values = []

  for (let value of header.split(',')) {
    value = value.trim()

    if (!value.length) {
      continue
    } else if (!isValidHeaderName(value)) {
      continue
    }

    values.push(value)
  }

  return values
}

module.exports = {
  urlEquals,
  fieldValues
}
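Editor's note: a small worked example of fieldValues() above, which splits a Vary header on commas and keeps only non-empty, valid header names; the require path is relative to this directory and shown only for illustration.

const { fieldValues } = require('./util')

console.log(fieldValues('Accept-Encoding, , User-Agent'))
// -> ['Accept-Encoding', 'User-Agent']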
2283
node_modules/undici/lib/client.js
generated
vendored
Normal file
File diff suppressed because it is too large
48
node_modules/undici/lib/compat/dispatcher-weakref.js
generated
vendored
Normal file
@ -0,0 +1,48 @@
'use strict'

/* istanbul ignore file: only for Node 12 */

const { kConnected, kSize } = require('../core/symbols')

class CompatWeakRef {
  constructor (value) {
    this.value = value
  }

  deref () {
    return this.value[kConnected] === 0 && this.value[kSize] === 0
      ? undefined
      : this.value
  }
}

class CompatFinalizer {
  constructor (finalizer) {
    this.finalizer = finalizer
  }

  register (dispatcher, key) {
    if (dispatcher.on) {
      dispatcher.on('disconnect', () => {
        if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {
          this.finalizer(key)
        }
      })
    }
  }
}

module.exports = function () {
  // FIXME: remove workaround when the Node bug is fixed
  // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
  if (process.env.NODE_V8_COVERAGE) {
    return {
      WeakRef: CompatWeakRef,
      FinalizationRegistry: CompatFinalizer
    }
  }
  return {
    WeakRef: global.WeakRef || CompatWeakRef,
    FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
  }
}
12
node_modules/undici/lib/cookies/constants.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
'use strict'

// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
const maxAttributeValueSize = 1024

// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
const maxNameValuePairSize = 4096

module.exports = {
  maxAttributeValueSize,
  maxNameValuePairSize
}
184
node_modules/undici/lib/cookies/index.js
generated
vendored
Normal file
@ -0,0 +1,184 @@
'use strict'

const { parseSetCookie } = require('./parse')
const { stringify, getHeadersList } = require('./util')
const { webidl } = require('../fetch/webidl')
const { Headers } = require('../fetch/headers')

/**
 * @typedef {Object} Cookie
 * @property {string} name
 * @property {string} value
 * @property {Date|number|undefined} expires
 * @property {number|undefined} maxAge
 * @property {string|undefined} domain
 * @property {string|undefined} path
 * @property {boolean|undefined} secure
 * @property {boolean|undefined} httpOnly
 * @property {'Strict'|'Lax'|'None'} sameSite
 * @property {string[]} unparsed
 */

/**
 * @param {Headers} headers
 * @returns {Record<string, string>}
 */
function getCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })

  webidl.brandCheck(headers, Headers, { strict: false })

  const cookie = headers.get('cookie')
  const out = {}

  if (!cookie) {
    return out
  }

  for (const piece of cookie.split(';')) {
    const [name, ...value] = piece.split('=')

    out[name.trim()] = value.join('=')
  }

  return out
}

/**
 * @param {Headers} headers
 * @param {string} name
 * @param {{ path?: string, domain?: string }|undefined} attributes
 * @returns {void}
 */
function deleteCookie (headers, name, attributes) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })

  webidl.brandCheck(headers, Headers, { strict: false })

  name = webidl.converters.DOMString(name)
  attributes = webidl.converters.DeleteCookieAttributes(attributes)

  // Matches behavior of
  // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
  setCookie(headers, {
    name,
    value: '',
    expires: new Date(0),
    ...attributes
  })
}

/**
 * @param {Headers} headers
 * @returns {Cookie[]}
 */
function getSetCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })

  webidl.brandCheck(headers, Headers, { strict: false })

  const cookies = getHeadersList(headers).cookies

  if (!cookies) {
    return []
  }

  // In older versions of undici, cookies is a list of name:value.
  return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
}

/**
 * @param {Headers} headers
 * @param {Cookie} cookie
 * @returns {void}
 */
function setCookie (headers, cookie) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })

  webidl.brandCheck(headers, Headers, { strict: false })

  cookie = webidl.converters.Cookie(cookie)

  const str = stringify(cookie)

  if (str) {
    headers.append('Set-Cookie', stringify(cookie))
  }
}

webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'path',
    defaultValue: null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'domain',
    defaultValue: null
  }
])

webidl.converters.Cookie = webidl.dictionaryConverter([
  {
    converter: webidl.converters.DOMString,
    key: 'name'
  },
  {
    converter: webidl.converters.DOMString,
    key: 'value'
  },
  {
    converter: webidl.nullableConverter((value) => {
      if (typeof value === 'number') {
        return webidl.converters['unsigned long long'](value)
      }

      return new Date(value)
    }),
    key: 'expires',
    defaultValue: null
  },
  {
    converter: webidl.nullableConverter(webidl.converters['long long']),
    key: 'maxAge',
    defaultValue: null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'domain',
    defaultValue: null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'path',
    defaultValue: null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.boolean),
    key: 'secure',
    defaultValue: null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.boolean),
    key: 'httpOnly',
    defaultValue: null
  },
  {
    converter: webidl.converters.USVString,
    key: 'sameSite',
    allowedValues: ['Strict', 'Lax', 'None']
  },
  {
    converter: webidl.sequenceConverter(webidl.converters.DOMString),
    key: 'unparsed',
    defaultValue: []
  }
])

module.exports = {
  getCookies,
  deleteCookie,
  getSetCookies,
  setCookie
}
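Editor's note: a hedged usage sketch of the cookie helpers above, which undici re-exports from its root; the cookie values are placeholders.

const { Headers, setCookie, getSetCookies, deleteCookie } = require('undici')

const headers = new Headers()
setCookie(headers, { name: 'session', value: 'abc', path: '/', secure: true, sameSite: 'Lax' })
console.log(getSetCookies(headers)) // [{ name: 'session', value: 'abc', path: '/', ... }]
deleteCookie(headers, 'session')    // appends an expired Set-Cookie entry for the same name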
317
node_modules/undici/lib/cookies/parse.js
generated
vendored
Normal file
@ -0,0 +1,317 @@
'use strict'

const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
const { isCTLExcludingHtab } = require('./util')
const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')
const assert = require('assert')

/**
 * @description Parses the field-value attributes of a set-cookie header string.
 * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
 * @param {string} header
 * @returns if the header is invalid, null will be returned
 */
function parseSetCookie (header) {
  // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
  //    character (CTL characters excluding HTAB): Abort these steps and
  //    ignore the set-cookie-string entirely.
  if (isCTLExcludingHtab(header)) {
    return null
  }

  let nameValuePair = ''
  let unparsedAttributes = ''
  let name = ''
  let value = ''

  // 2. If the set-cookie-string contains a %x3B (";") character:
  if (header.includes(';')) {
    // 1. The name-value-pair string consists of the characters up to,
    //    but not including, the first %x3B (";"), and the unparsed-
    //    attributes consist of the remainder of the set-cookie-string
    //    (including the %x3B (";") in question).
    const position = { position: 0 }

    nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
    unparsedAttributes = header.slice(position.position)
  } else {
    // Otherwise:

    // 1. The name-value-pair string consists of all the characters
    //    contained in the set-cookie-string, and the unparsed-
    //    attributes is the empty string.
    nameValuePair = header
  }

  // 3. If the name-value-pair string lacks a %x3D ("=") character, then
  //    the name string is empty, and the value string is the value of
  //    name-value-pair.
  if (!nameValuePair.includes('=')) {
    value = nameValuePair
  } else {
    // Otherwise, the name string consists of the characters up to, but
    // not including, the first %x3D ("=") character, and the (possibly
    // empty) value string consists of the characters after the first
    // %x3D ("=") character.
    const position = { position: 0 }
    name = collectASequenceOfCodePointsFast(
      '=',
      nameValuePair,
      position
    )
    value = nameValuePair.slice(position.position + 1)
  }

  // 4. Remove any leading or trailing WSP characters from the name
  //    string and the value string.
  name = name.trim()
  value = value.trim()

  // 5. If the sum of the lengths of the name string and the value string
  //    is more than 4096 octets, abort these steps and ignore the set-
  //    cookie-string entirely.
  if (name.length + value.length > maxNameValuePairSize) {
    return null
  }

  // 6. The cookie-name is the name string, and the cookie-value is the
  //    value string.
  return {
    name, value, ...parseUnparsedAttributes(unparsedAttributes)
  }
}

/**
 * Parses the remaining attributes of a set-cookie header
 * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
 * @param {string} unparsedAttributes
 * @param {[Object.<string, unknown>]={}} cookieAttributeList
 */
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
  // 1. If the unparsed-attributes string is empty, skip the rest of
  //    these steps.
  if (unparsedAttributes.length === 0) {
    return cookieAttributeList
  }

  // 2. Discard the first character of the unparsed-attributes (which
  //    will be a %x3B (";") character).
  assert(unparsedAttributes[0] === ';')
  unparsedAttributes = unparsedAttributes.slice(1)

  let cookieAv = ''

  // 3. If the remaining unparsed-attributes contains a %x3B (";")
  //    character:
  if (unparsedAttributes.includes(';')) {
    // 1. Consume the characters of the unparsed-attributes up to, but
    //    not including, the first %x3B (";") character.
    cookieAv = collectASequenceOfCodePointsFast(
      ';',
      unparsedAttributes,
      { position: 0 }
    )
    unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
  } else {
    // Otherwise:

    // 1. Consume the remainder of the unparsed-attributes.
    cookieAv = unparsedAttributes
    unparsedAttributes = ''
  }

  // Let the cookie-av string be the characters consumed in this step.

  let attributeName = ''
  let attributeValue = ''

  // 4. If the cookie-av string contains a %x3D ("=") character:
  if (cookieAv.includes('=')) {
    // 1. The (possibly empty) attribute-name string consists of the
    //    characters up to, but not including, the first %x3D ("=")
    //    character, and the (possibly empty) attribute-value string
    //    consists of the characters after the first %x3D ("=")
    //    character.
    const position = { position: 0 }

    attributeName = collectASequenceOfCodePointsFast(
      '=',
      cookieAv,
      position
    )
    attributeValue = cookieAv.slice(position.position + 1)
  } else {
    // Otherwise:

    // 1. The attribute-name string consists of the entire cookie-av
    //    string, and the attribute-value string is empty.
    attributeName = cookieAv
  }

  // 5. Remove any leading or trailing WSP characters from the attribute-
  //    name string and the attribute-value string.
  attributeName = attributeName.trim()
  attributeValue = attributeValue.trim()

  // 6. If the attribute-value is longer than 1024 octets, ignore the
  //    cookie-av string and return to Step 1 of this algorithm.
  if (attributeValue.length > maxAttributeValueSize) {
    return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
  }

  // 7. Process the attribute-name and attribute-value according to the
  //    requirements in the following subsections. (Notice that
  //    attributes with unrecognized attribute-names are ignored.)
  const attributeNameLowercase = attributeName.toLowerCase()

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
  // If the attribute-name case-insensitively matches the string
  // "Expires", the user agent MUST process the cookie-av as follows.
  if (attributeNameLowercase === 'expires') {
    // 1. Let the expiry-time be the result of parsing the attribute-value
    //    as cookie-date (see Section 5.1.1).
    const expiryTime = new Date(attributeValue)

    // 2. If the attribute-value failed to parse as a cookie date, ignore
    //    the cookie-av.

    cookieAttributeList.expires = expiryTime
  } else if (attributeNameLowercase === 'max-age') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
    // If the attribute-name case-insensitively matches the string "Max-
    // Age", the user agent MUST process the cookie-av as follows.

    // 1. If the first character of the attribute-value is not a DIGIT or a
    //    "-" character, ignore the cookie-av.
    const charCode = attributeValue.charCodeAt(0)

    if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. If the remainder of attribute-value contains a non-DIGIT
|
||||||
|
// character, ignore the cookie-av.
|
||||||
|
if (!/^\d+$/.test(attributeValue)) {
|
||||||
|
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Let delta-seconds be the attribute-value converted to an integer.
|
||||||
|
const deltaSeconds = Number(attributeValue)
|
||||||
|
|
||||||
|
// 4. Let cookie-age-limit be the maximum age of the cookie (which
|
||||||
|
// SHOULD be 400 days or less, see Section 4.1.2.2).
|
||||||
|
|
||||||
|
// 5. Set delta-seconds to the smaller of its present value and cookie-
|
||||||
|
// age-limit.
|
||||||
|
// deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
|
||||||
|
|
||||||
|
// 6. If delta-seconds is less than or equal to zero (0), let expiry-
|
||||||
|
// time be the earliest representable date and time. Otherwise, let
|
||||||
|
// the expiry-time be the current date and time plus delta-seconds
|
||||||
|
// seconds.
|
||||||
|
// const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
|
||||||
|
|
||||||
|
// 7. Append an attribute to the cookie-attribute-list with an
|
||||||
|
// attribute-name of Max-Age and an attribute-value of expiry-time.
|
||||||
|
cookieAttributeList.maxAge = deltaSeconds
|
||||||
|
} else if (attributeNameLowercase === 'domain') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
|
||||||
|
// If the attribute-name case-insensitively matches the string "Domain",
|
||||||
|
// the user agent MUST process the cookie-av as follows.
|
||||||
|
|
||||||
|
// 1. Let cookie-domain be the attribute-value.
|
||||||
|
let cookieDomain = attributeValue
|
||||||
|
|
||||||
|
// 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
|
||||||
|
// cookie-domain without its leading %x2E (".").
|
||||||
|
if (cookieDomain[0] === '.') {
|
||||||
|
cookieDomain = cookieDomain.slice(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Convert the cookie-domain to lower case.
|
||||||
|
cookieDomain = cookieDomain.toLowerCase()
|
||||||
|
|
||||||
|
// 4. Append an attribute to the cookie-attribute-list with an
|
||||||
|
// attribute-name of Domain and an attribute-value of cookie-domain.
|
||||||
|
cookieAttributeList.domain = cookieDomain
|
||||||
|
} else if (attributeNameLowercase === 'path') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
|
||||||
|
// If the attribute-name case-insensitively matches the string "Path",
|
||||||
|
// the user agent MUST process the cookie-av as follows.
|
||||||
|
|
||||||
|
// 1. If the attribute-value is empty or if the first character of the
|
||||||
|
// attribute-value is not %x2F ("/"):
|
||||||
|
let cookiePath = ''
|
||||||
|
if (attributeValue.length === 0 || attributeValue[0] !== '/') {
|
||||||
|
// 1. Let cookie-path be the default-path.
|
||||||
|
cookiePath = '/'
|
||||||
|
} else {
|
||||||
|
// Otherwise:
|
||||||
|
|
||||||
|
// 1. Let cookie-path be the attribute-value.
|
||||||
|
cookiePath = attributeValue
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Append an attribute to the cookie-attribute-list with an
|
||||||
|
// attribute-name of Path and an attribute-value of cookie-path.
|
||||||
|
cookieAttributeList.path = cookiePath
|
||||||
|
} else if (attributeNameLowercase === 'secure') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
|
||||||
|
// If the attribute-name case-insensitively matches the string "Secure",
|
||||||
|
// the user agent MUST append an attribute to the cookie-attribute-list
|
||||||
|
// with an attribute-name of Secure and an empty attribute-value.
|
||||||
|
|
||||||
|
cookieAttributeList.secure = true
|
||||||
|
} else if (attributeNameLowercase === 'httponly') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
|
||||||
|
// If the attribute-name case-insensitively matches the string
|
||||||
|
// "HttpOnly", the user agent MUST append an attribute to the cookie-
|
||||||
|
// attribute-list with an attribute-name of HttpOnly and an empty
|
||||||
|
// attribute-value.
|
||||||
|
|
||||||
|
cookieAttributeList.httpOnly = true
|
||||||
|
} else if (attributeNameLowercase === 'samesite') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
|
||||||
|
// If the attribute-name case-insensitively matches the string
|
||||||
|
// "SameSite", the user agent MUST process the cookie-av as follows:
|
||||||
|
|
||||||
|
// 1. Let enforcement be "Default".
|
||||||
|
let enforcement = 'Default'
|
||||||
|
|
||||||
|
const attributeValueLowercase = attributeValue.toLowerCase()
|
||||||
|
// 2. If cookie-av's attribute-value is a case-insensitive match for
|
||||||
|
// "None", set enforcement to "None".
|
||||||
|
if (attributeValueLowercase.includes('none')) {
|
||||||
|
enforcement = 'None'
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. If cookie-av's attribute-value is a case-insensitive match for
|
||||||
|
// "Strict", set enforcement to "Strict".
|
||||||
|
if (attributeValueLowercase.includes('strict')) {
|
||||||
|
enforcement = 'Strict'
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. If cookie-av's attribute-value is a case-insensitive match for
|
||||||
|
// "Lax", set enforcement to "Lax".
|
||||||
|
if (attributeValueLowercase.includes('lax')) {
|
||||||
|
enforcement = 'Lax'
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Append an attribute to the cookie-attribute-list with an
|
||||||
|
// attribute-name of "SameSite" and an attribute-value of
|
||||||
|
// enforcement.
|
||||||
|
cookieAttributeList.sameSite = enforcement
|
||||||
|
} else {
|
||||||
|
cookieAttributeList.unparsed ??= []
|
||||||
|
|
||||||
|
cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 8. Return to Step 1 of this algorithm.
|
||||||
|
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
parseSetCookie,
|
||||||
|
parseUnparsedAttributes
|
||||||
|
}
|
291
node_modules/undici/lib/cookies/util.js
generated
vendored
Normal file
291
node_modules/undici/lib/cookies/util.js
generated
vendored
Normal file
|
@ -0,0 +1,291 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const assert = require('assert')
|
||||||
|
const { kHeadersList } = require('../core/symbols')
|
||||||
|
|
||||||
|
function isCTLExcludingHtab (value) {
|
||||||
|
if (value.length === 0) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const char of value) {
|
||||||
|
const code = char.charCodeAt(0)
|
||||||
|
|
||||||
|
if (
|
||||||
|
(code >= 0x00 || code <= 0x08) ||
|
||||||
|
(code >= 0x0A || code <= 0x1F) ||
|
||||||
|
code === 0x7F
|
||||||
|
) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
CHAR = <any US-ASCII character (octets 0 - 127)>
|
||||||
|
token = 1*<any CHAR except CTLs or separators>
|
||||||
|
separators = "(" | ")" | "<" | ">" | "@"
|
||||||
|
| "," | ";" | ":" | "\" | <">
|
||||||
|
| "/" | "[" | "]" | "?" | "="
|
||||||
|
| "{" | "}" | SP | HT
|
||||||
|
* @param {string} name
|
||||||
|
*/
|
||||||
|
function validateCookieName (name) {
|
||||||
|
for (const char of name) {
|
||||||
|
const code = char.charCodeAt(0)
|
||||||
|
|
||||||
|
if (
|
||||||
|
(code <= 0x20 || code > 0x7F) ||
|
||||||
|
char === '(' ||
|
||||||
|
char === ')' ||
|
||||||
|
char === '>' ||
|
||||||
|
char === '<' ||
|
||||||
|
char === '@' ||
|
||||||
|
char === ',' ||
|
||||||
|
char === ';' ||
|
||||||
|
char === ':' ||
|
||||||
|
char === '\\' ||
|
||||||
|
char === '"' ||
|
||||||
|
char === '/' ||
|
||||||
|
char === '[' ||
|
||||||
|
char === ']' ||
|
||||||
|
char === '?' ||
|
||||||
|
char === '=' ||
|
||||||
|
char === '{' ||
|
||||||
|
char === '}'
|
||||||
|
) {
|
||||||
|
throw new Error('Invalid cookie name')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
|
||||||
|
cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
|
||||||
|
; US-ASCII characters excluding CTLs,
|
||||||
|
; whitespace DQUOTE, comma, semicolon,
|
||||||
|
; and backslash
|
||||||
|
* @param {string} value
|
||||||
|
*/
|
||||||
|
function validateCookieValue (value) {
|
||||||
|
for (const char of value) {
|
||||||
|
const code = char.charCodeAt(0)
|
||||||
|
|
||||||
|
if (
|
||||||
|
code < 0x21 || // exclude CTLs (0-31)
|
||||||
|
code === 0x22 ||
|
||||||
|
code === 0x2C ||
|
||||||
|
code === 0x3B ||
|
||||||
|
code === 0x5C ||
|
||||||
|
code > 0x7E // non-ascii
|
||||||
|
) {
|
||||||
|
throw new Error('Invalid header value')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* path-value = <any CHAR except CTLs or ";">
|
||||||
|
* @param {string} path
|
||||||
|
*/
|
||||||
|
function validateCookiePath (path) {
|
||||||
|
for (const char of path) {
|
||||||
|
const code = char.charCodeAt(0)
|
||||||
|
|
||||||
|
if (code < 0x21 || char === ';') {
|
||||||
|
throw new Error('Invalid cookie path')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* I have no idea why these values aren't allowed to be honest,
|
||||||
|
* but Deno tests these. - Khafra
|
||||||
|
* @param {string} domain
|
||||||
|
*/
|
||||||
|
function validateCookieDomain (domain) {
|
||||||
|
if (
|
||||||
|
domain.startsWith('-') ||
|
||||||
|
domain.endsWith('.') ||
|
||||||
|
domain.endsWith('-')
|
||||||
|
) {
|
||||||
|
throw new Error('Invalid cookie domain')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
|
||||||
|
* @param {number|Date} date
|
||||||
|
IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT
|
||||||
|
; fixed length/zone/capitalization subset of the format
|
||||||
|
; see Section 3.3 of [RFC5322]
|
||||||
|
|
||||||
|
day-name = %x4D.6F.6E ; "Mon", case-sensitive
|
||||||
|
/ %x54.75.65 ; "Tue", case-sensitive
|
||||||
|
/ %x57.65.64 ; "Wed", case-sensitive
|
||||||
|
/ %x54.68.75 ; "Thu", case-sensitive
|
||||||
|
/ %x46.72.69 ; "Fri", case-sensitive
|
||||||
|
/ %x53.61.74 ; "Sat", case-sensitive
|
||||||
|
/ %x53.75.6E ; "Sun", case-sensitive
|
||||||
|
date1 = day SP month SP year
|
||||||
|
; e.g., 02 Jun 1982
|
||||||
|
|
||||||
|
day = 2DIGIT
|
||||||
|
month = %x4A.61.6E ; "Jan", case-sensitive
|
||||||
|
/ %x46.65.62 ; "Feb", case-sensitive
|
||||||
|
/ %x4D.61.72 ; "Mar", case-sensitive
|
||||||
|
/ %x41.70.72 ; "Apr", case-sensitive
|
||||||
|
/ %x4D.61.79 ; "May", case-sensitive
|
||||||
|
/ %x4A.75.6E ; "Jun", case-sensitive
|
||||||
|
/ %x4A.75.6C ; "Jul", case-sensitive
|
||||||
|
/ %x41.75.67 ; "Aug", case-sensitive
|
||||||
|
/ %x53.65.70 ; "Sep", case-sensitive
|
||||||
|
/ %x4F.63.74 ; "Oct", case-sensitive
|
||||||
|
/ %x4E.6F.76 ; "Nov", case-sensitive
|
||||||
|
/ %x44.65.63 ; "Dec", case-sensitive
|
||||||
|
year = 4DIGIT
|
||||||
|
|
||||||
|
GMT = %x47.4D.54 ; "GMT", case-sensitive
|
||||||
|
|
||||||
|
time-of-day = hour ":" minute ":" second
|
||||||
|
; 00:00:00 - 23:59:60 (leap second)
|
||||||
|
|
||||||
|
hour = 2DIGIT
|
||||||
|
minute = 2DIGIT
|
||||||
|
second = 2DIGIT
|
||||||
|
*/
|
||||||
|
function toIMFDate (date) {
|
||||||
|
if (typeof date === 'number') {
|
||||||
|
date = new Date(date)
|
||||||
|
}
|
||||||
|
|
||||||
|
const days = [
|
||||||
|
'Sun', 'Mon', 'Tue', 'Wed',
|
||||||
|
'Thu', 'Fri', 'Sat'
|
||||||
|
]
|
||||||
|
|
||||||
|
const months = [
|
||||||
|
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||||
|
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
|
||||||
|
]
|
||||||
|
|
||||||
|
const dayName = days[date.getUTCDay()]
|
||||||
|
const day = date.getUTCDate().toString().padStart(2, '0')
|
||||||
|
const month = months[date.getUTCMonth()]
|
||||||
|
const year = date.getUTCFullYear()
|
||||||
|
const hour = date.getUTCHours().toString().padStart(2, '0')
|
||||||
|
const minute = date.getUTCMinutes().toString().padStart(2, '0')
|
||||||
|
const second = date.getUTCSeconds().toString().padStart(2, '0')
|
||||||
|
|
||||||
|
return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
max-age-av = "Max-Age=" non-zero-digit *DIGIT
|
||||||
|
; In practice, both expires-av and max-age-av
|
||||||
|
; are limited to dates representable by the
|
||||||
|
; user agent.
|
||||||
|
* @param {number} maxAge
|
||||||
|
*/
|
||||||
|
function validateCookieMaxAge (maxAge) {
|
||||||
|
if (maxAge < 0) {
|
||||||
|
throw new Error('Invalid cookie max-age')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
|
||||||
|
* @param {import('./index').Cookie} cookie
|
||||||
|
*/
|
||||||
|
function stringify (cookie) {
|
||||||
|
if (cookie.name.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
validateCookieName(cookie.name)
|
||||||
|
validateCookieValue(cookie.value)
|
||||||
|
|
||||||
|
const out = [`${cookie.name}=${cookie.value}`]
|
||||||
|
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
|
||||||
|
if (cookie.name.startsWith('__Secure-')) {
|
||||||
|
cookie.secure = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.name.startsWith('__Host-')) {
|
||||||
|
cookie.secure = true
|
||||||
|
cookie.domain = null
|
||||||
|
cookie.path = '/'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.secure) {
|
||||||
|
out.push('Secure')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.httpOnly) {
|
||||||
|
out.push('HttpOnly')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof cookie.maxAge === 'number') {
|
||||||
|
validateCookieMaxAge(cookie.maxAge)
|
||||||
|
out.push(`Max-Age=${cookie.maxAge}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.domain) {
|
||||||
|
validateCookieDomain(cookie.domain)
|
||||||
|
out.push(`Domain=${cookie.domain}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.path) {
|
||||||
|
validateCookiePath(cookie.path)
|
||||||
|
out.push(`Path=${cookie.path}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
|
||||||
|
out.push(`Expires=${toIMFDate(cookie.expires)}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cookie.sameSite) {
|
||||||
|
out.push(`SameSite=${cookie.sameSite}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const part of cookie.unparsed) {
|
||||||
|
if (!part.includes('=')) {
|
||||||
|
throw new Error('Invalid unparsed')
|
||||||
|
}
|
||||||
|
|
||||||
|
const [key, ...value] = part.split('=')
|
||||||
|
|
||||||
|
out.push(`${key.trim()}=${value.join('=')}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return out.join('; ')
|
||||||
|
}
|
||||||
|
|
||||||
|
let kHeadersListNode
|
||||||
|
|
||||||
|
function getHeadersList (headers) {
|
||||||
|
if (headers[kHeadersList]) {
|
||||||
|
return headers[kHeadersList]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!kHeadersListNode) {
|
||||||
|
kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
|
||||||
|
(symbol) => symbol.description === 'headers list'
|
||||||
|
)
|
||||||
|
|
||||||
|
assert(kHeadersListNode, 'Headers cannot be parsed')
|
||||||
|
}
|
||||||
|
|
||||||
|
const headersList = headers[kHeadersListNode]
|
||||||
|
assert(headersList)
|
||||||
|
|
||||||
|
return headersList
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
isCTLExcludingHtab,
|
||||||
|
stringify,
|
||||||
|
getHeadersList
|
||||||
|
}
|
189
node_modules/undici/lib/core/connect.js
generated
vendored
Normal file
189
node_modules/undici/lib/core/connect.js
generated
vendored
Normal file
|
@ -0,0 +1,189 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const net = require('net')
|
||||||
|
const assert = require('assert')
|
||||||
|
const util = require('./util')
|
||||||
|
const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
|
||||||
|
|
||||||
|
let tls // include tls conditionally since it is not always available
|
||||||
|
|
||||||
|
// TODO: session re-use does not wait for the first
|
||||||
|
// connection to resolve the session and might therefore
|
||||||
|
// resolve the same servername multiple times even when
|
||||||
|
// re-use is enabled.
|
||||||
|
|
||||||
|
let SessionCache
|
||||||
|
// FIXME: remove workaround when the Node bug is fixed
|
||||||
|
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
|
||||||
|
if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
|
||||||
|
SessionCache = class WeakSessionCache {
|
||||||
|
constructor (maxCachedSessions) {
|
||||||
|
this._maxCachedSessions = maxCachedSessions
|
||||||
|
this._sessionCache = new Map()
|
||||||
|
this._sessionRegistry = new global.FinalizationRegistry((key) => {
|
||||||
|
if (this._sessionCache.size < this._maxCachedSessions) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const ref = this._sessionCache.get(key)
|
||||||
|
if (ref !== undefined && ref.deref() === undefined) {
|
||||||
|
this._sessionCache.delete(key)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
get (sessionKey) {
|
||||||
|
const ref = this._sessionCache.get(sessionKey)
|
||||||
|
return ref ? ref.deref() : null
|
||||||
|
}
|
||||||
|
|
||||||
|
set (sessionKey, session) {
|
||||||
|
if (this._maxCachedSessions === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this._sessionCache.set(sessionKey, new WeakRef(session))
|
||||||
|
this._sessionRegistry.register(session, sessionKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
SessionCache = class SimpleSessionCache {
|
||||||
|
constructor (maxCachedSessions) {
|
||||||
|
this._maxCachedSessions = maxCachedSessions
|
||||||
|
this._sessionCache = new Map()
|
||||||
|
}
|
||||||
|
|
||||||
|
get (sessionKey) {
|
||||||
|
return this._sessionCache.get(sessionKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
set (sessionKey, session) {
|
||||||
|
if (this._maxCachedSessions === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this._sessionCache.size >= this._maxCachedSessions) {
|
||||||
|
// remove the oldest session
|
||||||
|
const { value: oldestKey } = this._sessionCache.keys().next()
|
||||||
|
this._sessionCache.delete(oldestKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
this._sessionCache.set(sessionKey, session)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
|
||||||
|
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
|
||||||
|
throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
|
||||||
|
}
|
||||||
|
|
||||||
|
const options = { path: socketPath, ...opts }
|
||||||
|
const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
|
||||||
|
timeout = timeout == null ? 10e3 : timeout
|
||||||
|
allowH2 = allowH2 != null ? allowH2 : false
|
||||||
|
return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
|
||||||
|
let socket
|
||||||
|
if (protocol === 'https:') {
|
||||||
|
if (!tls) {
|
||||||
|
tls = require('tls')
|
||||||
|
}
|
||||||
|
servername = servername || options.servername || util.getServerName(host) || null
|
||||||
|
|
||||||
|
const sessionKey = servername || hostname
|
||||||
|
const session = sessionCache.get(sessionKey) || null
|
||||||
|
|
||||||
|
assert(sessionKey)
|
||||||
|
|
||||||
|
socket = tls.connect({
|
||||||
|
highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
|
||||||
|
...options,
|
||||||
|
servername,
|
||||||
|
session,
|
||||||
|
localAddress,
|
||||||
|
// TODO(HTTP/2): Add support for h2c
|
||||||
|
ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
|
||||||
|
socket: httpSocket, // upgrade socket connection
|
||||||
|
port: port || 443,
|
||||||
|
host: hostname
|
||||||
|
})
|
||||||
|
|
||||||
|
socket
|
||||||
|
.on('session', function (session) {
|
||||||
|
// TODO (fix): Can a session become invalid once established? Don't think so?
|
||||||
|
sessionCache.set(sessionKey, session)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
assert(!httpSocket, 'httpSocket can only be sent on TLS update')
|
||||||
|
socket = net.connect({
|
||||||
|
highWaterMark: 64 * 1024, // Same as nodejs fs streams.
|
||||||
|
...options,
|
||||||
|
localAddress,
|
||||||
|
port: port || 80,
|
||||||
|
host: hostname
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
|
||||||
|
if (options.keepAlive == null || options.keepAlive) {
|
||||||
|
const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
|
||||||
|
socket.setKeepAlive(true, keepAliveInitialDelay)
|
||||||
|
}
|
||||||
|
|
||||||
|
const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
|
||||||
|
|
||||||
|
socket
|
||||||
|
.setNoDelay(true)
|
||||||
|
.once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
|
||||||
|
cancelTimeout()
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
const cb = callback
|
||||||
|
callback = null
|
||||||
|
cb(null, this)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.on('error', function (err) {
|
||||||
|
cancelTimeout()
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
const cb = callback
|
||||||
|
callback = null
|
||||||
|
cb(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return socket
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function setupTimeout (onConnectTimeout, timeout) {
|
||||||
|
if (!timeout) {
|
||||||
|
return () => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let s1 = null
|
||||||
|
let s2 = null
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
// setImmediate is added to make sure that we priotorise socket error events over timeouts
|
||||||
|
s1 = setImmediate(() => {
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
|
||||||
|
s2 = setImmediate(() => onConnectTimeout())
|
||||||
|
} else {
|
||||||
|
onConnectTimeout()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}, timeout)
|
||||||
|
return () => {
|
||||||
|
clearTimeout(timeoutId)
|
||||||
|
clearImmediate(s1)
|
||||||
|
clearImmediate(s2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function onConnectTimeout (socket) {
|
||||||
|
util.destroy(socket, new ConnectTimeoutError())
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = buildConnector
|
230
node_modules/undici/lib/core/errors.js
generated
vendored
Normal file
230
node_modules/undici/lib/core/errors.js
generated
vendored
Normal file
|
@ -0,0 +1,230 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
class UndiciError extends Error {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
this.name = 'UndiciError'
|
||||||
|
this.code = 'UND_ERR'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ConnectTimeoutError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ConnectTimeoutError)
|
||||||
|
this.name = 'ConnectTimeoutError'
|
||||||
|
this.message = message || 'Connect Timeout Error'
|
||||||
|
this.code = 'UND_ERR_CONNECT_TIMEOUT'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class HeadersTimeoutError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, HeadersTimeoutError)
|
||||||
|
this.name = 'HeadersTimeoutError'
|
||||||
|
this.message = message || 'Headers Timeout Error'
|
||||||
|
this.code = 'UND_ERR_HEADERS_TIMEOUT'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class HeadersOverflowError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, HeadersOverflowError)
|
||||||
|
this.name = 'HeadersOverflowError'
|
||||||
|
this.message = message || 'Headers Overflow Error'
|
||||||
|
this.code = 'UND_ERR_HEADERS_OVERFLOW'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class BodyTimeoutError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, BodyTimeoutError)
|
||||||
|
this.name = 'BodyTimeoutError'
|
||||||
|
this.message = message || 'Body Timeout Error'
|
||||||
|
this.code = 'UND_ERR_BODY_TIMEOUT'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ResponseStatusCodeError extends UndiciError {
|
||||||
|
constructor (message, statusCode, headers, body) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ResponseStatusCodeError)
|
||||||
|
this.name = 'ResponseStatusCodeError'
|
||||||
|
this.message = message || 'Response Status Code Error'
|
||||||
|
this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
|
||||||
|
this.body = body
|
||||||
|
this.status = statusCode
|
||||||
|
this.statusCode = statusCode
|
||||||
|
this.headers = headers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class InvalidArgumentError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, InvalidArgumentError)
|
||||||
|
this.name = 'InvalidArgumentError'
|
||||||
|
this.message = message || 'Invalid Argument Error'
|
||||||
|
this.code = 'UND_ERR_INVALID_ARG'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class InvalidReturnValueError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, InvalidReturnValueError)
|
||||||
|
this.name = 'InvalidReturnValueError'
|
||||||
|
this.message = message || 'Invalid Return Value Error'
|
||||||
|
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class RequestAbortedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, RequestAbortedError)
|
||||||
|
this.name = 'AbortError'
|
||||||
|
this.message = message || 'Request aborted'
|
||||||
|
this.code = 'UND_ERR_ABORTED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class InformationalError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, InformationalError)
|
||||||
|
this.name = 'InformationalError'
|
||||||
|
this.message = message || 'Request information'
|
||||||
|
this.code = 'UND_ERR_INFO'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class RequestContentLengthMismatchError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, RequestContentLengthMismatchError)
|
||||||
|
this.name = 'RequestContentLengthMismatchError'
|
||||||
|
this.message = message || 'Request body length does not match content-length header'
|
||||||
|
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ResponseContentLengthMismatchError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ResponseContentLengthMismatchError)
|
||||||
|
this.name = 'ResponseContentLengthMismatchError'
|
||||||
|
this.message = message || 'Response body length does not match content-length header'
|
||||||
|
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ClientDestroyedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ClientDestroyedError)
|
||||||
|
this.name = 'ClientDestroyedError'
|
||||||
|
this.message = message || 'The client is destroyed'
|
||||||
|
this.code = 'UND_ERR_DESTROYED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ClientClosedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ClientClosedError)
|
||||||
|
this.name = 'ClientClosedError'
|
||||||
|
this.message = message || 'The client is closed'
|
||||||
|
this.code = 'UND_ERR_CLOSED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class SocketError extends UndiciError {
|
||||||
|
constructor (message, socket) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, SocketError)
|
||||||
|
this.name = 'SocketError'
|
||||||
|
this.message = message || 'Socket error'
|
||||||
|
this.code = 'UND_ERR_SOCKET'
|
||||||
|
this.socket = socket
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class NotSupportedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, NotSupportedError)
|
||||||
|
this.name = 'NotSupportedError'
|
||||||
|
this.message = message || 'Not supported error'
|
||||||
|
this.code = 'UND_ERR_NOT_SUPPORTED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class BalancedPoolMissingUpstreamError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, NotSupportedError)
|
||||||
|
this.name = 'MissingUpstreamError'
|
||||||
|
this.message = message || 'No upstream has been added to the BalancedPool'
|
||||||
|
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class HTTPParserError extends Error {
|
||||||
|
constructor (message, code, data) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, HTTPParserError)
|
||||||
|
this.name = 'HTTPParserError'
|
||||||
|
this.code = code ? `HPE_${code}` : undefined
|
||||||
|
this.data = data ? data.toString() : undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ResponseExceededMaxSizeError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ResponseExceededMaxSizeError)
|
||||||
|
this.name = 'ResponseExceededMaxSizeError'
|
||||||
|
this.message = message || 'Response content exceeded max size'
|
||||||
|
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class RequestRetryError extends UndiciError {
|
||||||
|
constructor (message, code, { headers, data }) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, RequestRetryError)
|
||||||
|
this.name = 'RequestRetryError'
|
||||||
|
this.message = message || 'Request retry error'
|
||||||
|
this.code = 'UND_ERR_REQ_RETRY'
|
||||||
|
this.statusCode = code
|
||||||
|
this.data = data
|
||||||
|
this.headers = headers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
HTTPParserError,
|
||||||
|
UndiciError,
|
||||||
|
HeadersTimeoutError,
|
||||||
|
HeadersOverflowError,
|
||||||
|
BodyTimeoutError,
|
||||||
|
RequestContentLengthMismatchError,
|
||||||
|
ConnectTimeoutError,
|
||||||
|
ResponseStatusCodeError,
|
||||||
|
InvalidArgumentError,
|
||||||
|
InvalidReturnValueError,
|
||||||
|
RequestAbortedError,
|
||||||
|
ClientDestroyedError,
|
||||||
|
ClientClosedError,
|
||||||
|
InformationalError,
|
||||||
|
SocketError,
|
||||||
|
NotSupportedError,
|
||||||
|
ResponseContentLengthMismatchError,
|
||||||
|
BalancedPoolMissingUpstreamError,
|
||||||
|
ResponseExceededMaxSizeError,
|
||||||
|
RequestRetryError
|
||||||
|
}
|
499
node_modules/undici/lib/core/request.js
generated
vendored
Normal file
499
node_modules/undici/lib/core/request.js
generated
vendored
Normal file
|
@ -0,0 +1,499 @@
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
NotSupportedError
|
||||||
|
} = require('./errors')
|
||||||
|
const assert = require('assert')
|
||||||
|
const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
|
||||||
|
const util = require('./util')
|
||||||
|
|
||||||
|
// tokenRegExp and headerCharRegex have been lifted from
|
||||||
|
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verifies that the given val is a valid HTTP token
|
||||||
|
* per the rules defined in RFC 7230
|
||||||
|
* See https://tools.ietf.org/html/rfc7230#section-3.2.6
|
||||||
|
*/
|
||||||
|
const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matches if val contains an invalid field-vchar
|
||||||
|
* field-value = *( field-content / obs-fold )
|
||||||
|
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
|
||||||
|
* field-vchar = VCHAR / obs-text
|
||||||
|
*/
|
||||||
|
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
|
||||||
|
|
||||||
|
// Verifies that a given path is valid does not contain control chars \x00 to \x20
|
||||||
|
const invalidPathRegex = /[^\u0021-\u00ff]/
|
||||||
|
|
||||||
|
const kHandler = Symbol('handler')
|
||||||
|
|
||||||
|
const channels = {}
|
||||||
|
|
||||||
|
let extractBody
|
||||||
|
|
||||||
|
try {
|
||||||
|
const diagnosticsChannel = require('diagnostics_channel')
|
||||||
|
channels.create = diagnosticsChannel.channel('undici:request:create')
|
||||||
|
channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
|
||||||
|
channels.headers = diagnosticsChannel.channel('undici:request:headers')
|
||||||
|
channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
|
||||||
|
channels.error = diagnosticsChannel.channel('undici:request:error')
|
||||||
|
} catch {
|
||||||
|
channels.create = { hasSubscribers: false }
|
||||||
|
channels.bodySent = { hasSubscribers: false }
|
||||||
|
channels.headers = { hasSubscribers: false }
|
||||||
|
channels.trailers = { hasSubscribers: false }
|
||||||
|
channels.error = { hasSubscribers: false }
|
||||||
|
}
|
||||||
|
|
||||||
|
class Request {
|
||||||
|
constructor (origin, {
|
||||||
|
path,
|
||||||
|
method,
|
||||||
|
body,
|
||||||
|
headers,
|
||||||
|
query,
|
||||||
|
idempotent,
|
||||||
|
blocking,
|
||||||
|
upgrade,
|
||||||
|
headersTimeout,
|
||||||
|
bodyTimeout,
|
||||||
|
reset,
|
||||||
|
throwOnError,
|
||||||
|
expectContinue
|
||||||
|
}, handler) {
|
||||||
|
if (typeof path !== 'string') {
|
||||||
|
throw new InvalidArgumentError('path must be a string')
|
||||||
|
} else if (
|
||||||
|
path[0] !== '/' &&
|
||||||
|
!(path.startsWith('http://') || path.startsWith('https://')) &&
|
||||||
|
method !== 'CONNECT'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
|
||||||
|
} else if (invalidPathRegex.exec(path) !== null) {
|
||||||
|
throw new InvalidArgumentError('invalid request path')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof method !== 'string') {
|
||||||
|
throw new InvalidArgumentError('method must be a string')
|
||||||
|
} else if (tokenRegExp.exec(method) === null) {
|
||||||
|
throw new InvalidArgumentError('invalid request method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (upgrade && typeof upgrade !== 'string') {
|
||||||
|
throw new InvalidArgumentError('upgrade must be a string')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
|
||||||
|
throw new InvalidArgumentError('invalid headersTimeout')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
|
||||||
|
throw new InvalidArgumentError('invalid bodyTimeout')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (reset != null && typeof reset !== 'boolean') {
|
||||||
|
throw new InvalidArgumentError('invalid reset')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (expectContinue != null && typeof expectContinue !== 'boolean') {
|
||||||
|
throw new InvalidArgumentError('invalid expectContinue')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.headersTimeout = headersTimeout
|
||||||
|
|
||||||
|
this.bodyTimeout = bodyTimeout
|
||||||
|
|
||||||
|
this.throwOnError = throwOnError === true
|
||||||
|
|
||||||
|
this.method = method
|
||||||
|
|
||||||
|
this.abort = null
|
||||||
|
|
||||||
|
if (body == null) {
|
||||||
|
this.body = null
|
||||||
|
} else if (util.isStream(body)) {
|
||||||
|
this.body = body
|
||||||
|
|
||||||
|
const rState = this.body._readableState
|
||||||
|
if (!rState || !rState.autoDestroy) {
|
||||||
|
this.endHandler = function autoDestroy () {
|
||||||
|
util.destroy(this)
|
||||||
|
}
|
||||||
|
this.body.on('end', this.endHandler)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.errorHandler = err => {
|
||||||
|
if (this.abort) {
|
||||||
|
this.abort(err)
|
||||||
|
} else {
|
||||||
|
this.error = err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.body.on('error', this.errorHandler)
|
||||||
|
} else if (util.isBuffer(body)) {
|
||||||
|
this.body = body.byteLength ? body : null
|
||||||
|
} else if (ArrayBuffer.isView(body)) {
|
||||||
|
this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
|
||||||
|
} else if (body instanceof ArrayBuffer) {
|
||||||
|
this.body = body.byteLength ? Buffer.from(body) : null
|
||||||
|
} else if (typeof body === 'string') {
|
||||||
|
this.body = body.length ? Buffer.from(body) : null
|
||||||
|
} else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
|
||||||
|
this.body = body
|
||||||
|
} else {
|
||||||
|
throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.completed = false
|
||||||
|
|
||||||
|
this.aborted = false
|
||||||
|
|
||||||
|
this.upgrade = upgrade || null
|
||||||
|
|
||||||
|
this.path = query ? util.buildURL(path, query) : path
|
||||||
|
|
||||||
|
this.origin = origin
|
||||||
|
|
||||||
|
this.idempotent = idempotent == null
|
||||||
|
? method === 'HEAD' || method === 'GET'
|
||||||
|
: idempotent
|
||||||
|
|
||||||
|
this.blocking = blocking == null ? false : blocking
|
||||||
|
|
||||||
|
this.reset = reset == null ? null : reset
|
||||||
|
|
||||||
|
this.host = null
|
||||||
|
|
||||||
|
this.contentLength = null
|
||||||
|
|
||||||
|
this.contentType = null
|
||||||
|
|
||||||
|
this.headers = ''
|
||||||
|
|
||||||
|
// Only for H2
|
||||||
|
this.expectContinue = expectContinue != null ? expectContinue : false
|
||||||
|
|
||||||
|
if (Array.isArray(headers)) {
|
||||||
|
if (headers.length % 2 !== 0) {
|
||||||
|
throw new InvalidArgumentError('headers array must be even')
|
||||||
|
}
|
||||||
|
for (let i = 0; i < headers.length; i += 2) {
|
||||||
|
processHeader(this, headers[i], headers[i + 1])
|
||||||
|
}
|
||||||
|
} else if (headers && typeof headers === 'object') {
|
||||||
|
const keys = Object.keys(headers)
|
||||||
|
for (let i = 0; i < keys.length; i++) {
|
||||||
|
const key = keys[i]
|
||||||
|
processHeader(this, key, headers[key])
|
||||||
|
}
|
||||||
|
} else if (headers != null) {
|
||||||
|
throw new InvalidArgumentError('headers must be an object or an array')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (util.isFormDataLike(this.body)) {
|
||||||
|
if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
|
||||||
|
throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!extractBody) {
|
||||||
|
extractBody = require('../fetch/body.js').extractBody
|
||||||
|
}
|
||||||
|
|
||||||
|
const [bodyStream, contentType] = extractBody(body)
|
||||||
|
if (this.contentType == null) {
|
||||||
|
this.contentType = contentType
|
||||||
|
this.headers += `content-type: ${contentType}\r\n`
|
||||||
|
}
|
||||||
|
this.body = bodyStream.stream
|
||||||
|
this.contentLength = bodyStream.length
|
||||||
|
} else if (util.isBlobLike(body) && this.contentType == null && body.type) {
|
||||||
|
this.contentType = body.type
|
||||||
|
this.headers += `content-type: ${body.type}\r\n`
|
||||||
|
}
|
||||||
|
|
||||||
|
util.validateHandler(handler, method, upgrade)
|
||||||
|
|
||||||
|
this.servername = util.getServerName(this.host)
|
||||||
|
|
||||||
|
this[kHandler] = handler
|
||||||
|
|
||||||
|
if (channels.create.hasSubscribers) {
|
||||||
|
channels.create.publish({ request: this })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onBodySent (chunk) {
|
||||||
|
if (this[kHandler].onBodySent) {
|
||||||
|
try {
|
||||||
|
return this[kHandler].onBodySent(chunk)
|
||||||
|
} catch (err) {
|
||||||
|
this.abort(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onRequestSent () {
|
||||||
|
if (channels.bodySent.hasSubscribers) {
|
||||||
|
channels.bodySent.publish({ request: this })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this[kHandler].onRequestSent) {
|
||||||
|
try {
|
||||||
|
return this[kHandler].onRequestSent()
|
||||||
|
} catch (err) {
|
||||||
|
this.abort(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
if (this.error) {
|
||||||
|
abort(this.error)
|
||||||
|
} else {
|
||||||
|
this.abort = abort
|
||||||
|
return this[kHandler].onConnect(abort)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, headers, resume, statusText) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
if (channels.headers.hasSubscribers) {
|
||||||
|
channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
|
||||||
|
} catch (err) {
|
||||||
|
this.abort(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
try {
|
||||||
|
return this[kHandler].onData(chunk)
|
||||||
|
} catch (err) {
|
||||||
|
this.abort(err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onUpgrade (statusCode, headers, socket) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
return this[kHandler].onUpgrade(statusCode, headers, socket)
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
this.onFinally()
|
||||||
|
|
||||||
|
assert(!this.aborted)
|
||||||
|
|
||||||
|
this.completed = true
|
||||||
|
if (channels.trailers.hasSubscribers) {
|
||||||
|
channels.trailers.publish({ request: this, trailers })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return this[kHandler].onComplete(trailers)
|
||||||
|
} catch (err) {
|
||||||
|
// TODO (fix): This might be a bad idea?
|
||||||
|
this.onError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (error) {
|
||||||
|
this.onFinally()
|
||||||
|
|
||||||
|
if (channels.error.hasSubscribers) {
|
||||||
|
channels.error.publish({ request: this, error })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.aborted) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
this.aborted = true
|
||||||
|
|
||||||
|
return this[kHandler].onError(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
onFinally () {
|
||||||
|
if (this.errorHandler) {
|
||||||
|
this.body.off('error', this.errorHandler)
|
||||||
|
this.errorHandler = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.endHandler) {
|
||||||
|
this.body.off('end', this.endHandler)
|
||||||
|
this.endHandler = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: adjust to support H2
|
||||||
|
addHeader (key, value) {
|
||||||
|
processHeader(this, key, value)
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
static [kHTTP1BuildRequest] (origin, opts, handler) {
|
||||||
|
// TODO: Migrate header parsing here, to make Requests
|
||||||
|
// HTTP agnostic
|
||||||
|
return new Request(origin, opts, handler)
|
||||||
|
}
|
||||||
|
|
||||||
|
static [kHTTP2BuildRequest] (origin, opts, handler) {
|
||||||
|
const headers = opts.headers
|
||||||
|
opts = { ...opts, headers: null }
|
||||||
|
|
||||||
|
const request = new Request(origin, opts, handler)
|
||||||
|
|
||||||
|
request.headers = {}
|
||||||
|
|
||||||
|
if (Array.isArray(headers)) {
|
||||||
|
if (headers.length % 2 !== 0) {
|
||||||
|
throw new InvalidArgumentError('headers array must be even')
|
||||||
|
}
|
||||||
|
for (let i = 0; i < headers.length; i += 2) {
|
||||||
|
processHeader(request, headers[i], headers[i + 1], true)
|
||||||
|
}
|
||||||
|
} else if (headers && typeof headers === 'object') {
|
||||||
|
const keys = Object.keys(headers)
|
||||||
|
for (let i = 0; i < keys.length; i++) {
|
||||||
|
const key = keys[i]
|
||||||
|
processHeader(request, key, headers[key], true)
|
||||||
|
}
|
||||||
|
} else if (headers != null) {
|
||||||
|
throw new InvalidArgumentError('headers must be an object or an array')
|
||||||
|
}
|
||||||
|
|
||||||
|
return request
|
||||||
|
}
|
||||||
|
|
||||||
|
static [kHTTP2CopyHeaders] (raw) {
|
||||||
|
const rawHeaders = raw.split('\r\n')
|
||||||
|
const headers = {}
|
||||||
|
|
||||||
|
for (const header of rawHeaders) {
|
||||||
|
const [key, value] = header.split(': ')
|
||||||
|
|
||||||
|
if (value == null || value.length === 0) continue
|
||||||
|
|
||||||
|
if (headers[key]) headers[key] += `,${value}`
|
||||||
|
else headers[key] = value
|
||||||
|
}
|
||||||
|
|
||||||
|
return headers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function processHeaderValue (key, val, skipAppend) {
|
||||||
|
if (val && typeof val === 'object') {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
}
|
||||||
|
|
||||||
|
val = val != null ? `${val}` : ''
|
||||||
|
|
||||||
|
if (headerCharRegex.exec(val) !== null) {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return skipAppend ? val : `${key}: ${val}\r\n`
|
||||||
|
}
|
||||||
|
|
||||||
|
function processHeader (request, key, val, skipAppend = false) {
|
||||||
|
if (val && (typeof val === 'object' && !Array.isArray(val))) {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
} else if (val === undefined) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
request.host === null &&
|
||||||
|
key.length === 4 &&
|
||||||
|
key.toLowerCase() === 'host'
|
||||||
|
) {
|
||||||
|
if (headerCharRegex.exec(val) !== null) {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
}
|
||||||
|
// Consumed by Client
|
||||||
|
request.host = val
|
||||||
|
} else if (
|
||||||
|
request.contentLength === null &&
|
||||||
|
key.length === 14 &&
|
||||||
|
key.toLowerCase() === 'content-length'
|
||||||
|
) {
|
||||||
|
request.contentLength = parseInt(val, 10)
|
||||||
|
if (!Number.isFinite(request.contentLength)) {
|
||||||
|
throw new InvalidArgumentError('invalid content-length header')
|
||||||
|
}
|
||||||
|
} else if (
|
||||||
|
request.contentType === null &&
|
||||||
|
key.length === 12 &&
|
||||||
|
key.toLowerCase() === 'content-type'
|
||||||
|
) {
|
||||||
|
request.contentType = val
|
||||||
|
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
|
||||||
|
else request.headers += processHeaderValue(key, val)
|
||||||
|
} else if (
|
||||||
|
key.length === 17 &&
|
||||||
|
key.toLowerCase() === 'transfer-encoding'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('invalid transfer-encoding header')
|
||||||
|
} else if (
|
||||||
|
key.length === 10 &&
|
||||||
|
key.toLowerCase() === 'connection'
|
||||||
|
) {
|
||||||
|
const value = typeof val === 'string' ? val.toLowerCase() : null
|
||||||
|
if (value !== 'close' && value !== 'keep-alive') {
|
||||||
|
throw new InvalidArgumentError('invalid connection header')
|
||||||
|
} else if (value === 'close') {
|
||||||
|
request.reset = true
|
||||||
|
}
|
||||||
|
} else if (
|
||||||
|
key.length === 10 &&
|
||||||
|
key.toLowerCase() === 'keep-alive'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('invalid keep-alive header')
|
||||||
|
} else if (
|
||||||
|
key.length === 7 &&
|
||||||
|
key.toLowerCase() === 'upgrade'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('invalid upgrade header')
|
||||||
|
} else if (
|
||||||
|
key.length === 6 &&
|
||||||
|
key.toLowerCase() === 'expect'
|
||||||
|
) {
|
||||||
|
throw new NotSupportedError('expect header not supported')
|
||||||
|
} else if (tokenRegExp.exec(key) === null) {
|
||||||
|
throw new InvalidArgumentError('invalid header key')
|
||||||
|
} else {
|
||||||
|
if (Array.isArray(val)) {
|
||||||
|
for (let i = 0; i < val.length; i++) {
|
||||||
|
if (skipAppend) {
|
||||||
|
if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
|
||||||
|
else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
|
||||||
|
} else {
|
||||||
|
request.headers += processHeaderValue(key, val[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
|
||||||
|
else request.headers += processHeaderValue(key, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Request
|
63
node_modules/undici/lib/core/symbols.js
generated
vendored
Normal file
63
node_modules/undici/lib/core/symbols.js
generated
vendored
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
module.exports = {
|
||||||
|
kClose: Symbol('close'),
|
||||||
|
kDestroy: Symbol('destroy'),
|
||||||
|
kDispatch: Symbol('dispatch'),
|
||||||
|
kUrl: Symbol('url'),
|
||||||
|
kWriting: Symbol('writing'),
|
||||||
|
kResuming: Symbol('resuming'),
|
||||||
|
kQueue: Symbol('queue'),
|
||||||
|
kConnect: Symbol('connect'),
|
||||||
|
kConnecting: Symbol('connecting'),
|
||||||
|
kHeadersList: Symbol('headers list'),
|
||||||
|
kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
|
||||||
|
kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
|
||||||
|
kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
|
||||||
|
kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
|
||||||
|
kKeepAlive: Symbol('keep alive'),
|
||||||
|
kHeadersTimeout: Symbol('headers timeout'),
|
||||||
|
kBodyTimeout: Symbol('body timeout'),
|
||||||
|
kServerName: Symbol('server name'),
|
||||||
|
kLocalAddress: Symbol('local address'),
|
||||||
|
kHost: Symbol('host'),
|
||||||
|
kNoRef: Symbol('no ref'),
|
||||||
|
kBodyUsed: Symbol('used'),
|
||||||
|
kRunning: Symbol('running'),
|
||||||
|
kBlocking: Symbol('blocking'),
|
||||||
|
kPending: Symbol('pending'),
|
||||||
|
kSize: Symbol('size'),
|
||||||
|
kBusy: Symbol('busy'),
|
||||||
|
kQueued: Symbol('queued'),
|
||||||
|
kFree: Symbol('free'),
|
||||||
|
kConnected: Symbol('connected'),
|
||||||
|
kClosed: Symbol('closed'),
|
||||||
|
kNeedDrain: Symbol('need drain'),
|
||||||
|
kReset: Symbol('reset'),
|
||||||
|
kDestroyed: Symbol.for('nodejs.stream.destroyed'),
|
||||||
|
kMaxHeadersSize: Symbol('max headers size'),
|
||||||
|
kRunningIdx: Symbol('running index'),
|
||||||
|
kPendingIdx: Symbol('pending index'),
|
||||||
|
kError: Symbol('error'),
|
||||||
|
kClients: Symbol('clients'),
|
||||||
|
kClient: Symbol('client'),
|
||||||
|
kParser: Symbol('parser'),
|
||||||
|
kOnDestroyed: Symbol('destroy callbacks'),
|
||||||
|
kPipelining: Symbol('pipelining'),
|
||||||
|
kSocket: Symbol('socket'),
|
||||||
|
kHostHeader: Symbol('host header'),
|
||||||
|
kConnector: Symbol('connector'),
|
||||||
|
kStrictContentLength: Symbol('strict content length'),
|
||||||
|
kMaxRedirections: Symbol('maxRedirections'),
|
||||||
|
kMaxRequests: Symbol('maxRequestsPerClient'),
|
||||||
|
kProxy: Symbol('proxy agent options'),
|
||||||
|
kCounter: Symbol('socket request counter'),
|
||||||
|
kInterceptors: Symbol('dispatch interceptors'),
|
||||||
|
kMaxResponseSize: Symbol('max response size'),
|
||||||
|
kHTTP2Session: Symbol('http2Session'),
|
||||||
|
kHTTP2SessionState: Symbol('http2Session state'),
|
||||||
|
kHTTP2BuildRequest: Symbol('http2 build request'),
|
||||||
|
kHTTP1BuildRequest: Symbol('http1 build request'),
|
||||||
|
kHTTP2CopyHeaders: Symbol('http2 copy headers'),
|
||||||
|
kHTTPConnVersion: Symbol('http connection version'),
|
||||||
|
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
|
||||||
|
kConstruct: Symbol('constructable')
|
||||||
|
}
|
511  node_modules/undici/lib/core/util.js  generated  vendored  Normal file
@@ -0,0 +1,511 @@
'use strict'

const assert = require('assert')
const { kDestroyed, kBodyUsed } = require('./symbols')
const { IncomingMessage } = require('http')
const stream = require('stream')
const net = require('net')
const { InvalidArgumentError } = require('./errors')
const { Blob } = require('buffer')
const nodeUtil = require('util')
const { stringify } = require('querystring')

const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))

function nop () {}

function isStream (obj) {
  return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
}

// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
function isBlobLike (object) {
  return (Blob && object instanceof Blob) || (
    object &&
    typeof object === 'object' &&
    (typeof object.stream === 'function' ||
      typeof object.arrayBuffer === 'function') &&
    /^(Blob|File)$/.test(object[Symbol.toStringTag])
  )
}

function buildURL (url, queryParams) {
  if (url.includes('?') || url.includes('#')) {
    throw new Error('Query params cannot be passed when url already contains "?" or "#".')
  }

  const stringified = stringify(queryParams)

  if (stringified) {
    url += '?' + stringified
  }

  return url
}

function parseURL (url) {
  if (typeof url === 'string') {
    url = new URL(url)

    if (!/^https?:/.test(url.origin || url.protocol)) {
      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
    }

    return url
  }

  if (!url || typeof url !== 'object') {
    throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
  }

  if (!/^https?:/.test(url.origin || url.protocol)) {
    throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
  }

  if (!(url instanceof URL)) {
    if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
      throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
    }

    if (url.path != null && typeof url.path !== 'string') {
      throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
    }

    if (url.pathname != null && typeof url.pathname !== 'string') {
      throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
    }

    if (url.hostname != null && typeof url.hostname !== 'string') {
      throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
    }

    if (url.origin != null && typeof url.origin !== 'string') {
      throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
    }

    const port = url.port != null
      ? url.port
      : (url.protocol === 'https:' ? 443 : 80)
    let origin = url.origin != null
      ? url.origin
      : `${url.protocol}//${url.hostname}:${port}`
    let path = url.path != null
      ? url.path
      : `${url.pathname || ''}${url.search || ''}`

    if (origin.endsWith('/')) {
      origin = origin.substring(0, origin.length - 1)
    }

    if (path && !path.startsWith('/')) {
      path = `/${path}`
    }
    // new URL(path, origin) is unsafe when `path` contains an absolute URL
    // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
    // If first parameter is a relative URL, second param is required, and will be used as the base URL.
    // If first parameter is an absolute URL, a given second param will be ignored.
    url = new URL(origin + path)
  }

  return url
}

function parseOrigin (url) {
  url = parseURL(url)

  if (url.pathname !== '/' || url.search || url.hash) {
    throw new InvalidArgumentError('invalid url')
  }

  return url
}

function getHostname (host) {
  if (host[0] === '[') {
    const idx = host.indexOf(']')

    assert(idx !== -1)
    return host.substring(1, idx)
  }

  const idx = host.indexOf(':')
  if (idx === -1) return host

  return host.substring(0, idx)
}

// IP addresses are not valid server names per RFC6066
// > Currently, the only server names supported are DNS hostnames
function getServerName (host) {
  if (!host) {
    return null
  }

  assert.strictEqual(typeof host, 'string')

  const servername = getHostname(host)
  if (net.isIP(servername)) {
    return ''
  }

  return servername
}

function deepClone (obj) {
  return JSON.parse(JSON.stringify(obj))
}

function isAsyncIterable (obj) {
  return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
}

function isIterable (obj) {
  return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
}

function bodyLength (body) {
  if (body == null) {
    return 0
  } else if (isStream(body)) {
    const state = body._readableState
    return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
      ? state.length
      : null
  } else if (isBlobLike(body)) {
    return body.size != null ? body.size : null
  } else if (isBuffer(body)) {
    return body.byteLength
  }

  return null
}

function isDestroyed (stream) {
  return !stream || !!(stream.destroyed || stream[kDestroyed])
}

function isReadableAborted (stream) {
  const state = stream && stream._readableState
  return isDestroyed(stream) && state && !state.endEmitted
}

function destroy (stream, err) {
  if (stream == null || !isStream(stream) || isDestroyed(stream)) {
    return
  }

  if (typeof stream.destroy === 'function') {
    if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
      // See: https://github.com/nodejs/node/pull/38505/files
      stream.socket = null
    }

    stream.destroy(err)
  } else if (err) {
    process.nextTick((stream, err) => {
      stream.emit('error', err)
    }, stream, err)
  }

  if (stream.destroyed !== true) {
    stream[kDestroyed] = true
  }
}

const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
function parseKeepAliveTimeout (val) {
  const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
  return m ? parseInt(m[1], 10) * 1000 : null
}

function parseHeaders (headers, obj = {}) {
  // For H2 support
  if (!Array.isArray(headers)) return headers

  for (let i = 0; i < headers.length; i += 2) {
    const key = headers[i].toString().toLowerCase()
    let val = obj[key]

    if (!val) {
      if (Array.isArray(headers[i + 1])) {
        obj[key] = headers[i + 1].map(x => x.toString('utf8'))
      } else {
        obj[key] = headers[i + 1].toString('utf8')
      }
    } else {
      if (!Array.isArray(val)) {
        val = [val]
        obj[key] = val
      }
      val.push(headers[i + 1].toString('utf8'))
    }
  }

  // See https://github.com/nodejs/node/pull/46528
  if ('content-length' in obj && 'content-disposition' in obj) {
    obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
  }

  return obj
}

function parseRawHeaders (headers) {
  const ret = []
  let hasContentLength = false
  let contentDispositionIdx = -1

  for (let n = 0; n < headers.length; n += 2) {
    const key = headers[n + 0].toString()
    const val = headers[n + 1].toString('utf8')

    if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
      ret.push(key, val)
      hasContentLength = true
    } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
      contentDispositionIdx = ret.push(key, val) - 1
    } else {
      ret.push(key, val)
    }
  }

  // See https://github.com/nodejs/node/pull/46528
  if (hasContentLength && contentDispositionIdx !== -1) {
    ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
  }

  return ret
}

function isBuffer (buffer) {
  // See, https://github.com/mcollina/undici/pull/319
  return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
}

function validateHandler (handler, method, upgrade) {
  if (!handler || typeof handler !== 'object') {
    throw new InvalidArgumentError('handler must be an object')
  }

  if (typeof handler.onConnect !== 'function') {
    throw new InvalidArgumentError('invalid onConnect method')
  }

  if (typeof handler.onError !== 'function') {
    throw new InvalidArgumentError('invalid onError method')
  }

  if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
    throw new InvalidArgumentError('invalid onBodySent method')
  }

  if (upgrade || method === 'CONNECT') {
    if (typeof handler.onUpgrade !== 'function') {
      throw new InvalidArgumentError('invalid onUpgrade method')
    }
  } else {
    if (typeof handler.onHeaders !== 'function') {
      throw new InvalidArgumentError('invalid onHeaders method')
    }

    if (typeof handler.onData !== 'function') {
      throw new InvalidArgumentError('invalid onData method')
    }

    if (typeof handler.onComplete !== 'function') {
      throw new InvalidArgumentError('invalid onComplete method')
    }
  }
}

// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
function isDisturbed (body) {
  return !!(body && (
    stream.isDisturbed
      ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
      : body[kBodyUsed] ||
        body.readableDidRead ||
        (body._readableState && body._readableState.dataEmitted) ||
        isReadableAborted(body)
  ))
}

function isErrored (body) {
  return !!(body && (
    stream.isErrored
      ? stream.isErrored(body)
      : /state: 'errored'/.test(nodeUtil.inspect(body)
      )))
}

function isReadable (body) {
  return !!(body && (
    stream.isReadable
      ? stream.isReadable(body)
      : /state: 'readable'/.test(nodeUtil.inspect(body)
      )))
}

function getSocketInfo (socket) {
  return {
    localAddress: socket.localAddress,
    localPort: socket.localPort,
    remoteAddress: socket.remoteAddress,
    remotePort: socket.remotePort,
    remoteFamily: socket.remoteFamily,
    timeout: socket.timeout,
    bytesWritten: socket.bytesWritten,
    bytesRead: socket.bytesRead
  }
}

async function * convertIterableToBuffer (iterable) {
  for await (const chunk of iterable) {
    yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
  }
}

let ReadableStream
function ReadableStreamFrom (iterable) {
  if (!ReadableStream) {
    ReadableStream = require('stream/web').ReadableStream
  }

  if (ReadableStream.from) {
    return ReadableStream.from(convertIterableToBuffer(iterable))
  }

  let iterator
  return new ReadableStream(
    {
      async start () {
        iterator = iterable[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { done, value } = await iterator.next()
        if (done) {
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
          controller.enqueue(new Uint8Array(buf))
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      }
    },
    0
  )
}

// The chunk should be a FormData instance and contains
// all the required methods.
function isFormDataLike (object) {
  return (
    object &&
    typeof object === 'object' &&
    typeof object.append === 'function' &&
    typeof object.delete === 'function' &&
    typeof object.get === 'function' &&
    typeof object.getAll === 'function' &&
    typeof object.has === 'function' &&
    typeof object.set === 'function' &&
    object[Symbol.toStringTag] === 'FormData'
  )
}

function throwIfAborted (signal) {
  if (!signal) { return }
  if (typeof signal.throwIfAborted === 'function') {
    signal.throwIfAborted()
  } else {
    if (signal.aborted) {
      // DOMException not available < v17.0.0
      const err = new Error('The operation was aborted')
      err.name = 'AbortError'
      throw err
    }
  }
}

function addAbortListener (signal, listener) {
  if ('addEventListener' in signal) {
    signal.addEventListener('abort', listener, { once: true })
    return () => signal.removeEventListener('abort', listener)
  }
  signal.addListener('abort', listener)
  return () => signal.removeListener('abort', listener)
}

const hasToWellFormed = !!String.prototype.toWellFormed

/**
 * @param {string} val
 */
function toUSVString (val) {
  if (hasToWellFormed) {
    return `${val}`.toWellFormed()
  } else if (nodeUtil.toUSVString) {
    return nodeUtil.toUSVString(val)
  }

  return `${val}`
}

// Parsed accordingly to RFC 9110
// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
function parseRangeHeader (range) {
  if (range == null || range === '') return { start: 0, end: null, size: null }

  const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
  return m
    ? {
        start: parseInt(m[1]),
        end: m[2] ? parseInt(m[2]) : null,
        size: m[3] ? parseInt(m[3]) : null
      }
    : null
}

const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true

module.exports = {
  kEnumerableProperty,
  nop,
  isDisturbed,
  isErrored,
  isReadable,
  toUSVString,
  isReadableAborted,
  isBlobLike,
  parseOrigin,
  parseURL,
  getServerName,
  isStream,
  isIterable,
  isAsyncIterable,
  isDestroyed,
  parseRawHeaders,
  parseHeaders,
  parseKeepAliveTimeout,
  destroy,
  bodyLength,
  deepClone,
  ReadableStreamFrom,
  isBuffer,
  validateHandler,
  getSocketInfo,
  isFormDataLike,
  buildURL,
  throwIfAborted,
  addAbortListener,
  parseRangeHeader,
  nodeMajor,
  nodeMinor,
  nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
  safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
}
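A minimal usage sketch (illustrative, not part of the vendored file above): assuming the module is required from the path shown in the file header, a few of its exported helpers behave as follows.

    const util = require('./node_modules/undici/lib/core/util')
    util.parseKeepAliveTimeout('timeout=5, max=1000')  // 5000 (milliseconds)
    util.getServerName('example.com:8080')             // 'example.com'
    util.getServerName('127.0.0.1:80')                 // '' (IP literals are not valid SNI server names)
    util.buildURL('/items', { page: 2 })               // '/items?page=2'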
192  node_modules/undici/lib/dispatcher-base.js  generated  vendored  Normal file
@@ -0,0 +1,192 @@
'use strict'

const Dispatcher = require('./dispatcher')
const {
  ClientDestroyedError,
  ClientClosedError,
  InvalidArgumentError
} = require('./core/errors')
const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')

const kDestroyed = Symbol('destroyed')
const kClosed = Symbol('closed')
const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
const kInterceptedDispatch = Symbol('Intercepted Dispatch')

class DispatcherBase extends Dispatcher {
  constructor () {
    super()

    this[kDestroyed] = false
    this[kOnDestroyed] = null
    this[kClosed] = false
    this[kOnClosed] = []
  }

  get destroyed () {
    return this[kDestroyed]
  }

  get closed () {
    return this[kClosed]
  }

  get interceptors () {
    return this[kInterceptors]
  }

  set interceptors (newInterceptors) {
    if (newInterceptors) {
      for (let i = newInterceptors.length - 1; i >= 0; i--) {
        const interceptor = this[kInterceptors][i]
        if (typeof interceptor !== 'function') {
          throw new InvalidArgumentError('interceptor must be an function')
        }
      }
    }

    this[kInterceptors] = newInterceptors
  }

  close (callback) {
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.close((err, data) => {
          return err ? reject(err) : resolve(data)
        })
      })
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    if (this[kDestroyed]) {
      queueMicrotask(() => callback(new ClientDestroyedError(), null))
      return
    }

    if (this[kClosed]) {
      if (this[kOnClosed]) {
        this[kOnClosed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }

    this[kClosed] = true
    this[kOnClosed].push(callback)

    const onClosed = () => {
      const callbacks = this[kOnClosed]
      this[kOnClosed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }

    // Should not error.
    this[kClose]()
      .then(() => this.destroy())
      .then(() => {
        queueMicrotask(onClosed)
      })
  }

  destroy (err, callback) {
    if (typeof err === 'function') {
      callback = err
      err = null
    }

    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.destroy(err, (err, data) => {
          return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
        })
      })
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    if (this[kDestroyed]) {
      if (this[kOnDestroyed]) {
        this[kOnDestroyed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }

    if (!err) {
      err = new ClientDestroyedError()
    }

    this[kDestroyed] = true
    this[kOnDestroyed] = this[kOnDestroyed] || []
    this[kOnDestroyed].push(callback)

    const onDestroyed = () => {
      const callbacks = this[kOnDestroyed]
      this[kOnDestroyed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }

    // Should not error.
    this[kDestroy](err).then(() => {
      queueMicrotask(onDestroyed)
    })
  }

  [kInterceptedDispatch] (opts, handler) {
    if (!this[kInterceptors] || this[kInterceptors].length === 0) {
      this[kInterceptedDispatch] = this[kDispatch]
      return this[kDispatch](opts, handler)
    }

    let dispatch = this[kDispatch].bind(this)
    for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
      dispatch = this[kInterceptors][i](dispatch)
    }
    this[kInterceptedDispatch] = dispatch
    return dispatch(opts, handler)
  }

  dispatch (opts, handler) {
    if (!handler || typeof handler !== 'object') {
      throw new InvalidArgumentError('handler must be an object')
    }

    try {
      if (!opts || typeof opts !== 'object') {
        throw new InvalidArgumentError('opts must be an object.')
      }

      if (this[kDestroyed] || this[kOnDestroyed]) {
        throw new ClientDestroyedError()
      }

      if (this[kClosed]) {
        throw new ClientClosedError()
      }

      return this[kInterceptedDispatch](opts, handler)
    } catch (err) {
      if (typeof handler.onError !== 'function') {
        throw new InvalidArgumentError('invalid onError method')
      }

      handler.onError(err)

      return false
    }
  }
}

module.exports = DispatcherBase
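A minimal sketch (illustrative, not part of the vendored file above) of how the interceptor chain in [kInterceptedDispatch] composes: interceptors are applied right-to-left around the base dispatch, so each interceptor receives the next dispatch function and returns a new one.

    // hypothetical logging interceptor, following the (dispatch) => (opts, handler) shape used above
    const logging = (dispatch) => (opts, handler) => {
      console.log(`${opts.method} ${opts.origin}${opts.path}`)  // runs before the wrapped dispatch
      return dispatch(opts, handler)
    }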
19  node_modules/undici/lib/dispatcher.js  generated  vendored  Normal file
@@ -0,0 +1,19 @@
'use strict'

const EventEmitter = require('events')

class Dispatcher extends EventEmitter {
  dispatch () {
    throw new Error('not implemented')
  }

  close () {
    throw new Error('not implemented')
  }

  destroy () {
    throw new Error('not implemented')
  }
}

module.exports = Dispatcher
21  node_modules/undici/lib/fetch/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2020 Ethan Arrowood

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
605  node_modules/undici/lib/fetch/body.js  generated  vendored  Normal file
@@ -0,0 +1,605 @@
'use strict'

const Busboy = require('@fastify/busboy')
const util = require('../core/util')
const {
  ReadableStreamFrom,
  isBlobLike,
  isReadableStreamLike,
  readableStreamClose,
  createDeferredPromise,
  fullyReadBody
} = require('./util')
const { FormData } = require('./formdata')
const { kState } = require('./symbols')
const { webidl } = require('./webidl')
const { DOMException, structuredClone } = require('./constants')
const { Blob, File: NativeFile } = require('buffer')
const { kBodyUsed } = require('../core/symbols')
const assert = require('assert')
const { isErrored } = require('../core/util')
const { isUint8Array, isArrayBuffer } = require('util/types')
const { File: UndiciFile } = require('./file')
const { parseMIMEType, serializeAMimeType } = require('./dataURL')

let ReadableStream = globalThis.ReadableStream

/** @type {globalThis['File']} */
const File = NativeFile ?? UndiciFile
const textEncoder = new TextEncoder()
const textDecoder = new TextDecoder()

// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
function extractBody (object, keepalive = false) {
  if (!ReadableStream) {
    ReadableStream = require('stream/web').ReadableStream
  }

  // 1. Let stream be null.
  let stream = null

  // 2. If object is a ReadableStream object, then set stream to object.
  if (object instanceof ReadableStream) {
    stream = object
  } else if (isBlobLike(object)) {
    // 3. Otherwise, if object is a Blob object, set stream to the
    //    result of running object’s get stream.
    stream = object.stream()
  } else {
    // 4. Otherwise, set stream to a new ReadableStream object, and set
    //    up stream.
    stream = new ReadableStream({
      async pull (controller) {
        controller.enqueue(
          typeof source === 'string' ? textEncoder.encode(source) : source
        )
        queueMicrotask(() => readableStreamClose(controller))
      },
      start () {},
      type: undefined
    })
  }

  // 5. Assert: stream is a ReadableStream object.
  assert(isReadableStreamLike(stream))

  // 6. Let action be null.
  let action = null

  // 7. Let source be null.
  let source = null

  // 8. Let length be null.
  let length = null

  // 9. Let type be null.
  let type = null

  // 10. Switch on object:
  if (typeof object === 'string') {
    // Set source to the UTF-8 encoding of object.
    // Note: setting source to a Uint8Array here breaks some mocking assumptions.
    source = object

    // Set type to `text/plain;charset=UTF-8`.
    type = 'text/plain;charset=UTF-8'
  } else if (object instanceof URLSearchParams) {
    // URLSearchParams

    // spec says to run application/x-www-form-urlencoded on body.list
    // this is implemented in Node.js as apart of an URLSearchParams instance toString method
    // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
    // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100

    // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.
    source = object.toString()

    // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
    type = 'application/x-www-form-urlencoded;charset=UTF-8'
  } else if (isArrayBuffer(object)) {
    // BufferSource/ArrayBuffer

    // Set source to a copy of the bytes held by object.
    source = new Uint8Array(object.slice())
  } else if (ArrayBuffer.isView(object)) {
    // BufferSource/ArrayBufferView

    // Set source to a copy of the bytes held by object.
    source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
  } else if (util.isFormDataLike(object)) {
    const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
    const prefix = `--${boundary}\r\nContent-Disposition: form-data`

    /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
    const escape = (str) =>
      str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
    const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')

    // Set action to this step: run the multipart/form-data
    // encoding algorithm, with object’s entry list and UTF-8.
    // - This ensures that the body is immutable and can't be changed afterwords
    // - That the content-length is calculated in advance.
    // - And that all parts are pre-encoded and ready to be sent.

    const blobParts = []
    const rn = new Uint8Array([13, 10]) // '\r\n'
    length = 0
    let hasUnknownSizeValue = false

    for (const [name, value] of object) {
      if (typeof value === 'string') {
        const chunk = textEncoder.encode(prefix +
          `; name="${escape(normalizeLinefeeds(name))}"` +
          `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
        blobParts.push(chunk)
        length += chunk.byteLength
      } else {
        const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
          (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
          `Content-Type: ${
            value.type || 'application/octet-stream'
          }\r\n\r\n`)
        blobParts.push(chunk, value, rn)
        if (typeof value.size === 'number') {
          length += chunk.byteLength + value.size + rn.byteLength
        } else {
          hasUnknownSizeValue = true
        }
      }
    }

    const chunk = textEncoder.encode(`--${boundary}--`)
    blobParts.push(chunk)
    length += chunk.byteLength
    if (hasUnknownSizeValue) {
      length = null
    }

    // Set source to object.
    source = object

    action = async function * () {
      for (const part of blobParts) {
        if (part.stream) {
          yield * part.stream()
        } else {
          yield part
        }
      }
    }

    // Set type to `multipart/form-data; boundary=`,
    // followed by the multipart/form-data boundary string generated
    // by the multipart/form-data encoding algorithm.
    type = 'multipart/form-data; boundary=' + boundary
  } else if (isBlobLike(object)) {
    // Blob

    // Set source to object.
    source = object

    // Set length to object’s size.
    length = object.size

    // If object’s type attribute is not the empty byte sequence, set
    //    type to its value.
    if (object.type) {
      type = object.type
    }
  } else if (typeof object[Symbol.asyncIterator] === 'function') {
    // If keepalive is true, then throw a TypeError.
    if (keepalive) {
      throw new TypeError('keepalive')
    }

    // If object is disturbed or locked, then throw a TypeError.
    if (util.isDisturbed(object) || object.locked) {
      throw new TypeError(
        'Response body object should not be disturbed or locked'
      )
    }

    stream =
      object instanceof ReadableStream ? object : ReadableStreamFrom(object)
  }

  // 11. If source is a byte sequence, then set action to a
  //     step that returns source and length to source’s length.
  if (typeof source === 'string' || util.isBuffer(source)) {
    length = Buffer.byteLength(source)
  }

  // 12. If action is non-null, then run these steps in in parallel:
  if (action != null) {
    // Run action.
    let iterator
    stream = new ReadableStream({
      async start () {
        iterator = action(object)[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { value, done } = await iterator.next()
        if (done) {
          // When running action is done, close stream.
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          // Whenever one or more bytes are available and stream is not errored,
          // enqueue a Uint8Array wrapping an ArrayBuffer containing the available
          // bytes into stream.
          if (!isErrored(stream)) {
            controller.enqueue(new Uint8Array(value))
          }
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      },
      type: undefined
    })
  }

  // 13. Let body be a body whose stream is stream, source is source,
  //     and length is length.
  const body = { stream, source, length }

  // 14. Return (body, type).
  return [body, type]
}

// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
function safelyExtractBody (object, keepalive = false) {
  if (!ReadableStream) {
    // istanbul ignore next
    ReadableStream = require('stream/web').ReadableStream
  }

  // To safely extract a body and a `Content-Type` value from
  // a byte sequence or BodyInit object object, run these steps:

  // 1. If object is a ReadableStream object, then:
  if (object instanceof ReadableStream) {
    // Assert: object is neither disturbed nor locked.
    // istanbul ignore next
    assert(!util.isDisturbed(object), 'The body has already been consumed.')
    // istanbul ignore next
    assert(!object.locked, 'The stream is locked.')
  }

  // 2. Return the results of extracting object.
  return extractBody(object, keepalive)
}

function cloneBody (body) {
  // To clone a body body, run these steps:

  // https://fetch.spec.whatwg.org/#concept-body-clone

  // 1. Let « out1, out2 » be the result of teeing body’s stream.
  const [out1, out2] = body.stream.tee()
  const out2Clone = structuredClone(out2, { transfer: [out2] })
  // This, for whatever reasons, unrefs out2Clone which allows
  // the process to exit by itself.
  const [, finalClone] = out2Clone.tee()

  // 2. Set body’s stream to out1.
  body.stream = out1

  // 3. Return a body whose stream is out2 and other members are copied from body.
  return {
    stream: finalClone,
    length: body.length,
    source: body.source
  }
}

async function * consumeBody (body) {
  if (body) {
    if (isUint8Array(body)) {
      yield body
    } else {
      const stream = body.stream

      if (util.isDisturbed(stream)) {
        throw new TypeError('The body has already been consumed.')
      }

      if (stream.locked) {
        throw new TypeError('The stream is locked.')
      }

      // Compat.
      stream[kBodyUsed] = true

      yield * stream
    }
  }
}

function throwIfAborted (state) {
  if (state.aborted) {
    throw new DOMException('The operation was aborted.', 'AbortError')
  }
}

function bodyMixinMethods (instance) {
  const methods = {
    blob () {
      // The blob() method steps are to return the result of
      // running consume body with this and the following step
      // given a byte sequence bytes: return a Blob whose
      // contents are bytes and whose type attribute is this’s
      // MIME type.
      return specConsumeBody(this, (bytes) => {
        let mimeType = bodyMimeType(this)

        if (mimeType === 'failure') {
          mimeType = ''
        } else if (mimeType) {
          mimeType = serializeAMimeType(mimeType)
        }

        // Return a Blob whose contents are bytes and type attribute
        // is mimeType.
        return new Blob([bytes], { type: mimeType })
      }, instance)
    },

    arrayBuffer () {
      // The arrayBuffer() method steps are to return the result
      // of running consume body with this and the following step
      // given a byte sequence bytes: return a new ArrayBuffer
      // whose contents are bytes.
      return specConsumeBody(this, (bytes) => {
        return new Uint8Array(bytes).buffer
      }, instance)
    },

    text () {
      // The text() method steps are to return the result of running
      // consume body with this and UTF-8 decode.
      return specConsumeBody(this, utf8DecodeBytes, instance)
    },

    json () {
      // The json() method steps are to return the result of running
      // consume body with this and parse JSON from bytes.
      return specConsumeBody(this, parseJSONFromBytes, instance)
    },

    async formData () {
      webidl.brandCheck(this, instance)

      throwIfAborted(this[kState])

      const contentType = this.headers.get('Content-Type')

      // If mimeType’s essence is "multipart/form-data", then:
      if (/multipart\/form-data/.test(contentType)) {
        const headers = {}
        for (const [key, value] of this.headers) headers[key.toLowerCase()] = value

        const responseFormData = new FormData()

        let busboy

        try {
          busboy = new Busboy({
            headers,
            preservePath: true
          })
        } catch (err) {
          throw new DOMException(`${err}`, 'AbortError')
        }

        busboy.on('field', (name, value) => {
          responseFormData.append(name, value)
        })
        busboy.on('file', (name, value, filename, encoding, mimeType) => {
          const chunks = []

          if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
            let base64chunk = ''

            value.on('data', (chunk) => {
              base64chunk += chunk.toString().replace(/[\r\n]/gm, '')

              const end = base64chunk.length - base64chunk.length % 4
              chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))

              base64chunk = base64chunk.slice(end)
            })
            value.on('end', () => {
              chunks.push(Buffer.from(base64chunk, 'base64'))
              responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
            })
          } else {
            value.on('data', (chunk) => {
              chunks.push(chunk)
            })
            value.on('end', () => {
              responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
            })
          }
        })

        const busboyResolve = new Promise((resolve, reject) => {
          busboy.on('finish', resolve)
          busboy.on('error', (err) => reject(new TypeError(err)))
        })

        if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)
        busboy.end()
        await busboyResolve

        return responseFormData
      } else if (/application\/x-www-form-urlencoded/.test(contentType)) {
        // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then:

        // 1. Let entries be the result of parsing bytes.
        let entries
        try {
          let text = ''
          // application/x-www-form-urlencoded parser will keep the BOM.
          // https://url.spec.whatwg.org/#concept-urlencoded-parser
          // Note that streaming decoder is stateful and cannot be reused
          const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })

          for await (const chunk of consumeBody(this[kState].body)) {
            if (!isUint8Array(chunk)) {
              throw new TypeError('Expected Uint8Array chunk')
            }
            text += streamingDecoder.decode(chunk, { stream: true })
          }
          text += streamingDecoder.decode()
          entries = new URLSearchParams(text)
        } catch (err) {
          // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
          // 2. If entries is failure, then throw a TypeError.
          throw Object.assign(new TypeError(), { cause: err })
        }

        // 3. Return a new FormData object whose entries are entries.
        const formData = new FormData()
        for (const [name, value] of entries) {
          formData.append(name, value)
        }
        return formData
      } else {
        // Wait a tick before checking if the request has been aborted.
        // Otherwise, a TypeError can be thrown when an AbortError should.
        await Promise.resolve()

        throwIfAborted(this[kState])

        // Otherwise, throw a TypeError.
        throw webidl.errors.exception({
          header: `${instance.name}.formData`,
          message: 'Could not parse content as FormData.'
        })
      }
    }
  }

  return methods
}

function mixinBody (prototype) {
  Object.assign(prototype.prototype, bodyMixinMethods(prototype))
}

/**
 * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
 * @param {Response|Request} object
 * @param {(value: unknown) => unknown} convertBytesToJSValue
 * @param {Response|Request} instance
 */
async function specConsumeBody (object, convertBytesToJSValue, instance) {
  webidl.brandCheck(object, instance)

  throwIfAborted(object[kState])

  // 1. If object is unusable, then return a promise rejected
  //    with a TypeError.
  if (bodyUnusable(object[kState].body)) {
    throw new TypeError('Body is unusable')
  }

  // 2. Let promise be a new promise.
  const promise = createDeferredPromise()

  // 3. Let errorSteps given error be to reject promise with error.
  const errorSteps = (error) => promise.reject(error)

  // 4. Let successSteps given a byte sequence data be to resolve
  //    promise with the result of running convertBytesToJSValue
  //    with data. If that threw an exception, then run errorSteps
  //    with that exception.
  const successSteps = (data) => {
    try {
      promise.resolve(convertBytesToJSValue(data))
    } catch (e) {
      errorSteps(e)
    }
  }

  // 5. If object’s body is null, then run successSteps with an
  //    empty byte sequence.
  if (object[kState].body == null) {
    successSteps(new Uint8Array())
    return promise.promise
  }

  // 6. Otherwise, fully read object’s body given successSteps,
  //    errorSteps, and object’s relevant global object.
  await fullyReadBody(object[kState].body, successSteps, errorSteps)

  // 7. Return promise.
  return promise.promise
}

// https://fetch.spec.whatwg.org/#body-unusable
function bodyUnusable (body) {
  // An object including the Body interface mixin is
  // said to be unusable if its body is non-null and
  // its body’s stream is disturbed or locked.
  return body != null && (body.stream.locked || util.isDisturbed(body.stream))
}

/**
 * @see https://encoding.spec.whatwg.org/#utf-8-decode
 * @param {Buffer} buffer
 */
function utf8DecodeBytes (buffer) {
  if (buffer.length === 0) {
    return ''
  }

  // 1. Let buffer be the result of peeking three bytes from
  //    ioQueue, converted to a byte sequence.

  // 2. If buffer is 0xEF 0xBB 0xBF, then read three
  //    bytes from ioQueue. (Do nothing with those bytes.)
  if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {
    buffer = buffer.subarray(3)
  }

  // 3. Process a queue with an instance of UTF-8’s
  //    decoder, ioQueue, output, and "replacement".
  const output = textDecoder.decode(buffer)

  // 4. Return output.
  return output
}

/**
 * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
 * @param {Uint8Array} bytes
 */
function parseJSONFromBytes (bytes) {
  return JSON.parse(utf8DecodeBytes(bytes))
}

/**
 * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
 * @param {import('./response').Response|import('./request').Request} object
 */
function bodyMimeType (object) {
  const { headersList } = object[kState]
  const contentType = headersList.get('content-type')

  if (contentType === null) {
    return 'failure'
  }

  return parseMIMEType(contentType)
}

module.exports = {
  extractBody,
  safelyExtractBody,
  cloneBody,
  mixinBody
}
Some files were not shown because too many files have changed in this diff.