Mirror of https://github.com/actions/setup-python.git (synced 2024-11-22 14:48:36 +00:00)

This reverts commit bdd6409dc1.

parent: bdd6409dc1
commit: 948e5343c7

187 changed files with 15033 additions and 6120 deletions
12  .github/workflows/lint-yaml.yml (vendored)
@@ -1,12 +0,0 @@
name: Lint YAML
on: [pull_request]
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@master
      - name: Lint action.yml
        uses: ibiqlik/action-yamllint@master
        with:
          file_or_dir: action.yml
          config_file: yaml-lint-config.yml
4  .github/workflows/workflow.yml (vendored)
@@ -1,5 +1,5 @@
name: Main workflow
on: [push, pull_request]
on: [push]
jobs:
  run:
    name: Run
@@ -14,7 +14,7 @@ jobs:
      - name: Set Node.js 10.x
        uses: actions/setup-node@master
        with:
          node-version: 10.x
          version: 10.x

      - name: npm install
        run: npm install
7  .gitignore (vendored)
@@ -1,10 +1,7 @@
# Ignore node_modules, ncc is used to compile nodejs modules into a single file in the releases branch
node_modules/
# Explicitly not ignoring node_modules so that they are included in package downloaded by runner
!node_modules/
__tests__/runner/*

# Ignore js files that are transpiled from ts files in src/
lib/

# Rest of the file pulled from https://github.com/github/gitignore/blob/master/Node.gitignore
# Logs
logs
77  README.md
@@ -16,11 +16,11 @@ See [action.yml](action.yml)
Basic:
```yaml
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@master
- uses: actions/setup-python@v1
  with:
    python-version: '3.x' # Version range or exact version of a Python version to use, using SemVer's version range syntax
    architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
    python-version: '3.x' # Version range or exact version of a Python version to use, using semvers version range syntax.
    architecture: 'x64' # (x64 or x86)
- run: python my_script.py
```

@@ -28,13 +28,13 @@ Matrix Testing:
```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    runs-on: ubuntu-16.04
    strategy:
      matrix:
        python-version: [ '2.x', '3.x', 'pypy2', 'pypy3' ]
    name: Python ${{ matrix.python-version }} sample
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@master
      - name: Setup python
        uses: actions/setup-python@v1
        with:
@@ -43,73 +43,6 @@ jobs:
      - run: python my_script.py
```

Exclude a specific Python version:
```yaml
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [2.7, 3.6, 3.7, 3.8, pypy2, pypy3]
        exclude:
          - os: macos-latest
            python-version: 3.8
          - os: windows-latest
            python-version: 3.6
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
```

# Getting started with Python + Actions

Check out our detailed guide on using [Python with GitHub Actions](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-python-with-github-actions).

# Hosted Tool Cache

GitHub hosted runners have a tools cache that comes with Python + PyPy already installed. This tools cache helps speed up runs and tool setup by not requiring any new downloads. There is an environment variable called `RUNNER_TOOL_CACHE` on each runner that describes the location of this tools cache and there is where you will find Python and PyPy installed. `setup-python` works by taking a specific version of Python or PyPy in this tools cache and adding it to PATH.

|| Location |
|------|-------|
|**Tool Cache Directory** |`RUNNER_TOOL_CACHE`|
|**Python Tool Cache**|`RUNNER_TOOL_CACHE/Python/*`|
|**PyPy Tool Cache**|`RUNNER_TOOL_CACHE/PyPy/*`|

GitHub virtual environments are setup in [actions/virtual-environments](https://github.com/actions/virtual-environments). During the setup, the available versions of Python and PyPy are automatically downloaded, setup and documented.
- [Tools cache setup for Ubuntu](https://github.com/actions/virtual-environments/blob/master/images/linux/scripts/installers/hosted-tool-cache.sh)
- [Tools cache setup for Windows](https://github.com/actions/virtual-environments/blob/master/images/win/scripts/Installers/Download-ToolCache.ps1)

# Using `setup-python` with a self hosted runner

If you would like to use `setup-python` on a self-hosted runner, you will need to download all versions of Python & PyPy that you would like and setup a similar tools cache locally for your runner.

- Create an global environment variable called `AGENT_TOOLSDIRECTORY` that will point to the root directory of where you want the tools installed. The env variable is preferrably global as it must be set in the shell that will install the tools cache, along with the shell that the runner will be using.
- This env variable is used internally by the runner to set the `RUNNER_TOOL_CACHE` env variable
- Example for Administrator Powershell: `[System.Environment]::SetEnvironmentVariable("AGENT_TOOLSDIRECTORY", "C:\hostedtoolcache\windows", [System.EnvironmentVariableTarget]::Machine)` (restart the shell afterwards)
- Download the appropriate NPM packages from the [GitHub Actions NPM registry](https://github.com/orgs/actions/packages)
- Make sure to have `npm` installed, and then [configure npm for use with GitHub packages](https://help.github.com/en/packages/using-github-packages-with-your-projects-ecosystem/configuring-npm-for-use-with-github-packages#authenticating-to-github-package-registry)
- Create an empty npm project for easier installation (`npm init`) in the tools cache directory. You can delete `package.json`, `package.lock.json` and `node_modules` after all tools get installed
- Before downloading a specific package, create an empty folder for the version of Python/PyPY that is being installed. If downloading Python 3.6.8 for example, create `C:\hostedtoolcache\windows\Python\3.6.8`
- Once configured, download a specific package by calling `npm install`. Note (if downloading a PyPy package on Windows, you will need 7zip installed along with `7z.exe` added to your PATH)
- Each NPM package has multiple versions that determine the version of Python or PyPy that should be installed.
- `npm install @actions/toolcache-python-windows-x64@3.7.61579791175` for example installs Python 3.7.6 while `npm install @actions/toolcache-python-windows-x64@3.6.81579791177` installs Python 3.6.8
- You can browse and find all available versions of a package by searching the GitHub Actions NPM registry
![image](https://user-images.githubusercontent.com/16109154/76194005-87aeb400-61e5-11ea-9b21-ef9111247f84.png)

# Using Python without `setup-python`

`setup-python` helps keep your dependencies explicit and ensures consistent behavior between different runners. If you use `python` in a shell on a GitHub hosted runner without `setup-python` it will default to whatever is in PATH. The default version of Python in PATH vary between runners and can change unexpectedly so we recommend you always use `setup-python`.

# Available versions of Python

For detailed information regarding the available versions of Python that are installed see [Software installed on GitHub-hosted runners](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/software-installed-on-github-hosted-runners)

# License

The scripts and documentation in this project are released under the [MIT License](LICENSE)
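Note: the tool-cache layout documented above is the same one `lib/find-python.js` (added later in this diff) queries through `@actions/tool-cache`. A minimal sketch of that lookup, assuming `@actions/tool-cache` is installed locally; the version spec and the example path in the comment are illustrative, not taken from the diff:

```js
// Hypothetical lookup against the hosted tool cache described above.
// tc.find(tool, versionSpec, arch) returns the cached install directory, or '' when nothing matches.
const tc = require('@actions/tool-cache');

const installDir = tc.find('Python', '3.x', 'x64');
if (installDir) {
  // e.g. .../hostedtoolcache/Python/3.7.6/x64 on a hosted runner (illustrative path)
  console.log(`Python resolved from the tool cache: ${installDir}`);
} else {
  console.log('No cached Python matched the 3.x spec');
}
```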
13  action.yml
@@ -1,20 +1,13 @@
---
name: 'Setup Python'
description: 'Set up a specific version of Python and add the command-line tools to the PATH.'
description: 'Set up a specific version of Python and add the command-line tools to the PATH'
author: 'GitHub'
inputs:
  python-version:
    description: "Version range or exact version of a Python version to use, using SemVer's version range syntax."
    description: 'Version range or exact version of a Python version to use, using semvers version range syntax.'
    default: '3.x'
  architecture:
    description: 'The target architecture (x86, x64) of the Python interpreter.'
    default: 'x64'
outputs:
  python-version:
    description: "The installed python version. Useful when given a version range as input."
runs:
  using: 'node12'
  main: 'dist/index.js'
branding:
  icon: 'code'
  color: 'yellow'
  main: 'lib/setup-python.js'
4926  dist/index.js (vendored)
File diff suppressed because it is too large
@@ -2,30 +2,21 @@

### Checkin

- Do check in source (`src/`)
- Do check in a single `index.js` file after running `ncc`
- Do not check in `node_modules/`
- Do checkin source (src)
- Do checkin build output (lib)
- Do checkin runtime node_modules
- Do not checkin devDependency node_modules (husky can help see below)

### NCC
### devDependencies

In order to avoid uploading `node_modules/` to the repository, we use [zeit/ncc](https://github.com/zeit/ncc) to create a single `index.js` file that gets saved in `dist/`.
In order to handle correctly checking in node_modules without devDependencies, we run [Husky](https://github.com/typicode/husky) before each commit.
This step ensures that formatting and checkin rules are followed and that devDependencies are excluded. To make sure Husky runs correctly, please use the following workflow:

### Developing

If you're developing locally, you can run
```
npm install
tsc
ncc build
npm install # installs all devDependencies including Husky
git add abc.ext # Add the files you've changed. This should include files in src, lib, and node_modules (see above)
git commit -m "Informative commit message" # Commit. This will run Husky
```
Any files generated using `tsc` will be added to `lib/`, however those files also are not uploaded to the repository and are exluded using `.gitignore`.

During the commit step, Husky will take care of formatting all files with [Prettier](https://github.com/prettier/prettier)

### Testing

We ask that you include a link to a successful run that utilizes the changes you are working on. For example, if your changes are in the branch `newAwesomeFeature`, then show an example run that uses `setup-python@newAwesomeFeature` or `my-fork@newAwesomeFeature`. This will help speed up testing and help us confirm that there are no breaking changes or bugs.

### Releases

There is a `master` branch where contributor changes are merged into. There are also release branches such as `releases/v1` that are used for tagging (for example the `v1` tag) and publishing new versions of the action. Changes from `master` are periodically merged into a releases branch. You do not need to create any PR that merges changes from master into a releases branch.
During the commit step, Husky will take care of formatting all files with [Prettier](https://github.com/prettier/prettier) as well as pruning out devDependencies using `npm prune --production`.
It will also make sure these changes are appropriately included in your commit (no further work is needed)
159  lib/find-python.js (new file)
@@ -0,0 +1,159 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const semver = __importStar(require("semver"));
|
||||
let cacheDirectory = process.env['RUNNER_TOOLSDIRECTORY'] || '';
|
||||
if (!cacheDirectory) {
|
||||
let baseLocation;
|
||||
if (process.platform === 'win32') {
|
||||
// On windows use the USERPROFILE env variable
|
||||
baseLocation = process.env['USERPROFILE'] || 'C:\\';
|
||||
}
|
||||
else {
|
||||
if (process.platform === 'darwin') {
|
||||
baseLocation = '/Users';
|
||||
}
|
||||
else {
|
||||
baseLocation = '/home';
|
||||
}
|
||||
}
|
||||
cacheDirectory = path.join(baseLocation, 'actions', 'cache');
|
||||
}
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const tc = __importStar(require("@actions/tool-cache"));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
// Python has "scripts" or "bin" directories where command-line tools that come with packages are installed.
|
||||
// This is where pip is, along with anything that pip installs.
|
||||
// There is a seperate directory for `pip install --user`.
|
||||
//
|
||||
// For reference, these directories are as follows:
|
||||
// macOS / Linux:
|
||||
// <sys.prefix>/bin (by default /usr/local/bin, but not on hosted agents -- see the `else`)
|
||||
// (--user) ~/.local/bin
|
||||
// Windows:
|
||||
// <Python installation dir>\Scripts
|
||||
// (--user) %APPDATA%\Python\PythonXY\Scripts
|
||||
// See https://docs.python.org/3/library/sysconfig.html
|
||||
function binDir(installDir) {
|
||||
if (IS_WINDOWS) {
|
||||
return path.join(installDir, 'Scripts');
|
||||
}
|
||||
else {
|
||||
return path.join(installDir, 'bin');
|
||||
}
|
||||
}
|
||||
// Note on the tool cache layout for PyPy:
|
||||
// PyPy has its own versioning scheme that doesn't follow the Python versioning scheme.
|
||||
// A particular version of PyPy may contain one or more versions of the Python interpreter.
|
||||
// For example, PyPy 7.0 contains Python 2.7, 3.5, and 3.6-alpha.
|
||||
// We only care about the Python version, so we don't use the PyPy version for the tool cache.
|
||||
function usePyPy(majorVersion, architecture) {
|
||||
const findPyPy = tc.find.bind(undefined, 'PyPy', majorVersion.toString());
|
||||
let installDir = findPyPy(architecture);
|
||||
if (!installDir && IS_WINDOWS) {
|
||||
// PyPy only precompiles binaries for x86, but the architecture parameter defaults to x64.
|
||||
// On Hosted VS2017, we only install an x86 version.
|
||||
// Fall back to x86.
|
||||
installDir = findPyPy('x86');
|
||||
}
|
||||
if (!installDir) {
|
||||
// PyPy not installed in $(Agent.ToolsDirectory)
|
||||
throw new Error(`PyPy ${majorVersion} not found`);
|
||||
}
|
||||
// For PyPy, Windows uses 'bin', not 'Scripts'.
|
||||
const _binDir = path.join(installDir, 'bin');
|
||||
// On Linux and macOS, the Python interpreter is in 'bin'.
|
||||
// On Windows, it is in the installation root.
|
||||
const pythonLocation = IS_WINDOWS ? installDir : _binDir;
|
||||
core.exportVariable('pythonLocation', pythonLocation);
|
||||
core.addPath(installDir);
|
||||
core.addPath(_binDir);
|
||||
}
|
||||
function useCpythonVersion(version, architecture) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const desugaredVersionSpec = desugarDevVersion(version);
|
||||
const semanticVersionSpec = pythonVersionToSemantic(desugaredVersionSpec);
|
||||
core.debug(`Semantic version spec of ${version} is ${semanticVersionSpec}`);
|
||||
const installDir = tc.find('Python', semanticVersionSpec, architecture);
|
||||
if (!installDir) {
|
||||
// Fail and list available versions
|
||||
const x86Versions = tc
|
||||
.findAllVersions('Python', 'x86')
|
||||
.map(s => `${s} (x86)`)
|
||||
.join(os.EOL);
|
||||
const x64Versions = tc
|
||||
.findAllVersions('Python', 'x64')
|
||||
.map(s => `${s} (x64)`)
|
||||
.join(os.EOL);
|
||||
throw new Error([
|
||||
`Version ${version} with arch ${architecture} not found`,
|
||||
'Available versions:',
|
||||
x86Versions,
|
||||
x64Versions
|
||||
].join(os.EOL));
|
||||
}
|
||||
core.exportVariable('pythonLocation', installDir);
|
||||
core.addPath(installDir);
|
||||
core.addPath(binDir(installDir));
|
||||
if (IS_WINDOWS) {
|
||||
// Add --user directory
|
||||
// `installDir` from tool cache should look like $AGENT_TOOLSDIRECTORY/Python/<semantic version>/x64/
|
||||
// So if `findLocalTool` succeeded above, we must have a conformant `installDir`
|
||||
const version = path.basename(path.dirname(installDir));
|
||||
const major = semver.major(version);
|
||||
const minor = semver.minor(version);
|
||||
const userScriptsDir = path.join(process.env['APPDATA'] || '', 'Python', `Python${major}${minor}`, 'Scripts');
|
||||
core.addPath(userScriptsDir);
|
||||
}
|
||||
// On Linux and macOS, pip will create the --user directory and add it to PATH as needed.
|
||||
});
|
||||
}
|
||||
/** Convert versions like `3.8-dev` to a version like `>= 3.8.0-a0`. */
|
||||
function desugarDevVersion(versionSpec) {
|
||||
if (versionSpec.endsWith('-dev')) {
|
||||
const versionRoot = versionSpec.slice(0, -'-dev'.length);
|
||||
return `>= ${versionRoot}.0-a0`;
|
||||
}
|
||||
else {
|
||||
return versionSpec;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Python's prelease versions look like `3.7.0b2`.
|
||||
* This is the one part of Python versioning that does not look like semantic versioning, which specifies `3.7.0-b2`.
|
||||
* If the version spec contains prerelease versions, we need to convert them to the semantic version equivalent.
|
||||
*/
|
||||
function pythonVersionToSemantic(versionSpec) {
|
||||
const prereleaseVersion = /(\d+\.\d+\.\d+)((?:a|b|rc)\d*)/g;
|
||||
return versionSpec.replace(prereleaseVersion, '$1-$2');
|
||||
}
|
||||
exports.pythonVersionToSemantic = pythonVersionToSemantic;
|
||||
function findPythonVersion(version, architecture) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
switch (version.toUpperCase()) {
|
||||
case 'PYPY2':
|
||||
return usePyPy(2, architecture);
|
||||
case 'PYPY3':
|
||||
return usePyPy(3, architecture);
|
||||
default:
|
||||
return yield useCpythonVersion(version, architecture);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.findPythonVersion = findPythonVersion;
|
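Note on the two helpers above: `desugarDevVersion` and `pythonVersionToSemantic` are what let a Python-style version spec match semver ranges in the tool cache. A small sketch of their effect, reusing the regex from the file and the `semver` package it already imports; the sample versions are illustrative:

```js
// Sketch of the spec translation performed by lib/find-python.js above.
const semver = require('semver');

// Same regex as pythonVersionToSemantic: 3.7.0b2 -> 3.7.0-b2 (Python prerelease -> semver prerelease)
const toSemantic = spec => spec.replace(/(\d+\.\d+\.\d+)((?:a|b|rc)\d*)/g, '$1-$2');

// Same idea as desugarDevVersion: 3.8-dev -> >= 3.8.0-a0
const desugarDev = spec =>
  spec.endsWith('-dev') ? `>= ${spec.slice(0, -'-dev'.length)}.0-a0` : spec;

console.log(toSemantic('3.7.0b2'));                              // 3.7.0-b2
console.log(desugarDev('3.8-dev'));                              // >= 3.8.0-a0
console.log(semver.satisfies('3.8.0-a1', desugarDev('3.8-dev'))); // true
```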
37  lib/setup-python.js (new file)
@@ -0,0 +1,37 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const finder = __importStar(require("./find-python"));
|
||||
const path = __importStar(require("path"));
|
||||
function run() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
let version = core.getInput('python-version');
|
||||
if (version) {
|
||||
const arch = core.getInput('architecture', { required: true });
|
||||
yield finder.findPythonVersion(version, arch);
|
||||
}
|
||||
const matchersPath = path.join(__dirname, '..', '.github');
|
||||
console.log(`##[add-matcher]${path.join(matchersPath, 'python.json')}`);
|
||||
}
|
||||
catch (err) {
|
||||
core.setFailed(err.message);
|
||||
}
|
||||
});
|
||||
}
|
||||
run();
|
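For context on how the entry point above receives its two inputs: `core.getInput` (shown in `node_modules/@actions/core/lib/core.js` further down) reads `INPUT_*` environment variables that the runner sets from the workflow's `with:` block. A rough local-run sketch under that assumption; the values and the require path are illustrative:

```js
// Illustrative only: emulate what the runner does before launching the entry point.
// getInput('python-version') reads process.env['INPUT_PYTHON-VERSION'] (name uppercased, hyphen kept).
process.env['INPUT_PYTHON-VERSION'] = '3.x';
process.env['INPUT_ARCHITECTURE'] = 'x64';

// With those set, requiring the entry point would run the same code path as on a runner,
// provided a matching Python build already exists in the local tool cache.
// require('./lib/setup-python');
```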
15  node_modules/.bin/semver (generated, vendored, new file)
@@ -0,0 +1,15 @@
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
"$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
|
||||
ret=$?
|
||||
else
|
||||
node "$basedir/../semver/bin/semver.js" "$@"
|
||||
ret=$?
|
||||
fi
|
||||
exit $ret
|
7  node_modules/.bin/semver.cmd (generated, vendored, new file)
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
|
||||
"%~dp0\node.exe" "%~dp0\..\semver\bin\semver.js" %*
|
||||
) ELSE (
|
||||
@SETLOCAL
|
||||
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
node "%~dp0\..\semver\bin\semver.js" %*
|
||||
)
|
15  node_modules/.bin/uuid (generated, vendored, new file)
@@ -0,0 +1,15 @@
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
"$basedir/node" "$basedir/../uuid/bin/uuid" "$@"
|
||||
ret=$?
|
||||
else
|
||||
node "$basedir/../uuid/bin/uuid" "$@"
|
||||
ret=$?
|
||||
fi
|
||||
exit $ret
|
7  node_modules/.bin/uuid.cmd (generated, vendored, new file)
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
|
||||
"%~dp0\node.exe" "%~dp0\..\uuid\bin\uuid" %*
|
||||
) ELSE (
|
||||
@SETLOCAL
|
||||
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
node "%~dp0\..\uuid\bin\uuid" %*
|
||||
)
|
81  node_modules/@actions/core/README.md (generated, vendored, new file)
@@ -0,0 +1,81 @@
# `@actions/core`
|
||||
|
||||
> Core functions for setting results, logging, registering secrets and exporting variables across actions
|
||||
|
||||
## Usage
|
||||
|
||||
#### Inputs/Outputs
|
||||
|
||||
You can use this library to get inputs or set outputs:
|
||||
|
||||
```
|
||||
const core = require('@actions/core');
|
||||
|
||||
const myInput = core.getInput('inputName', { required: true });
|
||||
|
||||
// Do stuff
|
||||
|
||||
core.setOutput('outputKey', 'outputVal');
|
||||
```
|
||||
|
||||
#### Exporting variables/secrets
|
||||
|
||||
You can also export variables and secrets for future steps. Variables get set in the environment automatically, while secrets must be scoped into the environment from a workflow using `{{ secret.FOO }}`. Secrets will also be masked from the logs:
|
||||
|
||||
```
|
||||
const core = require('@actions/core');
|
||||
|
||||
// Do stuff
|
||||
|
||||
core.exportVariable('envVar', 'Val');
|
||||
core.exportSecret('secretVar', variableWithSecretValue);
|
||||
```
|
||||
|
||||
#### PATH Manipulation
|
||||
|
||||
You can explicitly add items to the path for all remaining steps in a workflow:
|
||||
|
||||
```
|
||||
const core = require('@actions/core');
|
||||
|
||||
core.addPath('pathToTool');
|
||||
```
|
||||
|
||||
#### Exit codes
|
||||
|
||||
You should use this library to set the failing exit code for your action:
|
||||
|
||||
```
|
||||
const core = require('@actions/core');
|
||||
|
||||
try {
|
||||
// Do stuff
|
||||
}
|
||||
catch (err) {
|
||||
// setFailed logs the message and sets a failing exit code
|
||||
core.setFailed(`Action failed with error ${err}`);
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
#### Logging
|
||||
|
||||
Finally, this library provides some utilities for logging:
|
||||
|
||||
```
|
||||
const core = require('@actions/core');
|
||||
|
||||
const myInput = core.getInput('input');
|
||||
try {
|
||||
core.debug('Inside try block');
|
||||
|
||||
if (!myInput) {
|
||||
core.warning('myInput wasnt set');
|
||||
}
|
||||
|
||||
// Do stuff
|
||||
}
|
||||
catch (err) {
|
||||
core.error('Error ${err}, action may still succeed though');
|
||||
}
|
||||
```
|
16  node_modules/@actions/core/lib/command.d.ts (generated, vendored, new file)
@@ -0,0 +1,16 @@
interface CommandProperties {
|
||||
[key: string]: string;
|
||||
}
|
||||
/**
|
||||
* Commands
|
||||
*
|
||||
* Command Format:
|
||||
* ##[name key=value;key=value]message
|
||||
*
|
||||
* Examples:
|
||||
* ##[warning]This is the user warning message
|
||||
* ##[set-secret name=mypassword]definatelyNotAPassword!
|
||||
*/
|
||||
export declare function issueCommand(command: string, properties: CommandProperties, message: string): void;
|
||||
export declare function issue(name: string, message: string): void;
|
||||
export {};
|
66  node_modules/@actions/core/lib/command.js (generated, vendored, new file)
@@ -0,0 +1,66 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const os = require("os");
|
||||
/**
|
||||
* Commands
|
||||
*
|
||||
* Command Format:
|
||||
* ##[name key=value;key=value]message
|
||||
*
|
||||
* Examples:
|
||||
* ##[warning]This is the user warning message
|
||||
* ##[set-secret name=mypassword]definatelyNotAPassword!
|
||||
*/
|
||||
function issueCommand(command, properties, message) {
|
||||
const cmd = new Command(command, properties, message);
|
||||
process.stdout.write(cmd.toString() + os.EOL);
|
||||
}
|
||||
exports.issueCommand = issueCommand;
|
||||
function issue(name, message) {
|
||||
issueCommand(name, {}, message);
|
||||
}
|
||||
exports.issue = issue;
|
||||
const CMD_PREFIX = '##[';
|
||||
class Command {
|
||||
constructor(command, properties, message) {
|
||||
if (!command) {
|
||||
command = 'missing.command';
|
||||
}
|
||||
this.command = command;
|
||||
this.properties = properties;
|
||||
this.message = message;
|
||||
}
|
||||
toString() {
|
||||
let cmdStr = CMD_PREFIX + this.command;
|
||||
if (this.properties && Object.keys(this.properties).length > 0) {
|
||||
cmdStr += ' ';
|
||||
for (const key in this.properties) {
|
||||
if (this.properties.hasOwnProperty(key)) {
|
||||
const val = this.properties[key];
|
||||
if (val) {
|
||||
// safely append the val - avoid blowing up when attempting to
|
||||
// call .replace() if message is not a string for some reason
|
||||
cmdStr += `${key}=${escape(`${val || ''}`)};`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cmdStr += ']';
|
||||
// safely append the message - avoid blowing up when attempting to
|
||||
// call .replace() if message is not a string for some reason
|
||||
const message = `${this.message || ''}`;
|
||||
cmdStr += escapeData(message);
|
||||
return cmdStr;
|
||||
}
|
||||
}
|
||||
function escapeData(s) {
|
||||
return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
|
||||
}
|
||||
function escape(s) {
|
||||
return s
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A')
|
||||
.replace(/]/g, '%5D')
|
||||
.replace(/;/g, '%3B');
|
||||
}
|
||||
//# sourceMappingURL=command.js.map
|
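A quick illustration of the escaping rules in `command.js` above: newlines in the message are percent-encoded so a multi-line value still travels as a single `##[...]` line, and property values additionally encode `]` and `;` so they cannot break the command syntax. The sample strings are made up:

```js
// Mirrors escapeData/escape from command.js above.
const escapeData = s => s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
const escape = s =>
  s.replace(/\r/g, '%0D').replace(/\n/g, '%0A').replace(/]/g, '%5D').replace(/;/g, '%3B');

console.log(escapeData('line one\nline two')); // line one%0Aline two
console.log(escape('name]with;specials'));     // name%5Dwith%3Bspecials
```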
1  node_modules/@actions/core/lib/command.js.map (generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;AAAA,yBAAwB;AAQxB;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAe;IAEf,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAe;IACjD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,KAAK,CAAA;AAExB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,8DAA8D;wBAC9D,6DAA6D;wBAC7D,MAAM,IAAI,GAAG,GAAG,IAAI,MAAM,CAAC,GAAG,GAAG,IAAI,EAAE,EAAE,CAAC,GAAG,CAAA;qBAC9C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,CAAA;QAEb,kEAAkE;QAClE,6DAA6D;QAC7D,MAAM,OAAO,GAAG,GAAG,IAAI,CAAC,OAAO,IAAI,EAAE,EAAE,CAAA;QACvC,MAAM,IAAI,UAAU,CAAC,OAAO,CAAC,CAAA;QAE7B,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAS;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AACtD,CAAC;AAED,SAAS,MAAM,CAAC,CAAS;IACvB,OAAO,CAAC;SACL,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}
|
73  node_modules/@actions/core/lib/core.d.ts (generated, vendored, new file)
@@ -0,0 +1,73 @@
/**
|
||||
* Interface for getInput options
|
||||
*/
|
||||
export interface InputOptions {
|
||||
/** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */
|
||||
required?: boolean;
|
||||
}
|
||||
/**
|
||||
* The code to exit an action
|
||||
*/
|
||||
export declare enum ExitCode {
|
||||
/**
|
||||
* A code indicating that the action was successful
|
||||
*/
|
||||
Success = 0,
|
||||
/**
|
||||
* A code indicating that the action was a failure
|
||||
*/
|
||||
Failure = 1
|
||||
}
|
||||
/**
|
||||
* sets env variable for this action and future actions in the job
|
||||
* @param name the name of the variable to set
|
||||
* @param val the value of the variable
|
||||
*/
|
||||
export declare function exportVariable(name: string, val: string): void;
|
||||
/**
|
||||
* exports the variable and registers a secret which will get masked from logs
|
||||
* @param name the name of the variable to set
|
||||
* @param val value of the secret
|
||||
*/
|
||||
export declare function exportSecret(name: string, val: string): void;
|
||||
/**
|
||||
* Prepends inputPath to the PATH (for this action and future actions)
|
||||
* @param inputPath
|
||||
*/
|
||||
export declare function addPath(inputPath: string): void;
|
||||
/**
|
||||
* Gets the value of an input. The value is also trimmed.
|
||||
*
|
||||
* @param name name of the input to get
|
||||
* @param options optional. See InputOptions.
|
||||
* @returns string
|
||||
*/
|
||||
export declare function getInput(name: string, options?: InputOptions): string;
|
||||
/**
|
||||
* Sets the value of an output.
|
||||
*
|
||||
* @param name name of the output to set
|
||||
* @param value value to store
|
||||
*/
|
||||
export declare function setOutput(name: string, value: string): void;
|
||||
/**
|
||||
* Sets the action status to failed.
|
||||
* When the action exits it will be with an exit code of 1
|
||||
* @param message add error issue message
|
||||
*/
|
||||
export declare function setFailed(message: string): void;
|
||||
/**
|
||||
* Writes debug message to user log
|
||||
* @param message debug message
|
||||
*/
|
||||
export declare function debug(message: string): void;
|
||||
/**
|
||||
* Adds an error issue
|
||||
* @param message error issue message
|
||||
*/
|
||||
export declare function error(message: string): void;
|
||||
/**
|
||||
* Adds an warning issue
|
||||
* @param message warning issue message
|
||||
*/
|
||||
export declare function warning(message: string): void;
|
116  node_modules/@actions/core/lib/core.js (generated, vendored, new file)
@@ -0,0 +1,116 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const command_1 = require("./command");
|
||||
const path = require("path");
|
||||
/**
|
||||
* The code to exit an action
|
||||
*/
|
||||
var ExitCode;
|
||||
(function (ExitCode) {
|
||||
/**
|
||||
* A code indicating that the action was successful
|
||||
*/
|
||||
ExitCode[ExitCode["Success"] = 0] = "Success";
|
||||
/**
|
||||
* A code indicating that the action was a failure
|
||||
*/
|
||||
ExitCode[ExitCode["Failure"] = 1] = "Failure";
|
||||
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
||||
//-----------------------------------------------------------------------
|
||||
// Variables
|
||||
//-----------------------------------------------------------------------
|
||||
/**
|
||||
* sets env variable for this action and future actions in the job
|
||||
* @param name the name of the variable to set
|
||||
* @param val the value of the variable
|
||||
*/
|
||||
function exportVariable(name, val) {
|
||||
process.env[name] = val;
|
||||
command_1.issueCommand('set-env', { name }, val);
|
||||
}
|
||||
exports.exportVariable = exportVariable;
|
||||
/**
|
||||
* exports the variable and registers a secret which will get masked from logs
|
||||
* @param name the name of the variable to set
|
||||
* @param val value of the secret
|
||||
*/
|
||||
function exportSecret(name, val) {
|
||||
exportVariable(name, val);
|
||||
command_1.issueCommand('set-secret', {}, val);
|
||||
}
|
||||
exports.exportSecret = exportSecret;
|
||||
/**
|
||||
* Prepends inputPath to the PATH (for this action and future actions)
|
||||
* @param inputPath
|
||||
*/
|
||||
function addPath(inputPath) {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
||||
}
|
||||
exports.addPath = addPath;
|
||||
/**
|
||||
* Gets the value of an input. The value is also trimmed.
|
||||
*
|
||||
* @param name name of the input to get
|
||||
* @param options optional. See InputOptions.
|
||||
* @returns string
|
||||
*/
|
||||
function getInput(name, options) {
|
||||
const val = process.env[`INPUT_${name.replace(' ', '_').toUpperCase()}`] || '';
|
||||
if (options && options.required && !val) {
|
||||
throw new Error(`Input required and not supplied: ${name}`);
|
||||
}
|
||||
return val.trim();
|
||||
}
|
||||
exports.getInput = getInput;
|
||||
/**
|
||||
* Sets the value of an output.
|
||||
*
|
||||
* @param name name of the output to set
|
||||
* @param value value to store
|
||||
*/
|
||||
function setOutput(name, value) {
|
||||
command_1.issueCommand('set-output', { name }, value);
|
||||
}
|
||||
exports.setOutput = setOutput;
|
||||
//-----------------------------------------------------------------------
|
||||
// Results
|
||||
//-----------------------------------------------------------------------
|
||||
/**
|
||||
* Sets the action status to failed.
|
||||
* When the action exits it will be with an exit code of 1
|
||||
* @param message add error issue message
|
||||
*/
|
||||
function setFailed(message) {
|
||||
process.exitCode = ExitCode.Failure;
|
||||
error(message);
|
||||
}
|
||||
exports.setFailed = setFailed;
|
||||
//-----------------------------------------------------------------------
|
||||
// Logging Commands
|
||||
//-----------------------------------------------------------------------
|
||||
/**
|
||||
* Writes debug message to user log
|
||||
* @param message debug message
|
||||
*/
|
||||
function debug(message) {
|
||||
command_1.issueCommand('debug', {}, message);
|
||||
}
|
||||
exports.debug = debug;
|
||||
/**
|
||||
* Adds an error issue
|
||||
* @param message error issue message
|
||||
*/
|
||||
function error(message) {
|
||||
command_1.issue('error', message);
|
||||
}
|
||||
exports.error = error;
|
||||
/**
|
||||
* Adds an warning issue
|
||||
* @param message warning issue message
|
||||
*/
|
||||
function warning(message) {
|
||||
command_1.issue('warning', message);
|
||||
}
|
||||
exports.warning = warning;
|
||||
//# sourceMappingURL=core.js.map
|
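The functions in `core.js` above all funnel through `issueCommand`, which prints `##[...]`-prefixed lines to stdout for the runner to interpret. A sketch of what a few calls emit, with the output traced from the `Command.toString` implementation shown above; the variable names, paths, and values are illustrative:

```js
const core = require('@actions/core');

core.exportVariable('PY_HOME', '/opt/python'); // prints ##[set-env name=PY_HOME;]/opt/python and sets process.env.PY_HOME
core.addPath('/opt/python/bin');               // prints ##[add-path]/opt/python/bin and prepends to process.env.PATH
core.setOutput('python-version', '3.7.6');     // prints ##[set-output name=python-version;]3.7.6
core.setFailed('no matching Python found');    // prints ##[error]no matching Python found and sets exitCode = 1
```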
1  node_modules/@actions/core/lib/core.js.map (generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;AAAA,uCAA6C;AAE7C,6BAA4B;AAU5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAW;IACtD,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,CAAA;IACvB,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,GAAG,CAAC,CAAA;AACtC,CAAC;AAHD,wCAGC;AAED;;;;GAIG;AACH,SAAgB,YAAY,CAAC,IAAY,EAAE,GAAW;IACpD,cAAc,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzB,sBAAY,CAAC,YAAY,EAAE,EAAE,EAAE,GAAG,CAAC,CAAA;AACrC,CAAC;AAHD,oCAGC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;IACvC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AAHD,0BAGC;AAED;;;;;;GAMG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACpE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AARD,4BAQC;AAED;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAe;IACvC,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IACnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAHD,8BAGC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,eAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;AACzB,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAe;IACrC,eAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;AAC3B,CAAC;AAFD,0BAEC"}
|
64  node_modules/@actions/core/package.json (generated, vendored, new file)
@@ -0,0 +1,64 @@
{
|
||||
"_from": "@actions/core@^1.0.0",
|
||||
"_id": "@actions/core@1.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-aMIlkx96XH4E/2YZtEOeyrYQfhlas9jIRkfGPqMwXD095Rdkzo4lB6ZmbxPQSzD+e1M+Xsm98ZhuSMYGv/AlqA==",
|
||||
"_location": "/@actions/core",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@actions/core@^1.0.0",
|
||||
"name": "@actions/core",
|
||||
"escapedName": "@actions%2fcore",
|
||||
"scope": "@actions",
|
||||
"rawSpec": "^1.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^1.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/",
|
||||
"/@actions/tool-cache"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@actions/core/-/core-1.0.0.tgz",
|
||||
"_shasum": "4a090a2e958cc300b9ea802331034d5faf42d239",
|
||||
"_spec": "@actions/core@^1.0.0",
|
||||
"_where": "C:\\Users\\damccorm\\Documents\\setup-python",
|
||||
"bugs": {
|
||||
"url": "https://github.com/actions/toolkit/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"deprecated": false,
|
||||
"description": "Actions core lib",
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.0.2"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "lib",
|
||||
"test": "__tests__"
|
||||
},
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"gitHead": "a40bce7c8d382aa3dbadaa327acbc696e9390e55",
|
||||
"homepage": "https://github.com/actions/toolkit/tree/master/packages/core",
|
||||
"keywords": [
|
||||
"core",
|
||||
"actions"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "lib/core.js",
|
||||
"name": "@actions/core",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/actions/toolkit.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: run tests from root\" && exit 1",
|
||||
"tsc": "tsc"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
}
|
60  node_modules/@actions/exec/README.md (generated, vendored, new file)
@@ -0,0 +1,60 @@
# `@actions/exec`
|
||||
|
||||
## Usage
|
||||
|
||||
#### Basic
|
||||
|
||||
You can use this package to execute your tools on the command line in a cross platform way:
|
||||
|
||||
```
|
||||
const exec = require('@actions/exec');
|
||||
|
||||
await exec.exec('node index.js');
|
||||
```
|
||||
|
||||
#### Args
|
||||
|
||||
You can also pass in arg arrays:
|
||||
|
||||
```
|
||||
const exec = require('@actions/exec');
|
||||
|
||||
await exec.exec('node', ['index.js', 'foo=bar']);
|
||||
```
|
||||
|
||||
#### Output/options
|
||||
|
||||
Capture output or specify [other options](https://github.com/actions/toolkit/blob/d9347d4ab99fd507c0b9104b2cf79fb44fcc827d/packages/exec/src/interfaces.ts#L5):
|
||||
|
||||
```
|
||||
const exec = require('@actions/exec');
|
||||
|
||||
const myOutput = '';
|
||||
const myError = '';
|
||||
|
||||
const options = {};
|
||||
options.listeners = {
|
||||
stdout: (data: Buffer) => {
|
||||
myOutput += data.toString();
|
||||
},
|
||||
stderr: (data: Buffer) => {
|
||||
myError += data.toString();
|
||||
}
|
||||
};
|
||||
options.cwd = './lib';
|
||||
|
||||
await exec.exec('node', ['index.js', 'foo=bar'], options);
|
||||
```
|
||||
|
||||
#### Exec tools not in the PATH
|
||||
|
||||
You can use it in conjunction with the `which` function from `@actions/io` to execute tools that are not in the PATH:
|
||||
|
||||
```
|
||||
const exec = require('@actions/exec');
|
||||
const io = require('@actions/io');
|
||||
|
||||
const pythonPath: string = await io.which('python', true)
|
||||
|
||||
await exec.exec(`"${pythonPath}"`, ['main.py']);
|
||||
```
|
12  node_modules/@actions/exec/lib/exec.d.ts (generated, vendored, new file)
@@ -0,0 +1,12 @@
import * as im from './interfaces';
|
||||
/**
|
||||
* Exec a command.
|
||||
* Output will be streamed to the live console.
|
||||
* Returns promise with return code
|
||||
*
|
||||
* @param commandLine command to execute (can include additional args). Must be correctly escaped.
|
||||
* @param args optional arguments for tool. Escaping is handled by the lib.
|
||||
* @param options optional exec options. See ExecOptions
|
||||
* @returns Promise<number> exit code
|
||||
*/
|
||||
export declare function exec(commandLine: string, args?: string[], options?: im.ExecOptions): Promise<number>;
|
36  node_modules/@actions/exec/lib/exec.js (generated, vendored, new file)
@@ -0,0 +1,36 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tr = require("./toolrunner");
|
||||
/**
|
||||
* Exec a command.
|
||||
* Output will be streamed to the live console.
|
||||
* Returns promise with return code
|
||||
*
|
||||
* @param commandLine command to execute (can include additional args). Must be correctly escaped.
|
||||
* @param args optional arguments for tool. Escaping is handled by the lib.
|
||||
* @param options optional exec options. See ExecOptions
|
||||
* @returns Promise<number> exit code
|
||||
*/
|
||||
function exec(commandLine, args, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const commandArgs = tr.argStringToArray(commandLine);
|
||||
if (commandArgs.length === 0) {
|
||||
throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
|
||||
}
|
||||
// Path to tool to execute should be first arg
|
||||
const toolPath = commandArgs[0];
|
||||
args = commandArgs.slice(1).concat(args || []);
|
||||
const runner = new tr.ToolRunner(toolPath, args, options);
|
||||
return runner.exec();
|
||||
});
|
||||
}
|
||||
exports.exec = exec;
|
||||
//# sourceMappingURL=exec.js.map
|
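As `exec.js` above shows, the `commandLine` string is split with `argStringToArray`, the first token becomes the tool path, and any remaining tokens are prepended to the `args` array before the `ToolRunner` is created. A small usage sketch under those semantics; the command itself is just an example:

```js
const exec = require('@actions/exec');

async function main() {
  // Equivalent to running: python -m pip install --upgrade pip
  // 'python' is the tool, ['-m', 'pip'] come from the command line, the rest from the args array.
  const exitCode = await exec.exec('python -m pip', ['install', '--upgrade', 'pip']);
  console.log(`pip finished with exit code ${exitCode}`);
}

main().catch(err => {
  console.error(err.message);
  process.exitCode = 1;
});
```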
1  node_modules/@actions/exec/lib/exec.js.map (generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"exec.js","sourceRoot":"","sources":["../src/exec.ts"],"names":[],"mappings":";;;;;;;;;;AACA,mCAAkC;AAElC;;;;;;;;;GASG;AACH,SAAsB,IAAI,CACxB,WAAmB,EACnB,IAAe,EACf,OAAwB;;QAExB,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAA;QACpD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;YAC5B,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;SACpE;QACD,8CAA8C;QAC9C,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,CAAA;QAC/B,IAAI,GAAG,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,IAAI,EAAE,CAAC,CAAA;QAC9C,MAAM,MAAM,GAAkB,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;QACxE,OAAO,MAAM,CAAC,IAAI,EAAE,CAAA;IACtB,CAAC;CAAA;AAdD,oBAcC"}
|
35  node_modules/@actions/exec/lib/interfaces.d.ts (generated, vendored, new file)
@@ -0,0 +1,35 @@
/// <reference types="node" />
|
||||
import * as stream from 'stream';
|
||||
/**
|
||||
* Interface for exec options
|
||||
*/
|
||||
export interface ExecOptions {
|
||||
/** optional working directory. defaults to current */
|
||||
cwd?: string;
|
||||
/** optional envvar dictionary. defaults to current process's env */
|
||||
env?: {
|
||||
[key: string]: string;
|
||||
};
|
||||
/** optional. defaults to false */
|
||||
silent?: boolean;
|
||||
/** optional out stream to use. Defaults to process.stdout */
|
||||
outStream?: stream.Writable;
|
||||
/** optional err stream to use. Defaults to process.stderr */
|
||||
errStream?: stream.Writable;
|
||||
/** optional. whether to skip quoting/escaping arguments if needed. defaults to false. */
|
||||
windowsVerbatimArguments?: boolean;
|
||||
/** optional. whether to fail if output to stderr. defaults to false */
|
||||
failOnStdErr?: boolean;
|
||||
/** optional. defaults to failing on non zero. ignore will not fail leaving it up to the caller */
|
||||
ignoreReturnCode?: boolean;
|
||||
/** optional. How long in ms to wait for STDIO streams to close after the exit event of the process before terminating. defaults to 10000 */
|
||||
delay?: number;
|
||||
/** optional. Listeners for output. Callback functions that will be called on these events */
|
||||
listeners?: {
|
||||
stdout?: (data: Buffer) => void;
|
||||
stderr?: (data: Buffer) => void;
|
||||
stdline?: (data: string) => void;
|
||||
errline?: (data: string) => void;
|
||||
debug?: (data: string) => void;
|
||||
};
|
||||
}
|
3  node_modules/@actions/exec/lib/interfaces.js (generated, vendored, new file)
@@ -0,0 +1,3 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=interfaces.js.map
|
1  node_modules/@actions/exec/lib/interfaces.js.map (generated, vendored, new file)
@@ -0,0 +1 @@
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""}
|
37  node_modules/@actions/exec/lib/toolrunner.d.ts (generated, vendored, new file)
@@ -0,0 +1,37 @@
/// <reference types="node" />
|
||||
import * as events from 'events';
|
||||
import * as im from './interfaces';
|
||||
export declare class ToolRunner extends events.EventEmitter {
|
||||
constructor(toolPath: string, args?: string[], options?: im.ExecOptions);
|
||||
private toolPath;
|
||||
private args;
|
||||
private options;
|
||||
private _debug;
|
||||
private _getCommandString;
|
||||
private _processLineBuffer;
|
||||
private _getSpawnFileName;
|
||||
private _getSpawnArgs;
|
||||
private _endsWith;
|
||||
private _isCmdFile;
|
||||
private _windowsQuoteCmdArg;
|
||||
private _uvQuoteCmdArg;
|
||||
private _cloneExecOptions;
|
||||
private _getSpawnOptions;
|
||||
/**
|
||||
* Exec a tool.
|
||||
* Output will be streamed to the live console.
|
||||
* Returns promise with return code
|
||||
*
|
||||
* @param tool path to tool to exec
|
||||
* @param options optional exec options. See ExecOptions
|
||||
* @returns number
|
||||
*/
|
||||
exec(): Promise<number>;
|
||||
}
|
||||
/**
|
||||
* Convert an arg string to an array of args. Handles escaping
|
||||
*
|
||||
* @param argString string of arguments
|
||||
* @returns string[] array of arguments
|
||||
*/
|
||||
export declare function argStringToArray(argString: string): string[];
|
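The declaration file above also exports `argStringToArray`, the helper `exec()` uses to split a command line while respecting quotes. A hedged sketch of calling it directly; it lives in `lib/toolrunner`, which the package's main entry does not re-export, and the expected output is my reading of the quoting rules rather than something shown in the diff:

```js
const { argStringToArray } = require('@actions/exec/lib/toolrunner');

// Quoted spans stay together as a single argument; the surrounding quotes are stripped.
const args = argStringToArray('python -c "import sys; print(sys.version)"');
console.log(args); // expected: [ 'python', '-c', 'import sys; print(sys.version)' ]
```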
573  node_modules/@actions/exec/lib/toolrunner.js (generated, vendored, new file)
@@ -0,0 +1,573 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const os = require("os");
|
||||
const events = require("events");
|
||||
const child = require("child_process");
|
||||
/* eslint-disable @typescript-eslint/unbound-method */
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/*
|
||||
* Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
|
||||
*/
|
||||
class ToolRunner extends events.EventEmitter {
|
||||
constructor(toolPath, args, options) {
|
||||
super();
|
||||
if (!toolPath) {
|
||||
throw new Error("Parameter 'toolPath' cannot be null or empty.");
|
||||
}
|
||||
this.toolPath = toolPath;
|
||||
this.args = args || [];
|
||||
this.options = options || {};
|
||||
}
|
||||
_debug(message) {
|
||||
if (this.options.listeners && this.options.listeners.debug) {
|
||||
this.options.listeners.debug(message);
|
||||
}
|
||||
}
|
||||
_getCommandString(options, noPrefix) {
|
||||
const toolPath = this._getSpawnFileName();
|
||||
const args = this._getSpawnArgs(options);
|
||||
let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
|
||||
if (IS_WINDOWS) {
|
||||
// Windows + cmd file
|
||||
if (this._isCmdFile()) {
|
||||
cmd += toolPath;
|
||||
for (const a of args) {
|
||||
cmd += ` ${a}`;
|
||||
}
|
||||
}
|
||||
// Windows + verbatim
|
||||
else if (options.windowsVerbatimArguments) {
|
||||
cmd += `"${toolPath}"`;
|
||||
for (const a of args) {
|
||||
cmd += ` ${a}`;
|
||||
}
|
||||
}
|
||||
// Windows (regular)
|
||||
else {
|
||||
cmd += this._windowsQuoteCmdArg(toolPath);
|
||||
for (const a of args) {
|
||||
cmd += ` ${this._windowsQuoteCmdArg(a)}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
// OSX/Linux - this can likely be improved with some form of quoting.
|
||||
// creating processes on Unix is fundamentally different than Windows.
|
||||
// on Unix, execvp() takes an arg array.
|
||||
cmd += toolPath;
|
||||
for (const a of args) {
|
||||
cmd += ` ${a}`;
|
||||
}
|
||||
}
|
||||
return cmd;
|
||||
}
|
||||
_processLineBuffer(data, strBuffer, onLine) {
|
||||
try {
|
||||
let s = strBuffer + data.toString();
|
||||
let n = s.indexOf(os.EOL);
|
||||
while (n > -1) {
|
||||
const line = s.substring(0, n);
|
||||
onLine(line);
|
||||
// the rest of the string ...
|
||||
s = s.substring(n + os.EOL.length);
|
||||
n = s.indexOf(os.EOL);
|
||||
}
|
||||
strBuffer = s;
|
||||
}
|
||||
catch (err) {
|
||||
// streaming lines to console is best effort. Don't fail a build.
|
||||
this._debug(`error processing line. Failed with error ${err}`);
|
||||
}
|
||||
}
|
||||
_getSpawnFileName() {
|
||||
if (IS_WINDOWS) {
|
||||
if (this._isCmdFile()) {
|
||||
return process.env['COMSPEC'] || 'cmd.exe';
|
||||
}
|
||||
}
|
||||
return this.toolPath;
|
||||
}
|
||||
_getSpawnArgs(options) {
|
||||
if (IS_WINDOWS) {
|
||||
if (this._isCmdFile()) {
|
||||
let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
|
||||
for (const a of this.args) {
|
||||
argline += ' ';
|
||||
argline += options.windowsVerbatimArguments
|
||||
? a
|
||||
: this._windowsQuoteCmdArg(a);
|
||||
}
|
||||
argline += '"';
|
||||
return [argline];
|
||||
}
|
||||
}
|
||||
return this.args;
|
||||
}
|
||||
_endsWith(str, end) {
|
||||
return str.endsWith(end);
|
||||
}
|
||||
_isCmdFile() {
|
||||
const upperToolPath = this.toolPath.toUpperCase();
|
||||
return (this._endsWith(upperToolPath, '.CMD') ||
|
||||
this._endsWith(upperToolPath, '.BAT'));
|
||||
}
|
||||
_windowsQuoteCmdArg(arg) {
|
||||
// for .exe, apply the normal quoting rules that libuv applies
|
||||
if (!this._isCmdFile()) {
|
||||
return this._uvQuoteCmdArg(arg);
|
||||
}
|
||||
// otherwise apply quoting rules specific to the cmd.exe command line parser.
|
||||
// the libuv rules are generic and are not designed specifically for cmd.exe
|
||||
// command line parser.
|
||||
//
|
||||
// for a detailed description of the cmd.exe command line parser, refer to
|
||||
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
|
||||
// need quotes for empty arg
|
||||
if (!arg) {
|
||||
return '""';
|
||||
}
|
||||
// determine whether the arg needs to be quoted
|
||||
const cmdSpecialChars = [
|
||||
' ',
|
||||
'\t',
|
||||
'&',
|
||||
'(',
|
||||
')',
|
||||
'[',
|
||||
']',
|
||||
'{',
|
||||
'}',
|
||||
'^',
|
||||
'=',
|
||||
';',
|
||||
'!',
|
||||
"'",
|
||||
'+',
|
||||
',',
|
||||
'`',
|
||||
'~',
|
||||
'|',
|
||||
'<',
|
||||
'>',
|
||||
'"'
|
||||
];
|
||||
let needsQuotes = false;
|
||||
for (const char of arg) {
|
||||
if (cmdSpecialChars.some(x => x === char)) {
|
||||
needsQuotes = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// short-circuit if quotes not needed
|
||||
if (!needsQuotes) {
|
||||
return arg;
|
||||
}
|
||||
// the following quoting rules are very similar to the rules that by libuv applies.
|
||||
//
|
||||
// 1) wrap the string in quotes
|
||||
//
|
||||
// 2) double-up quotes - i.e. " => ""
|
||||
//
|
||||
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
|
||||
// doesn't work well with a cmd.exe command line.
|
||||
//
|
||||
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
|
||||
// for example, the command line:
|
||||
// foo.exe "myarg:""my val"""
|
||||
// is parsed by a .NET console app into an arg array:
|
||||
// [ "myarg:\"my val\"" ]
|
||||
// which is the same end result when applying libuv quoting rules. although the actual
|
||||
// command line from libuv quoting rules would look like:
|
||||
// foo.exe "myarg:\"my val\""
|
||||
//
|
||||
// 3) double-up slashes that precede a quote,
|
||||
// e.g. hello \world => "hello \world"
|
||||
// hello\"world => "hello\\""world"
|
||||
// hello\\"world => "hello\\\\""world"
|
||||
// hello world\ => "hello world\\"
|
||||
//
|
||||
// technically this is not required for a cmd.exe command line, or the batch argument parser.
|
||||
// the reasons for including this as a .cmd quoting rule are:
|
||||
//
|
||||
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
|
||||
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
|
||||
//
|
||||
// b) it's what we've been doing previously (by deferring to node default behavior) and we
|
||||
// haven't heard any complaints about that aspect.
|
||||
//
|
||||
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
|
||||
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
|
||||
// by using %%.
|
||||
//
|
||||
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
|
||||
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
|
||||
//
|
||||
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
|
||||
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
|
||||
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
|
||||
// to an external program.
|
||||
//
|
||||
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
|
||||
// % can be escaped within a .cmd file.
|
||||
let reverse = '"';
|
||||
let quoteHit = true;
|
||||
for (let i = arg.length; i > 0; i--) {
|
||||
// walk the string in reverse
|
||||
reverse += arg[i - 1];
|
||||
if (quoteHit && arg[i - 1] === '\\') {
|
||||
reverse += '\\'; // double the slash
|
||||
}
|
||||
else if (arg[i - 1] === '"') {
|
||||
quoteHit = true;
|
||||
reverse += '"'; // double the quote
|
||||
}
|
||||
else {
|
||||
quoteHit = false;
|
||||
}
|
||||
}
|
||||
reverse += '"';
|
||||
return reverse
|
||||
.split('')
|
||||
.reverse()
|
||||
.join('');
|
||||
}
|
||||
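A quick editorial sketch of the .cmd quoting rules described in the comments above, assuming a ToolRunner whose toolPath ends in `.cmd` (this block and its argument values are illustrative additions, not part of the vendored file):

```
const { ToolRunner } = require('@actions/exec/lib/toolrunner');
const tr = new ToolRunner('C:\\scripts\\build.cmd');   // hypothetical .cmd tool

tr._windowsQuoteCmdArg('--msg');            // --msg             (no cmd special chars, returned as-is)
tr._windowsQuoteCmdArg('hello "world"');    // "hello ""world""" (wrap in quotes, double every quote)
tr._windowsQuoteCmdArg('hello\\"world');    // "hello\\""world"  (double the slash that precedes a quote)
```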
_uvQuoteCmdArg(arg) {
|
||||
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
|
||||
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
|
||||
// is used.
|
||||
//
|
||||
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
|
||||
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
|
||||
// pasting copyright notice from Node within this function:
|
||||
//
|
||||
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to
|
||||
// deal in the Software without restriction, including without limitation the
|
||||
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
// sell copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
// IN THE SOFTWARE.
|
||||
if (!arg) {
|
||||
// Need double quotation for empty argument
|
||||
return '""';
|
||||
}
|
||||
if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
|
||||
// No quotation needed
|
||||
return arg;
|
||||
}
|
||||
if (!arg.includes('"') && !arg.includes('\\')) {
|
||||
// No embedded double quotes or backslashes, so I can just wrap
|
||||
// quote marks around the whole thing.
|
||||
return `"${arg}"`;
|
||||
}
|
||||
// Expected input/output:
|
||||
// input : hello"world
|
||||
// output: "hello\"world"
|
||||
// input : hello""world
|
||||
// output: "hello\"\"world"
|
||||
// input : hello\world
|
||||
// output: hello\world
|
||||
// input : hello\\world
|
||||
// output: hello\\world
|
||||
// input : hello\"world
|
||||
// output: "hello\\\"world"
|
||||
// input : hello\\"world
|
||||
// output: "hello\\\\\"world"
|
||||
// input : hello world\
|
||||
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
|
||||
// but it appears the comment is wrong, it should be "hello world\\"
|
||||
let reverse = '"';
|
||||
let quoteHit = true;
|
||||
for (let i = arg.length; i > 0; i--) {
|
||||
// walk the string in reverse
|
||||
reverse += arg[i - 1];
|
||||
if (quoteHit && arg[i - 1] === '\\') {
|
||||
reverse += '\\';
|
||||
}
|
||||
else if (arg[i - 1] === '"') {
|
||||
quoteHit = true;
|
||||
reverse += '\\';
|
||||
}
|
||||
else {
|
||||
quoteHit = false;
|
||||
}
|
||||
}
|
||||
reverse += '"';
|
||||
return reverse
|
||||
.split('')
|
||||
.reverse()
|
||||
.join('');
|
||||
}
|
||||
_cloneExecOptions(options) {
|
||||
options = options || {};
|
||||
const result = {
|
||||
cwd: options.cwd || process.cwd(),
|
||||
env: options.env || process.env,
|
||||
silent: options.silent || false,
|
||||
windowsVerbatimArguments: options.windowsVerbatimArguments || false,
|
||||
failOnStdErr: options.failOnStdErr || false,
|
||||
ignoreReturnCode: options.ignoreReturnCode || false,
|
||||
delay: options.delay || 10000
|
||||
};
|
||||
result.outStream = options.outStream || process.stdout;
|
||||
result.errStream = options.errStream || process.stderr;
|
||||
return result;
|
||||
}
|
||||
_getSpawnOptions(options, toolPath) {
|
||||
options = options || {};
|
||||
const result = {};
|
||||
result.cwd = options.cwd;
|
||||
result.env = options.env;
|
||||
result['windowsVerbatimArguments'] =
|
||||
options.windowsVerbatimArguments || this._isCmdFile();
|
||||
if (options.windowsVerbatimArguments) {
|
||||
result.argv0 = `"${toolPath}"`;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Exec a tool.
|
||||
* Output will be streamed to the live console.
|
||||
* Returns promise with return code
|
||||
*
|
||||
* @param tool path to tool to exec
|
||||
* @param options optional exec options. See ExecOptions
|
||||
* @returns number
|
||||
*/
|
||||
exec() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise((resolve, reject) => {
|
||||
this._debug(`exec tool: ${this.toolPath}`);
|
||||
this._debug('arguments:');
|
||||
for (const arg of this.args) {
|
||||
this._debug(` ${arg}`);
|
||||
}
|
||||
const optionsNonNull = this._cloneExecOptions(this.options);
|
||||
if (!optionsNonNull.silent && optionsNonNull.outStream) {
|
||||
optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
|
||||
}
|
||||
const state = new ExecState(optionsNonNull, this.toolPath);
|
||||
state.on('debug', (message) => {
|
||||
this._debug(message);
|
||||
});
|
||||
const fileName = this._getSpawnFileName();
|
||||
const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
|
||||
const stdbuffer = '';
|
||||
if (cp.stdout) {
|
||||
cp.stdout.on('data', (data) => {
|
||||
if (this.options.listeners && this.options.listeners.stdout) {
|
||||
this.options.listeners.stdout(data);
|
||||
}
|
||||
if (!optionsNonNull.silent && optionsNonNull.outStream) {
|
||||
optionsNonNull.outStream.write(data);
|
||||
}
|
||||
this._processLineBuffer(data, stdbuffer, (line) => {
|
||||
if (this.options.listeners && this.options.listeners.stdline) {
|
||||
this.options.listeners.stdline(line);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
const errbuffer = '';
|
||||
if (cp.stderr) {
|
||||
cp.stderr.on('data', (data) => {
|
||||
state.processStderr = true;
|
||||
if (this.options.listeners && this.options.listeners.stderr) {
|
||||
this.options.listeners.stderr(data);
|
||||
}
|
||||
if (!optionsNonNull.silent &&
|
||||
optionsNonNull.errStream &&
|
||||
optionsNonNull.outStream) {
|
||||
const s = optionsNonNull.failOnStdErr
|
||||
? optionsNonNull.errStream
|
||||
: optionsNonNull.outStream;
|
||||
s.write(data);
|
||||
}
|
||||
this._processLineBuffer(data, errbuffer, (line) => {
|
||||
if (this.options.listeners && this.options.listeners.errline) {
|
||||
this.options.listeners.errline(line);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
cp.on('error', (err) => {
|
||||
state.processError = err.message;
|
||||
state.processExited = true;
|
||||
state.processClosed = true;
|
||||
state.CheckComplete();
|
||||
});
|
||||
cp.on('exit', (code) => {
|
||||
state.processExitCode = code;
|
||||
state.processExited = true;
|
||||
this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
|
||||
state.CheckComplete();
|
||||
});
|
||||
cp.on('close', (code) => {
|
||||
state.processExitCode = code;
|
||||
state.processExited = true;
|
||||
state.processClosed = true;
|
||||
this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
|
||||
state.CheckComplete();
|
||||
});
|
||||
state.on('done', (error, exitCode) => {
|
||||
if (stdbuffer.length > 0) {
|
||||
this.emit('stdline', stdbuffer);
|
||||
}
|
||||
if (errbuffer.length > 0) {
|
||||
this.emit('errline', errbuffer);
|
||||
}
|
||||
cp.removeAllListeners();
|
||||
if (error) {
|
||||
reject(error);
|
||||
}
|
||||
else {
|
||||
resolve(exitCode);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.ToolRunner = ToolRunner;
|
||||
/**
|
||||
* Convert an arg string to an array of args. Handles escaping
|
||||
*
|
||||
* @param argString string of arguments
|
||||
* @returns string[] array of arguments
|
||||
*/
|
||||
function argStringToArray(argString) {
|
||||
const args = [];
|
||||
let inQuotes = false;
|
||||
let escaped = false;
|
||||
let arg = '';
|
||||
function append(c) {
|
||||
// we only escape double quotes.
|
||||
if (escaped && c !== '"') {
|
||||
arg += '\\';
|
||||
}
|
||||
arg += c;
|
||||
escaped = false;
|
||||
}
|
||||
for (let i = 0; i < argString.length; i++) {
|
||||
const c = argString.charAt(i);
|
||||
if (c === '"') {
|
||||
if (!escaped) {
|
||||
inQuotes = !inQuotes;
|
||||
}
|
||||
else {
|
||||
append(c);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (c === '\\' && escaped) {
|
||||
append(c);
|
||||
continue;
|
||||
}
|
||||
if (c === '\\' && inQuotes) {
|
||||
escaped = true;
|
||||
continue;
|
||||
}
|
||||
if (c === ' ' && !inQuotes) {
|
||||
if (arg.length > 0) {
|
||||
args.push(arg);
|
||||
arg = '';
|
||||
}
|
||||
continue;
|
||||
}
|
||||
append(c);
|
||||
}
|
||||
if (arg.length > 0) {
|
||||
args.push(arg.trim());
|
||||
}
|
||||
return args;
|
||||
}
|
||||
exports.argStringToArray = argStringToArray;
|
||||
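For reference, a small usage sketch of argStringToArray (an editorial addition, not in the vendored file):

```
const { argStringToArray } = require('@actions/exec/lib/toolrunner');

argStringToArray('--foo "bar baz" qux');
// -> [ '--foo', 'bar baz', 'qux' ]   (quoted segments stay together, spaces split the rest)
```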
class ExecState extends events.EventEmitter {
|
||||
constructor(options, toolPath) {
|
||||
super();
|
||||
this.processClosed = false; // tracks whether the process has exited and stdio is closed
|
||||
this.processError = '';
|
||||
this.processExitCode = 0;
|
||||
this.processExited = false; // tracks whether the process has exited
|
||||
this.processStderr = false; // tracks whether stderr was written to
|
||||
this.delay = 10000; // 10 seconds
|
||||
this.done = false;
|
||||
this.timeout = null;
|
||||
if (!toolPath) {
|
||||
throw new Error('toolPath must not be empty');
|
||||
}
|
||||
this.options = options;
|
||||
this.toolPath = toolPath;
|
||||
if (options.delay) {
|
||||
this.delay = options.delay;
|
||||
}
|
||||
}
|
||||
CheckComplete() {
|
||||
if (this.done) {
|
||||
return;
|
||||
}
|
||||
if (this.processClosed) {
|
||||
this._setResult();
|
||||
}
|
||||
else if (this.processExited) {
|
||||
this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
|
||||
}
|
||||
}
|
||||
_debug(message) {
|
||||
this.emit('debug', message);
|
||||
}
|
||||
_setResult() {
|
||||
// determine whether there is an error
|
||||
let error;
|
||||
if (this.processExited) {
|
||||
if (this.processError) {
|
||||
error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
|
||||
}
|
||||
else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
|
||||
error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
|
||||
}
|
||||
else if (this.processStderr && this.options.failOnStdErr) {
|
||||
error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
|
||||
}
|
||||
}
|
||||
// clear the timeout
|
||||
if (this.timeout) {
|
||||
clearTimeout(this.timeout);
|
||||
this.timeout = null;
|
||||
}
|
||||
this.done = true;
|
||||
this.emit('done', error, this.processExitCode);
|
||||
}
|
||||
static HandleTimeout(state) {
|
||||
if (state.done) {
|
||||
return;
|
||||
}
|
||||
if (!state.processClosed && state.processExited) {
|
||||
const message = `The STDIO streams did not close within ${state.delay /
|
||||
1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
|
||||
state._debug(message);
|
||||
}
|
||||
state._setResult();
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=toolrunner.js.map
|
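To tie the pieces of this file together, here is a hedged sketch of driving ToolRunner directly; the option names mirror _cloneExecOptions above, and the tool path is only an example:

```
const { ToolRunner } = require('@actions/exec/lib/toolrunner');

(async () => {
  const runner = new ToolRunner('python', ['--version'], {
    ignoreReturnCode: true,   // resolve with the exit code instead of rejecting on non-zero
    delay: 2000,              // how long ExecState waits for STDIO to close after 'exit'
    listeners: {
      stdline: line => console.log(`out: ${line}`),
      errline: line => console.log(`err: ${line}`)
    }
  });
  const exitCode = await runner.exec();
  console.log(`python exited with ${exitCode}`);
})();
```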
1 node_modules/@actions/exec/lib/toolrunner.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
63 node_modules/@actions/exec/package.json (generated, vendored, Normal file)
@@ -0,0 +1,63 @@
{
|
||||
"_from": "@actions/exec@^1.0.0",
|
||||
"_id": "@actions/exec@1.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-nquH0+XKng+Ll7rZfCojN7NWSbnGh+ltwUJhzfbLkmOJgxocGX2/yXcZLMyT9fa7+tByEow/NSTrBExNlEj9fw==",
|
||||
"_location": "/@actions/exec",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@actions/exec@^1.0.0",
|
||||
"name": "@actions/exec",
|
||||
"escapedName": "@actions%2fexec",
|
||||
"scope": "@actions",
|
||||
"rawSpec": "^1.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^1.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@actions/tool-cache"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.0.tgz",
|
||||
"_shasum": "70c8b698c9baa02965c07da5f0b185ca56f0a955",
|
||||
"_spec": "@actions/exec@^1.0.0",
|
||||
"_where": "C:\\Users\\damccorm\\Documents\\setup-python\\node_modules\\@actions\\tool-cache",
|
||||
"bugs": {
|
||||
"url": "https://github.com/actions/toolkit/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"deprecated": false,
|
||||
"description": "Actions exec lib",
|
||||
"devDependencies": {
|
||||
"@actions/io": "^1.0.0"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "lib",
|
||||
"test": "__tests__"
|
||||
},
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"gitHead": "a40bce7c8d382aa3dbadaa327acbc696e9390e55",
|
||||
"homepage": "https://github.com/actions/toolkit/tree/master/packages/exec",
|
||||
"keywords": [
|
||||
"exec",
|
||||
"actions"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "lib/exec.js",
|
||||
"name": "@actions/exec",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/actions/toolkit.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: run tests from root\" && exit 1",
|
||||
"tsc": "tsc"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
}
|
53 node_modules/@actions/io/README.md (generated, vendored, Normal file)
@@ -0,0 +1,53 @@
# `@actions/io`
|
||||
|
||||
> Core functions for cli filesystem scenarios
|
||||
|
||||
## Usage
|
||||
|
||||
#### mkdir -p
|
||||
|
||||
Recursively make a directory. Follows rules specified in [man mkdir](https://linux.die.net/man/1/mkdir) with the `-p` option specified:
|
||||
|
||||
```
|
||||
const io = require('@actions/io');
|
||||
|
||||
await io.mkdirP('path/to/make');
|
||||
```
|
||||
|
||||
#### cp/mv
|
||||
|
||||
Copy or move files or folders. Follows rules specified in [man cp](https://linux.die.net/man/1/cp) and [man mv](https://linux.die.net/man/1/mv):
|
||||
|
||||
```
|
||||
const io = require('@actions/io');
|
||||
|
||||
// Recursive must be true for directories
|
||||
const options = { recursive: true, force: false }
|
||||
|
||||
await io.cp('path/to/directory', 'path/to/dest', options);
|
||||
await io.mv('path/to/file', 'path/to/dest');
|
||||
```
|
||||
|
||||
#### rm -rf
|
||||
|
||||
Remove a file or folder recursively. Follows rules specified in [man rm](https://linux.die.net/man/1/rm) with the `-r` and `-f` rules specified.
|
||||
|
||||
```
|
||||
const io = require('@actions/io');
|
||||
|
||||
await io.rmRF('path/to/directory');
|
||||
await io.rmRF('path/to/file');
|
||||
```
|
||||
|
||||
#### which
|
||||
|
||||
Get the path to a tool and resolves via paths. Follows the rules specified in [man which](https://linux.die.net/man/1/which).
|
||||
|
||||
```
|
||||
const exec = require('@actions/exec');
|
||||
const io = require('@actions/io');
|
||||
|
||||
const pythonPath: string = await io.which('python', true)
|
||||
|
||||
await exec.exec(`"${pythonPath}"`, ['main.py']);
|
||||
```
|
29 node_modules/@actions/io/lib/io-util.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,29 @@
/// <reference types="node" />
|
||||
import * as fs from 'fs';
|
||||
export declare const chmod: typeof fs.promises.chmod, copyFile: typeof fs.promises.copyFile, lstat: typeof fs.promises.lstat, mkdir: typeof fs.promises.mkdir, readdir: typeof fs.promises.readdir, readlink: typeof fs.promises.readlink, rename: typeof fs.promises.rename, rmdir: typeof fs.promises.rmdir, stat: typeof fs.promises.stat, symlink: typeof fs.promises.symlink, unlink: typeof fs.promises.unlink;
|
||||
export declare const IS_WINDOWS: boolean;
|
||||
export declare function exists(fsPath: string): Promise<boolean>;
|
||||
export declare function isDirectory(fsPath: string, useStat?: boolean): Promise<boolean>;
|
||||
/**
|
||||
* On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
|
||||
* \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
|
||||
*/
|
||||
export declare function isRooted(p: string): boolean;
|
||||
/**
|
||||
* Recursively create a directory at `fsPath`.
|
||||
*
|
||||
* This implementation is optimistic, meaning it attempts to create the full
|
||||
* path first, and backs up the path stack from there.
|
||||
*
|
||||
* @param fsPath The path to create
|
||||
* @param maxDepth The maximum recursion depth
|
||||
* @param depth The current recursion depth
|
||||
*/
|
||||
export declare function mkdirP(fsPath: string, maxDepth?: number, depth?: number): Promise<void>;
|
||||
/**
|
||||
* Best effort attempt to determine whether a file exists and is executable.
|
||||
* @param filePath file path to check
|
||||
* @param extensions additional file extensions to try
|
||||
* @return if file exists and is executable, returns the file path. otherwise empty string.
|
||||
*/
|
||||
export declare function tryGetExecutablePath(filePath: string, extensions: string[]): Promise<string>;
|
194 node_modules/@actions/io/lib/io-util.js (generated, vendored, Normal file)
@@ -0,0 +1,194 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var _a;
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert_1 = require("assert");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
|
||||
exports.IS_WINDOWS = process.platform === 'win32';
|
||||
function exists(fsPath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
yield exports.stat(fsPath);
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
exports.exists = exists;
|
||||
function isDirectory(fsPath, useStat = false) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
|
||||
return stats.isDirectory();
|
||||
});
|
||||
}
|
||||
exports.isDirectory = isDirectory;
|
||||
/**
|
||||
* On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
|
||||
* \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
|
||||
*/
|
||||
function isRooted(p) {
|
||||
p = normalizeSeparators(p);
|
||||
if (!p) {
|
||||
throw new Error('isRooted() parameter "p" cannot be empty');
|
||||
}
|
||||
if (exports.IS_WINDOWS) {
|
||||
return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
|
||||
); // e.g. C: or C:\hello
|
||||
}
|
||||
return p.startsWith('/');
|
||||
}
|
||||
exports.isRooted = isRooted;
|
||||
/**
|
||||
* Recursively create a directory at `fsPath`.
|
||||
*
|
||||
* This implementation is optimistic, meaning it attempts to create the full
|
||||
* path first, and backs up the path stack from there.
|
||||
*
|
||||
* @param fsPath The path to create
|
||||
* @param maxDepth The maximum recursion depth
|
||||
* @param depth The current recursion depth
|
||||
*/
|
||||
function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
assert_1.ok(fsPath, 'a path argument must be provided');
|
||||
fsPath = path.resolve(fsPath);
|
||||
if (depth >= maxDepth)
|
||||
return exports.mkdir(fsPath);
|
||||
try {
|
||||
yield exports.mkdir(fsPath);
|
||||
return;
|
||||
}
|
||||
catch (err) {
|
||||
switch (err.code) {
|
||||
case 'ENOENT': {
|
||||
yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
|
||||
yield exports.mkdir(fsPath);
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
let stats;
|
||||
try {
|
||||
stats = yield exports.stat(fsPath);
|
||||
}
|
||||
catch (err2) {
|
||||
throw err;
|
||||
}
|
||||
if (!stats.isDirectory())
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.mkdirP = mkdirP;
|
||||
/**
|
||||
* Best effort attempt to determine whether a file exists and is executable.
|
||||
* @param filePath file path to check
|
||||
* @param extensions additional file extensions to try
|
||||
* @return if file exists and is executable, returns the file path. otherwise empty string.
|
||||
*/
|
||||
function tryGetExecutablePath(filePath, extensions) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let stats = undefined;
|
||||
try {
|
||||
// test file exists
|
||||
stats = yield exports.stat(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
|
||||
}
|
||||
}
|
||||
if (stats && stats.isFile()) {
|
||||
if (exports.IS_WINDOWS) {
|
||||
// on Windows, test for valid extension
|
||||
const upperExt = path.extname(filePath).toUpperCase();
|
||||
if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
|
||||
return filePath;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (isUnixExecutable(stats)) {
|
||||
return filePath;
|
||||
}
|
||||
}
|
||||
}
|
||||
// try each extension
|
||||
const originalFilePath = filePath;
|
||||
for (const extension of extensions) {
|
||||
filePath = originalFilePath + extension;
|
||||
stats = undefined;
|
||||
try {
|
||||
stats = yield exports.stat(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
|
||||
}
|
||||
}
|
||||
if (stats && stats.isFile()) {
|
||||
if (exports.IS_WINDOWS) {
|
||||
// preserve the case of the actual file (since an extension was appended)
|
||||
try {
|
||||
const directory = path.dirname(filePath);
|
||||
const upperName = path.basename(filePath).toUpperCase();
|
||||
for (const actualName of yield exports.readdir(directory)) {
|
||||
if (upperName === actualName.toUpperCase()) {
|
||||
filePath = path.join(directory, actualName);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
|
||||
}
|
||||
return filePath;
|
||||
}
|
||||
else {
|
||||
if (isUnixExecutable(stats)) {
|
||||
return filePath;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return '';
|
||||
});
|
||||
}
|
||||
exports.tryGetExecutablePath = tryGetExecutablePath;
|
||||
function normalizeSeparators(p) {
|
||||
p = p || '';
|
||||
if (exports.IS_WINDOWS) {
|
||||
// convert slashes on Windows
|
||||
p = p.replace(/\//g, '\\');
|
||||
// remove redundant slashes
|
||||
return p.replace(/\\\\+/g, '\\');
|
||||
}
|
||||
// remove redundant slashes
|
||||
return p.replace(/\/\/+/g, '/');
|
||||
}
|
||||
// on Mac/Linux, test the execute bit
|
||||
// R W X R W X R W X
|
||||
// 256 128 64 32 16 8 4 2 1
|
||||
function isUnixExecutable(stats) {
|
||||
return ((stats.mode & 1) > 0 ||
|
||||
((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
|
||||
((stats.mode & 64) > 0 && stats.uid === process.getuid()));
|
||||
}
|
||||
//# sourceMappingURL=io-util.js.map
|
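A worked example of the permission-bit test in isUnixExecutable above (an editorial sketch; the mode value is invented):

```
const mode = 0o754;                      // rwxr-xr--  (owner rwx, group r-x, others r--)
const othersCanExec = (mode & 1) > 0;    // false – no execute bit for "others"
const groupCanExec  = (mode & 8) > 0;    // true  – only counts when stats.gid === process.getgid()
const ownerCanExec  = (mode & 64) > 0;   // true  – only counts when stats.uid === process.getuid()
```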
1 node_modules/@actions/io/lib/io-util.js.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"io-util.js","sourceRoot":"","sources":["../src/io-util.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,mCAAyB;AACzB,yBAAwB;AACxB,6BAA4B;AAEf,gBAYE,qTAAA;AAEF,QAAA,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAEtD,SAAsB,MAAM,CAAC,MAAc;;QACzC,IAAI;YACF,MAAM,YAAI,CAAC,MAAM,CAAC,CAAA;SACnB;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,OAAO,KAAK,CAAA;aACb;YAED,MAAM,GAAG,CAAA;SACV;QAED,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAZD,wBAYC;AAED,SAAsB,WAAW,CAC/B,MAAc,EACd,UAAmB,KAAK;;QAExB,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,MAAM,YAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,aAAK,CAAC,MAAM,CAAC,CAAA;QAChE,OAAO,KAAK,CAAC,WAAW,EAAE,CAAA;IAC5B,CAAC;CAAA;AAND,kCAMC;AAED;;;GAGG;AACH,SAAgB,QAAQ,CAAC,CAAS;IAChC,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAC1B,IAAI,CAAC,CAAC,EAAE;QACN,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IAED,IAAI,kBAAU,EAAE;QACd,OAAO,CACL,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,8BAA8B;SACxE,CAAA,CAAC,sBAAsB;KACzB;IAED,OAAO,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AAC1B,CAAC;AAbD,4BAaC;AAED;;;;;;;;;GASG;AACH,SAAsB,MAAM,CAC1B,MAAc,EACd,WAAmB,IAAI,EACvB,QAAgB,CAAC;;QAEjB,WAAE,CAAC,MAAM,EAAE,kCAAkC,CAAC,CAAA;QAE9C,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAA;QAE7B,IAAI,KAAK,IAAI,QAAQ;YAAE,OAAO,aAAK,CAAC,MAAM,CAAC,CAAA;QAE3C,IAAI;YACF,MAAM,aAAK,CAAC,MAAM,CAAC,CAAA;YACnB,OAAM;SACP;QAAC,OAAO,GAAG,EAAE;YACZ,QAAQ,GAAG,CAAC,IAAI,EAAE;gBAChB,KAAK,QAAQ,CAAC,CAAC;oBACb,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,QAAQ,EAAE,KAAK,GAAG,CAAC,CAAC,CAAA;oBACvD,MAAM,aAAK,CAAC,MAAM,CAAC,CAAA;oBACnB,OAAM;iBACP;gBACD,OAAO,CAAC,CAAC;oBACP,IAAI,KAAe,CAAA;oBAEnB,IAAI;wBACF,KAAK,GAAG,MAAM,YAAI,CAAC,MAAM,CAAC,CAAA;qBAC3B;oBAAC,OAAO,IAAI,EAAE;wBACb,MAAM,GAAG,CAAA;qBACV;oBAED,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;wBAAE,MAAM,GAAG,CAAA;iBACpC;aACF;SACF;IACH,CAAC;CAAA;AAlCD,wBAkCC;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,QAAgB,EAChB,UAAoB;;QAEpB,IAAI,KAAK,GAAyB,SAAS,CAAA;QAC3C,IAAI;YACF,mBAAmB;YACnB,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACzB,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;aACF;SACF;QACD,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;YAC3B,IAAI,kBAAU,EAAE;gBACd,uCAAuC;gBACvC,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;gBACrD,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,QAAQ,CAAC,EAAE;oBACpE,OAAO,QAAQ,CAAA;iBAChB;aACF;iBAAM;gBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;oBAC3B,OAAO,QAAQ,CAAA;iBAChB;aACF;SACF;QAED,qBAAqB;QACrB,MAAM,gBAAgB,GAAG,QAAQ,CAAA;QACjC,KAAK,MAAM,SAAS,IAAI,UAAU,EAAE;YAClC,QAAQ,GAAG,gBAAgB,GAAG,SAAS,CAAA;YAEvC,KAAK,GAAG,SAAS,CAAA;YACjB,IAAI;gBACF,KAAK,GAAG,MAAM,YAAI,CAAC,QAAQ,CAAC,CAAA;aAC7B;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;oBACzB,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CACT,uEAAuE,QAAQ,MAAM,GAAG,EAAE,CAC3F,CAAA;iBACF;aACF;YAED,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;gBAC3B,IAAI,kBAAU,EAAE;oBACd,yEAAyE;oBACzE,IAAI;wBACF,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBACxC,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAA;wBACvD,KAAK,MAAM,UAAU,IAAI,MAAM,eAAO,CAAC,SAAS,CAAC,EAAE;4BACjD,IAAI,SAAS,KAAK,UAAU,CAAC,WAAW,EAAE,EAAE;gCAC1C,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,UAAU,CAAC,CAAA;gCAC3C,MAAK;6BACN;yBACF;qBACF;oBAAC,OAAO,GAAG,EAAE;wBACZ,sCAAsC;wBACtC,OAAO,CAAC,GAAG,CACT,yEAAyE,QAAQ,MAAM,GAAG,EAAE,CAC7F,CAAA;qBACF;oBAED,OAAO,QAAQ,CAAA;iBAChB;qBAAM;oBACL,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;wBAC3B,OAAO,QAAQ,CAAA;qBAChB;iBACF;aACF;SACF;QAED,OAAO,EAAE,CAAA;IACX,CAAC;CAAA;AA5ED,oDA4EC;AAED,SAAS,mBAAmB,CA
AC,CAAS;IACpC,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IACX,IAAI,kBAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA;KACjC;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAED,qCAAqC;AACrC,6BAA6B;AAC7B,6BAA6B;AAC7B,SAAS,gBAAgB,CAAC,KAAe;IACvC,OAAO,CACL,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC;QACpB,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC;QACxD,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,EAAE,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,GAAG,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAC1D,CAAA;AACH,CAAC"}
|
56 node_modules/@actions/io/lib/io.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,56 @@
/**
|
||||
* Interface for cp/mv options
|
||||
*/
|
||||
export interface CopyOptions {
|
||||
/** Optional. Whether to recursively copy all subdirectories. Defaults to false */
|
||||
recursive?: boolean;
|
||||
/** Optional. Whether to overwrite existing files in the destination. Defaults to true */
|
||||
force?: boolean;
|
||||
}
|
||||
/**
|
||||
* Interface for cp/mv options
|
||||
*/
|
||||
export interface MoveOptions {
|
||||
/** Optional. Whether to overwrite existing files in the destination. Defaults to true */
|
||||
force?: boolean;
|
||||
}
|
||||
/**
|
||||
* Copies a file or folder.
|
||||
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
|
||||
*
|
||||
* @param source source path
|
||||
* @param dest destination path
|
||||
* @param options optional. See CopyOptions.
|
||||
*/
|
||||
export declare function cp(source: string, dest: string, options?: CopyOptions): Promise<void>;
|
||||
/**
|
||||
* Moves a path.
|
||||
*
|
||||
* @param source source path
|
||||
* @param dest destination path
|
||||
* @param options optional. See MoveOptions.
|
||||
*/
|
||||
export declare function mv(source: string, dest: string, options?: MoveOptions): Promise<void>;
|
||||
/**
|
||||
* Remove a path recursively with force
|
||||
*
|
||||
* @param inputPath path to remove
|
||||
*/
|
||||
export declare function rmRF(inputPath: string): Promise<void>;
|
||||
/**
|
||||
* Make a directory. Creates the full path with folders in between
|
||||
* Will throw if it fails
|
||||
*
|
||||
* @param fsPath path to create
|
||||
* @returns Promise<void>
|
||||
*/
|
||||
export declare function mkdirP(fsPath: string): Promise<void>;
|
||||
/**
|
||||
* Returns path of a tool had the tool actually been invoked. Resolves via paths.
|
||||
* If you check and the tool does not exist, it will throw.
|
||||
*
|
||||
* @param tool name of the tool
|
||||
* @param check whether to check if tool exists
|
||||
* @returns Promise<string> path to tool
|
||||
*/
|
||||
export declare function which(tool: string, check?: boolean): Promise<string>;
|
289 node_modules/@actions/io/lib/io.js (generated, vendored, Normal file)
@@ -0,0 +1,289 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const childProcess = require("child_process");
|
||||
const path = require("path");
|
||||
const util_1 = require("util");
|
||||
const ioUtil = require("./io-util");
|
||||
const exec = util_1.promisify(childProcess.exec);
|
||||
/**
|
||||
* Copies a file or folder.
|
||||
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
|
||||
*
|
||||
* @param source source path
|
||||
* @param dest destination path
|
||||
* @param options optional. See CopyOptions.
|
||||
*/
|
||||
function cp(source, dest, options = {}) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const { force, recursive } = readCopyOptions(options);
|
||||
const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
|
||||
// Dest is an existing file, but not forcing
|
||||
if (destStat && destStat.isFile() && !force) {
|
||||
return;
|
||||
}
|
||||
// If dest is an existing directory, should copy inside.
|
||||
const newDest = destStat && destStat.isDirectory()
|
||||
? path.join(dest, path.basename(source))
|
||||
: dest;
|
||||
if (!(yield ioUtil.exists(source))) {
|
||||
throw new Error(`no such file or directory: ${source}`);
|
||||
}
|
||||
const sourceStat = yield ioUtil.stat(source);
|
||||
if (sourceStat.isDirectory()) {
|
||||
if (!recursive) {
|
||||
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
|
||||
}
|
||||
else {
|
||||
yield cpDirRecursive(source, newDest, 0, force);
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (path.relative(source, newDest) === '') {
|
||||
// a file cannot be copied to itself
|
||||
throw new Error(`'${newDest}' and '${source}' are the same file`);
|
||||
}
|
||||
yield copyFile(source, newDest, force);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.cp = cp;
|
||||
/**
|
||||
* Moves a path.
|
||||
*
|
||||
* @param source source path
|
||||
* @param dest destination path
|
||||
* @param options optional. See MoveOptions.
|
||||
*/
|
||||
function mv(source, dest, options = {}) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (yield ioUtil.exists(dest)) {
|
||||
let destExists = true;
|
||||
if (yield ioUtil.isDirectory(dest)) {
|
||||
// If dest is directory copy src into dest
|
||||
dest = path.join(dest, path.basename(source));
|
||||
destExists = yield ioUtil.exists(dest);
|
||||
}
|
||||
if (destExists) {
|
||||
if (options.force == null || options.force) {
|
||||
yield rmRF(dest);
|
||||
}
|
||||
else {
|
||||
throw new Error('Destination already exists');
|
||||
}
|
||||
}
|
||||
}
|
||||
yield mkdirP(path.dirname(dest));
|
||||
yield ioUtil.rename(source, dest);
|
||||
});
|
||||
}
|
||||
exports.mv = mv;
|
||||
/**
|
||||
* Remove a path recursively with force
|
||||
*
|
||||
* @param inputPath path to remove
|
||||
*/
|
||||
function rmRF(inputPath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (ioUtil.IS_WINDOWS) {
|
||||
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
|
||||
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
|
||||
try {
|
||||
if (yield ioUtil.isDirectory(inputPath, true)) {
|
||||
yield exec(`rd /s /q "${inputPath}"`);
|
||||
}
|
||||
else {
|
||||
yield exec(`del /f /a "${inputPath}"`);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
// if you try to delete a file that doesn't exist, desired result is achieved
|
||||
// other errors are valid
|
||||
if (err.code !== 'ENOENT')
|
||||
throw err;
|
||||
}
|
||||
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that
|
||||
try {
|
||||
yield ioUtil.unlink(inputPath);
|
||||
}
|
||||
catch (err) {
|
||||
// if you try to delete a file that doesn't exist, desired result is achieved
|
||||
// other errors are valid
|
||||
if (err.code !== 'ENOENT')
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
else {
|
||||
let isDir = false;
|
||||
try {
|
||||
isDir = yield ioUtil.isDirectory(inputPath);
|
||||
}
|
||||
catch (err) {
|
||||
// if you try to delete a file that doesn't exist, desired result is achieved
|
||||
// other errors are valid
|
||||
if (err.code !== 'ENOENT')
|
||||
throw err;
|
||||
return;
|
||||
}
|
||||
if (isDir) {
|
||||
yield exec(`rm -rf "${inputPath}"`);
|
||||
}
|
||||
else {
|
||||
yield ioUtil.unlink(inputPath);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.rmRF = rmRF;
|
||||
/**
|
||||
* Make a directory. Creates the full path with folders in between
|
||||
* Will throw if it fails
|
||||
*
|
||||
* @param fsPath path to create
|
||||
* @returns Promise<void>
|
||||
*/
|
||||
function mkdirP(fsPath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
yield ioUtil.mkdirP(fsPath);
|
||||
});
|
||||
}
|
||||
exports.mkdirP = mkdirP;
|
||||
/**
|
||||
* Returns path of a tool had the tool actually been invoked. Resolves via paths.
|
||||
* If you check and the tool does not exist, it will throw.
|
||||
*
|
||||
* @param tool name of the tool
|
||||
* @param check whether to check if tool exists
|
||||
* @returns Promise<string> path to tool
|
||||
*/
|
||||
function which(tool, check) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!tool) {
|
||||
throw new Error("parameter 'tool' is required");
|
||||
}
|
||||
// recursive when check=true
|
||||
if (check) {
|
||||
const result = yield which(tool, false);
|
||||
if (!result) {
|
||||
if (ioUtil.IS_WINDOWS) {
|
||||
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
|
||||
}
|
||||
else {
|
||||
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
try {
|
||||
// build the list of extensions to try
|
||||
const extensions = [];
|
||||
if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
|
||||
for (const extension of process.env.PATHEXT.split(path.delimiter)) {
|
||||
if (extension) {
|
||||
extensions.push(extension);
|
||||
}
|
||||
}
|
||||
}
|
||||
// if it's rooted, return it if exists. otherwise return empty.
|
||||
if (ioUtil.isRooted(tool)) {
|
||||
const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
|
||||
if (filePath) {
|
||||
return filePath;
|
||||
}
|
||||
return '';
|
||||
}
|
||||
// if any path separators, return empty
|
||||
if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
|
||||
return '';
|
||||
}
|
||||
// build the list of directories
|
||||
//
|
||||
// Note, technically "where" checks the current directory on Windows. From a task lib perspective,
|
||||
// it feels like we should not do this. Checking the current directory seems like more of a use
|
||||
// case of a shell, and the which() function exposed by the task lib should strive for consistency
|
||||
// across platforms.
|
||||
const directories = [];
|
||||
if (process.env.PATH) {
|
||||
for (const p of process.env.PATH.split(path.delimiter)) {
|
||||
if (p) {
|
||||
directories.push(p);
|
||||
}
|
||||
}
|
||||
}
|
||||
// return the first match
|
||||
for (const directory of directories) {
|
||||
const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
|
||||
if (filePath) {
|
||||
return filePath;
|
||||
}
|
||||
}
|
||||
return '';
|
||||
}
|
||||
catch (err) {
|
||||
throw new Error(`which failed with message ${err.message}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.which = which;
|
||||
function readCopyOptions(options) {
|
||||
const force = options.force == null ? true : options.force;
|
||||
const recursive = Boolean(options.recursive);
|
||||
return { force, recursive };
|
||||
}
|
||||
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Ensure there is not a run away recursive copy
|
||||
if (currentDepth >= 255)
|
||||
return;
|
||||
currentDepth++;
|
||||
yield mkdirP(destDir);
|
||||
const files = yield ioUtil.readdir(sourceDir);
|
||||
for (const fileName of files) {
|
||||
const srcFile = `${sourceDir}/${fileName}`;
|
||||
const destFile = `${destDir}/${fileName}`;
|
||||
const srcFileStat = yield ioUtil.lstat(srcFile);
|
||||
if (srcFileStat.isDirectory()) {
|
||||
// Recurse
|
||||
yield cpDirRecursive(srcFile, destFile, currentDepth, force);
|
||||
}
|
||||
else {
|
||||
yield copyFile(srcFile, destFile, force);
|
||||
}
|
||||
}
|
||||
// Change the mode for the newly created directory
|
||||
yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
|
||||
});
|
||||
}
|
||||
// Buffered file copy
|
||||
function copyFile(srcFile, destFile, force) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
|
||||
// unlink/re-link it
|
||||
try {
|
||||
yield ioUtil.lstat(destFile);
|
||||
yield ioUtil.unlink(destFile);
|
||||
}
|
||||
catch (e) {
|
||||
// Try to override file permission
|
||||
if (e.code === 'EPERM') {
|
||||
yield ioUtil.chmod(destFile, '0666');
|
||||
yield ioUtil.unlink(destFile);
|
||||
}
|
||||
// other errors = it doesn't exist, no work to do
|
||||
}
|
||||
// Copy over symlink
|
||||
const symlinkFull = yield ioUtil.readlink(srcFile);
|
||||
yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
|
||||
}
|
||||
else if (!(yield ioUtil.exists(destFile)) || force) {
|
||||
yield ioUtil.copyFile(srcFile, destFile);
|
||||
}
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=io.js.map
|
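A short sketch of how the check flag in which() above changes behaviour (editorial addition; the tool name is just an example):

```
const io = require('@actions/io');

(async () => {
  const found = await io.which('python');         // '' when python is not on PATH
  const strict = await io.which('python', true);  // throws instead of returning ''
  console.log(found, strict);
})();
```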
1 node_modules/@actions/io/lib/io.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
61 node_modules/@actions/io/package.json (generated, vendored, Normal file)
@@ -0,0 +1,61 @@
{
|
||||
"_from": "@actions/io@^1.0.0",
|
||||
"_id": "@actions/io@1.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-ezrJSRdqtXtdx1WXlfYL85+40F7gB39jCK9P0jZVODW3W6xUYmu6ZOEc/UmmElUwhRyDRm1R4yNZu1Joq2kuQg==",
|
||||
"_location": "/@actions/io",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@actions/io@^1.0.0",
|
||||
"name": "@actions/io",
|
||||
"escapedName": "@actions%2fio",
|
||||
"scope": "@actions",
|
||||
"rawSpec": "^1.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^1.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"#DEV:/",
|
||||
"/@actions/tool-cache"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.0.tgz",
|
||||
"_shasum": "379454174660623bb5b3bce0be8b9e2285a62bcb",
|
||||
"_spec": "@actions/io@^1.0.0",
|
||||
"_where": "C:\\Users\\damccorm\\Documents\\setup-python\\node_modules\\@actions\\tool-cache",
|
||||
"bugs": {
|
||||
"url": "https://github.com/actions/toolkit/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"deprecated": false,
|
||||
"description": "Actions io lib",
|
||||
"directories": {
|
||||
"lib": "lib",
|
||||
"test": "__tests__"
|
||||
},
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"gitHead": "a40bce7c8d382aa3dbadaa327acbc696e9390e55",
|
||||
"homepage": "https://github.com/actions/toolkit/tree/master/packages/io",
|
||||
"keywords": [
|
||||
"io",
|
||||
"actions"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "lib/io.js",
|
||||
"name": "@actions/io",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/actions/toolkit.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: run tests from root\" && exit 1",
|
||||
"tsc": "tsc"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
}
|
82 node_modules/@actions/tool-cache/README.md (generated, vendored, Normal file)
@@ -0,0 +1,82 @@
# `@actions/tool-cache`
|
||||
|
||||
> Functions necessary for downloading and caching tools.
|
||||
|
||||
## Usage
|
||||
|
||||
#### Download
|
||||
|
||||
You can use this to download tools (or other files) from a download URL:
|
||||
|
||||
```
|
||||
const tc = require('@actions/tool-cache');
|
||||
|
||||
const node12Path = await tc.downloadTool('http://nodejs.org/dist/v12.7.0/node-v12.7.0-linux-x64.tar.gz');
|
||||
```
|
||||
|
||||
#### Extract
|
||||
|
||||
These can then be extracted in platform specific ways:
|
||||
|
||||
```
|
||||
const tc = require('@actions/tool-cache');
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
tc.downloadTool('http://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.zip');
|
||||
const node12ExtractedFolder = await tc.extractZip(node12Path, 'path/to/extract/to');
|
||||
|
||||
// Or alternately
|
||||
tc.downloadTool('http://nodejs.org/dist/v12.7.0/node-v12.7.0-win-x64.7z');
|
||||
const node12ExtractedFolder = await tc.extract7z(node12Path, 'path/to/extract/to');
|
||||
}
|
||||
else {
|
||||
const node12Path = await tc.downloadTool('http://nodejs.org/dist/v12.7.0/node-v12.7.0-linux-x64.tar.gz');
|
||||
const node12ExtractedFolder = await tc.extractTar(node12Path, 'path/to/extract/to');
|
||||
}
|
||||
```
|
||||
|
||||
#### Cache
|
||||
|
||||
Finally, you can cache these directories in our tool-cache. This is useful if you want to switch back and forth between versions of a tool, or save a tool between runs for private runners (private runners are still in development but are on the roadmap).
|
||||
|
||||
You'll often want to add it to the path as part of this step:
|
||||
|
||||
```
|
||||
const tc = require('@actions/tool-cache');
|
||||
const core = require('@actions/core');
|
||||
|
||||
const node12Path = await tc.downloadTool('http://nodejs.org/dist/v12.7.0/node-v12.7.0-linux-x64.tar.gz');
|
||||
const node12ExtractedFolder = await tc.extractTar(node12Path, 'path/to/extract/to');
|
||||
|
||||
const cachedPath = await tc.cacheDir(node12ExtractedFolder, 'node', '12.7.0');
|
||||
core.addPath(cachedPath);
|
||||
```
|
||||
|
||||
You can also cache files for reuse.
|
||||
|
||||
```
|
||||
const tc = require('@actions/tool-cache');
|
||||
|
||||
tc.cacheFile('path/to/exe', 'destFileName.exe', 'myExeName', '1.1.0');
|
||||
```
|
||||
|
||||
#### Find
|
||||
|
||||
Finally, you can find directories and files you've previously cached:
|
||||
|
||||
```
|
||||
const tc = require('@actions/tool-cache');
|
||||
const core = require('@actions/core');
|
||||
|
||||
const nodeDirectory = tc.find('node', '12.x', 'x64');
|
||||
core.addPath(nodeDirectory);
|
||||
```
|
||||
|
||||
You can even find all cached versions of a tool:
|
||||
|
||||
```
|
||||
const tc = require('@actions/tool-cache');
|
||||
|
||||
const allNodeVersions = tc.findAllVersions('node');
|
||||
console.log(`Versions of node available: ${allNodeVersions}`);
|
||||
```
|
78 node_modules/@actions/tool-cache/lib/tool-cache.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,78 @@
export declare class HTTPError extends Error {
|
||||
readonly httpStatusCode: number | undefined;
|
||||
constructor(httpStatusCode: number | undefined);
|
||||
}
|
||||
/**
|
||||
* Download a tool from an url and stream it into a file
|
||||
*
|
||||
* @param url url of tool to download
|
||||
* @returns path to downloaded tool
|
||||
*/
|
||||
export declare function downloadTool(url: string): Promise<string>;
|
||||
/**
|
||||
* Extract a .7z file
|
||||
*
|
||||
* @param file path to the .7z file
|
||||
* @param dest destination directory. Optional.
|
||||
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
||||
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
||||
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
||||
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
||||
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
||||
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
||||
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
||||
* to 7zr.exe can be pass to this function.
|
||||
* @returns path to the destination directory
|
||||
*/
|
||||
export declare function extract7z(file: string, dest?: string, _7zPath?: string): Promise<string>;
|
||||
/**
|
||||
* Extract a tar
|
||||
*
|
||||
* @param file path to the tar
|
||||
* @param dest destination directory. Optional.
|
||||
* @returns path to the destination directory
|
||||
*/
|
||||
export declare function extractTar(file: string, dest?: string): Promise<string>;
|
||||
/**
|
||||
* Extract a zip
|
||||
*
|
||||
* @param file path to the zip
|
||||
* @param dest destination directory. Optional.
|
||||
* @returns path to the destination directory
|
||||
*/
|
||||
export declare function extractZip(file: string, dest?: string): Promise<string>;
|
||||
/**
|
||||
* Caches a directory and installs it into the tool cacheDir
|
||||
*
|
||||
* @param sourceDir the directory to cache into tools
|
||||
* @param tool tool name
|
||||
* @param version version of the tool. semver format
|
||||
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
||||
*/
|
||||
export declare function cacheDir(sourceDir: string, tool: string, version: string, arch?: string): Promise<string>;
|
||||
/**
|
||||
* Caches a downloaded file (GUID) and installs it
|
||||
* into the tool cache with a given targetName
|
||||
*
|
||||
* @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
|
||||
* @param targetFile the name of the file name in the tools directory
|
||||
* @param tool tool name
|
||||
* @param version version of the tool. semver format
|
||||
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
||||
*/
|
||||
export declare function cacheFile(sourceFile: string, targetFile: string, tool: string, version: string, arch?: string): Promise<string>;
|
||||
/**
|
||||
* Finds the path to a tool version in the local installed tool cache
|
||||
*
|
||||
* @param toolName name of the tool
|
||||
* @param versionSpec version of the tool
|
||||
* @param arch optional arch. defaults to arch of computer
|
||||
*/
|
||||
export declare function find(toolName: string, versionSpec: string, arch?: string): string;
|
||||
/**
|
||||
* Finds the paths to all versions of a tool that are installed in the local tool cache
|
||||
*
|
||||
* @param toolName name of the tool
|
||||
* @param arch optional arch. defaults to arch of computer
|
||||
*/
|
||||
export declare function findAllVersions(toolName: string, arch?: string): string[];
|
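A hedged sketch of the long-path workaround described for extract7z above, intended for Windows runners; the archive URL and the bundled 7zr.exe location are assumptions, not something this package ships:

```
const path = require('path');
const tc = require('@actions/tool-cache');

(async () => {
  const archivePath = await tc.downloadTool('https://example.com/some-tool.7z'); // placeholder URL
  const sevenZrPath = path.join(__dirname, 'externals', '7zr.exe');              // hypothetical bundled copy
  const extracted = await tc.extract7z(archivePath, undefined, sevenZrPath);
  console.log(`extracted to ${extracted}`);
})();
```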
436 node_modules/@actions/tool-cache/lib/tool-cache.js (generated, vendored, Normal file)
@@ -0,0 +1,436 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = require("@actions/core");
|
||||
const io = require("@actions/io");
|
||||
const fs = require("fs");
|
||||
const os = require("os");
|
||||
const path = require("path");
|
||||
const httpm = require("typed-rest-client/HttpClient");
|
||||
const semver = require("semver");
|
||||
const uuidV4 = require("uuid/v4");
|
||||
const exec_1 = require("@actions/exec/lib/exec");
|
||||
const assert_1 = require("assert");
|
||||
class HTTPError extends Error {
|
||||
constructor(httpStatusCode) {
|
||||
super(`Unexpected HTTP response: ${httpStatusCode}`);
|
||||
this.httpStatusCode = httpStatusCode;
|
||||
Object.setPrototypeOf(this, new.target.prototype);
|
||||
}
|
||||
}
|
||||
exports.HTTPError = HTTPError;
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
const userAgent = 'actions/tool-cache';
|
||||
// On load grab temp directory and cache directory and remove them from env (currently don't want to expose this)
|
||||
let tempDirectory = process.env['RUNNER_TEMP'] || '';
|
||||
let cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '';
|
||||
// If directories not found, place them in common temp locations
|
||||
if (!tempDirectory || !cacheRoot) {
|
||||
let baseLocation;
|
||||
if (IS_WINDOWS) {
|
||||
// On windows use the USERPROFILE env variable
|
||||
baseLocation = process.env['USERPROFILE'] || 'C:\\';
|
||||
}
|
||||
else {
|
||||
if (process.platform === 'darwin') {
|
||||
baseLocation = '/Users';
|
||||
}
|
||||
else {
|
||||
baseLocation = '/home';
|
||||
}
|
||||
}
|
||||
if (!tempDirectory) {
|
||||
tempDirectory = path.join(baseLocation, 'actions', 'temp');
|
||||
}
|
||||
if (!cacheRoot) {
|
||||
cacheRoot = path.join(baseLocation, 'actions', 'cache');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Download a tool from an url and stream it into a file
|
||||
*
|
||||
* @param url url of tool to download
|
||||
* @returns path to downloaded tool
|
||||
*/
|
||||
function downloadTool(url) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Wrap in a promise so that we can resolve from within stream callbacks
|
||||
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
const http = new httpm.HttpClient(userAgent, [], {
|
||||
allowRetries: true,
|
||||
maxRetries: 3
|
||||
});
|
||||
const destPath = path.join(tempDirectory, uuidV4());
|
||||
yield io.mkdirP(tempDirectory);
|
||||
core.debug(`Downloading ${url}`);
|
||||
core.debug(`Downloading ${destPath}`);
|
||||
if (fs.existsSync(destPath)) {
|
||||
throw new Error(`Destination file path ${destPath} already exists`);
|
||||
}
|
||||
const response = yield http.get(url);
|
||||
if (response.message.statusCode !== 200) {
|
||||
const err = new HTTPError(response.message.statusCode);
|
||||
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
||||
throw err;
|
||||
}
|
||||
const file = fs.createWriteStream(destPath);
|
||||
file.on('open', () => __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
const stream = response.message.pipe(file);
|
||||
stream.on('close', () => {
|
||||
core.debug('download complete');
|
||||
resolve(destPath);
|
||||
});
|
||||
}
|
||||
catch (err) {
|
||||
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
||||
reject(err);
|
||||
}
|
||||
}));
|
||||
file.on('error', err => {
|
||||
file.end();
|
||||
reject(err);
|
||||
});
|
||||
}
|
||||
catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}));
|
||||
});
|
||||
}
|
||||
exports.downloadTool = downloadTool;
|
||||
/**
|
||||
* Extract a .7z file
|
||||
*
|
||||
* @param file path to the .7z file
|
||||
* @param dest destination directory. Optional.
|
||||
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
||||
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
||||
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
||||
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
||||
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
||||
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
||||
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
||||
 * to 7zr.exe can be passed to this function.
|
||||
* @returns path to the destination directory
|
||||
*/
|
||||
function extract7z(file, dest, _7zPath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
|
||||
assert_1.ok(file, 'parameter "file" is required');
|
||||
dest = dest || (yield _createExtractFolder(dest));
|
||||
const originalCwd = process.cwd();
|
||||
process.chdir(dest);
|
||||
if (_7zPath) {
|
||||
try {
|
||||
const args = [
|
||||
'x',
|
||||
'-bb1',
|
||||
'-bd',
|
||||
'-sccUTF-8',
|
||||
file
|
||||
];
|
||||
const options = {
|
||||
silent: true
|
||||
};
|
||||
yield exec_1.exec(`"${_7zPath}"`, args, options);
|
||||
}
|
||||
finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
}
|
||||
else {
|
||||
const escapedScript = path
|
||||
.join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
|
||||
.replace(/'/g, "''")
|
||||
.replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
||||
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
||||
const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
||||
const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
|
||||
const args = [
|
||||
'-NoLogo',
|
||||
'-Sta',
|
||||
'-NoProfile',
|
||||
'-NonInteractive',
|
||||
'-ExecutionPolicy',
|
||||
'Unrestricted',
|
||||
'-Command',
|
||||
command
|
||||
];
|
||||
const options = {
|
||||
silent: true
|
||||
};
|
||||
try {
|
||||
const powershellPath = yield io.which('powershell', true);
|
||||
yield exec_1.exec(`"${powershellPath}"`, args, options);
|
||||
}
|
||||
finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
}
|
||||
return dest;
|
||||
});
|
||||
}
|
||||
exports.extract7z = extract7z;
|
||||
/**
|
||||
* Extract a tar
|
||||
*
|
||||
* @param file path to the tar
|
||||
* @param dest destination directory. Optional.
|
||||
* @returns path to the destination directory
|
||||
*/
|
||||
function extractTar(file, dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!file) {
|
||||
throw new Error("parameter 'file' is required");
|
||||
}
|
||||
dest = dest || (yield _createExtractFolder(dest));
|
||||
const tarPath = yield io.which('tar', true);
|
||||
yield exec_1.exec(`"${tarPath}"`, ['xzC', dest, '-f', file]);
|
||||
return dest;
|
||||
});
|
||||
}
|
||||
exports.extractTar = extractTar;
|
||||
/**
|
||||
* Extract a zip
|
||||
*
|
||||
* @param file path to the zip
|
||||
* @param dest destination directory. Optional.
|
||||
* @returns path to the destination directory
|
||||
*/
|
||||
function extractZip(file, dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!file) {
|
||||
throw new Error("parameter 'file' is required");
|
||||
}
|
||||
dest = dest || (yield _createExtractFolder(dest));
|
||||
if (IS_WINDOWS) {
|
||||
yield extractZipWin(file, dest);
|
||||
}
|
||||
else {
|
||||
yield extractZipNix(file, dest);
|
||||
}
|
||||
return dest;
|
||||
});
|
||||
}
|
||||
exports.extractZip = extractZip;
|
||||
function extractZipWin(file, dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// build the powershell command
|
||||
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
||||
const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
||||
const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
|
||||
// run powershell
|
||||
const powershellPath = yield io.which('powershell');
|
||||
const args = [
|
||||
'-NoLogo',
|
||||
'-Sta',
|
||||
'-NoProfile',
|
||||
'-NonInteractive',
|
||||
'-ExecutionPolicy',
|
||||
'Unrestricted',
|
||||
'-Command',
|
||||
command
|
||||
];
|
||||
yield exec_1.exec(`"${powershellPath}"`, args);
|
||||
});
|
||||
}
|
||||
function extractZipNix(file, dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const unzipPath = path.join(__dirname, '..', 'scripts', 'externals', 'unzip');
|
||||
yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Caches a directory and installs it into the tool cacheDir
|
||||
*
|
||||
* @param sourceDir the directory to cache into tools
|
||||
* @param tool tool name
|
||||
* @param version version of the tool. semver format
|
||||
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
||||
*/
|
||||
function cacheDir(sourceDir, tool, version, arch) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
version = semver.clean(version) || version;
|
||||
arch = arch || os.arch();
|
||||
core.debug(`Caching tool ${tool} ${version} ${arch}`);
|
||||
core.debug(`source dir: ${sourceDir}`);
|
||||
if (!fs.statSync(sourceDir).isDirectory()) {
|
||||
throw new Error('sourceDir is not a directory');
|
||||
}
|
||||
// Create the tool dir
|
||||
const destPath = yield _createToolPath(tool, version, arch);
|
||||
// copy each child item. do not move. move can fail on Windows
|
||||
// due to anti-virus software having an open handle on a file.
|
||||
for (const itemName of fs.readdirSync(sourceDir)) {
|
||||
const s = path.join(sourceDir, itemName);
|
||||
yield io.cp(s, destPath, { recursive: true });
|
||||
}
|
||||
// write .complete
|
||||
_completeToolPath(tool, version, arch);
|
||||
return destPath;
|
||||
});
|
||||
}
|
||||
exports.cacheDir = cacheDir;
|
||||
/**
|
||||
* Caches a downloaded file (GUID) and installs it
|
||||
* into the tool cache with a given targetName
|
||||
*
|
||||
* @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
|
||||
 * @param targetFile the name of the file in the tools directory
|
||||
* @param tool tool name
|
||||
* @param version version of the tool. semver format
|
||||
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
||||
*/
|
||||
function cacheFile(sourceFile, targetFile, tool, version, arch) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
version = semver.clean(version) || version;
|
||||
arch = arch || os.arch();
|
||||
core.debug(`Caching tool ${tool} ${version} ${arch}`);
|
||||
core.debug(`source file: ${sourceFile}`);
|
||||
if (!fs.statSync(sourceFile).isFile()) {
|
||||
throw new Error('sourceFile is not a file');
|
||||
}
|
||||
// create the tool dir
|
||||
const destFolder = yield _createToolPath(tool, version, arch);
|
||||
// copy instead of move. move can fail on Windows due to
|
||||
// anti-virus software having an open handle on a file.
|
||||
const destPath = path.join(destFolder, targetFile);
|
||||
core.debug(`destination file ${destPath}`);
|
||||
yield io.cp(sourceFile, destPath);
|
||||
// write .complete
|
||||
_completeToolPath(tool, version, arch);
|
||||
return destFolder;
|
||||
});
|
||||
}
|
||||
exports.cacheFile = cacheFile;
|
||||
/**
|
||||
* Finds the path to a tool version in the local installed tool cache
|
||||
*
|
||||
* @param toolName name of the tool
|
||||
* @param versionSpec version of the tool
|
||||
* @param arch optional arch. defaults to arch of computer
|
||||
*/
|
||||
function find(toolName, versionSpec, arch) {
|
||||
if (!toolName) {
|
||||
throw new Error('toolName parameter is required');
|
||||
}
|
||||
if (!versionSpec) {
|
||||
throw new Error('versionSpec parameter is required');
|
||||
}
|
||||
arch = arch || os.arch();
|
||||
// attempt to resolve an explicit version
|
||||
if (!_isExplicitVersion(versionSpec)) {
|
||||
const localVersions = findAllVersions(toolName, arch);
|
||||
const match = _evaluateVersions(localVersions, versionSpec);
|
||||
versionSpec = match;
|
||||
}
|
||||
// check for the explicit version in the cache
|
||||
let toolPath = '';
|
||||
if (versionSpec) {
|
||||
versionSpec = semver.clean(versionSpec) || '';
|
||||
const cachePath = path.join(cacheRoot, toolName, versionSpec, arch);
|
||||
core.debug(`checking cache: ${cachePath}`);
|
||||
if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
|
||||
core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
|
||||
toolPath = cachePath;
|
||||
}
|
||||
else {
|
||||
core.debug('not found');
|
||||
}
|
||||
}
|
||||
return toolPath;
|
||||
}
|
||||
exports.find = find;
|
||||
/**
|
||||
* Finds the paths to all versions of a tool that are installed in the local tool cache
|
||||
*
|
||||
* @param toolName name of the tool
|
||||
* @param arch optional arch. defaults to arch of computer
|
||||
*/
|
||||
function findAllVersions(toolName, arch) {
|
||||
const versions = [];
|
||||
arch = arch || os.arch();
|
||||
const toolPath = path.join(cacheRoot, toolName);
|
||||
if (fs.existsSync(toolPath)) {
|
||||
const children = fs.readdirSync(toolPath);
|
||||
for (const child of children) {
|
||||
if (_isExplicitVersion(child)) {
|
||||
const fullPath = path.join(toolPath, child, arch || '');
|
||||
if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
|
||||
versions.push(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return versions;
|
||||
}
|
||||
exports.findAllVersions = findAllVersions;
|
||||
function _createExtractFolder(dest) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!dest) {
|
||||
// create a temp dir
|
||||
dest = path.join(tempDirectory, uuidV4());
|
||||
}
|
||||
yield io.mkdirP(dest);
|
||||
return dest;
|
||||
});
|
||||
}
|
||||
function _createToolPath(tool, version, arch) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
|
||||
core.debug(`destination ${folderPath}`);
|
||||
const markerPath = `${folderPath}.complete`;
|
||||
yield io.rmRF(folderPath);
|
||||
yield io.rmRF(markerPath);
|
||||
yield io.mkdirP(folderPath);
|
||||
return folderPath;
|
||||
});
|
||||
}
|
||||
function _completeToolPath(tool, version, arch) {
|
||||
const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
|
||||
const markerPath = `${folderPath}.complete`;
|
||||
fs.writeFileSync(markerPath, '');
|
||||
core.debug('finished caching tool');
|
||||
}
|
||||
function _isExplicitVersion(versionSpec) {
|
||||
const c = semver.clean(versionSpec) || '';
|
||||
core.debug(`isExplicit: ${c}`);
|
||||
const valid = semver.valid(c) != null;
|
||||
core.debug(`explicit? ${valid}`);
|
||||
return valid;
|
||||
}
|
||||
function _evaluateVersions(versions, versionSpec) {
|
||||
let version = '';
|
||||
core.debug(`evaluating ${versions.length} versions`);
|
||||
versions = versions.sort((a, b) => {
|
||||
if (semver.gt(a, b)) {
|
||||
return 1;
|
||||
}
|
||||
return -1;
|
||||
});
|
||||
for (let i = versions.length - 1; i >= 0; i--) {
|
||||
const potential = versions[i];
|
||||
const satisfied = semver.satisfies(potential, versionSpec);
|
||||
if (satisfied) {
|
||||
version = potential;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (version) {
|
||||
core.debug(`matched: ${version}`);
|
||||
}
|
||||
else {
|
||||
core.debug('match not found');
|
||||
}
|
||||
return version;
|
||||
}
|
||||
//# sourceMappingURL=tool-cache.js.map
|
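For orientation only: `_createToolPath`, `_completeToolPath` and `find` above all agree on one on-disk layout, `<RUNNER_TOOL_CACHE>/<tool>/<clean version>/<arch>` plus an empty sibling marker file `<arch>.complete`, and `find()` reports a version as cached only when both exist. A minimal sketch of those paths (the cache root fallback is an assumption for illustration):

```js
const path = require('path');

// Assumed cache root for illustration; the real value comes from RUNNER_TOOL_CACHE.
const cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '/opt/hostedtoolcache';

const versionDir = path.join(cacheRoot, 'Python', '3.7.4', 'x64'); // tool payload
const marker = `${versionDir}.complete`;                           // empty marker written by _completeToolPath
console.log(versionDir, marker);
```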
1
node_modules/@actions/tool-cache/lib/tool-cache.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
75
node_modules/@actions/tool-cache/package.json
generated
vendored
Normal file
|
@ -0,0 +1,75 @@
|
|||
{
|
||||
"_from": "@actions/tool-cache@^1.0.0",
|
||||
"_id": "@actions/tool-cache@1.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-l3zT0IfDfi5Ik5aMpnXqGHGATxN8xa9ls4ue+X/CBXpPhRMRZS4vcuh5Q9T98WAGbkysRCfhpbksTPHIcKnNwQ==",
|
||||
"_location": "/@actions/tool-cache",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@actions/tool-cache@^1.0.0",
|
||||
"name": "@actions/tool-cache",
|
||||
"escapedName": "@actions%2ftool-cache",
|
||||
"scope": "@actions",
|
||||
"rawSpec": "^1.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^1.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-1.0.0.tgz",
|
||||
"_shasum": "a9ac414bd2e0bf1f5f0302f029193c418d344c09",
|
||||
"_spec": "@actions/tool-cache@^1.0.0",
|
||||
"_where": "C:\\Users\\damccorm\\Documents\\setup-python",
|
||||
"bugs": {
|
||||
"url": "https://github.com/actions/toolkit/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.0.0",
|
||||
"@actions/exec": "^1.0.0",
|
||||
"@actions/io": "^1.0.0",
|
||||
"semver": "^6.1.0",
|
||||
"typed-rest-client": "^1.4.0",
|
||||
"uuid": "^3.3.2"
|
||||
},
|
||||
"deprecated": false,
|
||||
"description": "Actions tool-cache lib",
|
||||
"devDependencies": {
|
||||
"@types/nock": "^10.0.3",
|
||||
"@types/semver": "^6.0.0",
|
||||
"@types/uuid": "^3.4.4",
|
||||
"nock": "^10.0.6"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "lib",
|
||||
"test": "__tests__"
|
||||
},
|
||||
"files": [
|
||||
"lib",
|
||||
"scripts"
|
||||
],
|
||||
"gitHead": "a40bce7c8d382aa3dbadaa327acbc696e9390e55",
|
||||
"homepage": "https://github.com/actions/toolkit/tree/master/packages/exec",
|
||||
"keywords": [
|
||||
"exec",
|
||||
"actions"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "lib/tool-cache.js",
|
||||
"name": "@actions/tool-cache",
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/actions/toolkit.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: run tests from root\" && exit 1",
|
||||
"tsc": "tsc"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
}
|
60
node_modules/@actions/tool-cache/scripts/Invoke-7zdec.ps1
generated
vendored
Normal file
|
@ -0,0 +1,60 @@
|
|||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Source,
|
||||
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Target)
|
||||
|
||||
# This script translates the output from 7zdec into UTF8. Node has limited
|
||||
# built-in support for encodings.
|
||||
#
|
||||
# 7zdec uses the system default code page. The system default code page varies
|
||||
# depending on the locale configuration. On an en-US box, the system default code
|
||||
# page is Windows-1252.
|
||||
#
|
||||
# Note, on a typical en-US box, testing with the 'ç' character is a good way to
|
||||
# determine whether data is passed correctly between processes. This is because
|
||||
# the 'ç' character has a different code point across each of the common encodings
|
||||
# on a typical en-US box, i.e.
|
||||
# 1) the default console-output code page (IBM437)
|
||||
# 2) the system default code page (i.e. CP_ACP) (Windows-1252)
|
||||
# 3) UTF8
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Redefine the wrapper over STDOUT to use UTF8. Node expects UTF8 by default.
|
||||
$stdout = [System.Console]::OpenStandardOutput()
|
||||
$utf8 = New-Object System.Text.UTF8Encoding($false) # do not emit BOM
|
||||
$writer = New-Object System.IO.StreamWriter($stdout, $utf8)
|
||||
[System.Console]::SetOut($writer)
|
||||
|
||||
# All subsequent output must be written using [System.Console]::WriteLine(). In
|
||||
# PowerShell 4, Write-Host and Out-Default do not consider the updated stream writer.
|
||||
|
||||
Set-Location -LiteralPath $Target
|
||||
|
||||
# Print the ##command.
|
||||
$_7zdec = Join-Path -Path "$PSScriptRoot" -ChildPath "externals/7zdec.exe"
|
||||
[System.Console]::WriteLine("##[command]$_7zdec x `"$Source`"")
|
||||
|
||||
# The $OutputEncoding variable instructs PowerShell how to interpret the output
|
||||
# from the external command.
|
||||
$OutputEncoding = [System.Text.Encoding]::Default
|
||||
|
||||
# Note, the output from 7zdec.exe needs to be iterated over. Otherwise PowerShell.exe
|
||||
# will launch the external command in such a way that it inherits the streams.
|
||||
& $_7zdec x $Source 2>&1 |
|
||||
ForEach-Object {
|
||||
if ($_ -is [System.Management.Automation.ErrorRecord]) {
|
||||
[System.Console]::WriteLine($_.Exception.Message)
|
||||
}
|
||||
else {
|
||||
[System.Console]::WriteLine($_)
|
||||
}
|
||||
}
|
||||
[System.Console]::WriteLine("##[debug]7zdec.exe exit code '$LASTEXITCODE'")
|
||||
[System.Console]::Out.Flush()
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
exit $LASTEXITCODE
|
||||
}
|
BIN
node_modules/@actions/tool-cache/scripts/externals/7zdec.exe
generated
vendored
Normal file
Binary file not shown.
BIN
node_modules/@actions/tool-cache/scripts/externals/unzip
generated
vendored
Normal file
Binary file not shown.
70
node_modules/semver/CHANGELOG.md
generated
vendored
Normal file
|
@ -0,0 +1,70 @@
|
|||
# changes log
|
||||
|
||||
## 6.2.0
|
||||
|
||||
* Coerce numbers to strings when passed to semver.coerce()
|
||||
* Add `rtl` option to coerce from right to left
|
||||
|
||||
## 6.1.3
|
||||
|
||||
* Handle X-ranges properly in includePrerelease mode
|
||||
|
||||
## 6.1.2
|
||||
|
||||
* Do not throw when testing invalid version strings
|
||||
|
||||
## 6.1.1
|
||||
|
||||
* Add options support for semver.coerce()
|
||||
* Handle undefined version passed to Range.test
|
||||
|
||||
## 6.1.0
|
||||
|
||||
* Add semver.compareBuild function
|
||||
* Support `*` in semver.intersects
|
||||
|
||||
## 6.0
|
||||
|
||||
* Fix `intersects` logic.
|
||||
|
||||
This is technically a bug fix, but since it is also a change to behavior
|
||||
that may require users updating their code, it is marked as a major
|
||||
version increment.
|
||||
|
||||
## 5.7
|
||||
|
||||
* Add `minVersion` method
|
||||
|
||||
## 5.6
|
||||
|
||||
* Move boolean `loose` param to an options object, with
|
||||
backwards-compatibility protection.
|
||||
* Add ability to opt out of special prerelease version handling with
|
||||
the `includePrerelease` option flag.
|
||||
|
||||
## 5.5
|
||||
|
||||
* Add version coercion capabilities
|
||||
|
||||
## 5.4
|
||||
|
||||
* Add intersection checking
|
||||
|
||||
## 5.3
|
||||
|
||||
* Add `minSatisfying` method
|
||||
|
||||
## 5.2
|
||||
|
||||
* Add `prerelease(v)` that returns prerelease components
|
||||
|
||||
## 5.1
|
||||
|
||||
* Add Backus-Naur for ranges
|
||||
* Remove excessively cute inspection methods
|
||||
|
||||
## 5.0
|
||||
|
||||
* Remove AMD/Browserified build artifacts
|
||||
* Fix ltr and gtr when using the `*` range
|
||||
* Fix for range `*` with a prerelease identifier
|
15
node_modules/semver/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
443
node_modules/semver/README.md
generated
vendored
Normal file
|
@ -0,0 +1,443 @@
|
|||
semver(1) -- The semantic versioner for npm
|
||||
===========================================
|
||||
|
||||
## Install
|
||||
|
||||
```bash
|
||||
npm install semver
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
As a node module:
|
||||
|
||||
```js
|
||||
const semver = require('semver')
|
||||
|
||||
semver.valid('1.2.3') // '1.2.3'
|
||||
semver.valid('a.b.c') // null
|
||||
semver.clean(' =v1.2.3 ') // '1.2.3'
|
||||
semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
|
||||
semver.gt('1.2.3', '9.8.7') // false
|
||||
semver.lt('1.2.3', '9.8.7') // true
|
||||
semver.minVersion('>=1.0.0') // '1.0.0'
|
||||
semver.valid(semver.coerce('v2')) // '2.0.0'
|
||||
semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
|
||||
```
|
||||
|
||||
As a command-line utility:
|
||||
|
||||
```
|
||||
$ semver -h
|
||||
|
||||
A JavaScript implementation of the https://semver.org/ specification
|
||||
Copyright Isaac Z. Schlueter
|
||||
|
||||
Usage: semver [options] <version> [<version> [...]]
|
||||
Prints valid versions sorted by SemVer precedence
|
||||
|
||||
Options:
|
||||
-r --range <range>
|
||||
Print versions that match the specified range.
|
||||
|
||||
-i --increment [<level>]
|
||||
Increment a version by the specified level. Level can
|
||||
be one of: major, minor, patch, premajor, preminor,
|
||||
prepatch, or prerelease. Default level is 'patch'.
|
||||
Only one version may be specified.
|
||||
|
||||
--preid <identifier>
|
||||
Identifier to be used to prefix premajor, preminor,
|
||||
prepatch or prerelease version increments.
|
||||
|
||||
-l --loose
|
||||
Interpret versions and ranges loosely
|
||||
|
||||
-p --include-prerelease
|
||||
Always include prerelease versions in range matching
|
||||
|
||||
-c --coerce
|
||||
Coerce a string into SemVer if possible
|
||||
(does not imply --loose)
|
||||
|
||||
--rtl
|
||||
Coerce version strings right to left
|
||||
|
||||
--ltr
|
||||
Coerce version strings left to right (default)
|
||||
|
||||
Program exits successfully if any valid version satisfies
|
||||
all supplied ranges, and prints all satisfying versions.
|
||||
|
||||
If no satisfying versions are found, then exits failure.
|
||||
|
||||
Versions are printed in ascending order, so supplying
|
||||
multiple versions to the utility will just sort them.
|
||||
```
|
||||
|
||||
## Versions
|
||||
|
||||
A "version" is described by the `v2.0.0` specification found at
|
||||
<https://semver.org/>.
|
||||
|
||||
A leading `"="` or `"v"` character is stripped off and ignored.
|
||||
|
||||
## Ranges
|
||||
|
||||
A `version range` is a set of `comparators` which specify versions
|
||||
that satisfy the range.
|
||||
|
||||
A `comparator` is composed of an `operator` and a `version`. The set
|
||||
of primitive `operators` is:
|
||||
|
||||
* `<` Less than
|
||||
* `<=` Less than or equal to
|
||||
* `>` Greater than
|
||||
* `>=` Greater than or equal to
|
||||
* `=` Equal. If no operator is specified, then equality is assumed,
|
||||
so this operator is optional, but MAY be included.
|
||||
|
||||
For example, the comparator `>=1.2.7` would match the versions
|
||||
`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
|
||||
or `1.1.0`.
|
||||
|
||||
Comparators can be joined by whitespace to form a `comparator set`,
|
||||
which is satisfied by the **intersection** of all of the comparators
|
||||
it includes.
|
||||
|
||||
A range is composed of one or more comparator sets, joined by `||`. A
|
||||
version matches a range if and only if every comparator in at least
|
||||
one of the `||`-separated comparator sets is satisfied by the version.
|
||||
|
||||
For example, the range `>=1.2.7 <1.3.0` would match the versions
|
||||
`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
|
||||
or `1.1.0`.
|
||||
|
||||
The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
|
||||
`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
|
||||
|
||||
### Prerelease Tags
|
||||
|
||||
If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
|
||||
it will only be allowed to satisfy comparator sets if at least one
|
||||
comparator with the same `[major, minor, patch]` tuple also has a
|
||||
prerelease tag.
|
||||
|
||||
For example, the range `>1.2.3-alpha.3` would be allowed to match the
|
||||
version `1.2.3-alpha.7`, but it would *not* be satisfied by
|
||||
`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
|
||||
than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
|
||||
range only accepts prerelease tags on the `1.2.3` version. The
|
||||
version `3.4.5` *would* satisfy the range, because it does not have a
|
||||
prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
|
||||
|
||||
The purpose for this behavior is twofold. First, prerelease versions
|
||||
frequently are updated very quickly, and contain many breaking changes
|
||||
that are (by the author's design) not yet fit for public consumption.
|
||||
Therefore, by default, they are excluded from range matching
|
||||
semantics.
|
||||
|
||||
Second, a user who has opted into using a prerelease version has
|
||||
clearly indicated the intent to use *that specific* set of
|
||||
alpha/beta/rc versions. By including a prerelease tag in the range,
|
||||
the user is indicating that they are aware of the risk. However, it
|
||||
is still not appropriate to assume that they have opted into taking a
|
||||
similar risk on the *next* set of prerelease versions.
|
||||
|
||||
Note that this behavior can be suppressed (treating all prerelease
|
||||
versions as if they were normal versions, for the purpose of range
|
||||
matching) by setting the `includePrerelease` flag on the options
|
||||
object to any
|
||||
[functions](https://github.com/npm/node-semver#functions) that do
|
||||
range matching.
|
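An illustrative sketch of the prerelease behaviour just described (return values assume semver 6.x):

```js
const semver = require('semver')

semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true: prerelease on the same [major, minor, patch] tuple
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false: prerelease on a different tuple is excluded
semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true: normal versions compare as usual
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3', { includePrerelease: true }) // true: opt in
```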
||||
|
||||
#### Prerelease Identifiers
|
||||
|
||||
The method `.inc` takes an additional `identifier` string argument that
|
||||
will append the value of the string as a prerelease identifier:
|
||||
|
||||
```javascript
|
||||
semver.inc('1.2.3', 'prerelease', 'beta')
|
||||
// '1.2.4-beta.0'
|
||||
```
|
||||
|
||||
command-line example:
|
||||
|
||||
```bash
|
||||
$ semver 1.2.3 -i prerelease --preid beta
|
||||
1.2.4-beta.0
|
||||
```
|
||||
|
||||
Which then can be used to increment further:
|
||||
|
||||
```bash
|
||||
$ semver 1.2.4-beta.0 -i prerelease
|
||||
1.2.4-beta.1
|
||||
```
|
||||
|
||||
### Advanced Range Syntax
|
||||
|
||||
Advanced range syntax desugars to primitive comparators in
|
||||
deterministic ways.
|
||||
|
||||
Advanced ranges may be combined in the same way as primitive
|
||||
comparators using white space or `||`.
|
||||
|
||||
#### Hyphen Ranges `X.Y.Z - A.B.C`
|
||||
|
||||
Specifies an inclusive set.
|
||||
|
||||
* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
|
||||
|
||||
If a partial version is provided as the first version in the inclusive
|
||||
range, then the missing pieces are replaced with zeroes.
|
||||
|
||||
* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
|
||||
|
||||
If a partial version is provided as the second version in the
|
||||
inclusive range, then all versions that start with the supplied parts
|
||||
of the tuple are accepted, but nothing that would be greater than the
|
||||
provided tuple parts.
|
||||
|
||||
* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
|
||||
* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
|
||||
|
||||
#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
|
||||
|
||||
Any of `X`, `x`, or `*` may be used to "stand in" for one of the
|
||||
numeric values in the `[major, minor, patch]` tuple.
|
||||
|
||||
* `*` := `>=0.0.0` (Any version satisfies)
|
||||
* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
|
||||
* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
|
||||
|
||||
A partial version range is treated as an X-Range, so the special
|
||||
character is in fact optional.
|
||||
|
||||
* `""` (empty string) := `*` := `>=0.0.0`
|
||||
* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
|
||||
* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
|
||||
|
||||
#### Tilde Ranges `~1.2.3` `~1.2` `~1`
|
||||
|
||||
Allows patch-level changes if a minor version is specified on the
|
||||
comparator. Allows minor-level changes if not.
|
||||
|
||||
* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
|
||||
* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
|
||||
* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
|
||||
* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
|
||||
* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
|
||||
* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
|
||||
* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
|
||||
the `1.2.3` version will be allowed, if they are greater than or
|
||||
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
|
||||
`1.2.4-beta.2` would not, because it is a prerelease of a
|
||||
different `[major, minor, patch]` tuple.
|
||||
|
||||
#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
|
||||
|
||||
Allows changes that do not modify the left-most non-zero element in the
|
||||
`[major, minor, patch]` tuple. In other words, this allows patch and
|
||||
minor updates for versions `1.0.0` and above, patch updates for
|
||||
versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
|
||||
|
||||
Many authors treat a `0.x` version as if the `x` were the major
|
||||
"breaking-change" indicator.
|
||||
|
||||
Caret ranges are ideal when an author may make breaking changes
|
||||
between `0.2.4` and `0.3.0` releases, which is a common practice.
|
||||
However, it presumes that there will *not* be breaking changes between
|
||||
`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
|
||||
additive (but non-breaking), according to commonly observed practices.
|
||||
|
||||
* `^1.2.3` := `>=1.2.3 <2.0.0`
|
||||
* `^0.2.3` := `>=0.2.3 <0.3.0`
|
||||
* `^0.0.3` := `>=0.0.3 <0.0.4`
|
||||
* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
|
||||
the `1.2.3` version will be allowed, if they are greater than or
|
||||
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
|
||||
`1.2.4-beta.2` would not, because it is a prerelease of a
|
||||
different `[major, minor, patch]` tuple.
|
||||
* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
|
||||
`0.0.3` version *only* will be allowed, if they are greater than or
|
||||
equal to `beta`. So, `0.0.3-pr.2` would be allowed.
|
||||
|
||||
When parsing caret ranges, a missing `patch` value desugars to the
|
||||
number `0`, but will allow flexibility within that value, even if the
|
||||
major and minor versions are both `0`.
|
||||
|
||||
* `^1.2.x` := `>=1.2.0 <2.0.0`
|
||||
* `^0.0.x` := `>=0.0.0 <0.1.0`
|
||||
* `^0.0` := `>=0.0.0 <0.1.0`
|
||||
|
||||
Missing `minor` and `patch` values will desugar to zero, but also
|
||||
allow flexibility within those values, even if the major version is
|
||||
zero.
|
||||
|
||||
* `^1.x` := `>=1.0.0 <2.0.0`
|
||||
* `^0.x` := `>=0.0.0 <1.0.0`
|
||||
|
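A quick, illustrative check of the tilde and caret desugarings listed above (results assume semver 6.x):

```js
const semver = require('semver')

semver.satisfies('1.9.9', '^1.2.3') // true:  patch and minor updates allowed at >=1.0.0
semver.satisfies('2.0.0', '^1.2.3') // false: left-most non-zero element changed
semver.satisfies('0.2.9', '^0.2.3') // true
semver.satisfies('0.3.0', '^0.2.3') // false
semver.satisfies('1.2.9', '~1.2.3') // true:  patch-level change
semver.satisfies('1.3.0', '~1.2.3') // false: minor change
```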
||||
### Range Grammar
|
||||
|
||||
Putting all this together, here is a Backus-Naur grammar for ranges,
|
||||
for the benefit of parser authors:
|
||||
|
||||
```bnf
|
||||
range-set ::= range ( logical-or range ) *
|
||||
logical-or ::= ( ' ' ) * '||' ( ' ' ) *
|
||||
range ::= hyphen | simple ( ' ' simple ) * | ''
|
||||
hyphen ::= partial ' - ' partial
|
||||
simple ::= primitive | partial | tilde | caret
|
||||
primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
|
||||
partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
|
||||
xr ::= 'x' | 'X' | '*' | nr
|
||||
nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
|
||||
tilde ::= '~' partial
|
||||
caret ::= '^' partial
|
||||
qualifier ::= ( '-' pre )? ( '+' build )?
|
||||
pre ::= parts
|
||||
build ::= parts
|
||||
parts ::= part ( '.' part ) *
|
||||
part ::= nr | [-0-9A-Za-z]+
|
||||
```
|
||||
|
||||
## Functions
|
||||
|
||||
All methods and classes take a final `options` object argument. All
|
||||
options in this object are `false` by default. The options supported
|
||||
are:
|
||||
|
||||
- `loose` Be more forgiving about not-quite-valid semver strings.
|
||||
(Any resulting output will always be 100% strict compliant, of
|
||||
course.) For backwards compatibility reasons, if the `options`
|
||||
argument is a boolean value instead of an object, it is interpreted
|
||||
to be the `loose` param.
|
||||
- `includePrerelease` Set to suppress the [default
|
||||
behavior](https://github.com/npm/node-semver#prerelease-tags) of
|
||||
excluding prerelease tagged versions from ranges unless they are
|
||||
explicitly opted into.
|
||||
|
||||
Strict-mode Comparators and Ranges will be strict about the SemVer
|
||||
strings that they parse.
|
||||
|
||||
* `valid(v)`: Return the parsed version, or null if it's not valid.
|
||||
* `inc(v, release)`: Return the version incremented by the release
|
||||
type (`major`, `premajor`, `minor`, `preminor`, `patch`,
|
||||
`prepatch`, or `prerelease`), or null if it's not valid
|
||||
* `premajor` in one call will bump the version up to the next major
|
||||
version and down to a prerelease of that major version.
|
||||
`preminor`, and `prepatch` work the same way.
|
||||
* If called from a non-prerelease version, the `prerelease` will work the
|
||||
same as `prepatch`. It increments the patch version, then makes a
|
||||
prerelease. If the input version is already a prerelease it simply
|
||||
increments it.
|
||||
* `prerelease(v)`: Returns an array of prerelease components, or null
|
||||
if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
|
||||
* `major(v)`: Return the major version number.
|
||||
* `minor(v)`: Return the minor version number.
|
||||
* `patch(v)`: Return the patch version number.
|
||||
* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
|
||||
or comparators intersect.
|
||||
* `parse(v)`: Attempt to parse a string as a semantic version, returning either
|
||||
a `SemVer` object or `null`.
|
||||
|
||||
### Comparison
|
||||
|
||||
* `gt(v1, v2)`: `v1 > v2`
|
||||
* `gte(v1, v2)`: `v1 >= v2`
|
||||
* `lt(v1, v2)`: `v1 < v2`
|
||||
* `lte(v1, v2)`: `v1 <= v2`
|
||||
* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
|
||||
even if they're not the exact same string. You already know how to
|
||||
compare strings.
|
||||
* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
|
||||
* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
|
||||
the corresponding function above. `"==="` and `"!=="` do simple
|
||||
string comparison, but are included for completeness. Throws if an
|
||||
invalid comparison string is provided.
|
||||
* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
|
||||
`v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
|
||||
* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
|
||||
in descending order when passed to `Array.sort()`.
|
||||
* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions
|
||||
are equal. Sorts in ascending order if passed to `Array.sort()`.
|
||||
* `diff(v1, v2)`: Returns difference between two versions by the release type
|
||||
(`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
|
||||
or null if the versions are the same.
|
||||
|
||||
### Comparators
|
||||
|
||||
* `intersects(comparator)`: Return true if the comparators intersect
|
||||
|
||||
### Ranges
|
||||
|
||||
* `validRange(range)`: Return the valid range or null if it's not valid
|
||||
* `satisfies(version, range)`: Return true if the version satisfies the
|
||||
range.
|
||||
* `maxSatisfying(versions, range)`: Return the highest version in the list
|
||||
that satisfies the range, or `null` if none of them do.
|
||||
* `minSatisfying(versions, range)`: Return the lowest version in the list
|
||||
that satisfies the range, or `null` if none of them do.
|
||||
* `minVersion(range)`: Return the lowest version that can possibly match
|
||||
the given range.
|
||||
* `gtr(version, range)`: Return `true` if version is greater than all the
|
||||
versions possible in the range.
|
||||
* `ltr(version, range)`: Return `true` if version is less than all the
|
||||
versions possible in the range.
|
||||
* `outside(version, range, hilo)`: Return true if the version is outside
|
||||
the bounds of the range in either the high or low direction. The
|
||||
`hilo` argument must be either the string `'>'` or `'<'`. (This is
|
||||
the function called by `gtr` and `ltr`.)
|
||||
* `intersects(range)`: Return true if any of the range's comparators intersect
|
||||
|
||||
Note that, since ranges may be non-contiguous, a version might not be
|
||||
greater than a range, less than a range, *or* satisfy a range! For
|
||||
example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
|
||||
until `2.0.0`, so the version `1.2.10` would not be greater than the
|
||||
range (because `2.0.1` satisfies, which is higher), nor less than the
|
||||
range (since `1.2.8` satisfies, which is lower), and it also does not
|
||||
satisfy the range.
|
||||
|
||||
If you want to know if a version satisfies or does not satisfy a
|
||||
range, use the `satisfies(version, range)` function.
|
||||
|
||||
### Coercion
|
||||
|
||||
* `coerce(version, options)`: Coerces a string to semver if possible
|
||||
|
||||
This aims to provide a very forgiving translation of a non-semver string to
|
||||
semver. It looks for the first digit in a string, and consumes all
|
||||
remaining characters which satisfy at least a partial semver (e.g., `1`,
|
||||
`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
|
||||
versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
|
||||
surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
|
||||
`3.4.0`). Only text which lacks digits will fail coercion (`version one`
|
||||
is not valid). The maximum length for any semver component considered for
|
||||
coercion is 16 characters; longer components will be ignored
|
||||
(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
|
||||
  semver component is `Number.MAX_SAFE_INTEGER` (i.e. `2**53 - 1`); higher value
|
||||
components are invalid (`9999999999999999.4.7.4` is likely invalid).
|
||||
|
||||
If the `options.rtl` flag is set, then `coerce` will return the right-most
|
||||
coercible tuple that does not share an ending index with a longer coercible
|
||||
tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not
|
||||
`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of
|
||||
any other overlapping SemVer tuple.
|
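The coercion rules above, restated in code for illustration (values as documented for semver 6.x):

```js
const semver = require('semver')

semver.coerce('v3.4 replaces v3.3.1').version   // '3.4.0' (surrounding text ignored)
semver.coerce('4.6.3.9.2-alpha2').version       // '4.6.3' (longer versions truncated)
semver.coerce('1.2.3.4', { rtl: true }).version // '2.3.4' (right-most coercible tuple)
semver.coerce('version one')                    // null    (no digits to coerce)
```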
||||
|
||||
### Clean
|
||||
|
||||
* `clean(version)`: Clean a string to be a valid semver if possible
|
||||
|
||||
This will return a cleaned and trimmed semver version. If the provided version is not valid a null will be returned. This does not work for ranges.
|
||||
|
||||
ex.
|
||||
* `s.clean(' = v 2.1.5foo')`: `null`
|
||||
* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'`
|
||||
* `s.clean(' = v 2.1.5-foo')`: `null`
|
||||
* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'`
|
||||
* `s.clean('=v2.1.5')`: `'2.1.5'`
|
||||
* `s.clean(' =v2.1.5')`: `'2.1.5'`
|
||||
* `s.clean(' 2.1.5 ')`: `'2.1.5'`
|
||||
* `s.clean('~1.0.0')`: `null`
|
174
node_modules/semver/bin/semver.js
generated
vendored
Normal file
|
@ -0,0 +1,174 @@
|
|||
#!/usr/bin/env node
|
||||
// Standalone semver comparison program.
|
||||
// Exits successfully and prints matching version(s) if
|
||||
// any supplied version is valid and passes all tests.
|
||||
|
||||
var argv = process.argv.slice(2)
|
||||
|
||||
var versions = []
|
||||
|
||||
var range = []
|
||||
|
||||
var inc = null
|
||||
|
||||
var version = require('../package.json').version
|
||||
|
||||
var loose = false
|
||||
|
||||
var includePrerelease = false
|
||||
|
||||
var coerce = false
|
||||
|
||||
var rtl = false
|
||||
|
||||
var identifier
|
||||
|
||||
var semver = require('../semver')
|
||||
|
||||
var reverse = false
|
||||
|
||||
var options = {}
|
||||
|
||||
main()
|
||||
|
||||
function main () {
|
||||
if (!argv.length) return help()
|
||||
while (argv.length) {
|
||||
var a = argv.shift()
|
||||
var indexOfEqualSign = a.indexOf('=')
|
||||
if (indexOfEqualSign !== -1) {
|
||||
a = a.slice(0, indexOfEqualSign)
|
||||
argv.unshift(a.slice(indexOfEqualSign + 1))
|
||||
}
|
||||
switch (a) {
|
||||
case '-rv': case '-rev': case '--rev': case '--reverse':
|
||||
reverse = true
|
||||
break
|
||||
case '-l': case '--loose':
|
||||
loose = true
|
||||
break
|
||||
case '-p': case '--include-prerelease':
|
||||
includePrerelease = true
|
||||
break
|
||||
case '-v': case '--version':
|
||||
versions.push(argv.shift())
|
||||
break
|
||||
case '-i': case '--inc': case '--increment':
|
||||
switch (argv[0]) {
|
||||
case 'major': case 'minor': case 'patch': case 'prerelease':
|
||||
case 'premajor': case 'preminor': case 'prepatch':
|
||||
inc = argv.shift()
|
||||
break
|
||||
default:
|
||||
inc = 'patch'
|
||||
break
|
||||
}
|
||||
break
|
||||
case '--preid':
|
||||
identifier = argv.shift()
|
||||
break
|
||||
case '-r': case '--range':
|
||||
range.push(argv.shift())
|
||||
break
|
||||
case '-c': case '--coerce':
|
||||
coerce = true
|
||||
break
|
||||
case '--rtl':
|
||||
rtl = true
|
||||
break
|
||||
case '--ltr':
|
||||
rtl = false
|
||||
break
|
||||
case '-h': case '--help': case '-?':
|
||||
return help()
|
||||
default:
|
||||
versions.push(a)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl }
|
||||
|
||||
versions = versions.map(function (v) {
|
||||
return coerce ? (semver.coerce(v, options) || { version: v }).version : v
|
||||
}).filter(function (v) {
|
||||
return semver.valid(v)
|
||||
})
|
||||
if (!versions.length) return fail()
|
||||
if (inc && (versions.length !== 1 || range.length)) { return failInc() }
|
||||
|
||||
for (var i = 0, l = range.length; i < l; i++) {
|
||||
versions = versions.filter(function (v) {
|
||||
return semver.satisfies(v, range[i], options)
|
||||
})
|
||||
if (!versions.length) return fail()
|
||||
}
|
||||
return success(versions)
|
||||
}
|
||||
|
||||
function failInc () {
|
||||
console.error('--inc can only be used on a single version with no range')
|
||||
fail()
|
||||
}
|
||||
|
||||
function fail () { process.exit(1) }
|
||||
|
||||
function success () {
|
||||
var compare = reverse ? 'rcompare' : 'compare'
|
||||
versions.sort(function (a, b) {
|
||||
return semver[compare](a, b, options)
|
||||
}).map(function (v) {
|
||||
return semver.clean(v, options)
|
||||
}).map(function (v) {
|
||||
return inc ? semver.inc(v, inc, options, identifier) : v
|
||||
}).forEach(function (v, i, _) { console.log(v) })
|
||||
}
|
||||
|
||||
function help () {
|
||||
console.log(['SemVer ' + version,
|
||||
'',
|
||||
'A JavaScript implementation of the https://semver.org/ specification',
|
||||
'Copyright Isaac Z. Schlueter',
|
||||
'',
|
||||
'Usage: semver [options] <version> [<version> [...]]',
|
||||
'Prints valid versions sorted by SemVer precedence',
|
||||
'',
|
||||
'Options:',
|
||||
'-r --range <range>',
|
||||
' Print versions that match the specified range.',
|
||||
'',
|
||||
'-i --increment [<level>]',
|
||||
' Increment a version by the specified level. Level can',
|
||||
' be one of: major, minor, patch, premajor, preminor,',
|
||||
" prepatch, or prerelease. Default level is 'patch'.",
|
||||
' Only one version may be specified.',
|
||||
'',
|
||||
'--preid <identifier>',
|
||||
' Identifier to be used to prefix premajor, preminor,',
|
||||
' prepatch or prerelease version increments.',
|
||||
'',
|
||||
'-l --loose',
|
||||
' Interpret versions and ranges loosely',
|
||||
'',
|
||||
'-p --include-prerelease',
|
||||
' Always include prerelease versions in range matching',
|
||||
'',
|
||||
'-c --coerce',
|
||||
' Coerce a string into SemVer if possible',
|
||||
' (does not imply --loose)',
|
||||
'',
|
||||
'--rtl',
|
||||
' Coerce version strings right to left',
|
||||
'',
|
||||
'--ltr',
|
||||
' Coerce version strings left to right (default)',
|
||||
'',
|
||||
'Program exits successfully if any valid version satisfies',
|
||||
'all supplied ranges, and prints all satisfying versions.',
|
||||
'',
|
||||
'If no satisfying versions are found, then exits failure.',
|
||||
'',
|
||||
'Versions are printed in ascending order, so supplying',
|
||||
'multiple versions to the utility will just sort them.'
|
||||
].join('\n'))
|
||||
}
|
62
node_modules/semver/package.json
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
{
|
||||
"_from": "semver@^6.1.1",
|
||||
"_id": "semver@6.3.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
|
||||
"_location": "/semver",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "semver@^6.1.1",
|
||||
"name": "semver",
|
||||
"escapedName": "semver",
|
||||
"rawSpec": "^6.1.1",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^6.1.1"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/",
|
||||
"/@actions/tool-cache",
|
||||
"/istanbul-lib-instrument"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
|
||||
"_shasum": "ee0a64c8af5e8ceea67687b133761e1becbd1d3d",
|
||||
"_spec": "semver@^6.1.1",
|
||||
"_where": "E:\\github\\setup-python",
|
||||
"bin": {
|
||||
"semver": "./bin/semver.js"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/npm/node-semver/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"deprecated": false,
|
||||
"description": "The semantic version parser used by npm.",
|
||||
"devDependencies": {
|
||||
"tap": "^14.3.1"
|
||||
},
|
||||
"files": [
|
||||
"bin",
|
||||
"range.bnf",
|
||||
"semver.js"
|
||||
],
|
||||
"homepage": "https://github.com/npm/node-semver#readme",
|
||||
"license": "ISC",
|
||||
"main": "semver.js",
|
||||
"name": "semver",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/npm/node-semver.git"
|
||||
},
|
||||
"scripts": {
|
||||
"postpublish": "git push origin --follow-tags",
|
||||
"postversion": "npm publish",
|
||||
"preversion": "npm test",
|
||||
"test": "tap"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"version": "6.3.0"
|
||||
}
|
16
node_modules/semver/range.bnf
generated
vendored
Normal file
|
@ -0,0 +1,16 @@
|
|||
range-set ::= range ( logical-or range ) *
|
||||
logical-or ::= ( ' ' ) * '||' ( ' ' ) *
|
||||
range ::= hyphen | simple ( ' ' simple ) * | ''
|
||||
hyphen ::= partial ' - ' partial
|
||||
simple ::= primitive | partial | tilde | caret
|
||||
primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
|
||||
partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
|
||||
xr ::= 'x' | 'X' | '*' | nr
|
||||
nr ::= '0' | [1-9] ( [0-9] ) *
|
||||
tilde ::= '~' partial
|
||||
caret ::= '^' partial
|
||||
qualifier ::= ( '-' pre )? ( '+' build )?
|
||||
pre ::= parts
|
||||
build ::= parts
|
||||
parts ::= part ( '.' part ) *
|
||||
part ::= nr | [-0-9A-Za-z]+
|
1596
node_modules/semver/semver.js
generated
vendored
Normal file
File diff suppressed because it is too large
2
node_modules/tunnel/.npmignore
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
/.idea
|
||||
/node_modules
|
13
node_modules/tunnel/CHANGELOG.md
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
|||
# Changelog
|
||||
|
||||
- 0.0.4 (2016/01/23)
|
||||
- supported Node v0.12 or later.
|
||||
|
||||
- 0.0.3 (2014/01/20)
|
||||
- fixed package.json
|
||||
|
||||
- 0.0.1 (2012/02/18)
|
||||
- supported Node v0.6.x (0.6.11 or later).
|
||||
|
||||
- 0.0.0 (2012/02/11)
|
||||
- first release.
|
21
node_modules/tunnel/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2012 Koichi Kobayashi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
179
node_modules/tunnel/README.md
generated
vendored
Normal file
|
@ -0,0 +1,179 @@
|
|||
# node-tunnel - HTTP/HTTPS Agents for tunneling proxies

## Example

```javascript
var tunnel = require('tunnel');

var tunnelingAgent = tunnel.httpsOverHttp({
  proxy: {
    host: 'localhost',
    port: 3128
  }
});

var req = https.request({
  host: 'example.com',
  port: 443,
  agent: tunnelingAgent
});
```

## Installation

    $ npm install tunnel

## Usages

### HTTP over HTTP tunneling

```javascript
var tunnelingAgent = tunnel.httpOverHttp({
  maxSockets: poolSize, // Defaults to 5

  proxy: { // Proxy settings
    host: proxyHost, // Defaults to 'localhost'
    port: proxyPort, // Defaults to 80
    localAddress: localAddress, // Local interface if necessary

    // Basic authorization for proxy server if necessary
    proxyAuth: 'user:password',

    // Header fields for proxy server if necessary
    headers: {
      'User-Agent': 'Node'
    }
  }
});

var req = http.request({
  host: 'example.com',
  port: 80,
  agent: tunnelingAgent
});
```

### HTTPS over HTTP tunneling

```javascript
var tunnelingAgent = tunnel.httpsOverHttp({
  maxSockets: poolSize, // Defaults to 5

  // CA for origin server if necessary
  ca: [ fs.readFileSync('origin-server-ca.pem')],

  // Client certification for origin server if necessary
  key: fs.readFileSync('origin-server-key.pem'),
  cert: fs.readFileSync('origin-server-cert.pem'),

  proxy: { // Proxy settings
    host: proxyHost, // Defaults to 'localhost'
    port: proxyPort, // Defaults to 80
    localAddress: localAddress, // Local interface if necessary

    // Basic authorization for proxy server if necessary
    proxyAuth: 'user:password',

    // Header fields for proxy server if necessary
    headers: {
      'User-Agent': 'Node'
    },
  }
});

var req = https.request({
  host: 'example.com',
  port: 443,
  agent: tunnelingAgent
});
```

### HTTP over HTTPS tunneling

```javascript
var tunnelingAgent = tunnel.httpOverHttps({
  maxSockets: poolSize, // Defaults to 5

  proxy: { // Proxy settings
    host: proxyHost, // Defaults to 'localhost'
    port: proxyPort, // Defaults to 443
    localAddress: localAddress, // Local interface if necessary

    // Basic authorization for proxy server if necessary
    proxyAuth: 'user:password',

    // Header fields for proxy server if necessary
    headers: {
      'User-Agent': 'Node'
    },

    // CA for proxy server if necessary
    ca: [ fs.readFileSync('origin-server-ca.pem')],

    // Server name for verification if necessary
    servername: 'example.com',

    // Client certification for proxy server if necessary
    key: fs.readFileSync('origin-server-key.pem'),
    cert: fs.readFileSync('origin-server-cert.pem'),
  }
});

var req = http.request({
  host: 'example.com',
  port: 80,
  agent: tunnelingAgent
});
```

### HTTPS over HTTPS tunneling

```javascript
var tunnelingAgent = tunnel.httpsOverHttps({
  maxSockets: poolSize, // Defaults to 5

  // CA for origin server if necessary
  ca: [ fs.readFileSync('origin-server-ca.pem')],

  // Client certification for origin server if necessary
  key: fs.readFileSync('origin-server-key.pem'),
  cert: fs.readFileSync('origin-server-cert.pem'),

  proxy: { // Proxy settings
    host: proxyHost, // Defaults to 'localhost'
    port: proxyPort, // Defaults to 443
    localAddress: localAddress, // Local interface if necessary

    // Basic authorization for proxy server if necessary
    proxyAuth: 'user:password',

    // Header fields for proxy server if necessary
    headers: {
      'User-Agent': 'Node'
    },

    // CA for proxy server if necessary
    ca: [ fs.readFileSync('origin-server-ca.pem')],

    // Server name for verification if necessary
    servername: 'example.com',

    // Client certification for proxy server if necessary
    key: fs.readFileSync('origin-server-key.pem'),
    cert: fs.readFileSync('origin-server-cert.pem'),
  }
});

var req = https.request({
  host: 'example.com',
  port: 443,
  agent: tunnelingAgent
});
```

## CONTRIBUTORS
* [Aleksis Brezas (abresas)](https://github.com/abresas)

## License

Licensed under the [MIT](https://github.com/koichik/node-tunnel/blob/master/LICENSE) license.
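A minimal sketch, not part of the vendored README: in this repository tunnel is pulled in as a dependency of typed-rest-client for proxy support, and a common pattern is to pick one of the helpers above from an environment proxy URL. The `https_proxy` format (`http(s)://user:pass@host:port`) and the helper choice below are assumptions for illustration only.

```javascript
// Illustrative only: build a tunneling agent from the https_proxy environment
// variable, assuming the conventional http(s)://user:pass@host:port format.
var url = require('url');
var tunnel = require('tunnel');

function agentFromEnv() {
  var raw = process.env.https_proxy || process.env.HTTPS_PROXY;
  if (!raw) {
    return undefined; // no proxy configured; let Node connect directly
  }
  var proxy = url.parse(raw);
  var options = {
    proxy: {
      host: proxy.hostname,
      port: parseInt(proxy.port, 10) || (proxy.protocol === 'https:' ? 443 : 80),
      proxyAuth: proxy.auth || undefined
    }
  };
  // httpsOverHttps when the proxy itself speaks TLS, httpsOverHttp otherwise.
  return proxy.protocol === 'https:'
    ? tunnel.httpsOverHttps(options)
    : tunnel.httpsOverHttp(options);
}
```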
1 node_modules/tunnel/index.js generated vendored Normal file
@@ -0,0 +1 @@
module.exports = require('./lib/tunnel');
247 node_modules/tunnel/lib/tunnel.js generated vendored Normal file
@@ -0,0 +1,247 @@
'use strict';

var net = require('net');
var tls = require('tls');
var http = require('http');
var https = require('https');
var events = require('events');
var assert = require('assert');
var util = require('util');


exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;


function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}

function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  return agent;
}

function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}

function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  return agent;
}


function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = [];
  self.sockets = [];

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);

TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};

TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false
  });
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        new Buffer(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode === 200) {
      assert.equal(head.length, 0);
      debug('tunneling connection has established');
      self.sockets[self.sockets.indexOf(placeholder)] = socket;
      cb(socket);
    } else {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
    }
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};

TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket)
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function(socket) {
      pending.request.onSocket(socket);
    });
  }
};

function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}


function toOptions(host, port, localAddress) {
  if (typeof host === 'string') { // since v0.10
    return {
      host: host,
      port: port,
      localAddress: localAddress
    };
  }
  return host; // for v0.11 or later
}

function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    if (typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}


var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
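The debug logger at the end of lib/tunnel.js is selected once, when the module is first required, based on `NODE_DEBUG`. A minimal sketch (illustrative, not part of the vendored code) of how to see the `TUNNEL:` output from a script, assuming the environment variable is set before the module loads:

```javascript
// Illustrative only: enable node-tunnel's debug logging. NODE_DEBUG is read
// when lib/tunnel.js is first required, so set it here (or in the shell:
// NODE_DEBUG=tunnel node app.js) before requiring the module.
process.env.NODE_DEBUG = (process.env.NODE_DEBUG || '') + ' tunnel';

var tunnel = require('tunnel');
tunnel.debug('debug output goes to stderr with a TUNNEL: prefix');
```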
64 node_modules/tunnel/package.json generated vendored Normal file
@@ -0,0 +1,64 @@
{
  "_from": "tunnel@0.0.4",
  "_id": "tunnel@0.0.4",
  "_inBundle": false,
  "_integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM=",
  "_location": "/tunnel",
  "_phantomChildren": {},
  "_requested": {
    "type": "version",
    "registry": true,
    "raw": "tunnel@0.0.4",
    "name": "tunnel",
    "escapedName": "tunnel",
    "rawSpec": "0.0.4",
    "saveSpec": null,
    "fetchSpec": "0.0.4"
  },
  "_requiredBy": [
    "/typed-rest-client"
  ],
  "_resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
  "_shasum": "2d3785a158c174c9a16dc2c046ec5fc5f1742213",
  "_spec": "tunnel@0.0.4",
  "_where": "E:\\github\\setup-python\\node_modules\\typed-rest-client",
  "author": {
    "name": "Koichi Kobayashi",
    "email": "koichik@improvement.jp"
  },
  "bugs": {
    "url": "https://github.com/koichik/node-tunnel/issues"
  },
  "bundleDependencies": false,
  "deprecated": false,
  "description": "Node HTTP/HTTPS Agents for tunneling proxies",
  "devDependencies": {
    "mocha": "*",
    "should": "*"
  },
  "directories": {
    "lib": "./lib"
  },
  "engines": {
    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
  },
  "homepage": "https://github.com/koichik/node-tunnel/",
  "keywords": [
    "http",
    "https",
    "agent",
    "proxy",
    "tunnel"
  ],
  "license": "MIT",
  "main": "./index.js",
  "name": "tunnel",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/koichik/node-tunnel.git"
  },
  "scripts": {
    "test": "./node_modules/mocha/bin/mocha"
  },
  "version": "0.0.4"
}
108 node_modules/tunnel/test/http-over-http.js generated vendored Normal file
@@ -0,0 +1,108 @@
var http = require('http');
var net = require('net');
var should = require('should');
var tunnel = require('../index');

describe('HTTP over HTTP', function() {
  it('should finish without error', function(done) {
    var serverPort = 3000;
    var proxyPort = 3001;
    var poolSize = 3;
    var N = 10;
    var serverConnect = 0;
    var proxyConnect = 0;
    var clientConnect = 0;
    var server;
    var proxy;
    var agent;

    server = http.createServer(function(req, res) {
      tunnel.debug('SERVER: got request');
      ++serverConnect;
      res.writeHead(200);
      res.end('Hello' + req.url);
      tunnel.debug('SERVER: sending response');
    });
    server.listen(serverPort, setupProxy);

    function setupProxy() {
      proxy = http.createServer(function(req, res) {
        should.fail();
      });
      proxy.on('upgrade', onConnect); // for v0.6
      proxy.on('connect', onConnect); // for v0.7 or later

      function onConnect(req, clientSocket, head) {
        tunnel.debug('PROXY: got CONNECT request');

        req.method.should.equal('CONNECT');
        req.url.should.equal('localhost:' + serverPort);
        req.headers.should.not.have.property('transfer-encoding');
        req.headers.should.have.property('proxy-authorization',
            'Basic ' + new Buffer('user:password').toString('base64'));
        ++proxyConnect;

        tunnel.debug('PROXY: creating a tunnel');
        var serverSocket = net.connect(serverPort, function() {
          tunnel.debug('PROXY: replying to client CONNECT request');
          clientSocket.write('HTTP/1.1 200 Connection established\r\n\r\n');
          clientSocket.pipe(serverSocket);
          serverSocket.write(head);
          serverSocket.pipe(clientSocket);
          // workaround, see joyent/node#2524
          serverSocket.on('end', function() {
            clientSocket.end();
          });
        });
      }
      proxy.listen(proxyPort, setupClient);
    }

    function setupClient() {
      agent = tunnel.httpOverHttp({
        maxSockets: poolSize,
        proxy: {
          port: proxyPort,
          proxyAuth: 'user:password'
        }
      });

      for (var i = 0; i < N; ++i) {
        doClientRequest(i);
      }

      function doClientRequest(i) {
        tunnel.debug('CLIENT: Making HTTP request (%d)', i);
        var req = http.get({
          port: serverPort,
          path: '/' + i,
          agent: agent
        }, function(res) {
          tunnel.debug('CLIENT: got HTTP response (%d)', i);
          res.setEncoding('utf8');
          res.on('data', function(data) {
            data.should.equal('Hello/' + i);
          });
          res.on('end', function() {
            ++clientConnect;
            if (clientConnect === N) {
              proxy.close();
              server.close();
            }
          });
        });
      }
    }

    server.on('close', function() {
      serverConnect.should.equal(N);
      proxyConnect.should.equal(poolSize);
      clientConnect.should.equal(N);

      agent.sockets.should.be.empty;
      agent.requests.should.be.empty;

      done();
    });
  });
});
130 node_modules/tunnel/test/http-over-https.js generated vendored Normal file
@@ -0,0 +1,130 @@
var http = require('http');
|
||||
var https = require('https');
|
||||
var net = require('net');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var should = require('should');
|
||||
var tunnel = require('../index');
|
||||
|
||||
function readPem(file) {
|
||||
return fs.readFileSync(path.join('test/keys', file + '.pem'));
|
||||
}
|
||||
|
||||
var proxyKey = readPem('proxy1-key');
|
||||
var proxyCert = readPem('proxy1-cert');
|
||||
var proxyCA = readPem('ca2-cert');
|
||||
var clientKey = readPem('client1-key');
|
||||
var clientCert = readPem('client1-cert');
|
||||
var clientCA = readPem('ca3-cert');
|
||||
|
||||
describe('HTTP over HTTPS', function() {
|
||||
it('should finish without error', function(done) {
|
||||
var serverPort = 3004;
|
||||
var proxyPort = 3005;
|
||||
var poolSize = 3;
|
||||
var N = 10;
|
||||
var serverConnect = 0;
|
||||
var proxyConnect = 0;
|
||||
var clientConnect = 0;
|
||||
var server;
|
||||
var proxy;
|
||||
var agent;
|
||||
|
||||
server = http.createServer(function(req, res) {
|
||||
tunnel.debug('SERVER: got request');
|
||||
++serverConnect;
|
||||
res.writeHead(200);
|
||||
res.end('Hello' + req.url);
|
||||
tunnel.debug('SERVER: sending response');
|
||||
});
|
||||
server.listen(serverPort, setupProxy);
|
||||
|
||||
function setupProxy() {
|
||||
proxy = https.createServer({
|
||||
key: proxyKey,
|
||||
cert: proxyCert,
|
||||
ca: [clientCA],
|
||||
requestCert: true,
|
||||
rejectUnauthorized: true
|
||||
}, function(req, res) {
|
||||
should.fail();
|
||||
});
|
||||
proxy.on('upgrade', onConnect); // for v0.6
|
||||
proxy.on('connect', onConnect); // for v0.7 or later
|
||||
|
||||
function onConnect(req, clientSocket, head) {
|
||||
tunnel.debug('PROXY: got CONNECT request');
|
||||
|
||||
req.method.should.equal('CONNECT');
|
||||
req.url.should.equal('localhost:' + serverPort);
|
||||
req.headers.should.not.have.property('transfer-encoding');
|
||||
++proxyConnect;
|
||||
|
||||
tunnel.debug('PROXY: creating a tunnel');
|
||||
var serverSocket = net.connect(serverPort, function() {
|
||||
tunnel.debug('PROXY: replying to client CONNECT request');
|
||||
clientSocket.write('HTTP/1.1 200 Connection established\r\n\r\n');
|
||||
clientSocket.pipe(serverSocket);
|
||||
serverSocket.write(head);
|
||||
serverSocket.pipe(clientSocket);
|
||||
// workaround, see joyent/node#2524
|
||||
serverSocket.on('end', function() {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
}
|
||||
proxy.listen(proxyPort, setupClient);
|
||||
}
|
||||
|
||||
function setupClient() {
|
||||
agent = tunnel.httpOverHttps({
|
||||
maxSockets: poolSize,
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
key: clientKey,
|
||||
cert: clientCert,
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true
|
||||
}
|
||||
});
|
||||
|
||||
for (var i = 0; i < N; ++i) {
|
||||
doClientRequest(i);
|
||||
}
|
||||
|
||||
function doClientRequest(i) {
|
||||
tunnel.debug('CLIENT: Making HTTP request (%d)', i);
|
||||
var req = http.get({
|
||||
port: serverPort,
|
||||
path: '/' + i,
|
||||
agent: agent
|
||||
}, function(res) {
|
||||
tunnel.debug('CLIENT: got HTTP response (%d)', i);
|
||||
res.setEncoding('utf8');
|
||||
res.on('data', function(data) {
|
||||
data.should.equal('Hello/' + i);
|
||||
});
|
||||
res.on('end', function() {
|
||||
++clientConnect;
|
||||
if (clientConnect === N) {
|
||||
proxy.close();
|
||||
server.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
server.on('close', function() {
|
||||
serverConnect.should.equal(N);
|
||||
proxyConnect.should.equal(poolSize);
|
||||
clientConnect.should.equal(N);
|
||||
|
||||
var name = 'localhost:' + serverPort;
|
||||
agent.sockets.should.be.empty;
|
||||
agent.requests.should.be.empty;
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
130 node_modules/tunnel/test/https-over-http.js generated vendored Normal file
@@ -0,0 +1,130 @@
var http = require('http');
|
||||
var https = require('https');
|
||||
var net = require('net');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var should = require('should');
|
||||
var tunnel = require('../index');
|
||||
|
||||
function readPem(file) {
|
||||
return fs.readFileSync(path.join('test/keys', file + '.pem'));
|
||||
}
|
||||
|
||||
var serverKey = readPem('server1-key');
|
||||
var serverCert = readPem('server1-cert');
|
||||
var serverCA = readPem('ca1-cert');
|
||||
var clientKey = readPem('client1-key');
|
||||
var clientCert = readPem('client1-cert');
|
||||
var clientCA = readPem('ca3-cert');
|
||||
|
||||
|
||||
describe('HTTPS over HTTP', function() {
|
||||
it('should finish without error', function(done) {
|
||||
var serverPort = 3002;
|
||||
var proxyPort = 3003;
|
||||
var poolSize = 3;
|
||||
var N = 10;
|
||||
var serverConnect = 0;
|
||||
var proxyConnect = 0;
|
||||
var clientConnect = 0;
|
||||
var server;
|
||||
var proxy;
|
||||
var agent;
|
||||
|
||||
server = https.createServer({
|
||||
key: serverKey,
|
||||
cert: serverCert,
|
||||
ca: [clientCA],
|
||||
requestCert: true,
|
||||
rejectUnauthorized: true
|
||||
}, function(req, res) {
|
||||
tunnel.debug('SERVER: got request');
|
||||
++serverConnect;
|
||||
res.writeHead(200);
|
||||
res.end('Hello' + req.url);
|
||||
tunnel.debug('SERVER: sending response');
|
||||
});
|
||||
server.listen(serverPort, setupProxy);
|
||||
|
||||
function setupProxy() {
|
||||
proxy = http.createServer(function(req, res) {
|
||||
should.fail();
|
||||
});
|
||||
proxy.on('upgrade', onConnect); // for v0.6
|
||||
proxy.on('connect', onConnect); // for v0.7 or later
|
||||
|
||||
function onConnect(req, clientSocket, head) {
|
||||
tunnel.debug('PROXY: got CONNECT request');
|
||||
|
||||
req.method.should.equal('CONNECT');
|
||||
req.url.should.equal('localhost:' + serverPort);
|
||||
req.headers.should.not.have.property('transfer-encoding');
|
||||
++proxyConnect;
|
||||
|
||||
var serverSocket = net.connect(serverPort, function() {
|
||||
tunnel.debug('PROXY: replying to client CONNECT request');
|
||||
clientSocket.write('HTTP/1.1 200 Connection established\r\n\r\n');
|
||||
clientSocket.pipe(serverSocket);
|
||||
serverSocket.write(head);
|
||||
serverSocket.pipe(clientSocket);
|
||||
// workaround, see joyent/node#2524
|
||||
serverSocket.on('end', function() {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
}
|
||||
proxy.listen(proxyPort, setupClient);
|
||||
}
|
||||
|
||||
function setupClient() {
|
||||
agent = tunnel.httpsOverHttp({
|
||||
maxSockets: poolSize,
|
||||
key: clientKey,
|
||||
cert: clientCert,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
proxy: {
|
||||
port: proxyPort
|
||||
}
|
||||
});
|
||||
|
||||
for (var i = 0; i < N; ++i) {
|
||||
doClientRequest(i);
|
||||
}
|
||||
|
||||
function doClientRequest(i) {
|
||||
tunnel.debug('CLIENT: Making HTTPS request (%d)', i);
|
||||
var req = https.get({
|
||||
port: serverPort,
|
||||
path: '/' + i,
|
||||
agent: agent
|
||||
}, function(res) {
|
||||
tunnel.debug('CLIENT: got HTTPS response (%d)', i);
|
||||
res.setEncoding('utf8');
|
||||
res.on('data', function(data) {
|
||||
data.should.equal('Hello/' + i);
|
||||
});
|
||||
res.on('end', function() {
|
||||
++clientConnect;
|
||||
if (clientConnect === N) {
|
||||
proxy.close();
|
||||
server.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
server.on('close', function() {
|
||||
serverConnect.should.equal(N);
|
||||
proxyConnect.should.equal(poolSize);
|
||||
clientConnect.should.equal(N);
|
||||
|
||||
var name = 'localhost:' + serverPort;
|
||||
agent.sockets.should.be.empty;
|
||||
agent.requests.should.be.empty;
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
261 node_modules/tunnel/test/https-over-https-error.js generated vendored Normal file
@@ -0,0 +1,261 @@
var http = require('http');
|
||||
var https = require('https');
|
||||
var net = require('net');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var should = require('should');
|
||||
var tunnel = require('../index');
|
||||
|
||||
function readPem(file) {
|
||||
return fs.readFileSync(path.join('test/keys', file + '.pem'));
|
||||
}
|
||||
|
||||
var serverKey = readPem('server2-key');
|
||||
var serverCert = readPem('server2-cert');
|
||||
var serverCA = readPem('ca1-cert');
|
||||
var proxyKey = readPem('proxy2-key');
|
||||
var proxyCert = readPem('proxy2-cert');
|
||||
var proxyCA = readPem('ca2-cert');
|
||||
var client1Key = readPem('client1-key');
|
||||
var client1Cert = readPem('client1-cert');
|
||||
var client1CA = readPem('ca3-cert');
|
||||
var client2Key = readPem('client2-key');
|
||||
var client2Cert = readPem('client2-cert');
|
||||
var client2CA = readPem('ca4-cert');
|
||||
|
||||
describe('HTTPS over HTTPS authentication failed', function() {
|
||||
it('should finish without error', function(done) {
|
||||
var serverPort = 3008;
|
||||
var proxyPort = 3009;
|
||||
var serverConnect = 0;
|
||||
var proxyConnect = 0;
|
||||
var clientRequest = 0;
|
||||
var clientConnect = 0;
|
||||
var clientError = 0;
|
||||
var server;
|
||||
var proxy;
|
||||
|
||||
server = https.createServer({
|
||||
key: serverKey,
|
||||
cert: serverCert,
|
||||
ca: [client1CA],
|
||||
requestCert: true,
|
||||
rejectUnauthorized: true
|
||||
}, function(req, res) {
|
||||
tunnel.debug('SERVER: got request', req.url);
|
||||
++serverConnect;
|
||||
req.on('data', function(data) {
|
||||
});
|
||||
req.on('end', function() {
|
||||
res.writeHead(200);
|
||||
res.end('Hello, ' + serverConnect);
|
||||
tunnel.debug('SERVER: sending response');
|
||||
});
|
||||
req.resume();
|
||||
});
|
||||
//server.addContext('server2', {
|
||||
// key: serverKey,
|
||||
// cert: serverCert,
|
||||
// ca: [client1CA],
|
||||
//});
|
||||
server.listen(serverPort, setupProxy);
|
||||
|
||||
function setupProxy() {
|
||||
proxy = https.createServer({
|
||||
key: proxyKey,
|
||||
cert: proxyCert,
|
||||
ca: [client2CA],
|
||||
requestCert: true,
|
||||
rejectUnauthorized: true
|
||||
}, function(req, res) {
|
||||
should.fail();
|
||||
});
|
||||
//proxy.addContext('proxy2', {
|
||||
// key: proxyKey,
|
||||
// cert: proxyCert,
|
||||
// ca: [client2CA],
|
||||
//});
|
||||
proxy.on('upgrade', onConnect); // for v0.6
|
||||
proxy.on('connect', onConnect); // for v0.7 or later
|
||||
|
||||
function onConnect(req, clientSocket, head) {
|
||||
req.method.should.equal('CONNECT');
|
||||
req.url.should.equal('localhost:' + serverPort);
|
||||
req.headers.should.not.have.property('transfer-encoding');
|
||||
++proxyConnect;
|
||||
|
||||
var serverSocket = net.connect(serverPort, function() {
|
||||
tunnel.debug('PROXY: replying to client CONNECT request');
|
||||
clientSocket.write('HTTP/1.1 200 Connection established\r\n\r\n');
|
||||
clientSocket.pipe(serverSocket);
|
||||
serverSocket.write(head);
|
||||
serverSocket.pipe(clientSocket);
|
||||
// workaround, see #2524
|
||||
serverSocket.on('end', function() {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
}
|
||||
proxy.listen(proxyPort, setupClient);
|
||||
}
|
||||
|
||||
function setupClient() {
|
||||
function doRequest(name, options, host) {
|
||||
tunnel.debug('CLIENT: Making HTTPS request (%s)', name);
|
||||
++clientRequest;
|
||||
var agent = tunnel.httpsOverHttps(options);
|
||||
var req = https.get({
|
||||
host: 'localhost',
|
||||
port: serverPort,
|
||||
path: '/' + encodeURIComponent(name),
|
||||
headers: {
|
||||
host: host ? host : 'localhost',
|
||||
},
|
||||
rejectUnauthorized: true,
|
||||
agent: agent
|
||||
}, function(res) {
|
||||
tunnel.debug('CLIENT: got HTTPS response (%s)', name);
|
||||
++clientConnect;
|
||||
res.on('data', function(data) {
|
||||
});
|
||||
res.on('end', function() {
|
||||
req.emit('finish');
|
||||
});
|
||||
res.resume();
|
||||
});
|
||||
req.on('error', function(err) {
|
||||
tunnel.debug('CLIENT: failed HTTP response (%s)', name, err);
|
||||
++clientError;
|
||||
req.emit('finish');
|
||||
});
|
||||
req.on('finish', function() {
|
||||
if (clientConnect + clientError === clientRequest) {
|
||||
proxy.close();
|
||||
server.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
doRequest('no cert origin nor proxy', { // invalid
|
||||
maxSockets: 1,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
// no certificate for origin server
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true,
|
||||
headers: {
|
||||
host: 'proxy2'
|
||||
}
|
||||
// no certificate for proxy
|
||||
}
|
||||
}, 'server2');
|
||||
|
||||
doRequest('no cert proxy', { // invalid
|
||||
maxSockets: 1,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
// client certification for origin server
|
||||
key: client1Key,
|
||||
cert: client1Cert,
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true,
|
||||
headers: {
|
||||
host: 'proxy2'
|
||||
}
|
||||
// no certificate for proxy
|
||||
}
|
||||
}, 'server2');
|
||||
|
||||
doRequest('no cert origin', { // invalid
|
||||
maxSockets: 1,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
// no certificate for origin server
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
servername: 'proxy2',
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true,
|
||||
headers: {
|
||||
host: 'proxy2'
|
||||
},
|
||||
// client certification for proxy
|
||||
key: client2Key,
|
||||
cert: client2Cert
|
||||
}
|
||||
}, 'server2');
|
||||
|
||||
doRequest('invalid proxy server name', { // invalid
|
||||
maxSockets: 1,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
// client certification for origin server
|
||||
key: client1Key,
|
||||
cert: client1Cert,
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true,
|
||||
// client certification for proxy
|
||||
key: client2Key,
|
||||
cert: client2Cert,
|
||||
}
|
||||
}, 'server2');
|
||||
|
||||
doRequest('invalid origin server name', { // invalid
|
||||
maxSockets: 1,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
// client certification for origin server
|
||||
key: client1Key,
|
||||
cert: client1Cert,
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
servername: 'proxy2',
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true,
|
||||
headers: {
|
||||
host: 'proxy2'
|
||||
},
|
||||
// client certification for proxy
|
||||
key: client2Key,
|
||||
cert: client2Cert
|
||||
}
|
||||
});
|
||||
|
||||
doRequest('valid', { // valid
|
||||
maxSockets: 1,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
// client certification for origin server
|
||||
key: client1Key,
|
||||
cert: client1Cert,
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
servername: 'proxy2',
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true,
|
||||
headers: {
|
||||
host: 'proxy2'
|
||||
},
|
||||
// client certification for proxy
|
||||
key: client2Key,
|
||||
cert: client2Cert
|
||||
}
|
||||
}, 'server2');
|
||||
}
|
||||
|
||||
server.on('close', function() {
|
||||
serverConnect.should.equal(1);
|
||||
proxyConnect.should.equal(3);
|
||||
clientConnect.should.equal(1);
|
||||
clientError.should.equal(5);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
146 node_modules/tunnel/test/https-over-https.js generated vendored Normal file
@@ -0,0 +1,146 @@
var http = require('http');
|
||||
var https = require('https');
|
||||
var net = require('net');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var should = require('should');
|
||||
var tunnel = require('../index.js');
|
||||
|
||||
function readPem(file) {
|
||||
return fs.readFileSync(path.join('test/keys', file + '.pem'));
|
||||
}
|
||||
|
||||
var serverKey = readPem('server1-key');
|
||||
var serverCert = readPem('server1-cert');
|
||||
var serverCA = readPem('ca1-cert');
|
||||
var proxyKey = readPem('proxy1-key');
|
||||
var proxyCert = readPem('proxy1-cert');
|
||||
var proxyCA = readPem('ca2-cert');
|
||||
var client1Key = readPem('client1-key');
|
||||
var client1Cert = readPem('client1-cert');
|
||||
var client1CA = readPem('ca3-cert');
|
||||
var client2Key = readPem('client2-key');
|
||||
var client2Cert = readPem('client2-cert');
|
||||
var client2CA = readPem('ca4-cert');
|
||||
|
||||
describe('HTTPS over HTTPS', function() {
|
||||
it('should finish without error', function(done) {
|
||||
var serverPort = 3006;
|
||||
var proxyPort = 3007;
|
||||
var poolSize = 3;
|
||||
var N = 5;
|
||||
var serverConnect = 0;
|
||||
var proxyConnect = 0;
|
||||
var clientConnect = 0;
|
||||
var server;
|
||||
var proxy;
|
||||
var agent;
|
||||
|
||||
server = https.createServer({
|
||||
key: serverKey,
|
||||
cert: serverCert,
|
||||
ca: [client1CA],
|
||||
requestCert: true,
|
||||
rejectUnauthorized: true
|
||||
}, function(req, res) {
|
||||
tunnel.debug('SERVER: got request');
|
||||
++serverConnect;
|
||||
res.writeHead(200);
|
||||
res.end('Hello' + req.url);
|
||||
tunnel.debug('SERVER: sending response');
|
||||
});
|
||||
server.listen(serverPort, setupProxy);
|
||||
|
||||
function setupProxy() {
|
||||
proxy = https.createServer({
|
||||
key: proxyKey,
|
||||
cert: proxyCert,
|
||||
ca: [client2CA],
|
||||
requestCert: true,
|
||||
rejectUnauthorized: true
|
||||
}, function(req, res) {
|
||||
should.fail();
|
||||
});
|
||||
proxy.on('upgrade', onConnect); // for v0.6
|
||||
proxy.on('connect', onConnect); // for v0.7 or later
|
||||
|
||||
function onConnect(req, clientSocket, head) {
|
||||
tunnel.debug('PROXY: got CONNECT request');
|
||||
req.method.should.equal('CONNECT');
|
||||
req.url.should.equal('localhost:' + serverPort);
|
||||
req.headers.should.not.have.property('transfer-encoding');
|
||||
++proxyConnect;
|
||||
|
||||
var serverSocket = net.connect(serverPort, function() {
|
||||
tunnel.debug('PROXY: replying to client CONNECT request');
|
||||
clientSocket.write('HTTP/1.1 200 Connection established\r\n\r\n');
|
||||
clientSocket.pipe(serverSocket);
|
||||
serverSocket.write(head);
|
||||
serverSocket.pipe(clientSocket);
|
||||
// workaround, see joyent/node#2524
|
||||
serverSocket.on('end', function() {
|
||||
clientSocket.end();
|
||||
});
|
||||
});
|
||||
}
|
||||
proxy.listen(proxyPort, setupClient);
|
||||
}
|
||||
|
||||
function setupClient() {
|
||||
agent = tunnel.httpsOverHttps({
|
||||
maxSockets: poolSize,
|
||||
// client certification for origin server
|
||||
key: client1Key,
|
||||
cert: client1Cert,
|
||||
ca: [serverCA],
|
||||
rejectUnauthorized: true,
|
||||
proxy: {
|
||||
port: proxyPort,
|
||||
// client certification for proxy
|
||||
key: client2Key,
|
||||
cert: client2Cert,
|
||||
ca: [proxyCA],
|
||||
rejectUnauthorized: true
|
||||
}
|
||||
});
|
||||
|
||||
for (var i = 0; i < N; ++i) {
|
||||
doClientRequest(i);
|
||||
}
|
||||
|
||||
function doClientRequest(i) {
|
||||
tunnel.debug('CLIENT: Making HTTPS request (%d)', i);
|
||||
var req = https.get({
|
||||
port: serverPort,
|
||||
path: '/' + i,
|
||||
agent: agent
|
||||
}, function(res) {
|
||||
tunnel.debug('CLIENT: got HTTPS response (%d)', i);
|
||||
res.setEncoding('utf8');
|
||||
res.on('data', function(data) {
|
||||
data.should.equal('Hello/' + i);
|
||||
});
|
||||
res.on('end', function() {
|
||||
++clientConnect;
|
||||
if (clientConnect === N) {
|
||||
proxy.close();
|
||||
server.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
server.on('close', function() {
|
||||
serverConnect.should.equal(N);
|
||||
proxyConnect.should.equal(poolSize);
|
||||
clientConnect.should.equal(N);
|
||||
|
||||
var name = 'localhost:' + serverPort;
|
||||
agent.sockets.should.be.empty;
|
||||
agent.requests.should.be.empty;
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
157 node_modules/tunnel/test/keys/Makefile generated vendored Normal file
@@ -0,0 +1,157 @@
all: server1-cert.pem server2-cert.pem proxy1-cert.pem proxy2-cert.pem client1-cert.pem client2-cert.pem
|
||||
|
||||
|
||||
#
|
||||
# Create Certificate Authority: ca1
|
||||
# ('password' is used for the CA password.)
|
||||
#
|
||||
ca1-cert.pem: ca1.cnf
|
||||
openssl req -new -x509 -days 9999 -config ca1.cnf -keyout ca1-key.pem -out ca1-cert.pem
|
||||
|
||||
#
|
||||
# Create Certificate Authority: ca2
|
||||
# ('password' is used for the CA password.)
|
||||
#
|
||||
ca2-cert.pem: ca2.cnf
|
||||
openssl req -new -x509 -days 9999 -config ca2.cnf -keyout ca2-key.pem -out ca2-cert.pem
|
||||
|
||||
#
|
||||
# Create Certificate Authority: ca3
|
||||
# ('password' is used for the CA password.)
|
||||
#
|
||||
ca3-cert.pem: ca3.cnf
|
||||
openssl req -new -x509 -days 9999 -config ca3.cnf -keyout ca3-key.pem -out ca3-cert.pem
|
||||
|
||||
#
|
||||
# Create Certificate Authority: ca4
|
||||
# ('password' is used for the CA password.)
|
||||
#
|
||||
ca4-cert.pem: ca4.cnf
|
||||
openssl req -new -x509 -days 9999 -config ca4.cnf -keyout ca4-key.pem -out ca4-cert.pem
|
||||
|
||||
|
||||
#
|
||||
# server1 is signed by ca1.
|
||||
#
|
||||
server1-key.pem:
|
||||
openssl genrsa -out server1-key.pem 1024
|
||||
|
||||
server1-csr.pem: server1.cnf server1-key.pem
|
||||
openssl req -new -config server1.cnf -key server1-key.pem -out server1-csr.pem
|
||||
|
||||
server1-cert.pem: server1-csr.pem ca1-cert.pem ca1-key.pem
|
||||
openssl x509 -req \
|
||||
-days 9999 \
|
||||
-passin "pass:password" \
|
||||
-in server1-csr.pem \
|
||||
-CA ca1-cert.pem \
|
||||
-CAkey ca1-key.pem \
|
||||
-CAcreateserial \
|
||||
-out server1-cert.pem
|
||||
|
||||
#
|
||||
# server2 is signed by ca1.
|
||||
#
|
||||
server2-key.pem:
|
||||
openssl genrsa -out server2-key.pem 1024
|
||||
|
||||
server2-csr.pem: server2.cnf server2-key.pem
|
||||
openssl req -new -config server2.cnf -key server2-key.pem -out server2-csr.pem
|
||||
|
||||
server2-cert.pem: server2-csr.pem ca1-cert.pem ca1-key.pem
|
||||
openssl x509 -req \
|
||||
-days 9999 \
|
||||
-passin "pass:password" \
|
||||
-in server2-csr.pem \
|
||||
-CA ca1-cert.pem \
|
||||
-CAkey ca1-key.pem \
|
||||
-CAcreateserial \
|
||||
-out server2-cert.pem
|
||||
|
||||
server2-verify: server2-cert.pem ca1-cert.pem
|
||||
openssl verify -CAfile ca1-cert.pem server2-cert.pem
|
||||
|
||||
#
|
||||
# proxy1 is signed by ca2.
|
||||
#
|
||||
proxy1-key.pem:
|
||||
openssl genrsa -out proxy1-key.pem 1024
|
||||
|
||||
proxy1-csr.pem: proxy1.cnf proxy1-key.pem
|
||||
openssl req -new -config proxy1.cnf -key proxy1-key.pem -out proxy1-csr.pem
|
||||
|
||||
proxy1-cert.pem: proxy1-csr.pem ca2-cert.pem ca2-key.pem
|
||||
openssl x509 -req \
|
||||
-days 9999 \
|
||||
-passin "pass:password" \
|
||||
-in proxy1-csr.pem \
|
||||
-CA ca2-cert.pem \
|
||||
-CAkey ca2-key.pem \
|
||||
-CAcreateserial \
|
||||
-out proxy1-cert.pem
|
||||
|
||||
#
|
||||
# proxy2 is signed by ca2.
|
||||
#
|
||||
proxy2-key.pem:
|
||||
openssl genrsa -out proxy2-key.pem 1024
|
||||
|
||||
proxy2-csr.pem: proxy2.cnf proxy2-key.pem
|
||||
openssl req -new -config proxy2.cnf -key proxy2-key.pem -out proxy2-csr.pem
|
||||
|
||||
proxy2-cert.pem: proxy2-csr.pem ca2-cert.pem ca2-key.pem
|
||||
openssl x509 -req \
|
||||
-days 9999 \
|
||||
-passin "pass:password" \
|
||||
-in proxy2-csr.pem \
|
||||
-CA ca2-cert.pem \
|
||||
-CAkey ca2-key.pem \
|
||||
-CAcreateserial \
|
||||
-out proxy2-cert.pem
|
||||
|
||||
proxy2-verify: proxy2-cert.pem ca2-cert.pem
|
||||
openssl verify -CAfile ca2-cert.pem proxy2-cert.pem
|
||||
|
||||
#
|
||||
# client1 is signed by ca3.
|
||||
#
|
||||
client1-key.pem:
|
||||
openssl genrsa -out client1-key.pem 1024
|
||||
|
||||
client1-csr.pem: client1.cnf client1-key.pem
|
||||
openssl req -new -config client1.cnf -key client1-key.pem -out client1-csr.pem
|
||||
|
||||
client1-cert.pem: client1-csr.pem ca3-cert.pem ca3-key.pem
|
||||
openssl x509 -req \
|
||||
-days 9999 \
|
||||
-passin "pass:password" \
|
||||
-in client1-csr.pem \
|
||||
-CA ca3-cert.pem \
|
||||
-CAkey ca3-key.pem \
|
||||
-CAcreateserial \
|
||||
-out client1-cert.pem
|
||||
|
||||
#
|
||||
# client2 is signed by ca4.
|
||||
#
|
||||
client2-key.pem:
|
||||
openssl genrsa -out client2-key.pem 1024
|
||||
|
||||
client2-csr.pem: client2.cnf client2-key.pem
|
||||
openssl req -new -config client2.cnf -key client2-key.pem -out client2-csr.pem
|
||||
|
||||
client2-cert.pem: client2-csr.pem ca4-cert.pem ca4-key.pem
|
||||
openssl x509 -req \
|
||||
-days 9999 \
|
||||
-passin "pass:password" \
|
||||
-in client2-csr.pem \
|
||||
-CA ca4-cert.pem \
|
||||
-CAkey ca4-key.pem \
|
||||
-CAcreateserial \
|
||||
-out client2-cert.pem
|
||||
|
||||
|
||||
clean:
|
||||
rm -f *.pem *.srl
|
||||
|
||||
test: client-verify server2-verify proxy1-verify proxy2-verify client-verify
|
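The Makefile above builds the CA chain the tests rely on (server1/server2 signed by ca1, proxy1/proxy2 by ca2, client1 by ca3, client2 by ca4) and checks issuance with `openssl verify`. As a hedged sketch only, the same issuer check can be done from Node; this assumes Node.js 15.6 or later for `crypto.X509Certificate` and that the PEM files have already been generated in this directory.

```javascript
// Illustrative only (not part of the vendored test suite): confirm that
// server1-cert.pem was issued and signed by ca1-cert.pem, mirroring the
// Makefile's `openssl verify -CAfile ca1-cert.pem server1-cert.pem` target.
// Requires Node.js >= 15.6 for crypto.X509Certificate.
const fs = require('fs');
const { X509Certificate } = require('crypto');

const cert = new X509Certificate(fs.readFileSync('server1-cert.pem'));
const ca = new X509Certificate(fs.readFileSync('ca1-cert.pem'));

console.log('issued by ca1:', cert.checkIssued(ca));     // issuer/subject chain matches
console.log('signature ok :', cert.verify(ca.publicKey)); // signed with ca1's key
```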
14
node_modules/tunnel/test/keys/agent1-cert.pem
generated
vendored
Normal file
14
node_modules/tunnel/test/keys/agent1-cert.pem
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIICKjCCAZMCCQDQ8o4kHKdCPDANBgkqhkiG9w0BAQUFADB6MQswCQYDVQQGEwJV
|
||||
UzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQKEwZKb3llbnQxEDAO
|
||||
BgNVBAsTB05vZGUuanMxDDAKBgNVBAMTA2NhMTEgMB4GCSqGSIb3DQEJARYRcnlA
|
||||
dGlueWNsb3Vkcy5vcmcwHhcNMTEwMzE0MTgyOTEyWhcNMzgwNzI5MTgyOTEyWjB9
|
||||
MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQK
|
||||
EwZKb3llbnQxEDAOBgNVBAsTB05vZGUuanMxDzANBgNVBAMTBmFnZW50MTEgMB4G
|
||||
CSqGSIb3DQEJARYRcnlAdGlueWNsb3Vkcy5vcmcwXDANBgkqhkiG9w0BAQEFAANL
|
||||
ADBIAkEAnzpAqcoXZxWJz/WFK7BXwD23jlREyG11x7gkydteHvn6PrVBbB5yfu6c
|
||||
bk8w3/Ar608AcyMQ9vHjkLQKH7cjEQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAKha
|
||||
HqjCfTIut+m/idKy3AoFh48tBHo3p9Nl5uBjQJmahKdZAaiksL24Pl+NzPQ8LIU+
|
||||
FyDHFp6OeJKN6HzZ72Bh9wpBVu6Uj1hwhZhincyTXT80wtSI/BoUAW8Ls2kwPdus
|
||||
64LsJhhxqj2m4vPKNRbHB2QxnNrGi30CUf3kt3Ia
|
||||
-----END CERTIFICATE-----
|
10
node_modules/tunnel/test/keys/agent1-csr.pem
generated
vendored
Normal file
10
node_modules/tunnel/test/keys/agent1-csr.pem
generated
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIBXTCCAQcCAQAwfTELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMQswCQYDVQQH
|
||||
EwJTRjEPMA0GA1UEChMGSm95ZW50MRAwDgYDVQQLEwdOb2RlLmpzMQ8wDQYDVQQD
|
||||
EwZhZ2VudDExIDAeBgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMFwwDQYJ
|
||||
KoZIhvcNAQEBBQADSwAwSAJBAJ86QKnKF2cVic/1hSuwV8A9t45URMhtdce4JMnb
|
||||
Xh75+j61QWwecn7unG5PMN/wK+tPAHMjEPbx45C0Ch+3IxECAwEAAaAlMCMGCSqG
|
||||
SIb3DQEJBzEWExRBIGNoYWxsZW5nZSBwYXNzd29yZDANBgkqhkiG9w0BAQUFAANB
|
||||
AF+AfG64hNyYHum46m6i7RgnUBrJSOynGjs23TekV4he3QdMSAAPPqbll8W14+y3
|
||||
vOo7/yQ2v2uTqxCjakUNPPs=
|
||||
-----END CERTIFICATE REQUEST-----
|
9
node_modules/tunnel/test/keys/agent1-key.pem
generated
vendored
Normal file
9
node_modules/tunnel/test/keys/agent1-key.pem
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIBOwIBAAJBAJ86QKnKF2cVic/1hSuwV8A9t45URMhtdce4JMnbXh75+j61QWwe
|
||||
cn7unG5PMN/wK+tPAHMjEPbx45C0Ch+3IxECAwEAAQJBAI2cU1IuR+4IO87WPyAB
|
||||
76kruoo87AeNQkjjvuQ/00+b/6IS45mcEP5Kw0NukbqBhIw2di9uQ9J51DJ/ZfQr
|
||||
+YECIQDUHaN3ZjIdJ7/w8Yq9Zzz+3kY2F/xEz6e4ftOFW8bY2QIhAMAref+WYckC
|
||||
oECgOLAvAxB1lI4j7oCbAaawfxKdnPj5AiEAi95rXx09aGpAsBGmSdScrPdG1v6j
|
||||
83/2ebrvoZ1uFqkCIB0AssnrRVjUB6GZTNTyU3ERfdkx/RX1zvr8WkFR/lXpAiB7
|
||||
cUZ1i8ZkZrPrdVgw2cb28UJM7qZHQnXcMHTXFFvxeQ==
|
||||
-----END RSA PRIVATE KEY-----
|
19
node_modules/tunnel/test/keys/agent1.cnf
generated
vendored
Normal file
19
node_modules/tunnel/test/keys/agent1.cnf
generated
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
[ req ]
|
||||
default_bits = 1024
|
||||
days = 999
|
||||
distinguished_name = req_distinguished_name
|
||||
attributes = req_attributes
|
||||
prompt = no
|
||||
|
||||
[ req_distinguished_name ]
|
||||
C = US
|
||||
ST = CA
|
||||
L = SF
|
||||
O = Joyent
|
||||
OU = Node.js
|
||||
CN = agent1
|
||||
emailAddress = ry@tinyclouds.org
|
||||
|
||||
[ req_attributes ]
|
||||
challengePassword = A challenge password
|
||||
|
13
node_modules/tunnel/test/keys/agent2-cert.pem
generated
vendored
Normal file
13
node_modules/tunnel/test/keys/agent2-cert.pem
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIB7DCCAZYCCQC7gs0MDNn6MTANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJV
|
||||
UzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQKEwZKb3llbnQxEDAO
|
||||
BgNVBAsTB05vZGUuanMxDzANBgNVBAMTBmFnZW50MjEgMB4GCSqGSIb3DQEJARYR
|
||||
cnlAdGlueWNsb3Vkcy5vcmcwHhcNMTEwMzE0MTgyOTEyWhcNMzgwNzI5MTgyOTEy
|
||||
WjB9MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYD
|
||||
VQQKEwZKb3llbnQxEDAOBgNVBAsTB05vZGUuanMxDzANBgNVBAMTBmFnZW50MjEg
|
||||
MB4GCSqGSIb3DQEJARYRcnlAdGlueWNsb3Vkcy5vcmcwXDANBgkqhkiG9w0BAQEF
|
||||
AANLADBIAkEAyXb8FrRdKbhrKLgLSsn61i1C7w7fVVVd7OQsmV/7p9WB2lWFiDlC
|
||||
WKGU9SiIz/A6wNZDUAuc2E+VwtpCT561AQIDAQABMA0GCSqGSIb3DQEBBQUAA0EA
|
||||
C8HzpuNhFLCI3A5KkBS5zHAQax6TFUOhbpBCR0aTDbJ6F1liDTK1lmU/BjvPoj+9
|
||||
1LHwrmh29rK8kBPEjmymCQ==
|
||||
-----END CERTIFICATE-----
|
10
node_modules/tunnel/test/keys/agent2-csr.pem
generated
vendored
Normal file
10
node_modules/tunnel/test/keys/agent2-csr.pem
generated
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIBXTCCAQcCAQAwfTELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMQswCQYDVQQH
|
||||
EwJTRjEPMA0GA1UEChMGSm95ZW50MRAwDgYDVQQLEwdOb2RlLmpzMQ8wDQYDVQQD
|
||||
EwZhZ2VudDIxIDAeBgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMFwwDQYJ
|
||||
KoZIhvcNAQEBBQADSwAwSAJBAMl2/Ba0XSm4ayi4C0rJ+tYtQu8O31VVXezkLJlf
|
||||
+6fVgdpVhYg5QlihlPUoiM/wOsDWQ1ALnNhPlcLaQk+etQECAwEAAaAlMCMGCSqG
|
||||
SIb3DQEJBzEWExRBIGNoYWxsZW5nZSBwYXNzd29yZDANBgkqhkiG9w0BAQUFAANB
|
||||
AJnll2pt5l0pzskQSpjjLVTlFDFmJr/AZ3UK8v0WxBjYjCe5Jx4YehkChpxIyDUm
|
||||
U3J9q9MDUf0+Y2+EGkssFfk=
|
||||
-----END CERTIFICATE REQUEST-----
|
9
node_modules/tunnel/test/keys/agent2-key.pem
generated
vendored
Normal file
9
node_modules/tunnel/test/keys/agent2-key.pem
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIBOgIBAAJBAMl2/Ba0XSm4ayi4C0rJ+tYtQu8O31VVXezkLJlf+6fVgdpVhYg5
|
||||
QlihlPUoiM/wOsDWQ1ALnNhPlcLaQk+etQECAwEAAQJBAMT6Bf34+UHKY1ObpsbH
|
||||
9u2jsVblFq1rWvs8GPMY6oertzvwm3DpuSUp7PTgOB1nLTLYtCERbQ4ovtN8tn3p
|
||||
OHUCIQDzIEGsoCr5vlxXvy2zJwu+fxYuhTZWMVuo1397L0VyhwIhANQh+yzqUgaf
|
||||
WRtSB4T2W7ADtJI35ET61jKBty3CqJY3AiAIwju7dVW3A5WeD6Qc1SZGKZvp9yCb
|
||||
AFI2BfVwwaY11wIgXF3PeGcvACMyMWsuSv7aPXHfliswAbkWuzcwA4TW01ECIGWa
|
||||
cgsDvVFxmfM5NPSuT/UDTa6R5BFISB5ea0N0AR3I
|
||||
-----END RSA PRIVATE KEY-----
|
19
node_modules/tunnel/test/keys/agent2.cnf
generated
vendored
Normal file
19
node_modules/tunnel/test/keys/agent2.cnf
generated
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
[ req ]
|
||||
default_bits = 1024
|
||||
days = 999
|
||||
distinguished_name = req_distinguished_name
|
||||
attributes = req_attributes
|
||||
prompt = no
|
||||
|
||||
[ req_distinguished_name ]
|
||||
C = US
|
||||
ST = CA
|
||||
L = SF
|
||||
O = Joyent
|
||||
OU = Node.js
|
||||
CN = agent2
|
||||
emailAddress = ry@tinyclouds.org
|
||||
|
||||
[ req_attributes ]
|
||||
challengePassword = A challenge password
|
||||
|
14
node_modules/tunnel/test/keys/agent3-cert.pem
generated
vendored
Normal file
14
node_modules/tunnel/test/keys/agent3-cert.pem
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIICKjCCAZMCCQCDBr594bsJmTANBgkqhkiG9w0BAQUFADB6MQswCQYDVQQGEwJV
|
||||
UzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQKEwZKb3llbnQxEDAO
|
||||
BgNVBAsTB05vZGUuanMxDDAKBgNVBAMTA2NhMjEgMB4GCSqGSIb3DQEJARYRcnlA
|
||||
dGlueWNsb3Vkcy5vcmcwHhcNMTEwMzE0MTgyOTEyWhcNMzgwNzI5MTgyOTEyWjB9
|
||||
MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQK
|
||||
EwZKb3llbnQxEDAOBgNVBAsTB05vZGUuanMxDzANBgNVBAMTBmFnZW50MzEgMB4G
|
||||
CSqGSIb3DQEJARYRcnlAdGlueWNsb3Vkcy5vcmcwXDANBgkqhkiG9w0BAQEFAANL
|
||||
ADBIAkEAtlNDZ+bHeBI0B2gD/IWqA7Aq1hwsnS4+XpnLesjTQcL2JwFFpkR0oWrw
|
||||
yjrYhCogi7c5gjKrLZF1d2JD5JgHgQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAJoK
|
||||
bXwsImk7vJz9649yrmsXwnuGbEKVYMvqcGyjaZNP9lYEG41y5CeRzxhWy2rlYdhE
|
||||
f2nqE2lg75oJP7LQqfQY7aCqwahM3q/GQbsfKVCGjF7TVyq9TQzd8iW+FEJIQzSE
|
||||
3aN85hR67+3VAXeSzmkGSVBO2m1SJIug4qftIkc2
|
||||
-----END CERTIFICATE-----
|
10
node_modules/tunnel/test/keys/agent3-csr.pem
generated
vendored
Normal file
10
node_modules/tunnel/test/keys/agent3-csr.pem
generated
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIBXTCCAQcCAQAwfTELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMQswCQYDVQQH
|
||||
EwJTRjEPMA0GA1UEChMGSm95ZW50MRAwDgYDVQQLEwdOb2RlLmpzMQ8wDQYDVQQD
|
||||
EwZhZ2VudDMxIDAeBgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMFwwDQYJ
|
||||
KoZIhvcNAQEBBQADSwAwSAJBALZTQ2fmx3gSNAdoA/yFqgOwKtYcLJ0uPl6Zy3rI
|
||||
00HC9icBRaZEdKFq8Mo62IQqIIu3OYIyqy2RdXdiQ+SYB4ECAwEAAaAlMCMGCSqG
|
||||
SIb3DQEJBzEWExRBIGNoYWxsZW5nZSBwYXNzd29yZDANBgkqhkiG9w0BAQUFAANB
|
||||
AEGo76iH+a8pnE+RWQT+wg9/BL+iIuqrcFXLs0rbGonqderrwXAe15ODwql/Bfu3
|
||||
zgMt8ooTsgMPcMX9EgmubEM=
|
||||
-----END CERTIFICATE REQUEST-----
|
9
node_modules/tunnel/test/keys/agent3-key.pem
generated
vendored
Normal file
9
node_modules/tunnel/test/keys/agent3-key.pem
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIBOwIBAAJBALZTQ2fmx3gSNAdoA/yFqgOwKtYcLJ0uPl6Zy3rI00HC9icBRaZE
|
||||
dKFq8Mo62IQqIIu3OYIyqy2RdXdiQ+SYB4ECAwEAAQJAIk+G9s2SKgFa8y3a2jGZ
|
||||
LfqABSzmJGooaIsOpLuYLd6eCC31XUDlT4rPVGRhysKQCQ4+NMjgdnj9ZqNnvXY/
|
||||
RQIhAOgbdltr3Ey2hy7RuDW5rmOeJTuVqCrZ7QI8ifyCEbYTAiEAyRfvWSvvASeP
|
||||
kZTMUhATRUpuyDQW+058NE0oJSinTpsCIQCR/FPhBGI3TcaQyA9Ym0T4GwvIAkUX
|
||||
TqInefRAAX8qSQIgZVJPAdIWGbHSL9sWW97HpukLCorcbYEtKbkamiZyrjMCIQCX
|
||||
lX76ttkeId5OsJGQcF67eFMMr2UGZ1WMf6M39lCYHQ==
|
||||
-----END RSA PRIVATE KEY-----
|
19
node_modules/tunnel/test/keys/agent3.cnf
generated
vendored
Normal file
19
node_modules/tunnel/test/keys/agent3.cnf
generated
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
[ req ]
|
||||
default_bits = 1024
|
||||
days = 999
|
||||
distinguished_name = req_distinguished_name
|
||||
attributes = req_attributes
|
||||
prompt = no
|
||||
|
||||
[ req_distinguished_name ]
|
||||
C = US
|
||||
ST = CA
|
||||
L = SF
|
||||
O = Joyent
|
||||
OU = Node.js
|
||||
CN = agent3
|
||||
emailAddress = ry@tinyclouds.org
|
||||
|
||||
[ req_attributes ]
|
||||
challengePassword = A challenge password
|
||||
|
15
node_modules/tunnel/test/keys/agent4-cert.pem
generated
vendored
Normal file
15
node_modules/tunnel/test/keys/agent4-cert.pem
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIICSDCCAbGgAwIBAgIJAIMGvn3huwmaMA0GCSqGSIb3DQEBBQUAMHoxCzAJBgNV
|
||||
BAYTAlVTMQswCQYDVQQIEwJDQTELMAkGA1UEBxMCU0YxDzANBgNVBAoTBkpveWVu
|
||||
dDEQMA4GA1UECxMHTm9kZS5qczEMMAoGA1UEAxMDY2EyMSAwHgYJKoZIhvcNAQkB
|
||||
FhFyeUB0aW55Y2xvdWRzLm9yZzAeFw0xMTAzMTQxODI5MTJaFw0zODA3MjkxODI5
|
||||
MTJaMH0xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTELMAkGA1UEBxMCU0YxDzAN
|
||||
BgNVBAoTBkpveWVudDEQMA4GA1UECxMHTm9kZS5qczEPMA0GA1UEAxMGYWdlbnQ0
|
||||
MSAwHgYJKoZIhvcNAQkBFhFyeUB0aW55Y2xvdWRzLm9yZzBcMA0GCSqGSIb3DQEB
|
||||
AQUAA0sAMEgCQQDN/yMfmQ8zdvmjlGk7b3Mn6wY2FjaMb4c5ENJX15vyYhKS1zhx
|
||||
6n0kQIn2vf6yqG7tO5Okz2IJiD9Sa06mK6GrAgMBAAGjFzAVMBMGA1UdJQQMMAoG
|
||||
CCsGAQUFBwMCMA0GCSqGSIb3DQEBBQUAA4GBAA8FXpRmdrHBdlofNvxa14zLvv0N
|
||||
WnUGUmxVklFLKXvpVWTanOhVgI2TDCMrT5WvCRTD25iT1EUKWxjDhFJrklQJ+IfC
|
||||
KC6fsgO7AynuxWSfSkc8/acGiAH+20vW9QxR53HYiIDMXEV/wnE0KVcr3t/d70lr
|
||||
ImanTrunagV+3O4O
|
||||
-----END CERTIFICATE-----
|
10
node_modules/tunnel/test/keys/agent4-csr.pem
generated
vendored
Normal file
10
node_modules/tunnel/test/keys/agent4-csr.pem
generated
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIBXTCCAQcCAQAwfTELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMQswCQYDVQQH
|
||||
EwJTRjEPMA0GA1UEChMGSm95ZW50MRAwDgYDVQQLEwdOb2RlLmpzMQ8wDQYDVQQD
|
||||
EwZhZ2VudDQxIDAeBgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMFwwDQYJ
|
||||
KoZIhvcNAQEBBQADSwAwSAJBAM3/Ix+ZDzN2+aOUaTtvcyfrBjYWNoxvhzkQ0lfX
|
||||
m/JiEpLXOHHqfSRAifa9/rKobu07k6TPYgmIP1JrTqYroasCAwEAAaAlMCMGCSqG
|
||||
SIb3DQEJBzEWExRBIGNoYWxsZW5nZSBwYXNzd29yZDANBgkqhkiG9w0BAQUFAANB
|
||||
AMzo7GUOBtGm5MSck1rrEE2C1bU3qoVvXVuiN3A/57zXeNeq24FZMLnkDeL9U+/b
|
||||
Kj646XFou04gla982Xp74p0=
|
||||
-----END CERTIFICATE REQUEST-----
|
9
node_modules/tunnel/test/keys/agent4-key.pem
generated
vendored
Normal file
9
node_modules/tunnel/test/keys/agent4-key.pem
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIBOQIBAAJBAM3/Ix+ZDzN2+aOUaTtvcyfrBjYWNoxvhzkQ0lfXm/JiEpLXOHHq
|
||||
fSRAifa9/rKobu07k6TPYgmIP1JrTqYroasCAwEAAQJAN8RQb+dx1A7rejtdWbfM
|
||||
Rww7PD07Oz2eL/a72wgFsdIabRuVypIoHunqV0sAegYtNJt9yu+VhREw0R5tx/qz
|
||||
EQIhAPY+nmzp0b4iFRk7mtGUmCTr9iwwzoqzITwphE7FpQnFAiEA1ihUHFT9YPHO
|
||||
f85skM6qZv77NEgXHO8NJmQZ5GX1ZK8CICzle+Mluo0tD6W7HV4q9pZ8wzSJbY8S
|
||||
W/PpKetm09F1AiAWTw8sAGKAtc/IGo3Oq+iuYAN1F8lolzJsfGMCGujsOwIgAJKP
|
||||
t3eXilwX3ZlsDWSklWNZ7iYcfYrvAc3JqU6gFCE=
|
||||
-----END RSA PRIVATE KEY-----
|
21
node_modules/tunnel/test/keys/agent4.cnf
generated
vendored
Normal file
21
node_modules/tunnel/test/keys/agent4.cnf
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
[ req ]
|
||||
default_bits = 1024
|
||||
days = 999
|
||||
distinguished_name = req_distinguished_name
|
||||
attributes = req_attributes
|
||||
prompt = no
|
||||
|
||||
[ req_distinguished_name ]
|
||||
C = US
|
||||
ST = CA
|
||||
L = SF
|
||||
O = Joyent
|
||||
OU = Node.js
|
||||
CN = agent4
|
||||
emailAddress = ry@tinyclouds.org
|
||||
|
||||
[ req_attributes ]
|
||||
challengePassword = A challenge password
|
||||
|
||||
[ ext_key_usage ]
|
||||
extendedKeyUsage = clientAuth
|
14
node_modules/tunnel/test/keys/ca1-cert.pem
generated
vendored
Normal file
14
node_modules/tunnel/test/keys/ca1-cert.pem
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIICIzCCAYwCCQC4ONZJx5BOwjANBgkqhkiG9w0BAQUFADBWMQswCQYDVQQGEwJK
|
||||
UDESMBAGA1UECxQJbm9kZWpzX2pwMQwwCgYDVQQDEwNjYTExJTAjBgkqhkiG9w0B
|
||||
CQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQuanAwHhcNMTMxMjI0MTEyMzIxWhcNNDEw
|
||||
NTEwMTEyMzIxWjBWMQswCQYDVQQGEwJKUDESMBAGA1UECxQJbm9kZWpzX2pwMQww
|
||||
CgYDVQQDEwNjYTExJTAjBgkqhkiG9w0BCQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQu
|
||||
anAwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAOJMS1ug8jUu0wwEfD4h9/Mg
|
||||
w0fvs7JbpMxtwpdcFpg/6ECd8YzGUvljLzeHPe2AhF26MiWIUN3YTxZRiQQ2tv93
|
||||
afRVWchdPypytmuxv2aYGjhZ66Tv4vNRizM71OE+66+KS30gEQW2k4MTr0ZVlRPR
|
||||
OVey+zRSLdVaKciB/XaBAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEApfbly4b+Ry1q
|
||||
bGIgGrlTvNFvF+j2RuHqSpuTB4nKyw1tbNreKmEEb6SBEfkjcTONx5rKECZ5RRPX
|
||||
z4R/o1G6Dn21ouf1pWQO0BC/HnLN30KvvsoZRoxBn/fqBlJA+j/Kpj3RQgFj6l2I
|
||||
AKI5fD+ucPqRGhjmmTsNyc+Ln4UfAq8=
|
||||
-----END CERTIFICATE-----
|
1
node_modules/tunnel/test/keys/ca1-cert.srl
generated
vendored
Normal file
1
node_modules/tunnel/test/keys/ca1-cert.srl
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
B111C9CEF0257692
|
17
node_modules/tunnel/test/keys/ca1-key.pem
generated
vendored
Normal file
@@ -0,0 +1,17 @@
-----BEGIN ENCRYPTED PRIVATE KEY-----
MIICxjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIbo5wvG42IY0CAggA
MBQGCCqGSIb3DQMHBAgf8SPuz4biYASCAoAR4r8MVikusOAEt4Xp6nB7whrMX4iG
G792Qpf21nHZPMV73w3cdkfimbAfUn8F50tSJwdrAa8U9BjjpL9Kt0loIyXt/r8c
6PWAQ4WZuLPgTFUTJUNAXrunBHI0iFWYEN4YzJYmT1qN3J4u0diy0MkKz6eJPfZ3
3v97+nF7dR2H86ZgLKsuE4pO5IRb60XW85d7CYaY6rU6l6mXMF0g9sIccHTlFoet
Xm6cA7NAm1XSI1ciYcoc8oaVE9dXoOALaTnBEZ2MJGpsYQ0Hr7kB4VKAO9wsOta5
L9nXPv79Nzo1MZMChkrORFnwOzH4ffsUwVQ70jUzkt5DEyzCM1oSxFNRQESxnFrr
7c1jLg2gxAVwnqYo8njsKJ23BZqZUxHsBgB2Mg1L/iPT6zhclD0u3RZx9MR4ezB2
IqoCF19Z5bblkReAeVRAE9Ol4hKVaCEIIPUspcw7eGVGONalHDCSXpIFnJoZLeXJ
OZjLmYlA6KkJw52eNE5IwIb8l/tha2fwNpRvlMoXp65yH9wKyJk8zPSM6WAk4dKD
nLrTCK4KtM6aIbG14Mff6WEf3uaLPM0cLwxmuypfieCZfkIzgytNdFZoBgaYUpon
zazvUMoy3gqDBorcU08SaosdRoL+s+QVkRhA29shf42lqOM4zbh0dTul4QDlLG0U
VBNeMJ3HnrqATfBU28j3bUqtuF2RffgcN/3ivlBjcyzF/iPt0TWmm6Zz5v4K8+b6
lOm6gofIz+ffg2cXfPzrqZ2/xhFkcerRuN0Xp5eAhlI2vGJVGuEc4X+tT7VtQgLV
iovqzlLhp+ph/gsfCcsYZ9iso3ozw+Cx1HfJ8XT7yWUgXxblkt4uszEo
-----END ENCRYPTED PRIVATE KEY-----
17
node_modules/tunnel/test/keys/ca1.cnf
generated
vendored
Normal file
@@ -0,0 +1,17 @@
[ req ]
default_bits = 1024
days = 9999
distinguished_name = req_distinguished_name
attributes = req_attributes
prompt = no
output_password = password

[ req_distinguished_name ]
C = JP
OU = nodejs_jp
CN = ca1
emailAddress = koichik@improvement.jp

[ req_attributes ]
challengePassword = A challenge password

14
node_modules/tunnel/test/keys/ca2-cert.pem
generated
vendored
Normal file
@@ -0,0 +1,14 @@
-----BEGIN CERTIFICATE-----
MIICIzCCAYwCCQCxIhZSDET+8DANBgkqhkiG9w0BAQUFADBWMQswCQYDVQQGEwJK
UDESMBAGA1UECxQJbm9kZWpzX2pwMQwwCgYDVQQDEwNjYTIxJTAjBgkqhkiG9w0B
CQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQuanAwHhcNMTMxMjI0MTEyMzIxWhcNNDEw
NTEwMTEyMzIxWjBWMQswCQYDVQQGEwJKUDESMBAGA1UECxQJbm9kZWpzX2pwMQww
CgYDVQQDEwNjYTIxJTAjBgkqhkiG9w0BCQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQu
anAwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMaaLMMe7K5eYABH3NnJoimG
LvY4S5tdGF6YRwfkn1bgGa+kEw1zNqa/Y0jSzs4h7bApt3+bKTalR4+Zk+0UmWgZ
Gvlq8+mdqDXtBKoWE3vYDPBmeNyKsgxf9UIhFOpsxVUeYP8t66qJyUk/FlFJcDqc
WPawikl1bUFSZXBKu4PxAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEAwh3sXPIkA5kn
fpg7fV5haS4EpFr9ia61dzWbhXDZtasAx+nWdWqgG4T+HIYSLlMNZbGJ998uhFZf
DEHlbY/WuSBukZ0w+xqKBtPyjLIQKVvNiaTx5YMzQes62R1iklOXzBzyHbYIxFOG
dqLfIjEe/mVVoR23LN2tr8Wa6+rmd+w=
-----END CERTIFICATE-----
1
node_modules/tunnel/test/keys/ca2-cert.srl
generated
vendored
Normal file
@@ -0,0 +1 @@
9BF2D4B2E00EDF16
10
node_modules/tunnel/test/keys/ca2-crl.pem
generated
vendored
Normal file
@@ -0,0 +1,10 @@
-----BEGIN X509 CRL-----
MIIBXTCBxzANBgkqhkiG9w0BAQQFADB6MQswCQYDVQQGEwJVUzELMAkGA1UECBMC
Q0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQKEwZKb3llbnQxEDAOBgNVBAsTB05vZGUu
anMxDDAKBgNVBAMTA2NhMjEgMB4GCSqGSIb3DQEJARYRcnlAdGlueWNsb3Vkcy5v
cmcXDTExMDMxNDE4MjkxNloXDTEzMTIwNzE4MjkxNlowHDAaAgkAgwa+feG7CZoX
DTExMDMxNDE4MjkxNFowDQYJKoZIhvcNAQEEBQADgYEArRKuEkOla61fm4zlZtHe
LTXFV0Hgo21PScHAp6JqPol4rN5R9+EmUkv7gPCVVBJ9VjIgxSosHiLsDiz3zR+u
txHemhzbdIVANAIiChnFct8sEqH2eL4N6XNUIlMIR06NjNl7NbN8w8haqiearnuT
wmnaL4TThPmpbpKAF7N7JqQ=
-----END X509 CRL-----
1
node_modules/tunnel/test/keys/ca2-database.txt
generated
vendored
Normal file
@@ -0,0 +1 @@
R 380729182912Z 110314182914Z 8306BE7DE1BB099A unknown /C=US/ST=CA/L=SF/O=Joyent/OU=Node.js/CN=agent4/emailAddress=ry@tinyclouds.org
17
node_modules/tunnel/test/keys/ca2-key.pem
generated
vendored
Normal file
@@ -0,0 +1,17 @@
-----BEGIN ENCRYPTED PRIVATE KEY-----
MIICxjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQI3aq9fKZIOF0CAggA
MBQGCCqGSIb3DQMHBAjyunMfVve0OwSCAoAdMsrRFlQUSILw+bq3cSVIIbFjwcs0
B1Uz2rc9SB+1qjsazjv4zvPQSXTrsx2EOSJf9PSPz7r+c0NzO9vfWLorpXof/lwL
C1tRN7/1OqEW/mTK+1wlv0M5C4cmf44BBXmI+y+RWrQ/qc+CWEMvfHwv9zWr2K+i
cLlZv55727GvZYCMMVLiqYd/Ejj98loBsE5dhN4JJ5MPaN3UHhFTCpD453GIIzCi
FRuYhOOtX4qYoEuP2db4S2qu26723ZJnYBEHkK2YZiRrgvoZHugyGIr4f/RRoSUI
fPgycgQfL3Ow+Y1G533PiZ+CYgh9cViUzhZImEPiZpSuUntAD1loOYkJuV9Ai9XZ
+t6+7tfkM3aAo1bkaU8KcfINxxNWfAhCbUQw+tGJl2A+73OM5AGjGSfzjQQL/FOa
5omfEvdfEX2XyRRlqnQ2VucvSTL9ZdzbIJGg/euJTpM44Fwc7yAZv2aprbPoPixu
yyf0LoTjlGGSBZvHkunpWx82lYEXvHhcnCxV5MDFw8wehvDrvcSuzb8//HzLOiOB
gzUr3DOQk4U1UD6xixZjAKC+NUwTVZoHg68KtmQfkq+eGUWf5oJP4xUigi3ui/Wy
OCBDdlRBkFtgLGL51KJqtq1ixx3Q9HMl0y6edr5Ls0unDIo0LtUWUUcAtr6wl+kK
zSztxFMi2zTtbhbkwoVpucNstFQNfV1k22vtnlcux2FV2DdZiJQwYpIbr8Gj6gpK
gtV5l9RFe21oZBcKPt/chrF8ayiClfGMpF3D2p2GqGCe0HuH5uM/JAFf60rbnriA
Nu1bWiXsXLRUXcLIQ/uEPR3Mvvo9k1h4Q6it1Rp67eQiXCX6h2uFq+sB
-----END ENCRYPTED PRIVATE KEY-----
1
node_modules/tunnel/test/keys/ca2-serial
generated
vendored
Normal file
@@ -0,0 +1 @@
01
17
node_modules/tunnel/test/keys/ca2.cnf
generated
vendored
Normal file
@@ -0,0 +1,17 @@
[ req ]
default_bits = 1024
days = 9999
distinguished_name = req_distinguished_name
attributes = req_attributes
prompt = no
output_password = password

[ req_distinguished_name ]
C = JP
OU = nodejs_jp
CN = ca2
emailAddress = koichik@improvement.jp

[ req_attributes ]
challengePassword = A challenge password

14
node_modules/tunnel/test/keys/ca3-cert.pem
generated
vendored
Normal file
@@ -0,0 +1,14 @@
-----BEGIN CERTIFICATE-----
MIICIzCCAYwCCQCudHFhEWiUHDANBgkqhkiG9w0BAQUFADBWMQswCQYDVQQGEwJK
UDESMBAGA1UECxQJbm9kZWpzX2pwMQwwCgYDVQQDEwNjYTMxJTAjBgkqhkiG9w0B
CQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQuanAwHhcNMTMxMjI0MTEyMzIxWhcNNDEw
NTEwMTEyMzIxWjBWMQswCQYDVQQGEwJKUDESMBAGA1UECxQJbm9kZWpzX2pwMQww
CgYDVQQDEwNjYTMxJTAjBgkqhkiG9w0BCQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQu
anAwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAJPRJMhCNtxX6dQ3rLdrzVCl
XJMSRIICpbsc7arOzSJcrsIYeYC4d29dGwxYNLnAkKSmHujFT9SmFgh88CoYETLp
gE9zCk9hVCwUlWelM/UaIrzeLT4SC3VBptnLmMtk2mqFniLcaFdMycAcX8OIhAgG
fbqyT5Wxwz7UMegip2ZjAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEADpu8a/W+NPnS
mhyIOxXn8O//2oH9ELlBYFLIgTid0xmS05x/MgkXtWqiBEEZFoOfoJBJxM3vTFs0
PiZvcVjv0IIjDF4s54yRVH+4WI2p7cil1fgzAVRTuOIuR+VyN7ct8s26a/7GFDq6
NJMByyjsJHyxwwri5hVv+jbLCxmnDjI=
-----END CERTIFICATE-----
1
node_modules/tunnel/test/keys/ca3-cert.srl
generated
vendored
Normal file
@@ -0,0 +1 @@
EF7B2CF0FA61DF41
17
node_modules/tunnel/test/keys/ca3-key.pem
generated
vendored
Normal file
@@ -0,0 +1,17 @@
-----BEGIN ENCRYPTED PRIVATE KEY-----
MIICxjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIwAta+L4c9soCAggA
MBQGCCqGSIb3DQMHBAgqRud2p3SvogSCAoDXoDJOJDkvgFpQ6rxeV5r0fLX4SrGJ
quv4yt02QxSDUPN2ZLtBt6bLzg4Zv2pIggufYJcZ2IOUnX82T7FlvBP8hbW1q3Bs
jAso7z8kJlFrZjNudjuP2l/X8tjrVyr3I0PoRoomtcHnCcSDdyne8Dqqj1enuikF
8b7FZUqocNLfu8LmNGxMmMwjw3UqhtpP5DjqV60B8ytQFPoz/gFh6aNGvsrD/avU
Dj8EJkQZP6Q32vmCzAvSiLjk7FA7RFmBtaurE9hJYNlc5v1eo69EUwPkeVlTpglJ
5sZAHxlhQCgc72ST6uFQKiMO3ng/JJA5N9EvacYSHQvI1TQIo43V2A//zUh/5hGL
sDv4pRuFq9miX8iiQpwo1LDfRzdwg7+tiLm8/mDyeLUSzDNc6GIX/tC9R4Ukq4ge
1Cfq0gtKSRxZhM8HqpGBC9rDs5mpdUqTRsoHLFn5T6/gMiAtrLCJxgD8JsZBa8rM
KZ09QEdZXTvpyvZ8bSakP5PF6Yz3QYO32CakL7LDPpCng0QDNHG10YaZbTOgJIzQ
NJ5o87DkgDx0Bb3L8FoREIBkjpYFbQi2fvPthoepZ3D5VamVsOwOiZ2sR1WF2J8l
X9c8GdG38byO+SQIPNZ8eT5JvUcNeSlIZiVSwvaEk496d2KzhmMMfoBLFVeHXG90
CIZPleVfkTmgNQgXPWcFngqTZdDEGsHjEDDhbEAijB3EeOxyiiEDJPMy5zqkdy5D
cZ/Y77EDbln7omcyL+cGvCgBhhYpTbtbuBtzW4CiCvcfEB5N4EtJKOTRJXIpL/d3
oVnZruqRRKidKwFMEZU2NZJX5FneAWFSeCv0IrY2vAUIc3El+n84CFFK
-----END ENCRYPTED PRIVATE KEY-----
17
node_modules/tunnel/test/keys/ca3.cnf
generated
vendored
Normal file
@@ -0,0 +1,17 @@
[ req ]
default_bits = 1024
days = 9999
distinguished_name = req_distinguished_name
attributes = req_attributes
prompt = no
output_password = password

[ req_distinguished_name ]
C = JP
OU = nodejs_jp
CN = ca3
emailAddress = koichik@improvement.jp

[ req_attributes ]
challengePassword = A challenge password

14
node_modules/tunnel/test/keys/ca4-cert.pem
generated
vendored
Normal file
@@ -0,0 +1,14 @@
-----BEGIN CERTIFICATE-----
MIICIzCCAYwCCQDUGh2r7lOpITANBgkqhkiG9w0BAQUFADBWMQswCQYDVQQGEwJK
UDESMBAGA1UECxQJbm9kZWpzX2pwMQwwCgYDVQQDEwNjYTQxJTAjBgkqhkiG9w0B
CQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQuanAwHhcNMTMxMjI0MTEyMzIxWhcNNDEw
NTEwMTEyMzIxWjBWMQswCQYDVQQGEwJKUDESMBAGA1UECxQJbm9kZWpzX2pwMQww
CgYDVQQDEwNjYTQxJTAjBgkqhkiG9w0BCQEWFmtvaWNoaWtAaW1wcm92ZW1lbnQu
anAwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAOOC+SPC8XzkjIHfKPMzzNV6
O/LpqQWdzJtEvFNW0oQ9g8gSV4iKqwUFrLNnSlwSGigvqKqGmYtG8S17ANWInoxI
c3sQlrS2cGbgLUBNKu4hZ7s+11EPOjbnn0QUE5w9GN8fy8CDx7ID/8URYKoxcoRv
0w7EJ2agfd68KS1ayxUXAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEAumPFeR63Dyki
SWQtRAe2QWkIFlSRAR2PvSDdsDMLwMeXF5wD3Hv51yfTu9Gkg0QJB86deYfQ5vfV
4QsOQ35icesa12boyYpTE0/OoEX1f/s1sLlszpRvtAki3J4bkcGWAzM5yO1fKqpQ
MbtPzLn+DA7ymxuJa6EQAEb+kaJEBuU=
-----END CERTIFICATE-----
Some files were not shown because too many files have changed in this diff.