From 87c1465d93a96d73b21b6d8b8dea10680e4869e1 Mon Sep 17 00:00:00 2001 From: Jason D'Amour Date: Mon, 19 May 2025 10:56:36 -0700 Subject: [PATCH 01/15] Add image digests to output --- action.yml | 17 +++--- common/src/dev-container-cli.ts | 4 +- github-action/src/main.ts | 105 ++++++++++++++++++++++++++++++++ 3 files changed, 117 insertions(+), 9 deletions(-) diff --git a/action.yml b/action.yml index 3af8a63eb..e262addaf 100644 --- a/action.yml +++ b/action.yml @@ -12,7 +12,7 @@ inputs: required: false description: One or more comma-separated image tags (defaults to latest) platform: - require: false + required: false description: Platforms for which the image should be built. If omitted, defaults to the platform of the GitHub Actions Runner. Multiple platforms should be comma separated. runCmd: required: false @@ -20,11 +20,11 @@ inputs: subFolder: required: false description: Specify a child folder (containing a .devcontainer) instead of using the repository root - default: + default: "" configFile: required: false description: Specify the path to a devcontainer.json file instead of using `./.devcontainer/devcontainer.json` or `./.devcontainer.json` - default: + default: "" checkoutPath: required: false description: Specify path to checked out folder if not using default (or for testing with nektos/act) @@ -35,7 +35,7 @@ inputs: description: Control when images are pushed. Options are never, filter, always. For filter (default), images are pushed if the refFilterForPush and eventFilterForPush conditions are met refFilterForPush: required: false - default: + default: "" description: Set the source branches (e.g. refs/heads/main) that are allowed to trigger a push of the dev container image. Leave empty to allow all. eventFilterForPush: required: false @@ -46,11 +46,11 @@ inputs: description: Specify environment variables to pass to the docker run command inheritEnv: required: false - default: false + default: "false" description: Inherit all environment variables of the runner CI machine. skipContainerUserIdUpdate: required: false - default: false + default: "false" description: For non-root Dev Containers (i.e. where `remoteUser` is specified), the action attempts to make the container user UID and GID match those of the host user. Set this to true to skip this step (defaults to false) userDataFolder: required: false @@ -59,9 +59,8 @@ inputs: required: false description: Specify additional images to use for build caching noCache: - type: boolean required: false - default: false + default: "false" description: Builds the image with `--no-cache` (takes precedence over `cacheFrom`) cacheTo: required: false @@ -69,6 +68,8 @@ inputs: outputs: runCmdOutput: description: The output of the command specified in the runCmd input + imageDigests: + description: The SHA256 digests of the built images. JSON object with platform keys and digest values. 
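+    # Illustrative sketch only (hypothetical values): for a build with
+    # platform set to linux/amd64,linux/arm64 this output holds a JSON object such as
+    #   {"linux/amd64": "sha256:<digest>", "linux/arm64": "sha256:<digest>"}
+    # A later workflow step could read one entry with
+    #   fromJSON(steps.<build-step-id>.outputs.imageDigests)['linux/amd64']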
runs: using: 'node20' main: 'github-action/run-main.js' diff --git a/common/src/dev-container-cli.ts b/common/src/dev-container-cli.ts index 4ad248d2d..85a9d2018 100644 --- a/common/src/dev-container-cli.ts +++ b/common/src/dev-container-cli.ts @@ -151,7 +151,9 @@ export interface DevContainerCliSuccessResult { } export interface DevContainerCliBuildResult - extends DevContainerCliSuccessResult {} + extends DevContainerCliSuccessResult { + imageDigests?: Record; +} export interface DevContainerCliBuildArgs { workspaceFolder: string; configFile: string | undefined; diff --git a/github-action/src/main.ts b/github-action/src/main.ts index 1189788cd..761304850 100644 --- a/github-action/src/main.ts +++ b/github-action/src/main.ts @@ -138,6 +138,54 @@ export async function runMain(): Promise { return; } + // If we have a platform specified and the image was built, get the image digest + if (buildResult.outcome === 'success') { + // Create a digests object to track digests for each platform + const digestsObj: Record = {}; + + if (platform) { + // Extract the image digest from the build output + if (buildResult.imageDigest) { + core.info(`Image digest for ${platform}: ${buildResult.imageDigest}`); + digestsObj[platform] = buildResult.imageDigest; + } else { + // If buildResult doesn't have imageDigest, try to get it from the built image + if (imageName) { + const inspectCmd = await exec('docker', ['buildx', 'imagetools', 'inspect', `${imageName}:${imageTagArray[0]}`, '--format', '{{json .}}'], { silent: true }); + if (inspectCmd.exitCode === 0) { + try { + const imageInfo = JSON.parse(inspectCmd.stdout); + if (imageInfo.manifest && imageInfo.manifest.digest) { + const digest = imageInfo.manifest.digest; + core.info(`Image digest for ${platform}: ${digest}`); + digestsObj[platform] = digest; + } + } catch (error) { + core.warning(`Failed to parse image digest: ${error.message}`); + } + } else { + core.warning(`Failed to inspect image: ${inspectCmd.stderr}`); + } + } + } + } else if (imageName) { + // For non-platform specific builds, still try to get the digest + const inspectCmd = await exec('docker', ['inspect', `${imageName}:${imageTagArray[0]}`, '--format', '{{.Id}}'], { silent: true }); + if (inspectCmd.exitCode === 0) { + const digest = inspectCmd.stdout.trim(); + core.info(`Image digest: ${digest}`); + digestsObj['default'] = digest; + } + } + + // Output the digests as a JSON string + if (Object.keys(digestsObj).length > 0) { + const digestsJson = JSON.stringify(digestsObj); + core.info(`Image digests: ${digestsJson}`); + core.setOutput('imageDigests', digestsJson); + } + } + for (const [key, value] of Object.entries(githubEnvs)) { if (process.env[key]) { // Add additional bind mount @@ -264,17 +312,74 @@ export async function runPost(): Promise { const platform = emptyStringAsUndefined(core.getInput('platform')); if (platform) { + // Create a digests object to track digests for each platform + const digestsObj: Record = {}; + const platforms = platform.split(/\s*,\s*/); + for (const tag of imageTagArray) { core.info(`Copying multiplatform image '${imageName}:${tag}'...`); const imageSource = `oci-archive:/tmp/output.tar:${tag}`; const imageDest = `docker://${imageName}:${tag}`; await copyImage(true, imageSource, imageDest); + + // After pushing, get and set digest + const inspectCmd = await exec('docker', ['buildx', 'imagetools', 'inspect', `${imageName}:${tag}`, '--format', '{{json .}}'], { silent: true }); + if (inspectCmd.exitCode === 0) { + try { + const imageInfo = 
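+            // Illustrative note on the two response shapes the parsing below assumes
+            // (field names taken from the code that follows, digest values are placeholders):
+            //   manifest list:  {"manifests": [{"digest": "sha256:...", "platform": {"os": "linux", "architecture": "arm64"}}]}
+            //   single image:   {"manifest": {"digest": "sha256:..."}}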
JSON.parse(inspectCmd.stdout); + + // If it's a manifest list, extract digests for each platform + if (imageInfo.manifests) { + for (const manifest of imageInfo.manifests) { + if (manifest.platform && manifest.digest) { + const platformStr = `${manifest.platform.os}/${manifest.platform.architecture}${manifest.platform.variant ? '/' + manifest.platform.variant : ''}`; + core.info(`Image digest for ${imageName}:${tag} (${platformStr}): ${manifest.digest}`); + digestsObj[platformStr] = manifest.digest; + } + } + } else if (imageInfo.manifest && imageInfo.manifest.digest) { + // Single platform image + const digest = imageInfo.manifest.digest; + core.info(`Image digest for ${imageName}:${tag}: ${digest}`); + digestsObj[platforms[0] || 'default'] = digest; + } + } catch (error) { + core.warning(`Failed to parse image digest: ${error.message}`); + } + } + } + + // Output the digests as a JSON string + if (Object.keys(digestsObj).length > 0) { + const digestsJson = JSON.stringify(digestsObj); + core.info(`Image digests: ${digestsJson}`); + core.setOutput('imageDigests', digestsJson); } } else { + // Create a digests object for non-platform specific builds + const digestsObj: Record = {}; + for (const tag of imageTagArray) { core.info(`Pushing image '${imageName}:${tag}'...`); await pushImage(imageName, tag); + + // After pushing, get and set digest + const inspectCmd = await exec('docker', ['inspect', `${imageName}:${tag}`, '--format', '{{.Id}}'], { silent: true }); + if (inspectCmd.exitCode === 0) { + const digest = inspectCmd.stdout.trim(); + core.info(`Image digest for ${imageName}:${tag}: ${digest}`); + digestsObj[tag] = digest; + } else { + core.warning(`Failed to get image digest: ${inspectCmd.stderr}`); + } + } + + // Output the digests as a JSON string + if (Object.keys(digestsObj).length > 0) { + const digestsJson = JSON.stringify(digestsObj); + core.info(`Image digests: ${digestsJson}`); + core.setOutput('imageDigests', digestsJson); } } } From 73d6362b10bea028601a38d7e810d2f02eef8b78 Mon Sep 17 00:00:00 2001 From: Jason D'Amour Date: Mon, 19 May 2025 10:56:47 -0700 Subject: [PATCH 02/15] Add docs on multiarch builds with native runners --- docs/multi-platform-builds.md | 117 +++++++++++++++++++++++++++++++++- 1 file changed, 114 insertions(+), 3 deletions(-) diff --git a/docs/multi-platform-builds.md b/docs/multi-platform-builds.md index 1ba9cd34d..57a04d490 100644 --- a/docs/multi-platform-builds.md +++ b/docs/multi-platform-builds.md @@ -2,13 +2,15 @@ Building dev containers to support multiple platforms (aka CPU architectures) is possible with the devcontainers/ci GitHub Action/Azure DevOps Task, but requires other actions/tasks to be run beforehand and has several caveats. -## General Notes/Caveats +## Multiplatform with Emulation + +### General Notes/Caveats - Multiplatform builds utilize emulation to build on architectures not native to the system the build is running on. This will significantly increase build times over native, single architecture builds. - If you are using runCmd, the command will only be run on the architecure of the system the build is running on. This means that, if you are using runCmd to test the image, there may be bugs on the alternate platforms that will not be caught by your test suite. Manual post-build testing is advised. - As of October 2022, all hosted servers for GitHub Actions and Azure Pipelines are x86_64 only. 
If you want to automatically run runCmd-based tests on your devcontainer on another architecure, you'll need a self-hosted runner on that architecture. It is possible that there will be future support for hosted arm64 machines, see [here for a tracking issue for Linux](https://github.com/actions/runner-images/issues/5631). -## GitHub Actions Example +### GitHub Actions Example ``` name: 'build' @@ -43,7 +45,7 @@ jobs: platform: linux/amd64,linux/arm64 ``` -## Azure DevOps Task Example +### Azure DevOps Task Example ``` trigger: @@ -72,3 +74,112 @@ jobs: imageName: UserNameHere/ImageNameHere platform: linux/amd64,linux/arm64 ``` + + +## Multiplatform with native runners + +### General notes + +- Uses matrix strategy to distribute platform builds across native runners +- Avoids cross-platform emulation for better performance and reliability +- Leverages the `imageDigests` output to capture platform-specific image digests +- Combines all platform-specific images into a single manifest list + +### Benefits of Native Runners + +Building on native runners instead of using QEMU emulation provides several advantages: + +1. **Performance**: Native builds are significantly faster than emulated builds +2. **Reliability**: Some platform-specific operations may not work correctly under emulation +3. **Parallelization**: Building multiple platforms simultaneously reduces overall build time + +### How the Matrix Outputs Work + +- In Github Actions when jobs run in a matrix, only the last matrix instance to complete can set the value for a given output. +- To work around this, we take the json output from the action, and write it to separate output variables per runner +- `imageDigests: {"linux/amd64": "sha256@abc123"}` gets turned into `IMAGE_DIGEST_linux_amd64=sha256@abc123` +- This approach requires hardcoding the list of output variables + +### Github Actions Example + +``` +name: Build Multi-Platform Images + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + # Build images on parallel native runners + build: + runs-on: ubuntu-latest + strategy: + matrix: + platform: + - linux/amd64 + - linux/arm64 + fail-fast: false + outputs: + IMAGE_DIGEST_linux_amd64: ${{ steps.build.outputs.IMAGE_DIGEST_linux_amd64 }} + IMAGE_DIGEST_linux_arm64: ${{ steps.build.outputs.IMAGE_DIGEST_linux_arm64 }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + id: build + uses: devcontainers/ci@v0.3 + with: + imageName: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + platform: ${{ matrix.platform }} + push: always + + # Combine all digests from the matrix into a single output + - name: Set matrix outputs + if: always() + run: | + # Extract the digest for this platform from the JSON output + DIGESTS_JSON='${{ steps.build.outputs.imageDigests }}' + PLATFORM="${{ matrix.platform }}" + DIGEST=$(echo $DIGESTS_JSON | jq -r --arg platform "$PLATFORM" '.[$platform]') + echo "IMAGE_DIGEST_${PLATFORM//\//_}=${DIGEST}" >> $GITHUB_OUTPUT + + + # Create a manifest list from all platform images + manifest: + runs-on: ubuntu-latest + needs: build + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Create and push 
manifest list + run: | + IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Create manifest list from each platform's digest + docker buildx imagetools create \ + -t ${IMAGE}:latest \ + ${IMAGE}@${{ needs.build.outputs.IMAGE_DIGEST_linux_amd64 }} \ + ${IMAGE}@${{ needs.build.outputs.IMAGE_DIGEST_linux_arm64 }} + + - name: Inspect manifest + run: | + docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest +``` From 0ba0decb572d40a414657be8c55ecfb4be1e6f74 Mon Sep 17 00:00:00 2001 From: Jason D'Amour Date: Tue, 24 Jun 2025 16:52:43 +0000 Subject: [PATCH 03/15] fix build errors --- github-action/src/main.ts | 75 +++++++++++++++++++++++++++++---------- 1 file changed, 57 insertions(+), 18 deletions(-) diff --git a/github-action/src/main.ts b/github-action/src/main.ts index 761304850..0eda80ff3 100644 --- a/github-action/src/main.ts +++ b/github-action/src/main.ts @@ -142,16 +142,29 @@ export async function runMain(): Promise { if (buildResult.outcome === 'success') { // Create a digests object to track digests for each platform const digestsObj: Record = {}; - + if (platform) { // Extract the image digest from the build output - if (buildResult.imageDigest) { - core.info(`Image digest for ${platform}: ${buildResult.imageDigest}`); - digestsObj[platform] = buildResult.imageDigest; + if (buildResult.imageDigests) { + core.info( + `Image digest for ${platform}: ${buildResult.imageDigests}`, + ); + digestsObj[platform] = buildResult.imageDigests[platform]; } else { // If buildResult doesn't have imageDigest, try to get it from the built image if (imageName) { - const inspectCmd = await exec('docker', ['buildx', 'imagetools', 'inspect', `${imageName}:${imageTagArray[0]}`, '--format', '{{json .}}'], { silent: true }); + const inspectCmd = await exec( + 'docker', + [ + 'buildx', + 'imagetools', + 'inspect', + `${imageName}:${imageTagArray[0]}`, + '--format', + '{{json .}}', + ], + {silent: true}, + ); if (inspectCmd.exitCode === 0) { try { const imageInfo = JSON.parse(inspectCmd.stdout); @@ -170,14 +183,23 @@ export async function runMain(): Promise { } } else if (imageName) { // For non-platform specific builds, still try to get the digest - const inspectCmd = await exec('docker', ['inspect', `${imageName}:${imageTagArray[0]}`, '--format', '{{.Id}}'], { silent: true }); + const inspectCmd = await exec( + 'docker', + [ + 'inspect', + `${imageName}:${imageTagArray[0]}`, + '--format', + '{{.Id}}', + ], + {silent: true}, + ); if (inspectCmd.exitCode === 0) { const digest = inspectCmd.stdout.trim(); core.info(`Image digest: ${digest}`); digestsObj['default'] = digest; } } - + // Output the digests as a JSON string if (Object.keys(digestsObj).length > 0) { const digestsJson = JSON.stringify(digestsObj); @@ -315,26 +337,39 @@ export async function runPost(): Promise { // Create a digests object to track digests for each platform const digestsObj: Record = {}; const platforms = platform.split(/\s*,\s*/); - + for (const tag of imageTagArray) { core.info(`Copying multiplatform image '${imageName}:${tag}'...`); const imageSource = `oci-archive:/tmp/output.tar:${tag}`; const imageDest = `docker://${imageName}:${tag}`; await copyImage(true, imageSource, imageDest); - + // After pushing, get and set digest - const inspectCmd = await exec('docker', ['buildx', 'imagetools', 'inspect', `${imageName}:${tag}`, '--format', '{{json .}}'], { silent: true }); + const inspectCmd = await exec( + 'docker', + [ + 'buildx', + 'imagetools', + 'inspect', + `${imageName}:${tag}`, + '--format', + 
'{{json .}}', + ], + {silent: true}, + ); if (inspectCmd.exitCode === 0) { try { const imageInfo = JSON.parse(inspectCmd.stdout); - + // If it's a manifest list, extract digests for each platform if (imageInfo.manifests) { for (const manifest of imageInfo.manifests) { if (manifest.platform && manifest.digest) { - const platformStr = `${manifest.platform.os}/${manifest.platform.architecture}${manifest.platform.variant ? '/' + manifest.platform.variant : ''}`; - core.info(`Image digest for ${imageName}:${tag} (${platformStr}): ${manifest.digest}`); + const platformStr = `${manifest.platform.os}/${manifest.platform.architecture}${manifest.platform.variant ? `/${manifest.platform.variant}` : ''}`; + core.info( + `Image digest for ${imageName}:${tag} (${platformStr}): ${manifest.digest}`, + ); digestsObj[platformStr] = manifest.digest; } } @@ -349,7 +384,7 @@ export async function runPost(): Promise { } } } - + // Output the digests as a JSON string if (Object.keys(digestsObj).length > 0) { const digestsJson = JSON.stringify(digestsObj); @@ -359,13 +394,17 @@ export async function runPost(): Promise { } else { // Create a digests object for non-platform specific builds const digestsObj: Record = {}; - + for (const tag of imageTagArray) { core.info(`Pushing image '${imageName}:${tag}'...`); await pushImage(imageName, tag); - + // After pushing, get and set digest - const inspectCmd = await exec('docker', ['inspect', `${imageName}:${tag}`, '--format', '{{.Id}}'], { silent: true }); + const inspectCmd = await exec( + 'docker', + ['inspect', `${imageName}:${tag}`, '--format', '{{.Id}}'], + {silent: true}, + ); if (inspectCmd.exitCode === 0) { const digest = inspectCmd.stdout.trim(); core.info(`Image digest for ${imageName}:${tag}: ${digest}`); @@ -374,7 +413,7 @@ export async function runPost(): Promise { core.warning(`Failed to get image digest: ${inspectCmd.stderr}`); } } - + // Output the digests as a JSON string if (Object.keys(digestsObj).length > 0) { const digestsJson = JSON.stringify(digestsObj); From a2d6752139df5de32bbe4bb9ef89f88d8f3ccf83 Mon Sep 17 00:00:00 2001 From: Jason D'Amour Date: Tue, 24 Jun 2025 17:05:31 +0000 Subject: [PATCH 04/15] test: commit the dist to test --- .gitignore | 2 +- azdo-task/DevcontainersCi/dist/config.d.ts | 21 + .../dist/dev-container-cli.d.ts | 59 + azdo-task/DevcontainersCi/dist/docker.d.ts | 11 + azdo-task/DevcontainersCi/dist/envvars.d.ts | 2 + azdo-task/DevcontainersCi/dist/exec.d.ts | 9 + azdo-task/DevcontainersCi/dist/file.d.ts | 1 + azdo-task/DevcontainersCi/dist/index.js | 13280 +++++++ azdo-task/DevcontainersCi/dist/index.js.map | 1 + azdo-task/DevcontainersCi/dist/lib.json | 41 + azdo-task/DevcontainersCi/dist/licenses.txt | 208 + azdo-task/DevcontainersCi/dist/skopeo.d.ts | 3 + .../dist/sourcemap-register.js | 1 + azdo-task/DevcontainersCi/dist/users.d.ts | 13 + azdo-task/DevcontainersCi/dist/windows.d.ts | 1 + github-action/dist/config.d.ts | 21 + github-action/dist/dev-container-cli.d.ts | 59 + github-action/dist/docker.d.ts | 11 + github-action/dist/envvars.d.ts | 2 + github-action/dist/exec.d.ts | 9 + github-action/dist/file.d.ts | 1 + github-action/dist/index.js | 31164 ++++++++++++++++ github-action/dist/index.js.map | 1 + github-action/dist/licenses.txt | 161 + github-action/dist/skopeo.d.ts | 3 + github-action/dist/sourcemap-register.js | 1 + github-action/dist/users.d.ts | 13 + github-action/dist/windows.d.ts | 1 + 28 files changed, 45099 insertions(+), 1 deletion(-) create mode 100644 azdo-task/DevcontainersCi/dist/config.d.ts create 
mode 100644 azdo-task/DevcontainersCi/dist/dev-container-cli.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/docker.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/envvars.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/exec.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/file.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/index.js create mode 100644 azdo-task/DevcontainersCi/dist/index.js.map create mode 100644 azdo-task/DevcontainersCi/dist/lib.json create mode 100644 azdo-task/DevcontainersCi/dist/licenses.txt create mode 100644 azdo-task/DevcontainersCi/dist/skopeo.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/sourcemap-register.js create mode 100644 azdo-task/DevcontainersCi/dist/users.d.ts create mode 100644 azdo-task/DevcontainersCi/dist/windows.d.ts create mode 100644 github-action/dist/config.d.ts create mode 100644 github-action/dist/dev-container-cli.d.ts create mode 100644 github-action/dist/docker.d.ts create mode 100644 github-action/dist/envvars.d.ts create mode 100644 github-action/dist/exec.d.ts create mode 100644 github-action/dist/file.d.ts create mode 100644 github-action/dist/index.js create mode 100644 github-action/dist/index.js.map create mode 100644 github-action/dist/licenses.txt create mode 100644 github-action/dist/skopeo.d.ts create mode 100644 github-action/dist/sourcemap-register.js create mode 100644 github-action/dist/users.d.ts create mode 100644 github-action/dist/windows.d.ts diff --git a/.gitignore b/.gitignore index 722649ab0..d70e21e89 100644 --- a/.gitignore +++ b/.gitignore @@ -109,4 +109,4 @@ output common_lib lib -dist + diff --git a/azdo-task/DevcontainersCi/dist/config.d.ts b/azdo-task/DevcontainersCi/dist/config.d.ts new file mode 100644 index 000000000..0f84908df --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/config.d.ts @@ -0,0 +1,21 @@ +export interface DevContainerConfig { + workspaceFolder?: string; + remoteUser?: string; + dockerFile?: string; + context?: string; + build?: { + dockerfile?: string; + context?: string; + args?: Record; + cacheFrom?: string | string[]; + cacheTo?: string | string[]; + }; + runArgs?: string[]; + mounts?: string[]; +} +export declare function loadFromFile(filepath: string): Promise; +export declare function loadFromString(content: string): DevContainerConfig; +export declare function getWorkspaceFolder(config: DevContainerConfig, repoPath: string): string; +export declare function getRemoteUser(config: DevContainerConfig): string; +export declare function getDockerfile(config: DevContainerConfig): string | undefined; +export declare function getContext(config: DevContainerConfig): string | undefined; diff --git a/azdo-task/DevcontainersCi/dist/dev-container-cli.d.ts b/azdo-task/DevcontainersCi/dist/dev-container-cli.d.ts new file mode 100644 index 000000000..d90485248 --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/dev-container-cli.d.ts @@ -0,0 +1,59 @@ +import { ExecFunction } from './exec'; +export interface DevContainerCliError { + outcome: 'error'; + code: number; + message: string; + description: string; +} +declare function isCliInstalled(exec: ExecFunction): Promise; +declare function installCli(exec: ExecFunction): Promise; +export interface DevContainerCliSuccessResult { + outcome: 'success'; +} +export interface DevContainerCliBuildResult extends DevContainerCliSuccessResult { + imageDigests?: Record; +} +export interface DevContainerCliBuildArgs { + workspaceFolder: string; + configFile: string | undefined; + imageName?: string[]; + platform?: 
string; + additionalCacheFroms?: string[]; + userDataFolder?: string; + output?: string; + noCache?: boolean; + cacheTo?: string[]; +} +declare function devContainerBuild(args: DevContainerCliBuildArgs, log: (data: string) => void): Promise; +export interface DevContainerCliUpResult extends DevContainerCliSuccessResult { + containerId: string; + remoteUser: string; + remoteWorkspaceFolder: string; +} +export interface DevContainerCliUpArgs { + workspaceFolder: string; + configFile: string | undefined; + additionalCacheFroms?: string[]; + cacheTo?: string[]; + skipContainerUserIdUpdate?: boolean; + env?: string[]; + userDataFolder?: string; + additionalMounts?: string[]; +} +declare function devContainerUp(args: DevContainerCliUpArgs, log: (data: string) => void): Promise; +export interface DevContainerCliExecArgs { + workspaceFolder: string; + configFile: string | undefined; + command: string[]; + env?: string[]; + userDataFolder?: string; +} +declare function devContainerExec(args: DevContainerCliExecArgs, log: (data: string) => void): Promise; +export declare const devcontainer: { + build: typeof devContainerBuild; + up: typeof devContainerUp; + exec: typeof devContainerExec; + isCliInstalled: typeof isCliInstalled; + installCli: typeof installCli; +}; +export {}; diff --git a/azdo-task/DevcontainersCi/dist/docker.d.ts b/azdo-task/DevcontainersCi/dist/docker.d.ts new file mode 100644 index 000000000..97c3e196f --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/docker.d.ts @@ -0,0 +1,11 @@ +import { ExecFunction } from './exec'; +export declare function isDockerBuildXInstalled(exec: ExecFunction): Promise; +export declare function buildImage(exec: ExecFunction, imageName: string, imageTag: string | undefined, checkoutPath: string, subFolder: string, skipContainerUserIdUpdate: boolean, cacheFrom: string[], cacheTo: string[]): Promise; +export declare function runContainer(exec: ExecFunction, imageName: string, imageTag: string | undefined, checkoutPath: string, subFolder: string, command: string, envs?: string[], mounts?: string[]): Promise; +export declare function pushImage(exec: ExecFunction, imageName: string, imageTag: string | undefined): Promise; +export interface DockerMount { + type: string; + source: string; + target: string; +} +export declare function parseMount(mountString: string): DockerMount; diff --git a/azdo-task/DevcontainersCi/dist/envvars.d.ts b/azdo-task/DevcontainersCi/dist/envvars.d.ts new file mode 100644 index 000000000..2e778b0d1 --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/envvars.d.ts @@ -0,0 +1,2 @@ +export declare function substituteValues(input: string): string; +export declare function populateDefaults(envs: string[], inheritEnv: boolean): string[]; diff --git a/azdo-task/DevcontainersCi/dist/exec.d.ts b/azdo-task/DevcontainersCi/dist/exec.d.ts new file mode 100644 index 000000000..a28ed33a9 --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/exec.d.ts @@ -0,0 +1,9 @@ +export interface ExecResult { + exitCode: number; + stdout: string; + stderr: string; +} +export interface ExecOptions { + silent?: boolean; +} +export type ExecFunction = (command: string, args: string[], options: ExecOptions) => Promise; diff --git a/azdo-task/DevcontainersCi/dist/file.d.ts b/azdo-task/DevcontainersCi/dist/file.d.ts new file mode 100644 index 000000000..d7bfac1dc --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/file.d.ts @@ -0,0 +1 @@ +export declare function getAbsolutePath(inputPath: string, referencePath: string): string; diff --git 
a/azdo-task/DevcontainersCi/dist/index.js b/azdo-task/DevcontainersCi/dist/index.js new file mode 100644 index 000000000..886e19a20 --- /dev/null +++ b/azdo-task/DevcontainersCi/dist/index.js @@ -0,0 +1,13280 @@ +require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 306: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isDockerBuildXInstalled = isDockerBuildXInstalled; +exports.buildImage = buildImage; +exports.runContainer = runContainer; +exports.pushImage = pushImage; +const task = __importStar(__nccwpck_require__(358)); +const docker = __importStar(__nccwpck_require__(276)); +const exec_1 = __nccwpck_require__(31); +function isDockerBuildXInstalled() { + return __awaiter(this, void 0, void 0, function* () { + return yield docker.isDockerBuildXInstalled(exec_1.exec); + }); +} +function buildImage(imageName, imageTag, checkoutPath, subFolder, skipContainerUserIdUpdate, cacheFrom, cacheTo) { + return __awaiter(this, void 0, void 0, function* () { + console.log('🏗 Building dev container...'); + try { + return yield docker.buildImage(exec_1.exec, imageName, imageTag, checkoutPath, subFolder, skipContainerUserIdUpdate, cacheFrom, cacheTo); + } + catch (error) { + task.setResult(task.TaskResult.Failed, error); + return ''; + } + }); +} +function runContainer(imageName, imageTag, checkoutPath, subFolder, command, envs) { + return __awaiter(this, void 0, void 0, function* () { + console.log('🏃‍♀️ Running dev container...'); + try { + yield docker.runContainer(exec_1.exec, imageName, imageTag, checkoutPath, subFolder, command, envs); + return true; + } + catch (error) { + task.setResult(task.TaskResult.Failed, error); + return false; + } + }); +} +function pushImage(imageName, imageTag) { + return __awaiter(this, void 0, void 0, function* () { + console.log('📌 Pushing image...'); + try { + yield docker.pushImage(exec_1.exec, imageName, imageTag); + return true; + } + catch (error) { + task.setResult(task.TaskResult.Failed, error); + return false; + } + }); +} + + +/***/ }), + +/***/ 31: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.exec = exec; +const task = __importStar(__nccwpck_require__(358)); +const stream = __importStar(__nccwpck_require__(203)); +// https://github.com/microsoft/azure-pipelines-task-lib/blob/master/node/docs/azure-pipelines-task-lib.md +/* global BufferEncoding */ +class TeeStream extends stream.Writable { + constructor(teeStream, options) { + super(options); + this.value = ''; + this.teeStream = teeStream; + } + _write(data, encoding, callback) { + this.value += data; + this.teeStream.write(data, encoding); // NOTE - currently ignoring teeStream callback + if (callback) { + callback(); + } + } + toString() { + return this.value; + } +} +class NullStream extends stream.Writable { + _write(data, encoding, callback) { + if (callback) { + callback(); + } + } +} +function trimCommand(input) { + if (input.startsWith('[command]')) { + const newLine = input.indexOf('\n'); + return input.substring(newLine + 1); + } + return input; +} +function exec(command, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const outStream = new TeeStream(options.silent ? new NullStream() : process.stdout); + const errStream = new TeeStream(options.silent ? new NullStream() : process.stderr); + const exitCode = yield task.exec(command, args, { + failOnStdErr: false, + silent: false, // always run non-silent - we just don't output to process.stdout/stderr with the TeeStreams above + ignoreReturnCode: true, + outStream, + errStream, + }); + return { + exitCode, + stdout: trimCommand(outStream.toString()), + stderr: errStream.toString(), + }; + }); +} + + +/***/ }), + +/***/ 915: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.runMain = runMain; +exports.runPost = runPost; +const task = __importStar(__nccwpck_require__(358)); +const task_1 = __nccwpck_require__(358); +const path_1 = __importDefault(__nccwpck_require__(928)); +const envvars_1 = __nccwpck_require__(363); +const dev_container_cli_1 = __nccwpck_require__(532); +const docker_1 = __nccwpck_require__(306); +const skopeo_1 = __nccwpck_require__(967); +const exec_1 = __nccwpck_require__(31); +function runMain() { + return __awaiter(this, void 0, void 0, function* () { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l; + try { + task.setTaskVariable('hasRunMain', 'true'); + const buildXInstalled = yield (0, docker_1.isDockerBuildXInstalled)(); + if (!buildXInstalled) { + console.log('### WARNING: docker buildx not available: add a step to set up with docker/setup-buildx-action - see https://github.com/devcontainers/ci/blob/main/docs/azure-devops-task.md'); + return; + } + const devContainerCliInstalled = yield dev_container_cli_1.devcontainer.isCliInstalled(exec_1.exec); + if (!devContainerCliInstalled) { + console.log('Installing @devcontainers/cli...'); + const success = yield dev_container_cli_1.devcontainer.installCli(exec_1.exec); + if (!success) { + task.setResult(task.TaskResult.Failed, '@devcontainers/cli install failed!'); + return; + } + } + const checkoutPath = (_a = task.getInput('checkoutPath')) !== null && _a !== void 0 ? _a : ''; + const imageName = task.getInput('imageName'); + const imageTag = task.getInput('imageTag'); + const platform = task.getInput('platform'); + const subFolder = (_b = task.getInput('subFolder')) !== null && _b !== void 0 ? _b : '.'; + const relativeConfigFile = task.getInput('configFile'); + const runCommand = task.getInput('runCmd'); + const envs = (_d = (_c = task.getInput('env')) === null || _c === void 0 ? void 0 : _c.split('\n')) !== null && _d !== void 0 ? _d : []; + const inheritEnv = ((_e = task.getInput('inheritEnv')) !== null && _e !== void 0 ? 
_e : 'false') === 'true'; + const inputEnvsWithDefaults = (0, envvars_1.populateDefaults)(envs, inheritEnv); + const cacheFrom = (_g = (_f = task.getInput('cacheFrom')) === null || _f === void 0 ? void 0 : _f.split('\n')) !== null && _g !== void 0 ? _g : []; + const noCache = ((_h = task.getInput('noCache')) !== null && _h !== void 0 ? _h : 'false') === 'true'; + const cacheTo = (_k = (_j = task.getInput('cacheTo')) === null || _j === void 0 ? void 0 : _j.split('\n')) !== null && _k !== void 0 ? _k : []; + const skipContainerUserIdUpdate = ((_l = task.getInput('skipContainerUserIdUpdate')) !== null && _l !== void 0 ? _l : 'false') === 'true'; + if (platform) { + const skopeoInstalled = yield (0, skopeo_1.isSkopeoInstalled)(); + if (!skopeoInstalled) { + console.log('skopeo not available and is required for multi-platform builds - make sure it is installed on your build agent'); + return; + } + } + const buildxOutput = platform ? 'type=oci,dest=/tmp/output.tar' : undefined; + const log = (message) => console.log(message); + const workspaceFolder = path_1.default.resolve(checkoutPath, subFolder); + const configFile = relativeConfigFile && path_1.default.resolve(checkoutPath, relativeConfigFile); + const resolvedImageTag = imageTag !== null && imageTag !== void 0 ? imageTag : 'latest'; + const imageTagArray = resolvedImageTag.split(/\s*,\s*/); + const fullImageNameArray = []; + for (const tag of imageTagArray) { + fullImageNameArray.push(`${imageName}:${tag}`); + } + if (imageName) { + if (fullImageNameArray.length === 1) { + if (!noCache && !cacheFrom.includes(fullImageNameArray[0])) { + // If the cacheFrom options don't include the fullImageName, add it here + // This ensures that when building a PR where the image specified in the action + // isn't included in devcontainer.json (or docker-compose.yml), the action still + // resolves a previous image for the tag as a layer cache (if pushed to a registry) + cacheFrom.splice(0, 0, fullImageNameArray[0]); + } + } + else { + // Don't automatically add --cache-from if multiple image tags are specified + console.log('Not adding --cache-from automatically since multiple image tags were supplied'); + } + } + else { + console.log('!! 
imageTag specified without specifying imageName - ignoring imageTag'); + } + const buildArgs = { + workspaceFolder, + configFile, + imageName: fullImageNameArray, + platform, + additionalCacheFroms: cacheFrom, + output: buildxOutput, + noCache, + cacheTo, + }; + console.log('\n\n'); + console.log('***'); + console.log('*** Building the dev container'); + console.log('***'); + const buildResult = yield dev_container_cli_1.devcontainer.build(buildArgs, log); + if (buildResult.outcome !== 'success') { + console.log(`### ERROR: Dev container build failed: ${buildResult.message} (exit code: ${buildResult.code})\n${buildResult.description}`); + task.setResult(task_1.TaskResult.Failed, buildResult.message); + } + if (buildResult.outcome !== 'success') { + return; + } + if (runCommand) { + console.log('\n\n'); + console.log('***'); + console.log('*** Starting the dev container'); + console.log('***'); + const upArgs = { + workspaceFolder, + configFile, + additionalCacheFroms: cacheFrom, + skipContainerUserIdUpdate, + env: inputEnvsWithDefaults, + }; + const upResult = yield dev_container_cli_1.devcontainer.up(upArgs, log); + if (upResult.outcome !== 'success') { + console.log(`### ERROR: Dev container up failed: ${upResult.message} (exit code: ${upResult.code})\n${upResult.description}`); + task.setResult(task_1.TaskResult.Failed, upResult.message); + } + if (upResult.outcome !== 'success') { + return; + } + console.log('\n\n'); + console.log('***'); + console.log('*** Running command in the dev container'); + console.log('***'); + const execArgs = { + workspaceFolder, + configFile, + command: ['bash', '-c', runCommand], + env: inputEnvsWithDefaults, + }; + let execLogString = ''; + const execLog = (message) => { + console.log(message); + if (!message.includes('@devcontainers/cli')) { + execLogString += message; + } + }; + const execResult = yield dev_container_cli_1.devcontainer.exec(execArgs, execLog); + if (execResult !== 0) { + console.log(`### ERROR: Dev container exec failed (exit code: ${execResult})`); + task.setResult(task_1.TaskResult.Failed, `Dev container exec failed (exit code: ${execResult})`); + return; + } + if (execLogString.length >= 25000) { + execLogString = execLogString.substring(0, 24963); + execLogString += 'TRUNCATED TO 25K CHAR MAX OUTPUT SIZE'; + } + console.log(`##vso[task.setvariable variable=runCmdOutput]${execLog}`); + } + else { + console.log('No runCmd set - skipping starting/running container'); + } + // TODO - should we stop the container? + } + catch (err) { + task.setResult(task.TaskResult.Failed, err.message); + } + }); +} +function runPost() { + return __awaiter(this, void 0, void 0, function* () { + var _a, _b, _c, _d, _e, _f; + const pushOption = task.getInput('push'); + const imageName = task.getInput('imageName'); + const pushOnFailedBuild = ((_a = task.getInput('pushOnFailedBuild')) !== null && _a !== void 0 ? 
_a : 'false') === 'true'; + // default to 'never' if not set and no imageName + if (pushOption === 'never' || (!pushOption && !imageName)) { + console.log(`Image push skipped because 'push' is set to '${pushOption}'`); + return; + } + // default to 'filter' if not set and imageName is set + if (pushOption === 'filter' || (!pushOption && imageName)) { + // https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml + const agentJobStatus = process.env.AGENT_JOBSTATUS; + switch (agentJobStatus) { + case 'Succeeded': + case 'SucceededWithIssues': + // continue + break; + default: + if (!pushOnFailedBuild) { + console.log(`Image push skipped because Agent JobStatus is '${agentJobStatus}'`); + return; + } + } + const buildReasonsForPush = (_c = (_b = task.getInput('buildReasonsForPush')) === null || _b === void 0 ? void 0 : _b.split('\n')) !== null && _c !== void 0 ? _c : []; + const sourceBranchFilterForPush = (_e = (_d = task.getInput('sourceBranchFilterForPush')) === null || _d === void 0 ? void 0 : _d.split('\n')) !== null && _e !== void 0 ? _e : []; + // check build reason is allowed + const buildReason = process.env.BUILD_REASON; + if (buildReasonsForPush.length !== 0 && // empty filter allows all + !buildReasonsForPush.some(s => s === buildReason)) { + console.log(`Image push skipped because buildReason (${buildReason}) is not in buildReasonsForPush`); + return; + } + // check branch is allowed + const sourceBranch = process.env.BUILD_SOURCEBRANCH; + if (sourceBranchFilterForPush.length !== 0 && // empty filter allows all + !sourceBranchFilterForPush.some(s => s === sourceBranch)) { + console.log(`Image push skipped because source branch (${sourceBranch}) is not in sourceBranchFilterForPush`); + return; + } + } + if (!imageName) { + if (pushOption) { + // pushOption was set (and not to "never") - give an error that imageName is required + task.setResult(task.TaskResult.Failed, `imageName input is required to push images (push: ${pushOption})`); + } + return; + } + const imageTag = (_f = task.getInput('imageTag')) !== null && _f !== void 0 ? _f : 'latest'; + const imageTagArray = imageTag.split(/\s*,\s*/); + const platform = task.getInput('platform'); + if (platform) { + for (const tag of imageTagArray) { + console.log(`Copying multiplatform image '${imageName}:${tag}'...`); + const imageSource = `oci-archive:/tmp/output.tar:${tag}`; + const imageDest = `docker://${imageName}:${tag}`; + yield (0, skopeo_1.copyImage)(true, imageSource, imageDest); + } + } + else { + for (const tag of imageTagArray) { + console.log(`Pushing image '${imageName}:${tag}'...`); + yield (0, docker_1.pushImage)(imageName, tag); + } + } + }); +} + + +/***/ }), + +/***/ 967: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isSkopeoInstalled = isSkopeoInstalled; +exports.copyImage = copyImage; +const task = __importStar(__nccwpck_require__(358)); +const skopeo = __importStar(__nccwpck_require__(421)); +const exec_1 = __nccwpck_require__(31); +function isSkopeoInstalled() { + return __awaiter(this, void 0, void 0, function* () { + return yield skopeo.isSkopeoInstalled(exec_1.exec); + }); +} +function copyImage(all, source, dest) { + return __awaiter(this, void 0, void 0, function* () { + console.log('📌 Copying image...'); + try { + yield skopeo.copyImage(exec_1.exec, all, source, dest); + return true; + } + catch (error) { + task.setResult(task.TaskResult.Failed, error); + return false; + } + }); +} + + +/***/ }), + +/***/ 202: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +var _a, _b; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isSigPipeError = exports._exposeCertSettings = exports._exposeProxySettings = exports._normalizeSeparators = exports._isRooted = exports._getDirectoryName = exports._ensureRooted = exports._isUncPath = exports._loadData = exports._ensurePatternRooted = exports._getFindInfoFromPattern = exports._cloneMatchOptions = exports._legacyFindFiles_convertPatternToRegExp = exports._which = exports._checkPath = exports._exist = exports._debug = exports._error = exports._warning = exports._command = exports._getVariableKey = exports._getVariable = exports._loc = exports._setResourcePath = exports._setErrStream = exports._setStdStream = exports._writeLine = exports._truncateBeforeSensitiveKeyword = exports._endsWith = exports._startsWith = exports.IssueAuditAction = exports.IssueSource = exports._vault = exports._knownVariableMap = void 0; +var fs = __nccwpck_require__(896); +var path = __nccwpck_require__(928); +var os = __nccwpck_require__(857); +var minimatch = __nccwpck_require__(533); +var util = __nccwpck_require__(23); +var tcm = __nccwpck_require__(373); +var vm = __nccwpck_require__(59); +var semver = __nccwpck_require__(763); +var crypto = __nccwpck_require__(982); +/** + * Hash table of known 
variable info. The formatted env var name is the lookup key. + * + * The purpose of this hash table is to keep track of known variables. The hash table + * needs to be maintained for multiple reasons: + * 1) to distinguish between env vars and job vars + * 2) to distinguish between secret vars and public + * 3) to know the real variable name and not just the formatted env var name. + */ +exports._knownVariableMap = {}; +var _commandCorrelationId; +//----------------------------------------------------- +// Enums +//----------------------------------------------------- +var IssueSource; +(function (IssueSource) { + IssueSource["CustomerScript"] = "CustomerScript"; + IssueSource["TaskInternal"] = "TaskInternal"; +})(IssueSource = exports.IssueSource || (exports.IssueSource = {})); +var IssueAuditAction; +(function (IssueAuditAction) { + IssueAuditAction[IssueAuditAction["Unknown"] = 0] = "Unknown"; + IssueAuditAction[IssueAuditAction["ShellTasksValidation"] = 1] = "ShellTasksValidation"; +})(IssueAuditAction = exports.IssueAuditAction || (exports.IssueAuditAction = {})); +//----------------------------------------------------- +// Validation Checks +//----------------------------------------------------- +// async await needs generators in node 4.x+ +if (semver.lt(process.versions.node, '4.2.0')) { + _warning('Tasks require a new agent. Upgrade your agent or node to 4.2.0 or later', IssueSource.TaskInternal); +} +//----------------------------------------------------- +// String convenience +//----------------------------------------------------- +function _startsWith(str, start) { + return str.slice(0, start.length) == start; +} +exports._startsWith = _startsWith; +function _endsWith(str, end) { + return str.slice(-end.length) == end; +} +exports._endsWith = _endsWith; +function _truncateBeforeSensitiveKeyword(str, sensitiveKeywordsPattern) { + if (!str) { + return str; + } + var index = str.search(sensitiveKeywordsPattern); + if (index <= 0) { + return str; + } + return "".concat(str.substring(0, index), "..."); +} +exports._truncateBeforeSensitiveKeyword = _truncateBeforeSensitiveKeyword; +//----------------------------------------------------- +// General Helpers +//----------------------------------------------------- +var _outStream = process.stdout; +var _errStream = process.stderr; +function _writeLine(str) { + _outStream.write(str + os.EOL); +} +exports._writeLine = _writeLine; +function _setStdStream(stdStream) { + _outStream = stdStream; +} +exports._setStdStream = _setStdStream; +function _setErrStream(errStream) { + _errStream = errStream; +} +exports._setErrStream = _setErrStream; +//----------------------------------------------------- +// Loc Helpers +//----------------------------------------------------- +var _locStringCache = {}; +var _resourceFiles = {}; +var _libResourceFileLoaded = false; +var _resourceCulture = 'en-US'; +function _loadResJson(resjsonFile) { + var resJson; + if (_exist(resjsonFile)) { + var resjsonContent = fs.readFileSync(resjsonFile, 'utf8').toString(); + // remove BOM + if (resjsonContent.indexOf('\uFEFF') == 0) { + resjsonContent = resjsonContent.slice(1); + } + try { + resJson = JSON.parse(resjsonContent); + } + catch (err) { + _debug('unable to parse resjson with err: ' + err.message); + } + } + else { + _debug('.resjson file not found: ' + resjsonFile); + } + return resJson; +} +function _loadLocStrings(resourceFile, culture) { + var locStrings = {}; + if (_exist(resourceFile)) { + var resourceJson = require(resourceFile); + if (resourceJson && 
resourceJson.hasOwnProperty('messages')) { + var locResourceJson; + // load up resource resjson for different culture + var localizedResourceFile = path.join(path.dirname(resourceFile), 'Strings', 'resources.resjson'); + var upperCulture = culture.toUpperCase(); + var cultures = []; + try { + cultures = fs.readdirSync(localizedResourceFile); + } + catch (ex) { } + for (var i = 0; i < cultures.length; i++) { + if (cultures[i].toUpperCase() == upperCulture) { + localizedResourceFile = path.join(localizedResourceFile, cultures[i], 'resources.resjson'); + if (_exist(localizedResourceFile)) { + locResourceJson = _loadResJson(localizedResourceFile); + } + break; + } + } + for (var key in resourceJson.messages) { + if (locResourceJson && locResourceJson.hasOwnProperty('loc.messages.' + key)) { + locStrings[key] = locResourceJson['loc.messages.' + key]; + } + else { + locStrings[key] = resourceJson.messages[key]; + } + } + } + } + else { + _warning('LIB_ResourceFile does not exist', IssueSource.TaskInternal); + } + return locStrings; +} +/** + * Sets the location of the resources json. This is typically the task.json file. + * Call once at the beginning of the script before any calls to loc. + * @param path Full path to the json. + * @param ignoreWarnings Won't throw warnings if path already set. + * @returns void + */ +function _setResourcePath(path, ignoreWarnings) { + if (ignoreWarnings === void 0) { ignoreWarnings = false; } + if (process.env['TASKLIB_INPROC_UNITS']) { + _resourceFiles = {}; + _libResourceFileLoaded = false; + _locStringCache = {}; + _resourceCulture = 'en-US'; + } + if (!_resourceFiles[path]) { + _checkPath(path, 'resource file path'); + _resourceFiles[path] = path; + _debug('adding resource file: ' + path); + _resourceCulture = _getVariable('system.culture') || _resourceCulture; + var locStrs = _loadLocStrings(path, _resourceCulture); + for (var key in locStrs) { + //cache loc string + _locStringCache[key] = locStrs[key]; + } + } + else { + if (ignoreWarnings) { + } + else { + _warning(_loc('LIB_ResourceFileAlreadySet', path), IssueSource.TaskInternal); + } + } +} +exports._setResourcePath = _setResourcePath; +/** + * Gets the localized string from the json resource file. Optionally formats with additional params. + * + * @param key key of the resources string in the resource file + * @param param additional params for formatting the string + * @returns string + */ +function _loc(key) { + var param = []; + for (var _i = 1; _i < arguments.length; _i++) { + param[_i - 1] = arguments[_i]; + } + if (!_libResourceFileLoaded) { + // merge loc strings from azure-pipelines-task-lib. 
+ var libResourceFile = __nccwpck_require__.ab + "lib.json"; + var libLocStrs = _loadLocStrings(__nccwpck_require__.ab + "lib.json", _resourceCulture); + for (var libKey in libLocStrs) { + //cache azure-pipelines-task-lib loc string + _locStringCache[libKey] = libLocStrs[libKey]; + } + _libResourceFileLoaded = true; + } + var locString; + ; + if (_locStringCache.hasOwnProperty(key)) { + locString = _locStringCache[key]; + } + else { + if (Object.keys(_resourceFiles).length <= 0) { + _warning("Resource file haven't been set, can't find loc string for key: ".concat(key), IssueSource.TaskInternal); + } + else { + _warning("Can't find loc string for key: ".concat(key)); + } + locString = key; + } + if (param.length > 0) { + return util.format.apply(this, [locString].concat(param)); + } + else { + return locString; + } +} +exports._loc = _loc; +//----------------------------------------------------- +// Input Helpers +//----------------------------------------------------- +/** + * Gets a variable value that is defined on the build/release definition or set at runtime. + * + * @param name name of the variable to get + * @returns string + */ +function _getVariable(name) { + var varval; + // get the metadata + var info; + var key = _getVariableKey(name); + if (exports._knownVariableMap.hasOwnProperty(key)) { + info = exports._knownVariableMap[key]; + } + if (info && info.secret) { + // get the secret value + varval = exports._vault.retrieveSecret('SECRET_' + key); + } + else { + // get the public value + varval = process.env[key]; + // fallback for pre 2.104.1 agent + if (!varval && name.toUpperCase() == 'AGENT.JOBSTATUS') { + varval = process.env['agent.jobstatus']; + } + } + _debug(name + '=' + varval); + return varval; +} +exports._getVariable = _getVariable; +function _getVariableKey(name) { + if (!name) { + throw new Error(_loc('LIB_ParameterIsRequired', 'name')); + } + return name.replace(/\./g, '_').replace(/ /g, '_').toUpperCase(); +} +exports._getVariableKey = _getVariableKey; +//----------------------------------------------------- +// Cmd Helpers +//----------------------------------------------------- +function _command(command, properties, message) { + var taskCmd = new tcm.TaskCommand(command, properties, message); + _writeLine(taskCmd.toString()); +} +exports._command = _command; +function _warning(message, source, auditAction) { + if (source === void 0) { source = IssueSource.TaskInternal; } + _command('task.issue', { + 'type': 'warning', + 'source': source, + 'correlationId': _commandCorrelationId, + 'auditAction': auditAction + }, message); +} +exports._warning = _warning; +function _error(message, source, auditAction) { + if (source === void 0) { source = IssueSource.TaskInternal; } + _command('task.issue', { + 'type': 'error', + 'source': source, + 'correlationId': _commandCorrelationId, + 'auditAction': auditAction + }, message); +} +exports._error = _error; +var debugMode = ((_a = _getVariable('system.debug')) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === 'true'; +var shouldCheckDebugMode = ((_b = _getVariable('DistributedTask.Tasks.Node.SkipDebugLogsWhenDebugModeOff')) === null || _b === void 0 ? 
void 0 : _b.toLowerCase()) === 'true'; +function _debug(message) { + if (!shouldCheckDebugMode + || (shouldCheckDebugMode && debugMode)) { + _command('task.debug', null, message); + } +} +exports._debug = _debug; +// //----------------------------------------------------- +// // Disk Functions +// //----------------------------------------------------- +/** + * Returns whether a path exists. + * + * @param path path to check + * @returns boolean + */ +function _exist(path) { + var exist = false; + try { + exist = !!(path && fs.statSync(path) != null); + } + catch (err) { + if (err && err.code === 'ENOENT') { + exist = false; + } + else { + throw err; + } + } + return exist; +} +exports._exist = _exist; +/** + * Checks whether a path exists. + * If the path does not exist, it will throw. + * + * @param p path to check + * @param name name only used in error message to identify the path + * @returns void + */ +function _checkPath(p, name) { + _debug('check path : ' + p); + if (!_exist(p)) { + throw new Error(_loc('LIB_PathNotFound', name, p)); + } +} +exports._checkPath = _checkPath; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns string + */ +function _which(tool, check) { + if (!tool) { + throw new Error('parameter \'tool\' is required'); + } + // recursive when check=true + if (check) { + var result = _which(tool, false); + if (result) { + return result; + } + else { + if (process.platform == 'win32') { + throw new Error(_loc('LIB_WhichNotFound_Win', tool)); + } + else { + throw new Error(_loc('LIB_WhichNotFound_Linux', tool)); + } + } + } + _debug("which '".concat(tool, "'")); + try { + // build the list of extensions to try + var extensions = []; + if (process.platform == 'win32' && process.env['PATHEXT']) { + for (var _i = 0, _a = process.env['PATHEXT'].split(path.delimiter); _i < _a.length; _i++) { + var extension = _a[_i]; + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (_isRooted(tool)) { + var filePath = _tryGetExecutablePath(tool, extensions); + if (filePath) { + _debug("found: '".concat(filePath, "'")); + return filePath; + } + _debug('not found'); + return ''; + } + // if any path separators, return empty + if (tool.indexOf('/') >= 0 || (process.platform == 'win32' && tool.indexOf('\\') >= 0)) { + _debug('not found'); + return ''; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a task lib perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the task lib should strive for consistency + // across platforms. 
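+        // --- illustrative usage (editor's sketch; not part of the upstream task-lib source) ---
+        // _which('node', false) walks the PATH entries gathered below and returns the first
+        // executable hit (e.g. '/usr/bin/node'), or '' when nothing matches; _which('node', true)
+        // throws LIB_WhichNotFound_Win/Linux instead of returning ''.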
+ var directories = []; + if (process.env['PATH']) { + for (var _b = 0, _c = process.env['PATH'].split(path.delimiter); _b < _c.length; _b++) { + var p = _c[_b]; + if (p) { + directories.push(p); + } + } + } + // return the first match + for (var _d = 0, directories_1 = directories; _d < directories_1.length; _d++) { + var directory = directories_1[_d]; + var filePath = _tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + _debug("found: '".concat(filePath, "'")); + return filePath; + } + } + _debug('not found'); + return ''; + } + catch (err) { + throw new Error(_loc('LIB_OperationFailed', 'which', err.message)); + } +} +exports._which = _which; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function _tryGetExecutablePath(filePath, extensions) { + try { + // test file exists + var stats = fs.statSync(filePath); + if (stats.isFile()) { + if (process.platform == 'win32') { + // on Windows, test for valid extension + var isExecutable = false; + var fileName = path.basename(filePath); + var dotIndex = fileName.lastIndexOf('.'); + if (dotIndex >= 0) { + var upperExt_1 = fileName.substr(dotIndex).toUpperCase(); + if (extensions.some(function (validExt) { return validExt.toUpperCase() == upperExt_1; })) { + return filePath; + } + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + catch (err) { + if (err.code != 'ENOENT') { + _debug("Unexpected error attempting to determine if executable file exists '".concat(filePath, "': ").concat(err)); + } + } + // try each extension + var originalFilePath = filePath; + for (var _i = 0, extensions_1 = extensions; _i < extensions_1.length; _i++) { + var extension = extensions_1[_i]; + var found = false; + var filePath_1 = originalFilePath + extension; + try { + var stats = fs.statSync(filePath_1); + if (stats.isFile()) { + if (process.platform == 'win32') { + // preserve the case of the actual file (since an extension was appended) + try { + var directory = path.dirname(filePath_1); + var upperName = path.basename(filePath_1).toUpperCase(); + for (var _a = 0, _b = fs.readdirSync(directory); _a < _b.length; _a++) { + var actualName = _b[_a]; + if (upperName == actualName.toUpperCase()) { + filePath_1 = path.join(directory, actualName); + break; + } + } + } + catch (err) { + _debug("Unexpected error attempting to determine the actual case of the file '".concat(filePath_1, "': ").concat(err)); + } + return filePath_1; + } + else { + if (isUnixExecutable(stats)) { + return filePath_1; + } + } + } + } + catch (err) { + if (err.code != 'ENOENT') { + _debug("Unexpected error attempting to determine if executable file exists '".concat(filePath_1, "': ").concat(err)); + } + } + } + return ''; +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return (stats.mode & 1) > 0 || ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || ((stats.mode & 64) > 0 && stats.uid === process.getuid()); +} +function _legacyFindFiles_convertPatternToRegExp(pattern) { + pattern = (process.platform == 'win32' ? 
pattern.replace(/\\/g, '/') : pattern) // normalize separator on Windows + .replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&') // regex escape - from http://stackoverflow.com/questions/3561493/is-there-a-regexp-escape-function-in-javascript + .replace(/\\\/\\\*\\\*\\\//g, '((\/.+/)|(\/))') // replace directory globstar, e.g. /hello/**/world + .replace(/\\\*\\\*/g, '.*') // replace remaining globstars with a wildcard that can span directory separators, e.g. /hello/**dll + .replace(/\\\*/g, '[^\/]*') // replace asterisks with a wildcard that cannot span directory separators, e.g. /hello/*.dll + .replace(/\\\?/g, '[^\/]'); // replace single character wildcards, e.g. /hello/log?.dll + pattern = "^".concat(pattern, "$"); + var flags = process.platform == 'win32' ? 'i' : ''; + return new RegExp(pattern, flags); +} +exports._legacyFindFiles_convertPatternToRegExp = _legacyFindFiles_convertPatternToRegExp; +function _cloneMatchOptions(matchOptions) { + return { + debug: matchOptions.debug, + nobrace: matchOptions.nobrace, + noglobstar: matchOptions.noglobstar, + dot: matchOptions.dot, + noext: matchOptions.noext, + nocase: matchOptions.nocase, + nonull: matchOptions.nonull, + matchBase: matchOptions.matchBase, + nocomment: matchOptions.nocomment, + nonegate: matchOptions.nonegate, + flipNegate: matchOptions.flipNegate + }; +} +exports._cloneMatchOptions = _cloneMatchOptions; +function _getFindInfoFromPattern(defaultRoot, pattern, matchOptions) { + // parameter validation + if (!defaultRoot) { + throw new Error('getFindRootFromPattern() parameter defaultRoot cannot be empty'); + } + if (!pattern) { + throw new Error('getFindRootFromPattern() parameter pattern cannot be empty'); + } + if (!matchOptions.nobrace) { + throw new Error('getFindRootFromPattern() expected matchOptions.nobrace to be true'); + } + // for the sake of determining the findPath, pretend nocase=false + matchOptions = _cloneMatchOptions(matchOptions); + matchOptions.nocase = false; + // check if basename only and matchBase=true + if (matchOptions.matchBase && + !_isRooted(pattern) && + (process.platform == 'win32' ? pattern.replace(/\\/g, '/') : pattern).indexOf('/') < 0) { + return { + adjustedPattern: pattern, + findPath: defaultRoot, + statOnly: false, + }; + } + // the technique applied by this function is to use the information on the Minimatch object determine + // the findPath. Minimatch breaks the pattern into path segments, and exposes information about which + // segments are literal vs patterns. + // + // note, the technique currently imposes a limitation for drive-relative paths with a glob in the + // first segment, e.g. C:hello*/world. it's feasible to overcome this limitation, but is left unsolved + // for now. + var minimatchObj = new minimatch.Minimatch(pattern, matchOptions); + // the "set" property is an array of arrays of parsed path segment info. the outer array should only + // contain one item, otherwise something went wrong. brace expansion can result in multiple arrays, + // but that should be turned off by the time this function is reached. + if (minimatchObj.set.length != 1) { + throw new Error('getFindRootFromPattern() expected Minimatch(...).set.length to be 1. Actual: ' + minimatchObj.set.length); + } + var literalSegments = []; + for (var _i = 0, _a = minimatchObj.set[0]; _i < _a.length; _i++) { + var parsedSegment = _a[_i]; + if (typeof parsedSegment == 'string') { + // the item is a string when the original input for the path segment does not contain any + // unescaped glob characters. 
+ // + // note, the string here is already unescaped (i.e. glob escaping removed), so it is ready + // to pass to find() as-is. for example, an input string 'hello\\*world' => 'hello*world'. + literalSegments.push(parsedSegment); + continue; + } + break; + } + // join the literal segments back together. Minimatch converts '\' to '/' on Windows, then squashes + // consequetive slashes, and finally splits on slash. this means that UNC format is lost, but can + // be detected from the original pattern. + var joinedSegments = literalSegments.join('/'); + if (joinedSegments && process.platform == 'win32' && _startsWith(pattern.replace(/\\/g, '/'), '//')) { + joinedSegments = '/' + joinedSegments; // restore UNC format + } + // determine the find path + var findPath; + if (_isRooted(pattern)) { // the pattern was rooted + findPath = joinedSegments; + } + else if (joinedSegments) { // the pattern was not rooted, and literal segments were found + findPath = _ensureRooted(defaultRoot, joinedSegments); + } + else { // the pattern was not rooted, and no literal segments were found + findPath = defaultRoot; + } + // clean up the path + if (findPath) { + findPath = _getDirectoryName(_ensureRooted(findPath, '_')); // hack to remove unnecessary trailing slash + findPath = _normalizeSeparators(findPath); // normalize slashes + } + return { + adjustedPattern: _ensurePatternRooted(defaultRoot, pattern), + findPath: findPath, + statOnly: literalSegments.length == minimatchObj.set[0].length, + }; +} +exports._getFindInfoFromPattern = _getFindInfoFromPattern; +function _ensurePatternRooted(root, p) { + if (!root) { + throw new Error('ensurePatternRooted() parameter "root" cannot be empty'); + } + if (!p) { + throw new Error('ensurePatternRooted() parameter "p" cannot be empty'); + } + if (_isRooted(p)) { + return p; + } + // normalize root + root = _normalizeSeparators(root); + // escape special glob characters + root = (process.platform == 'win32' ? root : root.replace(/\\/g, '\\\\')) // escape '\' on OSX/Linux + .replace(/(\[)(?=[^\/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]') // escape '*' + .replace(/\+\(/g, '[+](') // escape '+(' + .replace(/@\(/g, '[@](') // escape '@(' + .replace(/!\(/g, '[!]('); // escape '!(' + return _ensureRooted(root, p); +} +exports._ensurePatternRooted = _ensurePatternRooted; +//------------------------------------------------------------------- +// Populate the vault with sensitive data. Inputs and Endpoints +//------------------------------------------------------------------- +function _loadData() { + // in agent, prefer TempDirectory then workFolder. + // In interactive dev mode, it won't be + var keyPath = _getVariable("agent.TempDirectory") || _getVariable("agent.workFolder") || process.cwd(); + exports._vault = new vm.Vault(keyPath); + exports._knownVariableMap = {}; + _debug('loading inputs and endpoints'); + var loaded = 0; + for (var envvar in process.env) { + if (_startsWith(envvar, 'INPUT_') || + _startsWith(envvar, 'ENDPOINT_AUTH_') || + _startsWith(envvar, 'SECUREFILE_TICKET_') || + _startsWith(envvar, 'SECRET_') || + _startsWith(envvar, 'VSTS_TASKVARIABLE_')) { + // Record the secret variable metadata. This is required by getVariable to know whether + // to retrieve the value from the vault. In a 2.104.1 agent or higher, this metadata will + // be overwritten when the VSTS_SECRET_VARIABLES env var is processed below. 
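+            // --- illustrative example (editor's sketch; the variable name 'My.Token' is assumed) ---
+            // An agent exports a secret as SECRET_MY_TOKEN=...; the block below records
+            // { name: 'MY_TOKEN', secret: true } and _loadData moves the value into the vault, so a later
+            // _getVariable('My.Token') normalizes the name via _getVariableKey to 'MY_TOKEN' and reads it
+            // back with _vault.retrieveSecret('SECRET_MY_TOKEN').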
+ if (_startsWith(envvar, 'SECRET_')) { + var variableName = envvar.substring('SECRET_'.length); + if (variableName) { + // This is technically not the variable name (has underscores instead of dots), + // but it's good enough to make getVariable work in a pre-2.104.1 agent where + // the VSTS_SECRET_VARIABLES env var is not defined. + exports._knownVariableMap[_getVariableKey(variableName)] = { name: variableName, secret: true }; + } + } + // store the secret + var value = process.env[envvar]; + if (value) { + ++loaded; + _debug('loading ' + envvar); + exports._vault.storeSecret(envvar, value); + delete process.env[envvar]; + } + } + } + _debug('loaded ' + loaded); + var correlationId = process.env["COMMAND_CORRELATION_ID"]; + delete process.env["COMMAND_CORRELATION_ID"]; + _commandCorrelationId = correlationId ? String(correlationId) : ""; + // store public variable metadata + var names; + try { + names = JSON.parse(process.env['VSTS_PUBLIC_VARIABLES'] || '[]'); + } + catch (err) { + throw new Error('Failed to parse VSTS_PUBLIC_VARIABLES as JSON. ' + err); // may occur during interactive testing + } + names.forEach(function (name) { + exports._knownVariableMap[_getVariableKey(name)] = { name: name, secret: false }; + }); + delete process.env['VSTS_PUBLIC_VARIABLES']; + // store secret variable metadata + try { + names = JSON.parse(process.env['VSTS_SECRET_VARIABLES'] || '[]'); + } + catch (err) { + throw new Error('Failed to parse VSTS_SECRET_VARIABLES as JSON. ' + err); // may occur during interactive testing + } + names.forEach(function (name) { + exports._knownVariableMap[_getVariableKey(name)] = { name: name, secret: true }; + }); + delete process.env['VSTS_SECRET_VARIABLES']; + // avoid loading twice (overwrites .taskkey) + global['_vsts_task_lib_loaded'] = true; +} +exports._loadData = _loadData; +//-------------------------------------------------------------------------------- +// Internal path helpers. +//-------------------------------------------------------------------------------- +/** + * Defines if path is unc-path. + * + * @param path a path to a file. + * @returns true if path starts with double backslash, otherwise returns false. + */ +function _isUncPath(path) { + return /^\\\\[^\\]/.test(path); +} +exports._isUncPath = _isUncPath; +function _ensureRooted(root, p) { + if (!root) { + throw new Error('ensureRooted() parameter "root" cannot be empty'); + } + if (!p) { + throw new Error('ensureRooted() parameter "p" cannot be empty'); + } + if (_isRooted(p)) { + return p; + } + if (process.platform == 'win32' && root.match(/^[A-Z]:$/i)) { // e.g. C: + return root + p; + } + // ensure root ends with a separator + if (_endsWith(root, '/') || (process.platform == 'win32' && _endsWith(root, '\\'))) { + // root already ends with a separator + } + else { + root += path.sep; // append separator + } + return root + p; +} +exports._ensureRooted = _ensureRooted; +/** + * Determines the parent path and trims trailing slashes (when safe). Path separators are normalized + * in the result. This function works similar to the .NET System.IO.Path.GetDirectoryName() method. + * For example, C:\hello\world\ returns C:\hello\world (trailing slash removed). Returns empty when + * no higher directory can be determined. + */ +function _getDirectoryName(p) { + // short-circuit if empty + if (!p) { + return ''; + } + // normalize separators + p = _normalizeSeparators(p); + // on Windows, the goal of this function is to match the behavior of + // [System.IO.Path]::GetDirectoryName(), e.g. 
+ // C:/ => + // C:/hello => C:\ + // C:/hello/ => C:\hello + // C:/hello/world => C:\hello + // C:/hello/world/ => C:\hello\world + // C: => + // C:hello => C: + // C:hello/ => C:hello + // / => + // /hello => \ + // /hello/ => \hello + // //hello => + // //hello/ => + // //hello/world => + // //hello/world/ => \\hello\world + // + // unfortunately, path.dirname() can't simply be used. for example, on Windows + // it yields different results from Path.GetDirectoryName: + // C:/ => C:/ + // C:/hello => C:/ + // C:/hello/ => C:/ + // C:/hello/world => C:/hello + // C:/hello/world/ => C:/hello + // C: => C: + // C:hello => C: + // C:hello/ => C: + // / => / + // /hello => / + // /hello/ => / + // //hello => / + // //hello/ => / + // //hello/world => //hello/world + // //hello/world/ => //hello/world/ + // //hello/world/again => //hello/world/ + // //hello/world/again/ => //hello/world/ + // //hello/world/again/again => //hello/world/again + // //hello/world/again/again/ => //hello/world/again + if (process.platform == 'win32') { + if (/^[A-Z]:\\?[^\\]+$/i.test(p)) { // e.g. C:\hello or C:hello + return p.charAt(2) == '\\' ? p.substring(0, 3) : p.substring(0, 2); + } + else if (/^[A-Z]:\\?$/i.test(p)) { // e.g. C:\ or C: + return ''; + } + var lastSlashIndex = p.lastIndexOf('\\'); + if (lastSlashIndex < 0) { // file name only + return ''; + } + else if (p == '\\') { // relative root + return ''; + } + else if (lastSlashIndex == 0) { // e.g. \\hello + return '\\'; + } + else if (/^\\\\[^\\]+(\\[^\\]*)?$/.test(p)) { // UNC root, e.g. \\hello or \\hello\ or \\hello\world + return ''; + } + return p.substring(0, lastSlashIndex); // e.g. hello\world => hello or hello\world\ => hello\world + // note, this means trailing slashes for non-root directories + // (i.e. not C:\, \, or \\unc\) will simply be removed. + } + // OSX/Linux + if (p.indexOf('/') < 0) { // file name only + return ''; + } + else if (p == '/') { + return ''; + } + else if (_endsWith(p, '/')) { + return p.substring(0, p.length - 1); + } + return path.dirname(p); +} +exports._getDirectoryName = _getDirectoryName; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function _isRooted(p) { + p = _normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (process.platform == 'win32') { + return _startsWith(p, '\\') || // e.g. \ or \hello or \\hello + /^[A-Z]:/i.test(p); // e.g. C: or C:\hello + } + return _startsWith(p, '/'); // e.g. /hello +} +exports._isRooted = _isRooted; +function _normalizeSeparators(p) { + p = p || ''; + if (process.platform == 'win32') { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + var isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? 
'\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading // for UNC + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports._normalizeSeparators = _normalizeSeparators; +//----------------------------------------------------- +// Expose proxy information to vsts-node-api +//----------------------------------------------------- +function _exposeProxySettings() { + var proxyUrl = _getVariable('Agent.ProxyUrl'); + if (proxyUrl && proxyUrl.length > 0) { + var proxyUsername = _getVariable('Agent.ProxyUsername'); + var proxyPassword = _getVariable('Agent.ProxyPassword'); + var proxyBypassHostsJson = _getVariable('Agent.ProxyBypassList'); + global['_vsts_task_lib_proxy_url'] = proxyUrl; + global['_vsts_task_lib_proxy_username'] = proxyUsername; + global['_vsts_task_lib_proxy_bypass'] = proxyBypassHostsJson; + global['_vsts_task_lib_proxy_password'] = _exposeTaskLibSecret('proxy', proxyPassword || ''); + _debug('expose agent proxy configuration.'); + global['_vsts_task_lib_proxy'] = true; + } +} +exports._exposeProxySettings = _exposeProxySettings; +//----------------------------------------------------- +// Expose certificate information to vsts-node-api +//----------------------------------------------------- +function _exposeCertSettings() { + var ca = _getVariable('Agent.CAInfo'); + if (ca) { + global['_vsts_task_lib_cert_ca'] = ca; + } + var clientCert = _getVariable('Agent.ClientCert'); + if (clientCert) { + var clientCertKey = _getVariable('Agent.ClientCertKey'); + var clientCertArchive = _getVariable('Agent.ClientCertArchive'); + var clientCertPassword = _getVariable('Agent.ClientCertPassword'); + global['_vsts_task_lib_cert_clientcert'] = clientCert; + global['_vsts_task_lib_cert_key'] = clientCertKey; + global['_vsts_task_lib_cert_archive'] = clientCertArchive; + global['_vsts_task_lib_cert_passphrase'] = _exposeTaskLibSecret('cert', clientCertPassword || ''); + } + if (ca || clientCert) { + _debug('expose agent certificate configuration.'); + global['_vsts_task_lib_cert'] = true; + } + var skipCertValidation = _getVariable('Agent.SkipCertValidation') || 'false'; + if (skipCertValidation) { + global['_vsts_task_lib_skip_cert_validation'] = skipCertValidation.toUpperCase() === 'TRUE'; + } +} +exports._exposeCertSettings = _exposeCertSettings; +// We store the encryption key on disk and hold the encrypted content and key file in memory +// return base64encoded:base64encoded +// downstream vsts-node-api will retrieve the secret later +function _exposeTaskLibSecret(keyFile, secret) { + if (secret) { + var encryptKey = crypto.randomBytes(256); + var cipher = crypto.createCipher("aes-256-ctr", encryptKey); + var encryptedContent = cipher.update(secret, "utf8", "hex"); // CodeQL [SM01511] agent need to retrieve password later to connect to proxy server + encryptedContent += cipher.final("hex"); + var storageFile = path.join(_getVariable('Agent.TempDirectory') || _getVariable("agent.workFolder") || process.cwd(), keyFile); + fs.writeFileSync(storageFile, encryptKey.toString('base64'), { encoding: 'utf8' }); + return new Buffer(storageFile).toString('base64') + ':' + new Buffer(encryptedContent).toString('base64'); + } +} +function isSigPipeError(e) { + var _a; + if (!e || typeof e !== 'object') { + return false; + } + return e.code === 'EPIPE' && ((_a = e.syscall) === null || _a === void 0 ? 
void 0 : _a.toUpperCase()) === 'WRITE'; +} +exports.isSigPipeError = isSigPipeError; + + +/***/ }), + +/***/ 138: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var concatMap = __nccwpck_require__(87); +var balanced = __nccwpck_require__(380); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. 
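+  // --- illustrative example (editor's sketch) ---
+  // For 'a{b,c}d' at this point m.pre === 'a', n === ['b','c'] and m.post === 'd', and the
+  // final loop yields ['abd', 'acd']; a numeric sequence such as 'a{1..3}d' instead takes the
+  // isSequence branch below and produces ['a1d', 'a2d', 'a3d'].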
+ + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + + +/***/ }), + +/***/ 533: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = minimatch +minimatch.Minimatch = Minimatch + +const path = (() => { try { return __nccwpck_require__(928) } catch (e) {}})() || { + sep: '/' +} +minimatch.sep = path.sep + +const GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +const expand = __nccwpck_require__(138) + +const plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]' + +// * => any number of characters +const star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +const reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+const slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + const t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + const orig = minimatch + + const m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = options => { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +const MAX_PATTERN_LENGTH = 1024 * 64 +const assertValidPattern = pattern => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +const SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = false + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? 
'(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': /* istanbul ignore next */ { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
+ for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + const mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. 
+ // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. 
+ /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + + +/***/ }), + +/***/ 763: +/***/ ((module, exports) => { + +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var R = 0 + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '\\d+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')' + +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. 
+ +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' + +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' + +src[FULL] = '^' + FULLPLAIN + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' + +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' + +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' + +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' + +// Tilde ranges. 
+// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' + +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +safeRe[TILDETRIM] = new RegExp(makeSafeRe(src[TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' + +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' + +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +safeRe[CARETTRIM] = new RegExp(makeSafeRe(src[CARETTRIM]), 'g') +var caretTrimReplace = '$1^' + +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +safeRe[COMPARATORTRIM] = new RegExp(makeSafeRe(src[COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' + +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? 
safeRe[LOOSE] : safeRe[FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? safeRe[LOOSE] : safeRe[FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. 
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? safeRe[COMPARATORLOOSE] : safeRe[COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY) { + return true + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
+ this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? safeRe[HYPHENRANGELOOSE] : safeRe[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[CARETTRIM], caretTrimReplace) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + var compRe = loose ? safeRe[COMPARATORLOOSE] : safeRe[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? safeRe[TILDELOOSE] : safeRe[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[CARETLOOSE] : safeRe[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
safeRe[XRANGELOOSE] : safeRe[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(safeRe[STAR], '') +} + +// This function is passed to string.replace(safeRe[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + var match = version.match(safeRe[COERCE]) + + if (match == null) { + return null + } + + return parse(match[1] + + '.' + (match[2] || '0') + + '.' 
+ (match[3] || '0')) +} + + +/***/ }), + +/***/ 358: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getPlatform = exports.osType = exports.writeFile = exports.exist = exports.stats = exports.debug = exports.error = exports.warning = exports.command = exports.setTaskVariable = exports.getTaskVariable = exports.getSecureFileTicket = exports.getSecureFileName = exports.getEndpointAuthorization = exports.getEndpointAuthorizationParameterRequired = exports.getEndpointAuthorizationParameter = exports.getEndpointAuthorizationSchemeRequired = exports.getEndpointAuthorizationScheme = exports.getEndpointDataParameterRequired = exports.getEndpointDataParameter = exports.getEndpointUrlRequired = exports.getEndpointUrl = exports.getPathInputRequired = exports.getPathInput = exports.filePathSupplied = exports.getDelimitedInput = exports.getPipelineFeature = exports.getBoolFeatureFlag = exports.getBoolInput = exports.getInputRequired = exports.getInput = exports.setSecret = exports.setVariable = exports.getVariables = exports.assertAgent = exports.getVariable = exports.loc = exports.setResourcePath = exports.setSanitizedResult = exports.setResult = exports.setErrStream = exports.setStdStream = exports.AgentHostedMode = exports.Platform = exports.IssueSource = exports.FieldType = exports.ArtifactType = exports.IssueType = exports.TaskState = exports.TaskResult = void 0; +exports.updateReleaseName = exports.addBuildTag = exports.updateBuildNumber = exports.uploadBuildLog = exports.associateArtifact = exports.uploadArtifact = exports.logIssue = exports.logDetail = exports.setProgress = exports.setEndpoint = exports.addAttachment = exports.uploadSummary = exports.prependPath = exports.uploadFile = exports.CodeCoverageEnabler = exports.CodeCoveragePublisher = exports.TestPublisher = exports.getHttpCertConfiguration = exports.getHttpProxyConfiguration = exports.findMatch = exports.filter = exports.match = exports.tool = exports.execSync = exports.exec = exports.execAsync = exports.rmRF = exports.legacyFindFiles = exports.find = exports.retry = exports.mv = exports.cp = exports.ls = exports.which = exports.resolve = exports.mkdirP = exports.popd = exports.pushd = exports.cd = exports.checkPath = exports.cwd = exports.getAgentMode = exports.getNodeMajorVersion = void 0; +var childProcess = __nccwpck_require__(317); +var fs = __nccwpck_require__(896); +var path = __nccwpck_require__(928); +var os = __nccwpck_require__(857); +var minimatch = __nccwpck_require__(533); +var im = __nccwpck_require__(202); +var tcm = __nccwpck_require__(373); +var trm = __nccwpck_require__(419); +var semver = __nccwpck_require__(763); +var TaskResult; +(function (TaskResult) { + TaskResult[TaskResult["Succeeded"] = 0] = "Succeeded"; + TaskResult[TaskResult["SucceededWithIssues"] = 1] = "SucceededWithIssues"; + TaskResult[TaskResult["Failed"] = 2] = "Failed"; + TaskResult[TaskResult["Cancelled"] = 3] = "Cancelled"; + TaskResult[TaskResult["Skipped"] = 4] = "Skipped"; +})(TaskResult = exports.TaskResult || (exports.TaskResult = {})); +var TaskState; +(function (TaskState) { + 
TaskState[TaskState["Unknown"] = 0] = "Unknown"; + TaskState[TaskState["Initialized"] = 1] = "Initialized"; + TaskState[TaskState["InProgress"] = 2] = "InProgress"; + TaskState[TaskState["Completed"] = 3] = "Completed"; +})(TaskState = exports.TaskState || (exports.TaskState = {})); +var IssueType; +(function (IssueType) { + IssueType[IssueType["Error"] = 0] = "Error"; + IssueType[IssueType["Warning"] = 1] = "Warning"; +})(IssueType = exports.IssueType || (exports.IssueType = {})); +var ArtifactType; +(function (ArtifactType) { + ArtifactType[ArtifactType["Container"] = 0] = "Container"; + ArtifactType[ArtifactType["FilePath"] = 1] = "FilePath"; + ArtifactType[ArtifactType["VersionControl"] = 2] = "VersionControl"; + ArtifactType[ArtifactType["GitRef"] = 3] = "GitRef"; + ArtifactType[ArtifactType["TfvcLabel"] = 4] = "TfvcLabel"; +})(ArtifactType = exports.ArtifactType || (exports.ArtifactType = {})); +var FieldType; +(function (FieldType) { + FieldType[FieldType["AuthParameter"] = 0] = "AuthParameter"; + FieldType[FieldType["DataParameter"] = 1] = "DataParameter"; + FieldType[FieldType["Url"] = 2] = "Url"; +})(FieldType = exports.FieldType || (exports.FieldType = {})); +exports.IssueSource = im.IssueSource; +/** Platforms supported by our build agent */ +var Platform; +(function (Platform) { + Platform[Platform["Windows"] = 0] = "Windows"; + Platform[Platform["MacOS"] = 1] = "MacOS"; + Platform[Platform["Linux"] = 2] = "Linux"; +})(Platform = exports.Platform || (exports.Platform = {})); +var AgentHostedMode; +(function (AgentHostedMode) { + AgentHostedMode[AgentHostedMode["Unknown"] = 0] = "Unknown"; + AgentHostedMode[AgentHostedMode["SelfHosted"] = 1] = "SelfHosted"; + AgentHostedMode[AgentHostedMode["MsHosted"] = 2] = "MsHosted"; +})(AgentHostedMode = exports.AgentHostedMode || (exports.AgentHostedMode = {})); +//----------------------------------------------------- +// General Helpers +//----------------------------------------------------- +exports.setStdStream = im._setStdStream; +exports.setErrStream = im._setErrStream; +function setResult(result, message, done) { + (0, exports.debug)('task result: ' + TaskResult[result]); + // add an error issue + if (result == TaskResult.Failed && message) { + (0, exports.error)(message, exports.IssueSource.TaskInternal); + } + else if (result == TaskResult.SucceededWithIssues && message) { + (0, exports.warning)(message, exports.IssueSource.TaskInternal); + } + // task.complete + var properties = { 'result': TaskResult[result] }; + if (done) { + properties['done'] = 'true'; + } + (0, exports.command)('task.complete', properties, message); +} +exports.setResult = setResult; +/** + * Sets the result of the task with sanitized message. + * + * @param result TaskResult enum of Succeeded, SucceededWithIssues, Failed, Cancelled or Skipped. + * @param message A message which will be logged as an error issue if the result is Failed. Message will be truncated + * before first occurence of wellknown sensitive keyword. + * @param done Optional. Instructs the agent the task is done. This is helpful when child processes + * may still be running and prevent node from fully exiting. This argument is supported + * from agent version 2.142.0 or higher (otherwise will no-op). 
+ * @returns void + */ +function setSanitizedResult(result, message, done) { + var pattern = /password|key|secret|bearer|authorization|token|pat/i; + var sanitizedMessage = im._truncateBeforeSensitiveKeyword(message, pattern); + setResult(result, sanitizedMessage, done); +} +exports.setSanitizedResult = setSanitizedResult; +// +// Catching all exceptions +// +process.on('uncaughtException', function (err) { + if (!im.isSigPipeError(err)) { + setResult(TaskResult.Failed, (0, exports.loc)('LIB_UnhandledEx', err.message)); + (0, exports.error)(String(err.stack), im.IssueSource.TaskInternal); + } +}); +// +// Catching unhandled rejections from promises and rethrowing them as exceptions +// For example, a promise that is rejected but not handled by a .catch() handler in node 10 +// doesn't cause an uncaughtException but causes in Node 16. +// For types definitions(Error | Any) see https://nodejs.org/docs/latest-v16.x/api/process.html#event-unhandledrejection +// +process.on('unhandledRejection', function (reason) { + if (reason instanceof Error) { + throw reason; + } + else { + throw new Error(reason); + } +}); +//----------------------------------------------------- +// Loc Helpers +//----------------------------------------------------- +exports.setResourcePath = im._setResourcePath; +exports.loc = im._loc; +//----------------------------------------------------- +// Input Helpers +//----------------------------------------------------- +exports.getVariable = im._getVariable; +/** + * Asserts the agent version is at least the specified minimum. + * + * @param minimum minimum version version - must be 2.104.1 or higher + */ +function assertAgent(minimum) { + if (semver.lt(minimum, '2.104.1')) { + throw new Error('assertAgent() requires the parameter to be 2.104.1 or higher'); + } + var agent = (0, exports.getVariable)('Agent.Version'); + (0, exports.debug)('Detected Agent.Version=' + (agent ? agent : 'undefined')); + if (agent && semver.lt(agent, minimum)) { + throw new Error("Agent version ".concat(minimum, " or higher is required. Detected Agent version: ").concat(agent)); + } +} +exports.assertAgent = assertAgent; +/** + * Gets a snapshot of the current state of all job variables available to the task. + * Requires a 2.104.1 agent or higher for full functionality. + * + * Limitations on an agent prior to 2.104.1: + * 1) The return value does not include all public variables. Only public variables + * that have been added using setVariable are returned. + * 2) The name returned for each secret variable is the formatted environment variable + * name, not the actual variable name (unless it was set explicitly at runtime using + * setVariable). + * + * @returns VariableInfo[] + */ +function getVariables() { + return Object.keys(im._knownVariableMap) + .map(function (key) { + var info = im._knownVariableMap[key]; + return { name: info.name, value: (0, exports.getVariable)(info.name), secret: info.secret }; + }); +} +exports.getVariables = getVariables; +/** + * Sets a variable which will be available to subsequent tasks as well. + * + * @param name name of the variable to set + * @param val value to set + * @param secret whether variable is secret. Multi-line secrets are not allowed. Optional, defaults to false + * @param isOutput whether variable is an output variable. 
Optional, defaults to false + * @returns void + */ +function setVariable(name, val, secret, isOutput) { + if (secret === void 0) { secret = false; } + if (isOutput === void 0) { isOutput = false; } + // once a secret always a secret + var key = im._getVariableKey(name); + if (im._knownVariableMap.hasOwnProperty(key)) { + secret = secret || im._knownVariableMap[key].secret; + } + // store the value + var varValue = val || ''; + (0, exports.debug)('set ' + name + '=' + (secret && varValue ? '********' : varValue)); + if (secret) { + if (varValue && varValue.match(/\r|\n/) && "".concat(process.env['SYSTEM_UNSAFEALLOWMULTILINESECRET']).toUpperCase() != 'TRUE') { + throw new Error((0, exports.loc)('LIB_MultilineSecret')); + } + im._vault.storeSecret('SECRET_' + key, varValue); + delete process.env[key]; + } + else { + process.env[key] = varValue; + } + // store the metadata + im._knownVariableMap[key] = { name: name, secret: secret }; + // write the setvariable command + (0, exports.command)('task.setvariable', { 'variable': name || '', isOutput: (isOutput || false).toString(), 'issecret': (secret || false).toString() }, varValue); +} +exports.setVariable = setVariable; +/** + * Registers a value with the logger, so the value will be masked from the logs. Multi-line secrets are not allowed. + * + * @param val value to register + */ +function setSecret(val) { + if (val) { + if (val.match(/\r|\n/) && "".concat(process.env['SYSTEM_UNSAFEALLOWMULTILINESECRET']).toUpperCase() !== 'TRUE') { + throw new Error((0, exports.loc)('LIB_MultilineSecret')); + } + (0, exports.command)('task.setsecret', {}, val); + } +} +exports.setSecret = setSecret; +/** + * Gets the value of an input. + * If required is true and the value is not set, it will throw. + * + * @param name name of the input to get + * @param required whether input is required. optional, defaults to false + * @returns string + */ +function getInput(name, required) { + var inval = im._vault.retrieveSecret('INPUT_' + im._getVariableKey(name)); + if (required && !inval) { + throw new Error((0, exports.loc)('LIB_InputRequired', name)); + } + (0, exports.debug)(name + '=' + inval); + return inval; +} +exports.getInput = getInput; +/** + * Gets the value of an input. + * If the value is not set, it will throw. + * + * @param name name of the input to get + * @returns string + */ +function getInputRequired(name) { + return getInput(name, true); +} +exports.getInputRequired = getInputRequired; +/** + * Gets the value of an input and converts to a bool. Convenience. + * If required is true and the value is not set, it will throw. + * If required is false and the value is not set, returns false. + * + * @param name name of the bool input to get + * @param required whether input is required. optional, defaults to false + * @returns boolean + */ +function getBoolInput(name, required) { + return (getInput(name, required) || '').toUpperCase() == "TRUE"; +} +exports.getBoolInput = getBoolInput; +/** + * Gets the value of an feature flag and converts to a bool. + * @IMPORTANT This method is only for internal Microsoft development. Do not use it for external tasks. + * @param name name of the feature flag to get. + * @param defaultValue default value of the feature flag in case it's not found in env. (optional. Default value = false) + * @returns boolean + * @deprecated Don't use this for new development. Use getPipelineFeature instead. 
+ */ +function getBoolFeatureFlag(ffName, defaultValue) { + if (defaultValue === void 0) { defaultValue = false; } + var ffValue = process.env[ffName]; + if (!ffValue) { + (0, exports.debug)("Feature flag ".concat(ffName, " not found. Returning ").concat(defaultValue, " as default.")); + return defaultValue; + } + (0, exports.debug)("Feature flag ".concat(ffName, " = ").concat(ffValue)); + return ffValue.toLowerCase() === "true"; +} +exports.getBoolFeatureFlag = getBoolFeatureFlag; +/** + * Gets the value of an task feature and converts to a bool. + * @IMPORTANT This method is only for internal Microsoft development. Do not use it for external tasks. + * @param name name of the feature to get. + * @returns boolean + */ +function getPipelineFeature(featureName) { + var variableName = im._getVariableKey("DistributedTask.Tasks.".concat(featureName)); + var featureValue = process.env[variableName]; + if (!featureValue) { + (0, exports.debug)("Feature '".concat(featureName, "' not found. Returning false as default.")); + return false; + } + var boolValue = featureValue.toLowerCase() === "true"; + (0, exports.debug)("Feature '".concat(featureName, "' = '").concat(featureValue, "'. Processed as '").concat(boolValue, "'.")); + return boolValue; +} +exports.getPipelineFeature = getPipelineFeature; +/** + * Gets the value of an input and splits the value using a delimiter (space, comma, etc). + * Empty values are removed. This function is useful for splitting an input containing a simple + * list of items - such as build targets. + * IMPORTANT: Do not use this function for splitting additional args! Instead use argString(), which + * follows normal argument splitting rules and handles values encapsulated by quotes. + * If required is true and the value is not set, it will throw. + * + * @param name name of the input to get + * @param delim delimiter to split on + * @param required whether input is required. optional, defaults to false + * @returns string[] + */ +function getDelimitedInput(name, delim, required) { + var inputVal = getInput(name, required); + if (!inputVal) { + return []; + } + var result = []; + inputVal.split(delim).forEach(function (x) { + if (x) { + result.push(x); + } + }); + return result; +} +exports.getDelimitedInput = getDelimitedInput; +/** + * Checks whether a path inputs value was supplied by the user + * File paths are relative with a picker, so an empty path is the root of the repo. + * Useful if you need to condition work (like append an arg) if a value was supplied + * + * @param name name of the path input to check + * @returns boolean + */ +function filePathSupplied(name) { + // normalize paths + var pathValue = this.resolve(this.getPathInput(name) || ''); + var repoRoot = this.resolve((0, exports.getVariable)('build.sourcesDirectory') || (0, exports.getVariable)('system.defaultWorkingDirectory') || ''); + var supplied = pathValue !== repoRoot; + (0, exports.debug)(name + 'path supplied :' + supplied); + return supplied; +} +exports.filePathSupplied = filePathSupplied; +/** + * Gets the value of a path input + * It will be quoted for you if it isn't already and contains spaces + * If required is true and the value is not set, it will throw. + * If check is true and the path does not exist, it will throw. + * + * @param name name of the input to get + * @param required whether input is required. optional, defaults to false + * @param check whether path is checked. 
optional, defaults to false + * @returns string + */ +function getPathInput(name, required, check) { + var inval = getInput(name, required); + if (inval) { + if (check) { + (0, exports.checkPath)(inval, name); + } + } + return inval; +} +exports.getPathInput = getPathInput; +/** + * Gets the value of a path input + * It will be quoted for you if it isn't already and contains spaces + * If the value is not set, it will throw. + * If check is true and the path does not exist, it will throw. + * + * @param name name of the input to get + * @param check whether path is checked. optional, defaults to false + * @returns string + */ +function getPathInputRequired(name, check) { + return getPathInput(name, true, check); +} +exports.getPathInputRequired = getPathInputRequired; +//----------------------------------------------------- +// Endpoint Helpers +//----------------------------------------------------- +/** + * Gets the url for a service endpoint + * If the url was not set and is not optional, it will throw. + * + * @param id name of the service endpoint + * @param optional whether the url is optional + * @returns string + */ +function getEndpointUrl(id, optional) { + var urlval = process.env['ENDPOINT_URL_' + id]; + if (!optional && !urlval) { + throw new Error((0, exports.loc)('LIB_EndpointNotExist', id)); + } + (0, exports.debug)(id + '=' + urlval); + return urlval; +} +exports.getEndpointUrl = getEndpointUrl; +/** + * Gets the url for a service endpoint + * If the url was not set, it will throw. + * + * @param id name of the service endpoint + * @returns string + */ +function getEndpointUrlRequired(id) { + return getEndpointUrl(id, false); +} +exports.getEndpointUrlRequired = getEndpointUrlRequired; +/* + * Gets the endpoint data parameter value with specified key for a service endpoint + * If the endpoint data parameter was not set and is not optional, it will throw. + * + * @param id name of the service endpoint + * @param key of the parameter + * @param optional whether the endpoint data is optional + * @returns {string} value of the endpoint data parameter + */ +function getEndpointDataParameter(id, key, optional) { + var dataParamVal = process.env['ENDPOINT_DATA_' + id + '_' + key.toUpperCase()]; + if (!optional && !dataParamVal) { + throw new Error((0, exports.loc)('LIB_EndpointDataNotExist', id, key)); + } + (0, exports.debug)(id + ' data ' + key + ' = ' + dataParamVal); + return dataParamVal; +} +exports.getEndpointDataParameter = getEndpointDataParameter; +/* + * Gets the endpoint data parameter value with specified key for a service endpoint + * If the endpoint data parameter was not set, it will throw. + * + * @param id name of the service endpoint + * @param key of the parameter + * @returns {string} value of the endpoint data parameter + */ +function getEndpointDataParameterRequired(id, key) { + return getEndpointDataParameter(id, key, false); +} +exports.getEndpointDataParameterRequired = getEndpointDataParameterRequired; +/** + * Gets the endpoint authorization scheme for a service endpoint + * If the endpoint authorization scheme is not set and is not optional, it will throw. 
+ * + * @param id name of the service endpoint + * @param optional whether the endpoint authorization scheme is optional + * @returns {string} value of the endpoint authorization scheme + */ +function getEndpointAuthorizationScheme(id, optional) { + var authScheme = im._vault.retrieveSecret('ENDPOINT_AUTH_SCHEME_' + id); + if (!optional && !authScheme) { + throw new Error((0, exports.loc)('LIB_EndpointAuthNotExist', id)); + } + (0, exports.debug)(id + ' auth scheme = ' + authScheme); + return authScheme; +} +exports.getEndpointAuthorizationScheme = getEndpointAuthorizationScheme; +/** + * Gets the endpoint authorization scheme for a service endpoint + * If the endpoint authorization scheme is not set, it will throw. + * + * @param id name of the service endpoint + * @returns {string} value of the endpoint authorization scheme + */ +function getEndpointAuthorizationSchemeRequired(id) { + return getEndpointAuthorizationScheme(id, false); +} +exports.getEndpointAuthorizationSchemeRequired = getEndpointAuthorizationSchemeRequired; +/** + * Gets the endpoint authorization parameter value for a service endpoint with specified key + * If the endpoint authorization parameter is not set and is not optional, it will throw. + * + * @param id name of the service endpoint + * @param key key to find the endpoint authorization parameter + * @param optional optional whether the endpoint authorization scheme is optional + * @returns {string} value of the endpoint authorization parameter value + */ +function getEndpointAuthorizationParameter(id, key, optional) { + var authParam = im._vault.retrieveSecret('ENDPOINT_AUTH_PARAMETER_' + id + '_' + key.toUpperCase()); + if (!optional && !authParam) { + throw new Error((0, exports.loc)('LIB_EndpointAuthNotExist', id)); + } + (0, exports.debug)(id + ' auth param ' + key + ' = ' + authParam); + return authParam; +} +exports.getEndpointAuthorizationParameter = getEndpointAuthorizationParameter; +/** + * Gets the endpoint authorization parameter value for a service endpoint with specified key + * If the endpoint authorization parameter is not set, it will throw. + * + * @param id name of the service endpoint + * @param key key to find the endpoint authorization parameter + * @returns {string} value of the endpoint authorization parameter value + */ +function getEndpointAuthorizationParameterRequired(id, key) { + return getEndpointAuthorizationParameter(id, key, false); +} +exports.getEndpointAuthorizationParameterRequired = getEndpointAuthorizationParameterRequired; +/** + * Gets the authorization details for a service endpoint + * If the authorization was not set and is not optional, it will set the task result to Failed. 
+ * + * @param id name of the service endpoint + * @param optional whether the url is optional + * @returns string + */ +function getEndpointAuthorization(id, optional) { + var aval = im._vault.retrieveSecret('ENDPOINT_AUTH_' + id); + if (!optional && !aval) { + setResult(TaskResult.Failed, (0, exports.loc)('LIB_EndpointAuthNotExist', id)); + } + (0, exports.debug)(id + ' exists ' + (!!aval)); + var auth; + try { + if (aval) { + auth = JSON.parse(aval); + } + } + catch (err) { + throw new Error((0, exports.loc)('LIB_InvalidEndpointAuth', aval)); + } + return auth; +} +exports.getEndpointAuthorization = getEndpointAuthorization; +//----------------------------------------------------- +// SecureFile Helpers +//----------------------------------------------------- +/** + * Gets the name for a secure file + * + * @param id secure file id + * @returns string + */ +function getSecureFileName(id) { + var name = process.env['SECUREFILE_NAME_' + id]; + (0, exports.debug)('secure file name for id ' + id + ' = ' + name); + return name; +} +exports.getSecureFileName = getSecureFileName; +/** + * Gets the secure file ticket that can be used to download the secure file contents + * + * @param id name of the secure file + * @returns {string} secure file ticket + */ +function getSecureFileTicket(id) { + var ticket = im._vault.retrieveSecret('SECUREFILE_TICKET_' + id); + (0, exports.debug)('secure file ticket for id ' + id + ' = ' + ticket); + return ticket; +} +exports.getSecureFileTicket = getSecureFileTicket; +//----------------------------------------------------- +// Task Variable Helpers +//----------------------------------------------------- +/** + * Gets a variable value that is set by previous step from the same wrapper task. + * Requires a 2.115.0 agent or higher. + * + * @param name name of the variable to get + * @returns string + */ +function getTaskVariable(name) { + assertAgent('2.115.0'); + var inval = im._vault.retrieveSecret('VSTS_TASKVARIABLE_' + im._getVariableKey(name)); + if (inval) { + inval = inval.trim(); + } + (0, exports.debug)('task variable: ' + name + '=' + inval); + return inval; +} +exports.getTaskVariable = getTaskVariable; +/** + * Sets a task variable which will only be available to subsequent steps belong to the same wrapper task. + * Requires a 2.115.0 agent or higher. + * + * @param name name of the variable to set + * @param val value to set + * @param secret whether variable is secret. optional, defaults to false + * @returns void + */ +function setTaskVariable(name, val, secret) { + if (secret === void 0) { secret = false; } + assertAgent('2.115.0'); + var key = im._getVariableKey(name); + // store the value + var varValue = val || ''; + (0, exports.debug)('set task variable: ' + name + '=' + (secret && varValue ? '********' : varValue)); + im._vault.storeSecret('VSTS_TASKVARIABLE_' + key, varValue); + delete process.env[key]; + // write the command + (0, exports.command)('task.settaskvariable', { 'variable': name || '', 'issecret': (secret || false).toString() }, varValue); +} +exports.setTaskVariable = setTaskVariable; +//----------------------------------------------------- +// Cmd Helpers +//----------------------------------------------------- +exports.command = im._command; +exports.warning = im._warning; +exports.error = im._error; +exports.debug = im._debug; +//----------------------------------------------------- +// Disk Functions +//----------------------------------------------------- +/** + * Get's stat on a path. 
+ * Useful for checking whether a file or directory. Also getting created, modified and accessed time. + * see [fs.stat](https://nodejs.org/api/fs.html#fs_class_fs_stats) + * + * @param path path to check + * @returns fsStat + */ +function stats(path) { + return fs.statSync(path); +} +exports.stats = stats; +exports.exist = im._exist; +function writeFile(file, data, options) { + if (typeof (options) === 'string') { + fs.writeFileSync(file, data, { encoding: options }); + } + else { + fs.writeFileSync(file, data, options); + } +} +exports.writeFile = writeFile; +/** + * @deprecated Use `getPlatform` + * Useful for determining the host operating system. + * see [os.type](https://nodejs.org/api/os.html#os_os_type) + * + * @return the name of the operating system + */ +function osType() { + return os.type(); +} +exports.osType = osType; +/** + * Determine the operating system the build agent is running on. + * @returns {Platform} + * @throws {Error} Platform is not supported by our agent + */ +function getPlatform() { + switch (process.platform) { + case 'win32': return Platform.Windows; + case 'darwin': return Platform.MacOS; + case 'linux': return Platform.Linux; + default: throw Error((0, exports.loc)('LIB_PlatformNotSupported', process.platform)); + } +} +exports.getPlatform = getPlatform; +/** + * Resolves major version of Node.js engine used by the agent. + * @returns {Number} Node's major version. + */ +function getNodeMajorVersion() { + var _a; + var version = (_a = process === null || process === void 0 ? void 0 : process.versions) === null || _a === void 0 ? void 0 : _a.node; + if (!version) { + throw new Error((0, exports.loc)('LIB_UndefinedNodeVersion')); + } + var parts = version.split('.').map(Number); + if (parts.length < 1) { + return NaN; + } + return parts[0]; +} +exports.getNodeMajorVersion = getNodeMajorVersion; +/** + * Return hosted type of Agent + * @returns {AgentHostedMode} + */ +function getAgentMode() { + var agentCloudId = (0, exports.getVariable)('Agent.CloudId'); + if (agentCloudId === undefined) + return AgentHostedMode.Unknown; + if (agentCloudId) + return AgentHostedMode.MsHosted; + return AgentHostedMode.SelfHosted; +} +exports.getAgentMode = getAgentMode; +/** + * Returns the process's current working directory. + * see [process.cwd](https://nodejs.org/api/process.html#process_process_cwd) + * + * @return the path to the current working directory of the process + */ +function cwd() { + return process.cwd(); +} +exports.cwd = cwd; +exports.checkPath = im._checkPath; +/** + * Change working directory. 
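+ *
+ * Usage sketch (the target directory is a placeholder):
+ * @example
+ * tl.cd('/tmp');  // absolute path
+ * tl.cd('~');     // resolves to the home directory
+ * tl.cd('-');     // returns to the previous directory (OLDPWD)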
+ * + * @param {string} path - New working directory path + * @returns {void} + */ +function cd(path) { + if (path === '-') { + if (!process.env.OLDPWD) { + throw new Error((0, exports.loc)('LIB_NotFoundPreviousDirectory')); + } + else { + path = process.env.OLDPWD; + } + } + if (path === '~') { + path = os.homedir(); + } + if (!fs.existsSync(path)) { + throw new Error((0, exports.loc)('LIB_PathNotFound', 'cd', path)); + } + if (!fs.statSync(path).isDirectory()) { + throw new Error((0, exports.loc)('LIB_PathIsNotADirectory', path)); + } + try { + var currentPath = process.cwd(); + process.chdir(path); + process.env.OLDPWD = currentPath; + } + catch (error) { + (0, exports.debug)((0, exports.loc)('LIB_OperationFailed', 'cd', error)); + } +} +exports.cd = cd; +var dirStack = []; +function getActualStack() { + return [process.cwd()].concat(dirStack); +} +/** + * Change working directory and push it on the stack + * + * @param {string} dir - New working directory path + * @returns {void} + */ +function pushd(dir) { + if (dir === void 0) { dir = ''; } + var dirs = getActualStack(); + var maybeIndex = parseInt(dir); + if (dir === '+0') { + return dirs; + } + else if (dir.length === 0) { + if (dirs.length > 1) { + dirs.splice.apply(dirs, __spreadArray([0, 0], dirs.splice(1, 1), false)); + } + else { + throw new Error((0, exports.loc)('LIB_DirectoryStackEmpty')); + } + } + else if (!isNaN(maybeIndex)) { + if (maybeIndex < dirStack.length + 1) { + maybeIndex = dir.charAt(0) === '-' ? maybeIndex - 1 : maybeIndex; + } + dirs.splice.apply(dirs, __spreadArray([0, dirs.length], dirs.slice(maybeIndex).concat(dirs.slice(0, maybeIndex)), false)); + } + else { + dirs.unshift(dir); + } + var _path = path.resolve(dirs.shift()); + try { + cd(_path); + } + catch (error) { + if (!fs.existsSync(_path)) { + throw new Error((0, exports.loc)('Not found', 'pushd', _path)); + } + throw error; + } + dirStack.splice.apply(dirStack, __spreadArray([0, dirStack.length], dirs, false)); + return getActualStack(); +} +exports.pushd = pushd; +/** + * Change working directory back to previously pushed directory + * + * @param {string} index - Index to remove from the stack + * @returns {void} + */ +function popd(index) { + if (index === void 0) { index = ''; } + if (dirStack.length === 0) { + throw new Error((0, exports.loc)('LIB_DirectoryStackEmpty')); + } + var maybeIndex = parseInt(index); + if (isNaN(maybeIndex)) { + maybeIndex = 0; + } + else if (maybeIndex < dirStack.length + 1) { + maybeIndex = index.charAt(0) === '-' ? maybeIndex - 1 : maybeIndex; + } + if (maybeIndex > 0 || dirStack.length + maybeIndex === 0) { + maybeIndex = maybeIndex > 0 ? maybeIndex - 1 : maybeIndex; + dirStack.splice(maybeIndex, 1); + } + else { + var _path = path.resolve(dirStack.shift()); + cd(_path); + } + return getActualStack(); +} +exports.popd = popd; +/** + * Make a directory. 
Creates the full path with folders in between + * Will throw if it fails + * + * @param {string} p - Path to create + * @returns {void} + */ +function mkdirP(p) { + if (!p) { + throw new Error((0, exports.loc)('LIB_ParameterIsRequired', 'p')); + } + // build a stack of directories to create + var stack = []; + var testDir = p; + while (true) { + // validate the loop is not out of control + if (stack.length >= Number(process.env['TASKLIB_TEST_MKDIRP_FAILSAFE'] || 1000)) { + // let the framework throw + (0, exports.debug)('loop is out of control'); + fs.mkdirSync(p); + return; + } + (0, exports.debug)("testing directory '".concat(testDir, "'")); + var stats_1 = void 0; + try { + stats_1 = fs.statSync(testDir); + } + catch (err) { + if (err.code == 'ENOENT') { + // validate the directory is not the drive root + var parentDir = path.dirname(testDir); + if (testDir == parentDir) { + throw new Error((0, exports.loc)('LIB_MkdirFailedInvalidDriveRoot', p, testDir)); // Unable to create directory '{p}'. Root directory does not exist: '{testDir}' + } + // push the dir and test the parent + stack.push(testDir); + testDir = parentDir; + continue; + } + else if (err.code == 'UNKNOWN') { + throw new Error((0, exports.loc)('LIB_MkdirFailedInvalidShare', p, testDir)); // Unable to create directory '{p}'. Unable to verify the directory exists: '{testDir}'. If directory is a file share, please verify the share name is correct, the share is online, and the current process has permission to access the share. + } + else { + throw err; + } + } + if (!stats_1.isDirectory()) { + throw new Error((0, exports.loc)('LIB_MkdirFailedFileExists', p, testDir)); // Unable to create directory '{p}'. Conflicting file exists: '{testDir}' + } + // testDir exists + break; + } + // create each directory + while (stack.length) { + var dir = stack.pop(); // non-null because `stack.length` was truthy + (0, exports.debug)("mkdir '".concat(dir, "'")); + try { + fs.mkdirSync(dir); + } + catch (err) { + throw new Error((0, exports.loc)('LIB_MkdirFailed', p, err.message)); // Unable to create directory '{p}'. {err.message} + } + } +} +exports.mkdirP = mkdirP; +/** + * Resolves a sequence of paths or path segments into an absolute path. + * Calls node.js path.resolve() + * Allows L0 testing with consistent path formats on Mac/Linux and Windows in the mock implementation + * @param pathSegments + * @returns {string} + */ +function resolve() { + var pathSegments = []; + for (var _i = 0; _i < arguments.length; _i++) { + pathSegments[_i] = arguments[_i]; + } + var absolutePath = path.resolve.apply(this, pathSegments); + (0, exports.debug)('Absolute path for pathSegments: ' + pathSegments + ' = ' + absolutePath); + return absolutePath; +} +exports.resolve = resolve; +exports.which = im._which; +/** + * Returns array of files in the given path, or in current directory if no path provided. + * @param {unknown} optionsOrPaths - Available options: -R (recursive), -A (all files, include files beginning with ., except for . and ..) + * @param {unknown[]} paths - Paths to search. + * @return {string[]} - An array of files in the given path(s). 
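+ *
+ * A short usage sketch (directory paths are placeholders):
+ * @example
+ * var topLevel = tl.ls('/tmp/build');           // non-recursive listing
+ * var everything = tl.ls('-RA', '/tmp/build');  // recursive, including dotfiles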
+ */ +function ls(optionsOrPaths) { + var paths = []; + for (var _i = 1; _i < arguments.length; _i++) { + paths[_i - 1] = arguments[_i]; + } + var isRecursive = false; + var includeHidden = false; + if (typeof optionsOrPaths === 'string' && optionsOrPaths.startsWith('-')) { + var options = String(optionsOrPaths).toLowerCase(); + isRecursive = options.includes('r'); + includeHidden = options.includes('a'); + } + // Flatten paths if the paths argument is array + if (Array.isArray(paths)) { + paths = paths.flat(Infinity); + } + // If the first argument is not options, then it is a path + if (typeof optionsOrPaths !== 'string' || !optionsOrPaths.startsWith('-')) { + var pathsFromOptions = []; + if (Array.isArray(optionsOrPaths)) { + pathsFromOptions = optionsOrPaths; + } + else if (optionsOrPaths && typeof optionsOrPaths === 'string') { + pathsFromOptions = [optionsOrPaths]; + } + if (paths === undefined || paths.length === 0) { + paths = pathsFromOptions; + } + else { + paths.push.apply(paths, pathsFromOptions); + } + } + if (paths.length === 0) { + paths.push(path.resolve('.')); + } + var pathsCopy = __spreadArray([], paths, true); + var preparedPaths = []; + try { + var _loop_1 = function () { + var pathEntry = resolve(paths.shift()); + if (pathEntry === null || pathEntry === void 0 ? void 0 : pathEntry.includes('*')) { + paths.push.apply(paths, findMatch(path.dirname(pathEntry), [path.basename(pathEntry)])); + return "continue"; + } + if (fs.lstatSync(pathEntry).isDirectory()) { + preparedPaths.push.apply(preparedPaths, fs.readdirSync(pathEntry).map(function (file) { return path.join(pathEntry, file); })); + } + else { + preparedPaths.push(pathEntry); + } + }; + while (paths.length > 0) { + _loop_1(); + } + var entries = []; + var _loop_2 = function () { + var entry = preparedPaths.shift(); + var entrybasename = path.basename(entry); + if (entry === null || entry === void 0 ? void 0 : entry.includes('*')) { + preparedPaths.push.apply(preparedPaths, findMatch(path.dirname(entry), [entrybasename])); + return "continue"; + } + if (!includeHidden && entrybasename.startsWith('.') && entrybasename !== '.' && entrybasename !== '..') { + return "continue"; + } + var baseDir = pathsCopy.find(function (p) { return entry.startsWith(path.resolve(p)); }) || path.resolve('.'); + if (fs.lstatSync(entry).isDirectory() && isRecursive) { + preparedPaths.push.apply(preparedPaths, fs.readdirSync(entry).map(function (x) { return path.join(entry, x); })); + entries.push(path.relative(baseDir, entry)); + } + else { + entries.push(path.relative(baseDir, entry)); + } + }; + while (preparedPaths.length > 0) { + _loop_2(); + } + return entries; + } + catch (error) { + if (error.code === 'ENOENT') { + throw new Error((0, exports.loc)('LIB_PathNotFound', 'ls', error.message)); + } + else { + throw new Error((0, exports.loc)('LIB_OperationFailed', 'ls', error)); + } + } +} +exports.ls = ls; +/** + * Copies a file or folder. + * @param {string} sourceOrOptions - Either the source path or an option string '-r', '-f' , '-n' or '-rfn' for recursive, force and no-clobber. + * @param {string} destinationOrSource - Destination path or the source path. + * @param {string} [optionsOrDestination] - Options string or the destination path. + * @param {boolean} [continueOnError=false] - Optional. Whether to continue on error. + * @param {number} [retryCount=0] - Optional. Retry count to copy the file. It might help to resolve intermittent issues e.g. with UNC target paths on a remote host. 
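+ *
+ * Usage sketch (paths are placeholders):
+ * @example
+ * tl.cp('source.txt', 'existingDir');    // copy a file into an existing directory
+ * tl.cp('-r', 'sourceDir', 'destDir');   // copy a directory recursively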
+ * @returns {void} + */ +function cp(sourceOrOptions, destinationOrSource, optionsOrDestination, continueOnError, retryCount) { + if (continueOnError === void 0) { continueOnError = false; } + if (retryCount === void 0) { retryCount = 0; } + retry(function () { + var recursive = false; + var force = true; + var source = String(sourceOrOptions); + var destination = destinationOrSource; + var options = ''; + if (typeof sourceOrOptions === 'string' && sourceOrOptions.startsWith('-')) { + options = sourceOrOptions.toLowerCase(); + recursive = options.includes('r'); + force = !options.includes('n'); + source = destinationOrSource; + destination = String(optionsOrDestination); + } + else if (typeof optionsOrDestination === 'string' && optionsOrDestination && optionsOrDestination.startsWith('-')) { + options = optionsOrDestination.toLowerCase(); + recursive = options.includes('r'); + force = !options.includes('n'); + source = String(sourceOrOptions); + destination = destinationOrSource; + } + if (!fs.existsSync(destination) && !force) { + throw new Error((0, exports.loc)('LIB_PathNotFound', 'cp', destination)); + } + var lstatSource = fs.lstatSync(source); + if (!force && fs.existsSync(destination)) { + return; + } + try { + if (lstatSource.isSymbolicLink()) { + var symlinkTarget = fs.readlinkSync(source); + source = path.resolve(path.dirname(source), symlinkTarget); + lstatSource = fs.lstatSync(source); + } + if (lstatSource.isFile()) { + if (fs.existsSync(destination) && fs.lstatSync(destination).isDirectory()) { + destination = path.join(destination, path.basename(source)); + } + if (force) { + fs.copyFileSync(source, destination); + } + else { + fs.copyFileSync(source, destination, fs.constants.COPYFILE_EXCL); + } + } + else { + copyDirectoryWithResolvedSymlinks(source, path.join(destination, path.basename(source)), force); + } + } + catch (error) { + throw new Error((0, exports.loc)('LIB_OperationFailed', 'cp', error)); + } + }, [], { retryCount: retryCount, continueOnError: continueOnError }); +} +exports.cp = cp; +var copyDirectoryWithResolvedSymlinks = function (src, dest, force) { + var srcPath; + var destPath; + var entry; + var entries = fs.readdirSync(src, { withFileTypes: true }); + if (!fs.existsSync(dest)) { + fs.mkdirSync(dest, { recursive: true }); + } + for (var _i = 0, entries_1 = entries; _i < entries_1.length; _i++) { + entry = entries_1[_i]; + srcPath = path.join(src, entry.name); + destPath = path.join(dest, entry.name); + if (entry.isSymbolicLink()) { + // Resolve the symbolic link and copy the target + var symlinkTarget = fs.readlinkSync(srcPath); + var resolvedPath = path.resolve(path.dirname(srcPath), symlinkTarget); + var stat = fs.lstatSync(resolvedPath); + if (stat.isFile()) { + // Use the actual target file's name instead of the symbolic link's name + var targetFileName = path.basename(resolvedPath); + var targetDestPath = path.join(dest, targetFileName); + fs.copyFileSync(resolvedPath, targetDestPath); + } + else if (stat.isDirectory()) { + copyDirectoryWithResolvedSymlinks(resolvedPath, destPath, force); + } + } + else if (entry.isFile()) { + fs.copyFileSync(srcPath, destPath); + } + else if (entry.isDirectory()) { + copyDirectoryWithResolvedSymlinks(srcPath, destPath, force); + } + } +}; +/** + * Moves a path. + * + * @param {string} source - Source path. + * @param {string} dest - Destination path. + * @param {MoveOptionsVariants} [options] - Option string -f or -n for force and no clobber. + * @param {boolean} [continueOnError] - Optional. 
Whether to continue on error. + * @returns {void} + */ +function mv(source, dest, options, continueOnError) { + var force = false; + if (options && typeof options === 'string' && options.startsWith('-')) { + var lowercasedOptions = String(options).toLowerCase(); + force = lowercasedOptions.includes('f') && !lowercasedOptions.includes('n'); + } + var sourceExists = fs.existsSync(source); + var destExists = fs.existsSync(dest); + var sources = []; + try { + if (!sourceExists) { + if (source.includes('*')) { + sources.push.apply(sources, findMatch(path.resolve(path.dirname(source)), [path.basename(source)])); + } + else { + throw new Error((0, exports.loc)('LIB_PathNotFound', 'mv', source)); + } + } + else { + sources.push(source); + } + if (destExists && !force) { + throw new Error("File already exists at ".concat(dest)); + } + for (var _i = 0, sources_1 = sources; _i < sources_1.length; _i++) { + var source_1 = sources_1[_i]; + fs.renameSync(source_1, dest); + } + } + catch (error) { + (0, exports.debug)('mv failed'); + var errMsg = (0, exports.loc)('LIB_OperationFailed', 'mv', error); + (0, exports.debug)(errMsg); + if (!continueOnError) { + throw new Error(errMsg); + } + } +} +exports.mv = mv; +/** + * Tries to execute a function a specified number of times. + * + * @param func a function to be executed. + * @param args executed function arguments array. + * @param retryOptions optional. Defaults to { continueOnError: false, retryCount: 0 }. + * @returns the same as the usual function. + */ +function retry(func, args, retryOptions) { + if (retryOptions === void 0) { retryOptions = { continueOnError: false, retryCount: 0 }; } + while (retryOptions.retryCount >= 0) { + try { + return func.apply(void 0, args); + } + catch (e) { + if (retryOptions.retryCount <= 0) { + if (retryOptions.continueOnError) { + (0, exports.warning)(e, exports.IssueSource.TaskInternal); + break; + } + else { + throw e; + } + } + else { + (0, exports.debug)("Attempt to execute function \"".concat(func === null || func === void 0 ? void 0 : func.name, "\" failed, retries left: ").concat(retryOptions.retryCount)); + retryOptions.retryCount--; + } + } + } +} +exports.retry = retry; +/** + * Gets info about item stats. + * + * @param path a path to the item to be processed. + * @param followSymbolicLink indicates whether to traverse descendants of symbolic link directories. + * @param allowBrokenSymbolicLinks when true, broken symbolic link will not cause an error. + * @returns fs.Stats + */ +function _getStats(path, followSymbolicLink, allowBrokenSymbolicLinks) { + // stat returns info about the target of a symlink (or symlink chain), + // lstat returns info about a symlink itself + var stats; + if (followSymbolicLink) { + try { + // use stat (following symlinks) + stats = fs.statSync(path); + } + catch (err) { + if (err.code == 'ENOENT' && allowBrokenSymbolicLinks) { + // fallback to lstat (broken symlinks allowed) + stats = fs.lstatSync(path); + (0, exports.debug)(" ".concat(path, " (broken symlink)")); + } + else { + throw err; + } + } + } + else { + // use lstat (not following symlinks) + stats = fs.lstatSync(path); + } + return stats; +} +/** + * Recursively finds all paths a given path. Returns an array of paths. + * + * @param findPath path to search + * @param options optional. defaults to { followSymbolicLinks: true }. following soft links is generally appropriate unless deleting files. 
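+ *
+ * Usage sketch (the search root is a placeholder):
+ * @example
+ * var allPaths = tl.find('/tmp/build', { followSymbolicLinks: true });
+ * tl.debug('found ' + allPaths.length + ' items');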
+ * @returns string[] + */ +function find(findPath, options) { + if (!findPath) { + (0, exports.debug)('no path specified'); + return []; + } + // normalize the path, otherwise the first result is inconsistently formatted from the rest of the results + // because path.join() performs normalization. + findPath = path.normalize(findPath); + // debug trace the parameters + (0, exports.debug)("findPath: '".concat(findPath, "'")); + options = options || _getDefaultFindOptions(); + _debugFindOptions(options); + // return empty if not exists + try { + fs.lstatSync(findPath); + } + catch (err) { + if (err.code == 'ENOENT') { + (0, exports.debug)('0 results'); + return []; + } + throw err; + } + try { + var result = []; + // push the first item + var stack = [new _FindItem(findPath, 1)]; + var traversalChain = []; // used to detect cycles + var _loop_3 = function () { + // pop the next item and push to the result array + var item = stack.pop(); // non-null because `stack.length` was truthy + var stats_2 = void 0; + try { + // `item.path` equals `findPath` for the first item to be processed, when the `result` array is empty + var isPathToSearch = !result.length; + // following specified symlinks only if current path equals specified path + var followSpecifiedSymbolicLink = options.followSpecifiedSymbolicLink && isPathToSearch; + // following all symlinks or following symlink for the specified path + var followSymbolicLink = options.followSymbolicLinks || followSpecifiedSymbolicLink; + // stat the item. The stat info is used further below to determine whether to traverse deeper + stats_2 = _getStats(item.path, followSymbolicLink, options.allowBrokenSymbolicLinks); + } + catch (err) { + if (err.code == 'ENOENT' && options.skipMissingFiles) { + (0, exports.warning)("No such file or directory: \"".concat(item.path, "\" - skipping."), exports.IssueSource.TaskInternal); + return "continue"; + } + throw err; + } + result.push(item.path); + // note, isDirectory() returns false for the lstat of a symlink + if (stats_2.isDirectory()) { + (0, exports.debug)(" ".concat(item.path, " (directory)")); + if (options.followSymbolicLinks) { + // get the realpath + var realPath_1; + if (im._isUncPath(item.path)) { + // Sometimes there are spontaneous issues when working with unc-paths, so retries have been added for them. 
+ realPath_1 = retry(fs.realpathSync, [item.path], { continueOnError: false, retryCount: 5 }); + } + else { + realPath_1 = fs.realpathSync(item.path); + } + // fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // test for a cycle + if (traversalChain.some(function (x) { return x == realPath_1; })) { + (0, exports.debug)(' cycle detected'); + return "continue"; + } + // update the traversal chain + traversalChain.push(realPath_1); + } + // push the child items in reverse onto the stack + var childLevel_1 = item.level + 1; + var childItems = fs.readdirSync(item.path) + .map(function (childName) { return new _FindItem(path.join(item.path, childName), childLevel_1); }); + for (var i = childItems.length - 1; i >= 0; i--) { + stack.push(childItems[i]); + } + } + else { + (0, exports.debug)(" ".concat(item.path, " (file)")); + } + }; + while (stack.length) { + _loop_3(); + } + (0, exports.debug)("".concat(result.length, " results")); + return result; + } + catch (err) { + throw new Error((0, exports.loc)('LIB_OperationFailed', 'find', err.message)); + } +} +exports.find = find; +var _FindItem = /** @class */ (function () { + function _FindItem(path, level) { + this.path = path; + this.level = level; + } + return _FindItem; +}()); +function _debugFindOptions(options) { + (0, exports.debug)("findOptions.allowBrokenSymbolicLinks: '".concat(options.allowBrokenSymbolicLinks, "'")); + (0, exports.debug)("findOptions.followSpecifiedSymbolicLink: '".concat(options.followSpecifiedSymbolicLink, "'")); + (0, exports.debug)("findOptions.followSymbolicLinks: '".concat(options.followSymbolicLinks, "'")); + (0, exports.debug)("findOptions.skipMissingFiles: '".concat(options.skipMissingFiles, "'")); +} +function _getDefaultFindOptions() { + return { + allowBrokenSymbolicLinks: false, + followSpecifiedSymbolicLink: true, + followSymbolicLinks: true, + skipMissingFiles: false + }; +} +/** + * Prefer tl.find() and tl.match() instead. This function is for backward compatibility + * when porting tasks to Node from the PowerShell or PowerShell3 execution handler. + * + * @param rootDirectory path to root unrooted patterns with + * @param pattern include and exclude patterns + * @param includeFiles whether to include files in the result. defaults to true when includeFiles and includeDirectories are both false + * @param includeDirectories whether to include directories in the result + * @returns string[] + */ +function legacyFindFiles(rootDirectory, pattern, includeFiles, includeDirectories) { + if (!pattern) { + throw new Error('pattern parameter cannot be empty'); + } + (0, exports.debug)("legacyFindFiles rootDirectory: '".concat(rootDirectory, "'")); + (0, exports.debug)("pattern: '".concat(pattern, "'")); + (0, exports.debug)("includeFiles: '".concat(includeFiles, "'")); + (0, exports.debug)("includeDirectories: '".concat(includeDirectories, "'")); + if (!includeFiles && !includeDirectories) { + includeFiles = true; + } + // organize the patterns into include patterns and exclude patterns + var includePatterns = []; + var excludePatterns = []; + pattern = pattern.replace(/;;/g, '\0'); + for (var _i = 0, _a = pattern.split(';'); _i < _a.length; _i++) { + var pat = _a[_i]; + if (!pat) { + continue; + } + pat = pat.replace(/\0/g, ';'); + // determine whether include pattern and remove any include/exclude prefix. 
+ // include patterns start with +: or anything other than -: + // exclude patterns start with -: + var isIncludePattern = void 0; + if (im._startsWith(pat, '+:')) { + pat = pat.substring(2); + isIncludePattern = true; + } + else if (im._startsWith(pat, '-:')) { + pat = pat.substring(2); + isIncludePattern = false; + } + else { + isIncludePattern = true; + } + // validate pattern does not end with a slash + if (im._endsWith(pat, '/') || (process.platform == 'win32' && im._endsWith(pat, '\\'))) { + throw new Error((0, exports.loc)('LIB_InvalidPattern', pat)); + } + // root the pattern + if (rootDirectory && !path.isAbsolute(pat)) { + pat = path.join(rootDirectory, pat); + // remove trailing slash sometimes added by path.join() on Windows, e.g. + // path.join('\\\\hello', 'world') => '\\\\hello\\world\\' + // path.join('//hello', 'world') => '\\\\hello\\world\\' + if (im._endsWith(pat, '\\')) { + pat = pat.substring(0, pat.length - 1); + } + } + if (isIncludePattern) { + includePatterns.push(pat); + } + else { + excludePatterns.push(im._legacyFindFiles_convertPatternToRegExp(pat)); + } + } + // find and apply patterns + var count = 0; + var result = _legacyFindFiles_getMatchingItems(includePatterns, excludePatterns, !!includeFiles, !!includeDirectories); + (0, exports.debug)('all matches:'); + for (var _b = 0, result_1 = result; _b < result_1.length; _b++) { + var resultItem = result_1[_b]; + (0, exports.debug)(' ' + resultItem); + } + (0, exports.debug)('total matched: ' + result.length); + return result; +} +exports.legacyFindFiles = legacyFindFiles; +function _legacyFindFiles_getMatchingItems(includePatterns, excludePatterns, includeFiles, includeDirectories) { + (0, exports.debug)('getMatchingItems()'); + for (var _i = 0, includePatterns_1 = includePatterns; _i < includePatterns_1.length; _i++) { + var pattern = includePatterns_1[_i]; + (0, exports.debug)("includePattern: '".concat(pattern, "'")); + } + for (var _a = 0, excludePatterns_1 = excludePatterns; _a < excludePatterns_1.length; _a++) { + var pattern = excludePatterns_1[_a]; + (0, exports.debug)("excludePattern: ".concat(pattern)); + } + (0, exports.debug)('includeFiles: ' + includeFiles); + (0, exports.debug)('includeDirectories: ' + includeDirectories); + var allFiles = {}; + var _loop_4 = function (pattern) { + // determine the directory to search + // + // note, getDirectoryName removes redundant path separators + var findPath = void 0; + var starIndex = pattern.indexOf('*'); + var questionIndex = pattern.indexOf('?'); + if (starIndex < 0 && questionIndex < 0) { + // if no wildcards are found, use the directory name portion of the path. + // if there is no directory name (file name only in pattern or drive root), + // this will return empty string. + findPath = im._getDirectoryName(pattern); + } + else { + // extract the directory prior to the first wildcard + var index = Math.min(starIndex >= 0 ? starIndex : questionIndex, questionIndex >= 0 ? questionIndex : starIndex); + findPath = im._getDirectoryName(pattern.substring(0, index)); + } + // note, due to this short-circuit and the above usage of getDirectoryName, this + // function has the same limitations regarding drive roots as the powershell + // implementation. + // + // also note, since getDirectoryName eliminates slash redundancies, some additional + // work may be required if removal of this limitation is attempted. 
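+        // illustrative note: for a pattern like 'C:/project/**/*.dll' the first wildcard is the
+        // '**' segment, so findPath resolves to 'C:/project'; a pattern with no wildcards simply
+        // uses its own directory name as findPath.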
+ if (!findPath) { + return "continue"; + } + var patternRegex = im._legacyFindFiles_convertPatternToRegExp(pattern); + // find files/directories + var items = find(findPath, { followSymbolicLinks: true }) + .filter(function (item) { + if (includeFiles && includeDirectories) { + return true; + } + var isDir = fs.statSync(item).isDirectory(); + return (includeFiles && !isDir) || (includeDirectories && isDir); + }) + .forEach(function (item) { + var normalizedPath = process.platform == 'win32' ? item.replace(/\\/g, '/') : item; // normalize separators + // **/times/** will not match C:/fun/times because there isn't a trailing slash + // so try both if including directories + var alternatePath = "".concat(normalizedPath, "/"); // potential bug: it looks like this will result in a false + // positive if the item is a regular file and not a directory + var isMatch = false; + if (patternRegex.test(normalizedPath) || (includeDirectories && patternRegex.test(alternatePath))) { + isMatch = true; + // test whether the path should be excluded + for (var _i = 0, excludePatterns_2 = excludePatterns; _i < excludePatterns_2.length; _i++) { + var regex = excludePatterns_2[_i]; + if (regex.test(normalizedPath) || (includeDirectories && regex.test(alternatePath))) { + isMatch = false; + break; + } + } + } + if (isMatch) { + allFiles[item] = item; + } + }); + }; + for (var _b = 0, includePatterns_2 = includePatterns; _b < includePatterns_2.length; _b++) { + var pattern = includePatterns_2[_b]; + _loop_4(pattern); + } + return Object.keys(allFiles).sort(); +} +/** + * Remove a path recursively with force + * + * @param {string} inputPath - Path to remove + * @return {void} + * @throws When the file or directory exists but could not be deleted. + */ +function rmRF(inputPath) { + (0, exports.debug)('rm -rf ' + inputPath); + if (getPlatform() == Platform.Windows) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. + try { + var lstats = fs.lstatSync(inputPath); + if (lstats.isDirectory() && !lstats.isSymbolicLink()) { + (0, exports.debug)('removing directory ' + inputPath); + childProcess.execFileSync("cmd.exe", ["/c", "rd", "/s", "/q", inputPath]); + } + else if (lstats.isSymbolicLink()) { + (0, exports.debug)('removing symbolic link ' + inputPath); + var realPath = fs.readlinkSync(inputPath); + if (fs.existsSync(realPath)) { + var stats_3 = fs.statSync(realPath); + if (stats_3.isDirectory()) { + childProcess.execFileSync("cmd.exe", ["/c", "rd", "/s", "/q", realPath]); + fs.unlinkSync(inputPath); + } + else { + fs.unlinkSync(inputPath); + } + } + else { + (0, exports.debug)("Symbolic link '".concat(inputPath, "' points to a non-existing target '").concat(realPath, "'. 
Removing the symbolic link.")); + fs.unlinkSync(inputPath); + } + } + else { + (0, exports.debug)('removing file ' + inputPath); + childProcess.execFileSync("cmd.exe", ["/c", "del", "/f", "/a", inputPath]); + } + } + catch (err) { + (0, exports.debug)('Error: ' + err.message); + if (err.code != 'ENOENT') { + throw new Error((0, exports.loc)('LIB_OperationFailed', 'rmRF', err.message)); + } + } + } + else { + var lstats = void 0; + try { + if (inputPath.includes('*')) { + var entries = findMatch(path.dirname(inputPath), [path.basename(inputPath)]); + for (var _i = 0, entries_2 = entries; _i < entries_2.length; _i++) { + var entry = entries_2[_i]; + rmRF(entry); + } + } + else { + lstats = fs.lstatSync(inputPath); + if (lstats.isDirectory() && !lstats.isSymbolicLink()) { + (0, exports.debug)('removing directory ' + inputPath); + fs.rmSync(inputPath, { recursive: true, force: true }); + } + else if (lstats.isSymbolicLink()) { + (0, exports.debug)('removing symbolic link ' + inputPath); + var realPath = fs.readlinkSync(inputPath); + if (fs.existsSync(realPath)) { + var stats_4 = fs.statSync(realPath); + if (stats_4.isDirectory()) { + fs.rmSync(realPath, { recursive: true, force: true }); + fs.unlinkSync(inputPath); + } + else { + fs.unlinkSync(inputPath); + } + } + else { + (0, exports.debug)("Symbolic link '".concat(inputPath, "' points to a non-existing target '").concat(realPath, "'. Removing the symbolic link.")); + fs.unlinkSync(inputPath); + } + } + else { + (0, exports.debug)('removing file ' + inputPath); + fs.unlinkSync(inputPath); + } + } + } + catch (err) { + (0, exports.debug)('Error: ' + err.message); + if (err.code != 'ENOENT') { + throw new Error((0, exports.loc)('LIB_OperationFailed', 'rmRF', err.message)); + } + } + } +} +exports.rmRF = rmRF; +/** + * Exec a tool. Convenience wrapper over ToolRunner to exec with args in one call. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param args an arg string or array of args + * @param options optional exec options. See IExecOptions + * @returns number + */ +function execAsync(tool, args, options) { + var tr = this.tool(tool); + if (args) { + if (args instanceof Array) { + tr.arg(args); + } + else if (typeof (args) === 'string') { + tr.line(args); + } + } + return tr.execAsync(options); +} +exports.execAsync = execAsync; +/** + * Exec a tool. Convenience wrapper over ToolRunner to exec with args in one call. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @deprecated Use the {@link execAsync} method that returns a native Javascript Promise instead + * @param tool path to tool to exec + * @param args an arg string or array of args + * @param options optional exec options. See IExecOptions + * @returns number + */ +function exec(tool, args, options) { + var tr = this.tool(tool); + if (args) { + if (args instanceof Array) { + tr.arg(args); + } + else if (typeof (args) === 'string') { + tr.line(args); + } + } + return tr.exec(options); +} +exports.exec = exec; +/** + * Exec a tool synchronously. Convenience wrapper over ToolRunner to execSync with args in one call. + * Output will be *not* be streamed to the live console. It will be returned after execution is complete. + * Appropriate for short running tools + * Returns IExecResult with output and return code + * + * @param tool path to tool to exec + * @param args an arg string or array of args + * @param options optional exec options. 
See IExecSyncOptions + * @returns IExecSyncResult + */ +function execSync(tool, args, options) { + var tr = this.tool(tool); + if (args) { + if (args instanceof Array) { + tr.arg(args); + } + else if (typeof (args) === 'string') { + tr.line(args); + } + } + return tr.execSync(options); +} +exports.execSync = execSync; +/** + * Convenience factory to create a ToolRunner. + * + * @param tool path to tool to exec + * @returns ToolRunner + */ +function tool(tool) { + var tr = new trm.ToolRunner(tool); + tr.on('debug', function (message) { + (0, exports.debug)(message); + }); + return tr; +} +exports.tool = tool; +/** + * Applies glob patterns to a list of paths. Supports interleaved exclude patterns. + * + * @param list array of paths + * @param patterns patterns to apply. supports interleaved exclude patterns. + * @param patternRoot optional. default root to apply to unrooted patterns. not applied to basename-only patterns when matchBase:true. + * @param options optional. defaults to { dot: true, nobrace: true, nocase: process.platform == 'win32' }. + */ +function match(list, patterns, patternRoot, options) { + // trace parameters + (0, exports.debug)("patternRoot: '".concat(patternRoot, "'")); + options = options || _getDefaultMatchOptions(); // default match options + _debugMatchOptions(options); + // convert pattern to an array + if (typeof patterns == 'string') { + patterns = [patterns]; + } + // hashtable to keep track of matches + var map = {}; + var originalOptions = options; + for (var _i = 0, patterns_1 = patterns; _i < patterns_1.length; _i++) { + var pattern = patterns_1[_i]; + (0, exports.debug)("pattern: '".concat(pattern, "'")); + // trim and skip empty + pattern = (pattern || '').trim(); + if (!pattern) { + (0, exports.debug)('skipping empty pattern'); + continue; + } + // clone match options + var options_1 = im._cloneMatchOptions(originalOptions); + // skip comments + if (!options_1.nocomment && im._startsWith(pattern, '#')) { + (0, exports.debug)('skipping comment'); + continue; + } + // set nocomment - brace expansion could result in a leading '#' + options_1.nocomment = true; + // determine whether pattern is include or exclude + var negateCount = 0; + if (!options_1.nonegate) { + while (pattern.charAt(negateCount) == '!') { + negateCount++; + } + pattern = pattern.substring(negateCount); // trim leading '!' + if (negateCount) { + (0, exports.debug)("trimmed leading '!'. pattern: '".concat(pattern, "'")); + } + } + var isIncludePattern = negateCount == 0 || + (negateCount % 2 == 0 && !options_1.flipNegate) || + (negateCount % 2 == 1 && options_1.flipNegate); + // set nonegate - brace expansion could result in a leading '!' + options_1.nonegate = true; + options_1.flipNegate = false; + // expand braces - required to accurately root patterns + var expanded = void 0; + var preExpanded = pattern; + if (options_1.nobrace) { + expanded = [pattern]; + } + else { + // convert slashes on Windows before calling braceExpand(). unfortunately this means braces cannot + // be escaped on Windows, this limitation is consistent with current limitations of minimatch (3.0.3). + (0, exports.debug)('expanding braces'); + var convertedPattern = process.platform == 'win32' ? 
pattern.replace(/\\/g, '/') : pattern; + expanded = minimatch.braceExpand(convertedPattern); + } + // set nobrace + options_1.nobrace = true; + for (var _a = 0, expanded_1 = expanded; _a < expanded_1.length; _a++) { + var pattern_1 = expanded_1[_a]; + if (expanded.length != 1 || pattern_1 != preExpanded) { + (0, exports.debug)("pattern: '".concat(pattern_1, "'")); + } + // trim and skip empty + pattern_1 = (pattern_1 || '').trim(); + if (!pattern_1) { + (0, exports.debug)('skipping empty pattern'); + continue; + } + // root the pattern when all of the following conditions are true: + if (patternRoot && // patternRoot supplied + !im._isRooted(pattern_1) && // AND pattern not rooted + // AND matchBase:false or not basename only + (!options_1.matchBase || (process.platform == 'win32' ? pattern_1.replace(/\\/g, '/') : pattern_1).indexOf('/') >= 0)) { + pattern_1 = im._ensureRooted(patternRoot, pattern_1); + (0, exports.debug)("rooted pattern: '".concat(pattern_1, "'")); + } + if (isIncludePattern) { + // apply the pattern + (0, exports.debug)('applying include pattern against original list'); + var matchResults = minimatch.match(list, pattern_1, options_1); + (0, exports.debug)(matchResults.length + ' matches'); + // union the results + for (var _b = 0, matchResults_1 = matchResults; _b < matchResults_1.length; _b++) { + var matchResult = matchResults_1[_b]; + map[matchResult] = true; + } + } + else { + // apply the pattern + (0, exports.debug)('applying exclude pattern against original list'); + var matchResults = minimatch.match(list, pattern_1, options_1); + (0, exports.debug)(matchResults.length + ' matches'); + // substract the results + for (var _c = 0, matchResults_2 = matchResults; _c < matchResults_2.length; _c++) { + var matchResult = matchResults_2[_c]; + delete map[matchResult]; + } + } + } + } + // return a filtered version of the original list (preserves order and prevents duplication) + var result = list.filter(function (item) { return map.hasOwnProperty(item); }); + (0, exports.debug)(result.length + ' final results'); + return result; +} +exports.match = match; +/** + * Filter to apply glob patterns + * + * @param pattern pattern to apply + * @param options optional. defaults to { dot: true, nobrace: true, nocase: process.platform == 'win32' }. 
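+ *
+ * Usage sketch (file names are placeholders):
+ * @example
+ * var jsFiles = ['a.js', 'b.txt', 'c.js'].filter(tl.filter('*.js'));
+ * // jsFiles is ['a.js', 'c.js']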
+ */ +function filter(pattern, options) { + options = options || _getDefaultMatchOptions(); + return minimatch.filter(pattern, options); +} +exports.filter = filter; +function _debugMatchOptions(options) { + (0, exports.debug)("matchOptions.debug: '".concat(options.debug, "'")); + (0, exports.debug)("matchOptions.nobrace: '".concat(options.nobrace, "'")); + (0, exports.debug)("matchOptions.noglobstar: '".concat(options.noglobstar, "'")); + (0, exports.debug)("matchOptions.dot: '".concat(options.dot, "'")); + (0, exports.debug)("matchOptions.noext: '".concat(options.noext, "'")); + (0, exports.debug)("matchOptions.nocase: '".concat(options.nocase, "'")); + (0, exports.debug)("matchOptions.nonull: '".concat(options.nonull, "'")); + (0, exports.debug)("matchOptions.matchBase: '".concat(options.matchBase, "'")); + (0, exports.debug)("matchOptions.nocomment: '".concat(options.nocomment, "'")); + (0, exports.debug)("matchOptions.nonegate: '".concat(options.nonegate, "'")); + (0, exports.debug)("matchOptions.flipNegate: '".concat(options.flipNegate, "'")); +} +function _getDefaultMatchOptions() { + return { + debug: false, + nobrace: true, + noglobstar: false, + dot: true, + noext: false, + nocase: process.platform == 'win32', + nonull: false, + matchBase: false, + nocomment: false, + nonegate: false, + flipNegate: false + }; +} +/** + * Determines the find root from a list of patterns. Performs the find and then applies the glob patterns. + * Supports interleaved exclude patterns. Unrooted patterns are rooted using defaultRoot, unless + * matchOptions.matchBase is specified and the pattern is a basename only. For matchBase cases, the + * defaultRoot is used as the find root. + * + * @param defaultRoot default path to root unrooted patterns. falls back to System.DefaultWorkingDirectory or process.cwd(). + * @param patterns pattern or array of patterns to apply + * @param findOptions defaults to { followSymbolicLinks: true }. following soft links is generally appropriate unless deleting files. + * @param matchOptions defaults to { dot: true, nobrace: true, nocase: process.platform == 'win32' } + */ +function findMatch(defaultRoot, patterns, findOptions, matchOptions) { + // apply defaults for parameters and trace + defaultRoot = defaultRoot || this.getVariable('system.defaultWorkingDirectory') || process.cwd(); + (0, exports.debug)("defaultRoot: '".concat(defaultRoot, "'")); + patterns = patterns || []; + patterns = typeof patterns == 'string' ? 
[patterns] : patterns; + findOptions = findOptions || _getDefaultFindOptions(); + _debugFindOptions(findOptions); + matchOptions = matchOptions || _getDefaultMatchOptions(); + _debugMatchOptions(matchOptions); + // normalize slashes for root dir + defaultRoot = im._normalizeSeparators(defaultRoot); + var results = {}; + var originalMatchOptions = matchOptions; + for (var _i = 0, _a = (patterns || []); _i < _a.length; _i++) { + var pattern = _a[_i]; + (0, exports.debug)("pattern: '".concat(pattern, "'")); + // trim and skip empty + pattern = (pattern || '').trim(); + if (!pattern) { + (0, exports.debug)('skipping empty pattern'); + continue; + } + // clone match options + var matchOptions_1 = im._cloneMatchOptions(originalMatchOptions); + // skip comments + if (!matchOptions_1.nocomment && im._startsWith(pattern, '#')) { + (0, exports.debug)('skipping comment'); + continue; + } + // set nocomment - brace expansion could result in a leading '#' + matchOptions_1.nocomment = true; + // determine whether pattern is include or exclude + var negateCount = 0; + if (!matchOptions_1.nonegate) { + while (pattern.charAt(negateCount) == '!') { + negateCount++; + } + pattern = pattern.substring(negateCount); // trim leading '!' + if (negateCount) { + (0, exports.debug)("trimmed leading '!'. pattern: '".concat(pattern, "'")); + } + } + var isIncludePattern = negateCount == 0 || + (negateCount % 2 == 0 && !matchOptions_1.flipNegate) || + (negateCount % 2 == 1 && matchOptions_1.flipNegate); + // set nonegate - brace expansion could result in a leading '!' + matchOptions_1.nonegate = true; + matchOptions_1.flipNegate = false; + // expand braces - required to accurately interpret findPath + var expanded = void 0; + var preExpanded = pattern; + if (matchOptions_1.nobrace) { + expanded = [pattern]; + } + else { + // convert slashes on Windows before calling braceExpand(). unfortunately this means braces cannot + // be escaped on Windows, this limitation is consistent with current limitations of minimatch (3.0.3). + (0, exports.debug)('expanding braces'); + var convertedPattern = process.platform == 'win32' ? 
pattern.replace(/\\/g, '/') : pattern; + expanded = minimatch.braceExpand(convertedPattern); + } + // set nobrace + matchOptions_1.nobrace = true; + for (var _b = 0, expanded_2 = expanded; _b < expanded_2.length; _b++) { + var pattern_2 = expanded_2[_b]; + if (expanded.length != 1 || pattern_2 != preExpanded) { + (0, exports.debug)("pattern: '".concat(pattern_2, "'")); + } + // trim and skip empty + pattern_2 = (pattern_2 || '').trim(); + if (!pattern_2) { + (0, exports.debug)('skipping empty pattern'); + continue; + } + if (isIncludePattern) { + // determine the findPath + var findInfo = im._getFindInfoFromPattern(defaultRoot, pattern_2, matchOptions_1); + var findPath = findInfo.findPath; + (0, exports.debug)("findPath: '".concat(findPath, "'")); + if (!findPath) { + (0, exports.debug)('skipping empty path'); + continue; + } + // perform the find + (0, exports.debug)("statOnly: '".concat(findInfo.statOnly, "'")); + var findResults = []; + if (findInfo.statOnly) { + // simply stat the path - all path segments were used to build the path + try { + fs.statSync(findPath); + findResults.push(findPath); + } + catch (err) { + if (err.code != 'ENOENT') { + throw err; + } + (0, exports.debug)('ENOENT'); + } + } + else { + findResults = find(findPath, findOptions); + } + (0, exports.debug)("found ".concat(findResults.length, " paths")); + // apply the pattern + (0, exports.debug)('applying include pattern'); + if (findInfo.adjustedPattern != pattern_2) { + (0, exports.debug)("adjustedPattern: '".concat(findInfo.adjustedPattern, "'")); + pattern_2 = findInfo.adjustedPattern; + } + var matchResults = minimatch.match(findResults, pattern_2, matchOptions_1); + (0, exports.debug)(matchResults.length + ' matches'); + // union the results + for (var _c = 0, matchResults_3 = matchResults; _c < matchResults_3.length; _c++) { + var matchResult = matchResults_3[_c]; + var key = process.platform == 'win32' ? matchResult.toUpperCase() : matchResult; + results[key] = matchResult; + } + } + else { + // check if basename only and matchBase=true + if (matchOptions_1.matchBase && + !im._isRooted(pattern_2) && + (process.platform == 'win32' ? pattern_2.replace(/\\/g, '/') : pattern_2).indexOf('/') < 0) { + // do not root the pattern + (0, exports.debug)('matchBase and basename only'); + } + else { + // root the exclude pattern + pattern_2 = im._ensurePatternRooted(defaultRoot, pattern_2); + (0, exports.debug)("after ensurePatternRooted, pattern: '".concat(pattern_2, "'")); + } + // apply the pattern + (0, exports.debug)('applying exclude pattern'); + var matchResults = minimatch.match(Object.keys(results).map(function (key) { return results[key]; }), pattern_2, matchOptions_1); + (0, exports.debug)(matchResults.length + ' matches'); + // substract the results + for (var _d = 0, matchResults_4 = matchResults; _d < matchResults_4.length; _d++) { + var matchResult = matchResults_4[_d]; + var key = process.platform == 'win32' ? 
matchResult.toUpperCase() : matchResult; + delete results[key]; + } + } + } + } + var finalResult = Object.keys(results) + .map(function (key) { return results[key]; }) + .sort(); + (0, exports.debug)(finalResult.length + ' final results'); + return finalResult; +} +exports.findMatch = findMatch; +/** + * Build Proxy URL in the following format: protocol://username:password@hostname:port + * @param proxyUrl Url address of the proxy server (eg: http://example.com) + * @param proxyUsername Proxy username (optional) + * @param proxyPassword Proxy password (optional) + * @returns string + */ +function getProxyFormattedUrl(proxyUrl, proxyUsername, proxyPassword) { + var parsedUrl = new URL(proxyUrl); + var proxyAddress = "".concat(parsedUrl.protocol, "//").concat(parsedUrl.host); + if (proxyUsername) { + proxyAddress = "".concat(parsedUrl.protocol, "//").concat(proxyUsername, ":").concat(proxyPassword, "@").concat(parsedUrl.host); + } + return proxyAddress; +} +/** + * Gets http proxy configuration used by Build/Release agent + * + * @return ProxyConfiguration + */ +function getHttpProxyConfiguration(requestUrl) { + var proxyUrl = (0, exports.getVariable)('Agent.ProxyUrl'); + if (proxyUrl && proxyUrl.length > 0) { + var proxyUsername = (0, exports.getVariable)('Agent.ProxyUsername'); + var proxyPassword = (0, exports.getVariable)('Agent.ProxyPassword'); + var proxyBypassHosts = JSON.parse((0, exports.getVariable)('Agent.ProxyBypassList') || '[]'); + var bypass_1 = false; + if (requestUrl) { + proxyBypassHosts.forEach(function (bypassHost) { + if (new RegExp(bypassHost, 'i').test(requestUrl)) { + bypass_1 = true; + } + }); + } + if (bypass_1) { + return null; + } + else { + var proxyAddress = getProxyFormattedUrl(proxyUrl, proxyUsername, proxyPassword); + return { + proxyUrl: proxyUrl, + proxyUsername: proxyUsername, + proxyPassword: proxyPassword, + proxyBypassHosts: proxyBypassHosts, + proxyFormattedUrl: proxyAddress + }; + } + } + else { + return null; + } +} +exports.getHttpProxyConfiguration = getHttpProxyConfiguration; +/** + * Gets http certificate configuration used by Build/Release agent + * + * @return CertConfiguration + */ +function getHttpCertConfiguration() { + var ca = (0, exports.getVariable)('Agent.CAInfo'); + var clientCert = (0, exports.getVariable)('Agent.ClientCert'); + if (ca || clientCert) { + var certConfig = {}; + certConfig.caFile = ca; + certConfig.certFile = clientCert; + if (clientCert) { + var clientCertKey = (0, exports.getVariable)('Agent.ClientCertKey'); + var clientCertArchive = (0, exports.getVariable)('Agent.ClientCertArchive'); + var clientCertPassword = (0, exports.getVariable)('Agent.ClientCertPassword'); + certConfig.keyFile = clientCertKey; + certConfig.certArchiveFile = clientCertArchive; + certConfig.passphrase = clientCertPassword; + } + return certConfig; + } + else { + return null; + } +} +exports.getHttpCertConfiguration = getHttpCertConfiguration; +//----------------------------------------------------- +// Test Publisher +//----------------------------------------------------- +var TestPublisher = /** @class */ (function () { + function TestPublisher(testRunner) { + this.testRunner = testRunner; + } + TestPublisher.prototype.publish = function (resultFiles, mergeResults, platform, config, runTitle, publishRunAttachments, testRunSystem) { + // Could have used an initializer, but wanted to avoid reordering parameters when converting to strict null checks + // (A parameter cannot both be optional and have an initializer) + testRunSystem = 
testRunSystem || "VSTSTask"; + var properties = {}; + properties['type'] = this.testRunner; + if (mergeResults) { + properties['mergeResults'] = mergeResults; + } + if (platform) { + properties['platform'] = platform; + } + if (config) { + properties['config'] = config; + } + if (runTitle) { + properties['runTitle'] = runTitle; + } + if (publishRunAttachments) { + properties['publishRunAttachments'] = publishRunAttachments; + } + if (resultFiles) { + properties['resultFiles'] = Array.isArray(resultFiles) ? resultFiles.join() : resultFiles; + } + properties['testRunSystem'] = testRunSystem; + (0, exports.command)('results.publish', properties, ''); + }; + return TestPublisher; +}()); +exports.TestPublisher = TestPublisher; +//----------------------------------------------------- +// Code coverage Publisher +//----------------------------------------------------- +var CodeCoveragePublisher = /** @class */ (function () { + function CodeCoveragePublisher() { + } + CodeCoveragePublisher.prototype.publish = function (codeCoverageTool, summaryFileLocation, reportDirectory, additionalCodeCoverageFiles) { + var properties = {}; + if (codeCoverageTool) { + properties['codecoveragetool'] = codeCoverageTool; + } + if (summaryFileLocation) { + properties['summaryfile'] = summaryFileLocation; + } + if (reportDirectory) { + properties['reportdirectory'] = reportDirectory; + } + if (additionalCodeCoverageFiles) { + properties['additionalcodecoveragefiles'] = Array.isArray(additionalCodeCoverageFiles) ? additionalCodeCoverageFiles.join() : additionalCodeCoverageFiles; + } + (0, exports.command)('codecoverage.publish', properties, ""); + }; + return CodeCoveragePublisher; +}()); +exports.CodeCoveragePublisher = CodeCoveragePublisher; +//----------------------------------------------------- +// Code coverage Publisher +//----------------------------------------------------- +var CodeCoverageEnabler = /** @class */ (function () { + function CodeCoverageEnabler(buildTool, ccTool) { + this.buildTool = buildTool; + this.ccTool = ccTool; + } + CodeCoverageEnabler.prototype.enableCodeCoverage = function (buildProps) { + buildProps['buildtool'] = this.buildTool; + buildProps['codecoveragetool'] = this.ccTool; + (0, exports.command)('codecoverage.enable', buildProps, ""); + }; + return CodeCoverageEnabler; +}()); +exports.CodeCoverageEnabler = CodeCoverageEnabler; +//----------------------------------------------------- +// Task Logging Commands +//----------------------------------------------------- +/** + * Upload user interested file as additional log information + * to the current timeline record. + * + * The file shall be available for download along with task logs. + * + * @param path Path to the file that should be uploaded. + * @returns void + */ +function uploadFile(path) { + (0, exports.command)("task.uploadfile", null, path); +} +exports.uploadFile = uploadFile; +/** + * Instruction for the agent to update the PATH environment variable. + * The specified directory is prepended to the PATH. + * The updated environment variable will be reflected in subsequent tasks. + * + * @param path Local directory path. + * @returns void + */ +function prependPath(path) { + assertAgent("2.115.0"); + (0, exports.command)("task.prependpath", null, path); +} +exports.prependPath = prependPath; +/** + * Upload and attach summary markdown to current timeline record. + * This summary shall be added to the build/release summary and + * not available for download with logs. + * + * @param path Local directory path. 
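+ *
+ * Usage sketch (the markdown file path is a placeholder):
+ * @example
+ * tl.uploadSummary('/agent/_work/_temp/summary.md');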
+ * @returns void + */ +function uploadSummary(path) { + (0, exports.command)("task.uploadsummary", null, path); +} +exports.uploadSummary = uploadSummary; +/** + * Upload and attach attachment to current timeline record. + * These files are not available for download with logs. + * These can only be referred to by extensions using the type or name values. + * + * @param type Attachment type. + * @param name Attachment name. + * @param path Attachment path. + * @returns void + */ +function addAttachment(type, name, path) { + (0, exports.command)("task.addattachment", { "type": type, "name": name }, path); +} +exports.addAttachment = addAttachment; +/** + * Set an endpoint field with given value. + * Value updated will be retained in the endpoint for + * the subsequent tasks that execute within the same job. + * + * @param id Endpoint id. + * @param field FieldType enum of AuthParameter, DataParameter or Url. + * @param key Key. + * @param value Value for key or url. + * @returns void + */ +function setEndpoint(id, field, key, value) { + (0, exports.command)("task.setendpoint", { "id": id, "field": FieldType[field].toLowerCase(), "key": key }, value); +} +exports.setEndpoint = setEndpoint; +/** + * Set progress and current operation for current task. + * + * @param percent Percentage of completion. + * @param currentOperation Current pperation. + * @returns void + */ +function setProgress(percent, currentOperation) { + (0, exports.command)("task.setprogress", { "value": "".concat(percent) }, currentOperation); +} +exports.setProgress = setProgress; +/** + * Indicates whether to write the logging command directly to the host or to the output pipeline. + * + * @param id Timeline record Guid. + * @param parentId Parent timeline record Guid. + * @param recordType Record type. + * @param recordName Record name. + * @param order Order of timeline record. + * @param startTime Start time. + * @param finishTime End time. + * @param progress Percentage of completion. + * @param state TaskState enum of Unknown, Initialized, InProgress or Completed. + * @param result TaskResult enum of Succeeded, SucceededWithIssues, Failed, Cancelled or Skipped. + * @param message current operation + * @returns void + */ +function logDetail(id, message, parentId, recordType, recordName, order, startTime, finishTime, progress, state, result) { + var properties = { + "id": id, + "parentid": parentId, + "type": recordType, + "name": recordName, + "order": order ? order.toString() : undefined, + "starttime": startTime, + "finishtime": finishTime, + "progress": progress ? progress.toString() : undefined, + "state": state ? TaskState[state] : undefined, + "result": result ? TaskResult[result] : undefined + }; + (0, exports.command)("task.logdetail", properties, message); +} +exports.logDetail = logDetail; +/** + * Log error or warning issue to timeline record of current task. + * + * @param type IssueType enum of Error or Warning. + * @param sourcePath Source file location. + * @param lineNumber Line number. + * @param columnNumber Column number. + * @param code Error or warning code. + * @param message Error or warning message. + * @returns void + */ +function logIssue(type, message, sourcePath, lineNumber, columnNumber, errorCode) { + var properties = { + "type": IssueType[type].toLowerCase(), + "code": errorCode, + "sourcepath": sourcePath, + "linenumber": lineNumber ? lineNumber.toString() : undefined, + "columnnumber": columnNumber ? 
columnNumber.toString() : undefined, + }; + (0, exports.command)("task.logissue", properties, message); +} +exports.logIssue = logIssue; +//----------------------------------------------------- +// Artifact Logging Commands +//----------------------------------------------------- +/** + * Upload user interested file as additional log information + * to the current timeline record. + * + * The file shall be available for download along with task logs. + * + * @param containerFolder Folder that the file will upload to, folder will be created if needed. + * @param path Path to the file that should be uploaded. + * @param name Artifact name. + * @returns void + */ +function uploadArtifact(containerFolder, path, name) { + (0, exports.command)("artifact.upload", { "containerfolder": containerFolder, "artifactname": name }, path); +} +exports.uploadArtifact = uploadArtifact; +/** + * Create an artifact link, artifact location is required to be + * a file container path, VC path or UNC share path. + * + * The file shall be available for download along with task logs. + * + * @param name Artifact name. + * @param path Path to the file that should be associated. + * @param artifactType ArtifactType enum of Container, FilePath, VersionControl, GitRef or TfvcLabel. + * @returns void + */ +function associateArtifact(name, path, artifactType) { + (0, exports.command)("artifact.associate", { "type": ArtifactType[artifactType].toLowerCase(), "artifactname": name }, path); +} +exports.associateArtifact = associateArtifact; +//----------------------------------------------------- +// Build Logging Commands +//----------------------------------------------------- +/** + * Upload user interested log to build’s container “logs\tool” folder. + * + * @param path Path to the file that should be uploaded. + * @returns void + */ +function uploadBuildLog(path) { + (0, exports.command)("build.uploadlog", null, path); +} +exports.uploadBuildLog = uploadBuildLog; +/** + * Update build number for current build. + * + * @param value Value to be assigned as the build number. + * @returns void + */ +function updateBuildNumber(value) { + (0, exports.command)("build.updatebuildnumber", null, value); +} +exports.updateBuildNumber = updateBuildNumber; +/** + * Add a tag for current build. + * + * @param value Tag value. + * @returns void + */ +function addBuildTag(value) { + (0, exports.command)("build.addbuildtag", null, value); +} +exports.addBuildTag = addBuildTag; +//----------------------------------------------------- +// Release Logging Commands +//----------------------------------------------------- +/** + * Update release name for current release. + * + * @param value Value to be assigned as the release name. + * @returns void + */ +function updateReleaseName(name) { + assertAgent("2.132.0"); + (0, exports.command)("release.updatereleasename", null, name); +} +exports.updateReleaseName = updateReleaseName; +//----------------------------------------------------- +// Tools +//----------------------------------------------------- +exports.TaskCommand = tcm.TaskCommand; +exports.commandFromString = tcm.commandFromString; +exports.ToolRunner = trm.ToolRunner; +//----------------------------------------------------- +// Validation Checks +//----------------------------------------------------- +// async await needs generators in node 4.x+ +if (semver.lt(process.versions.node, '4.2.0')) { + (0, exports.warning)('Tasks require a new agent. 
Upgrade your agent or node to 4.2.0 or later', exports.IssueSource.TaskInternal); +} +//------------------------------------------------------------------- +// Populate the vault with sensitive data. Inputs and Endpoints +//------------------------------------------------------------------- +// avoid loading twice (overwrites .taskkey) +if (!global['_vsts_task_lib_loaded']) { + im._loadData(); + im._exposeProxySettings(); + im._exposeCertSettings(); +} + + +/***/ }), + +/***/ 373: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.commandFromString = exports.TaskCommand = void 0; +// +// Command Format: +// ##vso[artifact.command key=value;key=value]user message +// +// Examples: +// ##vso[task.progress value=58] +// ##vso[task.issue type=warning;]This is the user warning message +// +var CMD_PREFIX = '##vso['; +var TaskCommand = /** @class */ (function () { + function TaskCommand(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + TaskCommand.prototype.toString = function () { + var cmdStr = CMD_PREFIX + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + for (var key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + var val = this.properties[key]; + if (val) { + // safely append the val - avoid blowing up when attempting to + // call .replace() if message is not a string for some reason + cmdStr += key + '=' + escape('' + (val || '')) + ';'; + } + } + } + } + cmdStr += ']'; + // safely append the message - avoid blowing up when attempting to + // call .replace() if message is not a string for some reason + var message = '' + (this.message || ''); + cmdStr += escapedata(message); + return cmdStr; + }; + return TaskCommand; +}()); +exports.TaskCommand = TaskCommand; +function commandFromString(commandLine) { + var preLen = CMD_PREFIX.length; + var lbPos = commandLine.indexOf('['); + var rbPos = commandLine.indexOf(']'); + if (lbPos == -1 || rbPos == -1 || rbPos - lbPos < 3) { + throw new Error('Invalid command brackets'); + } + var cmdInfo = commandLine.substring(lbPos + 1, rbPos); + var spaceIdx = cmdInfo.indexOf(' '); + var command = cmdInfo; + var properties = {}; + if (spaceIdx > 0) { + command = cmdInfo.trim().substring(0, spaceIdx); + var propSection = cmdInfo.trim().substring(spaceIdx + 1); + var propLines = propSection.split(';'); + propLines.forEach(function (propLine) { + propLine = propLine.trim(); + if (propLine.length > 0) { + var eqIndex = propLine.indexOf('='); + if (eqIndex == -1) { + throw new Error('Invalid property: ' + propLine); + } + var key = propLine.substring(0, eqIndex); + var val = propLine.substring(eqIndex + 1); + properties[key] = unescape(val); + } + }); + } + var msg = unescapedata(commandLine.substring(rbPos + 1)); + var cmd = new TaskCommand(command, properties, msg); + return cmd; +} +exports.commandFromString = commandFromString; +function escapedata(s) { + return s.replace(/%/g, '%AZP25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function unescapedata(s) { + return s.replace(/%0D/g, '\r') + .replace(/%0A/g, '\n') + .replace(/%AZP25/g, '%'); +} +function escape(s) { + return s.replace(/%/g, '%AZP25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/]/g, '%5D') + .replace(/;/g, '%3B'); +} +function unescape(s) { + return s.replace(/%0D/g, '\r') + 
.replace(/%0A/g, '\n') + .replace(/%5D/g, ']') + .replace(/%3B/g, ';') + .replace(/%AZP25/g, '%'); +} + + +/***/ }), + +/***/ 419: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ToolRunner = void 0; +var Q = __nccwpck_require__(560); +var os = __nccwpck_require__(857); +var events = __nccwpck_require__(434); +var child = __nccwpck_require__(317); +var im = __nccwpck_require__(202); +var fs = __nccwpck_require__(896); +var ToolRunner = /** @class */ (function (_super) { + __extends(ToolRunner, _super); + function ToolRunner(toolPath) { + var _this = _super.call(this) || this; + _this.cmdSpecialChars = [' ', '\t', '&', '(', ')', '[', ']', '{', '}', '^', '=', ';', '!', '\'', '+', ',', '`', '~', '|', '<', '>', '"']; + if (!toolPath) { + throw new Error('Parameter \'toolPath\' cannot be null or empty.'); + } + _this.toolPath = im._which(toolPath, true); + _this.args = []; + _this._debug('toolRunner toolPath: ' + toolPath); + return _this; + } + ToolRunner.prototype._debug = function (message) { + this.emit('debug', message); + }; + ToolRunner.prototype._argStringToArray = function (argString) { + var args = []; + var inQuotes = false; + var escaped = false; + var lastCharWasSpace = true; + var arg = ''; + var append = function (c) { + // we only escape double quotes. + if (escaped) { + if (c !== '"') { + arg += '\\'; + } + else { + arg.slice(0, -1); + } + } + arg += c; + escaped = false; + }; + for (var i = 0; i < argString.length; i++) { + var c = argString.charAt(i); + if (c === ' ' && !inQuotes) { + if (!lastCharWasSpace) { + args.push(arg); + arg = ''; + } + lastCharWasSpace = true; + continue; + } + else { + lastCharWasSpace = false; + } + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === "\\" && escaped) { + append(c); + continue; + } + if (c === "\\" && inQuotes) { + escaped = true; + continue; + } + append(c); + lastCharWasSpace = false; + } + if (!lastCharWasSpace) { + args.push(arg.trim()); + } + return args; + }; + ToolRunner.prototype._getCommandString = function (options, noPrefix) { + var _this = this; + var toolPath = this._getSpawnFileName(); + var args = this._getSpawnArgs(options); + var cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool + var commandParts = []; + if (process.platform == 'win32') { + // Windows + cmd file + if (this._isCmdFile()) { + commandParts.push(toolPath); + commandParts = commandParts.concat(args); + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + commandParts.push("\"".concat(toolPath, "\"")); + commandParts = commandParts.concat(args); + } + else if (options.shell) { + commandParts.push(this._windowsQuoteCmdArg(toolPath)); + commandParts = commandParts.concat(args); + } + // Windows (regular) + else { + commandParts.push(this._windowsQuoteCmdArg(toolPath)); + commandParts = commandParts.concat(args.map(function (arg) { return _this._windowsQuoteCmdArg(arg); })); + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + commandParts.push(toolPath); + commandParts = commandParts.concat(args); + } + cmd += commandParts.join(' '); + // append second tool + if (this.pipeOutputToTool) { + cmd += ' | ' + this.pipeOutputToTool._getCommandString(options, /*noPrefix:*/ true); + } + return cmd; + }; + ToolRunner.prototype._processLineBuffer = function (data, buffer, onLine) { + var newBuffer = buffer + data.toString(); + try { + var eolIndex = newBuffer.indexOf(os.EOL); + while (eolIndex > -1) { + var line = newBuffer.substring(0, eolIndex); + onLine(line); + // the rest of the string ... + newBuffer = newBuffer.substring(eolIndex + os.EOL.length); + eolIndex = newBuffer.indexOf(os.EOL); + } + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug('error processing line'); + } + return newBuffer; + }; + /** + * Wraps an arg string with specified char if it's not already wrapped + * @returns {string} Arg wrapped with specified char + * @param {string} arg Input argument string + * @param {string} wrapChar A char input string should be wrapped with + */ + ToolRunner.prototype._wrapArg = function (arg, wrapChar) { + if (!this._isWrapped(arg, wrapChar)) { + return "".concat(wrapChar).concat(arg).concat(wrapChar); + } + return arg; + }; + /** + * Unwraps an arg string wrapped with specified char + * @param arg Arg wrapped with specified char + * @param wrapChar A char to be removed + */ + ToolRunner.prototype._unwrapArg = function (arg, wrapChar) { + if (this._isWrapped(arg, wrapChar)) { + var pattern = new RegExp("(^\\\\?".concat(wrapChar, ")|(\\\\?").concat(wrapChar, "$)"), 'g'); + return arg.trim().replace(pattern, ''); + } + return arg; + }; + /** + * Determine if arg string is wrapped with specified char + * @param arg Input arg string + */ + ToolRunner.prototype._isWrapped = function (arg, wrapChar) { + var pattern = new RegExp("^\\\\?".concat(wrapChar, ".+\\\\?").concat(wrapChar, "$")); + return pattern.test(arg.trim()); + }; + ToolRunner.prototype._getSpawnFileName = function (options) { + if (process.platform == 'win32') { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + if (options && options.shell) { + return this._wrapArg(this.toolPath, '"'); + } + return this.toolPath; + }; + ToolRunner.prototype._getSpawnArgs = function (options) { + var _this = this; + if (process.platform == 'win32') { + if (this._isCmdFile()) { + var argline = "/D /S /C \"".concat(this._windowsQuoteCmdArg(this.toolPath)); + for (var i = 0; i < this.args.length; i++) { + argline += ' '; + argline += 
options.windowsVerbatimArguments ? this.args[i] : this._windowsQuoteCmdArg(this.args[i]); + } + argline += '"'; + return [argline]; + } + if (options.windowsVerbatimArguments) { + // note, in Node 6.x options.argv0 can be used instead of overriding args.slice and args.unshift. + // for more details, refer to https://github.com/nodejs/node/blob/v6.x/lib/child_process.js + var args_1 = this.args.slice(0); // copy the array + // override slice to prevent Node from creating a copy of the arg array. + // we need Node to use the "unshift" override below. + args_1.slice = function () { + if (arguments.length != 1 || arguments[0] != 0) { + throw new Error('Unexpected arguments passed to args.slice when windowsVerbatimArguments flag is set.'); + } + return args_1; + }; + // override unshift + // + // when using the windowsVerbatimArguments option, Node does not quote the tool path when building + // the cmdline parameter for the win32 function CreateProcess(). an unquoted space in the tool path + // causes problems for tools when attempting to parse their own command line args. tools typically + // assume their arguments begin after arg 0. + // + // by hijacking unshift, we can quote the tool path when it pushed onto the args array. Node builds + // the cmdline parameter from the args array. + // + // note, we can't simply pass a quoted tool path to Node for multiple reasons: + // 1) Node verifies the file exists (calls win32 function GetFileAttributesW) and the check returns + // false if the path is quoted. + // 2) Node passes the tool path as the application parameter to CreateProcess, which expects the + // path to be unquoted. + // + // also note, in addition to the tool path being embedded within the cmdline parameter, Node also + // passes the tool path to CreateProcess via the application parameter (optional parameter). when + // present, Windows uses the application parameter to determine which file to run, instead of + // interpreting the file from the cmdline parameter. + args_1.unshift = function () { + if (arguments.length != 1) { + throw new Error('Unexpected arguments passed to args.unshift when windowsVerbatimArguments flag is set.'); + } + return Array.prototype.unshift.call(args_1, "\"".concat(arguments[0], "\"")); // quote the file name + }; + return args_1; + } + else if (options.shell) { + var args = []; + for (var _i = 0, _a = this.args; _i < _a.length; _i++) { + var arg = _a[_i]; + if (this._needQuotesForCmd(arg, '%')) { + args.push(this._wrapArg(arg, '"')); + } + else { + args.push(arg); + } + } + return args; + } + } + else if (options.shell) { + return this.args.map(function (arg) { + if (_this._isWrapped(arg, "'")) { + return arg; + } + // remove wrapping double quotes to avoid escaping + arg = _this._unwrapArg(arg, '"'); + arg = _this._escapeChar(arg, '"'); + return _this._wrapArg(arg, '"'); + }); + } + return this.args; + }; + /** + * Escape specified character. 
+ * @param arg String to escape char in + * @param charToEscape Char should be escaped + */ + ToolRunner.prototype._escapeChar = function (arg, charToEscape) { + var escChar = "\\"; + var output = ''; + var charIsEscaped = false; + for (var _i = 0, arg_1 = arg; _i < arg_1.length; _i++) { + var char = arg_1[_i]; + if (char === charToEscape && !charIsEscaped) { + output += escChar + char; + } + else { + output += char; + } + charIsEscaped = char === escChar && !charIsEscaped; + } + return output; + }; + ToolRunner.prototype._isCmdFile = function () { + var upperToolPath = this.toolPath.toUpperCase(); + return im._endsWith(upperToolPath, '.CMD') || im._endsWith(upperToolPath, '.BAT'); + }; + /** + * Determine whether the cmd arg needs to be quoted. Returns true if arg contains any of special chars array. + * @param arg The cmd command arg. + * @param additionalChars Additional chars which should be also checked. + */ + ToolRunner.prototype._needQuotesForCmd = function (arg, additionalChars) { + var specialChars = this.cmdSpecialChars; + if (additionalChars) { + specialChars = this.cmdSpecialChars.concat(additionalChars); + } + var _loop_1 = function (char) { + if (specialChars.some(function (x) { return x === char; })) { + return { value: true }; + } + }; + for (var _i = 0, arg_2 = arg; _i < arg_2.length; _i++) { + var char = arg_2[_i]; + var state_1 = _loop_1(char); + if (typeof state_1 === "object") + return state_1.value; + } + return false; + }; + ToolRunner.prototype._windowsQuoteCmdArg = function (arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uv_quote_cmd_arg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + var needsQuotes = this._needQuotesForCmd(arg); + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that preceed a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. 
+ // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + var reverse = '"'; + var quote_hit = true; + for (var i = arg.length; i > 0; i--) { // walk the string in reverse + reverse += arg[i - 1]; + if (quote_hit && arg[i - 1] == '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] == '"') { + quote_hit = true; + reverse += '"'; // double the quote + } + else { + quote_hit = false; + } + } + reverse += '"'; + return reverse.split('').reverse().join(''); + }; + ToolRunner.prototype._uv_quote_cmd_arg = function (arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. 
+ if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (arg.indexOf(' ') < 0 && arg.indexOf('\t') < 0 && arg.indexOf('"') < 0) { + // No quotation needed + return arg; + } + if (arg.indexOf('"') < 0 && arg.indexOf('\\') < 0) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. + return "\"".concat(arg, "\""); + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + var reverse = '"'; + var quote_hit = true; + for (var i = arg.length; i > 0; i--) { // walk the string in reverse + reverse += arg[i - 1]; + if (quote_hit && arg[i - 1] == '\\') { + reverse += '\\'; + } + else if (arg[i - 1] == '"') { + quote_hit = true; + reverse += '\\'; + } + else { + quote_hit = false; + } + } + reverse += '"'; + return reverse.split('').reverse().join(''); + }; + ToolRunner.prototype._cloneExecOptions = function (options) { + options = options || {}; + var result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + shell: options.shell || false + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + }; + ToolRunner.prototype._getSpawnOptions = function (options) { + options = options || {}; + var result = {}; + result.cwd = options.cwd; + result.env = options.env; + result.shell = options.shell; + result['windowsVerbatimArguments'] = options.windowsVerbatimArguments || this._isCmdFile(); + return result; + }; + ToolRunner.prototype._getSpawnSyncOptions = function (options) { + var result = {}; + result.maxBuffer = 1024 * 1024 * 1024; + result.cwd = options.cwd; + result.env = options.env; + result.shell = options.shell; + result['windowsVerbatimArguments'] = options.windowsVerbatimArguments || this._isCmdFile(); + return result; + }; + ToolRunner.prototype.execWithPipingAsync = function (pipeOutputToTool, options) { + var _this = this; + this._debug('exec tool: ' + this.toolPath); + this._debug('arguments:'); + this.args.forEach(function (arg) { + _this._debug(' ' + arg); + }); + var success = true; + var optionsNonNull = this._cloneExecOptions(options); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + var cp; + var toolPath = pipeOutputToTool.toolPath; + var toolPathFirst; + var successFirst = true; + var returnCodeFirst; + var fileStream; + var waitingEvents = 0; // number of process or stream events we are waiting on to complete + var returnCode = 0; + var error; + toolPathFirst = this.toolPath; + // Following node documentation example from this link on how to pipe output of one process to another + // https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options + //start the child process for both tools + waitingEvents++; + 
var cpFirst = child.spawn(this._getSpawnFileName(optionsNonNull), this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(optionsNonNull)); + waitingEvents++; + cp = child.spawn(pipeOutputToTool._getSpawnFileName(optionsNonNull), pipeOutputToTool._getSpawnArgs(optionsNonNull), pipeOutputToTool._getSpawnOptions(optionsNonNull)); + fileStream = this.pipeOutputToFile ? fs.createWriteStream(this.pipeOutputToFile) : null; + return new Promise(function (resolve, reject) { + var _a, _b, _c, _d; + if (fileStream) { + waitingEvents++; + fileStream.on('finish', function () { + waitingEvents--; //file write is complete + fileStream = null; + if (waitingEvents == 0) { + if (error) { + reject(error); + } + else { + resolve(returnCode); + } + } + }); + fileStream.on('error', function (err) { + waitingEvents--; //there were errors writing to the file, write is done + _this._debug("Failed to pipe output of ".concat(toolPathFirst, " to file ").concat(_this.pipeOutputToFile, ". Error = ").concat(err)); + fileStream = null; + if (waitingEvents == 0) { + if (error) { + reject(error); + } + else { + resolve(returnCode); + } + } + }); + } + //pipe stdout of first tool to stdin of second tool + (_a = cpFirst.stdout) === null || _a === void 0 ? void 0 : _a.on('data', function (data) { + var _a, _b; + try { + if (fileStream) { + fileStream.write(data); + } + if (!((_a = cp.stdin) === null || _a === void 0 ? void 0 : _a.destroyed)) { + (_b = cp.stdin) === null || _b === void 0 ? void 0 : _b.write(data); + } + } + catch (err) { + _this._debug('Failed to pipe output of ' + toolPathFirst + ' to ' + toolPath); + _this._debug(toolPath + ' might have exited due to errors prematurely. Verify the arguments passed are valid.'); + } + }); + (_b = cpFirst.stderr) === null || _b === void 0 ? void 0 : _b.on('data', function (data) { + if (fileStream) { + fileStream.write(data); + } + successFirst = !optionsNonNull.failOnStdErr; + if (!optionsNonNull.silent) { + var s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + }); + cpFirst.on('error', function (err) { + var _a; + waitingEvents--; //first process is complete with errors + if (fileStream) { + fileStream.end(); + } + (_a = cp.stdin) === null || _a === void 0 ? void 0 : _a.end(); + error = new Error(toolPathFirst + ' failed. ' + err.message); + if (waitingEvents == 0) { + reject(error); + } + }); + cpFirst.on('close', function (code, signal) { + var _a; + waitingEvents--; //first process is complete + if (code != 0 && !optionsNonNull.ignoreReturnCode) { + successFirst = false; + returnCodeFirst = code; + returnCode = returnCodeFirst; + } + _this._debug('success of first tool:' + successFirst); + if (fileStream) { + fileStream.end(); + } + (_a = cp.stdin) === null || _a === void 0 ? void 0 : _a.end(); + if (waitingEvents == 0) { + if (error) { + reject(error); + } + else { + resolve(returnCode); + } + } + }); + var stdLineBuffer = ''; + (_c = cp.stdout) === null || _c === void 0 ? void 0 : _c.on('data', function (data) { + _this.emit('stdout', data); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(data); + } + stdLineBuffer = _this._processLineBuffer(data, stdLineBuffer, function (line) { + _this.emit('stdline', line); + }); + }); + var errLineBuffer = ''; + (_d = cp.stderr) === null || _d === void 0 ? void 0 : _d.on('data', function (data) { + _this.emit('stderr', data); + success = !optionsNonNull.failOnStdErr; + if (!optionsNonNull.silent) { + var s = optionsNonNull.failOnStdErr ? 
optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + errLineBuffer = _this._processLineBuffer(data, errLineBuffer, function (line) { + _this.emit('errline', line); + }); + }); + cp.on('error', function (err) { + waitingEvents--; //process is done with errors + error = new Error(toolPath + ' failed. ' + err.message); + if (waitingEvents == 0) { + reject(error); + } + }); + cp.on('close', function (code, signal) { + waitingEvents--; //process is complete + _this._debug('rc:' + code); + returnCode = code; + if (stdLineBuffer.length > 0) { + _this.emit('stdline', stdLineBuffer); + } + if (errLineBuffer.length > 0) { + _this.emit('errline', errLineBuffer); + } + if (code != 0 && !optionsNonNull.ignoreReturnCode) { + success = false; + } + _this._debug('success:' + success); + if (!successFirst) { //in the case output is piped to another tool, check exit code of both tools + error = new Error(toolPathFirst + ' failed with return code: ' + returnCodeFirst); + } + else if (!success) { + error = new Error(toolPath + ' failed with return code: ' + code); + } + if (waitingEvents == 0) { + if (error) { + reject(error); + } + else { + resolve(returnCode); + } + } + }); + }); + }; + ToolRunner.prototype.execWithPiping = function (pipeOutputToTool, options) { + var _this = this; + var _a, _b, _c, _d; + var defer = Q.defer(); + this._debug('exec tool: ' + this.toolPath); + this._debug('arguments:'); + this.args.forEach(function (arg) { + _this._debug(' ' + arg); + }); + var success = true; + var optionsNonNull = this._cloneExecOptions(options); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + var cp; + var toolPath = pipeOutputToTool.toolPath; + var toolPathFirst; + var successFirst = true; + var returnCodeFirst; + var fileStream; + var waitingEvents = 0; // number of process or stream events we are waiting on to complete + var returnCode = 0; + var error; + toolPathFirst = this.toolPath; + // Following node documentation example from this link on how to pipe output of one process to another + // https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options + //start the child process for both tools + waitingEvents++; + var cpFirst = child.spawn(this._getSpawnFileName(optionsNonNull), this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(optionsNonNull)); + waitingEvents++; + cp = child.spawn(pipeOutputToTool._getSpawnFileName(optionsNonNull), pipeOutputToTool._getSpawnArgs(optionsNonNull), pipeOutputToTool._getSpawnOptions(optionsNonNull)); + fileStream = this.pipeOutputToFile ? fs.createWriteStream(this.pipeOutputToFile) : null; + if (fileStream) { + waitingEvents++; + fileStream.on('finish', function () { + waitingEvents--; //file write is complete + fileStream = null; + if (waitingEvents == 0) { + if (error) { + defer.reject(error); + } + else { + defer.resolve(returnCode); + } + } + }); + fileStream.on('error', function (err) { + waitingEvents--; //there were errors writing to the file, write is done + _this._debug("Failed to pipe output of ".concat(toolPathFirst, " to file ").concat(_this.pipeOutputToFile, ". Error = ").concat(err)); + fileStream = null; + if (waitingEvents == 0) { + if (error) { + defer.reject(error); + } + else { + defer.resolve(returnCode); + } + } + }); + } + //pipe stdout of first tool to stdin of second tool + (_a = cpFirst.stdout) === null || _a === void 0 ? 
void 0 : _a.on('data', function (data) { + var _a; + try { + if (fileStream) { + fileStream.write(data); + } + (_a = cp.stdin) === null || _a === void 0 ? void 0 : _a.write(data); + } + catch (err) { + _this._debug('Failed to pipe output of ' + toolPathFirst + ' to ' + toolPath); + _this._debug(toolPath + ' might have exited due to errors prematurely. Verify the arguments passed are valid.'); + } + }); + (_b = cpFirst.stderr) === null || _b === void 0 ? void 0 : _b.on('data', function (data) { + if (fileStream) { + fileStream.write(data); + } + successFirst = !optionsNonNull.failOnStdErr; + if (!optionsNonNull.silent) { + var s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + }); + cpFirst.on('error', function (err) { + var _a; + waitingEvents--; //first process is complete with errors + if (fileStream) { + fileStream.end(); + } + (_a = cp.stdin) === null || _a === void 0 ? void 0 : _a.end(); + error = new Error(toolPathFirst + ' failed. ' + err.message); + if (waitingEvents == 0) { + defer.reject(error); + } + }); + cpFirst.on('close', function (code, signal) { + var _a; + waitingEvents--; //first process is complete + if (code != 0 && !optionsNonNull.ignoreReturnCode) { + successFirst = false; + returnCodeFirst = code; + returnCode = returnCodeFirst; + } + _this._debug('success of first tool:' + successFirst); + if (fileStream) { + fileStream.end(); + } + (_a = cp.stdin) === null || _a === void 0 ? void 0 : _a.end(); + if (waitingEvents == 0) { + if (error) { + defer.reject(error); + } + else { + defer.resolve(returnCode); + } + } + }); + var stdLineBuffer = ''; + (_c = cp.stdout) === null || _c === void 0 ? void 0 : _c.on('data', function (data) { + _this.emit('stdout', data); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(data); + } + stdLineBuffer = _this._processLineBuffer(data, stdLineBuffer, function (line) { + _this.emit('stdline', line); + }); + }); + var errLineBuffer = ''; + (_d = cp.stderr) === null || _d === void 0 ? void 0 : _d.on('data', function (data) { + _this.emit('stderr', data); + success = !optionsNonNull.failOnStdErr; + if (!optionsNonNull.silent) { + var s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + errLineBuffer = _this._processLineBuffer(data, errLineBuffer, function (line) { + _this.emit('errline', line); + }); + }); + cp.on('error', function (err) { + waitingEvents--; //process is done with errors + error = new Error(toolPath + ' failed. 
' + err.message); + if (waitingEvents == 0) { + defer.reject(error); + } + }); + cp.on('close', function (code, signal) { + waitingEvents--; //process is complete + _this._debug('rc:' + code); + returnCode = code; + if (stdLineBuffer.length > 0) { + _this.emit('stdline', stdLineBuffer); + } + if (errLineBuffer.length > 0) { + _this.emit('errline', errLineBuffer); + } + if (code != 0 && !optionsNonNull.ignoreReturnCode) { + success = false; + } + _this._debug('success:' + success); + if (!successFirst) { //in the case output is piped to another tool, check exit code of both tools + error = new Error(toolPathFirst + ' failed with return code: ' + returnCodeFirst); + } + else if (!success) { + error = new Error(toolPath + ' failed with return code: ' + code); + } + if (waitingEvents == 0) { + if (error) { + defer.reject(error); + } + else { + defer.resolve(returnCode); + } + } + }); + return defer.promise; + }; + /** + * Add argument + * Append an argument or an array of arguments + * returns ToolRunner for chaining + * + * @param val string cmdline or array of strings + * @returns ToolRunner + */ + ToolRunner.prototype.arg = function (val) { + if (!val) { + return this; + } + if (val instanceof Array) { + this._debug(this.toolPath + ' arg: ' + JSON.stringify(val)); + this.args = this.args.concat(val); + } + else if (typeof (val) === 'string') { + this._debug(this.toolPath + ' arg: ' + val); + this.args = this.args.concat(val.trim()); + } + return this; + }; + /** + * Parses an argument line into one or more arguments + * e.g. .line('"arg one" two -z') is equivalent to .arg(['arg one', 'two', '-z']) + * returns ToolRunner for chaining + * + * @param val string argument line + * @returns ToolRunner + */ + ToolRunner.prototype.line = function (val) { + if (!val) { + return this; + } + this._debug(this.toolPath + ' arg: ' + val); + this.args = this.args.concat(this._argStringToArray(val)); + return this; + }; + /** + * Add argument(s) if a condition is met + * Wraps arg(). See arg for details + * returns ToolRunner for chaining + * + * @param condition boolean condition + * @param val string cmdline or array of strings + * @returns ToolRunner + */ + ToolRunner.prototype.argIf = function (condition, val) { + if (condition) { + this.arg(val); + } + return this; + }; + /** + * Pipe output of exec() to another tool + * @param tool + * @param file optional filename to additionally stream the output to. + * @returns {ToolRunner} + */ + ToolRunner.prototype.pipeExecOutputToTool = function (tool, file) { + this.pipeOutputToTool = tool; + this.pipeOutputToFile = file; + return this; + }; + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See IExecOptions + * @returns number + */ + ToolRunner.prototype.execAsync = function (options) { + var _this = this; + var _a, _b, _c; + if (this.pipeOutputToTool) { + return this.execWithPipingAsync(this.pipeOutputToTool, options); + } + this._debug('exec tool: ' + this.toolPath); + this._debug('arguments:'); + this.args.forEach(function (arg) { + _this._debug(' ' + arg); + }); + var optionsNonNull = this._cloneExecOptions(options); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + var state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', function (message) { + _this._debug(message); + }); + var stdLineBuffer = ''; + var errLineBuffer = ''; + var emitDoneEvent = function (resolve, reject) { + state.on('done', function (error, exitCode) { + if (stdLineBuffer.length > 0) { + _this.emit('stdline', stdLineBuffer); + } + if (errLineBuffer.length > 0) { + _this.emit('errline', errLineBuffer); + } + if (cp) { + cp.removeAllListeners(); + } + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + }; + // Edge case when the node itself cant's spawn and emit event + var cp; + try { + cp = child.spawn(this._getSpawnFileName(options), this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(options)); + } + catch (error) { + return new Promise(function (resolve, reject) { + emitDoneEvent(resolve, reject); + state.processError = error.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + } + this.childProcess = cp; + // it is possible for the child process to end its last line without a new line. + // because stdout is buffered, this causes the last line to not get sent to the parent + // stream. Adding this event forces a flush before the child streams are closed. + (_a = cp.stdout) === null || _a === void 0 ? void 0 : _a.on('finish', function () { + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(os.EOL); + } + }); + (_b = cp.stdout) === null || _b === void 0 ? void 0 : _b.on('data', function (data) { + _this.emit('stdout', data); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(data); + } + stdLineBuffer = _this._processLineBuffer(data, stdLineBuffer, function (line) { + _this.emit('stdline', line); + }); + }); + (_c = cp.stderr) === null || _c === void 0 ? void 0 : _c.on('data', function (data) { + state.processStderr = true; + _this.emit('stderr', data); + if (!optionsNonNull.silent) { + var s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + errLineBuffer = _this._processLineBuffer(data, errLineBuffer, function (line) { + _this.emit('errline', line); + }); + }); + cp.on('error', function (err) { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + // Do not write debug logs here. Sometimes stdio not closed yet and you can damage user output commands. + cp.on('exit', function (code, signal) { + state.processExitCode = code; + state.processExitSignal = signal; + state.processExited = true; + state.CheckComplete(); + }); + cp.on('close', function (code, signal) { + state.processCloseCode = code; + state.processCloseSignal = signal; + state.processClosed = true; + state.processExited = true; + state.CheckComplete(); + }); + return new Promise(emitDoneEvent); + }; + /** + * Exec a tool. + * Output will be streamed to the live console. 
+ * Returns promise with return code + * + * @deprecated Use the `execAsync` method that returns a native Javascript promise instead + * @param tool path to tool to exec + * @param options optional exec options. See IExecOptions + * @returns number + */ + ToolRunner.prototype.exec = function (options) { + var _this = this; + var _a, _b, _c; + if (this.pipeOutputToTool) { + return this.execWithPiping(this.pipeOutputToTool, options); + } + var defer = Q.defer(); + this._debug('exec tool: ' + this.toolPath); + this._debug('arguments:'); + this.args.forEach(function (arg) { + _this._debug(' ' + arg); + }); + var optionsNonNull = this._cloneExecOptions(options); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + var state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', function (message) { + _this._debug(message); + }); + var stdLineBuffer = ''; + var errLineBuffer = ''; + state.on('done', function (error, exitCode) { + if (stdLineBuffer.length > 0) { + _this.emit('stdline', stdLineBuffer); + } + if (errLineBuffer.length > 0) { + _this.emit('errline', errLineBuffer); + } + if (cp) { + cp.removeAllListeners(); + } + if (error) { + defer.reject(error); + } + else { + defer.resolve(exitCode); + } + }); + // Edge case when the node itself cant's spawn and emit event + var cp; + try { + cp = child.spawn(this._getSpawnFileName(options), this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(options)); + } + catch (error) { + state.processError = error.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + return defer.promise; + } + this.childProcess = cp; + // it is possible for the child process to end its last line without a new line. + // because stdout is buffered, this causes the last line to not get sent to the parent + // stream. Adding this event forces a flush before the child streams are closed. + (_a = cp.stdout) === null || _a === void 0 ? void 0 : _a.on('finish', function () { + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(os.EOL); + } + }); + (_b = cp.stdout) === null || _b === void 0 ? void 0 : _b.on('data', function (data) { + _this.emit('stdout', data); + if (!optionsNonNull.silent) { + optionsNonNull.outStream.write(data); + } + stdLineBuffer = _this._processLineBuffer(data, stdLineBuffer, function (line) { + _this.emit('stdline', line); + }); + }); + (_c = cp.stderr) === null || _c === void 0 ? void 0 : _c.on('data', function (data) { + state.processStderr = true; + _this.emit('stderr', data); + if (!optionsNonNull.silent) { + var s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + errLineBuffer = _this._processLineBuffer(data, errLineBuffer, function (line) { + _this.emit('errline', line); + }); + }); + cp.on('error', function (err) { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + // Do not write debug logs here. Sometimes stdio not closed yet and you can damage user output commands. 
+ cp.on('exit', function (code, signal) { + state.processExitCode = code; + state.processExitSignal = signal; + state.processExited = true; + state.CheckComplete(); + }); + cp.on('close', function (code, signal) { + state.processCloseCode = code; + state.processCloseSignal = signal; + state.processClosed = true; + state.processExited = true; + state.CheckComplete(); + }); + return defer.promise; + }; + /** + * Exec a tool synchronously. + * Output will be *not* be streamed to the live console. It will be returned after execution is complete. + * Appropriate for short running tools + * Returns IExecSyncResult with output and return code + * + * @param tool path to tool to exec + * @param options optional exec options. See IExecSyncOptions + * @returns IExecSyncResult + */ + ToolRunner.prototype.execSync = function (options) { + var _this = this; + this._debug('exec tool: ' + this.toolPath); + this._debug('arguments:'); + this.args.forEach(function (arg) { + _this._debug(' ' + arg); + }); + var success = true; + options = this._cloneExecOptions(options); + if (!options.silent) { + options.outStream.write(this._getCommandString(options) + os.EOL); + } + var r = child.spawnSync(this._getSpawnFileName(options), this._getSpawnArgs(options), this._getSpawnSyncOptions(options)); + if (!options.silent && r.stdout && r.stdout.length > 0) { + options.outStream.write(r.stdout); + } + if (!options.silent && r.stderr && r.stderr.length > 0) { + options.errStream.write(r.stderr); + } + var res = { code: r.status, error: r.error }; + res.stdout = (r.stdout) ? r.stdout.toString() : ''; + res.stderr = (r.stderr) ? r.stderr.toString() : ''; + return res; + }; + /** + * Used to close child process by sending SIGNINT signal. + * It allows executed script to have some additional logic on SIGINT, before exiting. 
+ */ + ToolRunner.prototype.killChildProcess = function (signal) { + if (signal === void 0) { signal = "SIGTERM"; } + if (this.childProcess) { + this._debug("[killChildProcess] Signal ".concat(signal, " received")); + this.childProcess.kill(signal); + } + }; + return ToolRunner; +}(events.EventEmitter)); +exports.ToolRunner = ToolRunner; +var ExecState = /** @class */ (function (_super) { + __extends(ExecState, _super); + function ExecState(options, toolPath) { + var _this = _super.call(this) || this; + _this.delay = 10000; // 10 seconds + _this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + _this.options = options; + _this.toolPath = toolPath; + var delay = process.env['TASKLIB_TEST_TOOLRUNNER_EXITDELAY']; + if (delay) { + _this.delay = parseInt(delay); + } + return _this; + } + ExecState.prototype.CheckComplete = function () { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + }; + ExecState.prototype._debug = function (message) { + this.emit('debug', message); + }; + ExecState.prototype._setResult = function () { + // determine whether there is an error + var error; + if (this.processExited) { + this._debug("Process exited with code ".concat(this.processExitCode, " and signal ").concat(this.processExitSignal, " for tool '").concat(this.toolPath, "'")); + if (this.processError) { + error = new Error(im._loc('LIB_ProcessError', this.toolPath, this.processError)); + } + else if (this.processExitCode != 0 && !this.options.ignoreReturnCode) { + error = new Error(im._loc('LIB_ProcessExitCode', this.toolPath, this.processExitCode)); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(im._loc('LIB_ProcessStderr', this.toolPath)); + } + } + if (this.processClosed) { + this._debug("STDIO streams have closed and received exit code ".concat(this.processCloseCode, " and signal ").concat(this.processCloseSignal, " for tool '").concat(this.toolPath, "'")); + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + }; + ExecState.HandleTimeout = function (state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + console.log(im._loc('LIB_StdioNotClosed', state.delay / 1000, state.toolPath)); + state._debug(im._loc('LIB_StdioNotClosed', state.delay / 1000, state.toolPath)); + } + state._setResult(); + }; + return ExecState; +}(events.EventEmitter)); + + +/***/ }), + +/***/ 59: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Vault = void 0; +var fs = __nccwpck_require__(896); +var path = __nccwpck_require__(928); +var crypto = __nccwpck_require__(982); +var uuidV4 = __nccwpck_require__(21); +var algorithm = "aes-256-ctr"; +var encryptEncoding = 'hex'; +var unencryptedEncoding = 'utf8'; +// +// Store sensitive data in proc. +// Main goal: Protects tasks which would dump envvars from leaking secrets inadvertently +// the task lib clears after storing. +// Also protects against a dump of a process getting the secrets +// The secret is generated and stored externally for the lifetime of the task. 
+// +var Vault = /** @class */ (function () { + function Vault(keyPath) { + this._keyFile = path.join(keyPath, '.taskkey'); + this._store = {}; + this.genKey(); + } + Vault.prototype.initialize = function () { + }; + Vault.prototype.storeSecret = function (name, data) { + if (!name || name.length == 0) { + return false; + } + name = name.toLowerCase(); + if (!data || data.length == 0) { + if (this._store.hasOwnProperty(name)) { + delete this._store[name]; + } + return false; + } + var key = this.getKey(); + var iv = crypto.randomBytes(16); + var cipher = crypto.createCipheriv(algorithm, key, iv); + var crypted = cipher.update(data, unencryptedEncoding, encryptEncoding); + var cryptedFinal = cipher.final(encryptEncoding); + this._store[name] = iv.toString(encryptEncoding) + crypted + cryptedFinal; + return true; + }; + Vault.prototype.retrieveSecret = function (name) { + var secret; + name = (name || '').toLowerCase(); + if (this._store.hasOwnProperty(name)) { + var key = this.getKey(); + var data = this._store[name]; + var ivDataBuffer = Buffer.from(data, encryptEncoding); + var iv = ivDataBuffer.slice(0, 16); + var encryptedText = ivDataBuffer.slice(16); + var decipher = crypto.createDecipheriv(algorithm, key, iv); + var dec = decipher.update(encryptedText); + var decFinal = decipher.final(unencryptedEncoding); + secret = dec + decFinal; + } + return secret; + }; + Vault.prototype.getKey = function () { + var key = fs.readFileSync(this._keyFile).toString('utf8'); + // Key needs to be hashed to correct length to match algorithm (aes-256-ctr) + return crypto.createHash('sha256').update(key).digest(); + }; + Vault.prototype.genKey = function () { + fs.writeFileSync(this._keyFile, uuidV4(), { encoding: 'utf8' }); + }; + return Vault; +}()); +exports.Vault = Vault; + + +/***/ }), + +/***/ 380: +/***/ ((module) => { + +"use strict"; + +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + + +/***/ }), + +/***/ 87: +/***/ ((module) => { + +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + + +/***/ }), + +/***/ 560: +/***/ ((module) => { + +// vim:ts=4:sts=4:sw=4: +/*! 
+ * + * Copyright 2009-2017 Kris Kowal under the terms of the MIT + * license found at https://github.com/kriskowal/q/blob/v1/LICENSE + * + * With parts by Tyler Close + * Copyright 2007-2009 Tyler Close under the terms of the MIT X license found + * at http://www.opensource.org/licenses/mit-license.html + * Forked at ref_send.js version: 2009-05-11 + * + * With parts by Mark Miller + * Copyright (C) 2011 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +(function (definition) { + "use strict"; + + // This file will function properly as a