// Generates image history markdown files and/or cgmanifest.json component
// registration data for each definition in a release.
// NOTE(review): the module prologue (requires + handlebars "anchor" helper)
// and this file's exports live outside this block.
async function generateImageInformationFiles(
    repo,
    release,
    registry,
    registryPath,
    stubRegistry,
    stubRegistryPath,
    buildFirst,
    pruneBetweenDefinitions,
    generateCgManifest,
    generateMarkdown,
    overwrite,
    outputPath,
    definitionId,
) {
    // Load config files
    await configUtils.loadConfig();

    // De-dupe map shared across all definitions processed in this run so a
    // given component is only registered once in the cgmanifest.
    const alreadyRegistered = {};
    const cgManifest = {
        "Registrations": [],
        "Version": 1,
    };

    // cgmanifest file path and whether it exists
    const cgManifestPath = path.join(outputPath, "cgmanifest.json");
    const cgManifestExists = await asyncUtils.exists(cgManifestPath);

    console.log("(*) Generating image information files...");
    // Process a single definition when one was passed in, otherwise all
    const definitions = definitionId
        ? [definitionId]
        : configUtils.getSortedDefinitionBuildList();
    await asyncUtils.forEach(definitions, async (currentDefinitionId) => {
        // Target file paths and whether they exist
        const definitionRelativePath = configUtils.getDefinitionPath(
            currentDefinitionId,
            true,
        );
        const historyFolder = path.join(
            outputPath,
            definitionRelativePath,
            configUtils.getConfig("historyFolderName", "history"),
        );
        const version = configUtils.getVersionFromRelease(
            release,
            currentDefinitionId,
        );
        const markdownPath = path.join(historyFolder, `${version}.md`);
        const markdownExists = await asyncUtils.exists(markdownPath);

        // Skip if not overwriting and all files exist
        if (
            !overwrite &&
            (!generateMarkdown || markdownExists) &&
            (!generateCgManifest || cgManifestExists)
        ) {
            console.log(
                `(*) Skipping ${currentDefinitionId}. Not in overwrite mode and content already exists.`,
            );
            return;
        }

        // Extract information
        const definitionInfo = await getDefinitionImageContent(
            repo,
            release,
            registry,
            registryPath,
            stubRegistry,
            stubRegistryPath,
            currentDefinitionId,
            alreadyRegistered,
            buildFirst,
        );

        // Write markdown file as appropriate
        if (generateMarkdown && (overwrite || !markdownExists)) {
            console.log("(*) Writing image history markdown...");
            await asyncUtils.mkdirp(historyFolder);
            await asyncUtils.writeFile(markdownPath, definitionInfo.markdown);
        }

        // Add component registrations if we're using them
        if (generateCgManifest) {
            cgManifest.Registrations = cgManifest.Registrations.concat(
                definitionInfo.registrations,
            );
        }
        // Prune images if setting enabled
        if (pruneBetweenDefinitions) {
            await asyncUtils.spawn("docker", ["image", "prune", "-a", "-f"]);
        }
    });

    // Write final cgmanifest.json file if needed
    if (generateCgManifest && (overwrite || !cgManifestExists)) {
        console.log("(*) Writing cgmanifest.json...");
        await asyncUtils.writeFile(
            path.join(outputPath, "cgmanifest.json"),
            JSON.stringify(cgManifest, undefined, 4),
        );
    }
    console.log("(*) Done!");
}

// Builds (or pulls) each variant image for a definition, extracts its content
// information, and returns { registrations, markdown, version } for the
// generators above. `alreadyRegistered` is the caller's cross-definition
// de-dupe map and is mutated here.
async function getDefinitionImageContent(
    repo,
    release,
    registry,
    registryPath,
    stubRegistry,
    stubRegistryPath,
    definitionId,
    alreadyRegistered,
    buildFirst,
) {
    const version = configUtils.getVersionFromRelease(release, definitionId);
    const dependencies = configUtils.getDefinitionDependencies(definitionId);
    if (typeof dependencies !== "object") {
        // FIX: previously returned a bare [] here, but callers read
        // .registrations and .markdown off the result — return an empty
        // result object with the expected shape instead.
        return { registrations: [], markdown: "", version: version };
    }

    let registrations = [];

    const variants = configUtils.getVariants(definitionId) || [null];

    // Create header for markdown
    let markdown = await generateReleaseNotesHeader(
        repo,
        release,
        definitionId,
        variants,
        dependencies,
    );

    await asyncUtils.forEach(variants, async (variant) => {
        if (variant) {
            console.log(`\n(*) Processing variant ${variant}...`);
        }

        const imageTag = configUtils.getTagsForVersion(
            definitionId,
            version,
            registry,
            registryPath,
            variant,
        )[0];
        if (buildFirst) {
            // Build but don't push images
            console.log("(*) Building image...");
            await push(
                repo,
                release,
                false,
                registry,
                registryPath,
                registry,
                registryPath,
                false,
                false,
                [],
                1,
                1,
                false,
                definitionId,
            );
        } else {
            console.log(`(*) Pulling image ${imageTag}...`);
            await asyncUtils.spawn("docker", ["pull", imageTag]);
        }

        // Extract content information
        const contents = await imageContentUtils.getAllContentInfo(
            imageTag,
            dependencies,
        );

        // Update markdown content
        markdown =
            markdown +
            (await generateReleaseNotesPart(
                contents,
                release,
                stubRegistry,
                stubRegistryPath,
                definitionId,
                variant,
            ));

        // Add to registrations
        registrations = registrations.concat(
            getUniqueComponents(alreadyRegistered, contents),
        );
    });

    // Register upstream images
    await asyncUtils.forEach(dependencies.imageVariants, async (imageTag) => {
        if (typeof alreadyRegistered[imageTag] === "undefined") {
            const [image, imageVersion] = imageTag.split(":");
            registrations.push({
                "Component": {
                    "Type": "other",
                    "Other": {
                        "Name": `Docker Image: ${image}`,
                        "Version": imageVersion,
                        "DownloadUrl": dependencies.imageLink,
                    },
                },
            });
            // FIX: key the de-dupe map by the image tag that the guard above
            // checks (was keyed by dependencies.image, so the guard never
            // matched and the same upstream image could be registered again).
            alreadyRegistered[imageTag] = [imageVersion];
        }
    });

    return {
        registrations: registrations,
        markdown: markdown,
        version: version,
    };
}

// Filter out components already in the registration list and format output.
// Returns an array of formatted, de-duplicated components; mutates
// `alreadyRegistered` (keyed by the JSON of each raw component entry).
function getUniqueComponents(alreadyRegistered, contents) {
    let componentList = [];

    const contentFormatter = componentFormatterFactory.getFormatter(
        contents.distro,
    );
    for (let contentType in contents) {
        const formatterFn = contentFormatter[contentType];
        let content = contents[contentType];
        if (formatterFn && content) {
            // Normalize scalar content to an array so one code path handles both
            if (!Array.isArray(content)) {
                content = [content];
            }
            componentList = componentList.concat(
                content.reduce((prev, next) => {
                    const uniqueId = JSON.stringify(next);
                    if (!alreadyRegistered[uniqueId]) {
                        alreadyRegistered[uniqueId] = true;
                        const component = formatterFn(next);
                        if (component) {
                            prev.push(component);
                        }
                    }
                    return prev;
                }, []),
            );
        }
    }

    return componentList;
}

// Use template to generate header of version markdown content
async function generateReleaseNotesHeader(
    repo,
    release,
    definitionId,
    variants,
    dependencies,
) {
    // Compile (and cache in a module-level variable) the template on first use
    releaseNotesHeaderTemplate =
        releaseNotesHeaderTemplate ||
        handlebars.compile(
            await asyncUtils.readFile(
                path.join(__dirname, "..", "assets", "release-notes-header.md"),
            ),
        );
    const data = {
        version: configUtils.getVersionFromRelease(release, definitionId),
        definition: definitionId,
        release: release,
        annotation: dependencies.annotation,
        repository: repo,
        variants: variants,
        hasVariants: variants && variants[0],
    };
    return releaseNotesHeaderTemplate(data);
}
// Generate release notes section for variant
async function generateReleaseNotesPart(
    contents,
    release,
    stubRegistry,
    stubRegistryPath,
    definitionId,
    variant,
) {
    // Compile (and cache in a module-level variable) the template on first use
    releaseNotesVariantPartTemplate =
        releaseNotesVariantPartTemplate ||
        handlebars.compile(
            await asyncUtils.readFile(
                path.join(
                    __dirname,
                    "..",
                    "assets",
                    "release-notes-variant-part.md",
                ),
            ),
        );
    const markdownFormatter = markdownFormatterFactory.getFormatter();
    const formattedContents = getFormattedContents(contents, markdownFormatter);
    // FIX: pip/pipx may be absent or null (getFormattedContent returns null
    // for missing content) — guard before reading .length.
    formattedContents.hasPip =
        (formattedContents.pip || []).length > 0 ||
        (formattedContents.pipx || []).length > 0;
    formattedContents.tags = configUtils.getTagList(
        definitionId,
        release,
        "full-only",
        stubRegistry,
        stubRegistryPath,
        variant,
    );
    formattedContents.variant = variant;

    // architecture property could be a single string, an array, or an object of arrays by variant
    let architectures = configUtils.getBuildSettings(definitionId)
        .architectures || ["linux/amd64"];
    if (typeof architectures === "string") {
        // FIX: the single-string case documented above previously fell into
        // the object branch and produced undefined — wrap it instead.
        architectures = [architectures];
    } else if (!Array.isArray(architectures)) {
        // FIX: guard against a variant with no architecture entry so the
        // join below does not throw on undefined.
        architectures = architectures[variant] || [];
    }
    // Comma-separated list (idiomatic join replaces the manual reduce)
    formattedContents.architectures = architectures.join(", ");
    return releaseNotesVariantPartTemplate(formattedContents);
}

// Return all contents as an object of formatted values; keys mirror the
// input `contents`, values come from getFormattedContent (null when there is
// no formatter or no content for a key).
function getFormattedContents(contents, contentFormatter) {
    let formattedContents = {};
    for (let contentType in contents) {
        formattedContents[contentType] = getFormattedContent(
            contents[contentType],
            contentFormatter[contentType],
        );
    }
    return formattedContents;
}

// Formats a single content entry or an array of entries with formatterFn.
// Returns null when formatterFn or content is missing; for arrays, entries
// the formatter maps to a falsy value are dropped.
function getFormattedContent(content, formatterFn) {
    if (!formatterFn || !content) {
        return null;
    }
    if (!Array.isArray(content)) {
        return formatterFn(content);
    }
    return content.reduce((prev, next) => {
        const formattedContent = formatterFn(next);
        if (formattedContent) {
            prev.push(formattedContent);
        }
        return prev;
    }, []);
}
false : packageOnly; + prepAndPackageOnly = typeof prepAndPackageOnly === 'undefined' ? false : prepAndPackageOnly; + cleanWhenDone = typeof cleanWhenDone === 'undefined' ? true : cleanWhenDone; + stubRegistry = stubRegistry || registry; + stubRegistryPath = stubRegistryPath || registryPath; + + // Stage content and load config + const stagingFolder = await configUtils.getStagingFolder(release); + await configUtils.loadConfig(stagingFolder); + + if (!packageOnly) { + // First, push images, update content + await push(repo, release, updateLatest, registry, registryPath, stubRegistry, stubRegistryPath, true, prepAndPackageOnly, definitionsToSkipPush); + } + + // Then package + console.log(`\n(*) **** Package ${release} ****`); + + console.log(`(*) Updating package.json with release version...`); + const version = configUtils.getVersionFromRelease(release); + const packageJsonVersion = version === 'dev' ? packageJson.version + '-dev' : version; + const packageJsonPath = path.join(stagingFolder, 'package.json'); + const packageJsonRaw = await asyncUtils.readFile(packageJsonPath); + const packageJsonModified = packageJsonRaw.replace(/"version".?:.?".+"/, `"version": "${packageJsonVersion}"`); + await asyncUtils.writeFile(packageJsonPath, packageJsonModified); + + // Update all definition config files for release (devcontainer.json, Dockerfile, library-scripts) + const allDefinitions = configUtils.getAllDefinitionPaths(); + for (let currentDefinitionId in allDefinitions) { + if (typeof currentDefinitionId === 'string') { + await prep.updateConfigForRelease(currentDefinitionId, repo, release, registry, registryPath, stubRegistry, stubRegistryPath); + } + } + + console.log('(*) Packaging...'); + const opts = { stdio: 'inherit', cwd: stagingFolder, shell: true }; + await asyncUtils.spawn('yarn', ['install'], opts); + await asyncUtils.spawn('npm', ['pack'], opts); // Need to use npm due to https://github.com/yarnpkg/yarn/issues/685 + + let outputPath = null; + 
console.log('(*) Moving package...'); + outputPath = path.join(__dirname, '..', '..', `${packageJson.name}-${packageJsonVersion}.tgz`); + await asyncUtils.copyFile(path.join(stagingFolder, `${packageJson.name}-${packageJsonVersion}.tgz`), outputPath); + + if (cleanWhenDone) { + // And finally clean up + console.log('(*) Cleaning up...'); + await asyncUtils.rimraf(stagingFolder); + } + + console.log('(*) Done!!'); + + return outputPath; +} + +module.exports = { + package: package +} diff --git a/build/Source/patch.js b/build/Source/patch.js new file mode 100644 index 0000000000..36cbadfebf --- /dev/null +++ b/build/Source/patch.js @@ -0,0 +1,368 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +const path = require("path"); +const asyncUtils = require("./utils/async"); +const jsonc = require("jsonc").jsonc; + +async function patch(patchPath, registry, registryPath) { + patchPath = path.resolve(patchPath); + const patchConfig = await getPatchConfig(patchPath); + + console.log(`(*) Applying patch located at "${patchPath}"...`); + const dockerFilePath = `${patchPath}/${patchConfig.dockerFile || "Dockerfile"}`; + if (patchConfig.tagList) { + throw new Error("tagList property has been deprecated."); + } + + // Update each listed imageId + await asyncUtils.forEach(patchConfig.imageIds, async (imageId) => { + await patchImage( + imageId, + patchPath, + dockerFilePath, + patchConfig.bumpVersion, + registry, + registryPath, + ); + }); + + // If config says to delete any untagged images mentioned in the patch, do so. 
+ if (patchConfig.deleteUntaggedImages && patchConfig.imageIds) { + await deleteUntaggedImages(patchConfig.imageIds, registry); + } + + console.log("\n(*) Done!"); +} + +async function patchImage( + imageId, + patchPath, + dockerFilePath, + bumpVersion, + registry, +) { + console.log(`\n*** Updating Image: ${imageId} ***`); + const spawnOpts = { stdio: "inherit", cwd: patchPath, shell: true }; + + // Get repository and tag list for imageId + let repoAndTagList = await getImageRepositoryAndTags(imageId, registry); + if (repoAndTagList.length === 0) { + console.log("(*) No tags to patch. Skipping."); + return; + } + + console.log( + `(*) Tags to update: ${JSON.stringify( + repoAndTagList.reduce((prev, repoAndTag) => { + return ( + prev + repoAndTag.repository + ":" + repoAndTag.tag + " " + ); + }, ""), + undefined, + 4, + )}`, + ); + + // Bump breakfix number of it applies + if (bumpVersion) { + repoAndTagList = updateVersionTags(repoAndTagList); + } + + //Generate tag arguments + const tagArgs = repoAndTagList.reduce((prev, repoAndTag) => { + return prev.concat([ + "--tag", + `${registry}/${repoAndTag.repository}:${repoAndTag.tag}`, + ]); + }, []); + + // Pull and build patched image for tag + let retry = false; + do { + try { + await asyncUtils.spawn( + "docker", + [ + "build", + "--pull", + "--build-arg", + `ORIGINAL_IMAGE=${registry}/${repoAndTagList[0].repository}@${imageId}`, + ] + .concat(tagArgs) + .concat("-f", dockerFilePath, patchPath), + spawnOpts, + ); + } catch (ex) { + // Try to clean out unused images and retry once if get an out of storage response + if ( + ex.result && + ex.result.indexOf("no space left on device") >= 0 && + retry === false + ) { + console.log(`(*) Out of space - pruning all unused images...`); + await asyncUtils.spawn( + "docker", + ["image", "prune", "--all", "--force"], + spawnOpts, + ); + console.log(`(*) Retrying...`); + retry = true; + } else { + throw ex; + } + } + } while (retry); + + // Push updates + await 
// Bumps the break-fix (third) version component of each tag, e.g.
// "1.2.3-bullseye" -> "1.2.4-bullseye". Tags without a 3-part numeric
// version prefix are passed through unchanged.
// FIX: the original used Array.prototype.push's numeric return value as the
// reduce accumulator (TypeError on the second element), dropped the last
// version character via substring(0, firstDash - 1), concatenated instead of
// incrementing ("3" + 1 === "31"), and returned objects keyed `name` where
// callers read `.repository`.
function updateVersionTags(repoAndTagList) {
    return repoAndTagList.map((repoAndTag) => {
        let tag = repoAndTag.tag;
        // Get the version number section of the tag if it exists
        const firstDash = tag.indexOf("-");
        if (firstDash > 0) {
            const versionSection = tag.substring(0, firstDash);
            // See if there are three parts in the version number
            const versionParts = versionSection.split(".");
            const breakFix = Number.parseInt(versionParts[2], 10);
            if (versionParts.length === 3 && !Number.isNaN(breakFix)) {
                // Update the break fix version numerically
                tag = `${versionParts[0]}.${versionParts[1]}.${breakFix + 1}${tag.substring(firstDash)}`;
            }
        }
        return {
            repository: repoAndTag.repository,
            tag: tag,
        };
    });
}

// Convenience entry point: loads the patch config at patchPath and deletes
// any now-untagged images listed in it. No-op when the config has no imageIds.
async function deleteUnpatchedImages(patchPath, registry) {
    patchPath = path.resolve(patchPath);
    const patchConfig = await getPatchConfig(patchPath);
    if (!patchConfig.imageIds) {
        console.log("(!) Patch does not include image IDs. Nothing to do.");
        return;
    }
    return await deleteUntaggedImages(patchConfig.imageIds, registry);
}

// Deletes manifests for the given image digests from the ACR, skipping any
// manifest that still has tags pointing at it.
async function deleteUntaggedImages(imageIds, registry) {
    console.log("\n*** Deleting untagged images ***");
    // ACR registry name is the registry minus .azurecr.io
    const registryName = registry.replace(/\..*/, "");

    const manifests = await getImageManifests(imageIds, registry);

    console.log(
        `(*) Manifests to delete: ${JSON.stringify(manifests, undefined, 4)}`,
    );

    const spawnOpts = { stdio: "inherit", shell: true };
    await asyncUtils.forEach(manifests, async (manifest) => {
        // Only delete manifests with no remaining tags
        if (manifest.tags.length > 0) {
            console.log(
                `(!) Skipping ${manifest.digest} because it has tags: ${manifest.tags}`,
            );
            return;
        }
        const fullImageId = `${manifest.repository}@${manifest.digest}`;
        console.log(`(*) Deleting ${fullImageId}...`);
        await asyncUtils.spawn(
            "az",
            [
                "acr",
                "repository",
                "delete",
                "--yes",
                "--name",
                registryName,
                "--image",
                fullImageId,
            ],
            spawnOpts,
        );
    });

    console.log("(*) Done deleting manifests!");
}

// Find tags for image: queries every repository in the ACR for tags whose
// digest matches imageId and returns an array of { repository, tag } pairs.
async function getImageRepositoryAndTags(imageId, registry) {
    // ACR registry name is the registry minus .azurecr.io
    const registryName = registry.replace(/\..*/, "");

    // Get list of repositories
    console.log(`(*) Getting repository list for ACR "${registryName}"...`);
    const repositoryListOutput = await asyncUtils.spawn(
        "az",
        ["acr", "repository", "list", "--name", registryName],
        { shell: true, stdio: "pipe" },
    );
    const repositoryList = JSON.parse(repositoryListOutput);

    let repoAndTagList = [];
    await asyncUtils.forEach(repositoryList, async (repository) => {
        console.log(`(*) Checking in for "${imageId}" in "${repository}"...`);
        const tagListOutput = await asyncUtils.spawn(
            "az",
            [
                "acr",
                "repository",
                "show-tags",
                "--detail",
                "--name",
                registryName,
                "--repository",
                repository,
                "--query",
                `"[?digest=='${imageId}'].name"`,
            ],
            { shell: true, stdio: "pipe" },
        );
        const additionalTags = JSON.parse(tagListOutput);
        repoAndTagList = repoAndTagList.concat(
            additionalTags.map((tag) => {
                return {
                    repository: repository,
                    tag: tag,
                };
            }),
        );
    });
    return repoAndTagList;
}
"az", + ["acr", "repository", "list", "--name", registryName], + { shell: true, stdio: "pipe" }, + ); + const repositoryList = JSON.parse(repositoryListOutput); + + // Query each repository for images, then add any tags found to the list + const query = + imageIds.reduce((prev, current) => { + return prev + ? `${prev} || digest=='${current}'` + : `"[?digest=='${current}'`; + }, null) + '] | []"'; + await asyncUtils.forEach(repositoryList, async (repository) => { + console.log(`(*) Getting manifests from "${repository}"...`); + const registryManifestListOutput = await asyncUtils.spawn( + "az", + [ + "acr", + "repository", + "show-manifests", + "--name", + registryName, + "--repository", + repository, + "--query", + query, + ], + { shell: true, stdio: "pipe" }, + ); + let registryManifestList = JSON.parse(registryManifestListOutput); + registryManifestList = registryManifestList.map((manifest) => { + manifest.repository = repository; + return manifest; + }); + manifests = manifests.concat(registryManifestList); + }); + + return manifests; +} + +async function getPatchConfig(patchPath) { + const patchConfigFilePath = path.resolve(patchPath, "patch.json"); + if (!(await asyncUtils.exists(patchConfigFilePath))) { + throw `No patch.json found at ${patchConfigFilePath}`; + } + const patchConfig = await jsonc.read(patchConfigFilePath); + + if (typeof patchConfig.bumpVersion === "undefined") { + patchConfig.bumpVersion = true; + } + if (typeof patchConfig.deleteUntaggedImages === "undefined") { + patchConfig.deleteUntaggedImages = false; + } + + return patchConfig; +} + +async function patchAll(registry, registryPath) { + const patchRoot = path.resolve(__dirname, "..", "patch"); + const patchStatusFilePath = path.join(patchRoot, "status.json"); + const patchStatus = (await asyncUtils.exists(patchStatusFilePath)) + ? 
await jsonc.read(patchStatusFilePath) + : { complete: {}, failed: {} }; + patchStatus.failed = {}; + const patchList = await asyncUtils.readdir(patchRoot, { + withFileTypes: true, + }); + await asyncUtils.forEach(patchList, async (patchEntry) => { + if (patchStatus.complete[patchEntry.name]) { + console.log(`(*) Patch ${patchEntry.name} already complete.`); + return; + } + if (patchEntry.isDirectory()) { + try { + await patch( + path.join(patchRoot, patchEntry.name), + registry, + registryPath, + ); + patchStatus.complete[patchEntry.name] = true; + } catch (ex) { + console.log(`(!) Patch ${patchEntry.name} failed - ${ex}.`); + patchStatus.failed[patchEntry.name] = JSON.stringify( + ex, + undefined, + 4, + ); + await asyncUtils.writeFile( + patchStatusFilePath, + JSON.stringify(patchStatus, undefined, 4), + ); + throw ex; + } + } + }); + + // Write status file for next time + await asyncUtils.writeFile( + patchStatusFilePath, + JSON.stringify(patchStatus, undefined, 4), + ); +} + +module.exports = { + patchAll: patchAll, + patch: patch, + deleteUnpatchedImages: deleteUnpatchedImages, +}; diff --git a/build/Source/prep.js b/build/Source/prep.js new file mode 100644 index 0000000000..441df65788 --- /dev/null +++ b/build/Source/prep.js @@ -0,0 +1,519 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +const path = require("path"); +const asyncUtils = require("./utils/async"); +const configUtils = require("./utils/config"); +const mkdirp = require("mkdirp"); +const glob = require("glob"); +const handlebars = require("handlebars"); +let metaEnvTemplate; + +const scriptSHA = {}; + +const assetsPath = path.join(__dirname, "..", "assets"); +const stubPromises = { + alpine: asyncUtils.readFile(path.join(assetsPath, "alpine.Dockerfile")), + debian: asyncUtils.readFile(path.join(assetsPath, "debian.Dockerfile")), + redhat: asyncUtils.readFile(path.join(assetsPath, "redhat.Dockerfile")), +}; + +const dockerFilePreamble = configUtils.getConfig("dockerFilePreamble"); +const scriptLibraryPathInRepo = configUtils.getConfig( + "scriptLibraryPathInRepo", +); +const scriptLibraryFolderNameInDefinition = configUtils.getConfig( + "scriptLibraryFolderNameInDefinition", +); + +const historyUrlPrefix = configUtils.getConfig("historyUrlPrefix"); +const repositoryUrl = configUtils.getConfig("repositoryUrl"); + +// Prepares dockerfile for building or packaging +async function prepDockerFile( + devContainerDockerfilePath, + definitionId, + repo, + release, + registry, + registryPath, + stubRegistry, + stubRegistryPath, + isForBuild, + variant, +) { + const devContainerJsonPath = path.dirname(devContainerDockerfilePath); + + // Read Dockerfile + const devContainerDockerfileRaw = await asyncUtils.readFile( + devContainerDockerfilePath, + ); + + // Use exact version of building, MAJOR if not + const version = isForBuild + ? 
configUtils.getVersionFromRelease(release, definitionId) + : configUtils.majorFromRelease(release, definitionId); + + // Create initial result object + const prepResult = { + shouldFlattenBaseImage: false, + baseImage: null, + flattenedBaseImage: null, + devContainerDockerfileModified: await updateScriptSources( + devContainerDockerfileRaw, + repo, + release, + true, + ), + meta: { + version: version, + definitionId: definitionId, + variant: variant, + gitRepository: repositoryUrl, + gitRepositoryRelease: release, + contentsUrl: `${historyUrlPrefix}${definitionId}/${configUtils.getConfig("historyFolderName", "history")}/${version}.md`, + buildTimestamp: `${new Date().toUTCString()}`, + }, + }; + + // Copy any scripts from the script library, add meta.env into the appropriate definition specific folder + await copyLibraryScriptsForDefinition( + devContainerJsonPath, + isForBuild, + prepResult.meta, + ); + + if (isForBuild) { + // If building, update FROM to target registry and version if definition has a parent + const parentTag = configUtils.getParentTagForVersion( + definitionId, + version, + registry, + registryPath, + variant, + ); + if (parentTag) { + prepResult.devContainerDockerfileModified = replaceFrom( + prepResult.devContainerDockerfileModified, + `FROM ${parentTag}`, + ); + } + + prepResult.shouldFlattenBaseImage = + configUtils.shouldFlattenDefinitionBaseImage(definitionId); + if (prepResult.shouldFlattenBaseImage) { + // Determine base image + const baseImageFromCaptureGroups = /FROM\s+(.+):([^\s\n]+)?/.exec( + prepResult.devContainerDockerfileModified, + ); + let registryPath = baseImageFromCaptureGroups[1] + .replace("${VARIANT}", variant) + .replace("$VARIANT", variant); + const tagName = + baseImageFromCaptureGroups.length > 2 + ? baseImageFromCaptureGroups[2] + .replace("${VARIANT}", variant) + .replace("$VARIANT", variant) + : null; + prepResult.baseImageTag = + registryPath + (tagName ? 
":" + tagName : ""); + + // Create tag for flattened image + const registrySlashIndex = registryPath.indexOf("/"); + if (registrySlashIndex > -1) { + registryPath = registryPath.substring(registrySlashIndex + 1); + } + prepResult.flattenedBaseImageTag = `${registry}/${registryPath}:${tagName ? tagName + "-" : ""}flattened`; + + // Modify Dockerfile contents to use flattened image tag + prepResult.devContainerDockerfileModified = replaceFrom( + prepResult.devContainerDockerfileModified, + `FROM ${prepResult.flattenedBaseImageTag}`, + ); + } + } else { + // Otherwise update any Dockerfiles that refer to an un-versioned tag of another dev container + // to the MAJOR version from this release. + const expectedRegistry = configUtils.getConfig( + "stubRegistry", + "mcr.microsoft.com", + ); + const expectedRegistryPath = configUtils.getConfig( + "stubRegistryPath", + "vscode/devcontainers", + ); + const fromCaptureGroups = new RegExp( + `FROM\\s+(${expectedRegistry}/${expectedRegistryPath}/.+:.+)`, + ).exec(devContainerDockerfileRaw); + if (fromCaptureGroups && fromCaptureGroups.length > 0) { + const fromDefinitionTag = configUtils.getUpdatedTag( + fromCaptureGroups[1], + expectedRegistry, + expectedRegistryPath, + version, + stubRegistry, + stubRegistryPath, + variant, + ); + prepResult.devContainerDockerfileModified = + prepResult.devContainerDockerfileModified.replace( + fromCaptureGroups[0], + `FROM ${fromDefinitionTag}`, + ); + } + } + + await asyncUtils.writeFile( + devContainerDockerfilePath, + prepResult.devContainerDockerfileModified, + ); + return prepResult; +} + +async function createStub( + dotDevContainerPath, + definitionId, + repo, + release, + baseDockerFileExists, + stubRegistry, + stubRegistryPath, +) { + const userDockerFilePath = path.join(dotDevContainerPath, "Dockerfile"); + console.log("(*) Generating user Dockerfile..."); + const templateDockerfile = await configUtils.objectByDefinitionLinuxDistro( + definitionId, + stubPromises, + ); + const 
userDockerFile = await processStub( + templateDockerfile, + definitionId, + repo, + release, + baseDockerFileExists, + stubRegistry, + stubRegistryPath, + ); + await asyncUtils.writeFile(userDockerFilePath, userDockerFile); +} + +async function updateStub( + dotDevContainerPath, + definitionId, + repo, + release, + baseDockerFileExists, + registry, + registryPath, +) { + console.log("(*) Updating user Dockerfile..."); + const userDockerFilePath = path.join(dotDevContainerPath, "Dockerfile"); + const userDockerFile = await asyncUtils.readFile(userDockerFilePath); + const userDockerFileModified = await processStub( + userDockerFile, + definitionId, + repo, + release, + baseDockerFileExists, + registry, + registryPath, + ); + await asyncUtils.writeFile(userDockerFilePath, userDockerFileModified); +} + +async function processStub( + userDockerFile, + definitionId, + repo, + release, + baseDockerFileExists, + registry, + registryPath, +) { + const devContainerImageVersion = configUtils.majorFromRelease( + release, + definitionId, + ); + const relativePath = configUtils.getDefinitionPath(definitionId, true); + let fromSection = `# ${dockerFilePreamble}https://github.com/${repo}/tree/${release}/${relativePath}/.devcontainer/${baseDockerFileExists ? "base." 
: ""}Dockerfile\n\n`; + // The VARIANT arg allows this value to be set from devcontainer.json, handle it if found + if (/ARG\s+VARIANT\s*=/.exec(userDockerFile) !== null) { + const variant = configUtils.getVariants(definitionId)[0]; + const tagWithVariant = configUtils.getTagsForVersion( + definitionId, + devContainerImageVersion, + registry, + registryPath, + "${VARIANT}", + )[0]; + // Handle scenario where "# [Choice]" comment exists + const choiceCaptureGroup = /(#\s+\[Choice\].+\n)ARG\s+VARIANT\s*=/.exec( + userDockerFile, + ); + if (choiceCaptureGroup) { + fromSection += choiceCaptureGroup[1]; + } + fromSection += `ARG VARIANT="${variant}"\nFROM ${tagWithVariant}`; + } else { + const imageTag = configUtils.getTagsForVersion( + definitionId, + devContainerImageVersion, + registry, + registryPath, + )[0]; + fromSection += `FROM ${imageTag}`; + } + + return replaceFrom(userDockerFile, fromSection); +} + +async function updateConfigForRelease( + definitionId, + repo, + release, + registry, + registryPath, + stubRegistry, + stubRegistryPath, +) { + // Look for context in devcontainer.json and use it to build the Dockerfile + console.log(`(*) Making version specific updates to ${definitionId}...`); + const definitionPath = configUtils.getDefinitionPath(definitionId, false); + const relativePath = configUtils.getDefinitionPath(definitionId, true); + const dotDevContainerPath = path.join(definitionPath, ".devcontainer"); + const devContainerJsonPath = path.join( + dotDevContainerPath, + "devcontainer.json", + ); + const devContainerJsonRaw = await asyncUtils.readFile(devContainerJsonPath); + const devContainerJsonModified = + `// ${configUtils.getConfig("devContainerJsonPreamble")}https://github.com/${repo}/tree/${release}/${relativePath}\n` + + devContainerJsonRaw; + await asyncUtils.writeFile(devContainerJsonPath, devContainerJsonModified); + + // Replace version specific content in Dockerfile + const dockerFilePath = path.join(dotDevContainerPath, "Dockerfile"); + 
if (await asyncUtils.exists(dockerFilePath)) { + await prepDockerFile( + dockerFilePath, + definitionId, + repo, + release, + registry, + registryPath, + stubRegistry, + stubRegistryPath, + false, + ); + } +} + +// Replace script URLs and generate SHAs if applicable +async function updateScriptSources( + devContainerDockerfileRaw, + repo, + release, + updateScriptSha, +) { + updateScriptSha = + typeof updateScriptSha === "undefined" ? true : updateScriptSha; + let devContainerDockerfileModified = devContainerDockerfileRaw; + + const scriptArgs = + /ARG\s+.+_SCRIPT_SOURCE/.exec(devContainerDockerfileRaw) || []; + await asyncUtils.forEach(scriptArgs, async (scriptArg) => { + // Replace script URL and generate SHA if applicable + const scriptCaptureGroups = new RegExp( + `${scriptArg}\\s*=\\s*"(.+)/${scriptLibraryPathInRepo.replace(".", "\\.")}/(.+)"`, + ).exec(devContainerDockerfileModified); + if (scriptCaptureGroups) { + console.log(`(*) Script library source found.`); + const scriptName = scriptCaptureGroups[2]; + const scriptSource = `https://raw.githubusercontent.com/${repo}/${release}/${scriptLibraryPathInRepo}/${scriptName}`; + console.log(` Updated script source URL: ${scriptSource}`); + let sha = scriptSHA[scriptName]; + if (updateScriptSha && typeof sha === "undefined") { + const scriptRaw = await asyncUtils.getUrlAsString(scriptSource); + sha = await asyncUtils.shaForString(scriptRaw); + scriptSHA[scriptName] = sha; + } + console.log(` Script SHA: ${sha}`); + const shaArg = scriptArg.replace("_SOURCE", "_SHA"); + devContainerDockerfileModified = devContainerDockerfileModified + .replace( + new RegExp(`${scriptArg}\\s*=\\s*".+"`), + `${scriptArg}="${scriptSource}"`, + ) + .replace( + new RegExp(`${shaArg}\\s*=\\s*".+"`), + `${shaArg}="${updateScriptSha ? 
sha : "dev-mode"}"`, + ); + } + }); + + return devContainerDockerfileModified; +} + +// Update script files and URLs in a Dockerfile to be release specific (or not) and optionally update the SHA to lock to this version +async function updateScriptSourcesInDockerfile( + devContainerDockerfilePath, + repo, + release, + updateScriptSha, +) { + const devContainerDockerfileRaw = await asyncUtils.readFile( + devContainerDockerfilePath, + ); + const devContainerDockerfileModified = await updateScriptSources( + devContainerDockerfileRaw, + repo, + release, + updateScriptSha, + ); + await asyncUtils.writeFile( + devContainerDockerfilePath, + devContainerDockerfileModified, + ); + await copyLibraryScriptsForDefinition( + path.dirname(devContainerDockerfilePath), + ); +} + +// Update all script URLS in the entire repo (not staging folder) +async function updateAllScriptSourcesInRepo(repo, release, updateScriptSha) { + const definitionFolder = path.join(__dirname, "..", "..", "containers"); + // Update script versions in definition Dockerfiles for release + const allDefinitions = await asyncUtils.readdir(definitionFolder, { + withFileTypes: true, + }); + await asyncUtils.forEach(allDefinitions, async (currentDefinition) => { + if (!currentDefinition.isDirectory()) { + return; + } + + const dockerFileBasePath = path.join( + definitionFolder, + currentDefinition.name, + ".devcontainer", + "base.Dockerfile", + ); + if (await asyncUtils.exists(dockerFileBasePath)) { + console.log( + `(*) Looking for script source in base.Dockerfile for ${currentDefinition.name}.`, + ); + await updateScriptSourcesInDockerfile( + dockerFileBasePath, + repo, + release, + updateScriptSha, + ); + } + const dockerFilePath = path.join( + definitionFolder, + currentDefinition.name, + ".devcontainer", + "Dockerfile", + ); + if (await asyncUtils.exists(dockerFilePath)) { + console.log( + `(*) Looking for script source in Dockerfile for ${currentDefinition.name}.`, + ); + await 
updateScriptSourcesInDockerfile( + dockerFilePath, + repo, + release, + updateScriptSha, + ); + } + }); +} + +// Copy contents of script library to folder, meta.env file if specified and building +async function copyLibraryScriptsForDefinition( + definitionDevContainerJsonFolder, + isForBuild, + meta, +) { + const libraryScriptsFolder = path.join( + definitionDevContainerJsonFolder, + scriptLibraryFolderNameInDefinition, + ); + if (await asyncUtils.exists(libraryScriptsFolder)) { + await asyncUtils.forEach( + await asyncUtils.readdir(libraryScriptsFolder), + async (script) => { + // Only copy files that end in .sh + if (path.extname(script) !== ".sh") { + return; + } + const possibleScriptSource = path.join( + __dirname, + "..", + "..", + scriptLibraryPathInRepo, + script, + ); + if (await asyncUtils.exists(possibleScriptSource)) { + const targetScriptPath = path.join( + libraryScriptsFolder, + script, + ); + console.log( + `(*) Copying ${script} to ${libraryScriptsFolder}...`, + ); + await asyncUtils.copyFile( + possibleScriptSource, + targetScriptPath, + ); + } + }, + ); + } + if (isForBuild && meta) { + // Write meta.env for use by scripts + metaEnvTemplate = + metaEnvTemplate || + handlebars.compile( + await asyncUtils.readFile( + path.join(__dirname, "..", "assets", "meta.env"), + ), + ); + mkdirp(libraryScriptsFolder); + await asyncUtils.writeFile( + path.join(libraryScriptsFolder, "meta.env"), + metaEnvTemplate(meta), + ); + } +} + +// For CI of the script library folder +async function copyLibraryScriptsForAllDefinitions() { + const devcontainerFolders = glob.sync( + `${path.resolve(__dirname, "..", "..")}/+(containers|container-templates|repository-containers)/**/.devcontainer`, + ); + await asyncUtils.forEach(devcontainerFolders, async (folder) => { + console.log( + `(*) Checking ${path.basename(path.resolve(folder, ".."))} for ${scriptLibraryFolderNameInDefinition} folder...`, + ); + await copyLibraryScriptsForDefinition(folder); + }); +} + +function 
replaceFrom(dockerFileContents, newFromSection) { + return dockerFileContents.replace( + /(#\s+\[Choice\].+\n)?(ARG\s+VARIANT\s*=\s*.+\n)?(FROM\s+[^\s\n]+)/, + newFromSection, + ); +} + +module.exports = { + createStub: createStub, + updateStub: updateStub, + updateConfigForRelease: updateConfigForRelease, + prepDockerFile: prepDockerFile, + copyLibraryScriptsForAllDefinitions: copyLibraryScriptsForAllDefinitions, + updateScriptSourcesInDockerfile: updateScriptSourcesInDockerfile, + updateAllScriptSourcesInRepo: updateAllScriptSourcesInRepo, +}; diff --git a/build/Source/push.js b/build/Source/push.js new file mode 100644 index 0000000000..f85d0c0058 --- /dev/null +++ b/build/Source/push.js @@ -0,0 +1,509 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +const path = require("path"); +const jsonc = require("jsonc").jsonc; +const asyncUtils = require("./utils/async"); +const configUtils = require("./utils/config"); +const prep = require("./prep"); + +const imageLabelPrefix = configUtils.getConfig( + "imageLabelPrefix", + "com.microsoft.vscode.devcontainers", +); + +async function push( + repo, + release, + updateLatest, + registry, + registryPath, + stubRegistry, + stubRegistryPath, + pushImages, + prepOnly, + definitionsToSkip, + page, + pageTotal, + replaceImages, + definitionId, +) { + // Optional argument defaults + prepOnly = typeof prepOnly === "undefined" ? false : prepOnly; + pushImages = typeof pushImages === "undefined" ? 
true : pushImages; + page = page || 1; + pageTotal = pageTotal || 1; + stubRegistry = stubRegistry || registry; + stubRegistryPath = stubRegistryPath || registryPath; + definitionsToSkip = definitionsToSkip || []; + + // Always replace images when building and pushing the "dev" tag + replaceImages = + configUtils.getVersionFromRelease(release, definitionId) == "dev" || + replaceImages; + + // Stage content + const stagingFolder = await configUtils.getStagingFolder(release); + await configUtils.loadConfig(stagingFolder); + + // Use or create a buildx / buildkit "builder" that using the docker-container driver which internally + // uses QEMU to emulate different architectures for cross-platform builds. Setting up a separate + // builder avoids problems with the default config being different otherwise altered. It also can + // be tweaked down the road to use a different driver like using separate machines per architecture. + // See https://docs.docker.com/engine/reference/commandline/buildx_create/ + console.log("(*) Setting up builder..."); + const builders = await asyncUtils.exec("docker buildx ls"); + if (builders.indexOf("vscode-dev-containers") < 0) { + await asyncUtils.spawn("docker", [ + "buildx", + "create", + "--use", + "--name", + "vscode-dev-containers", + ]); + } else { + await asyncUtils.spawn("docker", [ + "buildx", + "use", + "vscode-dev-containers", + ]); + } + // This step sets up the QEMU emulators for cross-platform builds. See https://github.com/docker/buildx#building-multi-platform-images + await asyncUtils.spawn("docker", [ + "run", + "--privileged", + "--rm", + "tonistiigi/binfmt", + "--install", + "all", + ]); + + // Build and push subset of images + const definitionsToPush = definitionId + ? 
[definitionId] + : configUtils.getSortedDefinitionBuildList( + page, + pageTotal, + definitionsToSkip, + ); + await asyncUtils.forEach(definitionsToPush, async (currentDefinitionId) => { + console.log(`**** Pushing ${currentDefinitionId} ${release} ****`); + await pushImage( + currentDefinitionId, + repo, + release, + updateLatest, + registry, + registryPath, + stubRegistry, + stubRegistryPath, + prepOnly, + pushImages, + replaceImages, + ); + }); + + return stagingFolder; +} + +async function pushImage( + definitionId, + repo, + release, + updateLatest, + registry, + registryPath, + stubRegistry, + stubRegistryPath, + prepOnly, + pushImages, + replaceImage, +) { + const definitionPath = configUtils.getDefinitionPath(definitionId); + const dotDevContainerPath = path.join(definitionPath, ".devcontainer"); + // Use base.Dockerfile for image build if found, otherwise use Dockerfile + const dockerFileExists = await asyncUtils.exists( + path.join(dotDevContainerPath, "Dockerfile"), + ); + const baseDockerFileExists = await asyncUtils.exists( + path.join(dotDevContainerPath, "base.Dockerfile"), + ); + const dockerFilePath = path.join( + dotDevContainerPath, + `${baseDockerFileExists ? "base." : ""}Dockerfile`, + ); + + // Make sure there's a Dockerfile present + if (!(await asyncUtils.exists(dockerFilePath))) { + throw `Definition ${definitionId} does not exist! 
Invalid path: ${definitionPath}`; + } + + // Look for context in devcontainer.json and use it to build the Dockerfile + console.log("(*) Reading devcontainer.json..."); + const devContainerJsonPath = path.join( + dotDevContainerPath, + "devcontainer.json", + ); + const devContainerJsonRaw = await asyncUtils.readFile(devContainerJsonPath); + const devContainerJson = jsonc.parse(devContainerJsonRaw); + + // Process variants in reverse order to be sure the first one is tagged as "latest" if appropriate + const variants = configUtils.getVariants(definitionId) || [null]; + for (let i = variants.length - 1; i > -1; i--) { + const variant = variants[i]; + + // Update common setup script download URL, SHA, parent tag if applicable + console.log( + `(*) Prep Dockerfile for ${definitionId} ${variant ? 'variant "' + variant + '"' : ""}...`, + ); + const prepResult = await prep.prepDockerFile( + dockerFilePath, + definitionId, + repo, + release, + registry, + registryPath, + stubRegistry, + stubRegistryPath, + true, + variant, + ); + + if (prepOnly) { + console.log(`(*) Skipping build and push to registry.`); + } else { + if (prepResult.shouldFlattenBaseImage) { + console.log(`(*) Flattening base image...`); + await flattenBaseImage( + prepResult.baseImageTag, + prepResult.flattenedBaseImageTag, + pushImages, + ); + } + + // Build image + console.log(`(*) Building image...`); + // Determine tags to use + const versionTags = configUtils.getTagList( + definitionId, + release, + updateLatest, + registry, + registryPath, + variant, + ); + console.log( + `(*) Tags:${versionTags.reduce((prev, current) => (prev += `\n ${current}`), "")}`, + ); + const buildSettings = configUtils.getBuildSettings(definitionId); + let architectures = buildSettings.architectures; + switch (typeof architectures) { + case "string": + architectures = [architectures]; + break; + case "object": + if (!Array.isArray(architectures)) { + architectures = architectures[variant]; + } + break; + case "undefined": + 
architectures = ["linux/amd64"]; + break; + } + console.log( + `(*) Target image architectures: ${architectures.reduce((prev, current) => (prev += `\n ${current}`), "")}`, + ); + let localArchitecture = process.arch; + switch (localArchitecture) { + case "arm": + localArchitecture = "linux/arm/v7"; + break; + case "aarch32": + localArchitecture = "linux/arm/v7"; + break; + case "aarch64": + localArchitecture = "linux/arm64"; + break; + case "x64": + localArchitecture = "linux/amd64"; + break; + case "x32": + localArchitecture = "linux/386"; + break; + default: + localArchitecture = `linux/${localArchitecture}`; + break; + } + console.log(`(*) Local architecture: ${localArchitecture}`); + if (!pushImages) { + console.log( + `(*) Push disabled: Only building local architecture (${localArchitecture}).`, + ); + } + if ( + replaceImage || + !(await isDefinitionVersionAlreadyPublished( + definitionId, + release, + registry, + registryPath, + variant, + )) + ) { + const context = devContainerJson.build + ? devContainerJson.build.context || "." 
+ : devContainerJson.context || "."; + const workingDir = path.resolve(dotDevContainerPath, context); + // Add tags to buildx command params + const buildParams = versionTags.reduce( + (prev, current) => prev.concat(["-t", current]), + [], + ); + // Note: build.args in devcontainer.json is intentionally ignored so you can vary image contents and defaults as needed + // Add VARIANT --build-arg if applicable + if (variant) { + buildParams.push("--build-arg", `VARIANT=${variant}`); + } + // Generate list of --build-arg values if applicable + for (let buildArg in buildSettings.buildArgs || {}) { + buildParams.push( + "--build-arg", + `${buildArg}=${buildSettings.buildArgs[buildArg]}`, + ); + } + // Generate list of variant specific --build-arg values if applicable + if (buildSettings.variantBuildArgs) { + for (let buildArg in buildSettings.variantBuildArgs[ + variant + ] || {}) { + buildParams.push( + "--build-arg", + `${buildArg}=${buildSettings.variantBuildArgs[variant][buildArg]}`, + ); + } + } + const spawnOpts = { + stdio: "inherit", + cwd: workingDir, + shell: true, + }; + await asyncUtils.spawn( + "docker", + [ + "buildx", + "build", + workingDir, + "-f", + dockerFilePath, + "--label", + `version=${prepResult.meta.version}`, + `--label`, + `${imageLabelPrefix}.id=${prepResult.meta.definitionId}`, + "--label", + `${imageLabelPrefix}.variant=${prepResult.meta.variant}`, + "--label", + `${imageLabelPrefix}.release=${prepResult.meta.gitRepositoryRelease}`, + "--label", + `${imageLabelPrefix}.source=${prepResult.meta.gitRepository}`, + "--label", + `${imageLabelPrefix}.timestamp='${prepResult.meta.buildTimestamp}'`, + "--builder", + "vscode-dev-containers", + "--progress", + "plain", + "--platform", + pushImages + ? architectures + .reduce( + (prev, current) => prev + "," + current, + "", + ) + .substring(1) + : localArchitecture, + pushImages ? 
"--push" : "--load", + ...buildParams, + ], + spawnOpts, + ); + if (!pushImages) { + console.log(`(*) Skipping push to registry.`); + } + } else { + console.log(`(*) Version already published. Skipping.`); + } + } + } + + // If base.Dockerfile found, update stub/devcontainer.json, otherwise create - just use the default (first) variant if one exists + if (baseDockerFileExists && dockerFileExists) { + await prep.updateStub( + dotDevContainerPath, + definitionId, + repo, + release, + baseDockerFileExists, + stubRegistry, + stubRegistryPath, + ); + console.log("(*) Updating devcontainer.json..."); + await asyncUtils.writeFile( + devContainerJsonPath, + devContainerJsonRaw.replace('"base.Dockerfile"', '"Dockerfile"'), + ); + console.log("(*) Removing base.Dockerfile..."); + await asyncUtils.rimraf(dockerFilePath); + } else { + await prep.createStub( + dotDevContainerPath, + definitionId, + repo, + release, + baseDockerFileExists, + stubRegistry, + stubRegistryPath, + ); + } + + console.log("(*) Done!\n"); +} + +async function flattenBaseImage( + baseImageTag, + flattenedBaseImageTag, + pushImages, +) { + const flattenedImageCaptureGroups = /([^\/]+)\/(.+):(.+)/.exec( + flattenedBaseImageTag, + ); + if ( + await isImageAlreadyPublished( + flattenedImageCaptureGroups[1], + flattenedImageCaptureGroups[2], + flattenedImageCaptureGroups[3], + ) + ) { + console.log("(*) Flattened base image already published."); + return; + } + + // Flatten + const processOpts = { stdio: "inherit", shell: true }; + console.log("(*) Preparing base image..."); + await asyncUtils.spawn( + "docker", + [ + "run", + "-d", + "--name", + "vscode-dev-containers-build-flatten", + baseImageTag, + "bash", + ], + processOpts, + ); + const containerInspectOutput = await asyncUtils.spawn( + "docker", + ["inspect", "vscode-dev-containers-build-flatten"], + { shell: true, stdio: "pipe" }, + ); + console.log("(*) Flattening (this could take a while)..."); + const config = 
        JSON.parse(containerInspectOutput)[0].Config;
    // Rebuild ENV / ENTRYPOINT / CMD from the inspected container config so the
    // flattened (exported then re-imported) image keeps the same runtime behavior.
    const envString = config.Env.reduce(
        (prev, current) => prev + " " + current,
        "",
    );
    const importArgs = `-c 'ENV ${envString}' -c 'ENTRYPOINT ${JSON.stringify(config.Entrypoint)}' -c 'CMD ${JSON.stringify(config.Cmd)}'`;
    await asyncUtils.exec(
        `docker export vscode-dev-containers-build-flatten | docker import ${importArgs} - ${flattenedBaseImageTag}`,
        processOpts,
    );
    // Clean up the temporary container used for the export
    await asyncUtils.spawn(
        "docker",
        ["container", "rm", "-f", "vscode-dev-containers-build-flatten"],
        processOpts,
    );

    // Push if enabled
    if (pushImages) {
        console.log("(*) Pushing...");
        await asyncUtils.spawn(
            "docker",
            ["push", flattenedBaseImageTag],
            processOpts,
        );
    } else {
        console.log("(*) Skipping push.");
    }
}

// Returns true when the first tag for this definition/release/variant already
// exists in the container registry (checked via isImageAlreadyPublished).
async function isDefinitionVersionAlreadyPublished(
    definitionId,
    release,
    registry,
    registryPath,
    variant,
) {
    // See if image already exists
    const tagsToCheck = configUtils.getTagList(
        definitionId,
        release,
        false,
        registry,
        registryPath,
        variant,
    );
    const tagParts = tagsToCheck[0].split(":");
    // Strip the domain suffix to get the bare registry name for the az CLI
    const registryName = registry.replace(/\..*/, "");
    return await isImageAlreadyPublished(
        registryName,
        tagParts[0].replace(/[^\/]+\//, ""),
        tagParts[1],
    );
}

// Uses the Azure CLI ("az acr") to check whether repository:tag already exists
// in the given Azure Container Registry.
async function isImageAlreadyPublished(registryName, repositoryName, tagName) {
    registryName = registryName.replace(/\.azurecr\.io.*/, "");
    // Check if repository exists
    const repositoriesOutput = await asyncUtils.spawn(
        "az",
        ["acr", "repository", "list", "--name", registryName],
        { shell: true, stdio: "pipe" },
    );
    const repositories = JSON.parse(repositoriesOutput);
    if (repositories.indexOf(repositoryName) < 0) {
        console.log(
            "(*) Repository does not exist. Image version has not been published yet.",
        );
        return false;
    }

    // Assuming repository exists, check if tag exists
    const tagListOutput = await asyncUtils.spawn(
        "az",
        [
            "acr",
            "repository",
            "show-tags",
            "--name",
            registryName,
            "--repository",
            repositoryName,
            "--query",
            `"[?@=='${tagName}']"`,
        ],
        { shell: true, stdio: "pipe" },
    );
    const tagList = JSON.parse(tagListOutput);
    if (tagList.length > 0) {
        console.log("(*) Image version has already been published.");
        return true;
    }
    console.log("(*) Image version has not been published yet.");
    return false;
}

module.exports = {
    push: push,
};
diff --git a/build/Source/utils/async.js b/build/Source/utils/async.js
new file mode 100644
index 0000000000..46215bae33
--- /dev/null
+++ b/build/Source/utils/async.js
@@ -0,0 +1,227 @@
/*--------------------------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
+ *-------------------------------------------------------------------------------------------------------------*/ + +const fs = require("fs"); +const https = require("https"); +const crypto = require("crypto"); +const rimrafCb = require("rimraf"); +const mkdirpCb = require("mkdirp"); +const copyFilesCb = require("copyfiles"); +const spawnCb = require("child_process").spawn; +const execCb = require("child_process").exec; + +module.exports = { + // async forEach + forEach: async (array, cb) => { + for (let i = 0; i < array.length; i++) { + await cb(array[i], i, array); + } + }, + + // async spawn + spawn: async (command, args, opts) => { + console.log( + `(*) Spawn: ${command}${args.reduce((prev, current) => `${prev} ${current}`, "")}`, + ); + + opts = opts || { stdio: "inherit", shell: true }; + let echo = false; + if (opts.stdio === "inherit") { + opts.stdio = "pipe"; + echo = true; + } + return new Promise((resolve, reject) => { + let result = ""; + const proc = spawnCb(command, args, opts); + proc.on("close", (code, signal) => { + if (code !== 0) { + if (!echo) { + console.error(result); + } + const err = new Error( + `Non-zero exit code: ${code} ${signal || ""}`, + ); + err.result = result; + err.code = code; + err.signal = signal; + reject(err); + return; + } + resolve(result); + }); + if (proc.stdout) { + proc.stdout.on("data", (chunk) => { + const stringChunk = chunk.toString(); + result += stringChunk; + if (echo) { + process.stdout.write(stringChunk); + } + }); + } + if (proc.stderr) { + proc.stderr.on("data", (chunk) => { + const stringChunk = chunk.toString(); + result += stringChunk; + if (echo) { + process.stderr.write(stringChunk); + } + }); + } + proc.on("error", reject); + }); + }, + + exec: async (command, opts) => { + console.log(`(*) Exec: ${command}`); + + opts = opts || { stdio: "inherit", shell: true }; + return new Promise((resolve, reject) => { + let result = ""; + const proc = execCb(command, opts); + proc.on("close", (code, signal) => { + 
if (code !== 0) { + console.log(result); + const err = new Error( + `Non-zero exit code: ${code} ${signal || ""}`, + ); + err.result = result; + err.code = code; + err.signal = signal; + reject(err); + return; + } + resolve(result); + }); + if (proc.stdout) { + proc.stdout.on("data", (chunk) => (result += chunk.toString())); + } + if (proc.stderr) { + proc.stderr.on("data", (chunk) => (result += chunk.toString())); + } + proc.on("error", reject); + }); + }, + + // async rename + rename: async (from, to) => { + return new Promise((resolve, reject) => { + fs.rename(from, to, (err) => (err ? reject(err) : resolve())); + }); + }, + + // async readFile + readFile: async (filePath) => { + return new Promise((resolve, reject) => { + fs.readFile(filePath, "utf8", (err, data) => + err ? reject(err) : resolve(data.toString()), + ); + }); + }, + + // async writeFile + writeFile: async function (filePath, data) { + return new Promise((resolve, reject) => { + fs.writeFile(filePath, data, "utf8", (err) => + err ? reject(err) : resolve(filePath), + ); + }); + }, + + // async mkdirp + mkdirp: async (pathToMake) => { + return new Promise((resolve, reject) => { + mkdirpCb(pathToMake, (err, made) => + err ? reject(err) : resolve(made), + ); + }); + }, + + // async rimraf + rimraf: async (pathToRemove, opts) => { + opts = opts || {}; + return new Promise((resolve, reject) => { + rimrafCb(pathToRemove, opts, (err) => + err ? reject(err) : resolve(pathToRemove), + ); + }); + }, + + // async copyfiles + copyFiles: async (source, blobs, target) => { + return new Promise((resolve, reject) => { + process.chdir(source); + copyFilesCb(blobs.concat(target), { all: true }, (err) => + err ? reject(err) : resolve(target), + ); + }); + }, + + // async copyfile + copyFile: async (src, dest) => { + return new Promise((resolve, reject) => { + fs.copyFile(src, dest, (err) => (err ? 
reject(err) : resolve())); + }); + }, + + // async chmod + chmod: async (src, mod) => { + return new Promise((resolve, reject) => { + fs.chmod(src, mod, (err) => (err ? reject(err) : resolve())); + }); + }, + + // async readdir + readdir: async (dirPath, opts) => { + opts = opts || {}; + return new Promise((resolve, reject) => { + fs.readdir(dirPath, opts, (err, files) => + err ? reject(err) : resolve(files), + ); + }); + }, + + // async exists + exists: async (filePath) => { + return fs.existsSync(filePath); + }, + + // async gen SHA 256 hash for file + shaForFile: async (filePath) => { + return new Promise((resolve, reject) => { + const fd = fs.createReadStream(filePath); + const hash = crypto.createHash("sha256"); + hash.setEncoding("hex"); + fd.on("end", function () { + hash.end(); + resolve(hash.read()); + }); + fd.on("error", (err) => { + reject(err); + }); + fd.pipe(hash); + }); + }, + + // async gen SHA 256 hash for string + shaForString: async (content) => { + const hash = crypto.createHash("sha256"); + hash.update(content); + return hash.digest("hex"); + }, + + // async HTTPS get + getUrlAsString: async (url) => { + return new Promise((resolve, reject) => { + let content = ""; + const req = https.get(url, function (res) { + res.on("data", function (chunk) { + content += chunk.toString(); + }); + }); + req.on("error", reject); + req.on("close", () => resolve(content)); + }); + }, +}; diff --git a/build/Source/utils/component-formatter-factory.js b/build/Source/utils/component-formatter-factory.js new file mode 100644 index 0000000000..fd9fed3162 --- /dev/null +++ b/build/Source/utils/component-formatter-factory.js @@ -0,0 +1,255 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +/* Generate "Linux" entry for linux packages. E.g. +{ + "Component": { + "Type": "linux", + "Linux": { + "Name": "yarn", + "Version": "1.22.5-1", + "Distribution": "Debian", + "Release": "10", + "Pool-URL": "https://dl.yarnpkg.com/debian", + "Key-URL": "https://dl.yarnpkg.com/debian/pubkey.gpg" + } +} + */ +function linuxPackageComponentFormatter(packageInfo, distroInfo) { + if (packageInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "linux", + "Linux": { + "Name": packageInfo.name, + "Version": packageInfo.version, + "Distribution": distroInfo.id, + "Release": distroInfo.versionId, + "Pool-URL": packageInfo.poolUrl, + "Key-URL": packageInfo.poolKeyUrl, + }, + }, + }; +} + +/* Generate "Npm" entries. E.g. +{ + "Component": { + "Type": "npm", + "Npm": { + "Name": "eslint", + "Version": "7.7.0" + } + } +} +*/ +function npmComponentFormatter(packageInfo) { + if (packageInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "npm", + "Npm": { + "Name": packageInfo.name, + "Version": packageInfo.version, + }, + }, + }; +} + +/* Generate "Pip" entries. E.g. +{ + "Component": { + "Type": "Pip", + "Pip": { + "Name": "pylint", + "Version": "2.6.0" + } + } +} +*/ +function pipComponentFormatter(packageInfo) { + if (packageInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "Pip", + "Pip": { + "Name": packageInfo.name, + "Version": packageInfo.version, + }, + }, + }; +} + +/* Generate "Git" entries. E.g. 
+{ + "Component": { + "Type": "git", + "Git": { + "Name": "Oh My Zsh!", + "repositoryUrl": "https://github.com/ohmyzsh/ohmyzsh.git", + "commitHash": "cddac7177abc358f44efb469af43191922273705" + } + } +} +*/ +function gitComponentFormatter(repositoryInfo) { + if (repositoryInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "git", + "Git": { + "Name": repositoryInfo.name, + "repositoryUrl": repositoryInfo.repositoryUrl, + "commitHash": repositoryInfo.commitHash, + }, + }, + }; +} + +/* Generate "Other" entries. E.g. +{ + "Component": { + "Type": "other", + "Other": { + "Name": "Xdebug", + "Version": "2.9.6", + "DownloadUrl": "https://pecl.php.net/get/xdebug-2.9.6.tgz" + } + } +} +*/ +function otherComponentFormatter(componentInfo) { + if (componentInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "other", + "Other": { + "Name": componentInfo.name, + "Version": componentInfo.version, + "DownloadUrl": componentInfo.downloadUrl, + }, + }, + }; +} + +/* Generate "RubyGems" entries. E.g. +{ + "Component": { + "Type": "RubyGems", + "RubyGems": { + "Name": "rake", + "Version": "13.0.1" + } + } +} +*/ +function gemComponentFormatter(packageInfo) { + if (packageInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "RubyGems", + "RubyGems": { + "Name": packageInfo.name, + "Version": packageInfo.version, + }, + }, + }; +} + +/* Generate "Cargo" entries. E.g. +{ + "Component": { + "Type": "cargo", + "Cargo": { + "Name": "rustfmt", + "Version": "1.4.17-stable" + } + } +} +*/ +function cargoComponentFormatter(packageInfo) { + if (packageInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "cargo", + "Cargo": { + "Name": packageInfo.name, + "Version": packageInfo.version, + }, + }, + }; +} + +/* Generate "Go" entries. E.g. 
+"Component": { + "Type": "go", + "Go": { + "Name": "golang.org/x/tools/gopls", + "Version": "0.6.4" + } +} +*/ +function goComponentFormatter(packageInfo) { + if (packageInfo.cgIgnore) { + return null; + } + return { + "Component": { + "Type": "go", + "Go": { + "Name": packageInfo.name, + "Version": packageInfo.version, + }, + }, + }; +} + +// Remove unused properties like markdownIgnore that only apply to other formatters +function manualComponentFormatter(component) { + if (component.cgIgnore || component.CgIgnore || component.CGIgnore) { + return null; + } + component.markdownIgnore = undefined; + component.MarkdownIgnore = undefined; + return component; +} + +function getFormatter(distroInfo) { + return { + image: null, + distro: null, + linux: (packageInfo) => { + return linuxPackageComponentFormatter(packageInfo, distroInfo); + }, + npm: npmComponentFormatter, + pip: pipComponentFormatter, + pipx: pipComponentFormatter, + gem: gemComponentFormatter, + cargo: cargoComponentFormatter, + go: goComponentFormatter, + git: gitComponentFormatter, + other: otherComponentFormatter, + languages: otherComponentFormatter, + manual: manualComponentFormatter, + }; +} + +module.exports = { + getFormatter: getFormatter, +}; diff --git a/build/Source/utils/config.js b/build/Source/utils/config.js new file mode 100644 index 0000000000..32c3721b01 --- /dev/null +++ b/build/Source/utils/config.js @@ -0,0 +1,629 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +const os = require('os'); +const path = require('path'); +const glob = require('glob'); +const asyncUtils = require('./async'); +const jsonc = require('jsonc').jsonc; +const config = require('../../config.json'); + +config.definitionDependencies = config.definitionDependencies || {}; +config.definitionBuildSettings = config.definitionBuildSettings || {}; +config.definitionVersions = config.definitionVersions || {}; +config.definitionVariants = config.definitionVariants || {}; + +const stagingFolders = {}; +const definitionTagLookup = {}; +const allDefinitionPaths = {}; + +// Must be called first +async function loadConfig(repoPath) { + repoPath = repoPath || path.join(__dirname, '..', '..', '..'); + const definitionBuildConfigFile = getConfig('definitionBuildConfigFile', 'definition-manifest.json'); + + // Get list of definition folders + const containersPath = path.join(repoPath, getConfig('containersPathInRepo', 'containers')); + const definitions = await asyncUtils.readdir(containersPath, { withFileTypes: true }); + await asyncUtils.forEach(definitions, async (definitionFolder) => { + // If directory entry is a file (like README.md, skip + if (!definitionFolder.isDirectory()) { + return; + } + + const definitionId = definitionFolder.name; + const definitionPath = path.resolve(path.join(containersPath, definitionId)); + + // If a .deprecated file is found, remove the directory from staging and return + if(await asyncUtils.exists(path.join(definitionPath, '.deprecated'))) { + await asyncUtils.rimraf(definitionPath); + return; + } + + // Add to complete list of definitions + allDefinitionPaths[definitionId] = { + path: definitionPath, + relativeToRootPath: path.relative(repoPath, definitionPath) + } + // If definition-manifest.json exists, load it + const manifestPath = path.join(definitionPath, definitionBuildConfigFile); + if (await 
asyncUtils.exists(manifestPath)) {
+ await loadDefinitionManifest(manifestPath, definitionId);
+ }
+ });
+
+ // Populate image variants and tag lookup
+ for (let definitionId in config.definitionBuildSettings) {
+ const buildSettings = config.definitionBuildSettings[definitionId];
+ const definitionVariants = config.definitionVariants[definitionId];
+ const dependencies = config.definitionDependencies[definitionId];
+ buildSettings.architecture = buildSettings.architecture || ['linux/amd64'];
+
+ // Populate images list for variants for dependency registration
+ dependencies.imageVariants = definitionVariants ?
+ definitionVariants.map((variant) => dependencies.image.replace('${VARIANT}', variant)) :
+ [dependencies.image];
+
+ // Populate definition and variant lookup
+ if (buildSettings.tags) {
+ // Variants can be used as a VARIANT arg in tags, so support that too. However, these can
+ // get overwritten in certain tag configs resulting in bad lookups, so **process them first**.
+ const variants = definitionVariants ?
['${VARIANT}', '$VARIANT'].concat(definitionVariants) : [undefined]; + + variants.forEach((variant) => { + const blankTagList = getTagsForVersion(definitionId, '', 'ANY', 'ANY', variant); + blankTagList.forEach((blankTag) => { + definitionTagLookup[blankTag] = { + id: definitionId, + variant: variant + }; + }); + const devTagList = getTagsForVersion(definitionId, 'dev', 'ANY', 'ANY', variant); + devTagList.forEach((devTag) => { + definitionTagLookup[devTag] = { + id: definitionId, + variant: variant + } + }); + }) + } + } + config.needsDedicatedPage = config.needsDedicatedPage || []; +} + +// Get a value from the config file or a similarly named env var +function getConfig(property, defaultVal) { + defaultVal = defaultVal || null; + // Generate env var name from property - camelCase to CAMEL_CASE + const envVar = property.split('').reduce((prev, next) => { + if (next >= 'A' && next <= 'Z') { + return prev + '_' + next; + } else { + return prev + next.toLocaleUpperCase(); + } + }, ''); + + return process.env[envVar] || config[property] || defaultVal; +} + +// Loads definition-manifest.json and adds it to config +async function loadDefinitionManifest(manifestPath, definitionId) { + const buildJson = await jsonc.read(manifestPath); + if (buildJson.variants) { + config.definitionVariants[definitionId] = buildJson.variants; + } + if (buildJson.build) { + config.definitionBuildSettings[definitionId] = buildJson.build; + } + if (buildJson.dependencies) { + config.definitionDependencies[definitionId] = buildJson.dependencies; + } + if (buildJson.definitionVersion) { + config.definitionVersions[definitionId] = buildJson.definitionVersion; + } +} + +// Returns location of the definition based on Id +function getDefinitionPath(definitionId, relative) { + return relative ? 
allDefinitionPaths[definitionId].relativeToRootPath : allDefinitionPaths[definitionId].path +} + +function getAllDefinitionPaths() { + return allDefinitionPaths; +} + +// Convert a release string (v1.0.0) or branch (main) into a version. If a definitionId and +// release string is passed in, use the version specified in defintion-build.json if one exists. +function getVersionFromRelease(release, definitionId) { + definitionId = definitionId || 'NOT SPECIFIED'; + + // Already is a version + if (!isNaN(parseInt(release.charAt(0)))) { + return config.definitionVersions[definitionId] || release; + } + + // Is a release string + if (release.charAt(0) === 'v' && !isNaN(parseInt(release.charAt(1)))) { + return config.definitionVersions[definitionId] || release.substr(1); + } + + // Is a branch + return 'dev'; +} + +// Look up distro and fallback to debian if not specified +function getLinuxDistroForDefinition(definitionId) { + return config.definitionBuildSettings[definitionId].rootDistro || 'debian'; +} + +// Generate 'latest' flavor of a given definition's tag +function getLatestTag(definitionId, registry, registryPath) { + if (typeof config.definitionBuildSettings[definitionId] === 'undefined') { + return null; + } + + // Given there could be multiple registries in the tag list, get all the different latest variations + return config.definitionBuildSettings[definitionId].tags.reduce((list, tag) => { + const latest = `${registry}/${registryPath}/${tag.replace(/:.+/, ':latest')}` + if (list.indexOf(latest) < 0) { + list.push(latest); + } + return list; + }, []); + +} + +function getVariants(definitionId) { + return config.definitionVariants[definitionId] || null; +} + +// Create all the needed variants of the specified version identifier for a given definition +function getTagsForVersion(definitionId, version, registry, registryPath, variant) { + if (typeof config.definitionBuildSettings[definitionId] === 'undefined') { + return null; + } + + // If the definition states 
that only versioned tags are returned and the version is 'dev', + // add the definition Id to ensure that we do not incorrectly hijack a tag from another definition. + if (version === 'dev') { + version = config.definitionBuildSettings[definitionId].versionedTagsOnly ? `dev-${definitionId.replace(/-/mg,'')}` : 'dev'; + } + + + // Use the first variant if none passed in, unless there isn't one + if (!variant) { + const variants = getVariants(definitionId); + variant = variants ? variants[0] : 'NOVARIANT'; + } + let tags = config.definitionBuildSettings[definitionId].tags; + + // See if there are any variant specific tags that should be added to the output + const variantTags = config.definitionBuildSettings[definitionId].variantTags; + // ${VARIANT} or $VARIANT may be passed in as a way to do lookups. Add all in this case. + if (['${VARIANT}', '$VARIANT'].indexOf(variant) > -1) { + if (variantTags) { + for (let variantEntry in variantTags) { + tags = tags.concat(variantTags[variantEntry] || []); + } + } + } else { + if (variantTags) { + tags = tags.concat(variantTags[variant] || []); + } + } + + return tags.reduce((list, tag) => { + // One of the tags that needs to be supported is one where there is no version, but there + // are other attributes. For example, python:3 in addition to python:0.35.0-3. So, a version + // of '' is allowed. However, there are also instances that are just the version, so in + // these cases latest would be used instead. However, latest is passed in separately. + let baseTag = tag.replace('${VERSION}', version) + .replace(':-', ':') + .replace(/\$\{?VARIANT\}?/, variant || 'NOVARIANT') + .replace('-NOVARIANT', ''); + if (baseTag.charAt(baseTag.length - 1) !== ':') { + list.push(`${registry}/${registryPath}/${baseTag}`); + } + return list; + }, []); +} + +/* +Generate complete list of tags for a given definition. 
+ +versionPartHandling has a few different modes: + - true/'all-latest' - latest, X.X.X, X.X, X + - false/'all' - X.X.X, X.X, X + - 'full-only' - X.X.X + - 'major-minor' - X.X + - 'major' - X +*/ +function getTagList(definitionId, release, versionPartHandling, registry, registryPath, variant) { + const version = getVersionFromRelease(release, definitionId); + + // If version is 'dev', there's no need to generate semver tags for the version + // (e.g. for 1.0.2, we should also tag 1.0 and 1). So just return the tags for 'dev'. + if (version === 'dev') { + return getTagsForVersion(definitionId, version, registry, registryPath, variant); + } + + // If this is a release version, split it out into the three parts of the semver + const versionParts = version.split('.'); + if (versionParts.length !== 3) { + throw (`Invalid version format in ${version}.`); + } + + let versionList, updateUnversionedTags, updateLatest; + switch(versionPartHandling) { + case true: + case 'all-latest': + updateLatest = true; + updateUnversionedTags = true; + versionList = [version,`${versionParts[0]}.${versionParts[1]}`, `${versionParts[0]}` ]; + break; + case false: + case 'all': + updateLatest = false; + updateUnversionedTags = true; + versionList = [version,`${versionParts[0]}.${versionParts[1]}`, `${versionParts[0]}` ]; + break; + case 'full-only': + updateLatest = false; + updateUnversionedTags = false; + versionList = [version]; + break; + case 'major-minor': + updateLatest = false; + updateUnversionedTags = false; + versionList = [`${versionParts[0]}.${versionParts[1]}`]; + break; + case 'major': + updateLatest = false; + updateUnversionedTags = false; + versionList = [ `${versionParts[0]}`]; + break; + } + + // Normally, we also want to return a tag without a version number, but for + // some definitions that exist in the same repository as others, we may + // only want to return a list of tags with part of the version number in it + if(updateUnversionedTags && 
!config.definitionBuildSettings[definitionId].versionedTagsOnly) { + // This is the equivalent of latest for qualified tags- e.g. python:3 instead of python:0.35.0-3 + versionList.push(''); + } + + const allVariants = getVariants(definitionId); + const firstVariant = allVariants ? allVariants[0] : variant; + let tagList = []; + + versionList.forEach((tagVersion) => { + tagList = tagList.concat(getTagsForVersion(definitionId, tagVersion, registry, registryPath, variant)); + }); + + // If this variant should also be used for the the latest tag, add it. The "latest" value could be + // true, false, or a specific variant. "true" assumes the first variant is the latest. + const definitionLatestProperty = config.definitionBuildSettings[definitionId].latest; + return tagList.concat((updateLatest + && definitionLatestProperty + && (!allVariants + || variant === definitionLatestProperty + || (definitionLatestProperty === true && variant === firstVariant))) + ? getLatestTag(definitionId, registry, registryPath) + : []); +} + +// Walk the image build config and paginate and sort list so parents build before (and with) children +function getSortedDefinitionBuildList(page, pageTotal, definitionsToSkip) { + page = page || 1; + pageTotal = pageTotal || 1; + definitionsToSkip = definitionsToSkip || []; + + // Bucket definitions by parent + const parentBuckets = {}; + const dupeBuckets = []; + const noParentList = []; + let total = 0; + for (let definitionId in config.definitionBuildSettings) { + // If paged build, ensure this definition should be included + if (typeof config.definitionBuildSettings[definitionId] === 'object') { + if (definitionsToSkip.indexOf(definitionId) < 0) { + let parentId = config.definitionBuildSettings[definitionId].parent; + if (parentId) { + // if multi-parent, merge the buckets + if (typeof parentId !== 'string') { + parentId = createMultiParentBucket(parentId, parentBuckets, dupeBuckets); + } + bucketDefinition(definitionId, parentId, parentBuckets); + 
} else { + noParentList.push(definitionId); + } + total++; + } else { + console.log(`(*) Skipping ${definitionId}.`) + } + } + } + // Remove duplicate buckets that are no longer needed + dupeBuckets.forEach((currentBucketId) => { + parentBuckets[currentBucketId] = undefined; + }); + // Remove parents from no parent list - they are in their buckets already + for (let parentId in parentBuckets) { + if (parentId) { + noParentList.splice(noParentList.indexOf(parentId), 1); + } + } + + const allPages = []; + let pageTotalMinusDedicatedPages = pageTotal; + // Remove items that need their own buckets and add the buckets + if (config.needsDedicatedPage) { + // Remove skipped items from list that needs dedicated page + const filteredNeedsDedicatedPage = config.needsDedicatedPage.reduce((prev, current) => (definitionsToSkip.indexOf(current) < 0 ? prev.concat(current) : prev), []); + if (pageTotal > filteredNeedsDedicatedPage.length) { + pageTotalMinusDedicatedPages = pageTotal - filteredNeedsDedicatedPage.length; + filteredNeedsDedicatedPage.forEach((definitionId) => { + allPages.push([definitionId]); + const definitionIndex = noParentList.indexOf(definitionId); + if (definitionIndex > -1) { + noParentList.splice(definitionIndex, 1); + total--; + } + }); + } else { + console.log(`(!) Not enough pages to give dedicated pages to ${JSON.stringify(filteredNeedsDedicatedPage, null, 4)}. Adding them to other pages.`); + } + } + + // Create pages and distribute entries with no parents + const pageSize = Math.floor(total / pageTotalMinusDedicatedPages); + for (let bucketId in parentBuckets) { + let bucket = parentBuckets[bucketId]; + if (typeof bucket === 'object') { + if (noParentList.length > 0 && bucket.length < pageSize) { + const toConcat = noParentList.splice(0, pageSize - bucket.length); + bucket = bucket.concat(toConcat); + } + allPages.push(bucket); + } + } + while (noParentList.length > 0) { + const noParentPage = noParentList.splice(0, noParentList.length > pageSize ? 
pageSize : noParentList.length); + allPages.push(noParentPage); + } + + if (allPages.length > pageTotal) { + // If too many pages, add extra pages to last one + console.log(`(!) Not enough pages to for target page size. Adding excess definitions to last page.`); + for (let i = pageTotal; i < allPages.length; i++) { + allPages[pageTotal - 1] = allPages[pageTotal - 1].concat(allPages[i]); + allPages.splice(i, 1); + } + } else if (allPages.length < pageTotal) { + // If too few, add some empty pages + for (let i = allPages.length; i < pageTotal; i++) { + allPages.push([]); + } + } + + console.log(`(*) Builds paginated as follows: ${JSON.stringify(allPages, null, 4)}\n(*) Processing page ${page} of ${pageTotal}.\n`); + + return allPages[page - 1]; +} + +// Handle multi-parent definitions +function createMultiParentBucket(variantParentMap, parentBuckets, dupeBuckets) { + // Get parent of first variant + const parentId = variantParentMap[Object.keys(variantParentMap)[0]]; + const firstParentBucket = parentBuckets[parentId] || [parentId]; + // Merge other parent buckets into the first parent + for (let currentVariant in variantParentMap) { + const currentParentId = variantParentMap[currentVariant]; + if (currentParentId !== parentId) { + const currentParentBucket = parentBuckets[currentParentId]; + // Merge buckets if not already merged + if (currentParentBucket && dupeBuckets.indexOf(currentParentId) < 0) { + currentParentBucket.forEach((current) => firstParentBucket.push(current)); + } else if (firstParentBucket.indexOf(currentParentId)<0) { + firstParentBucket.push(currentParentId); + } + dupeBuckets.push(currentParentId); + parentBuckets[currentParentId]=firstParentBucket; + } + } + parentBuckets[parentId] = firstParentBucket; + return parentId; +} + +// Add definition to correct parent bucket when sorting +function bucketDefinition(definitionId, parentId, parentBuckets) { + // Handle parents that have parents + // TODO: Recursive parents rather than just 
parents-of-parents
+ if (config.definitionBuildSettings[parentId].parent) {
+ const oldParentId = parentId;
+ parentId = config.definitionBuildSettings[parentId].parent;
+ parentBuckets[parentId] = parentBuckets[parentId] || [parentId];
+ if (parentBuckets[parentId].indexOf(oldParentId) < 0) {
+ parentBuckets[parentId].push(oldParentId);
+ }
+ }
+
+ // Add to parent bucket
+ parentBuckets[parentId] = parentBuckets[parentId] || [parentId];
+ if (parentBuckets[parentId].indexOf(definitionId) < 0) {
+ parentBuckets[parentId].push(definitionId);
+ }
+}
+
+// Get parent tag for a given child definition
+function getParentTagForVersion(definitionId, version, registry, registryPath, variant) {
+ let parentId = config.definitionBuildSettings[definitionId].parent;
+ if (parentId) {
+ if(typeof parentId !== 'string') {
+ // Use variant to figure out correct parent, or return first parent if child has no variant
+ parentId = variant ? parentId[variant] : parentId[Object.keys(parentId)[0]];
+ }
+
+ // Determine right parent variant to use (assuming there are variants)
+ const parentVariantList = getVariants(parentId);
+ let parentVariant;
+ if(parentVariantList) {
+ // If a variant is specified in the parentVariant property in build, use it - otherwise default to the child definition's variant
+ parentVariant = config.definitionBuildSettings[definitionId].parentVariant || variant;
+ if(typeof parentVariant !== 'string') {
+ // Use variant to figure out correct variant if not the same across all parents, or return first variant if child has no variant
+ parentVariant = variant ? parentVariant[variant] : parentVariant[Object.keys(parentId)[0]];
+ }
+ if(!parentVariantList.includes(parentVariant)) {
+ throw `Unable to determine variant for parent. 
Variant ${parentVariant} is not in ${parentId} list: ${parentVariantList}`; + } + } + + // Parent image version may be different than child's + const parentVersion = getVersionFromRelease(version, parentId); + return getTagsForVersion(parentId, parentVersion, registry, registryPath, parentVariant)[0]; + } + return null; +} + +// Takes an existing tag and updates it with a new registry version and optionally a variant +function getUpdatedTag(currentTag, currentRegistry, currentRegistryPath, updatedVersion, updatedRegistry, updatedRegistryPath, variant) { + updatedRegistry = updatedRegistry || currentRegistry; + updatedRegistryPath = updatedRegistryPath || currentRegistryPath; + + const definition = getDefinitionFromTag(currentTag, currentRegistry, currentRegistryPath); + + // If definition not found, fall back on swapping out more generic logic - e.g. for when a image already has a version tag in it + if (!definition) { + const repository = new RegExp(`${currentRegistry}/${currentRegistryPath}/(.+):`).exec(currentTag)[1]; + const updatedTag = currentTag.replace(new RegExp(`${currentRegistry}/${currentRegistryPath}/${repository}:(dev-|${updatedVersion}-)?`), `${updatedRegistry}/${updatedRegistryPath}/${repository}:${updatedVersion}-`); + console.log(` Using RegEx to update ${currentTag}\n to ${updatedTag}`); + return updatedTag; + } + + // See if definition found and no variant passed in, see if definition lookup returned a variant match + if (!variant) { + variant = definition.variant; + } + + const updatedTags = getTagsForVersion(definition.id, updatedVersion, updatedRegistry, updatedRegistryPath, variant); + if (updatedTags && updatedTags.length > 0) { + console.log(` Updating ${currentTag}\n to ${updatedTags[0]}`); + return updatedTags[0]; + } + // In the case where this is already a tag with a version number in it, + // we won't get an updated tag returned, so we'll just reuse the current tag. 
+ return currentTag; +} + +// Lookup definition from a tag +function getDefinitionFromTag(tag, registry, registryPath) { + registry = registry || '.+'; + registryPath = registryPath || '.+'; + const captureGroups = new RegExp(`${registry}/${registryPath}/(.+):(.+)`).exec(tag); + const repo = captureGroups[1]; + const tagPart = captureGroups[2]; + const definition = definitionTagLookup[`ANY/ANY/${repo}:${tagPart}`]; + if (definition) { + return definition; + } + + // If lookup fails, try removing a numeric first part - dev- is already handled + return definitionTagLookup[`ANY/ANY/${repo}:${tagPart.replace(/^\d+-/,'')}`]; +} + +// Return just the major version of a release number +function majorFromRelease(release, definitionId) { + const version = getVersionFromRelease(release, definitionId); + + if (version === 'dev') { + return 'dev'; + } + + const versionParts = version.split('.'); + return versionParts[0]; +} + +// Return an object from a map based on the linux distro for the definition +function objectByDefinitionLinuxDistro(definitionId, objectsByDistro) { + const distro = getLinuxDistroForDefinition(definitionId); + const obj = objectsByDistro[distro]; + return obj; +} + +function getDefinitionDependencies(definitionId) { + return config.definitionDependencies[definitionId]; +} + +function getAllDependencies() { + return config.definitionDependencies; +} + +function getPoolKeyForPoolUrl(poolUrl) { + const poolKey = config.poolKeys[poolUrl]; + return poolKey; +} + +function getFallbackPoolUrl(package) { + const poolUrl = config.poolUrlFallback[package]; + console.log (`(*) Fallback pool URL for ${package} is ${poolUrl}`); + return poolUrl; +} + + +async function getStagingFolder(release) { + if (!stagingFolders[release]) { + const stagingFolder = path.join(os.tmpdir(), 'vscode-dev-containers', release); + console.log(`(*) Copying files to ${stagingFolder}\n`); + await asyncUtils.rimraf(stagingFolder); // Clean out folder if it exists + await 
asyncUtils.mkdirp(stagingFolder); // Create the folder + await asyncUtils.copyFiles( + path.resolve(__dirname, '..', '..', '..'), + getConfig('filesToStage'), + stagingFolder); + + stagingFolders[release] = stagingFolder; + } + return stagingFolders[release]; +} + +function shouldFlattenDefinitionBaseImage(definitionId) { + return (getConfig('flattenBaseImage', []).indexOf(definitionId) >= 0) +} + +function getDefaultDependencies(dependencyType) { + const packageManagerConfig = getConfig('commonDependencies'); + return packageManagerConfig ? packageManagerConfig[dependencyType] : null; +} + +function getBuildSettings(definitionId) { + return config.definitionBuildSettings[definitionId]; +} + +module.exports = { + loadConfig: loadConfig, + getTagList: getTagList, + getVariants: getVariants, + getAllDefinitionPaths: getAllDefinitionPaths, + getBuildSettings: getBuildSettings, + getDefinitionFromTag: getDefinitionFromTag, + getDefinitionPath: getDefinitionPath, + getSortedDefinitionBuildList: getSortedDefinitionBuildList, + getParentTagForVersion: getParentTagForVersion, + getUpdatedTag: getUpdatedTag, + majorFromRelease: majorFromRelease, + objectByDefinitionLinuxDistro: objectByDefinitionLinuxDistro, + getDefinitionDependencies: getDefinitionDependencies, + getAllDependencies: getAllDependencies, + getDefaultDependencies: getDefaultDependencies, + getStagingFolder: getStagingFolder, + getLinuxDistroForDefinition: getLinuxDistroForDefinition, + getVersionFromRelease: getVersionFromRelease, + getTagsForVersion: getTagsForVersion, + getFallbackPoolUrl: getFallbackPoolUrl, + getPoolKeyForPoolUrl: getPoolKeyForPoolUrl, + getConfig: getConfig, + shouldFlattenDefinitionBaseImage: shouldFlattenDefinitionBaseImage +}; diff --git a/build/Source/utils/image-content-extractor.js b/build/Source/utils/image-content-extractor.js new file mode 100644 index 0000000000..492a6b1f71 --- /dev/null +++ b/build/Source/utils/image-content-extractor.js @@ -0,0 +1,655 @@ 
+/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +const asyncUtils = require('./async'); +const configUtils = require('./config'); + +// Docker images and native OS libraries need to be registered as "other" while others are scenario dependant +const linuxPackageInfoExtractionConfig = { + apt: { + // Command to get package versions: dpkg-query --show -f='${Package}\t${Version}\n' + // Output: + // Command to get download URLs: apt-get update && apt-get install -y --reinstall --print-uris + // Output: Multi-line output, but each line is '.deb' __.deb + namePrefix: 'Debian Package:', + listCommand: "dpkg-query --show -f='\\${Package} ~~v~~ \\${Version}\n'", + lineRegEx: /(.+) ~~v~~ (.+)/, + getUriCommand: 'apt-get update && apt-get install -y --reinstall --print-uris', + downloadUriMatchRegEx: "'(.+\\.deb)'\\s*${PACKAGE}_.+\\s", + poolUriMatchRegEx: "'(.+)/pool.+\\.deb'\\s*${PACKAGE}_.+\\s" + }, + apk: { + // Command to get package versions: apk info -e -v + // Output: - + // Command to get download URLs: apk policy + namePrefix: 'Alpine Package:', + listCommand: "apk info -e -v", + lineRegEx: /(.+)-([0-9].+)/, + getUriCommand: 'apk update && apk policy', + downloadUriMatchRegEx: '${PACKAGE} policy:\\n.*${VERSION}:\\n.*lib/apk/db/installed\\n\\s*(.+)\\n', + downloadUriSuffix: '/x86_64/${PACKAGE}-${VERSION}.apk', + poolUriMatchRegEx: '${PACKAGE} policy:\\n.*${VERSION}:\\n.*lib/apk/db/installed\\n\\s*(.+)\\n', + } +} +linuxPackageInfoExtractionConfig.alpine = linuxPackageInfoExtractionConfig.apk; +linuxPackageInfoExtractionConfig.debian = linuxPackageInfoExtractionConfig.apt; +linuxPackageInfoExtractionConfig.ubuntu = 
linuxPackageInfoExtractionConfig.apt; + +/* This function converts the contents of /etc/os-release from this: + + PRETTY_NAME="Debian GNU/Linux 10 (buster)" + NAME="Debian GNU/Linux" + VERSION_ID="10" + VERSION="10 (buster)" + VERSION_CODENAME=buster + ID=debian + HOME_URL="https://www.debian.org/" + SUPPORT_URL="https://www.debian.org/support" + BUG_REPORT_URL="https://bugs.debian.org/" + +to an object like this: + +{ + prettyName: "Debian GNU/Linux 10 (buster)" + name: "Debian GNU/Linux" + versioonId: "10" + version: "10 (buster)" + versionCodename: buster + id: debian + homeUrl: "https://www.debian.org/" + supportUrl: "https://www.debian.org/support" + bugReportUrl: "https://bugs.debian.org/" +} +*/ +async function getLinuxDistroInfo(imageTagOrContainerName) { + const info = {}; + const osInfoCommandOutput = await getCommandOutputFromContainer(imageTagOrContainerName, 'cat /etc/os-release', true); + const osInfoLines = osInfoCommandOutput.split('\n'); + osInfoLines.forEach((infoLine) => { + const infoLineParts = infoLine.split('='); + if (infoLineParts.length === 2) { + const propName = snakeCaseToCamelCase(infoLineParts[0].trim()); + info[propName] = infoLineParts[1].replace(/"/g,'').trim(); + } + }) + return info; +} + +// Convert SNAKE_CASE to snakeCase ... well, technically camelCase :) +function snakeCaseToCamelCase(variableName) { + return variableName.split('').reduce((prev, next) => { + if(prev.charAt(prev.length-1) === '_') { + return prev.substr(0, prev.length-1) + next.toLocaleUpperCase(); + } + return prev + next.toLocaleLowerCase(); + }, ''); +} + +/* A set of info objects linux packages. E.g. 
+{ + name: "yarn", + version: "1.22.5-1", + annotation: "Yarn" + poolUrl: "https://dl.yarnpkg.com/debian", + poolKeyUrl: "https://dl.yarnpkg.com/debian/pubkey.gpg" +} + +Defaults to "cgIgnore": true, "markdownIgnore": false given base packages don't need to be registered +*/ +async function getLinuxPackageInfo(imageTagOrContainerName, packageList, linuxDistroInfo) { + // Merge in default dependencies + packageList = packageList || []; + const packageManager = getLinuxPackageManagerForDistro(linuxDistroInfo.id); + const defaultPackages = configUtils.getDefaultDependencies(packageManager) || []; + packageList = defaultPackages.concat(packageList); + + // Return empty array if no packages + if (packageList.length === 0) { + return []; + } + + // Get OS info if not passed in + if(!linuxDistroInfo) { + linuxDistroInfo = await getLinuxDistroInfo(imageTagOrContainerName); + } + + // Generate a settings object from packageList + const settings = packageList.reduce((obj, current) => { + if(typeof current === 'string') { + obj[current] = { name: current }; + } else { + obj[current.name] = current; + } + return obj; + }, {}); + + // Space separated list of packages for use in commands + const packageListCommandPart = packageList.reduce((prev, current) => { + return prev += ` ${typeof current === 'string' ? 
current : current.name}`; + }, ''); + + // Use the appropriate package lookup settings for distro + const extractionConfig = linuxPackageInfoExtractionConfig[packageManager]; + + // Generate and exec command to get installed package versions + console.log('(*) Gathering information about Linux package versions...'); + const packageVersionListOutput = await getCommandOutputFromContainer(imageTagOrContainerName, + extractionConfig.listCommand + packageListCommandPart + " || echo 'Some packages were not found.'", true); + + // Generate and exec command to extract download URIs + console.log('(*) Gathering information about Linux package download URLs...'); + const packageUriCommandOutput = await getCommandOutputFromContainer(imageTagOrContainerName, + extractionConfig.getUriCommand + packageListCommandPart + " || echo 'Some packages were not found.'", true); + + const componentList = []; + const packageVersionList = packageVersionListOutput.split('\n'); + packageVersionList.forEach((packageVersion) => { + packageVersion = packageVersion.trim(); + if (packageVersion !== '') { + const versionCaptureGroup = new RegExp(extractionConfig.lineRegEx).exec(packageVersion); + if (!versionCaptureGroup) { + if(packageVersion === 'Some packages were not found.') { + console.log('(!) Warning: Some specified packages were not found.'); + } else { + console.log(`(!) Warning: Unable to parse output "${packageVersion}" - skipping.`); + } + return; + } + const [, package, version ] = versionCaptureGroup; + const packageSettings = settings[package] || {}; + const cgIgnore = typeof packageSettings.cgIgnore === 'undefined' ? true : packageSettings.cgIgnore; // default to true + const poolUrl = getPoolUrlFromPackageVersionListOutput(packageUriCommandOutput, extractionConfig, package, version); + if(!cgIgnore && !poolUrl) { + throw new Error('(!) 
No pool URL found to register package!');
+ }
+ componentList.push({
+ name: package,
+ version: version,
+ poolUrl: poolUrl,
+ poolKeyUrl: configUtils.getPoolKeyForPoolUrl(poolUrl),
+ annotation: packageSettings.annotation,
+ cgIgnore: cgIgnore,
+ markdownIgnore: packageSettings.markdownIgnore
+ });
+ }
+ });
+
+ return componentList;
+}
+
+// Gets a package pool URL out of a download URL - Needed for registering in cgmanifest.json
+function getPoolUrlFromPackageVersionListOutput(packageUriCommandOutput, config, package, version) {
+ // Handle regex reserved characters in regex strings and that ":" is treated as "1%3a" on Debian/Ubuntu
+ const sanitizedPackage = package.replace(/\+/g, '\\+').replace(/\./g, '\\.');
+ const sanitizedVersion = version.replace(/\+/g, '\\+').replace(/\./g, '\\.').replace(/:/g, '%3a');
+ const uriCaptureGroup = new RegExp(
+ config.poolUriMatchRegEx.replace('${PACKAGE}', sanitizedPackage).replace('${VERSION}', sanitizedVersion), 'm')
+ .exec(packageUriCommandOutput);
+
+ if (!uriCaptureGroup) {
+ const fallbackPoolUrl = configUtils.getFallbackPoolUrl(package);
+ if (fallbackPoolUrl) {
+ return fallbackPoolUrl;
+ }
+ console.log(`(!) No URI found for ${package} ${version}.`);
+ return null;
+ }
+
+ // Extract URIs
+ return uriCaptureGroup[1];
+}
+
+/* Generate "Npm" info objects. E.g. 
+{ + name: "eslint", + version: "7.23.0" +} +*/ +async function getNpmGlobalPackageInfo(imageTagOrContainerName, packageList) { + // Merge in default dependencies + packageList = packageList || []; + const defaultPackages = configUtils.getDefaultDependencies('npm') || []; + packageList = defaultPackages.concat(packageList); + + // Return empty array if no packages + if (packageList.length === 0) { + return []; + } + + console.log(`(*) Gathering information about globally installed npm packages...`); + + const packageListString = packageList.reduce((prev, current) => prev + ' ' + current, ''); + const npmOutputRaw = await getCommandOutputFromContainer(imageTagOrContainerName, `bash -l -c 'set -e && npm ls --global --depth 2 --json ${packageListString}' 2>/dev/null`); + const npmOutput = JSON.parse(npmOutputRaw); + + return packageList.map((package) => { + let packageJson = npmOutput.dependencies[package]; + if (!packageJson) { + // Possible desired package is referenced by another top level package, so check dependencies too. + // E.g. tslint-to-eslint-config can cause typescript to not appear at top level in npm ls + for (let packageInNpmOutput in npmOutput.dependencies) { + const packageDependencies = npmOutput.dependencies[packageInNpmOutput].dependencies; + if(packageDependencies) { + packageJson = packageDependencies[package]; + if(packageJson) { + break; + } + } + } + } + if(!packageJson || !packageJson.version) { + throw new Error(`Unable to parse version for ${package} from npm ls output: ${npmOutputRaw}`); + } + return { + name: package, + version:packageJson.version + } + }); +} + + +/* Generate pip or pipx info objects. E.g. +{ + name: "pylint", + version: "2.6.0" +} +*/ +async function getPipPackageInfo(imageTagOrContainerName, packageList, usePipx) { + // Merge in default dependencies + packageList = packageList || []; + const defaultPackages = configUtils.getDefaultDependencies(usePipx ? 
'pipx' : 'pip') || []; + packageList = defaultPackages.concat(packageList); + + // Return empty array if no packages + if (packageList.length === 0) { + return []; + } + + // Generate and exec command to get installed package versions + console.log('(*) Gathering information about pip packages...'); + const versionLookup = usePipx ? await getPipxVersionLookup(imageTagOrContainerName) : await getPipVersionLookup(imageTagOrContainerName); + + return packageList.map((package) => { + return { + name: package, + version: versionLookup[package] + }; + }); +} + +async function getPipVersionLookup(imageTagOrContainerName) { + const packageVersionListOutput = await getCommandOutputFromContainer(imageTagOrContainerName, 'pip list --format json'); + + const packageVersionList = JSON.parse(packageVersionListOutput); + + return packageVersionList.reduce((prev, current) => { + prev[current.name] = current.version; + return prev; + }, {}); +} + +async function getPipxVersionLookup(imageTagOrContainerName) { + // --format json doesn't work with pipx, so have to do text parsing + const packageVersionListOutput = await getCommandOutputFromContainer(imageTagOrContainerName, 'pipx list'); + + const packageVersionListOutputLines = packageVersionListOutput.split('\n'); + return packageVersionListOutputLines.reduce((prev, current) => { + const versionCaptureGroup = /package\s(.+)\s(.+),/.exec(current); + if (versionCaptureGroup) { + prev[versionCaptureGroup[1]] = versionCaptureGroup[2]; + } + return prev; + }, {}); +} + +/* Generate git info objects. E.g. 
+{ + name: "Oh My Zsh!", + path: "/home/codespace/.oh-my-zsh", + repositoryUrl: "https://github.com/ohmyzsh/ohmyzsh.git", + commitHash: "cddac7177abc358f44efb469af43191922273705" +} +*/ +async function getGitRepositoryInfo(imageTagOrContainerName, gitRepos) { + // Merge in default dependencies + const defaultPackages = configUtils.getDefaultDependencies('git'); + if(defaultPackages) { + const merged = defaultPackages; + for(let otherName in gitRepos) { + merged[otherName] = gitRepos[otherName]; + } + gitRepos = merged; + } + // Return empty array if no components + if (!gitRepos) { + return []; + } + + const componentList = []; + for(let repoName in gitRepos) { + const repoPath = gitRepos[repoName]; + if (typeof repoPath === 'string') { + console.log(`(*) Getting remote and commit for ${repoName} at ${repoPath}...`); + // Go to the specified folder, see if the commands have already been run, if not run them and get output + const remoteAndCommitOutput = await getCommandOutputFromContainer(imageTagOrContainerName, `git config --global --add safe.directory \\"${repoPath}\\" && cd \\"${repoPath}\\" && if [ -f \\".git-remote-and-commit\\" ]; then cat .git-remote-and-commit; else git remote get-url origin && git log -n 1 --pretty=format:%H -- . | tee /dev/null; fi`,true); + const [gitRemote, gitCommit] = remoteAndCommitOutput.split('\n'); + componentList.push({ + name: repoName, + path: repoPath, + repositoryUrl: gitRemote, + commitHash: gitCommit + }); + } + } + + return componentList; +} + +/* Generate "other" info objects. E.g. 
+{ + name: "Xdebug", + version: "2.9.6", + downloadUrl: "https://pecl.php.net/get/xdebug-2.9.6.tgz" +} +*/ +async function getOtherComponentInfo(imageTagOrContainerName, otherComponents, otherType) { + otherType = otherType || 'other'; + // Merge in default dependencies + const defaultPackages = configUtils.getDefaultDependencies(otherType); + if(defaultPackages) { + const merged = defaultPackages; + for(let otherName in otherComponents) { + merged[otherName] = otherComponents[otherName]; + } + otherComponents = merged; + } + // Return empty array if no components + if (!otherComponents) { + return []; + } + + console.log(`(*) Gathering information about "other" components...`); + const componentList = []; + for(let otherName in otherComponents) { + const otherSettings = mergeOtherDefaultSettings(otherName, otherComponents[otherName]); + if (typeof otherSettings === 'object') { + console.log(`(*) Getting version for ${otherName}...`); + // Run specified command to get the version number + const otherVersion = (await getCommandOutputFromContainer(imageTagOrContainerName, otherSettings.versionCommand)); + componentList.push({ + name: otherName, + version: otherVersion, + downloadUrl: otherSettings.downloadUrl, + path: otherSettings.path, + annotation: otherSettings.annotation, + cgIgnore: otherSettings.cgIgnore, + markdownIgnore: otherSettings.markdownIgnore + }); + } + } + + return componentList; +} + +// Merge in default config for specified otherName if it exists +function mergeOtherDefaultSettings(otherName, settings) { + const otherDefaultSettings = configUtils.getConfig('otherDependencyDefaultSettings', null); + if (!otherDefaultSettings || !otherDefaultSettings[otherName] ) { + return settings; + } + // Create a copy of default settings for merging + const mergedSettings = Object.assign({}, otherDefaultSettings[otherName]); + settings = settings || {}; + for (let settingName in settings) { + mergedSettings[settingName] = settings[settingName]; + } + return 
mergedSettings; +} + +/* Generate Ruby gems info objects. E.g. +{ + name: "rake", + version: "13.0.1" +} +*/ +async function getGemPackageInfo(imageTagOrContainerName, packageList) { + // Merge in default dependencies + packageList = packageList || []; + const defaultPackages = configUtils.getDefaultDependencies('gem') || []; + packageList = defaultPackages.concat(packageList); + + // Return empty array if no packages + if (packageList.length === 0) { + return []; + } + + console.log(`(*) Gathering information about gems...`); + const gemListOutput = await getCommandOutputFromContainer(imageTagOrContainerName, "bash -l -c 'set -e && gem list -d --local' 2>/dev/null"); + return packageList.map((gem) => { + const gemVersionCaptureGroup = new RegExp(`^${gem}\\s\\(([^\\),]+)`,'m').exec(gemListOutput); + const gemVersion = gemVersionCaptureGroup[1]; + return { + name: gem, + version: gemVersion + } + }); +} + +/* Generate cargo info object. E.g. +{ + name: "rustfmt", + version: "1.4.17-stable" +} +*/ +async function getCargoPackageInfo(imageTagOrContainerName, packages) { + // Merge in default dependencies + const defaultPackages = configUtils.getDefaultDependencies('go'); + if(defaultPackages) { + const merged = defaultPackages; + for(let package in packages) { + merged[package] = packages[package]; + } + packages = merged; + } + // Return empty array if no packages + if (!packages) { + return []; + } + + const componentList = []; + console.log(`(*) Gathering information about cargo packages...`); + + for(let crate in packages) { + if (typeof crate === 'string') { + const versionCommand = packages[crate] || `${crate} --version`; + console.log(`(*) Getting version for ${crate}...`); + const versionOutput = await getCommandOutputFromContainer(imageTagOrContainerName, versionCommand); + const crateVersionCaptureGroup = new RegExp('[0-9]+\\.[0-9]+\\.[0-9]+','m').exec(versionOutput); + const version = crateVersionCaptureGroup[0]; + componentList.push({ + name: crate, + 
version: version + }); + } + } + + return componentList; +} + +/* Generate go info objects. E.g. +{ + name: "golang.org/x/tools/gopls", + version: "0.6.4" +} +*/ +async function getGoPackageInfo(imageTagOrContainerName, packages) { + // Merge in default dependencies + const defaultPackages = configUtils.getDefaultDependencies('go'); + if(defaultPackages) { + const merged = defaultPackages; + for(let package in packages) { + merged[package] = packages[package]; + } + packages = merged; + } + // Return empty array if no components + if (!packages) { + return []; + } + + console.log(`(*) Gathering information about go modules and packages...`); + const componentList = []; + const packageInstallOutput = await getCommandOutputFromContainer(imageTagOrContainerName, "cat /usr/local/etc/vscode-dev-containers/go.log"); + for(let package in packages) { + if (typeof package === 'string') { + const versionCommand = packages[package]; + let version; + if(versionCommand) { + version = await getCommandOutputFromContainer(imageTagOrContainerName, versionCommand); + } else { + const versionCaptureGroup = new RegExp(`downloading\\s*${package}\\s*v([0-9]+\\.[0-9]+\\.[0-9]+.*)\\n`).exec(packageInstallOutput); + version = versionCaptureGroup ? versionCaptureGroup[1] : 'latest'; + } + componentList.push({ + name: package, + version: version + }); + } + } + + return componentList; +} + +/* Generate image info object. E.g. 
+{ + "name": "debian" + "digest": "sha256:c33d4c1938625a1d0cda78102127b81935e0e94785bc4810b71b5f236dd935e" +} +*/ +async function getImageInfo(imageTagOrContainerName) { + let image = imageTagOrContainerName; + if(isContainerName(imageTagOrContainerName)) { + image = await asyncUtils.spawn('docker', ['inspect', "--format='{{.Image}}'", imageTagOrContainerName.trim()], { shell: true, stdio: 'pipe' }); + } + // If image not yet published, there will be no repo digests, so set to N/A if that is the case + let name, digest; + try { + const imageNameAndDigest = await asyncUtils.spawn('docker', ['inspect', "--format='{{index .RepoDigests 0}}'", image], { shell: true, stdio: 'pipe' }); + [name, digest] = imageNameAndDigest.trim().split('@'); + } catch(err) { + if(err.result.indexOf('Template parsing error') > 0) { + name = 'N/A'; + digest = 'N/A'; + } else { + throw err; + } + } + const nonRootUser = await getCommandOutputFromContainer(imageTagOrContainerName, 'id -un 1000', true) + return { + "name": name, + "digest": digest, + "user": nonRootUser + } +} + + +// Command to start a container for processing. Returns a container name with a +// specific format that can be used to detect whether an image tag or container +// name is passed into the content extractor functions. +async function startContainerForProcessing(imageTag) { + const containerName = `vscdc--extract--${Date.now()}`; + await asyncUtils.spawn('docker', ['run', '-d', '--rm', '--init', '--privileged', '--name', containerName, imageTag, 'sh -c "while sleep 1000; do :; done"'], { shell: true, stdio: 'inherit' }); + return containerName; +} + +// Removes the specified container +async function removeProcessingContainer(containerName) { + await asyncUtils.spawn('docker', ['rm', '-f', containerName], { shell: true, stdio: 'inherit' }); +} + +// Utility that executes commands inside a container. 
If a specially formatted container +// name is passed in, the function will use "docker exec" and otherwise use "docker run" +// since this means an image tag was passed in instead. +async function getCommandOutputFromContainer(imageTagOrContainerName, command, forceRoot) { + const runArgs = isContainerName(imageTagOrContainerName) ? + ['exec'].concat(forceRoot ? ['-u', 'root'] : []) + : ['run','--init', '--privileged', '--rm'].concat(forceRoot ? ['-u', 'root'] : []); + const wrappedCommand = `bash -c "set -e && echo ~~~BEGIN~~~ && ${command} && echo && echo ~~~END~~~"`; + runArgs.push(imageTagOrContainerName); + runArgs.push(wrappedCommand); + const result = await asyncUtils.spawn('docker', runArgs, { shell: true, stdio: 'pipe' }); + // Filter out noise from ENTRYPOINT output + const filteredResult = result.substring(result.indexOf('~~~BEGIN~~~') + 11, result.indexOf('~~~END~~~')); + return filteredResult.trim(); +} + +function isContainerName(imageTagOrContainerName) { + return (imageTagOrContainerName.indexOf('vscdc--extract--') === 0) +} + +// Use distro "ID" from /etc/os-release to determine appropriate package manger +function getLinuxPackageManagerForDistro(distroId) +{ + switch(distroId) { + case 'apt': + case 'debian': + case 'ubuntu': return 'apt'; + case 'apk': + case 'alpine': return 'apk'; + } + return null; +} + +// Return dependencies by mapping distro "ID" from /etc/os-release to determine appropriate package manger +function getLinuxPackageManagerDependencies(dependencies, distroInfo) { + if(dependencies[distroInfo.id]) { + return dependencies[distroInfo.id]; + } + return dependencies[getLinuxPackageManagerForDistro(distroInfo.id)] +} + +// Spins up a container for a referenced image and extracts info for the specified dependencies +async function getAllContentInfo(imageTag, dependencies) { + const containerName = await startContainerForProcessing(imageTag); + try { + const distroInfo = await getLinuxDistroInfo(containerName); + const contents = { 
+ image: await getImageInfo(containerName), + distro: distroInfo, + linux: await getLinuxPackageInfo(containerName, getLinuxPackageManagerDependencies(dependencies, distroInfo), distroInfo), + npm: await getNpmGlobalPackageInfo(containerName, dependencies.npm), + pip: await getPipPackageInfo(containerName, dependencies.pip, false), + pipx: await getPipPackageInfo(containerName, dependencies.pipx, true), + gem: await getGemPackageInfo(containerName, dependencies.gem), + cargo: await getCargoPackageInfo(containerName, dependencies.cargo), + go: await getGoPackageInfo(containerName, dependencies.go), + git: await getGitRepositoryInfo(containerName, dependencies.git), + other: await getOtherComponentInfo(containerName, dependencies.other, 'other'), + languages: await getOtherComponentInfo(containerName, dependencies.languages, 'languages'), + manual: dependencies.manual + } + await removeProcessingContainer(containerName); + return contents; + } catch (e) { + await removeProcessingContainer(containerName); + throw e; + } +} + +module.exports = { + getImageInfo: getImageInfo, + getLinuxDistroInfo: getLinuxDistroInfo, + getLinuxPackageInfo: getLinuxPackageInfo, + getNpmGlobalPackageInfo: getNpmGlobalPackageInfo, + getPipPackageInfo: getPipPackageInfo, + getGemPackageInfo: getGemPackageInfo, + getCargoPackageInfo: getCargoPackageInfo, + getGoPackageInfo: getGoPackageInfo, + getGitRepositoryInfo: getGitRepositoryInfo, + getOtherComponentInfo: getOtherComponentInfo, + startContainerForProcessing: startContainerForProcessing, + removeProcessingContainer: removeProcessingContainer, + getAllContentInfo: getAllContentInfo +} diff --git a/build/Source/utils/markdown-formatter-factory.js b/build/Source/utils/markdown-formatter-factory.js new file mode 100644 index 0000000000..bc22257faa --- /dev/null +++ b/build/Source/utils/markdown-formatter-factory.js @@ -0,0 +1,96 @@ 
+/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +/* +Returns: + +{ + name: "Xdebug", + version: "2.9.6", + url: "https://pecl.php.net/get/xdebug-2.9.6.tgz" + path: "/opt/something" +} + +*/ +function nameAndVersionNormalizer(packageInfo) { + if (packageInfo.markdownIgnore) { + return null; + } + const normalized = Object.assign({}, packageInfo); + normalized.version = packageInfo.version || packageInfo.commitHash; + if (!normalized.version) { + console.log( + `(!) Warning: No version for package ${packageInfo.name} - skipping markdown output.`, + ); + return null; + } + normalized.version = normalized.version.replace(/\n/g, "
"); + normalized.url = packageInfo.downloadUrl || packageInfo.repositoryUrl; + normalized.path = normalized.path + ? normalized.path.replace(/\n/g, "
") + : normalized.path; + return normalized; +} + +/* Handle CG manifest entries like: +{ + "Component": { + "Type": "other", + "Other": { + "Name": "Xdebug", + "Version": "2.9.6", + "DownloadUrl": "https://pecl.php.net/get/xdebug-2.9.6.tgz" + } + } +} + +Returns: + +{ + name: "Xdebug", + version: "2.9.6", + "url": "HTTPS://pecl.php.net/get/xdebug-2.9.6.tgz" +} + +*/ +function componentNormalizer(component) { + if (component.markdownIgnore || component.MarkdownIgnore) { + return null; + } + let componentType = component.Component.Type; + // Handle capitalization differences + if (!component.Component[componentType]) { + componentType = + componentType[0].toUpperCase() + componentType.substr(1); + } + const componentInfo = component.Component[componentType]; + return { + name: componentInfo.Name, + url: componentInfo.DownloadUrl, + version: componentInfo.Version, + }; +} + +function getFormatter() { + return { + image: (info) => info, + distro: (info) => info, + linux: nameAndVersionNormalizer, + npm: nameAndVersionNormalizer, + pip: nameAndVersionNormalizer, + pipx: nameAndVersionNormalizer, + gem: nameAndVersionNormalizer, + cargo: nameAndVersionNormalizer, + go: nameAndVersionNormalizer, + git: nameAndVersionNormalizer, + other: nameAndVersionNormalizer, + languages: nameAndVersionNormalizer, + manual: componentNormalizer, + }; +} + +module.exports = { + getFormatter: getFormatter, +}; diff --git a/containers/clojure/test-project/Source/clj/sample.clj b/containers/clojure/test-project/Source/clj/sample.clj new file mode 100644 index 0000000000..4ae65c4b26 --- /dev/null +++ b/containers/clojure/test-project/Source/clj/sample.clj @@ -0,0 +1,10 @@ +(ns sample + (:gen-class)) + +(defn main + [& args] + (println "Hello world")) + +(defn -main + [& args] + (apply main args)) diff --git a/containers/clojure/test-project/Source/cljs/sample/main.cljs b/containers/clojure/test-project/Source/cljs/sample/main.cljs new file mode 100644 index 0000000000..e60d426a9a --- 
/dev/null +++ b/containers/clojure/test-project/Source/cljs/sample/main.cljs @@ -0,0 +1,3 @@ +(ns sample.main) + +(println "Hello world") diff --git a/containers/codespaces-linux/test-project/yarn.lock b/containers/codespaces-linux/test-project/yarn.lock index e931f3f699..5b13f0c035 100644 --- a/containers/codespaces-linux/test-project/yarn.lock +++ b/containers/codespaces-linux/test-project/yarn.lock @@ -313,9 +313,9 @@ string_decoder@^1.1.1: safe-buffer "~5.2.0" tar-fs@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== + version "2.1.2" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.2.tgz#425f154f3404cb16cb8ff6e671d45ab2ed9596c5" + integrity sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA== dependencies: chownr "^1.1.1" mkdirp-classic "^0.5.2" diff --git a/containers/dapr-javascript-node/test-project/Source/accounts.ts b/containers/dapr-javascript-node/test-project/Source/accounts.ts new file mode 100644 index 0000000000..7d68629bbb --- /dev/null +++ b/containers/dapr-javascript-node/test-project/Source/accounts.ts @@ -0,0 +1,56 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +import * as express from "express"; + +import DaprClient from "./daprClient"; + +const router = express.Router(); + +const daprClient = new DaprClient(); + +const store = "statestore"; + +router.use(express.json({ strict: false })); + +router.get("/:id", async (req, res) => { + const balance = await daprClient.getState(store, req.params.id); + + if (balance !== undefined) { + res.status(200) + .header("Content-Type", "application/json") + .send(JSON.stringify(balance)); + } else { + res.sendStatus(404); + } +}); + +router.post("/:id/deposit", async (req, res) => { + let balance = + (await daprClient.getState(store, req.params.id)) ?? 0; + + balance += req.body as number; + + await daprClient.setState(store, req.params.id, balance); + + res.status(200) + .header("Content-Type", "application/json") + .send(JSON.stringify(balance)); +}); + +router.post("/:id/withdraw", async (req, res) => { + let balance = + (await daprClient.getState(store, req.params.id)) ?? 0; + + balance -= req.body as number; + + await daprClient.setState(store, req.params.id, balance); + + res.status(200) + .header("Content-Type", "application/json") + .send(JSON.stringify(balance)); +}); + +export default router; diff --git a/containers/dapr-javascript-node/test-project/Source/daprClient.ts b/containers/dapr-javascript-node/test-project/Source/daprClient.ts new file mode 100644 index 0000000000..79d1278657 --- /dev/null +++ b/containers/dapr-javascript-node/test-project/Source/daprClient.ts @@ -0,0 +1,57 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +import * as fetch from "isomorphic-fetch"; + +export default class DaprClient { + private readonly daprEndpoint: string; + + constructor(daprEndpoint?: string) { + this.daprEndpoint = + daprEndpoint ?? + `http://localhost:${process.env.DAPR_HTTP_PORT ?? 3500}/v1.0`; + } + + public async getState( + store: string, + key: string, + ): Promise { + const response = await fetch( + `${this.daprEndpoint}/state/${store}/${key}`, + ); + + if (!response.ok) { + throw new Error("Could not get state."); + } else if (response.status === 204) { + return undefined; + } + + const value = await response.text(); + + if (!value) { + return undefined; + } + + return JSON.parse(value); + } + + public async setState( + store: string, + key: string, + value: T, + ): Promise { + const response = await fetch(`${this.daprEndpoint}/state/${store}`, { + body: JSON.stringify([{ key, value }]), + headers: { + "Content-Type": "application/json", + }, + method: "POST", + }); + + if (!response.ok) { + throw new Error("Could not set state."); + } + } +} diff --git a/containers/dapr-javascript-node/test-project/Source/server.ts b/containers/dapr-javascript-node/test-project/Source/server.ts new file mode 100644 index 0000000000..9f121500bc --- /dev/null +++ b/containers/dapr-javascript-node/test-project/Source/server.ts @@ -0,0 +1,27 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +import * as express from "express"; + +import accounts from "./accounts"; + +// Constants +const PORT = 3000; + +const HOST = "0.0.0.0"; + +// App +const app = express(); + +app.use("/accounts", accounts); + +app.listen(PORT, HOST); + +console.log(`Running on http://${HOST}:${PORT}`); + +// Used for automated testing +if (process.env.REGRESSION_TESTING === "true") { + process.exit(0); +} diff --git a/containers/java-8/test-project/Source/main/java/com/mycompany/app/App.java b/containers/java-8/test-project/Source/main/java/com/mycompany/app/App.java new file mode 100644 index 0000000000..357d9f90c2 --- /dev/null +++ b/containers/java-8/test-project/Source/main/java/com/mycompany/app/App.java @@ -0,0 +1,17 @@ +/*------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +package com.mycompany.app; + +import static javax.xml.XMLConstants.XML_NS_PREFIX; + +public class App +{ + public static void main( String[] args ) + { + System.out.println( "Hello Remote World!" 
); + System.out.println("The XML namespace prefix is: " + XML_NS_PREFIX); + } +} diff --git a/containers/java-postgres/test-project/Source/main/java/com/mycompany/app/App.java b/containers/java-postgres/test-project/Source/main/java/com/mycompany/app/App.java new file mode 100644 index 0000000000..357d9f90c2 --- /dev/null +++ b/containers/java-postgres/test-project/Source/main/java/com/mycompany/app/App.java @@ -0,0 +1,17 @@ +/*------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +package com.mycompany.app; + +import static javax.xml.XMLConstants.XML_NS_PREFIX; + +public class App +{ + public static void main( String[] args ) + { + System.out.println( "Hello Remote World!" ); + System.out.println("The XML namespace prefix is: " + XML_NS_PREFIX); + } +} diff --git a/containers/java-postgres/test-project/Source/main/java/module-info.java b/containers/java-postgres/test-project/Source/main/java/module-info.java new file mode 100644 index 0000000000..d401511e9b --- /dev/null +++ b/containers/java-postgres/test-project/Source/main/java/module-info.java @@ -0,0 +1,10 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +module mymodule { + requires java.base; + requires java.xml; + requires java.sql; +} \ No newline at end of file diff --git a/containers/java/test-project/Source/main/java/com/mycompany/app/App.java b/containers/java/test-project/Source/main/java/com/mycompany/app/App.java new file mode 100644 index 0000000000..357d9f90c2 --- /dev/null +++ b/containers/java/test-project/Source/main/java/com/mycompany/app/App.java @@ -0,0 +1,17 @@ +/*------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + *-------------------------------------------------------------------------------------------------------------*/ + +package com.mycompany.app; + +import static javax.xml.XMLConstants.XML_NS_PREFIX; + +public class App +{ + public static void main( String[] args ) + { + System.out.println( "Hello Remote World!" ); + System.out.println("The XML namespace prefix is: " + XML_NS_PREFIX); + } +} diff --git a/containers/java/test-project/Source/main/java/module-info.java b/containers/java/test-project/Source/main/java/module-info.java new file mode 100644 index 0000000000..dbf1c3226a --- /dev/null +++ b/containers/java/test-project/Source/main/java/module-info.java @@ -0,0 +1,9 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +module mymodule { + requires java.base; + requires java.xml; +} \ No newline at end of file diff --git a/containers/reasonml/test-project/Source/Demo.re b/containers/reasonml/test-project/Source/Demo.re new file mode 100644 index 0000000000..487b5d85ab --- /dev/null +++ b/containers/reasonml/test-project/Source/Demo.re @@ -0,0 +1,14 @@ +type schoolPerson = + | Teacher + | Director + | Student(string); + +let greeting = person => + switch (person) { + | Teacher => "Hey Professor!" + | Director => "Hello Director." + | Student("Richard") => "Still here Ricky?" + | Student(anyOtherName) => "Hey, " ++ anyOtherName ++ "!" + }; + +Js.log(greeting(Student("Dev"))); \ No newline at end of file diff --git a/containers/rust-postgres/test-project/Knowledge.dot b/containers/rust-postgres/test-project/Knowledge.dot new file mode 100644 index 0000000000..c06a7a6868 --- /dev/null +++ b/containers/rust-postgres/test-project/Knowledge.dot @@ -0,0 +1,31 @@ +digraph G { + newrank=true; + rankdir=LR; + splines=ortho; + overlap=false; + nodesep=0.5; + ranksep=0.5; + concentrate=true; + graph [bgcolor="#f9f9f9", color="#888888", penwidth=2, fontname="Arial"]; + node [fontname="Arial", fontsize=14, style=filled, margin="0.3,0.2"]; + edge [fontname="Arial", fontsize=10, color="#555555", fontcolor="gray"]; + subgraph cluster_1 { + label="root"; + style=filled; color="#5DADE2"; fillcolor="#5DADE220"; + "n0" [label="root", shape=folder, fillcolor="#5DADE2", penwidth=1.5]; + "n2" [label="cargo (.): test_project v0.1.0", shape=ellipse, fillcolor="#EB984E", penwidth=1.5]; + "n1" [label="Cargo.toml", shape=note, fillcolor="#FADBD8", penwidth=1]; + } + + "n0" -> "n1" [label="Contain", style=solid, color="blue", weight=2, arrowsize=0.8]; + "n0" -> "n2" [label="Contain", style=solid, color="blue", weight=2, arrowsize=0.8]; + subgraph cluster_legend { + label="Legend"; fontsize=14; 
style=dashed; color=gray; + "dir" [label="Directory", shape=folder, fillcolor="#5DADE2"]; + "file" [label="File", shape=note, fillcolor="#FADBD8"]; + "pkg" [label="Package", shape=ellipse, fillcolor="#EB984E"]; + "ext" [label="External", shape=trapezium, fillcolor="#EC7063"]; + "dir" -> "file" [label="Contain", style=solid, color=blue]; + "pkg" -> "ext" [label="Dependency", style=dashed, color=red]; + } +} diff --git a/containers/rust-postgres/test-project/Knowledge.svg b/containers/rust-postgres/test-project/Knowledge.svg new file mode 100644 index 0000000000..f1fc1b43e8 --- /dev/null +++ b/containers/rust-postgres/test-project/Knowledge.svg @@ -0,0 +1,97 @@ + + + + + + +G + + +cluster_1 + +root + + +cluster_legend + +Legend + + + +n0 + +root + + + +n2 + +cargo (.): test_project v0.1.0 + + + +n0->n2 + + +Contain + + + +n1 + + + +Cargo.toml + + + +n0->n1 + + +Contain + + + +dir + +Directory + + + +file + + + +File + + + +dir->file + + +Contain + + + +pkg + +Package + + + +ext + +External + + + +pkg->ext + + +Dependency + + + diff --git a/containers/rust-postgres/test-project/Source/main.rs b/containers/rust-postgres/test-project/Source/main.rs new file mode 100644 index 0000000000..01d0d6b675 --- /dev/null +++ b/containers/rust-postgres/test-project/Source/main.rs @@ -0,0 +1,10 @@ +// -------------------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+// ------------------------------------------------------------------------------------------------------------- + +fn main() { + let name = "VS Code Remote - Containers"; + + println!("Hello, {}!", name); +} diff --git a/containers/rust/Knowledge.dot b/containers/rust/Knowledge.dot new file mode 100644 index 0000000000..21dd8a4db9 --- /dev/null +++ b/containers/rust/Knowledge.dot @@ -0,0 +1,31 @@ +digraph G { + newrank=true; + rankdir=LR; + splines=ortho; + overlap=false; + nodesep=0.5; + ranksep=0.5; + concentrate=true; + graph [bgcolor="#f9f9f9", color="#888888", penwidth=2, fontname="Arial"]; + node [fontname="Arial", fontsize=14, style=filled, margin="0.3,0.2"]; + edge [fontname="Arial", fontsize=10, color="#555555", fontcolor="gray"]; + subgraph cluster_1 { + label="root"; + style=filled; color="#5DADE2"; fillcolor="#5DADE220"; + "n0" [label="root", shape=folder, fillcolor="#5DADE2", penwidth=1.5]; + "n2" [label="cargo (.): hello_remote_world v0.1.0", shape=ellipse, fillcolor="#EB984E", penwidth=1.5]; + "n1" [label="Cargo.toml", shape=note, fillcolor="#FADBD8", penwidth=1]; + } + + "n0" -> "n1" [label="Contain", style=solid, color="blue", weight=2, arrowsize=0.8]; + "n0" -> "n2" [label="Contain", style=solid, color="blue", weight=2, arrowsize=0.8]; + subgraph cluster_legend { + label="Legend"; fontsize=14; style=dashed; color=gray; + "dir" [label="Directory", shape=folder, fillcolor="#5DADE2"]; + "file" [label="File", shape=note, fillcolor="#FADBD8"]; + "pkg" [label="Package", shape=ellipse, fillcolor="#EB984E"]; + "ext" [label="External", shape=trapezium, fillcolor="#EC7063"]; + "dir" -> "file" [label="Contain", style=solid, color=blue]; + "pkg" -> "ext" [label="Dependency", style=dashed, color=red]; + } +} diff --git a/containers/rust/Knowledge.svg b/containers/rust/Knowledge.svg new file mode 100644 index 0000000000..ccb4e8b13d --- /dev/null +++ b/containers/rust/Knowledge.svg @@ -0,0 +1,97 @@ + + + + + + +G + + +cluster_1 + +root + + +cluster_legend + 
+Legend + + + +n0 + +root + + + +n2 + +cargo (.): hello_remote_world v0.1.0 + + + +n0->n2 + + +Contain + + + +n1 + + + +Cargo.toml + + + +n0->n1 + + +Contain + + + +dir + +Directory + + + +file + + + +File + + + +dir->file + + +Contain + + + +pkg + +Package + + + +ext + +External + + + +pkg->ext + + +Dependency + + + diff --git a/containers/rust/test-project/Source/main.rs b/containers/rust/test-project/Source/main.rs new file mode 100644 index 0000000000..01d0d6b675 --- /dev/null +++ b/containers/rust/test-project/Source/main.rs @@ -0,0 +1,10 @@ +// -------------------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +// ------------------------------------------------------------------------------------------------------------- + +fn main() { + let name = "VS Code Remote - Containers"; + + println!("Hello, {}!", name); +} diff --git a/containers/typescript-node/test-project/Source/server.ts b/containers/typescript-node/test-project/Source/server.ts new file mode 100644 index 0000000000..f008fe53a9 --- /dev/null +++ b/containers/typescript-node/test-project/Source/server.ts @@ -0,0 +1,26 @@ +/*-------------------------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ *-------------------------------------------------------------------------------------------------------------*/ + +import * as express from "express"; + +// Constants +const PORT = 3000; + +const HOST = "0.0.0.0"; + +// App +const app = express(); +app.get("/", (req: any, res: any) => { + res.send("Hello remote world!\n"); +}); + +app.listen(PORT, HOST); + +console.log(`Running on http://${HOST}:${PORT}`); + +// Used for automated testing +if (process.env.REGRESSION_TESTING === "true") { + process.exit(0); +} diff --git a/script-library/container-features/Source/devcontainer-features.json b/script-library/container-features/Source/devcontainer-features.json new file mode 100644 index 0000000000..26043a7585 --- /dev/null +++ b/script-library/container-features/Source/devcontainer-features.json @@ -0,0 +1,1249 @@ +{ + "features": [ + // Problem #1 - you should be able to pick docker-in-docker or docker-from-docker, but not both, and settings need to be different + { + "id": "docker-in-docker", + "name": "Docker (Moby) support (Docker-in-Docker)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "20.10"], + "default": "latest", + "description": "Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.)" + }, + "moby": { + "type": "boolean", + "default": true, + "description": "Install OSS Moby build instead of Docker CE" + }, + "dockerDashComposeVersion": { + "type": "string", + "enum": ["v1", "v2"], + "default": "v1", + "description": "Default version of Docker Compose (v1 or v2)" + } + }, + "buildArg": "_VSC_INSTALL_DOCKER_IN_DOCKER", + "entrypoint": "/usr/local/share/docker-init.sh", + "privileged": true, + "containerEnv": { + "DOCKER_BUILDKIT": "1" + }, + "extensions": ["ms-azuretools.vscode-docker"], + // Problem #2 - The volume name here should be something unique per dev container. 
Need ability to either get a var that points to one or ask for a volume name + "mounts": [ + { + "source": "dind-var-lib-docker", + "target": "/var/lib/docker", + "type": "volume" + } + ], + "include": [ + "cpp", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "python-3-anaconda", + "python-3-miniconda", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "docker-from-docker", + "name": "Docker (Moby) support, reuse host Docker Engine (Docker-from-Docker)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "20.10"], + "default": "latest", + "description": "Select or enter a Docker/Moby CLI version. 
(Availability can vary by OS version.)" + }, + "moby": { + "type": "boolean", + "default": true, + "description": "Install OSS Moby build instead of Docker CE" + }, + "dockerDashComposeVersion": { + "type": "string", + "enum": ["v1", "v2"], + "default": "v1", + "description": "Compose version to use for docker-compose (v1 or v2)" + } + }, + "buildArg": "_VSC_INSTALL_DOCKER_FROM_DOCKER", + "entrypoint": "/usr/local/share/docker-init.sh", + "containerEnv": { + "DOCKER_BUILDKIT": "1" + }, + "extensions": ["ms-azuretools.vscode-docker"], + "mounts": [ + { + "source": "/var/run/docker.sock", + "target": "/var/run/docker-host.sock", + "type": "bind" + } + ], + "include": [ + "cpp", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "kubectl-helm-minikube", + "name": "Kubectl, Helm, and Minikube", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/kubectl-helm.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "1.23", "1.22", "1.21"], + "default": "latest", + "description": "Select or enter a Kubernetes version to install" + }, + "helm": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Select or enter a Helm version to install" + }, + "minikube": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Select or enter a Minikube version to install" + } + }, + "buildArg": "_VSC_INSTALL_KUBECTL_HELM_MINIKUBE", + "extensions": ["ms-kubernetes-tools.vscode-kubernetes-tools"], + // Problem #2 - The volume name here should be something unique per dev container. 
Need ability to either get a var that points to one or ask for a volume name + "mounts": [ + { + "source": "minikube-config", + "target": "/home/vscode/.minikube", + "type": "volume" + } + ], + "include": [ + "cpp", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "terraform", + "name": "Terraform, tflint, and TFGrunt", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/terraform.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "1.2", "1.1", "1.0", "0.15"], + "default": "latest", + "description": "Terraform version" + }, + "tflint": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Tflint version" + }, + "terragrunt": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Terragrunt version" + } + }, + "buildArg": "_VSC_INSTALL_TERRAFORM", + "extensions": [ + "HashiCorp.terraform", + "ms-azuretools.vscode-azureterraform" + ], + "settings": { + "terraform.languageServer": { + "enabled": true, + "args": [] + }, + "azureTerraform.terminal": "integrated" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "git", + "name": "Git (may require compilation)", + "documentationURL": 
"https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/git-from-src.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "os-provided"], + "default": "os-provided", + "description": "Select or enter a Git version." + }, + "ppa": { + "type": "boolean", + "default": true, + "description": "Install from PPA if available" + } + }, + "buildArg": "_VSC_INSTALL_GIT", + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "git-lfs", + "name": "Git Large File Support (LFS)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/git-lfs.md", + "options": { + "version": { + "type": "string", + "enum": ["latest"], + "default": "latest", + "description": "Currently unused." 
+ } + }, + "buildArg": "_VSC_INSTALL_GIT_LFS", + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "github-cli", + "name": "GitHub CLI", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/github.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Select version of the GitHub CLI, if not latest." + } + }, + "buildArg": "_VSC_INSTALL_GITHUB_CLI", + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "aws-cli", + "name": "AWS CLI", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/awscli.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Select or enter an AWS CLI version. 
(Available versions here: https://github.com/aws/aws-cli/blob/v2/CHANGELOG.rst)" + } + }, + "buildArg": "_VSC_INSTALL_AWS_CLI", + "extensions": ["AmazonWebServices.aws-toolkit-vscode"], + "include": [ + "cpp", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "azure-cli", + "name": "Azure CLI", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/azcli.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest"], + "default": "latest", + "description": "Select or enter an Azure CLI version. (Available versions may vary by Linux distribution.)" + } + }, + "buildArg": "_VSC_INSTALL_AZURE_CLI", + "extensions": ["ms-vscode.azurecli"], + "include": [ + "cpp", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "sshd", + "name": "SSH server", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/sshd.md", + "options": { + "version": { + "type": "string", + "enum": ["latest"], + "default": "latest", + "description": "Currently unused." 
+ } + }, + "buildArg": "_VSC_INSTALL_SSH", + "entrypoint": "/usr/local/share/ssh-init.sh", + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "desktop-lite", + "name": "Light-weight desktop (Fluxbox)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/desktop-lite.md", + "options": { + "version": { + "type": "string", + "enum": ["latest"], + "default": "latest", + "description": "Currently unused." + }, + "password": { + "type": "string", + "proposals": ["vscode", "codespaces", "password"], + "default": "vscode", + "description": "Enter a password for desktop connections" + }, + "webPort": { + "type": "string", + "proposals": ["6080"], + "default": "6080", + "description": "Enter a port for the desktop web client" + }, + "vncPort": { + "type": "string", + "proposals": ["5901"], + "default": "5901", + "description": "Enter a port for the desktop VNC server" + } + }, + "init": true, + "buildArg": "_VSC_INSTALL_DESKTOP_LITE", + "entrypoint": "/usr/local/share/desktop-init.sh", + "containerEnv": { + "DISPLAY": ":1" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + 
"python-3-postgres" + ] + }, + { + "id": "homebrew", + "name": "Homebrew", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/homebrew.md", + "options": { + "version": { + "type": "string", + "enum": ["latest"], + "default": "latest", + "description": "Currently unused." + }, + "shallow": { + "type": "boolean", + "default": false, + "description": "Use shallow clone for faster container build." + } + }, + "buildArg": "_VSC_INSTALL_HOMEBREW", + "containerEnv": { + "BREW_PREFIX": "/home/linuxbrew/.linuxbrew", + "PATH": "${BREW_PREFIX}/sbin:${BREW_PREFIX}/bin:${PATH}" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "fish", + "name": "fish shell and Fisher", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/fish.md", + "options": { + "version": { + "type": "string", + "enum": ["latest"], + "default": "latest", + "description": "Currently unused." 
+ } + }, + "buildArg": "_VSC_INSTALL_FISH", + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "node", + "name": "Node.js (via nvm) and yarn", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/node.md", + "options": { + "version": { + "type": "string", + "proposals": ["lts", "latest", "18", "16", "14"], + "default": "lts", + "description": "Select or enter a Node.js version to install" + }, + "nodeGypDependencies": { + "type": "boolean", + "default": true, + "description": "Install dependencies to compile native node modules (node-gyp)?" + } + }, + "buildArg": "_VSC_INSTALL_NODE", + "extensions": ["dbaeumer.vscode-eslint"], + "containerEnv": { + "NVM_DIR": "/usr/local/share/nvm", + "NVM_SYMLINK_CURRENT": "true", + "PATH": "${NVM_DIR}/current/bin:${PATH}" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "powershell", + "rust", + "ubuntu", + "debian" + ] + }, + { + "id": "python", + "name": "Python (may require compilation)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/python.md", + "options": { + "version": { + "type": "string", + "enum": [ + "latest", + "os-provided", + "3.10", + "3.9", + "3.8", + "3.7", + "3.6" + ], + "default": "os-provided", + "description": "Select a Python version to install." 
+ }, + "installTools": { + "type": "boolean", + "default": true, + "description": "Install common Python tools like pylint" + }, + "optimize": { + "type": "boolean", + "default": false, + "description": "Optimize Python for performance when compiled (slow)" + } + }, + "buildArg": "_VSC_INSTALL_PYTHON", + "extensions": ["ms-python.python", "ms-python.vscode-pylance"], + "containerEnv": { + "PYTHON_PATH": "/usr/local/python", + "PIPX_HOME": "/usr/local/py-utils", + "PIPX_BIN_DIR": "/usr/local/py-utils/bin", + "PATH": "${PYTHON_PATH}/bin:${PATH}:${PIPX_BIN_DIR}" + }, + "settings": { + "python.defaultInterpreterPath": "/usr/local/bin/python", + "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", + "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", + "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", + "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", + "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", + "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", + "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "java-8", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres" + ] + }, + { + "id": "golang", + "name": "Go", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/go.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "1.19", "1.18"], + "default": "latest", + "description": "Select or enter a Go version to install" 
+ } + }, + "init": true, + "buildArg": "_VSC_INSTALL_GOLANG", + "extensions": ["golang.Go"], + "containerEnv": { + "GOPATH": "/go", + "PATH": "${GOPATH}/bin:${GOROOT}/bin:${PATH}" + }, + "capAdd": ["SYS_PTRACE"], + "securityOpt": ["seccomp=unconfined"], + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "java", + "java-8", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "java", + "name": "Java (via SDKMAN!)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/java.md", + "options": { + "version": { + "type": "string", + "proposals": ["lts", "latest", "17", "11", "8"], + "default": "lts", + "description": "Select or enter a Java version to install" + } + }, + "buildArg": "_VSC_INSTALL_JAVA", + "extensions": ["vscjava.vscode-java-pack"], + "containerEnv": { + "SDKMAN_DIR": "/usr/local/sdkman", + "PATH": "${SDKMAN_DIR}/bin:${SDKMAN_DIR}/candidates/java/current/bin:${PATH}" + }, + "settings": { + "java.import.gradle.java.home": "/usr/local/sdkman/candidates/java/current" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + // Problem #6 - Maven and gradle options should not be presented if Java isn't checked + { 
+ "id": "maven", + "name": "Maven (via SDKMAN!)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/maven.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "3.8", "3.6", "3.5"], + "default": "latest", + "description": "Select or enter a Maven version to install" + } + }, + "buildArg": "_VSC_INSTALL_MAVEN", + "extensions": ["vscjava.vscode-java-pack"], + "containerEnv": { + "SDKMAN_DIR": "/usr/local/sdkman", + "PATH": "${SDKMAN_DIR}/bin:${SDKMAN_DIR}/candidates/maven/current/bin:${PATH}" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "gradle", + "name": "Gradle (via SDKMAN!)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/gradle.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "7", "6", "5"], + "default": "latest", + "description": "Select or enter a Gradle version to install" + } + }, + "buildArg": "_VSC_INSTALL_GRADLE", + "extensions": ["vscjava.vscode-java-pack"], + "containerEnv": { + "SDKMAN_DIR": "/usr/local/sdkman", + "PATH": "${SDKMAN_DIR}/bin:${SDKMAN_DIR}/candidates/gradle/current/bin:${PATH}" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", 
+ "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "ruby", + "name": "Ruby (via rvm)", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/ruby.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "3.1", "3.0", "2.7"], + "default": "latest", + "description": "Select or enter a Ruby version to install" + } + }, + "buildArg": "_VSC_INSTALL_RUBY", + "extensions": ["rebornix.Ruby"], + "containerEnv": { + "GEM_PATH": "/usr/local/rvm/gems/default:/usr/local/rvm/gems/default@global", + "GEM_HOME": "/usr/local/rvm/gems/default", + "MY_RUBY_HOME": "/usr/local/rvm/rubies/default", + "PATH": "/usr/local/rvm/gems/default/bin:/usr/local/rvm/gems/default@global/bin:/usr/local/rvm/rubies/default/bin:${PATH}" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "go", + "java", + "java-8", + "php", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "python-3-postgres" + ] + }, + { + "id": "rust", + "name": "Rust", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/rust.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "1.55", "1.54", "1.53"], + "default": "latest", + "description": "Select or enter a version of Rust to install." + }, + "profile": { + "type": "string", + "proposals": ["minimal", "default", "complete"], + "default": "minimal", + "description": "Select a rustup install profile." 
+ } + }, + "buildArg": "_VSC_INSTALL_RUST", + "extensions": [ + "vadimcn.vscode-lldb", + "mutantdino.resourcemonitor", + "rust-lang.rust-analyzer", + "tamasfe.even-better-toml", + "serayuzgur.crates" + ], + "containerEnv": { + "CARGO_HOME": "/usr/local/cargo", + "RUSTUP_HOME": "/usr/local/rustup", + "PATH": "${CARGO_HOME}/bin:${PATH}" + }, + "capAdd": ["SYS_PTRACE"], + "securityOpt": ["seccomp=unconfined"], + "settings": { + "lldb.executable": "/usr/bin/lldb", + "files.watcherExclude": { + "**/target/**": true + }, + "rust-analyzer.checkOnSave.command": "clippy" + }, + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "powershell", + "go", + "java", + "java-8", + "php", + "ruby", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "powershell", + "name": "PowerShell", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/powershell.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "7.1"], + "default": "latest", + "description": "Select or enter a version of PowerShell." 
+ } + }, + "buildArg": "_VSC_INSTALL_POWERSHELL", + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "dotnet", + "dotnet-fsharp", + "dotnet-mssql", + "go", + "java", + "java-8", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "common", + "name": "Common OS utilities", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md", + "options": { + "installZsh": { + "type": "boolean", + "default": true, + "description": "Install ZSH?" + }, + "installOhMyZsh": { + "type": "boolean", + "default": true, + "description": "Install Oh My Zsh!?" + }, + "upgradePackages": { + "type": "boolean", + "default": true, + "description": "Upgrade OS packages?" + }, + "username": { + "type": "string", + "proposals": ["vscode", "codespace", "none", "automatic"], + "default": "automatic", + "description": "Enter name of non-root user to configure or none to skip" + }, + "uid": { + "type": "string", + "proposals": ["1000", "automatic"], + "default": "automatic", + "description": "Enter uid for non-root user" + }, + "gid": { + "type": "string", + "proposals": ["1000", "automatic"], + "default": "automatic", + "description": "Enter gid for non-root user" + }, + "nonFreePackages": { + "type": "boolean", + "default": true, + "description": "Add packages from non-free Debian repository?" 
+ } + }, + "buildArg": "_VSC_INSTALL_COMMON", + "include": ["puppet", "chef"] + }, + { + "id": "dotnet", + "name": "Dotnet CLI", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/dotnet.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "6.0", "5.0", "3.1"], + "default": "latest", + "description": "Select or enter a dotnet CLI version. (Available versions may vary by Linux distribution.)" + }, + "runtimeOnly": { + "type": "boolean", + "default": false, + "description": "Install just the dotnet runtime if true, and sdk if false." + } + }, + "buildArg": "_VSC_INSTALL_DOTNET", + "containerEnv": { + "DOTNET_ROOT": "/usr/local/dotnet", + "PATH": "${PATH}:${DOTNET_ROOT}" + }, + "extensions": ["ms-dotnettools.csharp"], + "include": [ + "cpp", + "kubernetes-helm", + "kubernetes-helm-minikube", + "docker-from-docker", + "docker-in-docker", + "powershell", + "java", + "go", + "php", + "ruby", + "rust", + "typescript-node", + "javascript-node", + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian", + "javascript-node-mongo", + "javascript-node-postgres", + "php-mariadb", + "ruby-rails-postgres", + "python-3-postgres" + ] + }, + { + "id": "jupyterlab", + "name": "Jupyter Lab", + "documentationURL": "https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/jupyterlab.md", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "3.6.2"], + "default": "latest", + "description": "Select or enter a jupyterlab version." 
+ } + }, + "buildArg": "_VSC_INSTALL_JUPYTERLAB", + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "ms-toolsai.jupyter" + ], + "include": [ + "python-3", + "python-3-anaconda", + "python-3-miniconda", + "ubuntu", + "debian" + ] + } + ] +} diff --git a/script-library/container-features/Source/feature-scripts.env b/script-library/container-features/Source/feature-scripts.env new file mode 100644 index 0000000000..20214a620c --- /dev/null +++ b/script-library/container-features/Source/feature-scripts.env @@ -0,0 +1,25 @@ +_VSC_INSTALL_COMMON="common-debian.sh ${_BUILD_ARG_COMMON_INSTALLZSH} ${_BUILD_ARG_COMMON_USERNAME} ${_BUILD_ARG_COMMON_UID} ${_BUILD_ARG_COMMON_GID} ${_BUILD_ARG_COMMON_UPGRADEPACKAGES} ${_BUILD_ARG_COMMON_INSTALLOHMYZSH} ${_BUILD_ARG_COMMON_NONFREEPACKAGES}" +_VSC_INSTALL_GIT="git-from-src-debian.sh ${_BUILD_ARG_GIT_VERSION:-latest} ${_BUILD_ARG_GIT_PPA:-true}" +_VSC_INSTALL_GIT_LFS="git-lfs-debian.sh ${_BUILD_ARG_GIT_LFS_VERSION:-latest}" +_VSC_INSTALL_DOCKER_IN_DOCKER="docker-in-docker-debian.sh true automatic ${_BUILD_ARG_DOCKER_IN_DOCKER_MOBY:-true} ${_BUILD_ARG_DOCKER_IN_DOCKER_VERSION:-latest} ${_BUILD_ARG_DOCKER_IN_DOCKER_DOCKERDASHCOMPOSEVERSION:-v1}" +_VSC_INSTALL_DOCKER_FROM_DOCKER="docker-debian.sh true /var/run/docker-host.sock /var/run/docker.sock automatic ${_BUILD_ARG_DOCKER_FROM_DOCKER_MOBY:-true} ${_BUILD_ARG_DOCKER_FROM_DOCKER_VERSION:-latest} ${_BUILD_ARG_DOCKER_FROM_DOCKER_DOCKERDASHCOMPOSEVERSION:-v1}" +_VSC_INSTALL_KUBECTL_HELM_MINIKUBE="kubectl-helm-debian.sh ${_BUILD_ARG_KUBECTL_HELM_MINIKUBE_VERSION:-latest} ${_BUILD_ARG_KUBECTL_HELM_MINIKUBE_HELM:-latest} ${_BUILD_ARG_KUBECTL_HELM_MINIKUBE_MINIKUBE:-latest}" +_VSC_INSTALL_TERRAFORM="terraform-debian.sh ${_BUILD_ARG_TERRAFORM_VERSION:-latest} ${_BUILD_ARG_TERRAFORM_TFLINT:-latest} ${_BUILD_ARG_TERRAFORM_TERRAGRUNT:-latest}" +_VSC_INSTALL_GITHUB_CLI="github-debian.sh ${_BUILD_ARG_GITHUB_CLI_VERSION:-latest}" +_VSC_INSTALL_AWS_CLI="awscli-debian.sh 
${_BUILD_ARG_AWS_CLI_VERSION:-latest}" +_VSC_INSTALL_AZURE_CLI="azcli-debian.sh ${_BUILD_ARG_AZURE_CLI_VERSION:-latest}" +_VSC_INSTALL_SSH=sshd-debian.sh +_VSC_INSTALL_NODE="node-debian.sh /usr/local/share/nvm ${_BUILD_ARG_NODE_VERSION:-lts/*} automatic true ${_BUILD_ARG_NODE_NODEGYPDEPENDENCIES:-true}" +_VSC_INSTALL_PYTHON="python-debian.sh ${_BUILD_ARG_PYTHON_VERSION:-latest} /usr/local/python /usr/local/py-utils automatic true ${_BUILD_ARG_PYTHON_INSTALLTOOLS:-true} true ${_BUILD_ARG_PYTHON_OPTIMIZE:-false}" +_VSC_INSTALL_GOLANG="go-debian.sh ${_BUILD_ARG_GOLANG_VERSION:-latest}" +_VSC_INSTALL_JAVA="java-wrapper.sh ${_BUILD_ARG_JAVA_VERSION:-latest}" +_VSC_INSTALL_GRADLE="gradle-debian.sh ${_BUILD_ARG_GRADLE_VERSION:-latest}" +_VSC_INSTALL_MAVEN="maven-debian.sh ${_BUILD_ARG_MAVEN_VERSION:-latest}" +_VSC_INSTALL_RUBY="ruby-debian.sh ${_BUILD_ARG_RUBY_VERSION:-latest}" +_VSC_INSTALL_HOMEBREW="homebrew-debian.sh automatic true ${_BUILD_ARG_HOMEBREW_SHALLOW:-false}" +_VSC_INSTALL_RUST="rust-debian.sh /usr/local/cargo /usr/local/rustup automatic true false ${_BUILD_ARG_RUST_VERSION:-latest} ${_BUILD_ARG_RUST_PROFILE:-minimal}" +_VSC_INSTALL_POWERSHELL="powershell-debian.sh ${_BUILD_ARG_POWERSHELL_VERSION:-latest}" +_VSC_INSTALL_DESKTOP_LITE="desktop-lite-debian.sh automatic ${_BUILD_ARG_DESKTOP_LITE_PASSWORD:-vscode} true ${_BUILD_ARG_DESKTOP_LITE_VNCPORT:-5901} ${_BUILD_ARG_DESKTOP_LITE_WEBPORT:-6080}" +_VSC_INSTALL_DOTNET="dotnet-debian.sh ${_BUILD_ARG_DOTNET_VERSION:-latest} ${_BUILD_ARG_DOTNET_RUNTIMEONLY:-false} automatic true /usr/local/dotnet dotnet" +_VSC_INSTALL_FISH="fish-debian.sh" +_VSC_INSTALL_JUPYTERLAB="jupyterlab-debian.sh ${_BUILD_ARG_JUPYTERLAB_VERSION:-latest}" diff --git a/script-library/container-features/Source/install.sh b/script-library/container-features/Source/install.sh new file mode 100644 index 0000000000..d9fe994e4c --- /dev/null +++ b/script-library/container-features/Source/install.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +set -e +cd 
"$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Verify we're on a supported OS +. /etc/os-release +if [ "${ID}" != "debian" ] && [ "${ID_LIKE}" != "debian" ]; then +cat << EOF + +*********** Unsupported operating system "${ID}" detected *********** + +Features support currently requires a Debian/Ubuntu-based image. Update your +image or Dockerfile FROM statement to start with a supported OS. For example: +mcr.microsoft.com/vscode/devcontainers/base:ubuntu + +Aborting build... + +EOF + exit 2 +fi + +set -a +. ./devcontainer-features.env +set +a + +chmod +x *.sh + +# Execute option scripts if correct environment variable is set to "true" +feature_marker_path="/usr/local/etc/vscode-dev-containers/features" +mkdir -p "${feature_marker_path}" +while IFS= read -r feature_line; do + # Extract the env var part of the line + feature_var_name="${feature_line%%=*}" + if [ ! -z "${!feature_var_name}" ]; then + # If a value is set for the env var, execute the script + feature_script_and_args="${feature_line##*=}" + feature_script_and_args="${feature_script_and_args%\"}" + script_command="$(eval echo "${feature_script_and_args#\"}")" + echo "(*) Script: ${script_command}" + + # Check if script with same args has already been run + feature_marker="${feature_marker_path}/${feature_var_name}"; + if [ -e "${feature_marker}" ] && [ "${script_command}" = "$(cat ${feature_marker})" ]; then + echo "(*) Skipping. Script already run with same arguments." 
+ else + # Execute script and create a marker with the script args + ./${script_command} + echo "${script_command}" > "${feature_marker}" + fi + fi +done < ./feature-scripts.env + +# Clean up +apt-get autoremove -y +apt-get clean -y +rm -rf /var/lib/apt/lists/* diff --git a/script-library/container-features/Source/java-wrapper.sh b/script-library/container-features/Source/java-wrapper.sh new file mode 100644 index 0000000000..c48e7d9b2a --- /dev/null +++ b/script-library/container-features/Source/java-wrapper.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +# Wrapper function that also installs JDK 11 if JDK 8 is selected since this is required for the Java extension + +set -e + +JAVA_VERSION=${1:-"default"} +SDKMAN_DIR=${2:-"/usr/local/sdkman"} +USERNAME=${3:-"automatic"} +UPDATE_RC=${4:-"true"} +ADDITIONAL_JAVA_VERSION=11 + +cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +chmod +x java-debian.sh + +is_jdk_8="false" +if echo "${JAVA_VERSION}" | grep -E '^8([\s\.]|$)' > /dev/null 2>&1; then + is_jdk_8="true" +fi + +# If the user selected JDK 8, install the JDK 11 as well since this is needed by the Java extension +if [ "${is_jdk_8}" = "true" ]; then + echo "(*) Installing JDK ${ADDITIONAL_JAVA_VERSION} as Java VS Code extension requires a recent JDK..." 
+ ./java-debian.sh "${ADDITIONAL_JAVA_VERSION}" "${SDKMAN_DIR}" "${USERNAME}" "${UPDATE_RC}" + jdk_11_folder="$(ls --format=single-column ${SDKMAN_DIR}/candidates/java | grep -oE -m 1 '11\..+')" + ln -s "${SDKMAN_DIR}/candidates/java/${jdk_11_folder}" /extension-java-home + + # Determine the appropriate non-root user + username="" + possible_users=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for current_user in ${possible_users[@]}; do + if id -u ${current_user} > /dev/null 2>&1; then + username=${current_user} + break + fi + done + if [ "${username}" = "" ]; then + username=root + fi +else + ln -s ${SDKMAN_DIR}/candidates/java/current /extension-java-home +fi + +echo "(*) Installing JDK ${JAVA_VERSION}..." +./java-debian.sh "${JAVA_VERSION}" "${SDKMAN_DIR}" "${USERNAME}" "${UPDATE_RC}" +if [ "${is_jdk_8}" = "true" ]; then + # Set current and default version to last SDK installed + jdk_full_version="$(ls --format=single-column "${SDKMAN_DIR}/candidates/java" | sort -rV | grep -oE -m 1 "${JAVA_VERSION}\\..+" )" + echo "(*) Setting default JDK to ${jdk_full_version}..." + . ${SDKMAN_DIR}/bin/sdkman-init.sh + sdk use java "${jdk_full_version}" + sdk default java "${jdk_full_version}" +fi \ No newline at end of file