Cargo.lock 🔗
@@ -4793,6 +4793,7 @@ dependencies = [
"paths",
"picker",
"project",
+ "regex",
"serde",
"serde_json",
"serde_json_lenient",
Created by KyleBarton
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Closes #52928
Release Notes:
- Fixed handling of multi-stage Dockerfiles, and of Dockerfiles with an explicit build stage (`target`), in dev
container manifests
- Fixed base-image detection for dev containers whose Dockerfiles use build
args that require expansion
Cargo.lock | 1
crates/dev_container/Cargo.toml | 1
crates/dev_container/src/devcontainer_json.rs | 41
crates/dev_container/src/devcontainer_manifest.rs | 516 ++++++++++++----
crates/dev_container/src/docker.rs | 2
5 files changed, 422 insertions(+), 139 deletions(-)
@@ -4793,6 +4793,7 @@ dependencies = [
"paths",
"picker",
"project",
+ "regex",
"serde",
"serde_json",
"serde_json_lenient",
@@ -19,6 +19,7 @@ futures.workspace = true
log.workspace = true
menu.workspace = true
paths.workspace = true
+regex.workspace = true
picker.workspace = true
project.workspace = true
settings.workspace = true
@@ -138,7 +138,7 @@ pub(crate) struct ContainerBuild {
context: Option<String>,
pub(crate) args: Option<HashMap<String, String>>,
options: Option<Vec<String>>,
- target: Option<String>,
+ pub(crate) target: Option<String>,
#[serde(default, deserialize_with = "deserialize_string_or_array")]
cache_from: Option<Vec<String>>,
}
@@ -185,8 +185,8 @@ pub(crate) enum LifecycleCommand {
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum DevContainerBuildType {
- Image,
- Dockerfile,
+ Image(String),
+ Dockerfile(ContainerBuild),
DockerCompose,
None,
}
@@ -249,14 +249,15 @@ pub(crate) fn deserialize_devcontainer_json(json: &str) -> Result<DevContainer,
impl DevContainer {
pub(crate) fn build_type(&self) -> DevContainerBuildType {
- if self.image.is_some() {
- return DevContainerBuildType::Image;
+ if let Some(image) = &self.image {
+ DevContainerBuildType::Image(image.clone())
} else if self.docker_compose_file.is_some() {
- return DevContainerBuildType::DockerCompose;
- } else if self.build.is_some() {
- return DevContainerBuildType::Dockerfile;
+ DevContainerBuildType::DockerCompose
+ } else if let Some(build) = &self.build {
+ DevContainerBuildType::Dockerfile(build.clone())
+ } else {
+ DevContainerBuildType::None
}
- return DevContainerBuildType::None;
}
}
@@ -911,7 +912,12 @@ mod test {
}
);
- assert_eq!(devcontainer.build_type(), DevContainerBuildType::Image);
+ assert_eq!(
+ devcontainer.build_type(),
+ DevContainerBuildType::Image(String::from(
+ "mcr.microsoft.com/devcontainers/base:ubuntu"
+ ))
+ );
}
#[test]
@@ -1366,7 +1372,20 @@ mod test {
}
);
- assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile);
+ assert_eq!(
+ devcontainer.build_type(),
+ DevContainerBuildType::Dockerfile(ContainerBuild {
+ dockerfile: "DockerFile".to_string(),
+ context: Some("..".to_string()),
+ args: Some(HashMap::from([(
+ "MYARG".to_string(),
+ "MYVALUE".to_string()
+ )])),
+ options: Some(vec!["--some-option".to_string(), "--mount".to_string()]),
+ target: Some("development".to_string()),
+ cache_from: Some(vec!["some_image".to_string()]),
+ })
+ );
}
#[test]
@@ -6,6 +6,8 @@ use std::{
sync::Arc,
};
+use regex::Regex;
+
use fs::Fs;
use http_client::HttpClient;
use util::{ResultExt, command::Command};
@@ -217,11 +219,10 @@ impl DevContainerManifest {
async fn dockerfile_location(&self) -> Option<PathBuf> {
let dev_container = self.dev_container();
match dev_container.build_type() {
- DevContainerBuildType::Image => None,
- DevContainerBuildType::Dockerfile => dev_container
- .build
- .as_ref()
- .map(|build| self.config_directory.join(&build.dockerfile)),
+ DevContainerBuildType::Image(_) => None,
+ DevContainerBuildType::Dockerfile(build) => {
+ Some(self.config_directory.join(&build.dockerfile))
+ }
DevContainerBuildType::DockerCompose => {
let Ok(docker_compose_manifest) = self.docker_compose_manifest().await else {
return None;
@@ -260,48 +261,50 @@ impl DevContainerManifest {
/// - The image sourced in the docker-compose main service dockerfile, if one is specified
/// If no such image is available, return an error
async fn get_base_image_from_config(&self) -> Result<String, DevContainerError> {
- if let Some(image) = &self.dev_container().image {
- return Ok(image.to_string());
- }
- if let Some(dockerfile) = self.dev_container().build.as_ref().map(|b| &b.dockerfile) {
- let dockerfile_contents = self
- .fs
- .load(&self.config_directory.join(dockerfile))
- .await
- .map_err(|e| {
- log::error!("Error reading dockerfile: {e}");
- DevContainerError::DevContainerParseFailed
- })?;
- return image_from_dockerfile(self, dockerfile_contents);
- }
- if self.dev_container().docker_compose_file.is_some() {
- let docker_compose_manifest = self.docker_compose_manifest().await?;
- let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?;
+ match self.dev_container().build_type() {
+ DevContainerBuildType::Image(image) => {
+ return Ok(image);
+ }
+ DevContainerBuildType::Dockerfile(build) => {
+ let dockerfile_contents = self.expanded_dockerfile_content().await?;
+ return image_from_dockerfile(dockerfile_contents, &build.target).ok_or_else(
+ || {
+ log::error!("Unable to find base image in Dockerfile");
+ DevContainerError::DevContainerParseFailed
+ },
+ );
+ }
+ DevContainerBuildType::DockerCompose => {
+ let docker_compose_manifest = self.docker_compose_manifest().await?;
+ let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?;
- if let Some(dockerfile) = main_service
- .build
- .as_ref()
- .and_then(|b| b.dockerfile.as_ref())
- {
- let dockerfile_contents = self
- .fs
- .load(&self.config_directory.join(dockerfile))
- .await
- .map_err(|e| {
- log::error!("Error reading dockerfile: {e}");
+ if let Some(_) = main_service
+ .build
+ .as_ref()
+ .and_then(|b| b.dockerfile.as_ref())
+ {
+ let dockerfile_contents = self.expanded_dockerfile_content().await?;
+ return image_from_dockerfile(
+ dockerfile_contents,
+ &main_service.build.as_ref().and_then(|b| b.target.clone()),
+ )
+ .ok_or_else(|| {
+ log::error!("Unable to find base image in Dockerfile");
DevContainerError::DevContainerParseFailed
- })?;
- return image_from_dockerfile(self, dockerfile_contents);
+ });
+ }
+ if let Some(image) = &main_service.image {
+ return Ok(image.to_string());
+ }
+
+ log::error!("No valid base image found in docker-compose configuration");
+ return Err(DevContainerError::DevContainerParseFailed);
}
- if let Some(image) = &main_service.image {
- return Ok(image.to_string());
+ DevContainerBuildType::None => {
+ log::error!("Not a valid devcontainer config for build");
+ return Err(DevContainerError::NotInValidProject);
}
-
- log::error!("No valid base image found in docker-compose configuration");
- return Err(DevContainerError::DevContainerParseFailed);
}
- log::error!("No valid base image found in dev container configuration");
- Err(DevContainerError::DevContainerParseFailed)
}
async fn download_feature_and_dockerfile_resources(&mut self) -> Result<(), DevContainerError> {
@@ -505,7 +508,10 @@ impl DevContainerManifest {
// --- Phase 3: Generate extended Dockerfile from the inflated manifests ---
- let is_compose = dev_container.build_type() == DevContainerBuildType::DockerCompose;
+ let is_compose = match dev_container.build_type() {
+ DevContainerBuildType::DockerCompose => true,
+ _ => false,
+ };
let use_buildkit = self.docker_client.supports_compose_buildkit() || !is_compose;
let dockerfile_base_content = if let Some(location) = &self.dockerfile_location().await {
@@ -514,10 +520,29 @@ impl DevContainerManifest {
None
};
+ let build_target = if is_compose {
+ find_primary_service(&self.docker_compose_manifest().await?, self)?
+ .1
+ .build
+ .and_then(|b| b.target)
+ } else {
+ dev_container.build.as_ref().and_then(|b| b.target.clone())
+ };
+
+ let dockerfile_content = dockerfile_base_content
+ .map(|content| {
+ dockerfile_inject_alias(
+ &content,
+ "dev_container_auto_added_stage_label",
+ build_target,
+ )
+ })
+ .unwrap_or_default();
+
let dockerfile_content = self.generate_dockerfile_extended(
&container_user,
&remote_user,
- dockerfile_base_content,
+ dockerfile_content,
use_buildkit,
);
@@ -544,7 +569,7 @@ impl DevContainerManifest {
&self,
container_user: &str,
remote_user: &str,
- dockerfile_content: Option<String>,
+ dockerfile_content: String,
use_buildkit: bool,
) -> String {
#[cfg(not(target_os = "windows"))]
@@ -565,16 +590,6 @@ impl DevContainerManifest {
let container_home_cmd = get_ent_passwd_shell_command(container_user);
let remote_home_cmd = get_ent_passwd_shell_command(remote_user);
- let dockerfile_content = dockerfile_content
- .map(|content| {
- if dockerfile_alias(&content).is_some() {
- content
- } else {
- dockerfile_inject_alias(&content, "dev_container_auto_added_stage_label")
- }
- })
- .unwrap_or("".to_string());
-
let dest = FEATURES_CONTAINER_TEMP_DEST_FOLDER;
let feature_content_source_stage = if use_buildkit {
@@ -694,20 +709,17 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
}
let dev_container = self.dev_container();
match dev_container.build_type() {
- DevContainerBuildType::Image => {
+ DevContainerBuildType::Image(base_image) => {
let built_docker_image = self.build_docker_image().await?;
- let Some(base_image) = dev_container.image.as_ref() else {
- log::error!("Dev container is using and image which can't be referenced");
- return Err(DevContainerError::DevContainerParseFailed);
- };
+
let built_docker_image = self
- .update_remote_user_uid(built_docker_image, base_image)
+ .update_remote_user_uid(built_docker_image, &base_image)
.await?;
let resources = self.build_merged_resources(built_docker_image)?;
Ok(DevContainerBuildResources::Docker(resources))
}
- DevContainerBuildType::Dockerfile => {
+ DevContainerBuildType::Dockerfile(_) => {
let built_docker_image = self.build_docker_image().await?;
let Some(features_build_info) = &self.features_build_info else {
log::error!(
@@ -892,6 +904,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
}),
),
dockerfile: Some(dockerfile_path.display().to_string()),
+ target: Some("dev_containers_target_stage".to_string()),
args: Some(build_args),
additional_contexts,
}),
@@ -983,6 +996,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
features_build_info.empty_context_dir.display().to_string(),
),
dockerfile: Some(dockerfile_path.display().to_string()),
+ target: Some("dev_containers_target_stage".to_string()),
args: Some(build_args),
additional_contexts,
}),
@@ -1252,11 +1266,8 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
};
match dev_container.build_type() {
- DevContainerBuildType::Image => {
- let Some(image_tag) = &dev_container.image else {
- return Err(DevContainerError::DevContainerParseFailed);
- };
- let base_image = self.docker_client.inspect(image_tag).await?;
+ DevContainerBuildType::Image(image_tag) => {
+ let base_image = self.docker_client.inspect(&image_tag).await?;
if dev_container
.features
.as_ref()
@@ -1266,7 +1277,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
return Ok(base_image);
}
}
- DevContainerBuildType::Dockerfile => {}
+ DevContainerBuildType::Dockerfile(_) => {}
DevContainerBuildType::DockerCompose | DevContainerBuildType::None => {
return Err(DevContainerError::DevContainerParseFailed);
}
@@ -1390,7 +1401,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
DevContainerError::FilesystemError
})?;
- let updated_image_tag = format!("{}-uid", features_build_info.image_tag);
+ let updated_image_tag = features_build_info.image_tag.clone();
let mut command = Command::new(self.docker_client.docker_cli());
command.args(["build"]);
@@ -1603,7 +1614,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
command.args(["-t", &features_build_info.image_tag]);
- if dev_container.build_type() == DevContainerBuildType::Dockerfile {
+ if let DevContainerBuildType::Dockerfile(_) = dev_container.build_type() {
command.arg(self.config_directory.display().to_string());
} else {
// Use an empty folder as the build context to avoid pulling in unneeded files.
@@ -1784,7 +1795,6 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
}
for app_port in &self.dev_container().app_port {
command.arg("-p");
- // Should just implement display for an AppPort struct which takes care of this; it might be a custom map like (literally) "8081:8080"
command.arg(app_port);
}
@@ -1987,6 +1997,65 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
safe_id_lower(alternate_name)
}
}
+
+ async fn expanded_dockerfile_content(&self) -> Result<String, DevContainerError> {
+ let Some(dockerfile_path) = self.dockerfile_location().await else {
+ log::error!("Tried to expand dockerfile for an image-type config");
+ return Err(DevContainerError::DevContainerParseFailed);
+ };
+
+ let devcontainer_args = self
+ .dev_container()
+ .build
+ .as_ref()
+ .and_then(|b| b.args.clone())
+ .unwrap_or_default();
+ let contents = self.fs.load(&dockerfile_path).await.map_err(|e| {
+ log::error!("Failed to load Dockerfile: {e}");
+ DevContainerError::FilesystemError
+ })?;
+ let mut parsed_lines: Vec<String> = Vec::new();
+ let mut inline_args: Vec<(String, String)> = Vec::new();
+ let key_regex = Regex::new(r"(?:^|\s)(\w+)=").expect("valid regex");
+
+ for line in contents.lines() {
+ let mut parsed_line = line.to_string();
+ // Replace from devcontainer args first, since they take precedence
+ for (key, value) in &devcontainer_args {
+ parsed_line = parsed_line.replace(&format!("${{{key}}}"), value)
+ }
+ for (key, value) in &inline_args {
+ parsed_line = parsed_line.replace(&format!("${{{key}}}"), value);
+ }
+ if let Some(arg_directives) = parsed_line.strip_prefix("ARG ") {
+ let trimmed = arg_directives.trim();
+ let key_matches: Vec<_> = key_regex.captures_iter(trimmed).collect();
+ for (i, captures) in key_matches.iter().enumerate() {
+ let key = captures[1].to_string();
+ // Insert the devcontainer overrides here if needed
+ let value_start = captures.get(0).expect("full match").end();
+ let value_end = if i + 1 < key_matches.len() {
+ key_matches[i + 1].get(0).expect("full match").start()
+ } else {
+ trimmed.len()
+ };
+ let raw_value = trimmed[value_start..value_end].trim();
+ let value = if raw_value.starts_with('"')
+ && raw_value.ends_with('"')
+ && raw_value.len() > 1
+ {
+ &raw_value[1..raw_value.len() - 1]
+ } else {
+ raw_value
+ };
+ inline_args.push((key, value.to_string()));
+ }
+ }
+ parsed_lines.push(parsed_line);
+ }
+
+ Ok(parsed_lines.join("\n"))
+ }
}
/// Holds all the information needed to construct a `docker buildx build` command
@@ -2237,46 +2306,37 @@ chmod +x ./install.sh
Ok(script)
}
-// Dockerfile actions need to be moved to their own file
-fn dockerfile_alias(dockerfile_content: &str) -> Option<String> {
- dockerfile_content
- .lines()
- .find(|line| line.starts_with("FROM"))
- .and_then(|line| {
- let words: Vec<&str> = line.split(" ").collect();
- if words.len() > 2 && words[words.len() - 2].to_lowercase() == "as" {
- return Some(words[words.len() - 1].to_string());
- } else {
- return None;
- }
- })
-}
-
-fn dockerfile_inject_alias(dockerfile_content: &str, alias: &str) -> String {
- if dockerfile_alias(dockerfile_content).is_some() {
- dockerfile_content.to_string()
- } else {
- dockerfile_content
- .lines()
- .map(|line| {
- if line.starts_with("FROM") {
- format!("{} AS {}", line, alias)
- } else {
- line.to_string()
- }
- })
- .collect::<Vec<String>>()
- .join("\n")
+fn dockerfile_inject_alias(
+ dockerfile_content: &str,
+ alias: &str,
+ build_target: Option<String>,
+) -> String {
+ match image_from_dockerfile(dockerfile_content.to_string(), &build_target) {
+ Some(target) => format!(
+ r#"{dockerfile_content}
+FROM {target} AS {alias}"#
+ ),
+ None => dockerfile_content.to_string(),
}
}
-fn image_from_dockerfile(
- devcontainer: &DevContainerManifest,
- dockerfile_contents: String,
-) -> Result<String, DevContainerError> {
- let mut raw_contents = dockerfile_contents
+fn image_from_dockerfile(dockerfile_contents: String, target: &Option<String>) -> Option<String> {
+ dockerfile_contents
.lines()
- .find(|line| line.starts_with("FROM"))
+ .filter(|line| line.starts_with("FROM"))
+ .rfind(|from_line| match &target {
+ Some(target) => {
+ let parts = from_line.split(' ').collect::<Vec<&str>>();
+ if parts.len() >= 3
+ && parts.get(parts.len() - 2).unwrap_or(&"").to_lowercase() == "as"
+ {
+ parts.last().unwrap_or(&"").to_lowercase() == target.to_lowercase()
+ } else {
+ false
+ }
+ }
+ None => true,
+ })
.and_then(|from_line| {
from_line
.split(' ')
@@ -2284,21 +2344,6 @@ fn image_from_dockerfile(
.get(1)
.map(|s| s.to_string())
})
- .ok_or_else(|| {
- log::error!("Could not find an image definition in dockerfile");
- DevContainerError::DevContainerParseFailed
- })?;
-
- for (k, v) in devcontainer
- .dev_container()
- .build
- .as_ref()
- .and_then(|b| b.args.as_ref())
- .unwrap_or(&HashMap::new())
- {
- raw_contents = raw_contents.replace(&format!("${{{}}}", k), v);
- }
- Ok(raw_contents)
}
// Container user things
@@ -2386,6 +2431,7 @@ mod test {
devcontainer_manifest::{
ConfigStatus, DevContainerManifest, DockerBuildResources, DockerComposeResources,
DockerInspect, extract_feature_id, find_primary_service, get_remote_user_from_config,
+ image_from_dockerfile,
},
docker::{
DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild,
@@ -3087,7 +3133,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
ARG VARIANT="16-bullseye"
-FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT}
RUN mkdir -p /workspaces && chown node:node /workspaces
@@ -3100,6 +3146,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
&& mkdir -p /home/$USERNAME/commandhistory \
&& touch /home/$USERNAME/commandhistory/.bash_history \
&& chown -R $USERNAME /home/$USERNAME/commandhistory
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
USER root
@@ -3426,13 +3473,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&feature_dockerfile,
r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
-FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
# Include lld linker to improve build times either by using environment variable
# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install clang lld \
&& apt-get autoremove -y && apt-get clean -y
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
USER root
@@ -3748,13 +3796,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&feature_dockerfile,
r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
-FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
# Include lld linker to improve build times either by using environment variable
# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install clang lld \
&& apt-get autoremove -y && apt-get clean -y
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
USER root
@@ -3927,13 +3976,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&feature_dockerfile,
r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
-FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
# Include lld linker to improve build times either by using environment variable
# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install clang lld \
&& apt-get autoremove -y && apt-get clean -y
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
FROM dev_container_feature_content_temp as dev_containers_feature_content_source
@@ -4046,6 +4096,7 @@ ENV DOCKER_BUILDKIT=1
"VARIANT": "18-bookworm",
"FOO": "bar",
},
+ "target": "development",
},
"workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached",
"workspaceFolder": "/workspace2",
@@ -4131,7 +4182,8 @@ ENV DOCKER_BUILDKIT=1
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
ARG VARIANT="16-bullseye"
-FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT}
+FROM mcr.microsoft.com/devcontainers/typescript-node:latest as predev
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} as development
RUN mkdir -p /workspaces && chown node:node /workspaces
@@ -4174,7 +4226,8 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
ARG VARIANT="16-bullseye"
-FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
+FROM mcr.microsoft.com/devcontainers/typescript-node:latest as predev
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} as development
RUN mkdir -p /workspaces && chown node:node /workspaces
@@ -4187,6 +4240,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom
&& mkdir -p /home/$USERNAME/commandhistory \
&& touch /home/$USERNAME/commandhistory/.bash_history \
&& chown -R $USERNAME /home/$USERNAME/commandhistory
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
USER root
@@ -4477,6 +4531,211 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
);
}
+ #[gpui::test]
+ async fn test_gets_base_image_from_dockerfile(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "build": {
+ "dockerfile": "Dockerfile",
+ "args": {
+ "VERSION": "1.22",
+ }
+ },
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ test_dependencies
+ .fs
+ .atomic_write(
+ PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+ r#"
+FROM dontgrabme as build_context
+ARG VERSION=1.21
+ARG REPOSITORY=mybuild
+ARG REGISTRY=docker.io/stuff
+
+ARG IMAGE=${REGISTRY}/${REPOSITORY}:${VERSION}
+
+FROM ${IMAGE} AS devcontainer
+ "#
+ .trim()
+ .to_string(),
+ )
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let dockerfile_contents = devcontainer_manifest
+ .expanded_dockerfile_content()
+ .await
+ .unwrap();
+ let base_image = image_from_dockerfile(
+ dockerfile_contents,
+ &devcontainer_manifest
+ .dev_container()
+ .build
+ .as_ref()
+ .and_then(|b| b.target.clone()),
+ )
+ .unwrap();
+
+ assert_eq!(base_image, "docker.io/stuff/mybuild:1.22".to_string());
+ }
+
+ #[gpui::test]
+ async fn test_gets_base_image_from_dockerfile_with_target_specified(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "build": {
+ "dockerfile": "Dockerfile",
+ "args": {
+ "VERSION": "1.22",
+ },
+ "target": "development"
+ },
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ test_dependencies
+ .fs
+ .atomic_write(
+ PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+ r#"
+FROM dontgrabme as build_context
+ARG VERSION=1.21
+ARG REPOSITORY=mybuild
+ARG REGISTRY=docker.io/stuff
+
+ARG IMAGE=${REGISTRY}/${REPOSITORY}:${VERSION}
+ARG DEV_IMAGE=${REGISTRY}/${REPOSITORY}:latest
+
+FROM ${DEV_IMAGE} AS development
+FROM ${IMAGE} AS production
+ "#
+ .trim()
+ .to_string(),
+ )
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let dockerfile_contents = devcontainer_manifest
+ .expanded_dockerfile_content()
+ .await
+ .unwrap();
+ let base_image = image_from_dockerfile(
+ dockerfile_contents,
+ &devcontainer_manifest
+ .dev_container()
+ .build
+ .as_ref()
+ .and_then(|b| b.target.clone()),
+ )
+ .unwrap();
+
+ assert_eq!(base_image, "docker.io/stuff/mybuild:latest".to_string());
+ }
+
+ #[gpui::test]
+ async fn test_expands_args_in_dockerfile(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "build": {
+ "dockerfile": "Dockerfile",
+ "args": {
+ "JSON_ARG": "some-value",
+ "ELIXIR_VERSION": "1.21",
+ }
+ },
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ test_dependencies
+ .fs
+ .atomic_write(
+ PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+ r#"
+ARG INVALID_FORWARD_REFERENCE=${OTP_VERSION}
+ARG ELIXIR_VERSION=1.20.0-rc.4
+ARG FOO=foo BAR=bar
+ARG FOOBAR=${FOO}${BAR}
+ARG OTP_VERSION=28.4.1
+ARG DEBIAN_VERSION=trixie-20260316-slim
+ARG IMAGE="docker.io/hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}"
+ARG NESTED_MAP="{"key1": "val1", "key2": "val2"}"
+ARG WRAPPING_MAP={"nested_map": ${NESTED_MAP}}
+ARG FROM_JSON=${JSON_ARG}
+
+FROM ${IMAGE} AS devcontainer
+ "#
+ .trim()
+ .to_string(),
+ )
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let expanded_dockerfile = devcontainer_manifest
+ .expanded_dockerfile_content()
+ .await
+ .unwrap();
+
+ assert_eq!(
+ &expanded_dockerfile,
+ r#"
+ARG INVALID_FORWARD_REFERENCE=${OTP_VERSION}
+ARG ELIXIR_VERSION=1.20.0-rc.4
+ARG FOO=foo BAR=bar
+ARG FOOBAR=foobar
+ARG OTP_VERSION=28.4.1
+ARG DEBIAN_VERSION=trixie-20260316-slim
+ARG IMAGE="docker.io/hexpm/elixir:1.21-erlang-28.4.1-debian-trixie-20260316-slim"
+ARG NESTED_MAP="{"key1": "val1", "key2": "val2"}"
+ARG WRAPPING_MAP={"nested_map": {"key1": "val1", "key2": "val2"}}
+ARG FROM_JSON=some-value
+
+FROM docker.io/hexpm/elixir:1.21-erlang-28.4.1-debian-trixie-20260316-slim AS devcontainer
+ "#
+ .trim()
+ )
+ }
+
+ #[test]
+ fn test_aliases_dockerfile_with_pre_existing_aliases_for_build() {}
+
+ #[test]
+ fn test_aliases_dockerfile_with_no_aliases_for_build() {}
+
+ #[test]
+ fn test_aliases_dockerfile_with_build_target_specified() {}
+
pub(crate) struct RecordedExecCommand {
pub(crate) _container_id: String,
pub(crate) _remote_folder: String,
@@ -4641,6 +4900,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
dockerfile: Some("Dockerfile".to_string()),
args: None,
additional_contexts: None,
+ target: None,
}),
volumes: vec![MountDefinition {
source: Some("../..".to_string()),
@@ -80,6 +80,8 @@ pub(crate) struct DockerComposeServiceBuild {
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) dockerfile: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) target: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
pub(crate) args: Option<HashMap<String, String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) additional_contexts: Option<HashMap<String, String>>,