extension_host: Make extension updates more resilient (#54355)

Finn Evers created

Following our earlier incident, I noticed a report mentioning that a user's
extension was uninstalled upon update, which is how they stumbled across
the issue caused by our outage.

The reason for this was that, whenever we installed extension updates, we
_first_ removed the old directory and _then_ went ahead and downloaded the
new version. This would obviously lead to issues whenever something
malfunctioned during the update/install download — whether that was an
issue on our side with the servers, or on the user's side due to a poor or
lost network connection.

This PR changes this - we now do the entirety of the download on a
background task, unpack the archive into a temporary directory if
possible and then proceed with removing the old extension contents and
moving the new contents only _after_ everything prior succeeded.

I also moved this work off of the main thread, since there is no reason to
run it there — and because the `Drop` impl of `TempDir` would otherwise
perform some blocking work on the main thread.

Release Notes:

- Made extension updates more resilient to network and upstream failures

Change summary

crates/extension_host/src/extension_host.rs | 92 ++++++++++++++--------
1 file changed, 59 insertions(+), 33 deletions(-)

Detailed changes

crates/extension_host/src/extension_host.rs 🔗

@@ -19,7 +19,7 @@ use extension::{
     ExtensionGrammarProxy, ExtensionHostProxy, ExtensionLanguageProxy,
     ExtensionLanguageServerProxy, ExtensionSnippetProxy, ExtensionThemeProxy,
 };
-use fs::{Fs, RemoveOptions};
+use fs::{Fs, RemoveOptions, RenameOptions};
 use futures::future::join_all;
 use futures::{
     AsyncReadExt as _, Future, FutureExt as _, StreamExt as _,
@@ -726,41 +726,67 @@ impl ExtensionStore {
                 }
             });
 
-            let mut response = http_client
-                .get(url.as_ref(), Default::default(), true)
-                .await
-                .context("downloading extension")?;
+            cx.background_spawn(async move {
+                let mut response = http_client
+                    .get(url.as_ref(), Default::default(), true)
+                    .await
+                    .context("downloading extension")?;
+
+                let content_length = response
+                    .headers()
+                    .get(http_client::http::header::CONTENT_LENGTH)
+                    .and_then(|value| value.to_str().ok()?.parse::<usize>().ok());
+
+                let mut body = BufReader::new(response.body_mut());
+                let mut tar_gz_bytes = Vec::new();
+                body.read_to_end(&mut tar_gz_bytes).await?;
+
+                if let Some(content_length) = content_length {
+                    let actual_len = tar_gz_bytes.len();
+                    if content_length != actual_len {
+                        bail!(
+                            "downloaded extension size {actual_len} \
+                        does not match content length {content_length}"
+                        );
+                    }
+                }
 
-            fs.remove_dir(
-                &extension_dir,
-                RemoveOptions {
-                    recursive: true,
-                    ignore_if_not_exists: true,
-                },
-            )
-            .await?;
+                let decompressed_bytes = GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice()));
+                let archive = Archive::new(decompressed_bytes);
 
-            let content_length = response
-                .headers()
-                .get(http_client::http::header::CONTENT_LENGTH)
-                .and_then(|value| value.to_str().ok()?.parse::<usize>().ok());
-
-            let mut body = BufReader::new(response.body_mut());
-            let mut tar_gz_bytes = Vec::new();
-            body.read_to_end(&mut tar_gz_bytes).await?;
-
-            if let Some(content_length) = content_length {
-                let actual_len = tar_gz_bytes.len();
-                if content_length != actual_len {
-                    bail!(concat!(
-                        "downloaded extension size {actual_len} ",
-                        "does not match content length {content_length}"
-                    ));
+                let remove_dir = || {
+                    fs.remove_dir(
+                        &extension_dir,
+                        RemoveOptions {
+                            recursive: true,
+                            ignore_if_not_exists: true,
+                        },
+                    )
+                };
+
+                match tempfile::tempdir_in(paths::temp_dir()).or_else(|_| tempfile::tempdir()) {
+                    Ok(temp_dir) => {
+                        archive.unpack(temp_dir.path()).await?;
+                        remove_dir().await?;
+                        fs.rename(
+                            temp_dir.path(),
+                            &extension_dir,
+                            RenameOptions {
+                                overwrite: true,
+                                ignore_if_exists: true,
+                                create_parents: true,
+                            },
+                        )
+                        .await
+                    }
+                    Err(_) => {
+                        remove_dir().await?;
+                        archive.unpack(extension_dir).await.map_err(Into::into)
+                    }
                 }
-            }
-            let decompressed_bytes = GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice()));
-            let archive = Archive::new(decompressed_bytes);
-            archive.unpack(extension_dir).await?;
+            })
+            .await?;
+
             this.update(cx, |this, cx| this.reload(Some(extension_id.clone()), cx))?
                 .await;