Lukas Wirth created this pull request:
std commands can block for an arbitrary duration and so run the risk of
blocking tasks for too long. This PR replaces all such uses, where sensible,
with async processes.
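For illustration only (not part of this PR's diff), here is a minimal sketch of the pattern being enforced, using a hypothetical `git --version` helper: the blocking `std::process::Command::output` call is what the new `disallowed_methods` lint rejects, and the awaitable `smol::process::Command` equivalent is the suggested replacement.

use std::io;

// Disallowed outside build scripts/tooling: `output()` blocks the calling
// thread until the child exits, which can stall an async executor.
fn git_version_blocking() -> io::Result<std::process::Output> {
    std::process::Command::new("git").arg("--version").output()
}

// Suggested replacement: same builder-style API, but the wait is awaitable,
// so the executor can keep running other tasks while the child runs.
async fn git_version_async() -> io::Result<std::process::Output> {
    smol::process::Command::new("git").arg("--version").output().await
}

fn main() -> io::Result<()> {
    let output = smol::block_on(git_version_async())?;
    print!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}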
Release Notes:
- N/A *or* Added/Fixed/Improved ...
Cargo.toml | 1
clippy.toml | 11
crates/auto_update/src/auto_update.rs | 21
crates/auto_update_helper/src/updater.rs | 1
crates/cli/build.rs | 1
crates/cli/src/main.rs | 4
crates/collections/src/collections.rs | 19
crates/crashes/src/crashes.rs | 11
crates/explorer_command_injector/src/explorer_command_injector.rs | 1
crates/extension/src/extension_builder.rs | 40
crates/extension_cli/src/main.rs | 3
crates/fs/src/fs.rs | 25
crates/git/src/repository.rs | 70
crates/gpui/build.rs | 1
crates/gpui/src/platform/linux/platform.rs | 22
crates/gpui/src/platform/mac/platform.rs | 11
crates/gpui/src/platform/windows/platform.rs | 5
crates/media/build.rs | 1
crates/project/src/environment.rs | 2
crates/project/src/git_store.rs | 2
crates/project/src/terminals.rs | 6
crates/remote_server/build.rs | 1
crates/remote_server/src/unix.rs | 67
crates/system_specs/src/system_specs.rs | 4
crates/util/src/shell_env.rs | 11
crates/util/src/util.rs | 4
crates/vim/src/command.rs | 11
crates/zed/build.rs | 1
crates/zed/src/main.rs | 2
tooling/perf/Cargo.toml | 1
tooling/xtask/src/tasks/clippy.rs | 1
31 files changed, 220 insertions(+), 141 deletions(-)
Cargo.toml
@@ -862,6 +862,7 @@ todo = "deny"
declare_interior_mutable_const = "deny"
redundant_clone = "deny"
+disallowed_methods = "deny"
# We currently do not restrict any style rules
# as it slows down shipping code to Zed.
clippy.toml
@@ -5,3 +5,14 @@ ignore-interior-mutability = [
# and Hash impls do not use fields with interior mutability.
"agent::context::AgentContextKey"
]
+disallowed-methods = [
+ { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
+ { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
+ { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
+]
+disallowed-types = [
+ # { path = "std::collections::HashMap", replacement = "collections::HashMap" },
+ # { path = "std::collections::HashSet", replacement = "collections::HashSet" },
+ # { path = "indexmap::IndexSet", replacement = "collections::IndexSet" },
+ # { path = "indexmap::IndexMap", replacement = "collections::IndexMap" },
+]
crates/auto_update/src/auto_update.rs
@@ -310,10 +310,10 @@ impl AutoUpdater {
// the app after an update, we use `set_restart_path` to run the auto
// update helper instead of the app, so that it can overwrite the app
// and then spawn the new binary.
- let quit_subscription = Some(cx.on_app_quit(|_, _| async move {
- #[cfg(target_os = "windows")]
- finalize_auto_update_on_quit();
- }));
+ #[cfg(target_os = "windows")]
+ let quit_subscription = Some(cx.on_app_quit(|_, _| finalize_auto_update_on_quit()));
+ #[cfg(not(target_os = "windows"))]
+ let quit_subscription = None;
cx.on_app_restart(|this, _| {
this.quit_subscription.take();
@@ -942,11 +942,12 @@ async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option
let helper_path = std::env::current_exe()?
.parent()
.context("No parent dir for Zed.exe")?
- .join("tools\\auto_update_helper.exe");
+ .join("tools")
+ .join("auto_update_helper.exe");
Ok(Some(helper_path))
}
-pub fn finalize_auto_update_on_quit() {
+pub async fn finalize_auto_update_on_quit() {
let Some(installer_path) = std::env::current_exe()
.ok()
.and_then(|p| p.parent().map(|p| p.join("updates")))
@@ -959,12 +960,14 @@ pub fn finalize_auto_update_on_quit() {
if flag_file.exists()
&& let Some(helper) = installer_path
.parent()
- .map(|p| p.join("tools\\auto_update_helper.exe"))
+ .map(|p| p.join("tools").join("auto_update_helper.exe"))
{
- let mut command = std::process::Command::new(helper);
+ let mut command = smol::process::Command::new(helper);
command.arg("--launch");
command.arg("false");
- let _ = command.spawn();
+ if let Ok(mut cmd) = command.spawn() {
+ _ = cmd.status().await;
+ }
}
}
crates/auto_update_helper/src/updater.rs
@@ -160,6 +160,7 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
}
}
if launch {
+ #[allow(clippy::disallowed_methods, reason = "doesn't run in the main binary")]
let _ = std::process::Command::new(app_dir.join("Zed.exe"))
.creation_flags(CREATE_NEW_PROCESS_GROUP.0)
.spawn();
crates/cli/build.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
use std::process::Command;
fn main() {
crates/cli/src/main.rs
@@ -1,3 +1,7 @@
+#![allow(
+ clippy::disallowed_methods,
+ reason = "We are not in an async environment, so std::process::Command is fine"
+)]
#![cfg_attr(
any(target_os = "linux", target_os = "freebsd", target_os = "windows"),
allow(dead_code)
crates/collections/src/collections.rs
@@ -1,27 +1,8 @@
-#[cfg(feature = "test-support")]
pub type HashMap<K, V> = FxHashMap<K, V>;
-
-#[cfg(feature = "test-support")]
pub type HashSet<T> = FxHashSet<T>;
-
-#[cfg(feature = "test-support")]
pub type IndexMap<K, V> = indexmap::IndexMap<K, V, rustc_hash::FxBuildHasher>;
-
-#[cfg(feature = "test-support")]
pub type IndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
-#[cfg(not(feature = "test-support"))]
-pub type HashMap<K, V> = std::collections::HashMap<K, V>;
-
-#[cfg(not(feature = "test-support"))]
-pub type HashSet<T> = std::collections::HashSet<T>;
-
-#[cfg(not(feature = "test-support"))]
-pub type IndexMap<K, V> = indexmap::IndexMap<K, V>;
-
-#[cfg(not(feature = "test-support"))]
-pub type IndexSet<T> = indexmap::IndexSet<T>;
-
pub use indexmap::Equivalent;
pub use rustc_hash::FxHasher;
pub use rustc_hash::{FxHashMap, FxHashSet};
crates/crashes/src/crashes.rs
@@ -3,6 +3,7 @@ use log::info;
use minidumper::{Client, LoopAction, MinidumpBinary};
use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
use serde::{Deserialize, Serialize};
+use smol::process::Command;
#[cfg(target_os = "macos")]
use std::sync::atomic::AtomicU32;
@@ -12,7 +13,7 @@ use std::{
io,
panic::{self, PanicHookInfo},
path::{Path, PathBuf},
- process::{self, Command},
+ process::{self},
sync::{
Arc, OnceLock,
atomic::{AtomicBool, Ordering},
@@ -53,13 +54,13 @@ pub async fn init(crash_init: InitCrashHandler) {
// used by the crash handler isn't destroyed correctly which causes it to stay on the file
// system and block further attempts to initialize crash handlers with that socket path.
let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}"));
- #[allow(unused)]
- let server_pid = Command::new(exe)
+ let _crash_handler = Command::new(exe)
.arg("--crash-handler")
.arg(&socket_name)
.spawn()
- .expect("unable to spawn server process")
- .id();
+ .expect("unable to spawn server process");
+ #[cfg(target_os = "linux")]
+ let server_pid = _crash_handler.id();
info!("spawning crash handler process");
let mut elapsed = Duration::ZERO;
crates/explorer_command_injector/src/explorer_command_injector.rs
@@ -77,6 +77,7 @@ impl IExplorerCommand_Impl for ExplorerCommandInjector_Impl {
for idx in 0..count {
let item = unsafe { items.GetItemAt(idx)? };
let item_path = unsafe { item.GetDisplayName(SIGDN_FILESYSPATH)?.to_string()? };
+ #[allow(clippy::disallowed_methods, reason = "no async context in sight..")]
std::process::Command::new(&zed_exe)
.arg(&item_path)
.spawn()
crates/extension/src/extension_builder.rs
@@ -142,7 +142,7 @@ impl ExtensionBuilder {
manifest: &mut ExtensionManifest,
options: CompileExtensionOptions,
) -> anyhow::Result<()> {
- self.install_rust_wasm_target_if_needed()?;
+ self.install_rust_wasm_target_if_needed().await?;
let cargo_toml_content = fs::read_to_string(extension_dir.join("Cargo.toml"))?;
let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?;
@@ -151,7 +151,7 @@ impl ExtensionBuilder {
"compiling Rust crate for extension {}",
extension_dir.display()
);
- let output = util::command::new_std_command("cargo")
+ let output = util::command::new_smol_command("cargo")
.args(["build", "--target", RUST_TARGET])
.args(options.release.then_some("--release"))
.arg("--target-dir")
@@ -160,6 +160,7 @@ impl ExtensionBuilder {
.env("RUSTC_WRAPPER", "")
.current_dir(extension_dir)
.output()
+ .await
.context("failed to run `cargo`")?;
if !output.status.success() {
bail!(
@@ -235,7 +236,8 @@ impl ExtensionBuilder {
&grammar_repo_dir,
&grammar_metadata.repository,
&grammar_metadata.rev,
- )?;
+ )
+ .await?;
let base_grammar_path = grammar_metadata
.path
@@ -248,7 +250,7 @@ impl ExtensionBuilder {
let scanner_path = src_path.join("scanner.c");
log::info!("compiling {grammar_name} parser");
- let clang_output = util::command::new_std_command(&clang_path)
+ let clang_output = util::command::new_smol_command(&clang_path)
.args(["-fPIC", "-shared", "-Os"])
.arg(format!("-Wl,--export=tree_sitter_{grammar_name}"))
.arg("-o")
@@ -258,6 +260,7 @@ impl ExtensionBuilder {
.arg(&parser_path)
.args(scanner_path.exists().then_some(scanner_path))
.output()
+ .await
.context("failed to run clang")?;
if !clang_output.status.success() {
@@ -271,15 +274,16 @@ impl ExtensionBuilder {
Ok(())
}
- fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> {
+ async fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> {
let git_dir = directory.join(".git");
if directory.exists() {
- let remotes_output = util::command::new_std_command("git")
+ let remotes_output = util::command::new_smol_command("git")
.arg("--git-dir")
.arg(&git_dir)
.args(["remote", "-v"])
- .output()?;
+ .output()
+ .await?;
let has_remote = remotes_output.status.success()
&& String::from_utf8_lossy(&remotes_output.stdout)
.lines()
@@ -298,10 +302,11 @@ impl ExtensionBuilder {
fs::create_dir_all(directory).with_context(|| {
format!("failed to create grammar directory {}", directory.display(),)
})?;
- let init_output = util::command::new_std_command("git")
+ let init_output = util::command::new_smol_command("git")
.arg("init")
.current_dir(directory)
- .output()?;
+ .output()
+ .await?;
if !init_output.status.success() {
bail!(
"failed to run `git init` in directory '{}'",
@@ -309,11 +314,12 @@ impl ExtensionBuilder {
);
}
- let remote_add_output = util::command::new_std_command("git")
+ let remote_add_output = util::command::new_smol_command("git")
.arg("--git-dir")
.arg(&git_dir)
.args(["remote", "add", "origin", url])
.output()
+ .await
.context("failed to execute `git remote add`")?;
if !remote_add_output.status.success() {
bail!(
@@ -323,19 +329,21 @@ impl ExtensionBuilder {
}
}
- let fetch_output = util::command::new_std_command("git")
+ let fetch_output = util::command::new_smol_command("git")
.arg("--git-dir")
.arg(&git_dir)
.args(["fetch", "--depth", "1", "origin", rev])
.output()
+ .await
.context("failed to execute `git fetch`")?;
- let checkout_output = util::command::new_std_command("git")
+ let checkout_output = util::command::new_smol_command("git")
.arg("--git-dir")
.arg(&git_dir)
.args(["checkout", rev])
.current_dir(directory)
.output()
+ .await
.context("failed to execute `git checkout`")?;
if !checkout_output.status.success() {
if !fetch_output.status.success() {
@@ -356,11 +364,12 @@ impl ExtensionBuilder {
Ok(())
}
- fn install_rust_wasm_target_if_needed(&self) -> Result<()> {
- let rustc_output = util::command::new_std_command("rustc")
+ async fn install_rust_wasm_target_if_needed(&self) -> Result<()> {
+ let rustc_output = util::command::new_smol_command("rustc")
.arg("--print")
.arg("sysroot")
.output()
+ .await
.context("failed to run rustc")?;
if !rustc_output.status.success() {
bail!(
@@ -374,11 +383,12 @@ impl ExtensionBuilder {
return Ok(());
}
- let output = util::command::new_std_command("rustup")
+ let output = util::command::new_smol_command("rustup")
.args(["target", "add", RUST_TARGET])
.stderr(Stdio::piped())
.stdout(Stdio::inherit())
.output()
+ .await
.context("failed to run `rustup target add`")?;
if !output.status.success() {
bail!(
crates/extension_cli/src/main.rs
@@ -2,7 +2,6 @@ use std::collections::{BTreeSet, HashMap};
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
-use std::process::Command;
use std::sync::Arc;
use ::fs::{CopyOptions, Fs, RealFs, copy_recursive};
@@ -13,6 +12,7 @@ use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
use language::LanguageConfig;
use reqwest_client::ReqwestClient;
use rpc::ExtensionProvides;
+use tokio::process::Command;
use tree_sitter::{Language, Query, WasmStore};
#[derive(Parser, Debug)]
@@ -89,6 +89,7 @@ async fn main() -> Result<()> {
.current_dir(&output_dir)
.args(["-czvf", "archive.tar.gz", "-C", "archive", "."])
.output()
+ .await
.context("failed to run tar")?;
if !tar_output.status.success() {
bail!(
crates/fs/src/fs.rs
@@ -12,7 +12,7 @@ use gpui::BackgroundExecutor;
use gpui::Global;
use gpui::ReadGlobal as _;
use std::borrow::Cow;
-use util::command::{new_smol_command, new_std_command};
+use util::command::new_smol_command;
#[cfg(unix)]
use std::os::fd::{AsFd, AsRawFd};
@@ -135,7 +135,8 @@ pub trait Fs: Send + Sync {
);
fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<dyn GitRepository>>;
- fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String) -> Result<()>;
+ async fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String)
+ -> Result<()>;
async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>;
fn is_fake(&self) -> bool;
async fn is_case_sensitive(&self) -> Result<bool>;
@@ -835,11 +836,16 @@ impl Fs for RealFs {
)?))
}
- fn git_init(&self, abs_work_directory_path: &Path, fallback_branch_name: String) -> Result<()> {
- let config = new_std_command("git")
+ async fn git_init(
+ &self,
+ abs_work_directory_path: &Path,
+ fallback_branch_name: String,
+ ) -> Result<()> {
+ let config = new_smol_command("git")
.current_dir(abs_work_directory_path)
.args(&["config", "--global", "--get", "init.defaultBranch"])
- .output()?;
+ .output()
+ .await?;
let branch_name;
@@ -849,11 +855,12 @@ impl Fs for RealFs {
branch_name = Cow::Borrowed(fallback_branch_name.as_str());
}
- new_std_command("git")
+ new_smol_command("git")
.current_dir(abs_work_directory_path)
.args(&["init", "-b"])
.arg(branch_name.trim())
- .output()?;
+ .output()
+ .await?;
Ok(())
}
@@ -2438,12 +2445,12 @@ impl Fs for FakeFs {
.log_err()
}
- fn git_init(
+ async fn git_init(
&self,
abs_work_directory_path: &Path,
_fallback_branch_name: String,
) -> Result<()> {
- smol::block_on(self.create_dir(&abs_work_directory_path.join(".git")))
+ self.create_dir(&abs_work_directory_path.join(".git")).await
}
async fn git_clone(&self, _repo_url: &str, _abs_work_directory: &Path) -> Result<()> {
crates/git/src/repository.rs
@@ -5,6 +5,7 @@ use crate::{Oid, SHORT_SHA_LENGTH};
use anyhow::{Context as _, Result, anyhow, bail};
use collections::HashMap;
use futures::future::BoxFuture;
+use futures::io::BufWriter;
use futures::{AsyncWriteExt, FutureExt as _, select_biased};
use git2::BranchType;
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString, Task};
@@ -12,20 +13,19 @@ use parking_lot::Mutex;
use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
+use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use std::borrow::Cow;
use std::ffi::{OsStr, OsString};
-use std::io::prelude::*;
use std::process::{ExitStatus, Stdio};
use std::{
cmp::Ordering,
future,
- io::{BufRead, BufReader, BufWriter, Read},
path::{Path, PathBuf},
sync::Arc,
};
use sum_tree::MapSeekTarget;
use thiserror::Error;
-use util::command::{new_smol_command, new_std_command};
+use util::command::new_smol_command;
use util::paths::PathStyle;
use util::rel_path::RelPath;
use util::{ResultExt, paths};
@@ -644,7 +644,7 @@ impl GitRepository for RealGitRepository {
self.executor
.spawn(async move {
let working_directory = working_directory?;
- let output = new_std_command("git")
+ let output = new_smol_command("git")
.current_dir(&working_directory)
.args([
"--no-optional-locks",
@@ -653,7 +653,8 @@ impl GitRepository for RealGitRepository {
"--format=%H%x00%B%x00%at%x00%ae%x00%an%x00",
&commit,
])
- .output()?;
+ .output()
+ .await?;
let output = std::str::from_utf8(&output.stdout)?;
let fields = output.split('\0').collect::<Vec<_>>();
if fields.len() != 6 {
@@ -681,7 +682,7 @@ impl GitRepository for RealGitRepository {
return future::ready(Err(anyhow!("no working directory"))).boxed();
};
cx.background_spawn(async move {
- let show_output = util::command::new_std_command("git")
+ let show_output = util::command::new_smol_command("git")
.current_dir(&working_directory)
.args([
"--no-optional-locks",
@@ -696,6 +697,7 @@ impl GitRepository for RealGitRepository {
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
+ .await
.context("starting git show process")?;
let show_stdout = String::from_utf8_lossy(&show_output.stdout);
@@ -703,7 +705,7 @@ impl GitRepository for RealGitRepository {
let parent_sha = lines.next().unwrap().trim().trim_end_matches('\0');
let changes = parse_git_diff_name_status(lines.next().unwrap_or(""));
- let mut cat_file_process = util::command::new_std_command("git")
+ let mut cat_file_process = util::command::new_smol_command("git")
.current_dir(&working_directory)
.args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"])
.stdin(Stdio::piped())
@@ -712,7 +714,6 @@ impl GitRepository for RealGitRepository {
.spawn()
.context("starting git cat-file process")?;
- use std::io::Write as _;
let mut files = Vec::<CommitFile>::new();
let mut stdin = BufWriter::with_capacity(512, cat_file_process.stdin.take().unwrap());
let mut stdout = BufReader::new(cat_file_process.stdout.take().unwrap());
@@ -726,28 +727,40 @@ impl GitRepository for RealGitRepository {
match status_code {
StatusCode::Modified => {
- writeln!(&mut stdin, "{commit}:{path}")?;
- writeln!(&mut stdin, "{parent_sha}:{path}")?;
+ stdin.write_all(commit.as_bytes()).await?;
+ stdin.write_all(b":").await?;
+ stdin.write_all(path.as_bytes()).await?;
+ stdin.write_all(b"\n").await?;
+ stdin.write_all(parent_sha.as_bytes()).await?;
+ stdin.write_all(b":").await?;
+ stdin.write_all(path.as_bytes()).await?;
+ stdin.write_all(b"\n").await?;
}
StatusCode::Added => {
- writeln!(&mut stdin, "{commit}:{path}")?;
+ stdin.write_all(commit.as_bytes()).await?;
+ stdin.write_all(b":").await?;
+ stdin.write_all(path.as_bytes()).await?;
+ stdin.write_all(b"\n").await?;
}
StatusCode::Deleted => {
- writeln!(&mut stdin, "{parent_sha}:{path}")?;
+ stdin.write_all(parent_sha.as_bytes()).await?;
+ stdin.write_all(b":").await?;
+ stdin.write_all(path.as_bytes()).await?;
+ stdin.write_all(b"\n").await?;
}
_ => continue,
}
- stdin.flush()?;
+ stdin.flush().await?;
info_line.clear();
- stdout.read_line(&mut info_line)?;
+ stdout.read_line(&mut info_line).await?;
let len = info_line.trim_end().parse().with_context(|| {
format!("invalid object size output from cat-file {info_line}")
})?;
let mut text = vec![0; len];
- stdout.read_exact(&mut text)?;
- stdout.read_exact(&mut newline)?;
+ stdout.read_exact(&mut text).await?;
+ stdout.read_exact(&mut newline).await?;
let text = String::from_utf8_lossy(&text).to_string();
let mut old_text = None;
@@ -755,13 +768,13 @@ impl GitRepository for RealGitRepository {
match status_code {
StatusCode::Modified => {
info_line.clear();
- stdout.read_line(&mut info_line)?;
+ stdout.read_line(&mut info_line).await?;
let len = info_line.trim_end().parse().with_context(|| {
format!("invalid object size output from cat-file {}", info_line)
})?;
let mut parent_text = vec![0; len];
- stdout.read_exact(&mut parent_text)?;
- stdout.read_exact(&mut newline)?;
+ stdout.read_exact(&mut parent_text).await?;
+ stdout.read_exact(&mut newline).await?;
old_text = Some(String::from_utf8_lossy(&parent_text).to_string());
new_text = Some(text);
}
@@ -962,7 +975,7 @@ impl GitRepository for RealGitRepository {
self.executor
.spawn(async move {
let working_directory = working_directory?;
- let mut process = new_std_command("git")
+ let mut process = new_smol_command("git")
.current_dir(&working_directory)
.args([
"--no-optional-locks",
@@ -980,12 +993,13 @@ impl GitRepository for RealGitRepository {
.context("no stdin for git cat-file subprocess")?;
let mut stdin = BufWriter::new(stdin);
for rev in &revs {
- writeln!(&mut stdin, "{rev}")?;
+ stdin.write_all(rev.as_bytes()).await?;
+ stdin.write_all(b"\n").await?;
}
- stdin.flush()?;
+ stdin.flush().await?;
drop(stdin);
- let output = process.wait_with_output()?;
+ let output = process.output().await?;
let output = std::str::from_utf8(&output.stdout)?;
let shas = output
.lines()
@@ -1024,10 +1038,11 @@ impl GitRepository for RealGitRepository {
let args = git_status_args(path_prefixes);
log::debug!("Checking for git status in {path_prefixes:?}");
self.executor.spawn(async move {
- let output = new_std_command(&git_binary_path)
+ let output = new_smol_command(&git_binary_path)
.current_dir(working_directory)
.args(args)
- .output()?;
+ .output()
+ .await?;
if output.status.success() {
let stdout = String::from_utf8_lossy(&output.stdout);
stdout.parse()
@@ -1043,10 +1058,11 @@ impl GitRepository for RealGitRepository {
let working_directory = self.working_directory();
self.executor
.spawn(async move {
- let output = new_std_command(&git_binary_path)
+ let output = new_smol_command(&git_binary_path)
.current_dir(working_directory?)
.args(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"])
- .output()?;
+ .output()
+ .await?;
if output.status.success() {
let stdout = String::from_utf8_lossy(&output.stdout);
stdout.parse()
crates/gpui/build.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
#![cfg_attr(any(not(target_os = "macos"), feature = "macos-blade"), allow(unused))]
//TODO: consider generating shader code for WGSL
crates/gpui/src/platform/linux/platform.rs
@@ -204,6 +204,10 @@ impl<P: LinuxClient + 'static> Platform for P {
app_path = app_path.display()
);
+ #[allow(
+ clippy::disallowed_methods,
+ reason = "We are restarting ourselves, using std command thus is fine"
+ )]
let restart_process = Command::new("/usr/bin/env")
.arg("bash")
.arg("-c")
@@ -403,11 +407,15 @@ impl<P: LinuxClient + 'static> Platform for P {
let path = path.to_owned();
self.background_executor()
.spawn(async move {
- let _ = std::process::Command::new("xdg-open")
+ let _ = smol::process::Command::new("xdg-open")
.arg(path)
.spawn()
.context("invoking xdg-open")
- .log_err();
+ .log_err()?
+ .status()
+ .await
+ .log_err()?;
+ Some(())
})
.detach();
}
@@ -591,10 +599,14 @@ pub(super) fn open_uri_internal(
if let Some(token) = activation_token.as_ref() {
command.env("XDG_ACTIVATION_TOKEN", token);
}
- match command.spawn() {
- Ok(_) => return,
+ let program = format!("{:?}", command.get_program());
+ match smol::process::Command::from(command).spawn() {
+ Ok(mut cmd) => {
+ cmd.status().await.log_err();
+ return;
+ }
Err(e) => {
- log::error!("Failed to open with {:?}: {}", command.get_program(), e)
+ log::error!("Failed to open with {}: {}", program, e)
}
}
}
crates/gpui/src/platform/mac/platform.rs
@@ -543,6 +543,10 @@ impl Platform for MacPlatform {
open "$1"
"#;
+ #[allow(
+ clippy::disallowed_methods,
+ reason = "We are restarting ourselves, using std command thus is fine"
+ )]
let restart_process = Command::new("/bin/bash")
.arg("-c")
.arg(script)
@@ -855,11 +859,14 @@ impl Platform for MacPlatform {
.lock()
.background_executor
.spawn(async move {
- let _ = std::process::Command::new("open")
+ if let Some(mut child) = smol::process::Command::new("open")
.arg(path)
.spawn()
.context("invoking open command")
- .log_err();
+ .log_err()
+ {
+ child.status().await.log_err();
+ }
})
.detach();
}
crates/gpui/src/platform/windows/platform.rs
@@ -349,6 +349,11 @@ impl Platform for WindowsPlatform {
pid,
app_path.display(),
);
+
+ #[allow(
+ clippy::disallowed_methods,
+ reason = "We are restarting ourselves, using std command thus is fine"
+ )]
let restart_process = util::command::new_std_command("powershell.exe")
.arg("-command")
.arg(script)
crates/media/build.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
#[cfg(target_os = "macos")]
fn main() {
use std::{env, path::PathBuf, process::Command};
crates/project/src/environment.rs
@@ -249,7 +249,7 @@ async fn load_shell_environment(
use util::shell_env;
let dir_ = dir.to_owned();
- let mut envs = match smol::unblock(move || shell_env::capture(&dir_)).await {
+ let mut envs = match shell_env::capture(&dir_).await {
Ok(envs) => envs,
Err(err) => {
util::log_err(&err);
crates/project/src/git_store.rs
@@ -1407,7 +1407,7 @@ impl GitStore {
GitStoreState::Local { fs, .. } => {
let fs = fs.clone();
cx.background_executor()
- .spawn(async move { fs.git_init(&path, fallback_branch_name) })
+ .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
}
GitStoreState::Remote {
upstream_client,
crates/project/src/terminals.rs
@@ -470,7 +470,7 @@ impl Project {
TerminalSettings::get(settings_location, cx)
}
- pub fn exec_in_shell(&self, command: String, cx: &App) -> Result<std::process::Command> {
+ pub fn exec_in_shell(&self, command: String, cx: &App) -> Result<smol::process::Command> {
let path = self.first_project_directory(cx);
let remote_client = self.remote_client.as_ref();
let settings = self.terminal_settings(&path, cx).clone();
@@ -508,6 +508,10 @@ impl Project {
Ok(command)
}
}
+ .map(|mut process| {
+ util::set_pre_exec_to_start_new_session(&mut process);
+ smol::process::Command::from(process)
+ })
}
pub fn local_terminal_handles(&self) -> &Vec<WeakEntity<terminal::Terminal>> {
crates/remote_server/build.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
use std::process::Command;
const ZED_MANIFEST: &str = include_str!("../zed/Cargo.toml");
crates/remote_server/src/unix.rs
@@ -547,32 +547,36 @@ pub(crate) fn execute_proxy(
.detach();
log::info!("starting proxy process. PID: {}", std::process::id());
+ smol::block_on(async {
+ let server_pid = check_pid_file(&server_paths.pid_file)
+ .await
+ .map_err(|source| ExecuteProxyError::CheckPidFile {
+ source,
+ path: server_paths.pid_file.clone(),
+ })?;
+ let server_running = server_pid.is_some();
+ if is_reconnecting {
+ if !server_running {
+ log::error!("attempted to reconnect, but no server running");
+ return Err(ExecuteProxyError::ServerNotRunning(
+ ProxyLaunchError::ServerNotRunning,
+ ));
+ }
+ } else {
+ if let Some(pid) = server_pid {
+ log::info!(
+ "proxy found server already running with PID {}. Killing process and cleaning up files...",
+ pid
+ );
+ kill_running_server(pid, &server_paths).await?;
+ }
- let server_pid = check_pid_file(&server_paths.pid_file).map_err(|source| {
- ExecuteProxyError::CheckPidFile {
- source,
- path: server_paths.pid_file.clone(),
- }
+ spawn_server(&server_paths)
+ .await
+ .map_err(ExecuteProxyError::SpawnServer)?;
+ };
+ Ok(())
})?;
- let server_running = server_pid.is_some();
- if is_reconnecting {
- if !server_running {
- log::error!("attempted to reconnect, but no server running");
- return Err(ExecuteProxyError::ServerNotRunning(
- ProxyLaunchError::ServerNotRunning,
- ));
- }
- } else {
- if let Some(pid) = server_pid {
- log::info!(
- "proxy found server already running with PID {}. Killing process and cleaning up files...",
- pid
- );
- kill_running_server(pid, &server_paths)?;
- }
-
- spawn_server(&server_paths).map_err(ExecuteProxyError::SpawnServer)?;
- };
let stdin_task = smol::spawn(async move {
let stdin = Async::new(std::io::stdin())?;
@@ -626,11 +630,12 @@ pub(crate) fn execute_proxy(
Ok(())
}
-fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<(), ExecuteProxyError> {
+async fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<(), ExecuteProxyError> {
log::info!("killing existing server with PID {}", pid);
- std::process::Command::new("kill")
+ smol::process::Command::new("kill")
.arg(pid.to_string())
.output()
+ .await
.map_err(|source| ExecuteProxyError::KillRunningServer { source, pid })?;
for file in [
@@ -666,7 +671,7 @@ pub(crate) enum SpawnServerError {
LaunchStatus { status: ExitStatus, paths: String },
}
-fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
+async fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
if paths.stdin_socket.exists() {
std::fs::remove_file(&paths.stdin_socket).map_err(SpawnServerError::RemoveStdinSocket)?;
}
@@ -678,7 +683,7 @@ fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
}
let binary_name = std::env::current_exe().map_err(SpawnServerError::CurrentExe)?;
- let mut server_process = std::process::Command::new(binary_name);
+ let mut server_process = smol::process::Command::new(binary_name);
server_process
.arg("run")
.arg("--log-file")
@@ -694,6 +699,7 @@ fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
let status = server_process
.status()
+ .await
.map_err(SpawnServerError::ProcessStatus)?;
if !status.success() {
@@ -733,7 +739,7 @@ pub(crate) struct CheckPidError {
pid: u32,
}
-fn check_pid_file(path: &Path) -> Result<Option<u32>, CheckPidError> {
+async fn check_pid_file(path: &Path) -> Result<Option<u32>, CheckPidError> {
let Some(pid) = std::fs::read_to_string(&path)
.ok()
.and_then(|contents| contents.parse::<u32>().ok())
@@ -742,10 +748,11 @@ fn check_pid_file(path: &Path) -> Result<Option<u32>, CheckPidError> {
};
log::debug!("Checking if process with PID {} exists...", pid);
- match std::process::Command::new("kill")
+ match smol::process::Command::new("kill")
.arg("-0")
.arg(pid.to_string())
.output()
+ .await
{
Ok(output) if output.status.success() => {
log::debug!(
crates/system_specs/src/system_specs.rs
@@ -146,6 +146,10 @@ impl Display for SystemSpecs {
fn try_determine_available_gpus() -> Option<String> {
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
{
+ #[allow(
+ clippy::disallowed_methods,
+ reason = "we are not running in an executor"
+ )]
std::process::Command::new("vulkaninfo")
.args(&["--summary"])
.output()
crates/util/src/shell_env.rs
@@ -5,7 +5,7 @@ use collections::HashMap;
/// Capture all environment variables from the login shell.
#[cfg(unix)]
-pub fn capture(directory: &std::path::Path) -> Result<collections::HashMap<String, String>> {
+pub async fn capture(directory: &std::path::Path) -> Result<collections::HashMap<String, String>> {
use std::os::unix::process::CommandExt;
use std::process::Stdio;
@@ -59,7 +59,7 @@ pub fn capture(directory: &std::path::Path) -> Result<collections::HashMap<Strin
super::set_pre_exec_to_start_new_session(&mut command);
- let (env_output, process_output) = spawn_and_read_fd(command, fd_num)?;
+ let (env_output, process_output) = spawn_and_read_fd(command, fd_num).await?;
let env_output = String::from_utf8_lossy(&env_output);
anyhow::ensure!(
@@ -77,7 +77,7 @@ pub fn capture(directory: &std::path::Path) -> Result<collections::HashMap<Strin
}
#[cfg(unix)]
-fn spawn_and_read_fd(
+async fn spawn_and_read_fd(
mut command: std::process::Command,
child_fd: std::os::fd::RawFd,
) -> anyhow::Result<(Vec<u8>, std::process::Output)> {
@@ -91,13 +91,12 @@ fn spawn_and_read_fd(
child_fd,
}])?;
- let process = command.spawn()?;
- drop(command);
+ let process = smol::process::Command::from(command).spawn()?;
let mut buffer = Vec::new();
reader.read_to_end(&mut buffer)?;
- Ok((buffer, process.wait_with_output()?))
+ Ok((buffer, process.output().await?))
}
pub fn print_env() {
crates/util/src/util.rs
@@ -308,7 +308,7 @@ pub fn get_shell_safe_zed_path() -> anyhow::Result<String> {
}
#[cfg(unix)]
-pub fn load_login_shell_environment() -> Result<()> {
+pub async fn load_login_shell_environment() -> Result<()> {
load_shell_from_passwd().log_err();
// If possible, we want to `cd` in the user's `$HOME` to trigger programs
@@ -316,7 +316,7 @@ pub fn load_login_shell_environment() -> Result<()> {
// into shell's `cd` command (and hooks) to manipulate env.
// We do this so that we get the env a user would have when spawning a shell
// in home directory.
- for (name, value) in shell_env::capture(paths::home_dir())? {
+ for (name, value) in shell_env::capture(paths::home_dir()).await? {
unsafe { env::set_var(&name, &value) };
}
crates/vim/src/command.rs
@@ -6,6 +6,7 @@ use editor::{
actions::{SortLinesCaseInsensitive, SortLinesCaseSensitive},
display_map::ToDisplayPoint,
};
+use futures::AsyncWriteExt as _;
use gpui::{Action, App, AppContext as _, Context, Global, Keystroke, Task, Window, actions};
use itertools::Itertools;
use language::Point;
@@ -16,7 +17,6 @@ use schemars::JsonSchema;
use search::{BufferSearchBar, SearchOptions};
use serde::Deserialize;
use std::{
- io::Write,
iter::Peekable,
ops::{Deref, Range},
path::Path,
@@ -1966,7 +1966,6 @@ impl ShellExec {
process.stdin(Stdio::null());
};
- util::set_pre_exec_to_start_new_session(&mut process);
let is_read = self.is_read;
let task = cx.spawn_in(window, async move |vim, cx| {
@@ -1984,18 +1983,16 @@ impl ShellExec {
let range = range.clone();
cx.background_spawn(async move {
for chunk in snapshot.text_for_range(range) {
- if stdin.write_all(chunk.as_bytes()).log_err().is_none() {
+ if stdin.write_all(chunk.as_bytes()).await.log_err().is_none() {
return;
}
}
- stdin.flush().log_err();
+ stdin.flush().await.log_err();
})
.detach();
};
- let output = cx
- .background_spawn(async move { running.wait_with_output() })
- .await;
+ let output = cx.background_spawn(running.output()).await;
let Some(output) = output.log_err() else {
vim.update_in(cx, |vim, window, cx| {
crates/zed/build.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
use std::process::Command;
fn main() {
crates/zed/src/main.rs
@@ -339,7 +339,7 @@ pub fn main() {
app.background_executor()
.spawn(async {
#[cfg(unix)]
- util::load_login_shell_environment().log_err();
+ util::load_login_shell_environment().await.log_err();
shell_env_loaded_tx.send(()).ok();
})
.detach()
tooling/perf/Cargo.toml
@@ -23,6 +23,7 @@ allow_attributes_without_reason = "deny" # This covers `expect` also, since we d
let_underscore_must_use = "forbid"
undocumented_unsafe_blocks = "forbid"
missing_safety_doc = "forbid"
+disallowed_methods = { level = "allow", priority = 1}
[dependencies]
collections.workspace = true
tooling/xtask/src/tasks/clippy.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_methods, reason = "tooling is exempt")]
use std::process::Command;
use anyhow::{Context as _, Result, bail};