Detailed changes
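
The hunks below consistently replace hand-rolled `return Err(anyhow!(..))` and `.ok_or_else(|| anyhow!(..))` constructions with `anyhow`'s own helpers: `Context::context` / `with_context` (which also work on `Option`), `bail!`, and `ensure!`. A minimal sketch of the before/after shape, using illustrative names that are not taken from the diff:

```rust
use anyhow::{Context as _, Result};

fn lookup(values: &[i32], index: usize) -> Result<i32> {
    // Before: values.get(index).ok_or_else(|| anyhow::anyhow!("missing value"))?
    // After: `Context` is implemented for `Option`, so it converts directly.
    let value = *values.get(index).context("missing value")?;

    // Before: if value < 0 { return Err(anyhow::anyhow!("negative value")); }
    anyhow::ensure!(value >= 0, "negative value: {value}");
    Ok(value)
}
```
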
@@ -1212,12 +1212,7 @@ impl AgentPanel {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let Some(workspace) = self
- .workspace
- .upgrade()
- .ok_or_else(|| anyhow!("workspace dropped"))
- .log_err()
- else {
+ let Some(workspace) = self.workspace.upgrade() else {
return;
};
@@ -1,7 +1,7 @@
use crate::context::ContextLoadResult;
use crate::inline_prompt_editor::CodegenStatus;
use crate::{context::load_context, context_store::ContextStore};
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use assistant_settings::AssistantSettings;
use client::telemetry::Telemetry;
use collections::HashSet;
@@ -419,16 +419,16 @@ impl CodegenAlternative {
if start_buffer.remote_id() == end_buffer.remote_id() {
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
} else {
- return Err(anyhow::anyhow!("invalid transformation range"));
+ anyhow::bail!("invalid transformation range");
}
} else {
- return Err(anyhow::anyhow!("invalid transformation range"));
+ anyhow::bail!("invalid transformation range");
};
let prompt = self
.builder
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
- .map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
+ .context("generating content prompt")?;
let context_task = self.context_store.as_ref().map(|context_store| {
if let Some(project) = self.project.upgrade() {
@@ -2,7 +2,7 @@ use std::ops::Range;
use std::path::{Path, PathBuf};
use std::sync::Arc;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_context_editor::AssistantContext;
use collections::{HashSet, IndexSet};
use futures::{self, FutureExt};
@@ -142,17 +142,12 @@ impl ContextStore {
remove_if_exists: bool,
cx: &mut Context<Self>,
) -> Result<Option<AgentContextHandle>> {
- let Some(project) = self.project.upgrade() else {
- return Err(anyhow!("failed to read project"));
- };
-
- let Some(entry_id) = project
+ let project = self.project.upgrade().context("failed to read project")?;
+ let entry_id = project
.read(cx)
.entry_for_path(project_path, cx)
.map(|entry| entry.id)
- else {
- return Err(anyhow!("no entry found for directory context"));
- };
+ .context("no entry found for directory context")?;
let context_id = self.next_context_id.post_inc();
let context = AgentContextHandle::Directory(DirectoryContextHandle {
@@ -1,6 +1,6 @@
use std::{collections::VecDeque, path::Path, sync::Arc};
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
use chrono::{DateTime, Utc};
use futures::future::{TryFutureExt as _, join_all};
@@ -130,7 +130,10 @@ impl HistoryStore {
.boxed()
})
.unwrap_or_else(|_| {
- async { Err(anyhow!("no thread store")) }.boxed()
+ async {
+ anyhow::bail!("no thread store");
+ }
+ .boxed()
}),
SerializedRecentEntry::Context(id) => context_store
.update(cx, |context_store, cx| {
@@ -140,7 +143,10 @@ impl HistoryStore {
.boxed()
})
.unwrap_or_else(|_| {
- async { Err(anyhow!("no context store")) }.boxed()
+ async {
+ anyhow::bail!("no context store");
+ }
+ .boxed()
}),
});
let entries = join_all(entries)
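
The `HistoryStore` hunk above swaps `async { Err(anyhow!(..)) }` for an async block that uses `bail!`. A minimal sketch of that shape, assuming the `futures` crate for the boxed-future type:

```rust
use anyhow::Result;
use futures::{FutureExt as _, future::BoxFuture};

fn missing_store() -> BoxFuture<'static, Result<u64>> {
    async {
        // `bail!` expands to an early `return Err(..)`; the Ok type of the
        // async block is still pinned down by the surrounding signature.
        anyhow::bail!("no thread store");
    }
    .boxed()
}
```
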
@@ -1630,7 +1630,7 @@ impl Thread {
CompletionRequestStatus::Failed {
code, message, request_id
} => {
- return Err(anyhow!("completion request failed. request_id: {request_id}, code: {code}, message: {message}"));
+ anyhow::bail!("completion request failed. request_id: {request_id}, code: {code}, message: {message}");
}
CompletionRequestStatus::UsageUpdated {
amount, limit
@@ -419,7 +419,7 @@ impl ThreadStore {
let thread = database
.try_find_thread(id.clone())
.await?
- .ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
+ .with_context(|| format!("no thread found with ID: {id:?}"))?;
let thread = this.update_in(cx, |this, window, cx| {
cx.new(|cx| {
@@ -699,20 +699,14 @@ impl SerializedThread {
SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
saved_thread_json,
)?),
- _ => Err(anyhow!(
- "unrecognized serialized thread version: {}",
- version
- )),
+ _ => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
},
None => {
let saved_thread =
serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
Ok(saved_thread.upgrade())
}
- version => Err(anyhow!(
- "unrecognized serialized thread version: {:?}",
- version
- )),
+ version => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
}
}
}
@@ -90,7 +90,7 @@ impl Model {
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else {
- Err(anyhow!("invalid model id"))
+ anyhow::bail!("invalid model id {id}");
}
}
@@ -385,10 +385,10 @@ impl RateLimitInfo {
}
}
-fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
+fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> anyhow::Result<&'a str> {
Ok(headers
.get(key)
- .ok_or_else(|| anyhow!("missing header `{key}`"))?
+ .with_context(|| format!("missing header `{key}`"))?
.to_str()?)
}
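
The `get_header` hunk above moves from `ok_or_else` to `with_context`. The difference between the two `Context` methods is only when the message is built: `context` takes an already-constructed value, while `with_context` takes a closure that runs on the error path, which is the right choice when the message needs a `format!` allocation. A small sketch with an assumed lookup:

```rust
use anyhow::{Context as _, Result};
use std::collections::HashMap;

fn read_header<'a>(key: &str, headers: &'a HashMap<String, String>) -> Result<&'a str> {
    headers
        .get(key)
        // The closure only runs when the header is missing, so the formatted
        // string is not allocated on the happy path.
        .with_context(|| format!("missing header `{key}`"))
        .map(|value| value.as_str())
}
```
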
@@ -1,6 +1,6 @@
// This crate was essentially pulled out verbatim from main `zed` crate to avoid having to run RustEmbed macro whenever zed has to be rebuilt. It saves a second or two on an incremental build.
-use anyhow::anyhow;
+use anyhow::Context as _;
use gpui::{App, AssetSource, Result, SharedString};
use rust_embed::RustEmbed;
@@ -21,7 +21,7 @@ impl AssetSource for Assets {
fn load(&self, path: &str) -> Result<Option<std::borrow::Cow<'static, [u8]>>> {
Self::get(path)
.map(|f| Some(f.data))
- .ok_or_else(|| anyhow!("could not find asset at path \"{}\"", path))
+ .with_context(|| format!("loading asset at path {path:?}"))
}
fn list(&self, path: &str) -> Result<Vec<SharedString>> {
@@ -39,7 +39,7 @@ impl AssetSource for Assets {
impl Assets {
/// Populate the [`TextSystem`] of the given [`AppContext`] with all `.ttf` fonts in the `fonts` directory.
- pub fn load_fonts(&self, cx: &App) -> gpui::Result<()> {
+ pub fn load_fonts(&self, cx: &App) -> anyhow::Result<()> {
let font_paths = self.list("fonts")?;
let mut embedded_fonts = Vec::new();
for font_path in font_paths {
@@ -1,7 +1,7 @@
#[cfg(test)]
mod context_tests;
-use anyhow::{Context as _, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use assistant_settings::AssistantSettings;
use assistant_slash_command::{
SlashCommandContent, SlashCommandEvent, SlashCommandLine, SlashCommandOutputSection,
@@ -3011,7 +3011,7 @@ impl SavedContext {
let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
match saved_context_json
.get("version")
- .ok_or_else(|| anyhow!("version not found"))?
+ .context("version not found")?
{
serde_json::Value::String(version) => match version.as_str() {
SavedContext::VERSION => {
@@ -3032,9 +3032,9 @@ impl SavedContext {
serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
Ok(saved_context.upgrade())
}
- _ => Err(anyhow!("unrecognized saved context version: {}", version)),
+ _ => anyhow::bail!("unrecognized saved context version: {version:?}"),
},
- _ => Err(anyhow!("version not found on saved context")),
+ _ => anyhow::bail!("version not found on saved context"),
}
}
@@ -2,7 +2,7 @@ use crate::{
AssistantContext, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext,
SavedContextMetadata,
};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use assistant_slash_command::{SlashCommandId, SlashCommandWorkingSet};
use client::{Client, TypedEnvelope, proto, telemetry::Telemetry};
use clock::ReplicaId;
@@ -164,16 +164,18 @@ impl ContextStore {
) -> Result<proto::OpenContextResponse> {
let context_id = ContextId::from_proto(envelope.payload.context_id);
let operations = this.update(&mut cx, |this, cx| {
- if this.project.read(cx).is_via_collab() {
- return Err(anyhow!("only the host contexts can be opened"));
- }
+ anyhow::ensure!(
+ !this.project.read(cx).is_via_collab(),
+ "only the host contexts can be opened"
+ );
let context = this
.loaded_context_for_id(&context_id, cx)
.context("context not found")?;
- if context.read(cx).replica_id() != ReplicaId::default() {
- return Err(anyhow!("context must be opened via the host"));
- }
+ anyhow::ensure!(
+ context.read(cx).replica_id() == ReplicaId::default(),
+ "context must be opened via the host"
+ );
anyhow::Ok(
context
@@ -193,9 +195,10 @@ impl ContextStore {
mut cx: AsyncApp,
) -> Result<proto::CreateContextResponse> {
let (context_id, operations) = this.update(&mut cx, |this, cx| {
- if this.project.read(cx).is_via_collab() {
- return Err(anyhow!("can only create contexts as the host"));
- }
+ anyhow::ensure!(
+ !this.project.read(cx).is_via_collab(),
+ "can only create contexts as the host"
+ );
let context = this.create(cx);
let context_id = context.read(cx).id().clone();
@@ -237,9 +240,10 @@ impl ContextStore {
mut cx: AsyncApp,
) -> Result<proto::SynchronizeContextsResponse> {
this.update(&mut cx, |this, cx| {
- if this.project.read(cx).is_via_collab() {
- return Err(anyhow!("only the host can synchronize contexts"));
- }
+ anyhow::ensure!(
+ !this.project.read(cx).is_via_collab(),
+ "only the host can synchronize contexts"
+ );
let mut local_versions = Vec::new();
for remote_version_proto in envelope.payload.contexts {
@@ -370,7 +374,7 @@ impl ContextStore {
) -> Task<Result<Entity<AssistantContext>>> {
let project = self.project.read(cx);
let Some(project_id) = project.remote_id() else {
- return Task::ready(Err(anyhow!("project was not remote")));
+ return Task::ready(Err(anyhow::anyhow!("project was not remote")));
};
let replica_id = project.replica_id();
@@ -533,7 +537,7 @@ impl ContextStore {
) -> Task<Result<Entity<AssistantContext>>> {
let project = self.project.read(cx);
let Some(project_id) = project.remote_id() else {
- return Task::ready(Err(anyhow!("project was not remote")));
+ return Task::ready(Err(anyhow::anyhow!("project was not remote")));
};
if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
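
Several of the `ContextStore` RPC handlers above swap `if cond { return Err(anyhow!(..)) }` for `anyhow::ensure!`. Note that `ensure!` states the condition that must hold, so the boolean is inverted relative to the old guard. A minimal sketch:

```rust
use anyhow::Result;

fn check_host(is_via_collab: bool) -> Result<()> {
    // Old shape: if is_via_collab { return Err(anyhow!("only the host ...")); }
    anyhow::ensure!(!is_via_collab, "only the host contexts can be opened");
    Ok(())
}
```
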
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
SlashCommandOutputSection, SlashCommandResult,
@@ -84,9 +84,7 @@ impl SlashCommand for ContextServerSlashCommand {
if let Some(server) = self.store.read(cx).get_running_server(&server_id) {
cx.foreground_executor().spawn(async move {
- let Some(protocol) = server.client() else {
- return Err(anyhow!("Context server not initialized"));
- };
+ let protocol = server.client().context("Context server not initialized")?;
let completion_result = protocol
.completion(
@@ -139,21 +137,16 @@ impl SlashCommand for ContextServerSlashCommand {
let store = self.store.read(cx);
if let Some(server) = store.get_running_server(&server_id) {
cx.foreground_executor().spawn(async move {
- let Some(protocol) = server.client() else {
- return Err(anyhow!("Context server not initialized"));
- };
+ let protocol = server.client().context("Context server not initialized")?;
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
- // Check that there are only user roles
- if result
- .messages
- .iter()
- .any(|msg| !matches!(msg.role, context_server::types::Role::User))
- {
- return Err(anyhow!(
- "Prompt contains non-user roles, which is not supported"
- ));
- }
+ anyhow::ensure!(
+ result
+ .messages
+ .iter()
+ .all(|msg| matches!(msg.role, context_server::types::Role::User)),
+ "Prompt contains non-user roles, which is not supported"
+ );
// Extract text from user messages into a single prompt string
let mut prompt = result
@@ -192,9 +185,7 @@ impl SlashCommand for ContextServerSlashCommand {
}
fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String, String)> {
- if arguments.is_empty() {
- return Err(anyhow!("No arguments given"));
- }
+ anyhow::ensure!(!arguments.is_empty(), "No arguments given");
match &prompt.arguments {
Some(args) if args.len() == 1 => {
@@ -202,16 +193,16 @@ fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String,
let arg_value = arguments.join(" ");
Ok((arg_name, arg_value))
}
- Some(_) => Err(anyhow!("Prompt must have exactly one argument")),
- None => Err(anyhow!("Prompt has no arguments")),
+ Some(_) => anyhow::bail!("Prompt must have exactly one argument"),
+ None => anyhow::bail!("Prompt has no arguments"),
}
}
fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result<HashMap<String, String>> {
match &prompt.arguments {
- Some(args) if args.len() > 1 => Err(anyhow!(
- "Prompt has more than one argument, which is not supported"
- )),
+ Some(args) if args.len() > 1 => {
+ anyhow::bail!("Prompt has more than one argument, which is not supported");
+ }
Some(args) if args.len() == 1 => {
if !arguments.is_empty() {
let mut map = HashMap::default();
@@ -220,15 +211,15 @@ fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result<HashMap<Str
} else if arguments.is_empty() && args[0].required == Some(false) {
Ok(HashMap::default())
} else {
- Err(anyhow!("Prompt expects argument but none given"))
+ anyhow::bail!("Prompt expects argument but none given");
}
}
Some(_) | None => {
- if arguments.is_empty() {
- Ok(HashMap::default())
- } else {
- Err(anyhow!("Prompt expects no arguments but some were given"))
- }
+ anyhow::ensure!(
+ arguments.is_empty(),
+ "Prompt expects no arguments but some were given"
+ );
+ Ok(HashMap::default())
}
}
}
@@ -118,10 +118,7 @@ impl SlashCommand for DeltaSlashCommand {
}
}
- if !changes_detected {
- return Err(anyhow!("no new changes detected"));
- }
-
+ anyhow::ensure!(changes_detected, "no new changes detected");
Ok(output.to_event_stream())
})
}
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
@@ -189,7 +189,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
window.spawn(cx, async move |_| {
task.await?
.map(|output| output.to_event_stream())
- .ok_or_else(|| anyhow!("No diagnostics found"))
+ .context("No diagnostics found")
})
}
}
@@ -3,7 +3,7 @@ use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::time::Duration;
-use anyhow::{Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow, bail};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
@@ -52,15 +52,16 @@ impl DocsSlashCommand {
.is_none()
{
let index_provider_deps = maybe!({
- let workspace = workspace.clone().ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace
+ .as_ref()
+ .context("no workspace")?
.upgrade()
- .ok_or_else(|| anyhow!("workspace was dropped"))?;
+ .context("workspace dropped")?;
let project = workspace.read(cx).project().clone();
let fs = project.read(cx).fs().clone();
let cargo_workspace_root = Self::path_to_cargo_toml(project, cx)
.and_then(|path| path.parent().map(|path| path.to_path_buf()))
- .ok_or_else(|| anyhow!("no Cargo workspace root found"))?;
+ .context("no Cargo workspace root found")?;
anyhow::Ok((fs, cargo_workspace_root))
});
@@ -78,10 +79,11 @@ impl DocsSlashCommand {
.is_none()
{
let http_client = maybe!({
- let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace
+ .as_ref()
+ .context("no workspace")?
.upgrade()
- .ok_or_else(|| anyhow!("workspace was dropped"))?;
+ .context("workspace was dropped")?;
let project = workspace.read(cx).project().clone();
anyhow::Ok(project.read(cx).client().http_client())
});
@@ -174,7 +176,7 @@ impl SlashCommand for DocsSlashCommand {
let args = DocsSlashCommandArgs::parse(arguments);
let store = args
.provider()
- .ok_or_else(|| anyhow!("no docs provider specified"))
+ .context("no docs provider specified")
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
cx.background_spawn(async move {
fn build_completions(items: Vec<String>) -> Vec<ArgumentCompletion> {
@@ -287,7 +289,7 @@ impl SlashCommand for DocsSlashCommand {
let task = cx.background_spawn({
let store = args
.provider()
- .ok_or_else(|| anyhow!("no docs provider specified"))
+ .context("no docs provider specified")
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
async move {
let (provider, key) = match args.clone() {
@@ -3,7 +3,7 @@ use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow, bail};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
@@ -230,7 +230,10 @@ fn collect_files(
})
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
else {
- return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
+ return futures::stream::once(async {
+ anyhow::bail!("invalid path");
+ })
+ .boxed();
};
let project_handle = project.downgrade();
@@ -1,5 +1,5 @@
use crate::ActionLog;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use gpui::{AsyncApp, Entity};
use language::{OutlineItem, ParseStatus};
use project::Project;
@@ -22,7 +22,7 @@ pub async fn file_outline(
let project_path = project.read_with(cx, |project, cx| {
project
.find_project_path(&path, cx)
- .ok_or_else(|| anyhow!("Path {path} not found in project"))
+ .with_context(|| format!("Path {path} not found in project"))
})??;
project
@@ -41,9 +41,9 @@ pub async fn file_outline(
}
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
- let Some(outline) = snapshot.outline(None) else {
- return Err(anyhow!("No outline information available for this file."));
- };
+ let outline = snapshot
+ .outline(None)
+        .with_context(|| format!("No outline information available for this file at path {path}"))?;

render_outline(
outline
@@ -27,12 +27,10 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
const UNSUPPORTED_KEYS: [&str; 4] = ["if", "then", "else", "$ref"];
for key in UNSUPPORTED_KEYS {
- if obj.contains_key(key) {
- return Err(anyhow::anyhow!(
- "Schema cannot be made compatible because it contains \"{}\" ",
- key
- ));
- }
+ anyhow::ensure!(
+ !obj.contains_key(key),
+ "Schema cannot be made compatible because it contains \"{key}\""
+ );
}
const KEYS_TO_REMOVE: [&str; 5] = [
@@ -1,5 +1,5 @@
use crate::schema::json_schema_for;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::AnyWindowHandle;
use gpui::{App, AppContext, Entity, Task};
@@ -107,17 +107,13 @@ impl Tool for CopyPathTool {
});
cx.background_spawn(async move {
- match copy_task.await {
- Ok(_) => Ok(
- format!("Copied {} to {}", input.source_path, input.destination_path).into(),
- ),
- Err(err) => Err(anyhow!(
- "Failed to copy {} to {}: {}",
- input.source_path,
- input.destination_path,
- err
- )),
- }
+ let _ = copy_task.await.with_context(|| {
+ format!(
+ "Copying {} to {}",
+ input.source_path, input.destination_path
+ )
+ })?;
+ Ok(format!("Copied {} to {}", input.source_path, input.destination_path).into())
})
.into()
}
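
The `CopyPathTool` change above replaces a `match` on the task result with `with_context`, which keeps the underlying error in the chain instead of flattening it into a new string. A sketch of that shape with an assumed fallible task:

```rust
use anyhow::{Context as _, Result};

async fn copy_file(source: &str, destination: &str) -> Result<String> {
    // Stand-in for the project's copy task; not part of the diff.
    async fn copy_task(_source: &str, _destination: &str) -> Result<()> {
        Ok(())
    }

    copy_task(source, destination)
        .await
        // The original cause stays in the error chain; the context only
        // records what we were trying to do.
        .with_context(|| format!("Copying {source} to {destination}"))?;
    Ok(format!("Copied {source} to {destination}"))
}
```
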
@@ -1,5 +1,5 @@
use crate::schema::json_schema_for;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::AnyWindowHandle;
use gpui::{App, Entity, Task};
@@ -86,7 +86,7 @@ impl Tool for CreateDirectoryTool {
project.create_entry(project_path.clone(), true, cx)
})?
.await
- .map_err(|err| anyhow!("Unable to create directory {destination_path}: {err}"))?;
+ .with_context(|| format!("Creating directory {destination_path}"))?;
Ok(format!("Created directory {destination_path}").into())
})
@@ -1,5 +1,5 @@
use crate::schema::json_schema_for;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use futures::{SinkExt, StreamExt, channel::mpsc};
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
@@ -122,19 +122,17 @@ impl Tool for DeletePathTool {
}
}
- let delete = project.update(cx, |project, cx| {
- project.delete_file(project_path, false, cx)
- })?;
-
- match delete {
- Some(deletion_task) => match deletion_task.await {
- Ok(()) => Ok(format!("Deleted {path_str}").into()),
- Err(err) => Err(anyhow!("Failed to delete {path_str}: {err}")),
- },
- None => Err(anyhow!(
- "Couldn't delete {path_str} because that path isn't in this project."
- )),
- }
+ let deletion_task = project
+ .update(cx, |project, cx| {
+ project.delete_file(project_path, false, cx)
+ })?
+ .with_context(|| {
+ format!("Couldn't delete {path_str} because that path isn't in this project.")
+ })?;
+ deletion_task
+ .await
+ .with_context(|| format!("Deleting {path_str}"))?;
+ Ok(format!("Deleted {path_str}").into())
})
.into()
}
@@ -6,7 +6,6 @@ use crate::{
list_directory_tool::ListDirectoryToolInput,
};
use Role::*;
-use anyhow::anyhow;
use assistant_tool::ToolRegistry;
use client::{Client, UserStore};
use collections::HashMap;
@@ -1207,10 +1206,7 @@ impl EvalAssertion {
}
}
- Err(anyhow!(
- "No score found in response. Raw output: {}",
- output
- ))
+ anyhow::bail!("No score found in response. Raw output: {output}");
})
}
@@ -98,21 +98,21 @@ impl BlameEntry {
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
- .ok_or_else(|| anyhow!("failed to parse sha"))?;
+ .with_context(|| format!("parsing sha from {line}"))?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
+ .with_context(|| format!("parsing original line number from {line}"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing final line number from {line}"))?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing line count from {line}"))?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
@@ -80,7 +80,7 @@ async fn run_git_blame(
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
- .map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
+ .context("starting git blame process")?;
let stdin = child
.stdin
@@ -92,10 +92,7 @@ async fn run_git_blame(
}
stdin.flush().await?;
- let output = child
- .output()
- .await
- .map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
+ let output = child.output().await.context("reading git blame output")?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
@@ -103,7 +100,7 @@ async fn run_git_blame(
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
return Ok(String::new());
}
- return Err(anyhow!("git blame process failed: {}", stderr));
+ anyhow::bail!("git blame process failed: {stderr}");
}
Ok(String::from_utf8(output.stdout)?)
@@ -144,21 +141,21 @@ impl BlameEntry {
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
- .ok_or_else(|| anyhow!("failed to parse sha"))?;
+ .with_context(|| format!("parsing sha from {line}"))?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
+ .with_context(|| format!("parsing original line number from {line}"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing final line number from {line}"))?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing line count from {line}"))?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
@@ -5272,7 +5272,7 @@ impl Editor {
task.await?;
}
- Ok::<_, anyhow::Error>(())
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
}
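
The editor hunk above replaces the `Ok::<_, anyhow::Error>(())` turbofish with `anyhow::Ok(())`, which pins the error type of a block or closure without spelling it out. A minimal sketch:

```rust
use anyhow::Result;

fn run() -> Result<()> {
    let work = || {
        step()?;
        // Fixes the closure's error type to `anyhow::Error` without a turbofish.
        anyhow::Ok(())
    };
    work()
}

fn step() -> Result<()> {
    Ok(())
}
```
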
@@ -10369,8 +10369,8 @@ impl Editor {
.map(|line| {
line.strip_prefix(&line_prefix)
.or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
- .ok_or_else(|| {
- anyhow!("line did not start with prefix {line_prefix:?}: {line:?}")
+ .with_context(|| {
+ format!("line did not start with prefix {line_prefix:?}: {line:?}")
})
})
.collect::<Result<Vec<_>, _>>()
@@ -16944,7 +16944,7 @@ impl Editor {
Err(err) => {
let message = format!("Failed to copy permalink: {err}");
- Err::<(), anyhow::Error>(err).log_err();
+ anyhow::Result::<()>::Err(err).log_err();
if let Some(workspace) = workspace {
workspace
@@ -16999,7 +16999,7 @@ impl Editor {
Err(err) => {
let message = format!("Failed to open permalink: {err}");
- Err::<(), anyhow::Error>(err).log_err();
+ anyhow::Result::<()>::Err(err).log_err();
if let Some(workspace) = workspace {
workspace
@@ -80,7 +80,7 @@ async fn run_git_blame(
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
- .map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
+ .context("starting git blame process")?;
let stdin = child
.stdin
@@ -92,10 +92,7 @@ async fn run_git_blame(
}
stdin.flush().await?;
- let output = child
- .output()
- .await
- .map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
+ let output = child.output().await.context("reading git blame output")?;
handle_command_output(output)
}
@@ -107,7 +104,7 @@ fn handle_command_output(output: std::process::Output) -> Result<String> {
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
return Ok(String::new());
}
- return Err(anyhow!("git blame process failed: {}", stderr));
+ anyhow::bail!("git blame process failed: {stderr}");
}
Ok(String::from_utf8(output.stdout)?)
@@ -148,21 +145,21 @@ impl BlameEntry {
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
- .ok_or_else(|| anyhow!("failed to parse sha"))?;
+ .with_context(|| format!("parsing sha from {line}"))?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
+ .with_context(|| format!("parsing original line number from {line}"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing final line number from {line}"))?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing line count from {line}"))?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
@@ -80,7 +80,7 @@ async fn run_git_blame(
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
- .map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
+ .context("starting git blame process")?;
let stdin = child
.stdin
@@ -92,10 +92,7 @@ async fn run_git_blame(
}
stdin.flush().await?;
- let output = child
- .output()
- .await
- .map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
+ let output = child.output().await.context("reading git blame output")?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
@@ -103,7 +100,7 @@ async fn run_git_blame(
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
return Ok(String::new());
}
- return Err(anyhow!("git blame process failed: {}", stderr));
+ anyhow::bail!("git blame process failed: {stderr}");
}
Ok(String::from_utf8(output.stdout)?)
@@ -144,21 +141,21 @@ impl BlameEntry {
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
- .ok_or_else(|| anyhow!("failed to parse sha"))?;
+ .with_context(|| format!("parsing sha from {line}"))?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
+ .with_context(|| format!("parsing original line number from {line}"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing final line number from {line}"))?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .with_context(|| format!("parsing line count from {line}"))?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
@@ -20,7 +20,7 @@ use std::{
#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))]
use anyhow::Error;
-use anyhow::{Context, Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use etcetera::BaseStrategy as _;
use fs4::fs_std::FileExt;
use indoc::indoc;
@@ -875,16 +875,13 @@ impl Loader {
FileExt::unlock(lock_file)?;
fs::remove_file(lock_path)?;
-
- if output.status.success() {
- Ok(())
- } else {
- Err(anyhow!(
- "Parser compilation failed.\nStdout: {}\nStderr: {}",
- String::from_utf8_lossy(&output.stdout),
- String::from_utf8_lossy(&output.stderr)
- ))
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Parser compilation failed.\nStdout: {}\nStderr: {}",
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(())
}
#[cfg(unix)]
@@ -941,17 +938,13 @@ impl Loader {
.map(|f| format!(" `{f}`"))
.collect::<Vec<_>>()
.join("\n");
-
-                return Err(anyhow!(format!(
-                    indoc! {"
-                    Missing required functions in the external scanner, parsing won't work without these!
-
-                    {}
-
-                    You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners
-                    "},
-                    missing,
-                )));
+            anyhow::bail!(format!(indoc! {"
+                Missing required functions in the external scanner, parsing won't work without these!
+
+                {missing}
+
+                You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners
+                "}));
}
}
}
@@ -1008,9 +1001,9 @@ impl Loader {
{
EmccSource::Podman
} else {
- return Err(anyhow!(
+ anyhow::bail!(
"You must have either emcc, docker, or podman on your PATH to run this command"
- ));
+ );
};
let mut command = match source {
@@ -1103,12 +1096,11 @@ impl Loader {
.spawn()
.with_context(|| "Failed to run emcc command")?
.wait()?;
- if !status.success() {
- return Err(anyhow!("emcc command failed"));
- }
-
- fs::rename(src_path.join(output_name), output_path)
- .context("failed to rename wasm output file")?;
+ anyhow::ensure!(status.success(), "emcc command failed");
+ let source_path = src_path.join(output_name);
+ fs::rename(&source_path, &output_path).with_context(|| {
+ format!("failed to rename wasm output file from {source_path:?} to {output_path:?}")
+ })?;
Ok(())
}
@@ -1185,11 +1177,8 @@ impl Loader {
.map(|path| {
let path = parser_path.join(path);
// prevent p being above/outside of parser_path
- if path.starts_with(parser_path) {
- Ok(path)
- } else {
- Err(anyhow!("External file path {path:?} is outside of parser directory {parser_path:?}"))
- }
+ anyhow::ensure!(path.starts_with(parser_path), "External file path {path:?} is outside of parser directory {parser_path:?}");
+ Ok(path)
})
.collect::<Result<Vec<_>>>()
}).transpose()?,
@@ -1324,11 +1313,8 @@ impl Loader {
let name = GRAMMAR_NAME_REGEX
.captures(&first_three_lines)
.and_then(|c| c.get(1))
- .ok_or_else(|| {
- anyhow!(
- "Failed to parse the language name from grammar.json at {}",
- grammar_path.display()
- )
+ .with_context(|| {
+ format!("Failed to parse the language name from grammar.json at {grammar_path:?}")
})?;
Ok(name.as_str().to_string())
@@ -1347,7 +1333,7 @@ impl Loader {
{
Ok(config.0)
} else {
- Err(anyhow!("Unknown scope '{scope}'"))
+ anyhow::bail!("Unknown scope '{scope}'")
}
} else if let Some((lang, _)) = self
.language_configuration_for_file_name(path)
@@ -1371,7 +1357,7 @@ impl Loader {
} else if let Some(lang) = self.language_configuration_for_first_line_regex(path)? {
Ok(lang.0)
} else {
- Err(anyhow!("No language found"))
+ anyhow::bail!("No language found");
}
}
@@ -3,7 +3,7 @@ use crate::{
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
schema::json_schema_for,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{
ActionLog, AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput,
ToolUseStatus,
@@ -279,15 +279,15 @@ impl Tool for EditFileTool {
let input_path = input.path.display();
if diff.is_empty() {
- if hallucinated_old_text {
- Err(anyhow!(formatdoc! {"
- Some edits were produced but none of them could be applied.
- Read the relevant sections of {input_path} again so that
- I can perform the requested edits.
- "}))
- } else {
- Ok("No edits were made.".to_string().into())
- }
+ anyhow::ensure!(
+ !hallucinated_old_text,
+ formatdoc! {"
+ Some edits were produced but none of them could be applied.
+ Read the relevant sections of {input_path} again so that
+ I can perform the requested edits.
+ "}
+ );
+ Ok("No edits were made.".to_string().into())
} else {
Ok(ToolResultOutput {
content: ToolResultContent::Text(format!(
@@ -347,53 +347,52 @@ fn resolve_path(
EditFileMode::Edit | EditFileMode::Overwrite => {
let path = project
.find_project_path(&input.path, cx)
- .ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
+ .context("Can't edit file: path not found")?;
let entry = project
.entry_for_path(&path, cx)
- .ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
-
- if !entry.is_file() {
- return Err(anyhow!("Can't edit file: path is a directory"));
- }
+ .context("Can't edit file: path not found")?;
+ anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory");
Ok(path)
}
EditFileMode::Create => {
if let Some(path) = project.find_project_path(&input.path, cx) {
- if project.entry_for_path(&path, cx).is_some() {
- return Err(anyhow!("Can't create file: file already exists"));
- }
+ anyhow::ensure!(
+ project.entry_for_path(&path, cx).is_none(),
+ "Can't create file: file already exists"
+ );
}
let parent_path = input
.path
.parent()
- .ok_or_else(|| anyhow!("Can't create file: incorrect path"))?;
+ .context("Can't create file: incorrect path")?;
let parent_project_path = project.find_project_path(&parent_path, cx);
let parent_entry = parent_project_path
.as_ref()
.and_then(|path| project.entry_for_path(&path, cx))
- .ok_or_else(|| anyhow!("Can't create file: parent directory doesn't exist"))?;
+ .context("Can't create file: parent directory doesn't exist")?;
- if !parent_entry.is_dir() {
- return Err(anyhow!("Can't create file: parent is not a directory"));
- }
+ anyhow::ensure!(
+ parent_entry.is_dir(),
+ "Can't create file: parent is not a directory"
+ );
let file_name = input
.path
.file_name()
- .ok_or_else(|| anyhow!("Can't create file: invalid filename"))?;
+ .context("Can't create file: invalid filename")?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
path: Arc::from(parent.path.join(file_name)),
..parent
});
- new_file_path.ok_or_else(|| anyhow!("Can't create file"))
+ new_file_path.context("Can't create file")
}
}
}
@@ -917,8 +916,6 @@ async fn build_buffer_diff(
#[cfg(test)]
mod tests {
- use std::result::Result;
-
use super::*;
use client::TelemetrySettings;
use fs::FakeFs;
@@ -1019,7 +1016,7 @@ mod tests {
mode: &EditFileMode,
path: &str,
cx: &mut TestAppContext,
- ) -> Result<ProjectPath, anyhow::Error> {
+ ) -> anyhow::Result<ProjectPath> {
init_test(cx);
let fs = FakeFs::new(cx.executor());
@@ -1046,7 +1043,7 @@ mod tests {
result
}
- fn assert_resolved_path_eq(path: Result<ProjectPath, anyhow::Error>, expected: &str) {
+ fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
let actual = path
.expect("Should return valid path")
.path
@@ -109,7 +109,7 @@ impl Tool for GrepTool {
let input = match serde_json::from_value::<GrepToolInput>(input) {
Ok(input) => input,
Err(error) => {
- return Task::ready(Err(anyhow!("Failed to parse input: {}", error))).into();
+ return Task::ready(Err(anyhow!("Failed to parse input: {error}"))).into();
}
};
@@ -122,7 +122,7 @@ impl Tool for GrepTool {
) {
Ok(matcher) => matcher,
Err(error) => {
- return Task::ready(Err(anyhow!("invalid include glob pattern: {}", error))).into();
+ return Task::ready(Err(anyhow!("invalid include glob pattern: {error}"))).into();
}
};
@@ -1,5 +1,5 @@
use crate::schema::json_schema_for;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
@@ -117,17 +117,10 @@ impl Tool for MovePathTool {
});
cx.background_spawn(async move {
- match rename_task.await {
- Ok(_) => {
- Ok(format!("Moved {} to {}", input.source_path, input.destination_path).into())
- }
- Err(err) => Err(anyhow!(
- "Failed to move {} to {}: {}",
- input.source_path,
- input.destination_path,
- err
- )),
- }
+ let _ = rename_task.await.with_context(|| {
+ format!("Moving {} to {}", input.source_path, input.destination_path)
+ })?;
+ Ok(format!("Moved {} to {}", input.source_path, input.destination_path).into())
})
.into()
}
@@ -1,5 +1,5 @@
use crate::schema::json_schema_for;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};
use assistant_tool::{ToolResultContent, outline};
use gpui::{AnyWindowHandle, App, Entity, Task};
@@ -129,7 +129,7 @@ impl Tool for ReadFileTool {
let language_model_image = cx
.update(|cx| LanguageModelImage::from_image(image, cx))?
.await
- .ok_or_else(|| anyhow!("Failed to process image"))?;
+ .context("processing image")?;
Ok(ToolResultOutput {
content: ToolResultContent::Image(language_model_image),
@@ -152,7 +152,7 @@ impl Tool for ReadFileTool {
.as_ref()
.map_or(true, |file| !file.disk_state().exists())
})? {
- return Err(anyhow!("{} not found", file_path));
+ anyhow::bail!("{file_path} not found");
}
project.update(cx, |project, cx| {
@@ -382,13 +382,11 @@ fn working_dir(
match worktrees.next() {
Some(worktree) => {
- if worktrees.next().is_none() {
- Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
- } else {
- Err(anyhow!(
- "'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.",
- ))
- }
+ anyhow::ensure!(
+ worktrees.next().is_none(),
+ "'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.",
+ );
+ Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
}
None => Ok(None),
}
@@ -409,9 +407,7 @@ fn working_dir(
}
}
- Err(anyhow!(
- "`cd` directory {cd:?} was not in any of the project's worktrees."
- ))
+ anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees.");
}
}
@@ -1,6 +1,6 @@
use std::{io::Cursor, sync::Arc};
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use collections::HashMap;
use gpui::{App, AssetSource, Global};
use rodio::{
@@ -44,8 +44,8 @@ impl SoundRegistry {
let bytes = self
.assets
.load(&path)?
- .map(Ok)
- .unwrap_or_else(|| Err(anyhow::anyhow!("No such asset available")))?
+ .map(anyhow::Ok)
+ .with_context(|| format!("No asset available for path {path}"))??
.into_owned();
let cursor = Cursor::new(bytes);
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use client::{Client, TelemetrySettings};
use db::RELEASE_CHANNEL;
use db::kvp::KEY_VALUE_STORE;
@@ -367,7 +367,7 @@ impl AutoUpdater {
cx.default_global::<GlobalAutoUpdate>()
.0
.clone()
- .ok_or_else(|| anyhow!("auto-update not initialized"))
+ .context("auto-update not initialized")
})??;
let release = Self::get_release(
@@ -411,7 +411,7 @@ impl AutoUpdater {
cx.default_global::<GlobalAutoUpdate>()
.0
.clone()
- .ok_or_else(|| anyhow!("auto-update not initialized"))
+ .context("auto-update not initialized")
})??;
let release = Self::get_release(
@@ -465,12 +465,11 @@ impl AutoUpdater {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
- if !response.status().is_success() {
- return Err(anyhow!(
- "failed to fetch release: {:?}",
- String::from_utf8_lossy(&body),
- ));
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "failed to fetch release: {:?}",
+ String::from_utf8_lossy(&body),
+ );
serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
@@ -557,10 +556,10 @@ impl AutoUpdater {
let installer_dir = InstallerDir::new().await?;
let filename = match OS {
- "macos" => Ok("Zed.dmg"),
+ "macos" => anyhow::Ok("Zed.dmg"),
"linux" => Ok("zed.tar.gz"),
"windows" => Ok("ZedUpdateInstaller.exe"),
- _ => Err(anyhow!("not supported: {:?}", OS)),
+ unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
}?;
#[cfg(not(target_os = "windows"))]
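
In the auto-updater hunk above, annotating the first match arm with `anyhow::Ok` fixes the match's error type, so the remaining arms can use plain `Ok` while `bail!` diverges in the fallback arm. A sketch of the same shape:

```rust
use anyhow::Result;

fn asset_name(os: &str) -> Result<&'static str> {
    match os {
        // `anyhow::Ok` on the first arm pins the error type for the whole match.
        "macos" => anyhow::Ok("Zed.dmg"),
        "linux" => Ok("zed.tar.gz"),
        "windows" => Ok("ZedUpdateInstaller.exe"),
        unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
    }
}
```
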
@@ -581,7 +580,7 @@ impl AutoUpdater {
"macos" => install_release_macos(&installer_dir, downloaded_asset, &cx).await,
"linux" => install_release_linux(&installer_dir, downloaded_asset, &cx).await,
"windows" => install_release_windows(downloaded_asset).await,
- _ => Err(anyhow!("not supported: {:?}", OS)),
+ unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
}?;
this.update(&mut cx, |this, cx| {
@@ -640,12 +639,11 @@ async fn download_remote_server_binary(
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
let mut response = client.get(&release.url, request_body, true).await?;
- if !response.status().is_success() {
- return Err(anyhow!(
- "failed to download remote server release: {:?}",
- response.status()
- ));
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "failed to download remote server release: {:?}",
+ response.status()
+ );
smol::io::copy(response.body_mut(), &mut temp_file).await?;
smol::fs::rename(&temp, &target_path).await?;
@@ -792,7 +790,7 @@ async fn install_release_macos(
let running_app_path = cx.update(|cx| cx.app_path())??;
let running_app_filename = running_app_path
.file_name()
- .ok_or_else(|| anyhow!("invalid running app path"))?;
+ .with_context(|| format!("invalid running app path {running_app_path:?}"))?;
let mount_path = temp_dir.path().join("Zed");
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
@@ -22,7 +22,7 @@ mod windows_impl {
use super::dialog::create_dialog_window;
use super::updater::perform_update;
- use anyhow::{Context, Result};
+ use anyhow::{Context as _, Result};
use windows::{
Win32::{
Foundation::{HWND, LPARAM, WPARAM},
@@ -4,7 +4,7 @@ use std::{
time::{Duration, Instant},
};
-use anyhow::{Context, Result};
+use anyhow::{Context as _, Result};
use windows::Win32::{
Foundation::{HWND, LPARAM, WPARAM},
System::Threading::CREATE_NEW_PROCESS_GROUP,
@@ -124,9 +124,7 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>) -> Result<()>
for job in JOBS.iter() {
let start = Instant::now();
loop {
- if start.elapsed().as_secs() > 2 {
- return Err(anyhow::anyhow!("Timed out"));
- }
+ anyhow::ensure!(start.elapsed().as_secs() <= 2, "Timed out");
match (*job)(app_dir) {
Ok(_) => {
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
@@ -3,7 +3,7 @@ mod models;
use std::collections::HashMap;
use std::pin::Pin;
-use anyhow::{Error, Result, anyhow};
+use anyhow::{Context as _, Error, Result, anyhow};
use aws_sdk_bedrockruntime as bedrock;
pub use aws_sdk_bedrockruntime as bedrock_client;
pub use aws_sdk_bedrockruntime::types::{
@@ -97,7 +97,7 @@ pub async fn stream_completion(
}
})
.await
- .map_err(|err| anyhow!("failed to spawn task: {err:?}"))?
+ .context("spawning a task")?
}
pub fn aws_document_to_value(document: &Document) -> Value {
@@ -1,4 +1,3 @@
-use anyhow::anyhow;
use serde::{Deserialize, Serialize};
use strum::EnumIter;
@@ -107,7 +106,7 @@ impl Model {
} else if id.starts_with("claude-3-7-sonnet-thinking") {
Ok(Self::Claude3_7SonnetThinking)
} else {
- Err(anyhow!("invalid model id"))
+ anyhow::bail!("invalid model id {id}");
}
}
@@ -294,7 +293,7 @@ impl Model {
}
}
- pub fn cross_region_inference_id(&self, region: &str) -> Result<String, anyhow::Error> {
+ pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result<String> {
let region_group = if region.starts_with("us-gov-") {
"us-gov"
} else if region.starts_with("us-") {
@@ -307,8 +306,7 @@ impl Model {
// Canada and South America regions - default to US profiles
"us"
} else {
- // Unknown region
- return Err(anyhow!("Unsupported Region"));
+ anyhow::bail!("Unsupported Region {region}");
};
let model_id = self.id();
@@ -2,7 +2,7 @@ pub mod participant;
pub mod room;
use crate::call_settings::CallSettings;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use audio::Audio;
use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE, proto};
use collections::HashSet;
@@ -187,7 +187,7 @@ impl ActiveCall {
let invite = if let Some(room) = room {
cx.spawn(async move |_, cx| {
- let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
+ let room = room.await.map_err(|err| anyhow!("{err:?}"))?;
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
@@ -236,7 +236,7 @@ impl ActiveCall {
.shared();
self.pending_room_creation = Some(room.clone());
cx.background_spawn(async move {
- room.await.map_err(|err| anyhow!("{:?}", err))?;
+ room.await.map_err(|err| anyhow!("{err:?}"))?;
anyhow::Ok(())
})
};
@@ -326,7 +326,7 @@ impl ActiveCall {
.0
.borrow_mut()
.take()
- .ok_or_else(|| anyhow!("no incoming call"))?;
+ .context("no incoming call")?;
telemetry::event!("Incoming Call Declined", room_id = call.room_id);
self.client.send(proto::DeclineCall {
room_id: call.room_id,
@@ -399,12 +399,9 @@ impl ActiveCall {
project: Entity<Project>,
cx: &mut Context<Self>,
) -> Result<()> {
- if let Some((room, _)) = self.room.as_ref() {
- self.report_call_event("Project Unshared", cx);
- room.update(cx, |room, cx| room.unshare_project(project, cx))
- } else {
- Err(anyhow!("no active call"))
- }
+ let (room, _) = self.room.as_ref().context("no active call")?;
+ self.report_call_event("Project Unshared", cx);
+ room.update(cx, |room, cx| room.unshare_project(project, cx))
}
pub fn location(&self) -> Option<&WeakEntity<Project>> {
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use client::{ParticipantIndex, User, proto};
use collections::HashMap;
use gpui::WeakEntity;
@@ -18,17 +18,17 @@ pub enum ParticipantLocation {
impl ParticipantLocation {
pub fn from_proto(location: Option<proto::ParticipantLocation>) -> Result<Self> {
- match location.and_then(|l| l.variant) {
- Some(proto::participant_location::Variant::SharedProject(project)) => {
+ match location
+ .and_then(|l| l.variant)
+ .context("participant location was not provided")?
+ {
+ proto::participant_location::Variant::SharedProject(project) => {
Ok(Self::SharedProject {
project_id: project.id,
})
}
- Some(proto::participant_location::Variant::UnsharedProject(_)) => {
- Ok(Self::UnsharedProject)
- }
- Some(proto::participant_location::Variant::External(_)) => Ok(Self::External),
- None => Err(anyhow!("participant location was not provided")),
+ proto::participant_location::Variant::UnsharedProject(_) => Ok(Self::UnsharedProject),
+ proto::participant_location::Variant::External(_) => Ok(Self::External),
}
}
}
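
The `ParticipantLocation::from_proto` rewrite above resolves the `Option` with `.context(..)?` before matching, so the match only has to cover the real variants. A minimal sketch with illustrative types:

```rust
use anyhow::{Context as _, Result};

enum Variant {
    SharedProject(u64),
    External,
}

fn location(variant: Option<Variant>) -> Result<String> {
    // Unwrapping up front removes the repeated `Some(..)` patterns and the
    // explicit `None => Err(..)` arm.
    match variant.context("participant location was not provided")? {
        Variant::SharedProject(id) => Ok(format!("shared project {id}")),
        Variant::External => Ok("external".to_string()),
    }
}
```
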
@@ -2,7 +2,7 @@ use crate::{
call_settings::CallSettings,
participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use audio::{Audio, Sound};
use client::{
ChannelId, Client, ParticipantIndex, TypedEnvelope, User, UserStore,
@@ -165,7 +165,7 @@ impl Room {
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
let response = client.request(proto::CreateRoom {}).await?;
- let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+ let room_proto = response.room.context("invalid room")?;
let room = cx.new(|cx| {
let mut room = Self::new(
room_proto.id,
@@ -270,7 +270,7 @@ impl Room {
user_store: Entity<UserStore>,
mut cx: AsyncApp,
) -> Result<Entity<Self>> {
- let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+ let room_proto = response.room.context("invalid room")?;
let room = cx.new(|cx| {
Self::new(
room_proto.id,
@@ -360,7 +360,7 @@ impl Room {
log::info!("detected client disconnection");
this.upgrade()
- .ok_or_else(|| anyhow!("room was dropped"))?
+ .context("room was dropped")?
.update(cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
@@ -428,9 +428,7 @@ impl Room {
log::info!("reconnection failed, leaving room");
this.update(cx, |this, cx| this.leave(cx))?.await?;
}
- Err(anyhow!(
- "can't reconnect to room: client failed to re-establish connection"
- ))
+ anyhow::bail!("can't reconnect to room: client failed to re-establish connection");
}
fn rejoin(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
@@ -494,7 +492,7 @@ impl Room {
let response = response.await?;
let message_id = response.message_id;
let response = response.payload;
- let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+ let room_proto = response.room.context("invalid room")?;
this.update(cx, |this, cx| {
this.status = RoomStatus::Online;
this.apply_room_update(room_proto, cx)?;
@@ -645,10 +643,7 @@ impl Room {
envelope: TypedEnvelope<proto::RoomUpdated>,
mut cx: AsyncApp,
) -> Result<()> {
- let room = envelope
- .payload
- .room
- .ok_or_else(|| anyhow!("invalid room"))?;
+ let room = envelope.payload.room.context("invalid room")?;
this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
}
@@ -937,12 +932,15 @@ impl Room {
} => {
let user_id = participant.identity().0.parse()?;
let track_id = track.sid();
- let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
- anyhow!(
- "{:?} subscribed to track by unknown participant {user_id}",
- self.client.user_id()
- )
- })?;
+ let participant =
+ self.remote_participants
+ .get_mut(&user_id)
+ .with_context(|| {
+ format!(
+ "{:?} subscribed to track by unknown participant {user_id}",
+ self.client.user_id()
+ )
+ })?;
if self.live_kit.as_ref().map_or(true, |kit| kit.deafened) {
if publication.is_audio() {
publication.set_enabled(false, cx);
@@ -972,12 +970,15 @@ impl Room {
track, participant, ..
} => {
let user_id = participant.identity().0.parse()?;
- let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
- anyhow!(
- "{:?}, unsubscribed from track by unknown participant {user_id}",
- self.client.user_id()
- )
- })?;
+ let participant =
+ self.remote_participants
+ .get_mut(&user_id)
+ .with_context(|| {
+ format!(
+ "{:?}, unsubscribed from track by unknown participant {user_id}",
+ self.client.user_id()
+ )
+ })?;
match track {
livekit_client::RemoteTrack::Audio(track) => {
participant.audio_tracks.remove(&track.sid());
@@ -1324,7 +1325,7 @@ impl Room {
let live_kit = this
.live_kit
.as_mut()
- .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+ .context("live-kit was not initialized")?;
let canceled = if let LocalTrack::Pending {
publish_id: cur_publish_id,
@@ -1389,7 +1390,7 @@ impl Room {
cx.spawn(async move |this, cx| {
let sources = sources.await??;
- let source = sources.first().ok_or_else(|| anyhow!("no display found"))?;
+ let source = sources.first().context("no display found")?;
let publication = participant.publish_screenshare_track(&**source, cx).await;
@@ -1397,7 +1398,7 @@ impl Room {
let live_kit = this
.live_kit
.as_mut()
- .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+ .context("live-kit was not initialized")?;
let canceled = if let LocalTrack::Pending {
publish_id: cur_publish_id,
@@ -1485,16 +1486,14 @@ impl Room {
}
pub fn unshare_screen(&mut self, cx: &mut Context<Self>) -> Result<()> {
- if self.status.is_offline() {
- return Err(anyhow!("room is offline"));
- }
+ anyhow::ensure!(!self.status.is_offline(), "room is offline");
let live_kit = self
.live_kit
.as_mut()
- .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+ .context("live-kit was not initialized")?;
match mem::take(&mut live_kit.screen_track) {
- LocalTrack::None => Err(anyhow!("screen was not shared")),
+ LocalTrack::None => anyhow::bail!("screen was not shared"),
LocalTrack::Pending { .. } => {
cx.notify();
Ok(())
@@ -1,5 +1,5 @@
use crate::{Channel, ChannelStore};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use client::{
ChannelId, Client, Subscription, TypedEnvelope, UserId, proto,
user::{User, UserStore},
@@ -170,15 +170,16 @@ impl ChannelChat {
message: MessageParams,
cx: &mut Context<Self>,
) -> Result<Task<Result<u64>>> {
- if message.text.trim().is_empty() {
- Err(anyhow!("message body can't be empty"))?;
- }
+ anyhow::ensure!(
+ !message.text.trim().is_empty(),
+ "message body can't be empty"
+ );
let current_user = self
.user_store
.read(cx)
.current_user()
- .ok_or_else(|| anyhow!("current_user is not present"))?;
+ .context("current_user is not present")?;
let channel_id = self.channel_id;
let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
@@ -215,7 +216,7 @@ impl ChannelChat {
});
let response = request.await?;
drop(outgoing_message_guard);
- let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
+ let response = response.message.context("invalid message")?;
let id = response.id;
let message = ChannelMessage::from_proto(response, &user_store, cx).await?;
this.update(cx, |this, cx| {
@@ -470,7 +471,7 @@ impl ChannelChat {
});
let response = request.await?;
let message = ChannelMessage::from_proto(
- response.message.ok_or_else(|| anyhow!("invalid message"))?,
+ response.message.context("invalid message")?,
&user_store,
cx,
)
@@ -531,10 +532,7 @@ impl ChannelChat {
mut cx: AsyncApp,
) -> Result<()> {
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
- let message = message
- .payload
- .message
- .ok_or_else(|| anyhow!("empty message"))?;
+ let message = message.payload.message.context("empty message")?;
let message_id = message.id;
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
@@ -566,10 +564,7 @@ impl ChannelChat {
mut cx: AsyncApp,
) -> Result<()> {
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
- let message = message
- .payload
- .message
- .ok_or_else(|| anyhow!("empty message"))?;
+ let message = message.payload.message.context("empty message")?;
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
@@ -753,10 +748,7 @@ impl ChannelMessage {
.collect(),
timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?,
sender,
- nonce: message
- .nonce
- .ok_or_else(|| anyhow!("nonce is required"))?
- .into(),
+ nonce: message.nonce.context("nonce is required")?.into(),
reply_to_message_id: message.reply_to_message_id,
edited_at,
})
@@ -1,7 +1,7 @@
mod channel_index;
use crate::{ChannelMessage, channel_buffer::ChannelBuffer, channel_chat::ChannelChat};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use channel_index::ChannelIndex;
use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
use collections::{HashMap, HashSet, hash_map};
@@ -332,9 +332,7 @@ impl ChannelStore {
cx.spawn(async move |this, cx| {
if let Some(request) = request {
let response = request.await?;
- let this = this
- .upgrade()
- .ok_or_else(|| anyhow!("channel store dropped"))?;
+ let this = this.upgrade().context("channel store dropped")?;
let user_store = this.update(cx, |this, _| this.user_store.clone())?;
ChannelMessage::from_proto_vec(response.messages, &user_store, cx).await
} else {
@@ -482,7 +480,7 @@ impl ChannelStore {
.spawn(async move |this, cx| {
let channel = this.update(cx, |this, _| {
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
- Arc::new(anyhow!("no channel for id: {}", channel_id))
+ Arc::new(anyhow!("no channel for id: {channel_id}"))
})
})??;
@@ -514,7 +512,7 @@ impl ChannelStore {
}
}
};
- cx.background_spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
+ cx.background_spawn(async move { task.await.map_err(|error| anyhow!("{error}")) })
}
pub fn is_channel_admin(&self, channel_id: ChannelId) -> bool {
@@ -578,9 +576,7 @@ impl ChannelStore {
})
.await?;
- let channel = response
- .channel
- .ok_or_else(|| anyhow!("missing channel in response"))?;
+ let channel = response.channel.context("missing channel in response")?;
let channel_id = ChannelId(channel.id);
this.update(cx, |this, cx| {
@@ -752,7 +748,7 @@ impl ChannelStore {
})
.await?
.channel
- .ok_or_else(|| anyhow!("missing channel in response"))?;
+ .context("missing channel in response")?;
this.update(cx, |this, cx| {
let task = this.update_channels(
proto::UpdateChannels {
@@ -169,7 +169,7 @@ fn main() -> Result<()> {
"To retrieve the system specs on the command line, run the following command:",
&format!("{} --system-specs", path.display()),
];
- return Err(anyhow::anyhow!(msg.join("\n")));
+ anyhow::bail!(msg.join("\n"));
}
#[cfg(all(
@@ -255,11 +255,10 @@ fn main() -> Result<()> {
}
}
- if let Some(_) = args.dev_server_token {
- return Err(anyhow::anyhow!(
- "Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
- ))?;
- }
+ anyhow::ensure!(
+ args.dev_server_token.is_none(),
+ "Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
+ );
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
let exit_status = exit_status.clone();
@@ -400,7 +399,7 @@ mod linux {
time::Duration,
};
- use anyhow::anyhow;
+ use anyhow::{Context as _, anyhow};
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
use fork::Fork;
@@ -417,9 +416,7 @@ mod linux {
path.to_path_buf().canonicalize()?
} else {
let cli = env::current_exe()?;
- let dir = cli
- .parent()
- .ok_or_else(|| anyhow!("no parent path for cli"))?;
+ let dir = cli.parent().context("no parent path for cli")?;
// libexec is the standard, lib/zed is for Arch (and other non-libexec distros),
// ./zed is for the target directory in development builds.
@@ -428,8 +425,8 @@ mod linux {
possible_locations
.iter()
.find_map(|p| dir.join(p).canonicalize().ok().filter(|path| path != &cli))
- .ok_or_else(|| {
- anyhow!("could not find any of: {}", possible_locations.join(", "))
+ .with_context(|| {
+ format!("could not find any of: {}", possible_locations.join(", "))
})?
};
@@ -759,7 +756,7 @@ mod windows {
#[cfg(target_os = "macos")]
mod mac_os {
- use anyhow::{Context as _, Result, anyhow};
+ use anyhow::{Context as _, Result};
use core_foundation::{
array::{CFArray, CFIndex},
base::TCFType as _,
@@ -800,9 +797,10 @@ mod mac_os {
let cli_path = std::env::current_exe()?.canonicalize()?;
let mut app_path = cli_path.clone();
while app_path.extension() != Some(OsStr::new("app")) {
- if !app_path.pop() {
- return Err(anyhow!("cannot find app bundle containing {:?}", cli_path));
- }
+ anyhow::ensure!(
+ app_path.pop(),
+ "cannot find app bundle containing {cli_path:?}"
+ );
}
Ok(app_path)
}
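
Several of the diffs above replace an `if !cond { return Err(anyhow!(..)) }` block with `anyhow::ensure!`, which returns early with the formatted message when the condition is false. A small self-contained sketch (the port check is invented for illustration):

```rust
use anyhow::Result;

fn check_port(port: u16) -> Result<()> {
    // Expands to an early `return Err(anyhow!(...))` when the condition is false.
    anyhow::ensure!(port != 0, "port must be non-zero, got {port}");
    Ok(())
}
```
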
@@ -711,9 +711,10 @@ impl Client {
let id = (TypeId::of::<T>(), remote_id);
let mut state = self.handler_set.lock();
- if state.entities_by_type_and_remote_id.contains_key(&id) {
- return Err(anyhow!("already subscribed to entity"));
- }
+ anyhow::ensure!(
+ !state.entities_by_type_and_remote_id.contains_key(&id),
+ "already subscribed to entity"
+ );
state
.entities_by_type_and_remote_id
@@ -962,10 +963,7 @@ impl Client {
hello_message_type_name
)
})?;
- let peer_id = hello
- .payload
- .peer_id
- .ok_or_else(|| anyhow!("invalid peer id"))?;
+ let peer_id = hello.payload.peer_id.context("invalid peer id")?;
Ok(peer_id)
};
@@ -1075,22 +1073,19 @@ impl Client {
}
let response = http.get(&url, Default::default(), false).await?;
- let collab_url = if response.status().is_redirection() {
- response
- .headers()
- .get("Location")
- .ok_or_else(|| anyhow!("missing location header in /rpc response"))?
- .to_str()
- .map_err(EstablishConnectionError::other)?
- .to_string()
- } else {
- Err(anyhow!(
- "unexpected /rpc response status {}",
- response.status()
- ))?
- };
-
- Url::parse(&collab_url).context("invalid rpc url")
+ anyhow::ensure!(
+ response.status().is_redirection(),
+ "unexpected /rpc response status {}",
+ response.status()
+ );
+ let collab_url = response
+ .headers()
+ .get("Location")
+ .context("missing location header in /rpc response")?
+ .to_str()
+ .map_err(EstablishConnectionError::other)?
+ .to_string();
+ Url::parse(&collab_url).with_context(|| format!("parsing collab rpc url {collab_url}"))
}
}
@@ -1132,7 +1127,7 @@ impl Client {
let rpc_host = rpc_url
.host_str()
.zip(rpc_url.port_or_known_default())
- .ok_or_else(|| anyhow!("missing host in rpc url"))?;
+ .context("missing host in rpc url")?;
let stream = {
let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
@@ -1287,16 +1282,13 @@ impl Client {
)
.context("failed to respond to login http request")?;
return Ok((
- user_id
- .ok_or_else(|| anyhow!("missing user_id parameter"))?,
- access_token.ok_or_else(|| {
- anyhow!("missing access_token parameter")
- })?,
+ user_id.context("missing user_id parameter")?,
+ access_token.context("missing access_token parameter")?,
));
}
}
- Err(anyhow!("didn't receive login redirect"))
+ anyhow::bail!("didn't receive login redirect");
})
.await?;
@@ -1414,13 +1406,12 @@ impl Client {
let mut response = http.send(request).await?;
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- if !response.status().is_success() {
- Err(anyhow!(
- "admin user request failed {} - {}",
- response.status().as_u16(),
- body,
- ))?;
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "admin user request failed {} - {}",
+ response.status().as_u16(),
+ body,
+ );
let response: AuthenticatedUserResponse = serde_json::from_str(&body)?;
// Use the admin API token to authenticate as the impersonated user.
@@ -1457,7 +1448,7 @@ impl Client {
if let Status::Connected { connection_id, .. } = *self.status().borrow() {
Ok(connection_id)
} else {
- Err(anyhow!("not connected"))
+ anyhow::bail!("not connected");
}
}
@@ -1,5 +1,5 @@
//! socks proxy
-use anyhow::{Context, Result, anyhow};
+use anyhow::{Context as _, Result};
use http_client::Url;
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};
@@ -31,7 +31,7 @@ pub(crate) async fn connect_socks_proxy_stream(
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
// SOCKS proxies are often used in contexts where security and privacy are critical,
// so any fallback could expose users to significant risks.
- return Err(anyhow!("Parsing proxy url failed"));
+ anyhow::bail!("Parsing proxy url failed");
};
// Connect to proxy and wrap protocol later
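
Unconditional early errors get the same treatment: `anyhow::bail!(..)` is shorthand for `return Err(anyhow!(..))`. An illustrative sketch, not taken from the diff:

```rust
use anyhow::Result;

fn proxy_kind(scheme: &str) -> Result<&'static str> {
    match scheme {
        "socks4" | "socks5" => Ok("socks"),
        // Shorthand for `return Err(anyhow::anyhow!(...))`.
        other => anyhow::bail!("unsupported proxy scheme {other:?}"),
    }
}
```
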
@@ -1,5 +1,5 @@
use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use chrono::Duration;
use futures::{StreamExt, stream::BoxStream};
use gpui::{AppContext as _, BackgroundExecutor, Entity, TestAppContext};
@@ -45,7 +45,7 @@ impl FakeServer {
move |cx| {
let state = state.clone();
cx.spawn(async move |_| {
- let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
+ let state = state.upgrade().context("server dropped")?;
let mut state = state.lock();
state.auth_count += 1;
let access_token = state.access_token.to_string();
@@ -64,8 +64,8 @@ impl FakeServer {
let state = state.clone();
let credentials = credentials.clone();
cx.spawn(async move |cx| {
- let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
- let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
+ let state = state.upgrade().context("server dropped")?;
+ let peer = peer.upgrade().context("server dropped")?;
if state.lock().forbid_connections {
Err(EstablishConnectionError::Other(anyhow!(
"server is forbidding connections"
@@ -155,7 +155,7 @@ impl FakeServer {
.expect("not connected")
.next()
.await
- .ok_or_else(|| anyhow!("other half hung up"))?;
+ .context("other half hung up")?;
self.executor.finish_waiting();
let type_name = message.payload_type_name();
let message = message.into_any();
@@ -388,9 +388,7 @@ impl UserStore {
// Users are fetched in parallel above and cached in call to get_users
// No need to parallelize here
let mut updated_contacts = Vec::new();
- let this = this
- .upgrade()
- .ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
+ let this = this.upgrade().context("can't upgrade user store handle")?;
for contact in message.contacts {
updated_contacts
.push(Arc::new(Contact::from_proto(contact, &this, cx).await?));
@@ -574,7 +572,7 @@ impl UserStore {
let client = self.client.upgrade();
cx.spawn(async move |_, _| {
client
- .ok_or_else(|| anyhow!("can't upgrade client reference"))?
+ .context("can't upgrade client reference")?
.request(proto::RespondToContactRequest {
requester_id,
response: proto::ContactRequestResponse::Dismiss as i32,
@@ -596,7 +594,7 @@ impl UserStore {
cx.spawn(async move |this, cx| {
let response = client
- .ok_or_else(|| anyhow!("can't upgrade client reference"))?
+ .context("can't upgrade client reference")?
.request(request)
.await;
this.update(cx, |this, cx| {
@@ -663,7 +661,7 @@ impl UserStore {
this.users
.get(user_id)
.cloned()
- .ok_or_else(|| anyhow!("user {} not found", user_id))
+ .with_context(|| format!("user {user_id} not found"))
})
.collect()
})?
@@ -703,7 +701,7 @@ impl UserStore {
this.users
.get(&user_id)
.cloned()
- .ok_or_else(|| anyhow!("server responded with no users"))
+ .context("server responded with no users")
})?
})
}
@@ -765,20 +763,17 @@ impl UserStore {
};
let client = self.client.clone();
- cx.spawn(async move |this, cx| {
- if let Some(client) = client.upgrade() {
- let response = client
- .request(proto::AcceptTermsOfService {})
- .await
- .context("error accepting tos")?;
-
- this.update(cx, |this, cx| {
- this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
- cx.emit(Event::PrivateUserInfoUpdated);
- })
- } else {
- Err(anyhow!("client not found"))
- }
+ cx.spawn(async move |this, cx| -> anyhow::Result<()> {
+ let client = client.upgrade().context("client not found")?;
+ let response = client
+ .request(proto::AcceptTermsOfService {})
+ .await
+ .context("error accepting tos")?;
+ this.update(cx, |this, cx| {
+ this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
+ cx.emit(Event::PrivateUserInfoUpdated);
+ })?;
+ Ok(())
})
}
@@ -897,7 +892,7 @@ impl Contact {
impl Collaborator {
pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
Ok(Self {
- peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
+ peer_id: message.peer_id.context("invalid peer id")?,
replica_id: message.replica_id as ReplicaId,
user_id: message.user_id as UserId,
is_host: message.is_host,
@@ -10,7 +10,7 @@ use crate::{
db::{User, UserId},
rpc,
};
-use anyhow::anyhow;
+use anyhow::Context as _;
use axum::{
Extension, Json, Router,
body::Body,
@@ -220,7 +220,7 @@ async fn create_access_token(
.db
.get_user_by_id(user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let mut impersonated_user_id = None;
if let Some(impersonate) = params.impersonate {
@@ -1,4 +1,4 @@
-use anyhow::{Context, anyhow, bail};
+use anyhow::{Context as _, bail};
use axum::{
Extension, Json, Router,
extract::{self, Query},
@@ -89,7 +89,7 @@ async fn get_billing_preferences(
.db
.get_user_by_github_user_id(params.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
let preferences = app.db.get_billing_preferences(user.id).await?;
@@ -138,7 +138,7 @@ async fn update_billing_preferences(
.db
.get_user_by_github_user_id(body.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
@@ -241,7 +241,7 @@ async fn list_billing_subscriptions(
.db
.get_user_by_github_user_id(params.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let subscriptions = app.db.get_billing_subscriptions(user.id).await?;
@@ -307,7 +307,7 @@ async fn create_billing_subscription(
.db
.get_user_by_github_user_id(body.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let Some(stripe_billing) = app.stripe_billing.clone() else {
log::error!("failed to retrieve Stripe billing object");
@@ -432,7 +432,7 @@ async fn manage_billing_subscription(
.db
.get_user_by_github_user_id(body.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let Some(stripe_client) = app.stripe_client.clone() else {
log::error!("failed to retrieve Stripe client");
@@ -454,7 +454,7 @@ async fn manage_billing_subscription(
.db
.get_billing_customer_by_user_id(user.id)
.await?
- .ok_or_else(|| anyhow!("billing customer not found"))?;
+ .context("billing customer not found")?;
let customer_id = CustomerId::from_str(&customer.stripe_customer_id)
.context("failed to parse customer ID")?;
@@ -462,7 +462,7 @@ async fn manage_billing_subscription(
.db
.get_billing_subscription_by_id(body.subscription_id)
.await?
- .ok_or_else(|| anyhow!("subscription not found"))?;
+ .context("subscription not found")?;
let subscription_id = SubscriptionId::from_str(&subscription.stripe_subscription_id)
.context("failed to parse subscription ID")?;
@@ -559,7 +559,7 @@ async fn manage_billing_subscription(
None
}
})
- .ok_or_else(|| anyhow!("No subscription item to update"))?;
+ .context("No subscription item to update")?;
Some(CreateBillingPortalSessionFlowData {
type_: CreateBillingPortalSessionFlowDataType::SubscriptionUpdateConfirm,
@@ -653,7 +653,7 @@ async fn migrate_to_new_billing(
.db
.get_user_by_github_user_id(body.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let old_billing_subscriptions_by_user = app
.db
@@ -732,13 +732,13 @@ async fn sync_billing_subscription(
.db
.get_user_by_github_user_id(body.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let billing_customer = app
.db
.get_billing_customer_by_user_id(user.id)
.await?
- .ok_or_else(|| anyhow!("billing customer not found"))?;
+ .context("billing customer not found")?;
let stripe_customer_id = billing_customer
.stripe_customer_id
.parse::<stripe::CustomerId>()
@@ -1031,13 +1031,13 @@ async fn sync_subscription(
let billing_customer =
find_or_create_billing_customer(app, stripe_client, subscription.customer)
.await?
- .ok_or_else(|| anyhow!("billing customer not found"))?;
+ .context("billing customer not found")?;
if let Some(SubscriptionKind::ZedProTrial) = subscription_kind {
if subscription.status == SubscriptionStatus::Trialing {
let current_period_start =
DateTime::from_timestamp(subscription.current_period_start, 0)
- .ok_or_else(|| anyhow!("No trial subscription period start"))?;
+ .context("No trial subscription period start")?;
app.db
.update_billing_customer(
@@ -1243,7 +1243,7 @@ async fn get_monthly_spend(
.db
.get_user_by_github_user_id(params.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let Some(llm_db) = app.llm_db.clone() else {
return Err(Error::http(
@@ -1311,7 +1311,7 @@ async fn get_current_usage(
.db
.get_user_by_github_user_id(params.github_user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let feature_flags = app.db.get_user_flags(user.id).await?;
let has_extended_trial = feature_flags
@@ -1,6 +1,5 @@
use std::sync::{Arc, OnceLock};
-use anyhow::anyhow;
use axum::{
Extension, Json, Router,
extract::{self, Query},
@@ -39,7 +38,7 @@ impl CheckIsContributorParams {
return Ok(ContributorSelector::GitHubLogin { github_login });
}
- Err(anyhow!(
+ Err(anyhow::anyhow!(
"must be one of `github_user_id` or `github_login`."
))?
}
@@ -1,6 +1,6 @@
use crate::db::ExtensionVersionConstraints;
use crate::{AppState, Error, Result, db::NewExtensionVersion};
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use aws_sdk_s3::presigning::PresigningConfig;
use axum::{
Extension, Json, Router,
@@ -181,7 +181,7 @@ async fn download_latest_extension(
.db
.get_extension(&params.extension_id, constraints.as_ref())
.await?
- .ok_or_else(|| anyhow!("unknown extension"))?;
+ .context("unknown extension")?;
download_extension(
Extension(app),
Path(DownloadExtensionParams {
@@ -238,7 +238,7 @@ async fn download_extension(
))
.presigned(PresigningConfig::expires_in(EXTENSION_DOWNLOAD_URL_LIFETIME).unwrap())
.await
- .map_err(|e| anyhow!("failed to create presigned extension download url {e}"))?;
+ .context("creating presigned extension download url")?;
Ok(Redirect::temporary(url.uri()))
}
@@ -374,7 +374,7 @@ async fn fetch_extension_manifest(
blob_store_bucket: &String,
extension_id: &str,
version: &str,
-) -> Result<NewExtensionVersion, anyhow::Error> {
+) -> anyhow::Result<NewExtensionVersion> {
let object = blob_store_client
.get_object()
.bucket(blob_store_bucket)
@@ -397,8 +397,8 @@ async fn fetch_extension_manifest(
String::from_utf8_lossy(&manifest_bytes)
)
})?;
- let published_at = object.last_modified.ok_or_else(|| {
- anyhow!("missing last modified timestamp for extension {extension_id} version {version}")
+ let published_at = object.last_modified.with_context(|| {
+ format!("missing last modified timestamp for extension {extension_id} version {version}")
})?;
let published_at = time::OffsetDateTime::from_unix_timestamp_nanos(published_at.as_nanos())?;
let published_at = PrimitiveDateTime::new(published_at.date(), published_at.time());
@@ -1,3 +1,4 @@
+use anyhow::Context as _;
use collections::HashMap;
use semantic_version::SemanticVersion;
@@ -13,18 +14,12 @@ pub struct IpsFile {
impl IpsFile {
pub fn parse(bytes: &[u8]) -> anyhow::Result<IpsFile> {
let mut split = bytes.splitn(2, |&b| b == b'\n');
- let header_bytes = split
- .next()
- .ok_or_else(|| anyhow::anyhow!("No header found"))?;
- let header: Header = serde_json::from_slice(header_bytes)
- .map_err(|e| anyhow::anyhow!("Failed to parse header: {}", e))?;
+ let header_bytes = split.next().context("No header found")?;
+ let header: Header = serde_json::from_slice(header_bytes).context("parsing header")?;
- let body_bytes = split
- .next()
- .ok_or_else(|| anyhow::anyhow!("No body found"))?;
+ let body_bytes = split.next().context("No body found")?;
- let body: Body = serde_json::from_slice(body_bytes)
- .map_err(|e| anyhow::anyhow!("Failed to parse body: {}", e))?;
+ let body: Body = serde_json::from_slice(body_bytes).context("parsing body")?;
Ok(IpsFile { header, body })
}
@@ -3,7 +3,7 @@ use crate::{
db::{self, AccessTokenId, Database, UserId},
rpc::Principal,
};
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use axum::{
http::{self, Request, StatusCode},
middleware::Next,
@@ -85,14 +85,14 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
.db
.get_user_by_id(user_id)
.await?
- .ok_or_else(|| anyhow!("user {} not found", user_id))?;
+ .with_context(|| format!("user {user_id} not found"))?;
if let Some(impersonator_id) = validate_result.impersonator_id {
let admin = state
.db
.get_user_by_id(impersonator_id)
.await?
- .ok_or_else(|| anyhow!("user {} not found", impersonator_id))?;
+ .with_context(|| format!("user {impersonator_id} not found"))?;
req.extensions_mut()
.insert(Principal::Impersonated { user, admin });
} else {
@@ -192,7 +192,7 @@ pub async fn verify_access_token(
let db_token = db.get_access_token(token.id).await?;
let token_user_id = db_token.impersonated_user_id.unwrap_or(db_token.user_id);
if token_user_id != user_id {
- return Err(anyhow!("no such access token"))?;
+ return Err(anyhow::anyhow!("no such access token"))?;
}
let t0 = Instant::now();
@@ -5,7 +5,7 @@ mod tables;
pub mod tests;
use crate::{Error, Result, executor::Executor};
-use anyhow::anyhow;
+use anyhow::{Context as _, anyhow};
use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use dashmap::DashMap;
use futures::StreamExt;
@@ -320,11 +320,9 @@ impl Database {
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
- let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
- return Err(anyhow!(
- "couldn't complete transaction because it's still in use"
- ))?;
- };
+ let tx = Arc::get_mut(&mut tx)
+ .and_then(|tx| tx.take())
+ .context("couldn't complete transaction because it's still in use")?;
Ok((tx, result))
}
@@ -344,11 +342,9 @@ impl Database {
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
- let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
- return Err(anyhow!(
- "couldn't complete transaction because it's still in use"
- ))?;
- };
+ let tx = Arc::get_mut(&mut tx)
+ .and_then(|tx| tx.take())
+ .context("couldn't complete transaction because it's still in use")?;
Ok((tx, result))
}
@@ -853,9 +849,7 @@ fn db_status_to_proto(
)
}
_ => {
- return Err(anyhow!(
- "Unexpected combination of status fields: {entry:?}"
- ));
+ anyhow::bail!("Unexpected combination of status fields: {entry:?}");
}
};
Ok(proto::StatusEntry {
@@ -1,4 +1,5 @@
use super::*;
+use anyhow::Context as _;
use sea_orm::sea_query::Query;
impl Database {
@@ -51,7 +52,7 @@ impl Database {
Ok(access_token::Entity::find_by_id(access_token_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such access token"))?)
+ .context("no such access token")?)
})
.await
}
@@ -1,3 +1,5 @@
+use anyhow::Context as _;
+
use super::*;
#[derive(Debug)]
@@ -82,7 +84,7 @@ impl Database {
Ok(preferences
.into_iter()
.next()
- .ok_or_else(|| anyhow!("billing preferences not found"))?)
+ .context("billing preferences not found")?)
})
.await
}
@@ -1,3 +1,5 @@
+use anyhow::Context as _;
+
use crate::db::billing_subscription::{
StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind,
};
@@ -51,7 +53,7 @@ impl Database {
Ok(billing_subscription::Entity::find_by_id(id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("failed to retrieve inserted billing subscription"))?)
+ .context("failed to retrieve inserted billing subscription")?)
})
.await
}
@@ -1,4 +1,5 @@
use super::*;
+use anyhow::Context as _;
use prost::Message;
use text::{EditOperation, UndoOperation};
@@ -467,7 +468,7 @@ impl Database {
.filter(buffer::Column::ChannelId.eq(channel_id))
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such buffer"))?;
+ .context("no such buffer")?;
let serialization_version = self
.get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &tx)
@@ -606,7 +607,7 @@ impl Database {
.into_values::<_, QueryOperationSerializationVersion>()
.one(tx)
.await?
- .ok_or_else(|| anyhow!("missing buffer snapshot"))?)
+ .context("missing buffer snapshot")?)
}
pub async fn get_channel_buffer(
@@ -621,7 +622,7 @@ impl Database {
.find_related(buffer::Entity)
.one(tx)
.await?
- .ok_or_else(|| anyhow!("no such buffer"))?)
+ .context("no such buffer")?)
}
async fn get_buffer_state(
@@ -643,7 +644,7 @@ impl Database {
)
.one(tx)
.await?
- .ok_or_else(|| anyhow!("no such snapshot"))?;
+ .context("no such snapshot")?;
let version = snapshot.operation_serialization_version;
(snapshot.text, version)
@@ -839,7 +840,7 @@ fn operation_from_storage(
_format_version: i32,
) -> Result<proto::operation::Variant, Error> {
let operation =
- storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
+ storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{error}"))?;
let version = version_from_storage(&operation.version);
Ok(if operation.is_undo {
proto::operation::Variant::Undo(proto::operation::Undo {
@@ -1,4 +1,5 @@
use super::*;
+use anyhow::Context as _;
use rpc::{
ErrorCode, ErrorCodeExt,
proto::{ChannelBufferVersion, VectorClockEntry, channel_member::Kind},
@@ -647,11 +648,8 @@ impl Database {
.and(channel_member::Column::UserId.eq(for_user)),
)
.one(&*tx)
- .await?;
-
- let Some(membership) = membership else {
- Err(anyhow!("no such member"))?
- };
+ .await?
+ .context("no such member")?;
let mut update = membership.into_active_model();
update.role = ActiveValue::Set(role);
@@ -1,3 +1,5 @@
+use anyhow::Context as _;
+
use super::*;
impl Database {
@@ -215,7 +217,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such contact"))?;
+ .context("no such contact")?;
contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
@@ -1,5 +1,6 @@
use std::str::FromStr;
+use anyhow::Context;
use chrono::Utc;
use sea_orm::sea_query::IntoCondition;
use util::ResultExt;
@@ -166,7 +167,7 @@ impl Database {
.filter(extension::Column::ExternalId.eq(extension_id))
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such extension: {extension_id}"))?;
+ .with_context(|| format!("no such extension: {extension_id}"))?;
let extensions = [extension];
let mut versions = self
@@ -274,7 +275,7 @@ impl Database {
.filter(extension::Column::ExternalId.eq(*external_id))
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("failed to insert extension"))?
+ .context("failed to insert extension")?
};
extension_version::Entity::insert_many(versions.iter().map(|version| {
@@ -1,4 +1,5 @@
use super::*;
+use anyhow::Context as _;
use rpc::Notification;
use sea_orm::{SelectColumns, TryInsertResult};
use time::OffsetDateTime;
@@ -330,7 +331,7 @@ impl Database {
.filter(channel_message::Column::Nonce.eq(Uuid::from_u128(nonce)))
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("failed to insert message"))?
+ .context("failed to insert message")?
.id;
}
}
@@ -1,4 +1,5 @@
use super::*;
+use anyhow::Context as _;
use rpc::Notification;
use util::ResultExt;
@@ -256,7 +257,7 @@ pub fn model_to_proto(this: &Database, row: notification::Model) -> Result<proto
let kind = this
.notification_kinds_by_id
.get(&row.kind)
- .ok_or_else(|| anyhow!("Unknown notification kind"))?;
+ .context("Unknown notification kind")?;
Ok(proto::Notification {
id: row.id.to_proto(),
kind: kind.to_string(),
@@ -276,5 +277,5 @@ fn notification_kind_from_proto(
.notification_kinds_by_name
.get(&proto.kind)
.copied()
- .ok_or_else(|| anyhow!("invalid notification kind {:?}", proto.kind))?)
+ .with_context(|| format!("invalid notification kind {:?}", proto.kind))?)
}
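
Where the error message interpolates data, the diffs switch to `with_context`, which takes a closure so the `format!` only runs on the error path; static messages keep the cheaper `context`. A small sketch under that assumption (the `HashMap` lookup is a stand-in, not the real notification table):

```rust
use anyhow::{Context as _, Result};
use std::collections::HashMap;

fn kind_id(kinds: &HashMap<String, u32>, name: &str) -> Result<u32> {
    kinds
        .get(name)
        .copied()
        // The closure is only evaluated if the Option is None.
        .with_context(|| format!("invalid notification kind {name:?}"))
}
```
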
@@ -49,7 +49,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("could not find participant"))?;
+ .context("could not find participant")?;
if participant.room_id != room_id {
return Err(anyhow!("shared project on unexpected room"))?;
}
@@ -128,7 +128,7 @@ impl Database {
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("project not found"))?;
+ .context("project not found")?;
let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
@@ -160,7 +160,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
self.update_project_worktrees(project.id, worktrees, &tx)
.await?;
@@ -242,7 +242,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project: {project_id}"))?;
+ .with_context(|| format!("no such project: {project_id}"))?;
// Update metadata.
worktree::Entity::update(worktree::ActiveModel {
@@ -624,16 +624,13 @@ impl Database {
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
self.project_transaction(project_id, |tx| async move {
- let summary = update
- .summary
- .as_ref()
- .ok_or_else(|| anyhow!("invalid summary"))?;
+ let summary = update.summary.as_ref().context("invalid summary")?;
// Ensure the update comes from the host.
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
if project.host_connection()? != connection {
return Err(anyhow!("can't update a project hosted by someone else"))?;
}
@@ -677,16 +674,13 @@ impl Database {
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
self.project_transaction(project_id, |tx| async move {
- let server = update
- .server
- .as_ref()
- .ok_or_else(|| anyhow!("invalid language server"))?;
+ let server = update.server.as_ref().context("invalid language server")?;
// Ensure the update comes from the host.
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
if project.host_connection()? != connection {
return Err(anyhow!("can't update a project hosted by someone else"))?;
}
@@ -732,7 +726,7 @@ impl Database {
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
if project.host_connection()? != connection {
return Err(anyhow!("can't update a project hosted by someone else"))?;
}
@@ -778,7 +772,7 @@ impl Database {
Ok(project::Entity::find_by_id(id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?)
+ .context("no such project")?)
})
.await
}
@@ -1074,7 +1068,7 @@ impl Database {
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
let collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
@@ -1143,7 +1137,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("failed to read project host"))?;
+ .context("failed to read project host")?;
Ok(())
})
@@ -1162,7 +1156,7 @@ impl Database {
let project = project::Entity::find_by_id(project_id)
.one(tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
let role_from_room = if let Some(room_id) = project.room_id {
room_participant::Entity::find()
@@ -1287,7 +1281,7 @@ impl Database {
let project = project::Entity::find_by_id(project_id)
.one(tx)
.await?
- .ok_or_else(|| anyhow!("no such project"))?;
+ .context("no such project")?;
let mut collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.eq(project_id))
@@ -161,7 +161,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("user is not in the room"))?;
+ .context("user is not in the room")?;
let called_user_role = match caller.role.unwrap_or(ChannelRole::Member) {
ChannelRole::Admin | ChannelRole::Member => ChannelRole::Member,
@@ -193,7 +193,7 @@ impl Database {
let room = self.get_room(room_id, &tx).await?;
let incoming_call = Self::build_incoming_call(&room, called_user_id)
- .ok_or_else(|| anyhow!("failed to build incoming call"))?;
+ .context("failed to build incoming call")?;
Ok((room, incoming_call))
})
.await
@@ -279,7 +279,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no call to cancel"))?;
+ .context("no call to cancel")?;
room_participant::Entity::delete(participant.into_active_model())
.exec(&*tx)
@@ -310,7 +310,7 @@ impl Database {
.into_values::<_, QueryChannelId>()
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("no such room"))?;
+ .context("no such room")?;
if channel_id.is_some() {
Err(anyhow!("tried to join channel call directly"))?
@@ -462,7 +462,7 @@ impl Database {
}
let (channel, room) = self.get_channel_room(room_id, tx).await?;
- let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
+ let channel = channel.context("no channel for room")?;
Ok(JoinRoom {
room,
channel: Some(channel),
@@ -505,7 +505,7 @@ impl Database {
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("project does not exist"))?;
+ .context("project does not exist")?;
if project.host_user_id != Some(user_id) {
return Err(anyhow!("no such project"))?;
}
@@ -519,7 +519,7 @@ impl Database {
.position(|collaborator| {
collaborator.user_id == user_id && collaborator.is_host
})
- .ok_or_else(|| anyhow!("host not found among collaborators"))?;
+ .context("host not found among collaborators")?;
let host = collaborators.swap_remove(host_ix);
let old_connection_id = host.connection();
@@ -1051,11 +1051,7 @@ impl Database {
let tx = tx;
let location_kind;
let location_project_id;
- match location
- .variant
- .as_ref()
- .ok_or_else(|| anyhow!("invalid location"))?
- {
+ match location.variant.as_ref().context("invalid location")? {
proto::participant_location::Variant::SharedProject(project) => {
location_kind = 0;
location_project_id = Some(ProjectId::from_proto(project.id));
@@ -1119,7 +1115,7 @@ impl Database {
)
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("only admins can set participant role"))?;
+ .context("only admins can set participant role")?;
if role.requires_cla() {
self.check_user_has_signed_cla(user_id, room_id, &tx)
@@ -1156,7 +1152,7 @@ impl Database {
let channel = room::Entity::find_by_id(room_id)
.one(tx)
.await?
- .ok_or_else(|| anyhow!("could not find room"))?
+ .context("could not find room")?
.find_related(channel::Entity)
.one(tx)
.await?;
@@ -1297,7 +1293,7 @@ impl Database {
let db_room = room::Entity::find_by_id(room_id)
.one(tx)
.await?
- .ok_or_else(|| anyhow!("could not find room"))?;
+ .context("could not find room")?;
let mut db_participants = db_room
.find_related(room_participant::Entity)
@@ -1,3 +1,4 @@
+use anyhow::Context as _;
use chrono::NaiveDateTime;
use super::*;
@@ -247,7 +248,7 @@ impl Database {
.into_values::<_, QueryAs>()
.one(&*tx)
.await?
- .ok_or_else(|| anyhow!("could not find user"))?;
+ .context("could not find user")?;
Ok(metrics_id.to_string())
})
.await
@@ -1,5 +1,5 @@
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
-use anyhow::anyhow;
+use anyhow::Context as _;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -18,10 +18,10 @@ impl Model {
pub fn host_connection(&self) -> Result<ConnectionId> {
let host_connection_server_id = self
.host_connection_server_id
- .ok_or_else(|| anyhow!("empty host_connection_server_id"))?;
+ .context("empty host_connection_server_id")?;
let host_connection_id = self
.host_connection_id
- .ok_or_else(|| anyhow!("empty host_connection_id"))?;
+ .context("empty host_connection_id")?;
Ok(ConnectionId {
owner_id: host_connection_server_id.0 as u32,
id: host_connection_id as u32,
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use std::fs;
use std::path::Path;
@@ -6,8 +6,8 @@ pub fn get_dotenv_vars(current_dir: impl AsRef<Path>) -> Result<Vec<(String, Str
let current_dir = current_dir.as_ref();
let mut vars = Vec::new();
- let env_content = fs::read_to_string(current_dir.join(".env.toml"))
- .map_err(|_| anyhow!("no .env.toml file found"))?;
+ let env_content =
+ fs::read_to_string(current_dir.join(".env.toml")).context("no .env.toml file found")?;
add_vars(env_content, &mut vars)?;
@@ -14,7 +14,7 @@ pub mod user_backfiller;
#[cfg(test)]
mod tests;
-use anyhow::anyhow;
+use anyhow::Context as _;
use aws_config::{BehaviorVersion, Region};
use axum::{
http::{HeaderMap, StatusCode},
@@ -339,7 +339,7 @@ fn build_stripe_client(config: &Config) -> anyhow::Result<stripe::Client> {
let api_key = config
.stripe_api_key
.as_ref()
- .ok_or_else(|| anyhow!("missing stripe_api_key"))?;
+ .context("missing stripe_api_key")?;
Ok(stripe::Client::new(api_key))
}
@@ -348,11 +348,11 @@ async fn build_blob_store_client(config: &Config) -> anyhow::Result<aws_sdk_s3::
config
.blob_store_access_key
.clone()
- .ok_or_else(|| anyhow!("missing blob_store_access_key"))?,
+ .context("missing blob_store_access_key")?,
config
.blob_store_secret_key
.clone()
- .ok_or_else(|| anyhow!("missing blob_store_secret_key"))?,
+ .context("missing blob_store_secret_key")?,
None,
None,
"env",
@@ -363,13 +363,13 @@ async fn build_blob_store_client(config: &Config) -> anyhow::Result<aws_sdk_s3::
config
.blob_store_url
.as_ref()
- .ok_or_else(|| anyhow!("missing blob_store_url"))?,
+ .context("missing blob_store_url")?,
)
.region(Region::new(
config
.blob_store_region
.clone()
- .ok_or_else(|| anyhow!("missing blob_store_region"))?,
+ .context("missing blob_store_region")?,
))
.credentials_provider(keys)
.load()
@@ -383,11 +383,11 @@ async fn build_kinesis_client(config: &Config) -> anyhow::Result<aws_sdk_kinesis
config
.kinesis_access_key
.clone()
- .ok_or_else(|| anyhow!("missing kinesis_access_key"))?,
+ .context("missing kinesis_access_key")?,
config
.kinesis_secret_key
.clone()
- .ok_or_else(|| anyhow!("missing kinesis_secret_key"))?,
+ .context("missing kinesis_secret_key")?,
None,
None,
"env",
@@ -398,7 +398,7 @@ async fn build_kinesis_client(config: &Config) -> anyhow::Result<aws_sdk_kinesis
config
.kinesis_region
.clone()
- .ok_or_else(|| anyhow!("missing kinesis_region"))?,
+ .context("missing kinesis_region")?,
))
.credentials_provider(keys)
.load()
@@ -19,7 +19,7 @@ use usage_measure::UsageMeasure;
use std::future::Future;
use std::sync::Arc;
-use anyhow::anyhow;
+use anyhow::Context;
pub use sea_orm::ConnectOptions;
use sea_orm::prelude::*;
use sea_orm::{
@@ -93,7 +93,7 @@ impl LlmDatabase {
Ok(self
.models
.get(&(provider, name.to_string()))
- .ok_or_else(|| anyhow!("unknown model {provider:?}:{name}"))?)
+ .with_context(|| format!("unknown model {provider:?}:{name}"))?)
}
pub fn model_by_id(&self, id: ModelId) -> Result<&model::Model> {
@@ -101,7 +101,7 @@ impl LlmDatabase {
.models
.values()
.find(|model| model.id == id)
- .ok_or_else(|| anyhow!("no model for ID {id:?}"))?)
+ .with_context(|| format!("no model for ID {id:?}"))?)
}
pub fn options(&self) -> &ConnectOptions {
@@ -142,11 +142,9 @@ impl LlmDatabase {
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
- let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
- return Err(anyhow!(
- "couldn't complete transaction because it's still in use"
- ))?;
- };
+ let tx = Arc::get_mut(&mut tx)
+ .and_then(|tx| tx.take())
+ .context("couldn't complete transaction because it's still in use")?;
Ok((tx, result))
}
@@ -2,7 +2,7 @@ use crate::db::billing_subscription::SubscriptionKind;
use crate::db::{billing_subscription, user};
use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
use crate::{Config, db::billing_preference};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use chrono::{NaiveDateTime, Utc};
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
@@ -49,7 +49,7 @@ impl LlmTokenClaims {
let secret = config
.llm_api_secret
.as_ref()
- .ok_or_else(|| anyhow!("no LLM API secret"))?;
+ .context("no LLM API secret")?;
let plan = if is_staff {
Plan::ZedPro
@@ -63,7 +63,7 @@ impl LlmTokenClaims {
let subscription_period =
billing_subscription::Model::current_period(Some(subscription), is_staff)
.map(|(start, end)| (start.naive_utc(), end.naive_utc()))
- .ok_or_else(|| anyhow!("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started."))?;
+ .context("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started.")?;
let now = Utc::now();
let claims = Self {
@@ -112,7 +112,7 @@ impl LlmTokenClaims {
let secret = config
.llm_api_secret
.as_ref()
- .ok_or_else(|| anyhow!("no LLM API secret"))?;
+ .context("no LLM API secret")?;
match jsonwebtoken::decode::<Self>(
token,
@@ -1,4 +1,4 @@
-use anyhow::anyhow;
+use anyhow::{Context as _, anyhow};
use axum::headers::HeaderMapExt;
use axum::{
Extension, Router,
@@ -138,11 +138,11 @@ async fn main() -> Result<()> {
.config
.llm_database_url
.as_ref()
- .ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?;
+ .context("missing LLM_DATABASE_URL")?;
let max_connections = state
.config
.llm_database_max_connections
- .ok_or_else(|| anyhow!("missing LLM_DATABASE_MAX_CONNECTIONS"))?;
+ .context("missing LLM_DATABASE_MAX_CONNECTIONS")?;
let mut db_options = db::ConnectOptions::new(database_url);
db_options.max_connections(max_connections);
@@ -287,7 +287,7 @@ async fn setup_llm_database(config: &Config) -> Result<()> {
let database_url = config
.llm_database_url
.as_ref()
- .ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?;
+ .context("missing LLM_DATABASE_URL")?;
let db_options = db::ConnectOptions::new(database_url.clone());
let db = LlmDatabase::new(db_options, Executor::Production).await?;
@@ -30,12 +30,11 @@ pub async fn run_database_migrations(
for migration in migrations {
match applied_migrations.get(&migration.version) {
Some(applied_migration) => {
- if migration.checksum != applied_migration.checksum {
- Err(anyhow!(
- "checksum mismatch for applied migration {}",
- migration.description
- ))?;
- }
+ anyhow::ensure!(
+ migration.checksum == applied_migration.checksum,
+ "checksum mismatch for applied migration {}",
+ migration.description
+ );
}
None => {
let elapsed = connection.apply(&migration).await?;
@@ -664,7 +664,7 @@ impl Server {
Err(error) => {
let proto_err = match &error {
Error::Internal(err) => err.to_proto(),
- _ => ErrorCode::Internal.message(format!("{}", error)).to_proto(),
+ _ => ErrorCode::Internal.message(format!("{error}")).to_proto(),
};
peer.respond_with_error(receipt, proto_err)?;
Err(error)
@@ -938,7 +938,7 @@ impl Server {
.db
.get_user_by_id(user_id)
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let update_user_plan = make_update_user_plan_message(
&self.app_state.db,
@@ -1169,7 +1169,7 @@ pub async fn handle_metrics(Extension(server): Extension<Arc<Server>>) -> Result
let metric_families = prometheus::gather();
let encoded_metrics = encoder
.encode_to_string(&metric_families)
- .map_err(|err| anyhow!("{}", err))?;
+ .map_err(|err| anyhow!("{err}"))?;
Ok(encoded_metrics)
}
@@ -1685,7 +1685,7 @@ async fn decline_call(message: proto::DeclineCall, session: Session) -> Result<(
.await
.decline_call(Some(room_id), session.user_id())
.await?
- .ok_or_else(|| anyhow!("failed to decline call"))?;
+ .context("declining call")?;
room_updated(&room, &session.peer);
}
@@ -1715,9 +1715,7 @@ async fn update_participant_location(
session: Session,
) -> Result<()> {
let room_id = RoomId::from_proto(request.room_id);
- let location = request
- .location
- .ok_or_else(|| anyhow!("invalid location"))?;
+ let location = request.location.context("invalid location")?;
let db = session.db().await;
let room = db
@@ -2246,7 +2244,7 @@ async fn create_buffer_for_peer(
session.connection_id,
)
.await?;
- let peer_id = request.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?;
+ let peer_id = request.peer_id.context("invalid peer id")?;
session
.peer
.forward_send(session.connection_id, peer_id.into(), request)?;
@@ -2377,10 +2375,7 @@ async fn follow(
) -> Result<()> {
let room_id = RoomId::from_proto(request.room_id);
let project_id = request.project_id.map(ProjectId::from_proto);
- let leader_id = request
- .leader_id
- .ok_or_else(|| anyhow!("invalid leader id"))?
- .into();
+ let leader_id = request.leader_id.context("invalid leader id")?.into();
let follower_id = session.connection_id;
session
@@ -2411,10 +2406,7 @@ async fn follow(
async fn unfollow(request: proto::Unfollow, session: Session) -> Result<()> {
let room_id = RoomId::from_proto(request.room_id);
let project_id = request.project_id.map(ProjectId::from_proto);
- let leader_id = request
- .leader_id
- .ok_or_else(|| anyhow!("invalid leader id"))?
- .into();
+ let leader_id = request.leader_id.context("invalid leader id")?.into();
let follower_id = session.connection_id;
session
@@ -3358,9 +3350,7 @@ async fn join_channel_internal(
};
channel_updated(
- &joined_room
- .channel
- .ok_or_else(|| anyhow!("channel not returned"))?,
+ &joined_room.channel.context("channel not returned")?,
&joined_room.room,
&session.peer,
&*session.connection_pool().await,
@@ -3568,9 +3558,7 @@ async fn send_channel_message(
// TODO: adjust mentions if body is trimmed
let timestamp = OffsetDateTime::now_utc();
- let nonce = request
- .nonce
- .ok_or_else(|| anyhow!("nonce can't be blank"))?;
+ let nonce = request.nonce.context("nonce can't be blank")?;
let channel_id = ChannelId::from_proto(request.channel_id);
let CreatedChannelMessage {
@@ -3710,10 +3698,7 @@ async fn update_channel_message(
)
.await?;
- let nonce = request
- .nonce
- .clone()
- .ok_or_else(|| anyhow!("nonce can't be blank"))?;
+ let nonce = request.nonce.clone().context("nonce can't be blank")?;
let message = proto::ChannelMessage {
sender_id: session.user_id().to_proto(),
@@ -3818,14 +3803,12 @@ async fn get_supermaven_api_key(
return Err(anyhow!("supermaven not enabled for this account"))?;
}
- let email = session
- .email()
- .ok_or_else(|| anyhow!("user must have an email"))?;
+ let email = session.email().context("user must have an email")?;
let supermaven_admin_api = session
.supermaven_client
.as_ref()
- .ok_or_else(|| anyhow!("supermaven not configured"))?;
+ .context("supermaven not configured")?;
let result = supermaven_admin_api
.try_get_or_create_user(CreateExternalUserRequest { id: user_id, email })
@@ -3973,7 +3956,7 @@ async fn get_private_user_info(
let user = db
.get_user_by_id(session.user_id())
.await?
- .ok_or_else(|| anyhow!("user not found"))?;
+ .context("user not found")?;
let flags = db.get_user_flags(session.user_id()).await?;
response.send(proto::GetPrivateUserInfoResponse {
@@ -4019,19 +4002,23 @@ async fn get_llm_api_token(
let user = db
.get_user_by_id(user_id)
.await?
- .ok_or_else(|| anyhow!("user {} not found", user_id))?;
+ .with_context(|| format!("user {user_id} not found"))?;
if user.accepted_tos_at.is_none() {
Err(anyhow!("terms of service not accepted"))?
}
- let Some(stripe_client) = session.app_state.stripe_client.as_ref() else {
- Err(anyhow!("failed to retrieve Stripe client"))?
- };
+ let stripe_client = session
+ .app_state
+ .stripe_client
+ .as_ref()
+ .context("failed to retrieve Stripe client")?;
- let Some(stripe_billing) = session.app_state.stripe_billing.as_ref() else {
- Err(anyhow!("failed to retrieve Stripe billing object"))?
- };
+ let stripe_billing = session
+ .app_state
+ .stripe_billing
+ .as_ref()
+ .context("failed to retrieve Stripe billing object")?;
let billing_customer =
if let Some(billing_customer) = db.get_billing_customer_by_user_id(user.id).await? {
@@ -4047,7 +4034,7 @@ async fn get_llm_api_token(
stripe::Expandable::Id(customer_id),
)
.await?
- .ok_or_else(|| anyhow!("billing customer not found"))?
+ .context("billing customer not found")?
};
let billing_subscription =
@@ -1,5 +1,5 @@
use crate::db::{ChannelId, ChannelRole, UserId};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use semantic_version::SemanticVersion;
@@ -77,7 +77,7 @@ impl ConnectionPool {
let connection = self
.connections
.get_mut(&connection_id)
- .ok_or_else(|| anyhow!("no such connection"))?;
+ .context("no such connection")?;
let user_id = connection.user_id;
@@ -1,6 +1,6 @@
use super::{RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
use crate::{db::UserId, tests::run_randomized_test};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use call::ActiveCall;
use collections::{BTreeMap, HashMap};
@@ -782,8 +782,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let save =
project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
let save = cx.spawn(|cx| async move {
- save.await
- .map_err(|err| anyhow!("save request failed: {:?}", err))?;
+ save.await.context("save request failed")?;
assert!(
buffer
.read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use chrono::{DateTime, Utc};
use util::ResultExt;
@@ -144,12 +144,9 @@ impl UserBackfiller {
}
}
- let response = match response.error_for_status() {
- Ok(response) => response,
- Err(err) => return Err(anyhow!("failed to fetch GitHub user: {err}")),
- };
-
response
+ .error_for_status()
+ .context("fetching GitHub user")?
.json()
.await
.with_context(|| format!("failed to deserialize GitHub user from '{url}'"))
@@ -3,6 +3,7 @@ mod contact_finder;
use self::channel_modal::ChannelModal;
use crate::{CollaborationPanelSettings, channel_view::ChannelView, chat_panel::ChatPanel};
+use anyhow::Context as _;
use call::ActiveCall;
use channel::{Channel, ChannelEvent, ChannelStore};
use client::{ChannelId, Client, Contact, User, UserStore};
@@ -388,9 +389,7 @@ impl CollabPanel {
Some(serialization_key) => cx
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
.await
- .map_err(|_| {
- anyhow::anyhow!("Failed to read collaboration panel from key value store")
- })
+ .context("reading collaboration panel from key value store")
.log_err()
.flatten()
.map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
@@ -1,4 +1,4 @@
-use anyhow::{Context, Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use collections::HashMap;
use futures::{FutureExt, StreamExt, channel::oneshot, select};
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Task};
@@ -308,7 +308,7 @@ impl Client {
.response_handlers
.lock()
.as_mut()
- .ok_or_else(|| anyhow!("server shut down"))
+ .context("server shut down")
.map(|handlers| {
handlers.insert(
RequestId::Int(id),
@@ -341,7 +341,7 @@ impl Client {
} else if let Some(result) = parsed.result {
Ok(serde_json::from_str(result.get())?)
} else {
- Err(anyhow!("Invalid response: no result or error"))
+ anyhow::bail!("Invalid response: no result or error");
}
}
Err(_) => anyhow::bail!("cancelled")
@@ -46,12 +46,11 @@ impl ModelContextProtocol {
.request(types::RequestType::Initialize.as_str(), params)
.await?;
- if !Self::supported_protocols().contains(&response.protocol_version) {
- return Err(anyhow::anyhow!(
- "Unsupported protocol version: {:?}",
- response.protocol_version
- ));
- }
+ anyhow::ensure!(
+ Self::supported_protocols().contains(&response.protocol_version),
+ "Unsupported protocol version: {:?}",
+ response.protocol_version
+ );
log::trace!("mcp server info {:?}", response.server_info);
@@ -96,14 +95,11 @@ impl InitializedContextServerProtocol {
}
fn check_capability(&self, capability: ServerCapability) -> Result<()> {
- if self.capable(capability) {
- Ok(())
- } else {
- Err(anyhow::anyhow!(
- "Server does not support {:?} capability",
- capability
- ))
- }
+ anyhow::ensure!(
+ self.capable(capability),
+ "Server does not support {capability:?} capability"
+ );
+ Ok(())
}
/// List the MCP prompts.
@@ -133,21 +133,20 @@ enum CopilotServer {
impl CopilotServer {
fn as_authenticated(&mut self) -> Result<&mut RunningCopilotServer> {
let server = self.as_running()?;
- if matches!(server.sign_in_status, SignInStatus::Authorized { .. }) {
- Ok(server)
- } else {
- Err(anyhow!("must sign in before using copilot"))
- }
+ anyhow::ensure!(
+ matches!(server.sign_in_status, SignInStatus::Authorized { .. }),
+ "must sign in before using copilot"
+ );
+ Ok(server)
}
fn as_running(&mut self) -> Result<&mut RunningCopilotServer> {
match self {
- CopilotServer::Starting { .. } => Err(anyhow!("copilot is still starting")),
- CopilotServer::Disabled => Err(anyhow!("copilot is disabled")),
- CopilotServer::Error(error) => Err(anyhow!(
- "copilot was not started because of an error: {}",
- error
- )),
+ CopilotServer::Starting { .. } => anyhow::bail!("copilot is still starting"),
+ CopilotServer::Disabled => anyhow::bail!("copilot is disabled"),
+ CopilotServer::Error(error) => {
+ anyhow::bail!("copilot was not started because of an error: {error}")
+ }
CopilotServer::Running(server) => Ok(server),
}
}
@@ -648,7 +647,7 @@ impl Copilot {
}
};
- cx.background_spawn(task.map_err(|err| anyhow!("{:?}", err)))
+ cx.background_spawn(task.map_err(|err| anyhow!("{err:?}")))
} else {
// If we're downloading, wait until download is finished
// If we're in a stuck state, display to the user
@@ -2,6 +2,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::sync::OnceLock;
+use anyhow::Context as _;
use anyhow::{Result, anyhow};
use chrono::DateTime;
use collections::HashSet;
@@ -322,8 +323,8 @@ impl TryFrom<ApiTokenResponse> for ApiToken {
type Error = anyhow::Error;
fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
- let expires_at = DateTime::from_timestamp(response.expires_at, 0)
- .ok_or_else(|| anyhow!("invalid expires_at"))?;
+ let expires_at =
+ DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;
Ok(Self {
api_key: response.token,
@@ -442,9 +443,11 @@ impl CopilotChat {
request: Request,
mut cx: AsyncApp,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
- let Some(this) = cx.update(|cx| Self::global(cx)).ok().flatten() else {
- return Err(anyhow!("Copilot chat is not enabled"));
- };
+ let this = cx
+ .update(|cx| Self::global(cx))
+ .ok()
+ .flatten()
+ .context("Copilot chat is not enabled")?;
let (oauth_token, api_token, client) = this.read_with(&cx, |this, _| {
(
@@ -454,7 +457,7 @@ impl CopilotChat {
)
})?;
- let oauth_token = oauth_token.ok_or_else(|| anyhow!("No OAuth token available"))?;
+ let oauth_token = oauth_token.context("No OAuth token available")?;
let token = match api_token {
Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
@@ -513,18 +516,19 @@ async fn request_models(api_token: String, client: Arc<dyn HttpClient>) -> Resul
let mut response = client.send(request).await?;
- if response.status().is_success() {
- let mut body = Vec::new();
- response.body_mut().read_to_end(&mut body).await?;
+ anyhow::ensure!(
+ response.status().is_success(),
+ "Failed to request models: {}",
+ response.status()
+ );
+ let mut body = Vec::new();
+ response.body_mut().read_to_end(&mut body).await?;
- let body_str = std::str::from_utf8(&body)?;
+ let body_str = std::str::from_utf8(&body)?;
- let models = serde_json::from_str::<ModelSchema>(body_str)?.data;
+ let models = serde_json::from_str::<ModelSchema>(body_str)?.data;
- Ok(models)
- } else {
- Err(anyhow!("Failed to request models: {}", response.status()))
- }
+ Ok(models)
}
async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Result<ApiToken> {
@@ -551,8 +555,7 @@ async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Re
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
-
- Err(anyhow!("Failed to request API token: {}", body_str))
+ anyhow::bail!("Failed to request API token: {body_str}");
}
}
@@ -603,11 +606,11 @@ async fn stream_completion(
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
- return Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to API: {} {}",
response.status(),
body_str
- ));
+ );
}
if is_streaming {
@@ -1,5 +1,5 @@
use ::fs::Fs;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
@@ -103,8 +103,8 @@ impl TcpArguments {
pub fn from_proto(proto: proto::TcpHost) -> anyhow::Result<Self> {
let host = TcpArgumentsTemplate::from_proto(proto)?;
Ok(TcpArguments {
- host: host.host.ok_or_else(|| anyhow!("missing host"))?,
- port: host.port.ok_or_else(|| anyhow!("missing port"))?,
+ host: host.host.context("missing host")?,
+ port: host.port.context("missing port")?,
timeout: host.timeout,
})
}
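
Taken together, a proto-decoding helper in this style reads as a straight line of `.context(..)?` calls. The struct below is a hypothetical stand-in for a generated proto message with optional fields, used only to illustrate the shape of the hunk above:

```rust
use anyhow::{Context as _, Result};

// Hypothetical stand-in for a generated proto type.
struct TcpHostProto {
    host: Option<String>,
    port: Option<u16>,
}

fn tcp_args_from_proto(proto: TcpHostProto) -> Result<(String, u16)> {
    let host = proto.host.context("missing host")?;
    let port = proto.port.context("missing port")?;
    Ok((host, port))
}
```
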
@@ -200,9 +200,7 @@ impl DebugTaskDefinition {
}
pub fn from_proto(proto: proto::DebugTaskDefinition) -> Result<Self> {
- let request = proto
- .request
- .ok_or_else(|| anyhow::anyhow!("request is required"))?;
+ let request = proto.request.context("request is required")?;
Ok(Self {
label: proto.label.into(),
initialize_args: proto.initialize_args.map(|v| v.into()),
@@ -346,12 +344,11 @@ pub async fn download_adapter_from_github(
.get(&github_version.url, Default::default(), true)
.await
.context("Error downloading release")?;
- if !response.status().is_success() {
- Err(anyhow!(
- "download failed with status {}",
- response.status().to_string()
- ))?;
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "download failed with status {}",
+ response.status().to_string()
+ );
match file_type {
DownloadedFileType::GzipTar => {
@@ -2,7 +2,7 @@ use crate::{
adapters::DebugAdapterBinary,
transport::{IoKind, LogKind, TransportDelegate},
};
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use dap_types::{
messages::{Message, Response},
requests::Request,
@@ -187,10 +187,7 @@ impl DebugAdapterClient {
Ok(serde_json::from_value(Default::default())?)
}
}
- false => Err(anyhow!(
- "Request failed: {}",
- response.message.unwrap_or_default()
- )),
+ false => anyhow::bail!("Request failed: {}", response.message.unwrap_or_default()),
}
}
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use client::proto::{
self, DapChecksum, DapChecksumAlgorithm, DapEvaluateContext, DapModule, DapScope,
DapScopePresentationHint, DapSource, DapSourcePresentationHint, DapStackFrame, DapVariable,
@@ -311,9 +311,9 @@ impl ProtoConversion for dap_types::Module {
fn from_proto(payload: Self::ProtoType) -> Result<Self> {
let id = match payload
.id
- .ok_or(anyhow!("All DapModule proto messages must have an id"))?
+ .context("All DapModule proto messages must have an id")?
.id
- .ok_or(anyhow!("All DapModuleID proto messages must have an id"))?
+ .context("All DapModuleID proto messages must have an id")?
{
proto::dap_module_id::Id::String(string) => dap_types::ModuleId::String(string),
proto::dap_module_id::Id::Number(num) => dap_types::ModuleId::Number(num),
@@ -1,4 +1,4 @@
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use dap_types::{
ErrorResponse,
messages::{Message, Response},
@@ -226,12 +226,9 @@ impl TransportDelegate {
pub(crate) async fn send_message(&self, message: Message) -> Result<()> {
if let Some(server_tx) = self.server_tx.lock().await.as_ref() {
- server_tx
- .send(message)
- .await
- .map_err(|e| anyhow!("Failed to send message: {}", e))
+ server_tx.send(message).await.context("sending message")
} else {
- Err(anyhow!("Server tx already dropped"))
+ anyhow::bail!("Server tx already dropped")
}
}
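
Unlike the old `map_err(|e| anyhow!("Failed to send message: {}", e))`, which flattens the cause into a string, `.context("sending message")` keeps the underlying error as the source of the returned `anyhow::Error`, so the full chain is still available when the error is printed. A minimal sketch of the difference (file path and messages are illustrative):

    use anyhow::{Context as _, Result};

    fn read_number(path: &str) -> Result<i32> {
        // The io/parse errors remain in the chain as sources of the context message.
        let text = std::fs::read_to_string(path)
            .with_context(|| format!("reading {path:?}"))?;
        text.trim().parse().context("parsing number")
    }

    fn main() {
        if let Err(error) = read_number("/no/such/file") {
            // `{:#}` prints the context followed by each underlying cause.
            println!("{error:#}");
        }
    }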
@@ -254,7 +251,7 @@ impl TransportDelegate {
};
if bytes_read == 0 {
- break Err(anyhow!("Debugger log stream closed"));
+ anyhow::bail!("Debugger log stream closed");
}
if let Some(log_handlers) = log_handlers.as_ref() {
@@ -379,7 +376,7 @@ impl TransportDelegate {
let result = loop {
match reader.read_line(&mut buffer).await {
- Ok(0) => break Err(anyhow!("debugger error stream closed")),
+ Ok(0) => anyhow::bail!("debugger error stream closed"),
Ok(_) => {
for (kind, log_handler) in log_handlers.lock().iter_mut() {
if matches!(kind, LogKind::Adapter) {
@@ -409,13 +406,13 @@ impl TransportDelegate {
.and_then(|response| response.error.map(|msg| msg.format))
.or_else(|| response.message.clone())
{
- return Err(anyhow!(error_message));
+ anyhow::bail!(error_message);
};
- Err(anyhow!(
+ anyhow::bail!(
"Received error response from adapter. Response: {:?}",
- response.clone()
- ))
+ response
+ );
}
}
@@ -437,7 +434,7 @@ impl TransportDelegate {
.with_context(|| "reading a message from server")?
== 0
{
- return Err(anyhow!("debugger reader stream closed"));
+ anyhow::bail!("debugger reader stream closed");
};
if buffer == "\r\n" {
@@ -540,9 +537,10 @@ impl TcpTransport {
}
async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
- let Some(connection_args) = binary.connection.as_ref() else {
- return Err(anyhow!("No connection arguments provided"));
- };
+ let connection_args = binary
+ .connection
+ .as_ref()
+ .context("No connection arguments provided")?;
let host = connection_args.host;
let port = connection_args.port;
@@ -577,7 +575,7 @@ impl TcpTransport {
let (mut process, (rx, tx)) = select! {
_ = cx.background_executor().timer(Duration::from_millis(timeout)).fuse() => {
- return Err(anyhow!(format!("Connection to TCP DAP timeout {}:{}", host, port)))
+ anyhow::bail!("Connection to TCP DAP timeout {host}:{port}");
},
result = cx.spawn(async move |cx| {
loop {
@@ -591,7 +589,7 @@ impl TcpTransport {
} else {
String::from_utf8_lossy(&output.stderr).to_string()
};
- return Err(anyhow!("{}\nerror: process exited before debugger attached.", output));
+ anyhow::bail!("{output}\nerror: process exited before debugger attached.");
}
cx.background_executor().timer(Duration::from_millis(100)).await;
}
@@ -664,14 +662,8 @@ impl StdioTransport {
.spawn()
.with_context(|| "failed to spawn command.")?;
- let stdin = process
- .stdin
- .take()
- .ok_or_else(|| anyhow!("Failed to open stdin"))?;
- let stdout = process
- .stdout
- .take()
- .ok_or_else(|| anyhow!("Failed to open stdout"))?;
+ let stdin = process.stdin.take().context("Failed to open stdin")?;
+ let stdout = process.stdout.take().context("Failed to open stdout")?;
let stderr = process
.stderr
.take()
@@ -793,7 +785,7 @@ impl FakeTransport {
match message {
Err(error) => {
- break anyhow!(error);
+ break anyhow::anyhow!(error);
}
Ok(message) => {
match message {
@@ -1,6 +1,6 @@
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use dap::adapters::{DebugTaskDefinition, latest_github_release};
use futures::StreamExt;
@@ -69,22 +69,16 @@ impl CodeLldbDebugAdapter {
let arch = match std::env::consts::ARCH {
"aarch64" => "arm64",
"x86_64" => "x64",
- _ => {
- return Err(anyhow!(
- "unsupported architecture {}",
- std::env::consts::ARCH
- ));
+ unsupported => {
+ anyhow::bail!("unsupported architecture {unsupported}");
}
};
let platform = match std::env::consts::OS {
"macos" => "darwin",
"linux" => "linux",
"windows" => "win32",
- _ => {
- return Err(anyhow!(
- "unsupported operating system {}",
- std::env::consts::OS
- ));
+ unsupported => {
+ anyhow::bail!("unsupported operating system {unsupported}");
}
};
let asset_name = format!("codelldb-{platform}-{arch}.vsix");
@@ -94,7 +88,7 @@ impl CodeLldbDebugAdapter {
.assets
.iter()
.find(|asset| asset.name == asset_name)
- .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?
+ .with_context(|| format!("no asset found matching {asset_name:?}"))?
.browser_download_url
.clone(),
};
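
The `with_context(|| format!(...))` form used above takes a closure, so the message string is only built when the lookup actually fails; plain `.context(...)` evaluates its argument eagerly even on the success path. A minimal sketch (the asset names are made up, not the adapter's real assets):

    use anyhow::{Context as _, Result};

    fn find_asset<'a>(assets: &'a [&'a str], wanted: &str) -> Result<&'a str> {
        assets
            .iter()
            .find(|name| **name == wanted)
            .copied()
            // The closure runs only on the error path, so no String is allocated on success.
            .with_context(|| format!("no asset found matching {wanted:?}"))
    }

    fn main() {
        let assets = ["codelldb-darwin-arm64.vsix", "codelldb-linux-x64.vsix"];
        assert!(find_asset(&assets, "codelldb-darwin-arm64.vsix").is_ok());
        assert!(find_asset(&assets, "codelldb-win32-x64.vsix").is_err());
    }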
@@ -138,10 +132,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
version_path
} else {
let mut paths = delegate.fs().read_dir(&adapter_path).await?;
- paths
- .next()
- .await
- .ok_or_else(|| anyhow!("No adapter found"))??
+ paths.next().await.context("No adapter found")??
};
let adapter_dir = version_path.join("extension").join("adapter");
let path = adapter_dir.join("codelldb").to_string_lossy().to_string();
@@ -8,7 +8,7 @@ mod ruby;
use std::sync::Arc;
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use async_trait::async_trait;
use codelldb::CodeLldbDebugAdapter;
use dap::{
@@ -1,6 +1,6 @@
use std::{collections::HashMap, ffi::OsStr};
-use anyhow::{Result, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::AsyncApp;
@@ -78,7 +78,7 @@ impl DebugAdapter for GdbDebugAdapter {
.which(OsStr::new("gdb"))
.await
.and_then(|p| p.to_str().map(|s| s.to_string()))
- .ok_or(anyhow!("Could not find gdb in path"));
+ .context("Could not find gdb in path");
if gdb_path.is_err() && user_setting_path.is_none() {
bail!("Could not find gdb path or it's not installed");
@@ -1,3 +1,4 @@
+use anyhow::Context as _;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
@@ -59,18 +60,14 @@ impl DebugAdapter for GoDebugAdapter {
.which(OsStr::new("dlv"))
.await
.and_then(|p| p.to_str().map(|p| p.to_string()))
- .ok_or(anyhow!("Dlv not found in path"))?;
+ .context("Dlv not found in path")?;
let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
Ok(DebugAdapterBinary {
command: delve_path,
- arguments: vec![
- "dap".into(),
- "--listen".into(),
- format!("{}:{}", host, port),
- ],
+ arguments: vec!["dap".into(), "--listen".into(), format!("{host}:{port}")],
cwd: None,
envs: HashMap::default(),
connection: Some(adapters::TcpArguments {
@@ -1,4 +1,5 @@
use adapters::latest_github_release;
+use anyhow::Context as _;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::AsyncApp;
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
@@ -74,7 +75,7 @@ impl JsDebugAdapter {
.assets
.iter()
.find(|asset| asset.name == asset_name)
- .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?
+ .with_context(|| format!("no asset found matching {asset_name:?}"))?
.browser_download_url
.clone(),
})
@@ -98,7 +99,7 @@ impl JsDebugAdapter {
file_name.starts_with(&file_name_prefix)
})
.await
- .ok_or_else(|| anyhow!("Couldn't find JavaScript dap directory"))?
+ .context("Couldn't find JavaScript dap directory")?
};
let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
@@ -1,4 +1,5 @@
use adapters::latest_github_release;
+use anyhow::Context as _;
use dap::adapters::{DebugTaskDefinition, TcpArguments};
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
@@ -58,7 +59,7 @@ impl PhpDebugAdapter {
.assets
.iter()
.find(|asset| asset.name == asset_name)
- .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?
+ .with_context(|| format!("no asset found matching {asset_name:?}"))?
.browser_download_url
.clone(),
})
@@ -82,7 +83,7 @@ impl PhpDebugAdapter {
file_name.starts_with(&file_name_prefix)
})
.await
- .ok_or_else(|| anyhow!("Couldn't find PHP dap directory"))?
+ .context("Couldn't find PHP dap directory")?
};
let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
@@ -1,4 +1,5 @@
use crate::*;
+use anyhow::Context as _;
use dap::{DebugRequest, StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
@@ -112,7 +113,7 @@ impl PythonDebugAdapter {
file_name.starts_with(&file_name_prefix)
})
.await
- .ok_or_else(|| anyhow!("Debugpy directory not found"))?
+ .context("Debugpy directory not found")?
};
let toolchain = delegate
@@ -143,7 +144,7 @@ impl PythonDebugAdapter {
};
Ok(DebugAdapterBinary {
- command: python_path.ok_or(anyhow!("failed to find binary path for python"))?,
+ command: python_path.context("failed to find binary path for Python")?,
arguments: vec![
debugpy_dir
.join(Self::ADAPTER_PATH)
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use async_trait::async_trait;
use dap::{
DebugRequest, StartDebuggingRequestArguments,
@@ -54,12 +54,11 @@ impl DebugAdapter for RubyDebugAdapter {
.arg("debug")
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to install rdbg:\n{}",
- String::from_utf8_lossy(&output.stderr).to_string()
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to install rdbg:\n{}",
+ String::from_utf8_lossy(&output.stderr).to_string()
+ );
}
}
}
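
`anyhow::ensure!(cond, ...)` is the guard-clause counterpart of `bail!`: when the condition is false it returns early with the formatted error, and otherwise it is a no-op, which is why it can replace the `if !ok { return Err(anyhow!(...)) }` blocks throughout this change. A minimal sketch, assuming `rustc` is on `PATH` (the command is illustrative):

    use anyhow::Result;
    use std::process::Command;

    fn run_version_check() -> Result<String> {
        let output = Command::new("rustc").arg("--version").output()?;
        // Early-returns `Err(...)` when the condition is false, otherwise falls through.
        anyhow::ensure!(
            output.status.success(),
            "rustc --version failed:\n{}",
            String::from_utf8_lossy(&output.stderr)
        );
        Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
    }

    fn main() -> Result<()> {
        println!("{}", run_version_check()?);
        Ok(())
    }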
@@ -7,7 +7,7 @@ use crate::{
ShowStackTrace, StepBack, StepInto, StepOut, StepOver, Stop, ToggleIgnoreBreakpoints,
persistence,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use command_palette_hooks::CommandPaletteFilter;
use dap::StartDebuggingRequestArguments;
use dap::adapters::DebugAdapterName;
@@ -1021,17 +1021,13 @@ impl DebugPanel {
}
workspace.update(cx, |workspace, cx| {
- if let Some(project_path) = workspace
+ workspace
.project()
.read(cx)
.project_path_for_absolute_path(&path, cx)
- {
- Ok(project_path)
- } else {
- Err(anyhow!(
- "Couldn't get project path for .zed/debug.json in active worktree"
- ))
- }
+ .context(
+ "Couldn't get project path for .zed/debug.json in active worktree",
+ )
})?
})
})
@@ -9,7 +9,6 @@ use std::{
usize,
};
-use anyhow::Result;
use dap::{
DapRegistry, DebugRequest,
adapters::{DebugAdapterName, DebugTaskDefinition},
@@ -253,7 +252,7 @@ impl NewSessionModal {
cx.emit(DismissEvent);
})
.ok();
- Result::<_, anyhow::Error>::Ok(())
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
}
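
`anyhow::Ok(value)` in the hunk above is simply `Ok::<_, anyhow::Error>(value)` spelled as a function: it pins the error type of a closure or async block so `?` resolves without the turbofish annotation that `Result::<_, anyhow::Error>::Ok(())` used to provide. A minimal sketch (the parsing helper is made up):

    use anyhow::Result;

    fn parse_all(inputs: &[&str]) -> Result<Vec<i32>> {
        let mut parsed = Vec::new();
        inputs.iter().try_for_each(|s| {
            parsed.push(s.parse::<i32>()?);
            // Pins the closure's error type to `anyhow::Error`.
            anyhow::Ok(())
        })?;
        Ok(parsed)
    }

    fn main() -> Result<()> {
        assert_eq!(parse_all(&["1", "2", "3"])?, vec![1, 2, 3]);
        Ok(())
    }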
@@ -1,3 +1,4 @@
+use anyhow::Context as _;
use collections::HashMap;
use dap::{Capabilities, adapters::DebugAdapterName};
use db::kvp::KEY_VALUE_STORE;
@@ -96,18 +97,14 @@ pub(crate) async fn serialize_pane_layout(
adapter_name: DebugAdapterName,
pane_group: SerializedLayout,
) -> anyhow::Result<()> {
- if let Ok(serialized_pane_group) = serde_json::to_string(&pane_group) {
- KEY_VALUE_STORE
- .write_kvp(
- format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
- serialized_pane_group,
- )
- .await
- } else {
- Err(anyhow::anyhow!(
- "Failed to serialize pane group with serde_json as a string"
- ))
- }
+ let serialized_pane_group = serde_json::to_string(&pane_group)
+ .context("Serializing pane group with serde_json as a string")?;
+ KEY_VALUE_STORE
+ .write_kvp(
+ format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
+ serialized_pane_group,
+ )
+ .await
}
pub(crate) fn build_serialized_layout(
@@ -196,7 +196,7 @@ impl FollowableItem for DebugSession {
_state: &mut Option<proto::view::Variant>,
_window: &mut Window,
_cx: &mut App,
- ) -> Option<gpui::Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<gpui::Task<anyhow::Result<Entity<Self>>>> {
None
}
@@ -218,7 +218,7 @@ impl FollowableItem for DebugSession {
_message: proto::update_view::Variant,
_window: &mut Window,
_cx: &mut Context<Self>,
- ) -> gpui::Task<gpui::Result<()>> {
+ ) -> gpui::Task<anyhow::Result<()>> {
Task::ready(Ok(()))
}
@@ -10,7 +10,7 @@ use std::{any::Any, ops::ControlFlow, path::PathBuf, sync::Arc, time::Duration};
use crate::persistence::{self, DebuggerPaneItem, SerializedLayout};
use super::DebugPanelItemEvent;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use breakpoint_list::BreakpointList;
use collections::{HashMap, IndexMap};
use console::Console;
@@ -817,7 +817,7 @@ impl RunningState {
let exit_status = terminal
.read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
.await
- .ok_or_else(|| anyhow!("Failed to wait for completed task"))?;
+ .context("Failed to wait for completed task")?;
if !exit_status.success() {
anyhow::bail!("Build failed");
@@ -829,22 +829,22 @@ impl RunningState {
let request = if let Some(request) = request {
request
} else if let Some((task, locator_name)) = build_output {
- let locator_name = locator_name
- .ok_or_else(|| anyhow!("Could not find a valid locator for a build task"))?;
+ let locator_name =
+ locator_name.context("Could not find a valid locator for a build task")?;
dap_store
.update(cx, |this, cx| {
this.run_debug_locator(&locator_name, task, cx)
})?
.await?
} else {
- return Err(anyhow!("No request or build provided"));
+ anyhow::bail!("No request or build provided");
};
let request = match request {
dap::DebugRequest::Launch(launch_request) => {
let cwd = match launch_request.cwd.as_deref().and_then(|path| path.to_str()) {
Some(cwd) => {
let substituted_cwd = substitute_variables_in_str(&cwd, &task_context)
- .ok_or_else(|| anyhow!("Failed to substitute variables in cwd"))?;
+ .context("substituting variables in cwd")?;
Some(PathBuf::from(substituted_cwd))
}
None => None,
@@ -854,7 +854,7 @@ impl RunningState {
&launch_request.env.into_iter().collect(),
&task_context,
)
- .ok_or_else(|| anyhow!("Failed to substitute variables in env"))?
+ .context("substituting variables in env")?
.into_iter()
.collect();
let new_launch_request = LaunchRequest {
@@ -862,13 +862,13 @@ impl RunningState {
&launch_request.program,
&task_context,
)
- .ok_or_else(|| anyhow!("Failed to substitute variables in program"))?,
+ .context("substituting variables in program")?,
args: launch_request
.args
.into_iter()
.map(|arg| substitute_variables_in_str(&arg, &task_context))
.collect::<Option<Vec<_>>>()
- .ok_or_else(|| anyhow!("Failed to substitute variables in args"))?,
+ .context("substituting variables in args")?,
cwd,
env,
};
@@ -994,7 +994,7 @@ impl RunningState {
.pty_info
.pid()
.map(|pid| pid.as_u32())
- .ok_or_else(|| anyhow!("Terminal was spawned but PID was not available"))
+ .context("Terminal was spawned but PID was not available")
})?
});
@@ -377,7 +377,7 @@ impl LineBreakpoint {
})
.ok();
}
- Result::<_, anyhow::Error>::Ok(())
+ anyhow::Ok(())
})
.detach();
@@ -278,7 +278,7 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
_completion_indices: Vec<usize>,
_completions: Rc<RefCell<Box<[Completion]>>>,
_cx: &mut Context<Editor>,
- ) -> gpui::Task<gpui::Result<bool>> {
+ ) -> gpui::Task<anyhow::Result<bool>> {
Task::ready(Ok(false))
}
@@ -289,7 +289,7 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
_completion_index: usize,
_push_to_history: bool,
_cx: &mut Context<Editor>,
- ) -> gpui::Task<gpui::Result<Option<language::Transaction>>> {
+ ) -> gpui::Task<anyhow::Result<Option<language::Transaction>>> {
Task::ready(Ok(None))
}
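
The `gpui::Result` to `anyhow::Result` signature changes here (and elsewhere in this diff) are type-identical rewrites: the drop-in replacement implies gpui's `Result` is the same `Result<T, anyhow::Error>` alias, and the edit just names the defining crate directly. A minimal sketch showing that the long-hand and alias forms are the same type (the config reader is hypothetical):

    use anyhow::Context as _;

    // These two signatures denote the same type; the alias just reads shorter.
    fn read_config_verbose(path: &str) -> Result<String, anyhow::Error> {
        std::fs::read_to_string(path).with_context(|| format!("reading {path:?}"))
    }

    fn read_config(path: &str) -> anyhow::Result<String> {
        read_config_verbose(path)
    }

    fn main() {
        // A missing file produces an error carrying the added context message.
        assert!(read_config("/definitely/not/a/real/path.toml").is_err());
    }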
@@ -2,7 +2,7 @@ use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use dap::StackFrameId;
use gpui::{
AnyElement, Entity, EventEmitter, FocusHandle, Focusable, MouseButton, ScrollStrategy,
@@ -285,9 +285,10 @@ impl StackFrameList {
})?;
this.update_in(cx, |this, window, cx| {
this.workspace.update(cx, |workspace, cx| {
- let project_path = buffer.read(cx).project_path(cx).ok_or_else(|| {
- anyhow!("Could not select a stack frame for unnamed buffer")
- })?;
+ let project_path = buffer
+ .read(cx)
+ .project_path(cx)
+ .context("Could not select a stack frame for unnamed buffer")?;
let open_preview = !workspace
.item_of_type::<StackTraceView>(cx)
@@ -312,9 +313,9 @@ impl StackFrameList {
.await?;
this.update(cx, |this, cx| {
- let Some(thread_id) = this.state.read_with(cx, |state, _| state.thread_id)? else {
- return Err(anyhow!("No selected thread ID found"));
- };
+ let thread_id = this.state.read_with(cx, |state, _| {
+ state.thread_id.context("No selected thread ID found")
+ })??;
this.workspace.update(cx, |workspace, cx| {
let breakpoint_store = workspace.project().read(cx).breakpoint_store();
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use dap::adapters::DebugTaskDefinition;
use dap::{DebugRequest, client::DebugAdapterClient};
use gpui::{Entity, TestAppContext, WindowHandle};
@@ -125,7 +125,7 @@ pub fn start_debug_session_with<T: Fn(&Arc<DebugAdapterClient>) + 'static>(
.and_then(|panel| panel.read(cx).active_session())
.map(|session| session.read(cx).running_state().read(cx).session())
.cloned()
- .ok_or_else(|| anyhow!("Failed to get active session"))
+ .context("Failed to get active session")
})??;
Ok(session)
@@ -29,7 +29,7 @@ impl TryFrom<String> for Role {
"assistant" => Ok(Self::Assistant),
"system" => Ok(Self::System),
"tool" => Ok(Self::Tool),
- _ => Err(anyhow!("invalid role '{value}'")),
+ _ => anyhow::bail!("invalid role '{value}'"),
}
}
}
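
In match arms like the ones above, `anyhow::bail!` works because it expands to a `return`: the arm has type `!`, which unifies with the `Ok(...)` arms. A minimal sketch with a simplified stand-in enum (not the crate's actual `Role`):

    use anyhow::Result;

    #[derive(Debug, PartialEq)]
    enum Role {
        User,
        Assistant,
    }

    fn parse_role(value: &str) -> Result<Role> {
        match value {
            "user" => Ok(Role::User),
            "assistant" => Ok(Role::Assistant),
            // `bail!` returns early, so this arm never has to produce an `Ok` value.
            other => anyhow::bail!("invalid role '{other}'"),
        }
    }

    fn main() {
        assert_eq!(parse_role("user").unwrap(), Role::User);
        assert!(parse_role("robot").is_err());
    }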
@@ -72,7 +72,7 @@ impl Model {
match id {
"deepseek-chat" => Ok(Self::Chat),
"deepseek-reasoner" => Ok(Self::Reasoner),
- _ => Err(anyhow!("invalid model id")),
+ _ => anyhow::bail!("invalid model id {id}"),
}
}
@@ -296,10 +296,10 @@ pub async fn stream_completion(
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to DeepSeek API: {} {}",
response.status(),
body,
- ))
+ );
}
}
@@ -5552,7 +5552,7 @@ impl Editor {
task.await?;
}
- Ok::<_, anyhow::Error>(())
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
}
@@ -10630,8 +10630,8 @@ impl Editor {
.map(|line| {
line.strip_prefix(&line_prefix)
.or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
- .ok_or_else(|| {
- anyhow!("line did not start with prefix {line_prefix:?}: {line:?}")
+ .with_context(|| {
+ format!("line did not start with prefix {line_prefix:?}: {line:?}")
})
})
.collect::<Result<Vec<_>, _>>()
@@ -17330,7 +17330,7 @@ impl Editor {
Err(err) => {
let message = format!("Failed to copy permalink: {err}");
- Err::<(), anyhow::Error>(err).log_err();
+ anyhow::Result::<()>::Err(err).log_err();
if let Some(workspace) = workspace {
workspace
@@ -17385,7 +17385,7 @@ impl Editor {
Err(err) => {
let message = format!("Failed to open permalink: {err}");
- Err::<(), anyhow::Error>(err).log_err();
+ anyhow::Result::<()>::Err(err).log_err();
if let Some(workspace) = workspace {
workspace
@@ -649,7 +649,7 @@ pub fn show_link_definition(
}
})?;
- Ok::<_, anyhow::Error>(())
+ anyhow::Ok(())
}
.log_err()
.await
@@ -5,6 +5,7 @@ use crate::{
hover_links::{InlayHighlight, RangeInEditor},
scroll::{Autoscroll, ScrollAmount},
};
+use anyhow::Context as _;
use gpui::{
AnyElement, AsyncWindowContext, Context, Entity, Focusable as _, FontWeight, Hsla,
InteractiveElement, IntoElement, MouseButton, ParentElement, Pixels, ScrollHandle, Size,
@@ -341,7 +342,7 @@ fn show_hover(
.and_then(|renderer| {
renderer.render_hover(group, point_range, buffer_id, cx)
})
- .ok_or_else(|| anyhow::anyhow!("no rendered diagnostic"))
+ .context("no rendered diagnostic")
})??;
let (background_color, border_color) = cx.update(|_, cx| {
@@ -445,7 +445,7 @@ async fn update_editor_from_message(
}
multibuffer.remove_excerpts(removed_excerpt_ids, cx);
- Result::<(), anyhow::Error>::Ok(())
+ anyhow::Ok(())
})
})??;
@@ -355,7 +355,7 @@ impl Item for ProposedChangesEditor {
project: Entity<Project>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<gpui::Result<()>> {
+ ) -> Task<anyhow::Result<()>> {
self.editor.update(cx, |editor, cx| {
Item::save(editor, format, project, window, cx)
})
@@ -488,7 +488,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
buffer: &Entity<Buffer>,
position: text::Anchor,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Vec<project::DocumentHighlight>>>> {
+ ) -> Option<Task<anyhow::Result<Vec<project::DocumentHighlight>>>> {
let buffer = self.to_base(&buffer, &[position], cx)?;
self.0.document_highlights(&buffer, position, cx)
}
@@ -499,7 +499,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
position: text::Anchor,
kind: crate::GotoDefinitionKind,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Vec<project::LocationLink>>>> {
+ ) -> Option<Task<anyhow::Result<Vec<project::LocationLink>>>> {
let buffer = self.to_base(&buffer, &[position], cx)?;
self.0.definitions(&buffer, position, kind, cx)
}
@@ -509,7 +509,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
_: &Entity<Buffer>,
_: text::Anchor,
_: &mut App,
- ) -> Option<Task<gpui::Result<Option<Range<text::Anchor>>>>> {
+ ) -> Option<Task<anyhow::Result<Option<Range<text::Anchor>>>>> {
None
}
@@ -519,7 +519,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
_: text::Anchor,
_: String,
_: &mut App,
- ) -> Option<Task<gpui::Result<project::ProjectTransaction>>> {
+ ) -> Option<Task<anyhow::Result<project::ProjectTransaction>>> {
None
}
}
@@ -11,7 +11,6 @@ use instance::{ExampleInstance, JudgeOutput, RunOutput, run_git};
pub(crate) use tool_metrics::*;
use ::fs::RealFs;
-use anyhow::anyhow;
use clap::Parser;
use client::{Client, ProxySettings, UserStore};
use collections::{HashMap, HashSet};
@@ -255,13 +254,10 @@ fn main() {
let actual_origin =
run_git(&repo_path, &["remote", "get-url", "origin"]).await?;
- if actual_origin != repo_url {
- return Err(anyhow!(
- "remote origin {} does not match expected origin {}",
- actual_origin,
- repo_url,
- ));
- }
+ anyhow::ensure!(
+ actual_origin == repo_url,
+ "remote origin {actual_origin} does not match expected origin {repo_url}"
+ );
}
}
}
@@ -467,7 +463,7 @@ pub fn find_model(
match matching_models.as_slice() {
[model] => Ok(model.clone()),
- [] => Err(anyhow!(
+ [] => anyhow::bail!(
"No language model with ID {}/{} was available. Available models: {}",
provider_id,
model_id,
@@ -476,15 +472,15 @@ pub fn find_model(
.map(|model| format!("{}/{}", model.provider_id().0, model.id().0))
.collect::<Vec<_>>()
.join(", ")
- )),
- _ => Err(anyhow!(
+ ),
+ _ => anyhow::bail!(
"Multiple language models with ID {} available - use `--provider` to choose one of: {:?}",
model_id,
matching_models
.iter()
.map(|model| model.provider_id().0)
.collect::<Vec<_>>()
- )),
+ ),
}
}
@@ -177,12 +177,10 @@ impl ExampleContext {
fn log_assertion<T>(&mut self, result: Result<T>, message: String) -> Result<T> {
if let Some(max) = self.meta.max_assertions {
- if self.assertions.run_count() > max {
- return Err(anyhow!(
- "More assertions were run than the stated max_assertions of {}",
- max
- ));
- }
+ anyhow::ensure!(
+ self.assertions.run_count() <= max,
+ "More assertions were run than the stated max_assertions of {max}"
+ );
}
self.assertions.ran.push(RanAssertion {
@@ -319,7 +317,7 @@ impl ExampleContext {
}
}
_ = self.app.background_executor().timer(THREAD_EVENT_TIMEOUT).fuse() => {
- return Err(anyhow!("Agentic loop stalled - waited {:?} without any events", THREAD_EVENT_TIMEOUT));
+ anyhow::bail!("Agentic loop stalled - waited {THREAD_EVENT_TIMEOUT:?} without any events");
}
}
}
@@ -1,4 +1,4 @@
-use anyhow::{Context, Result, anyhow};
+use anyhow::{Context as _, Result};
use clap::Parser;
use serde_json::{Value, json};
use std::fs;
@@ -57,7 +57,7 @@ fn inject_thread_data(template: String, threads_data: Value) -> Result<String> {
let injection_marker = "let threadsData = window.threadsData || { threads: [dummyThread] };";
template
.find(injection_marker)
- .ok_or_else(|| anyhow!("Could not find the thread injection point in the template"))?;
+ .context("Could not find the thread injection point in the template")?;
let threads_json = serde_json::to_string_pretty(&threads_data)
.context("Failed to serialize threads data to JSON")?;
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use std::fs;
use std::path::{Path, PathBuf};
use uuid::Uuid;
@@ -11,7 +11,7 @@ pub fn get_or_create_id(path: &Path) -> Result<String> {
}
}
let new_id = Uuid::new_v4().to_string();
- fs::create_dir_all(path.parent().ok_or_else(|| anyhow!("invalid id path"))?)?;
+ fs::create_dir_all(path.parent().context("invalid id path")?)?;
fs::write(path, &new_id)?;
Ok(new_id)
}
@@ -1,5 +1,5 @@
use agent::{Message, MessageSegment, SerializedThread, ThreadStore};
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow, bail};
use assistant_tool::ToolWorkingSet;
use client::proto::LspWorkProgress;
use futures::channel::mpsc;
@@ -285,7 +285,7 @@ impl ExampleInstance {
diagnostics_before = query_lsp_diagnostics(project.clone(), cx).await?;
if diagnostics_before.is_some() && language_server.allow_preexisting_diagnostics {
- return Err(anyhow!("Example has pre-existing diagnostics. If you want to run this example regardless, set `allow_preexisting_diagnostics` to `true` in `base.toml`"));
+ anyhow::bail!("Example has pre-existing diagnostics. If you want to run this example regardless, set `allow_preexisting_diagnostics` to `true` in `base.toml`");
}
Some(LanguageServerState {
@@ -296,9 +296,7 @@ impl ExampleInstance {
None
};
- if std::env::var("ZED_EVAL_SETUP_ONLY").is_ok() {
- return Err(anyhow!("Setup only mode"));
- }
+ anyhow::ensure!(std::env::var("ZED_EVAL_SETUP_ONLY").is_err(), "Setup only mode");
let last_diff_file_path = this.run_directory.join("last.diff");
@@ -710,7 +708,7 @@ pub fn wait_for_lang_server(
anyhow::Ok(())
},
_ = timeout.fuse() => {
- Err(anyhow!("LSP wait timed out after 5 minutes"))
+ anyhow::bail!("LSP wait timed out after 5 minutes");
}
};
drop(subscriptions);
@@ -808,18 +806,16 @@ pub async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> {
.output()
.await?;
- if output.status.success() {
- Ok(String::from_utf8(output.stdout)?.trim().to_string())
- } else {
- Err(anyhow!(
- "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}",
- args.join(" "),
- repo_path.display(),
- output.status,
- String::from_utf8_lossy(&output.stderr),
- String::from_utf8_lossy(&output.stdout),
- ))
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}",
+ args.join(" "),
+ repo_path.display(),
+ output.status,
+ String::from_utf8_lossy(&output.stderr),
+ String::from_utf8_lossy(&output.stdout),
+ );
+ Ok(String::from_utf8(output.stdout)?.trim().to_string())
}
fn messages_to_markdown<'a>(message_iter: impl IntoIterator<Item = &'a Message>) -> String {
@@ -881,9 +877,7 @@ pub async fn send_language_model_request(
full_response.push_str(&chunk_str);
}
Err(err) => {
- return Err(anyhow!(
- "Error receiving response from language model: {err}"
- ));
+ anyhow::bail!("Error receiving response from language model: {err}");
}
}
}
@@ -8,7 +8,7 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use ::lsp::LanguageServerName;
-use anyhow::{Context as _, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use fs::normalize_path;
use gpui::{App, Task};
@@ -173,7 +173,7 @@ pub fn parse_wasm_extension_version(
//
// By parsing the entirety of the Wasm bytes before we return, we're able to detect this problem
// earlier as an `Err` rather than as a panic.
- version.ok_or_else(|| anyhow!("extension {} has no zed:api-version section", extension_id))
+ version.with_context(|| format!("extension {extension_id} has no zed:api-version section"))
}
fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option<SemanticVersion> {
@@ -1,7 +1,7 @@
use crate::{
ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, parse_wasm_extension_version,
};
-use anyhow::{Context as _, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use futures::io::BufReader;
@@ -134,7 +134,7 @@ impl ExtensionBuilder {
extension_dir: &Path,
manifest: &mut ExtensionManifest,
options: CompileExtensionOptions,
- ) -> Result<(), anyhow::Error> {
+ ) -> anyhow::Result<()> {
self.install_rust_wasm_target_if_needed()?;
let cargo_toml_content = fs::read_to_string(extension_dir.join("Cargo.toml"))?;
@@ -429,7 +429,7 @@ impl ExtensionBuilder {
let inner_dir = fs::read_dir(&tar_out_dir)?
.next()
- .ok_or_else(|| anyhow!("no content"))?
+ .context("no content")?
.context("failed to read contents of extracted wasi archive directory")?
.path();
fs::rename(&inner_dir, &wasi_sdk_dir).context("failed to move extracted wasi dir")?;
@@ -588,7 +588,7 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
let grammar_name = grammar_path
.file_stem()
.and_then(|stem| stem.to_str())
- .ok_or_else(|| anyhow!("no grammar name"))?;
+ .context("no grammar name")?;
if !manifest.grammars.contains_key(grammar_name) {
manifest.grammars.insert(
grammar_name.into(),
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use collections::{BTreeMap, HashMap};
use fs::Fs;
use language::LanguageName;
@@ -213,7 +213,7 @@ impl ExtensionManifest {
let extension_name = extension_dir
.file_name()
.and_then(OsStr::to_str)
- .ok_or_else(|| anyhow!("invalid extension name"))?;
+ .context("invalid extension name")?;
let mut extension_manifest_path = extension_dir.join("extension.json");
if fs.is_file(&extension_manifest_path).await {
@@ -6,7 +6,7 @@ use std::process::Command;
use std::sync::Arc;
use ::fs::{CopyOptions, Fs, RealFs, copy_recursive};
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use clap::Parser;
use extension::ExtensionManifest;
use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
@@ -107,7 +107,7 @@ async fn main() -> Result<()> {
schema_version: Some(manifest.schema_version.0),
repository: manifest
.repository
- .ok_or_else(|| anyhow!("missing repository in extension manifest"))?,
+ .context("missing repository in extension manifest")?,
wasm_api_version: manifest.lib.version.map(|version| version.to_string()),
provides: extension_provides,
})?;
@@ -196,11 +196,7 @@ async fn copy_extension_resources(
for theme_path in &manifest.themes {
fs::copy(
extension_path.join(theme_path),
- output_themes_dir.join(
- theme_path
- .file_name()
- .ok_or_else(|| anyhow!("invalid theme path"))?,
- ),
+ output_themes_dir.join(theme_path.file_name().context("invalid theme path")?),
)
.with_context(|| format!("failed to copy theme '{}'", theme_path.display()))?;
}
@@ -215,7 +211,7 @@ async fn copy_extension_resources(
output_icon_themes_dir.join(
icon_theme_path
.file_name()
- .ok_or_else(|| anyhow!("invalid icon theme path"))?,
+ .context("invalid icon theme path")?,
),
)
.with_context(|| {
@@ -245,11 +241,8 @@ async fn copy_extension_resources(
copy_recursive(
fs.as_ref(),
&extension_path.join(language_path),
- &output_languages_dir.join(
- language_path
- .file_name()
- .ok_or_else(|| anyhow!("invalid language path"))?,
- ),
+ &output_languages_dir
+ .join(language_path.file_name().context("invalid language path")?),
CopyOptions {
overwrite: true,
ignore_if_exists: false,
@@ -300,7 +293,7 @@ fn test_languages(
Some(
grammars
.get(name.as_ref())
- .ok_or_else(|| anyhow!("grammar not found: '{name}'"))?,
+ .with_context(|| format!("grammar not found: '{name}'"))?,
)
} else {
None
@@ -311,12 +304,12 @@ fn test_languages(
let entry = entry?;
let query_path = entry.path();
if query_path.extension() == Some("scm".as_ref()) {
- let grammar = grammar.ok_or_else(|| {
- anyhow!(
+ let grammar = grammar.with_context(|| {
+ format! {
"language {} provides query {} but no grammar",
config.name,
query_path.display()
- )
+ }
})?;
let query_source = fs::read_to_string(&query_path)?;
@@ -717,7 +717,7 @@ impl ExtensionStore {
let mut response = http_client
.get(url.as_ref(), Default::default(), true)
.await
- .map_err(|err| anyhow!("error downloading extension: {}", err))?;
+ .context("downloading extension")?;
fs.remove_dir(
&extension_dir,
@@ -1415,7 +1415,7 @@ impl ExtensionStore {
let is_dev = fs
.metadata(&extension_dir)
.await?
- .ok_or_else(|| anyhow!("directory does not exist"))?
+ .context("directory does not exist")?
.is_symlink;
if let Ok(mut language_paths) = fs.read_dir(&extension_dir.join("languages")).await {
@@ -1,6 +1,6 @@
use std::{path::PathBuf, sync::Arc};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use client::{TypedEnvelope, proto};
use collections::{HashMap, HashSet};
use extension::{
@@ -295,7 +295,7 @@ impl HeadlessExtensionStore {
let extension = envelope
.payload
.extension
- .with_context(|| anyhow!("Invalid InstallExtension request"))?;
+ .context("Invalid InstallExtension request")?;
extensions
.update(&mut cx, |extensions, cx| {
@@ -533,11 +533,11 @@ impl WasmHost {
pub fn writeable_path_from_extension(&self, id: &Arc<str>, path: &Path) -> Result<PathBuf> {
let extension_work_dir = self.work_dir.join(id.as_ref());
let path = normalize_path(&extension_work_dir.join(path));
- if path.starts_with(&extension_work_dir) {
- Ok(path)
- } else {
- Err(anyhow!("cannot write to path {}", path.display()))
- }
+ anyhow::ensure!(
+ path.starts_with(&extension_work_dir),
+ "cannot write to path {path:?}",
+ );
+ Ok(path)
}
}
@@ -569,7 +569,7 @@ pub fn parse_wasm_extension_version(
//
// By parsing the entirety of the Wasm bytes before we return, we're able to detect this problem
// earlier as an `Err` rather than as a panic.
- version.ok_or_else(|| anyhow!("extension {} has no zed:api-version section", extension_id))
+ version.with_context(|| format!("extension {extension_id} has no zed:api-version section"))
}
fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option<SemanticVersion> {
@@ -83,11 +83,10 @@ pub fn authorize_access_to_unreleased_wasm_api_version(
}
};
- if !allow_unreleased_version {
- Err(anyhow!(
- "unreleased versions of the extension API can only be used on development builds of Zed"
- ))?;
- }
+ anyhow::ensure!(
+ allow_unreleased_version,
+ "unreleased versions of the extension API can only be used on development builds of Zed"
+ );
Ok(())
}
@@ -774,7 +773,7 @@ impl Extension {
.await
}
Extension::V0_0_1(_) | Extension::V0_0_4(_) | Extension::V0_0_6(_) => {
- Err(anyhow!("`run_slash_command` not available prior to v0.1.0"))
+ anyhow::bail!("`run_slash_command` not available prior to v0.1.0");
}
}
}
@@ -809,9 +808,9 @@ impl Extension {
Extension::V0_0_1(_)
| Extension::V0_0_4(_)
| Extension::V0_0_6(_)
- | Extension::V0_1_0(_) => Err(anyhow!(
- "`context_server_command` not available prior to v0.2.0"
- )),
+ | Extension::V0_1_0(_) => {
+ anyhow::bail!("`context_server_command` not available prior to v0.2.0");
+ }
}
}
@@ -836,9 +835,9 @@ impl Extension {
| Extension::V0_1_0(_)
| Extension::V0_2_0(_)
| Extension::V0_3_0(_)
- | Extension::V0_4_0(_) => Err(anyhow!(
- "`context_server_configuration` not available prior to v0.5.0"
- )),
+ | Extension::V0_4_0(_) => {
+ anyhow::bail!("`context_server_configuration` not available prior to v0.5.0");
+ }
}
}
@@ -854,9 +853,9 @@ impl Extension {
Extension::V0_3_0(ext) => ext.call_suggest_docs_packages(store, provider).await,
Extension::V0_2_0(ext) => ext.call_suggest_docs_packages(store, provider).await,
Extension::V0_1_0(ext) => ext.call_suggest_docs_packages(store, provider).await,
- Extension::V0_0_1(_) | Extension::V0_0_4(_) | Extension::V0_0_6(_) => Err(anyhow!(
- "`suggest_docs_packages` not available prior to v0.1.0"
- )),
+ Extension::V0_0_1(_) | Extension::V0_0_4(_) | Extension::V0_0_6(_) => {
+ anyhow::bail!("`suggest_docs_packages` not available prior to v0.1.0");
+ }
}
}
@@ -893,7 +892,7 @@ impl Extension {
.await
}
Extension::V0_0_1(_) | Extension::V0_0_4(_) | Extension::V0_0_6(_) => {
- Err(anyhow!("`index_docs` not available prior to v0.1.0"))
+ anyhow::bail!("`index_docs` not available prior to v0.1.0");
}
}
}
@@ -920,7 +919,7 @@ impl Extension {
Ok(Ok(dap_binary))
}
- _ => Err(anyhow!("`get_dap_binary` not available prior to v0.6.0")),
+ _ => anyhow::bail!("`get_dap_binary` not available prior to v0.6.0"),
}
}
}
@@ -1,7 +1,7 @@
use crate::wasm_host::{WasmState, wit::ToWasmtimeResult};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::settings::{Settings, WorktreeId};
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use extension::{ExtensionLanguageServerProxy, KeyValueStoreDelegate, WorktreeDelegate};
@@ -365,7 +365,7 @@ impl From<http_client::HttpMethod> for ::http_client::Method {
fn convert_request(
extension_request: &http_client::HttpRequest,
-) -> Result<::http_client::Request<AsyncBody>, anyhow::Error> {
+) -> anyhow::Result<::http_client::Request<AsyncBody>> {
let mut request = ::http_client::Request::builder()
.method(::http_client::Method::from(extension_request.method))
.uri(&extension_request.url)
@@ -389,7 +389,7 @@ fn convert_request(
async fn convert_response(
response: &mut ::http_client::Response<AsyncBody>,
-) -> Result<http_client::HttpResponse, anyhow::Error> {
+) -> anyhow::Result<http_client::HttpResponse> {
let mut extension_response = http_client::HttpResponse {
body: Vec::new(),
headers: Vec::new(),
@@ -508,14 +508,13 @@ impl ExtensionImports for WasmState {
.http_client
.get(&url, Default::default(), true)
.await
- .map_err(|err| anyhow!("error downloading release: {}", err))?;
+ .context("downloading release")?;
- if !response.status().is_success() {
- Err(anyhow!(
- "download failed with status {}",
- response.status().to_string()
- ))?;
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "download failed with status {}",
+ response.status().to_string()
+ );
let body = BufReader::new(response.body_mut());
match file_type {
@@ -568,7 +567,7 @@ impl ExtensionImports for WasmState {
use std::os::unix::fs::PermissionsExt;
return fs::set_permissions(&path, Permissions::from_mode(0o755))
- .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}"))
+ .with_context(|| format!("setting permissions for path {path:?}"))
.to_wasmtime_result();
}
@@ -9,7 +9,7 @@ use crate::wasm_host::wit::{CompletionKind, CompletionLabelDetails, InsertTextFo
use crate::wasm_host::{WasmState, wit::ToWasmtimeResult};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::settings::{Settings, WorktreeId};
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
@@ -524,7 +524,7 @@ impl From<http_client::HttpMethod> for ::http_client::Method {
fn convert_request(
extension_request: &http_client::HttpRequest,
-) -> Result<::http_client::Request<AsyncBody>, anyhow::Error> {
+) -> anyhow::Result<::http_client::Request<AsyncBody>> {
let mut request = ::http_client::Request::builder()
.method(::http_client::Method::from(extension_request.method))
.uri(&extension_request.url)
@@ -548,7 +548,7 @@ fn convert_request(
async fn convert_response(
response: &mut ::http_client::Response<AsyncBody>,
-) -> Result<http_client::HttpResponse, anyhow::Error> {
+) -> anyhow::Result<http_client::HttpResponse> {
let mut extension_response = http_client::HttpResponse {
body: Vec::new(),
headers: Vec::new(),
@@ -871,14 +871,13 @@ impl ExtensionImports for WasmState {
.http_client
.get(&url, Default::default(), true)
.await
- .map_err(|err| anyhow!("error downloading release: {}", err))?;
+ .context("downloading release")?;
- if !response.status().is_success() {
- Err(anyhow!(
- "download failed with status {}",
- response.status().to_string()
- ))?;
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "download failed with status {}",
+ response.status().to_string()
+ );
let body = BufReader::new(response.body_mut());
match file_type {
@@ -931,7 +930,7 @@ impl ExtensionImports for WasmState {
use std::os::unix::fs::PermissionsExt;
return fs::set_permissions(&path, Permissions::from_mode(0o755))
- .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}"))
+ .with_context(|| format!("setting permissions for path {path:?}"))
.to_wasmtime_result();
}
@@ -1,5 +1,5 @@
use crate::FakeFs;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::{HashMap, HashSet};
use futures::future::{self, BoxFuture};
use git::{
@@ -80,7 +80,7 @@ impl GitRepository for FakeGitRepository {
state
.index_contents
.get(path.as_ref())
- .ok_or_else(|| anyhow!("not present in index"))
+ .context("not present in index")
.cloned()
})
.await
@@ -95,7 +95,7 @@ impl GitRepository for FakeGitRepository {
state
.head_contents
.get(path.as_ref())
- .ok_or_else(|| anyhow!("not present in HEAD"))
+ .context("not present in HEAD")
.cloned()
})
.await
@@ -119,8 +119,8 @@ impl GitRepository for FakeGitRepository {
_env: Arc<HashMap<String, String>>,
) -> BoxFuture<anyhow::Result<()>> {
self.with_state_async(true, move |state| {
- if let Some(message) = state.simulated_index_write_error_message.clone() {
- return Err(anyhow!("{}", message));
+ if let Some(message) = &state.simulated_index_write_error_message {
+ anyhow::bail!("{message}");
} else if let Some(content) = content {
state.index_contents.insert(path, content);
} else {
@@ -360,7 +360,7 @@ impl Fs for RealFs {
if options.ignore_if_exists {
return Ok(());
} else {
- return Err(anyhow!("{target:?} already exists"));
+ anyhow::bail!("{target:?} already exists");
}
}
@@ -373,7 +373,7 @@ impl Fs for RealFs {
if options.ignore_if_exists {
return Ok(());
} else {
- return Err(anyhow!("{target:?} already exists"));
+ anyhow::bail!("{target:?} already exists");
}
}
@@ -538,7 +538,7 @@ impl Fs for RealFs {
}?;
tmp_file.write_all(data.as_bytes())?;
tmp_file.persist(path)?;
- Ok::<(), anyhow::Error>(())
+ anyhow::Ok(())
})
.await?;
@@ -568,7 +568,7 @@ impl Fs for RealFs {
temp_file_path
};
atomic_replace(path.as_path(), temp_file.as_path())?;
- Ok::<(), anyhow::Error>(())
+ anyhow::Ok(())
})
.await?;
Ok(())
@@ -672,7 +672,7 @@ impl Fs for RealFs {
) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>> {
let result = smol::fs::read_dir(path).await?.map(|entry| match entry {
Ok(entry) => Ok(entry.path()),
- Err(error) => Err(anyhow!("failed to read dir entry {:?}", error)),
+ Err(error) => Err(anyhow!("failed to read dir entry {error:?}")),
});
Ok(Box::pin(result))
}
@@ -942,7 +942,7 @@ impl FakeFsState {
.ok_or_else(|| {
anyhow!(io::Error::new(
io::ErrorKind::NotFound,
- format!("not found: {}", target.display())
+ format!("not found: {target:?}")
))
})?
.0)
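
Several rewrites in this area (for example `{target:?}` above and `{error:?}` earlier) lean on inline format-argument capture, stable since Rust 1.58: identifiers can be named directly inside the format string instead of being passed positionally. A minimal sketch (path and status code are made up):

    fn main() {
        let target = std::path::Path::new("/tmp/missing");
        let code = 404;
        // Identifiers captured directly in the format string, no positional args needed.
        let message = format!("not found: {target:?} (status {code})");
        assert_eq!(message, "not found: \"/tmp/missing\" (status 404)");
    }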
@@ -1012,9 +1012,7 @@ impl FakeFsState {
Fn: FnOnce(btree_map::Entry<String, Arc<Mutex<FakeFsEntry>>>) -> Result<T>,
{
let path = normalize_path(path);
- let filename = path
- .file_name()
- .ok_or_else(|| anyhow!("cannot overwrite the root"))?;
+ let filename = path.file_name().context("cannot overwrite the root")?;
let parent_path = path.parent().unwrap();
let parent = self.read_path(parent_path)?;
@@ -1352,7 +1350,7 @@ impl FakeFs {
let path = std::str::from_utf8(content)
.ok()
.and_then(|content| content.strip_prefix("gitdir:"))
- .ok_or_else(|| anyhow!("not a valid gitfile"))?
+ .context("not a valid gitfile")?
.trim();
git_dir_path.insert(normalize_path(&dot_git.parent().unwrap().join(path)))
}
@@ -1394,7 +1392,7 @@ impl FakeFs {
Ok(result)
} else {
- Err(anyhow!("not a valid git repository"))
+ anyhow::bail!("not a valid git repository");
}
}
@@ -1744,7 +1742,7 @@ impl FakeFsEntry {
if let Self::File { content, .. } = self {
Ok(content)
} else {
- Err(anyhow!("not a file: {}", path.display()))
+ anyhow::bail!("not a file: {path:?}");
}
}
@@ -1755,7 +1753,7 @@ impl FakeFsEntry {
if let Self::Dir { entries, .. } = self {
Ok(entries)
} else {
- Err(anyhow!("not a directory: {}", path.display()))
+ anyhow::bail!("not a directory: {path:?}");
}
}
}
@@ -1867,7 +1865,7 @@ impl Fs for FakeFs {
kind = Some(PathEventKind::Changed);
*e.get_mut() = file;
} else if !options.ignore_if_exists {
- return Err(anyhow!("path already exists: {}", path.display()));
+ anyhow::bail!("path already exists: {path:?}");
}
}
btree_map::Entry::Vacant(e) => {
@@ -1941,7 +1939,7 @@ impl Fs for FakeFs {
if let btree_map::Entry::Occupied(e) = e {
Ok(e.get().clone())
} else {
- Err(anyhow!("path does not exist: {}", &old_path.display()))
+ anyhow::bail!("path does not exist: {old_path:?}")
}
})?;
@@ -1959,7 +1957,7 @@ impl Fs for FakeFs {
if options.overwrite {
*e.get_mut() = moved_entry;
} else if !options.ignore_if_exists {
- return Err(anyhow!("path already exists: {}", new_path.display()));
+ anyhow::bail!("path already exists: {new_path:?}");
}
}
btree_map::Entry::Vacant(e) => {
@@ -2003,7 +2001,7 @@ impl Fs for FakeFs {
kind = Some(PathEventKind::Changed);
Ok(Some(e.get().clone()))
} else if !options.ignore_if_exists {
- return Err(anyhow!("{target:?} already exists"));
+ anyhow::bail!("{target:?} already exists");
} else {
Ok(None)
}
@@ -2027,10 +2025,8 @@ impl Fs for FakeFs {
self.simulate_random_delay().await;
let path = normalize_path(path);
- let parent_path = path
- .parent()
- .ok_or_else(|| anyhow!("cannot remove the root"))?;
- let base_name = path.file_name().unwrap();
+ let parent_path = path.parent().context("cannot remove the root")?;
+ let base_name = path.file_name().context("cannot remove the root")?;
let mut state = self.state.lock();
let parent_entry = state.read_path(parent_path)?;
@@ -2042,7 +2038,7 @@ impl Fs for FakeFs {
match entry {
btree_map::Entry::Vacant(_) => {
if !options.ignore_if_not_exists {
- return Err(anyhow!("{path:?} does not exist"));
+ anyhow::bail!("{path:?} does not exist");
}
}
btree_map::Entry::Occupied(e) => {
@@ -2050,7 +2046,7 @@ impl Fs for FakeFs {
let mut entry = e.get().lock();
let children = entry.dir_entries(&path)?;
if !options.recursive && !children.is_empty() {
- return Err(anyhow!("{path:?} is not empty"));
+ anyhow::bail!("{path:?} is not empty");
}
}
e.remove();
@@ -2064,9 +2060,7 @@ impl Fs for FakeFs {
self.simulate_random_delay().await;
let path = normalize_path(path);
- let parent_path = path
- .parent()
- .ok_or_else(|| anyhow!("cannot remove the root"))?;
+ let parent_path = path.parent().context("cannot remove the root")?;
let base_name = path.file_name().unwrap();
let mut state = self.state.lock();
let parent_entry = state.read_path(parent_path)?;
@@ -2077,7 +2071,7 @@ impl Fs for FakeFs {
match entry {
btree_map::Entry::Vacant(_) => {
if !options.ignore_if_not_exists {
- return Err(anyhow!("{path:?} does not exist"));
+ anyhow::bail!("{path:?} does not exist");
}
}
btree_map::Entry::Occupied(e) => {
@@ -2148,11 +2142,10 @@ impl Fs for FakeFs {
let path = normalize_path(path);
self.simulate_random_delay().await;
let state = self.state.lock();
- if let Some((_, canonical_path)) = state.try_read_path(&path, true) {
- Ok(canonical_path)
- } else {
- Err(anyhow!("path does not exist: {}", path.display()))
- }
+ let (_, canonical_path) = state
+ .try_read_path(&path, true)
+ .with_context(|| format!("path does not exist: {path:?}"))?;
+ Ok(canonical_path)
}
async fn is_file(&self, path: &Path) -> bool {
@@ -2220,15 +2213,14 @@ impl Fs for FakeFs {
self.simulate_random_delay().await;
let path = normalize_path(path);
let state = self.state.lock();
- if let Some((entry, _)) = state.try_read_path(&path, false) {
- let entry = entry.lock();
- if let FakeFsEntry::Symlink { target } = &*entry {
- Ok(target.clone())
- } else {
- Err(anyhow!("not a symlink: {}", path.display()))
- }
+ let (entry, _) = state
+ .try_read_path(&path, false)
+ .with_context(|| format!("path does not exist: {path:?}"))?;
+ let entry = entry.lock();
+ if let FakeFsEntry::Symlink { target } = &*entry {
+ Ok(target.clone())
} else {
- Err(anyhow!("path does not exist: {}", path.display()))
+ anyhow::bail!("not a symlink: {path:?}")
}
}
@@ -2403,7 +2395,7 @@ pub async fn copy_recursive<'a>(
if options.ignore_if_exists {
continue;
} else {
- return Err(anyhow!("{target_item:?} already exists"));
+ anyhow::bail!("{target_item:?} already exists");
}
}
let _ = fs
@@ -2443,7 +2435,7 @@ fn read_recursive<'a>(
let metadata = fs
.metadata(source)
.await?
- .ok_or_else(|| anyhow!("path does not exist: {}", source.display()))?;
+ .with_context(|| format!("path does not exist: {source:?}"))?;
if metadata.is_dir {
output.push((source.to_path_buf(), true));
@@ -23,7 +23,7 @@ impl FsWatcher {
}
impl Watcher for FsWatcher {
- fn add(&self, path: &std::path::Path) -> gpui::Result<()> {
+ fn add(&self, path: &std::path::Path) -> anyhow::Result<()> {
let root_path = SanitizedPath::from(path);
let tx = self.tx.clone();
@@ -78,7 +78,7 @@ impl Watcher for FsWatcher {
Ok(())
}
- fn remove(&self, path: &std::path::Path) -> gpui::Result<()> {
+ fn remove(&self, path: &std::path::Path) -> anyhow::Result<()> {
use notify::Watcher;
Ok(global(|w| w.watcher.lock().unwatch(path))??)
}
@@ -130,6 +130,6 @@ pub fn global<T>(f: impl FnOnce(&GlobalWatcher) -> T) -> anyhow::Result<T> {
});
match result {
Ok(g) => Ok(f(g)),
- Err(e) => Err(anyhow::anyhow!("{}", e)),
+ Err(e) => Err(anyhow::anyhow!("{e}")),
}
}
@@ -57,7 +57,7 @@ impl Watcher for MacWatcher {
Ok(())
}
- fn remove(&self, path: &Path) -> gpui::Result<()> {
+ fn remove(&self, path: &Path) -> anyhow::Result<()> {
let handles = self
.handles
.upgrade()
@@ -1,6 +1,6 @@
use crate::commit::get_messages;
use crate::{GitRemote, Oid};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::{HashMap, HashSet};
use futures::AsyncWriteExt;
use gpui::SharedString;
@@ -80,7 +80,7 @@ async fn run_git_blame(
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
- .map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
+ .context("starting git blame process")?;
let stdin = child
.stdin
@@ -92,10 +92,7 @@ async fn run_git_blame(
}
stdin.flush().await?;
- let output = child
- .output()
- .await
- .map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
+ let output = child.output().await.context("reading git blame output")?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
@@ -103,7 +100,7 @@ async fn run_git_blame(
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
return Ok(String::new());
}
- return Err(anyhow!("git blame process failed: {}", stderr));
+ anyhow::bail!("git blame process failed: {stderr}");
}
Ok(String::from_utf8(output.stdout)?)
@@ -144,21 +141,21 @@ impl BlameEntry {
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
- .ok_or_else(|| anyhow!("failed to parse sha"))?;
+ .context("parsing sha")?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse original line number"))?;
+ .context("parsing original line number")?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .context("parsing final line number")?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
- .ok_or_else(|| anyhow!("Failed to parse final line number"))?;
+ .context("parsing line count")?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
@@ -7,7 +7,7 @@ pub mod status;
pub use crate::hosting_provider::*;
pub use crate::remote::*;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
pub use git2 as libgit;
use gpui::action_with_deprecated_aliases;
use gpui::actions;
@@ -99,7 +99,7 @@ impl FromStr for Oid {
fn from_str(s: &str) -> std::prelude::v1::Result<Self, Self::Err> {
libgit::Oid::from_str(s)
- .map_err(|error| anyhow!("failed to parse git oid: {}", error))
+ .context("parsing git oid")
.map(Self)
}
}
@@ -477,7 +477,7 @@ impl GitRepository for RealGitRepository {
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()
- .map_err(|e| anyhow!("Failed to start git show process: {e}"))?;
+ .context("starting git show process")?;
let show_stdout = String::from_utf8_lossy(&show_output.stdout);
let mut lines = show_stdout.split('\n');
@@ -491,7 +491,7 @@ impl GitRepository for RealGitRepository {
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
- .map_err(|e| anyhow!("Failed to start git cat-file process: {e}"))?;
+ .context("starting git cat-file process")?;
use std::io::Write as _;
let mut files = Vec::<CommitFile>::new();
@@ -578,12 +578,11 @@ impl GitRepository for RealGitRepository {
.args(["reset", mode_flag, &commit])
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to reset:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to reset:\n{}",
+ String::from_utf8_lossy(&output.stderr),
+ );
Ok(())
}
.boxed()
@@ -609,12 +608,11 @@ impl GitRepository for RealGitRepository {
.args(paths.iter().map(|path| path.as_ref()))
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to checkout files:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to checkout files:\n{}",
+ String::from_utf8_lossy(&output.stderr),
+ );
Ok(())
}
.boxed()
@@ -707,12 +705,11 @@ impl GitRepository for RealGitRepository {
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to stage:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to stage:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
} else {
let output = new_smol_command(&git_binary_path)
.current_dir(&working_directory)
@@ -721,13 +718,11 @@ impl GitRepository for RealGitRepository {
.arg(path.to_unix_style())
.output()
.await?;
-
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to unstage:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to unstage:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
}
Ok(())
@@ -761,7 +756,7 @@ impl GitRepository for RealGitRepository {
let stdin = process
.stdin
.take()
- .ok_or_else(|| anyhow!("no stdin for git cat-file subprocess"))?;
+ .context("no stdin for git cat-file subprocess")?;
let mut stdin = BufWriter::new(stdin);
for rev in &revs {
write!(&mut stdin, "{rev}\n")?;
@@ -813,7 +808,7 @@ impl GitRepository for RealGitRepository {
stdout.parse()
} else {
let stderr = String::from_utf8_lossy(&output.stderr);
- Err(anyhow!("git status failed: {}", stderr))
+ anyhow::bail!("git status failed: {stderr}");
}
})
.boxed()
@@ -849,12 +844,11 @@ impl GitRepository for RealGitRepository {
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to git git branches:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+            "Failed to get git branches:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
let input = String::from_utf8_lossy(&output.stdout);
@@ -903,7 +897,7 @@ impl GitRepository for RealGitRepository {
branch.set_upstream(Some(&name))?;
branch
} else {
- return Err(anyhow!("Branch not found"));
+ anyhow::bail!("Branch not found");
};
let revision = branch.get();
@@ -912,7 +906,7 @@ impl GitRepository for RealGitRepository {
repo.set_head(
revision
.name()
- .ok_or_else(|| anyhow!("Branch name could not be retrieved"))?,
+ .context("Branch name could not be retrieved")?,
)?;
Ok(())
})
@@ -970,12 +964,11 @@ impl GitRepository for RealGitRepository {
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to run git diff:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to run git diff:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
Ok(String::from_utf8_lossy(&output.stdout).to_string())
})
.boxed()
@@ -998,13 +991,11 @@ impl GitRepository for RealGitRepository {
.args(paths.iter().map(|p| p.to_unix_style()))
.output()
.await?;
-
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to stage paths:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to stage paths:\n{}",
+ String::from_utf8_lossy(&output.stderr),
+ );
}
Ok(())
})
@@ -1030,12 +1021,11 @@ impl GitRepository for RealGitRepository {
.output()
.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to unstage:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to unstage:\n{}",
+ String::from_utf8_lossy(&output.stderr),
+ );
}
Ok(())
})
@@ -1069,12 +1059,11 @@ impl GitRepository for RealGitRepository {
let output = cmd.output().await?;
- if !output.status.success() {
- return Err(anyhow!(
- "Failed to commit:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to commit:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
Ok(())
})
.boxed()
@@ -1190,22 +1179,19 @@ impl GitRepository for RealGitRepository {
.output()
.await?;
- if output.status.success() {
- let remote_names = String::from_utf8_lossy(&output.stdout)
- .split('\n')
- .filter(|name| !name.is_empty())
- .map(|name| Remote {
- name: name.trim().to_string().into(),
- })
- .collect();
-
- return Ok(remote_names);
- } else {
- return Err(anyhow!(
- "Failed to get remotes:\n{}",
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to get remotes:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ let remote_names = String::from_utf8_lossy(&output.stdout)
+ .split('\n')
+ .filter(|name| !name.is_empty())
+ .map(|name| Remote {
+ name: name.trim().to_string().into(),
+ })
+ .collect();
+ Ok(remote_names)
})
.boxed()
}
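The remotes hunk above also shows the second benefit of `ensure!`: once the failure check comes first, the success path no longer nests inside an `if`/`else`. In addition, `ensure!` accepts a single error expression rather than a format string, which the `GitBinaryCommandError` hunk further down relies on. A hedged sketch of that form, with an illustrative error type rather than one from this change:

```rust
use std::fmt;

use anyhow::ensure;

// Illustrative error type, standing in for GitBinaryCommandError below.
#[derive(Debug)]
struct CommandFailed {
    stdout: String,
}

impl fmt::Display for CommandFailed {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "git command failed: {}", self.stdout)
    }
}

impl std::error::Error for CommandFailed {}

// ensure!(condition, error_expression) wraps the expression with anyhow!
// only on failure, so the structured error is built lazily and the success
// path stays unindented.
fn check(success: bool, stdout: String) -> anyhow::Result<String> {
    ensure!(success, CommandFailed { stdout });
    Ok(stdout)
}
```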
@@ -1222,11 +1208,11 @@ impl GitRepository for RealGitRepository {
.args(args)
.output()
.await?;
- if output.status.success() {
- Ok(String::from_utf8(output.stdout)?)
- } else {
- Err(anyhow!(String::from_utf8_lossy(&output.stderr).to_string()))
- }
+ anyhow::ensure!(
+ output.status.success(),
+ String::from_utf8_lossy(&output.stderr).to_string()
+ );
+ Ok(String::from_utf8(output.stdout)?)
};
let head = git_cmd(&["rev-parse", "HEAD"])
@@ -1504,14 +1490,14 @@ impl GitBinary {
{
let mut command = self.build_command(args);
let output = command.output().await?;
- if output.status.success() {
- Ok(String::from_utf8(output.stdout)?)
- } else {
- Err(anyhow!(GitBinaryCommandError {
+ anyhow::ensure!(
+ output.status.success(),
+ GitBinaryCommandError {
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
status: output.status,
- }))
- }
+ }
+ );
+ Ok(String::from_utf8(output.stdout)?)
}
fn build_command<S>(&self, args: impl IntoIterator<Item = S>) -> smol::process::Command
@@ -1545,14 +1531,15 @@ async fn run_git_command(
if env.contains_key("GIT_ASKPASS") {
let git_process = command.spawn()?;
let output = git_process.output().await?;
- if !output.status.success() {
- Err(anyhow!("{}", String::from_utf8_lossy(&output.stderr)))
- } else {
- Ok(RemoteCommandOutput {
- stdout: String::from_utf8_lossy(&output.stdout).to_string(),
- stderr: String::from_utf8_lossy(&output.stderr).to_string(),
- })
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(RemoteCommandOutput {
+ stdout: String::from_utf8_lossy(&output.stdout).to_string(),
+ stderr: String::from_utf8_lossy(&output.stderr).to_string(),
+ })
} else {
let ask_pass = AskPassSession::new(executor, ask_pass).await?;
command
@@ -1568,7 +1555,7 @@ async fn run_git_command(
async fn run_askpass_command(
mut ask_pass: AskPassSession,
git_process: smol::process::Child,
-) -> std::result::Result<RemoteCommandOutput, anyhow::Error> {
+) -> anyhow::Result<RemoteCommandOutput> {
select_biased! {
result = ask_pass.run().fuse() => {
match result {
@@ -1582,17 +1569,15 @@ async fn run_askpass_command(
}
output = git_process.output().fuse() => {
let output = output?;
- if !output.status.success() {
- Err(anyhow!(
- "{}",
- String::from_utf8_lossy(&output.stderr)
- ))
- } else {
- Ok(RemoteCommandOutput {
- stdout: String::from_utf8_lossy(&output.stdout).to_string(),
- stderr: String::from_utf8_lossy(&output.stderr).to_string(),
- })
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "{}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(RemoteCommandOutput {
+ stdout: String::from_utf8_lossy(&output.stdout).to_string(),
+ stderr: String::from_utf8_lossy(&output.stderr).to_string(),
+ })
}
}
}
@@ -1752,12 +1737,8 @@ fn parse_upstream_track(upstream_track: &str) -> Result<UpstreamTracking> {
}));
}
- let upstream_track = upstream_track
- .strip_prefix("[")
- .ok_or_else(|| anyhow!("missing ["))?;
- let upstream_track = upstream_track
- .strip_suffix("]")
- .ok_or_else(|| anyhow!("missing ["))?;
+ let upstream_track = upstream_track.strip_prefix("[").context("missing [")?;
+ let upstream_track = upstream_track.strip_suffix("]").context("missing ]")?;
let mut ahead: u32 = 0;
let mut behind: u32 = 0;
for component in upstream_track.split(", ") {
@@ -1,5 +1,5 @@
use crate::repository::RepoPath;
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::{path::Path, str::FromStr, sync::Arc};
use util::ResultExt;
@@ -241,7 +241,7 @@ impl StatusCode {
b'R' => Ok(StatusCode::Renamed),
b'C' => Ok(StatusCode::Copied),
b' ' => Ok(StatusCode::Unmodified),
- _ => Err(anyhow!("Invalid status code: {byte}")),
+ _ => anyhow::bail!("Invalid status code: {byte}"),
}
}
@@ -286,7 +286,7 @@ impl UnmergedStatusCode {
b'A' => Ok(UnmergedStatusCode::Added),
b'D' => Ok(UnmergedStatusCode::Deleted),
b'U' => Ok(UnmergedStatusCode::Updated),
- _ => Err(anyhow!("Invalid unmerged status code: {byte}")),
+ _ => anyhow::bail!("Invalid unmerged status code: {byte}"),
}
}
}
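`anyhow::bail!` in the status-code hunks above is shorthand for `return Err(anyhow!(...))`; because the macro diverges, it can occupy a match arm or `else` branch whose siblings produce a value. A minimal sketch of the shape, not tied to any particular call site in this change:

```rust
use anyhow::bail;

// bail! returns early with an error, so the fallthrough arm does not need
// to produce a value of the same type as the Ok arms.
fn status_from_byte(byte: u8) -> anyhow::Result<&'static str> {
    match byte {
        b'M' => Ok("modified"),
        b'A' => Ok("added"),
        b' ' => Ok("unmodified"),
        _ => bail!("Invalid status code: {byte}"),
    }
}
```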
@@ -3,7 +3,8 @@ mod settings;
use std::sync::Arc;
-use anyhow::{Result, anyhow};
+use anyhow::Context as _;
+use anyhow::Result;
use git::GitHostingProviderRegistry;
use git::repository::GitRepository;
use gpui::App;
@@ -58,7 +59,7 @@ pub fn get_host_from_git_remote_url(remote_url: &str) -> Result<String> {
.ok()
.and_then(|remote_url| remote_url.host_str().map(|host| host.to_string()))
})
- .ok_or_else(|| anyhow!("URL has no host"))
+ .context("URL has no host")
}
#[cfg(test)]
@@ -1,7 +1,7 @@
use std::str::FromStr;
use std::sync::{Arc, LazyLock};
-use anyhow::{Context, Result, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use futures::AsyncReadExt;
use git::{
@@ -1,7 +1,7 @@
use std::str::FromStr;
use std::sync::Arc;
-use anyhow::{Context, Result, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use futures::AsyncReadExt;
use gpui::SharedString;
@@ -1,7 +1,7 @@
use std::str::FromStr;
use std::sync::{Arc, LazyLock};
-use anyhow::{Context, Result, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use futures::AsyncReadExt;
use gpui::SharedString;
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use fuzzy::StringMatchCandidate;
use collections::HashSet;
@@ -381,7 +381,7 @@ impl PickerDelegate for BranchListDelegate {
.delegate
.repo
.as_ref()
- .ok_or_else(|| anyhow!("No active repository"))?
+ .context("No active repository")?
.clone();
let mut cx = cx.to_async();
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
use editor::{Editor, EditorEvent, MultiBuffer};
use git::repository::{CommitDetails, CommitDiff, CommitSummary, RepoPath};
@@ -172,7 +172,7 @@ impl CommitView {
.map(|path| path.worktree_id)
.or(first_worktree_id)
})?
- .ok_or_else(|| anyhow!("project has no worktrees"))?;
+ .context("project has no worktrees")?;
let file = Arc::new(GitBlob {
path: file.path.clone(),
is_deleted,
@@ -9,7 +9,7 @@ use crate::{branch_picker, picker_prompt, render_remote_button};
use crate::{
git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector,
};
-use anyhow::Result;
+use anyhow::Context as _;
use askpass::AskPassDelegate;
use assistant_settings::AssistantSettings;
use db::kvp::KEY_VALUE_STORE;
@@ -1626,14 +1626,12 @@ impl GitPanel {
&mut self,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> impl Future<Output = Result<bool, anyhow::Error>> + use<> {
+ ) -> impl Future<Output = anyhow::Result<bool>> + use<> {
let repo = self.active_repository.clone();
let mut cx = window.to_async(cx);
async move {
- let Some(repo) = repo else {
- return Err(anyhow::anyhow!("No active repository"));
- };
+ let repo = repo.context("No active repository")?;
let pushed_to: Vec<SharedString> = repo
.update(&mut cx, |repo, _| repo.check_for_pushed_commits())?
@@ -2090,22 +2088,16 @@ impl GitPanel {
let mut cx = window.to_async(cx);
async move {
- let Some(repo) = repo else {
- return Err(anyhow::anyhow!("No active repository"));
- };
-
+ let repo = repo.context("No active repository")?;
let mut current_remotes: Vec<Remote> = repo
.update(&mut cx, |repo, _| {
- let Some(current_branch) = repo.branch.as_ref() else {
- return Err(anyhow::anyhow!("No active branch"));
- };
-
- Ok(repo.get_remotes(Some(current_branch.name().to_string())))
+ let current_branch = repo.branch.as_ref().context("No active branch")?;
+ anyhow::Ok(repo.get_remotes(Some(current_branch.name().to_string())))
})??
.await??;
if current_remotes.len() == 0 {
- return Err(anyhow::anyhow!("No active remote"));
+ anyhow::bail!("No active remote");
} else if current_remotes.len() == 1 {
return Ok(Some(current_remotes.pop().unwrap()));
} else {
@@ -39,8 +39,7 @@ pub async fn stream_generate_content(
match serde_json::from_str(line) {
Ok(response) => Some(Ok(response)),
Err(error) => Some(Err(anyhow!(format!(
- "Error parsing JSON: {:?}\n{:?}",
- error, line
+ "Error parsing JSON: {error:?}\n{line:?}"
)))),
}
} else {
@@ -85,15 +84,13 @@ pub async fn count_tokens(
let mut response = client.send(http_request).await?;
let mut text = String::new();
response.body_mut().read_to_string(&mut text).await?;
- if response.status().is_success() {
- Ok(serde_json::from_str::<CountTokensResponse>(&text)?)
- } else {
- Err(anyhow!(
- "error during countTokens, status code: {:?}, body: {}",
- response.status(),
- text
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "error during countTokens, status code: {:?}, body: {}",
+ response.status(),
+ text
+ );
+ Ok(serde_json::from_str::<CountTokensResponse>(&text)?)
}
pub fn validate_generate_content_request(request: &GenerateContentRequest) -> Result<()> {
@@ -1,6 +1,5 @@
use std::{path::Path, sync::Arc, time::Duration};
-use anyhow::anyhow;
use gpui::{
Animation, AnimationExt, App, Application, Asset, AssetLogger, AssetSource, Bounds, Context,
Hsla, ImageAssetLoader, ImageCacheError, ImgResourceLoader, LOADING_DELAY, Length, Pixels,
@@ -57,7 +56,7 @@ impl Asset for LoadImageWithParameters {
timer.await;
if parameters.fail {
log::error!("Intentionally failed to load image");
- Err(anyhow!("Failed to load image").into())
+ Err(anyhow::anyhow!("Failed to load image").into())
} else {
data.await
}
@@ -1,5 +1,5 @@
use crate::SharedString;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::HashMap;
pub use no_action::{NoAction, is_no_action};
use serde_json::json;
@@ -235,7 +235,7 @@ impl ActionRegistry {
let name = self
.names_by_type_id
.get(type_id)
- .ok_or_else(|| anyhow!("no action type registered for {:?}", type_id))?
+ .with_context(|| format!("no action type registered for {type_id:?}"))?
.clone();
Ok(self.build_action(&name, None)?)
@@ -10,7 +10,7 @@ use std::{
time::Duration,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use derive_more::{Deref, DerefMut};
use futures::{
Future, FutureExt,
@@ -1021,9 +1021,9 @@ impl App {
let mut window = cx
.windows
.get_mut(id)
- .ok_or_else(|| anyhow!("window not found"))?
+ .context("window not found")?
.take()
- .ok_or_else(|| anyhow!("window not found"))?;
+ .context("window not found")?;
let root_view = window.root.clone().unwrap();
@@ -1042,7 +1042,7 @@ impl App {
} else {
cx.windows
.get_mut(id)
- .ok_or_else(|| anyhow!("window not found"))?
+ .context("window not found")?
.replace(window);
}
@@ -1119,7 +1119,7 @@ impl App {
self.globals_by_type
.get(&TypeId::of::<G>())
.map(|any_state| any_state.downcast_ref::<G>().unwrap())
- .ok_or_else(|| anyhow!("no state of type {} exists", type_name::<G>()))
+ .with_context(|| format!("no state of type {} exists", type_name::<G>()))
.unwrap()
}
@@ -1138,7 +1138,7 @@ impl App {
self.globals_by_type
.get_mut(&global_type)
.and_then(|any_state| any_state.downcast_mut::<G>())
- .ok_or_else(|| anyhow!("no state of type {} exists", type_name::<G>()))
+ .with_context(|| format!("no state of type {} exists", type_name::<G>()))
.unwrap()
}
@@ -1201,7 +1201,7 @@ impl App {
GlobalLease::new(
self.globals_by_type
.remove(&TypeId::of::<G>())
- .ok_or_else(|| anyhow!("no global registered of type {}", type_name::<G>()))
+ .with_context(|| format!("no global registered of type {}", type_name::<G>()))
.unwrap(),
)
}
@@ -1765,7 +1765,7 @@ impl AppContext for App {
let window = self
.windows
.get(window.id)
- .ok_or_else(|| anyhow!("window not found"))?
+ .context("window not found")?
.as_ref()
.expect("attempted to read a window that is already on the stack");
@@ -1915,9 +1915,12 @@ impl HttpClient for NullHttpClient {
_req: http_client::Request<http_client::AsyncBody>,
) -> futures::future::BoxFuture<
'static,
- Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>,
+ anyhow::Result<http_client::Response<http_client::AsyncBody>>,
> {
- async move { Err(anyhow!("No HttpClient available")) }.boxed()
+ async move {
+ anyhow::bail!("No HttpClient available");
+ }
+ .boxed()
}
fn proxy(&self) -> Option<&Url> {
@@ -3,7 +3,7 @@ use crate::{
Entity, EventEmitter, Focusable, ForegroundExecutor, Global, PromptLevel, Render, Reservation,
Result, Subscription, Task, VisualContext, Window, WindowHandle,
};
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use derive_more::{Deref, DerefMut};
use futures::channel::oneshot;
use std::{future::Future, rc::Weak};
@@ -27,19 +27,13 @@ impl AppContext for AsyncApp {
&mut self,
build_entity: impl FnOnce(&mut Context<T>) -> T,
) -> Self::Result<Entity<T>> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut app = app.borrow_mut();
Ok(app.new(build_entity))
}
fn reserve_entity<T: 'static>(&mut self) -> Result<Reservation<T>> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut app = app.borrow_mut();
Ok(app.reserve_entity())
}
@@ -49,10 +43,7 @@ impl AppContext for AsyncApp {
reservation: Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
) -> Result<Entity<T>> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut app = app.borrow_mut();
Ok(app.insert_entity(reservation, build_entity))
}
@@ -62,10 +53,7 @@ impl AppContext for AsyncApp {
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
) -> Self::Result<R> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut app = app.borrow_mut();
Ok(app.update_entity(handle, update))
}
@@ -125,10 +113,7 @@ impl AppContext for AsyncApp {
impl AsyncApp {
/// Schedules all windows in the application to be redrawn.
pub fn refresh(&self) -> Result<()> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut lock = app.borrow_mut();
lock.refresh_windows();
Ok(())
@@ -146,10 +131,7 @@ impl AsyncApp {
/// Invoke the given function in the context of the app, then flush any effects produced during its invocation.
pub fn update<R>(&self, f: impl FnOnce(&mut App) -> R) -> Result<R> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut lock = app.borrow_mut();
Ok(lock.update(f))
}
@@ -165,10 +147,7 @@ impl AsyncApp {
T: 'static + EventEmitter<Event>,
Event: 'static,
{
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut lock = app.borrow_mut();
let subscription = lock.subscribe(entity, on_event);
Ok(subscription)
@@ -183,10 +162,7 @@ impl AsyncApp {
where
V: 'static + Render,
{
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut lock = app.borrow_mut();
lock.open_window(options, build_root_view)
}
@@ -206,10 +182,7 @@ impl AsyncApp {
/// Determine whether global state of the specified type has been assigned.
/// Returns an error if the `App` has been dropped.
pub fn has_global<G: Global>(&self) -> Result<bool> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let app = app.borrow_mut();
Ok(app.has_global::<G>())
}
@@ -219,10 +192,7 @@ impl AsyncApp {
/// Panics if no global state of the specified type has been assigned.
/// Returns an error if the `App` has been dropped.
pub fn read_global<G: Global, R>(&self, read: impl FnOnce(&G, &App) -> R) -> Result<R> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let app = app.borrow_mut();
Ok(read(app.global(), &app))
}
@@ -245,10 +215,7 @@ impl AsyncApp {
&self,
update: impl FnOnce(&mut G, &mut App) -> R,
) -> Result<R> {
- let app = self
- .app
- .upgrade()
- .ok_or_else(|| anyhow!("app was released"))?;
+ let app = self.app.upgrade().context("app was released")?;
let mut app = app.borrow_mut();
Ok(app.update(|cx| cx.update_global(update)))
}
@@ -1,5 +1,5 @@
use crate::{App, AppContext, VisualContext, Window, seal::Sealed};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::FxHashSet;
use derive_more::{Deref, DerefMut};
use parking_lot::{RwLock, RwLockUpgradableReadGuard};
@@ -692,7 +692,7 @@ impl<T: 'static> WeakEntity<T> {
{
crate::Flatten::flatten(
self.upgrade()
- .ok_or_else(|| anyhow!("entity released"))
+ .context("entity released")
.map(|this| cx.update_entity(&this, update)),
)
}
@@ -710,7 +710,7 @@ impl<T: 'static> WeakEntity<T> {
Result<C::Result<R>>: crate::Flatten<R>,
{
let window = cx.window_handle();
- let this = self.upgrade().ok_or_else(|| anyhow!("entity released"))?;
+ let this = self.upgrade().context("entity released")?;
crate::Flatten::flatten(window.update(cx, |_, window, cx| {
this.update(cx, |entity, cx| update(entity, window, cx))
@@ -727,7 +727,7 @@ impl<T: 'static> WeakEntity<T> {
{
crate::Flatten::flatten(
self.upgrade()
- .ok_or_else(|| anyhow!("entity release"))
+ .context("entity released")
.map(|this| cx.read_entity(&this, read)),
)
}
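Most of the `.context(...)` rewrites above work because `anyhow::Context` is implemented for `Option<T>` as well as for `Result<T, E>`, which is also why the imports switch to `use anyhow::Context as _`. A small sketch of the `Option` case, using an illustrative `Weak` field rather than any type from this change:

```rust
use std::rc::{Rc, Weak};

use anyhow::Context as _;

struct Handle {
    app: Weak<String>,
}

impl Handle {
    // Option::context converts a None into an anyhow::Error carrying the
    // given message, replacing the older `.ok_or_else(|| anyhow!("..."))`.
    fn app_name(&self) -> anyhow::Result<Rc<String>> {
        self.app.upgrade().context("app was released")
    }
}
```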
@@ -1,4 +1,4 @@
-use anyhow::{Context, bail};
+use anyhow::{Context as _, bail};
use serde::de::{self, Deserialize, Deserializer, Visitor};
use std::{
fmt::{self, Display, Formatter},
@@ -5,7 +5,7 @@ use crate::{
SMOOTH_SVG_SCALE_FACTOR, SharedString, SharedUri, StyleRefinement, Styled, SvgSize, Task,
Window, px, swap_rgba_pa_to_bgra,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use futures::{AsyncReadExt, Future};
use image::{
@@ -595,7 +595,7 @@ impl Asset for ImageAssetLoader {
let mut response = client
.get(uri.as_ref(), ().into(), true)
.await
- .map_err(|e| anyhow!(e))?;
+ .with_context(|| format!("loading image asset from {uri:?}"))?;
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
if !response.status().is_success() {
@@ -4,7 +4,7 @@ use crate::{
Pixels, Point, SharedString, Size, TextOverflow, TextRun, TextStyle, TooltipId, WhiteSpace,
Window, WrappedLine, WrappedLineLayout, register_tooltip_mouse_handlers, set_tooltip_on_window,
};
-use anyhow::anyhow;
+use anyhow::Context as _;
use smallvec::SmallVec;
use std::{
cell::{Cell, RefCell},
@@ -401,7 +401,7 @@ impl TextLayout {
let mut element_state = self.0.borrow_mut();
let element_state = element_state
.as_mut()
- .ok_or_else(|| anyhow!("measurement has not been performed on {}", text))
+ .with_context(|| format!("measurement has not been performed on {text}"))
.unwrap();
element_state.bounds = Some(bounds);
}
@@ -410,11 +410,11 @@ impl TextLayout {
let element_state = self.0.borrow();
let element_state = element_state
.as_ref()
- .ok_or_else(|| anyhow!("measurement has not been performed on {}", text))
+ .with_context(|| format!("measurement has not been performed on {text}"))
.unwrap();
let bounds = element_state
.bounds
- .ok_or_else(|| anyhow!("prepaint has not been performed on {:?}", text))
+ .with_context(|| format!("prepaint has not been performed on {text}"))
.unwrap();
let line_height = element_state.line_height;
@@ -1,5 +1,5 @@
use crate::SharedString;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use std::fmt;
/// A datastructure for resolving whether an action should be dispatched
@@ -243,7 +243,7 @@ impl KeyBindingContextPredicate {
let source = skip_whitespace(source);
let (predicate, rest) = Self::parse_expr(source, 0)?;
if let Some(next) = rest.chars().next() {
- Err(anyhow!("unexpected character '{next:?}'"))
+ anyhow::bail!("unexpected character '{next:?}'");
} else {
Ok(predicate)
}
@@ -329,20 +329,14 @@ impl KeyBindingContextPredicate {
}
fn parse_primary(mut source: &str) -> anyhow::Result<(Self, &str)> {
- let next = source
- .chars()
- .next()
- .ok_or_else(|| anyhow!("unexpected end"))?;
+ let next = source.chars().next().context("unexpected end")?;
match next {
'(' => {
source = skip_whitespace(&source[1..]);
let (predicate, rest) = Self::parse_expr(source, 0)?;
- if let Some(stripped) = rest.strip_prefix(')') {
- source = skip_whitespace(stripped);
- Ok((predicate, source))
- } else {
- Err(anyhow!("expected a ')'"))
- }
+ let stripped = rest.strip_prefix(')').context("expected a ')'")?;
+ source = skip_whitespace(stripped);
+ Ok((predicate, source))
}
'!' => {
let source = skip_whitespace(&source[1..]);
@@ -368,7 +362,7 @@ impl KeyBindingContextPredicate {
source,
))
}
- _ => Err(anyhow!("unexpected character '{next:?}'")),
+ _ => anyhow::bail!("unexpected character '{next:?}'"),
}
}
@@ -388,7 +382,7 @@ impl KeyBindingContextPredicate {
if let (Self::Identifier(left), Self::Identifier(right)) = (self, other) {
Ok(Self::Equal(left, right))
} else {
- Err(anyhow!("operands of == must be identifiers"))
+ anyhow::bail!("operands of == must be identifiers");
}
}
@@ -396,7 +390,7 @@ impl KeyBindingContextPredicate {
if let (Self::Identifier(left), Self::Identifier(right)) = (self, other) {
Ok(Self::NotEqual(left, right))
} else {
- Err(anyhow!("operands of != must be identifiers"))
+ anyhow::bail!("operands of != must be identifiers");
}
}
}
@@ -30,7 +30,7 @@ impl BladeContext {
..Default::default()
})
}
- .map_err(|e| anyhow::anyhow!("{:?}", e))?,
+ .map_err(|e| anyhow::anyhow!("{e:?}"))?,
);
Ok(Self { gpu })
}
@@ -49,8 +49,7 @@ fn parse_pci_id(id: &str) -> anyhow::Result<u32> {
"Expected a 4 digit PCI ID in hexadecimal format"
);
- return u32::from_str_radix(id, 16)
- .map_err(|_| anyhow::anyhow!("Failed to parse PCI ID as hex"));
+ return u32::from_str_radix(id, 16).context("parsing PCI ID as hex");
}
#[cfg(test)]
@@ -95,9 +95,7 @@ impl LinuxClient for HeadlessClient {
_handle: AnyWindowHandle,
_params: WindowParams,
) -> anyhow::Result<Box<dyn PlatformWindow>> {
- Err(anyhow::anyhow!(
- "neither DISPLAY nor WAYLAND_DISPLAY is set. You can run in headless mode"
- ))
+ anyhow::bail!("neither DISPLAY nor WAYLAND_DISPLAY is set. You can run in headless mode");
}
fn compositor_name(&self) -> &'static str {
@@ -490,7 +490,7 @@ impl<P: LinuxClient + 'static> Platform for P {
let attributes = item.attributes().await?;
let username = attributes
.get("username")
- .ok_or_else(|| anyhow!("Cannot find username in stored credentials"))?;
+ .context("Cannot find username in stored credentials")?;
let secret = item.secret().await?;
// we lose the zeroizing capabilities at this boundary,
@@ -3,7 +3,7 @@ use crate::{
GlyphId, LineLayout, Pixels, PlatformTextSystem, Point, RenderGlyphParams, SUBPIXEL_VARIANTS,
ShapedGlyph, ShapedRun, SharedString, Size, point, size,
};
-use anyhow::{Context as _, Ok, Result, anyhow};
+use anyhow::{Context as _, Ok, Result};
use collections::HashMap;
use cosmic_text::{
Attrs, AttrsList, CacheKey, Family, Font as CosmicTextFont, FontFeatures as CosmicFontFeatures,
@@ -232,7 +232,7 @@ impl CosmicTextSystemState {
let font = self
.font_system
.get_font(font_id)
- .ok_or_else(|| anyhow!("Could not load font"))?;
+ .context("Could not load font")?;
// HACK: To let the storybook run and render Windows caption icons. We should actually do better font fallback.
let allowed_bad_font_names = [
@@ -309,7 +309,7 @@ impl CosmicTextSystemState {
glyph_bounds: Bounds<DevicePixels>,
) -> Result<(Size<DevicePixels>, Vec<u8>)> {
if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 {
- Err(anyhow!("glyph bounds are empty"))
+ anyhow::bail!("glyph bounds are empty");
} else {
let bitmap_size = glyph_bounds.size;
let font = &self.loaded_fonts[params.font_id.0].font;
@@ -469,7 +469,7 @@ impl TryFrom<&FontFeatures> for CosmicFontFeatures {
.0
.as_bytes()
.try_into()
- .map_err(|_| anyhow!("Incorrect feature flag format"))?;
+ .context("Incorrect feature flag format")?;
let tag = cosmic_text::FeatureTag::new(&name_bytes);
@@ -3,6 +3,7 @@ use std::{
hash::{Hash, Hasher},
};
+use anyhow::Context as _;
use uuid::Uuid;
use wayland_backend::client::ObjectId;
@@ -28,11 +29,11 @@ impl PlatformDisplay for WaylandDisplay {
}
fn uuid(&self) -> anyhow::Result<Uuid> {
- if let Some(name) = &self.name {
- Ok(Uuid::new_v5(&Uuid::NAMESPACE_DNS, name.as_bytes()))
- } else {
- Err(anyhow::anyhow!("Wayland display does not have a name"))
- }
+ let name = self
+ .name
+ .as_ref()
+ .context("Wayland display does not have a name")?;
+ Ok(Uuid::new_v5(&Uuid::NAMESPACE_DNS, name.as_bytes()))
}
fn bounds(&self) -> Bounds<Pixels> {
@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::Context as _;
use uuid::Uuid;
use x11rb::{connection::Connection as _, xcb_ffi::XCBConnection};
@@ -17,12 +17,11 @@ impl X11Display {
scale_factor: f32,
x_screen_index: usize,
) -> anyhow::Result<Self> {
- let Some(screen) = xcb.setup().roots.get(x_screen_index) else {
- return Err(anyhow::anyhow!(
- "No screen found with index {}",
- x_screen_index
- ));
- };
+ let screen = xcb
+ .setup()
+ .roots
+ .get(x_screen_index)
+ .with_context(|| format!("No screen found with index {x_screen_index}"))?;
Ok(Self {
x_screen_index,
bounds: Bounds {
@@ -42,7 +41,7 @@ impl PlatformDisplay for X11Display {
DisplayId(self.x_screen_index as u32)
}
- fn uuid(&self) -> Result<Uuid> {
+ fn uuid(&self) -> anyhow::Result<Uuid> {
Ok(self.uuid)
}
@@ -2,7 +2,7 @@ use crate::{
AtlasKey, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, DevicePixels, PlatformAtlas,
Point, Size, platform::AtlasTextureList,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::FxHashMap;
use derive_more::{Deref, DerefMut};
use etagere::BucketedAtlasAllocator;
@@ -77,7 +77,7 @@ impl PlatformAtlas for MetalAtlas {
};
let tile = lock
.allocate(size, key.texture_kind())
- .ok_or_else(|| anyhow!("failed to allocate"))?;
+ .context("failed to allocate")?;
let texture = lock.texture(tile.texture_id);
texture.upload(tile.bounds, &bytes);
lock.tiles_by_key.insert(key.clone(), tile.clone());
@@ -4,7 +4,7 @@ use crate::{
MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch,
Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline, point, size,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use block::ConcreteBlock;
use cocoa::{
base::{NO, YES},
@@ -376,14 +376,14 @@ impl MetalRenderer {
let command_buffer = command_queue.new_command_buffer();
let mut instance_offset = 0;
- let Some(path_tiles) = self.rasterize_paths(
- scene.paths(),
- instance_buffer,
- &mut instance_offset,
- command_buffer,
- ) else {
- return Err(anyhow!("failed to rasterize {} paths", scene.paths().len()));
- };
+ let path_tiles = self
+ .rasterize_paths(
+ scene.paths(),
+ instance_buffer,
+ &mut instance_offset,
+ command_buffer,
+ )
+ .with_context(|| format!("rasterizing {} paths", scene.paths().len()))?;
let render_pass_descriptor = metal::RenderPassDescriptor::new();
let color_attachment = render_pass_descriptor
@@ -471,7 +471,7 @@ impl MetalRenderer {
if !ok {
command_encoder.end_encoding();
- return Err(anyhow!(
+ anyhow::bail!(
"scene too large: {} paths, {} shadows, {} quads, {} underlines, {} mono, {} poly, {} surfaces",
scene.paths.len(),
scene.shadows.len(),
@@ -480,7 +480,7 @@ impl MetalRenderer {
scene.monochrome_sprites.len(),
scene.polychrome_sprites.len(),
scene.surfaces.len(),
- ));
+ );
}
}
@@ -638,7 +638,7 @@ impl Platform for MacPlatform {
Ok(())
} else {
let msg: id = msg_send![error, localizedDescription];
- Err(anyhow!("Failed to register: {:?}", msg))
+ Err(anyhow!("Failed to register: {msg:?}"))
};
if let Some(done_tx) = done_tx.take() {
@@ -832,11 +832,8 @@ impl Platform for MacPlatform {
fn app_path(&self) -> Result<PathBuf> {
unsafe {
let bundle: id = NSBundle::mainBundle();
- if bundle.is_null() {
- Err(anyhow!("app is not running inside a bundle"))
- } else {
- Ok(path_from_objc(msg_send![bundle, bundlePath]))
- }
+ anyhow::ensure!(!bundle.is_null(), "app is not running inside a bundle");
+ Ok(path_from_objc(msg_send![bundle, bundlePath]))
}
}
@@ -877,17 +874,11 @@ impl Platform for MacPlatform {
fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
unsafe {
let bundle: id = NSBundle::mainBundle();
- if bundle.is_null() {
- Err(anyhow!("app is not running inside a bundle"))
- } else {
- let name = ns_string(name);
- let url: id = msg_send![bundle, URLForAuxiliaryExecutable: name];
- if url.is_null() {
- Err(anyhow!("resource not found"))
- } else {
- ns_url_to_path(url)
- }
- }
+ anyhow::ensure!(!bundle.is_null(), "app is not running inside a bundle");
+ let name = ns_string(name);
+ let url: id = msg_send![bundle, URLForAuxiliaryExecutable: name];
+ anyhow::ensure!(!url.is_null(), "resource not found");
+ ns_url_to_path(url)
}
}
@@ -1101,10 +1092,7 @@ impl Platform for MacPlatform {
verb = "creating";
status = SecItemAdd(attrs.as_concrete_TypeRef(), ptr::null_mut());
}
-
- if status != errSecSuccess {
- return Err(anyhow!("{} password failed: {}", verb, status));
- }
+ anyhow::ensure!(status == errSecSuccess, "{verb} password failed: {status}");
}
Ok(())
})
@@ -1131,24 +1119,24 @@ impl Platform for MacPlatform {
match status {
security::errSecSuccess => {}
security::errSecItemNotFound | security::errSecUserCanceled => return Ok(None),
- _ => return Err(anyhow!("reading password failed: {}", status)),
+ _ => anyhow::bail!("reading password failed: {status}"),
}
let result = CFType::wrap_under_create_rule(result)
.downcast::<CFDictionary>()
- .ok_or_else(|| anyhow!("keychain item was not a dictionary"))?;
+ .context("keychain item was not a dictionary")?;
let username = result
.find(kSecAttrAccount as *const _)
- .ok_or_else(|| anyhow!("account was missing from keychain item"))?;
+ .context("account was missing from keychain item")?;
let username = CFType::wrap_under_get_rule(*username)
.downcast::<CFString>()
- .ok_or_else(|| anyhow!("account was not a string"))?;
+ .context("account was not a string")?;
let password = result
.find(kSecValueData as *const _)
- .ok_or_else(|| anyhow!("password was missing from keychain item"))?;
+ .context("password was missing from keychain item")?;
let password = CFType::wrap_under_get_rule(*password)
.downcast::<CFData>()
- .ok_or_else(|| anyhow!("password was not a string"))?;
+ .context("password was not a string")?;
Ok(Some((username.to_string(), password.bytes().to_vec())))
}
@@ -1168,10 +1156,7 @@ impl Platform for MacPlatform {
query_attrs.set(kSecAttrServer as *const _, url.as_CFTypeRef());
let status = SecItemDelete(query_attrs.as_concrete_TypeRef());
-
- if status != errSecSuccess {
- return Err(anyhow!("delete password failed: {}", status));
- }
+ anyhow::ensure!(status == errSecSuccess, "delete password failed: {status}");
}
Ok(())
})
@@ -1455,15 +1440,12 @@ unsafe fn ns_string(string: &str) -> id {
unsafe fn ns_url_to_path(url: id) -> Result<PathBuf> {
let path: *mut c_char = msg_send![url, fileSystemRepresentation];
- if path.is_null() {
- Err(anyhow!("url is not a file path: {}", unsafe {
- CStr::from_ptr(url.absoluteString().UTF8String()).to_string_lossy()
- }))
- } else {
- Ok(PathBuf::from(OsStr::from_bytes(unsafe {
- CStr::from_ptr(path).to_bytes()
- })))
- }
+ anyhow::ensure!(!path.is_null(), "url is not a file path: {}", unsafe {
+ CStr::from_ptr(url.absoluteString().UTF8String()).to_string_lossy()
+ });
+ Ok(PathBuf::from(OsStr::from_bytes(unsafe {
+ CStr::from_ptr(path).to_bytes()
+ })))
}
#[link(name = "Carbon", kind = "framework")]
@@ -194,7 +194,7 @@ impl MacTextSystemState {
core_graphics::data_provider::CGDataProvider::from_slice(embedded_font)
};
let font = core_graphics::font::CGFont::from_data_provider(data_provider)
- .map_err(|_| anyhow!("Could not load an embedded font."))?;
+ .map_err(|()| anyhow!("Could not load an embedded font."))?;
let font = font_kit::loaders::core_text::Font::from_core_graphics_font(font);
Ok(Handle::from_native(&font))
}
@@ -348,7 +348,7 @@ impl MacTextSystemState {
glyph_bounds: Bounds<DevicePixels>,
) -> Result<(Size<DevicePixels>, Vec<u8>)> {
if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 {
- Err(anyhow!("glyph bounds are empty"))
+ anyhow::bail!("glyph bounds are empty");
} else {
// Add an extra pixel when the subpixel variant isn't zero to make room for anti-aliasing.
let mut bitmap_size = glyph_bounds.size;
@@ -54,9 +54,7 @@ impl DockMenuItem {
},
action,
}),
- _ => Err(anyhow::anyhow!(
- "Only `MenuItem::Action` is supported for dock menu on Windows."
- )),
+ _ => anyhow::bail!("Only `MenuItem::Action` is supported for dock menu on Windows."),
}
}
}
@@ -1,7 +1,7 @@
use std::{borrow::Cow, sync::Arc};
use ::util::ResultExt;
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use collections::HashMap;
use itertools::Itertools;
use parking_lot::{RwLock, RwLockUpgradableReadGuard};
@@ -729,7 +729,7 @@ impl DirectWriteState {
glyph_bounds: Bounds<DevicePixels>,
) -> Result<(Size<DevicePixels>, Vec<u8>)> {
if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 {
- return Err(anyhow!("glyph bounds are empty"));
+ anyhow::bail!("glyph bounds are empty");
}
let font_info = &self.fonts[params.font_id.0];
@@ -1301,7 +1301,7 @@ fn get_postscript_name(font_face: &IDWriteFontFace3, locale: &str) -> Result<Str
)?
};
if !exists.as_bool() || info.is_none() {
- return Err(anyhow!("No postscript name found for font face"));
+ anyhow::bail!("No postscript name found for font face");
}
get_name(info.unwrap(), locale)
@@ -1393,9 +1393,7 @@ fn get_name(string: IDWriteLocalizedStrings, locale: &str) -> Result<String> {
&mut exists as _,
)?
};
- if !exists.as_bool() {
- return Err(anyhow!("No localised string for {}", locale));
- }
+ anyhow::ensure!(exists.as_bool(), "No localised string for {locale}");
}
let name_length = unsafe { string.GetStringLength(locale_name_index) }? as usize;
@@ -576,7 +576,7 @@ impl Platform for WindowsPlatform {
// todo(windows)
fn path_for_auxiliary_executable(&self, _name: &str) -> Result<PathBuf> {
- Err(anyhow!("not yet implemented"))
+ anyhow::bail!("not yet implemented");
}
fn set_cursor_style(&self, style: CursorStyle) {
@@ -1,5 +1,4 @@
use crate::{AssetSource, DevicePixels, IsZero, Result, SharedString, Size};
-use anyhow::anyhow;
use resvg::tiny_skia::Pixmap;
use std::{
hash::Hash,
@@ -56,9 +55,7 @@ impl SvgRenderer {
}
pub(crate) fn render(&self, params: &RenderSvgParams) -> Result<Option<Vec<u8>>> {
- if params.size.is_zero() {
- return Err(anyhow!("can't render at a zero size"));
- }
+ anyhow::ensure!(!params.size.is_zero(), "can't render at a zero size");
// Load the tree.
let Some(bytes) = self.asset_source.load(&params.path)? else {
@@ -16,7 +16,7 @@ use crate::{
Bounds, DevicePixels, Hsla, Pixels, PlatformTextSystem, Point, Result, SharedString, Size,
StrikethroughStyle, UnderlineStyle, px,
};
-use anyhow::anyhow;
+use anyhow::{Context as _, anyhow};
use collections::FxHashMap;
use core::fmt;
use derive_more::Deref;
@@ -100,7 +100,7 @@ impl TextSystem {
fn clone_font_id_result(font_id: &Result<FontId>) -> Result<FontId> {
match font_id {
Ok(font_id) => Ok(*font_id),
- Err(err) => Err(anyhow!("{}", err)),
+ Err(err) => Err(anyhow!("{err}")),
}
}
@@ -174,7 +174,7 @@ impl TextSystem {
let glyph_id = self
.platform_text_system
.glyph_for_char(font_id, character)
- .ok_or_else(|| anyhow!("glyph not found for character '{}'", character))?;
+ .with_context(|| format!("glyph not found for character '{character}'"))?;
let bounds = self
.platform_text_system
.typographic_bounds(font_id, glyph_id)?;
@@ -188,7 +188,7 @@ impl TextSystem {
let glyph_id = self
.platform_text_system
.glyph_for_char(font_id, ch)
- .ok_or_else(|| anyhow!("glyph not found for character '{}'", ch))?;
+ .with_context(|| format!("glyph not found for character '{ch}'"))?;
let result = self.platform_text_system.advance(font_id, glyph_id)?
/ self.units_per_em(font_id) as f32;
@@ -3922,7 +3922,7 @@ impl<V: 'static + Render> WindowHandle<V> {
.and_then(|window| window.root.clone())
.map(|root_view| root_view.downcast::<V>())
})
- .ok_or_else(|| anyhow!("window not found"))?
+ .context("window not found")?
.map_err(|_| anyhow!("the type of the window's root view has changed"))?;
Ok(x.read(cx))
@@ -4103,7 +4103,7 @@ impl TryInto<SharedString> for ElementId {
if let ElementId::Name(name) = self {
Ok(name)
} else {
- Err(anyhow!("element id is not string"))
+ anyhow::bail!("element id is not string")
}
}
}
@@ -1,5 +1,5 @@
use crate::HttpClient;
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow, bail};
use futures::AsyncReadExt;
use serde::Deserialize;
use std::sync::Arc;
@@ -31,7 +31,7 @@ pub async fn latest_github_release(
require_assets: bool,
pre_release: bool,
http: Arc<dyn HttpClient>,
-) -> Result<GithubRelease, anyhow::Error> {
+) -> anyhow::Result<GithubRelease> {
let mut response = http
.get(
format!("https://api.github.com/repos/{repo_name_with_owner}/releases").as_str(),
@@ -60,12 +60,12 @@ pub async fn latest_github_release(
Ok(releases) => releases,
Err(err) => {
- log::error!("Error deserializing: {:?}", err);
+ log::error!("Error deserializing: {err:?}");
log::error!(
"GitHub API response text: {:?}",
String::from_utf8_lossy(body.as_slice())
);
- return Err(anyhow!("error deserializing latest release"));
+ anyhow::bail!("error deserializing latest release: {err:?}");
}
};
@@ -73,14 +73,14 @@ pub async fn latest_github_release(
.into_iter()
.filter(|release| !require_assets || !release.assets.is_empty())
.find(|release| release.pre_release == pre_release)
- .ok_or(anyhow!("Failed to find a release"))
+ .context("finding a prerelease")
}
pub async fn get_release_by_tag_name(
repo_name_with_owner: &str,
tag: &str,
http: Arc<dyn HttpClient>,
-) -> Result<GithubRelease, anyhow::Error> {
+) -> anyhow::Result<GithubRelease> {
let mut response = http
.get(
&format!("https://api.github.com/repos/{repo_name_with_owner}/releases/tags/{tag}"),
@@ -107,12 +107,12 @@ pub async fn get_release_by_tag_name(
}
let release = serde_json::from_slice::<GithubRelease>(body.as_slice()).map_err(|err| {
- log::error!("Error deserializing: {:?}", err);
+ log::error!("Error deserializing: {err:?}");
log::error!(
"GitHub API response text: {:?}",
String::from_utf8_lossy(body.as_slice())
);
- anyhow!("error deserializing GitHub release")
+ anyhow!("error deserializing GitHub release: {err:?}")
})?;
Ok(release)
@@ -140,7 +140,7 @@ pub fn build_asset_url(repo_name_with_owner: &str, tag: &str, kind: AssetKind) -
}
);
url.path_segments_mut()
- .map_err(|_| anyhow!("cannot modify url path segments"))?
+ .map_err(|()| anyhow!("cannot modify url path segments"))?
.push(&asset_filename);
Ok(url.to_string())
}
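When the message needs interpolation, the hunks above reach for `.with_context(|| format!(...))` rather than `.context(format!(...))`, so the string is only allocated on the error path. A hedged example of the difference; the lookup table is illustrative:

```rust
use std::collections::HashMap;

use anyhow::{Context as _, Result};

// with_context takes a closure, so the formatted message is only built when
// the Option is actually None; context would format it unconditionally.
fn glyph_id(table: &HashMap<char, u32>, ch: char) -> Result<u32> {
    table
        .get(&ch)
        .copied()
        .with_context(|| format!("glyph not found for character '{ch}'"))
}
```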
@@ -42,14 +42,14 @@ pub trait HttpClient: 'static + Send + Sync {
fn send(
&self,
req: http::Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>;
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>>;
fn get<'a>(
&'a self,
uri: &str,
body: AsyncBody,
follow_redirects: bool,
- ) -> BoxFuture<'a, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'a, anyhow::Result<Response<AsyncBody>>> {
let request = Builder::new()
.uri(uri)
.follow_redirects(if follow_redirects {
@@ -69,7 +69,7 @@ pub trait HttpClient: 'static + Send + Sync {
&'a self,
uri: &str,
body: AsyncBody,
- ) -> BoxFuture<'a, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'a, anyhow::Result<Response<AsyncBody>>> {
let request = Builder::new()
.uri(uri)
.method(Method::POST)
@@ -114,7 +114,7 @@ impl HttpClient for HttpClientWithProxy {
fn send(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
self.client.send(req)
}
@@ -131,7 +131,7 @@ impl HttpClient for Arc<HttpClientWithProxy> {
fn send(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
self.client.send(req)
}
@@ -246,7 +246,7 @@ impl HttpClient for Arc<HttpClientWithUrl> {
fn send(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
self.client.send(req)
}
@@ -263,7 +263,7 @@ impl HttpClient for HttpClientWithUrl {
fn send(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
self.client.send(req)
}
@@ -304,7 +304,7 @@ impl HttpClient for BlockedHttpClient {
fn send(
&self,
_req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
Box::pin(async {
Err(std::io::Error::new(
std::io::ErrorKind::PermissionDenied,
@@ -325,7 +325,7 @@ impl HttpClient for BlockedHttpClient {
#[cfg(feature = "test-support")]
type FakeHttpHandler = Box<
- dyn Fn(Request<AsyncBody>) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>
+ dyn Fn(Request<AsyncBody>) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>>
+ Send
+ Sync
+ 'static,
@@ -340,7 +340,7 @@ pub struct FakeHttpClient {
impl FakeHttpClient {
pub fn create<Fut, F>(handler: F) -> Arc<HttpClientWithUrl>
where
- Fut: futures::Future<Output = Result<Response<AsyncBody>, anyhow::Error>> + Send + 'static,
+ Fut: futures::Future<Output = anyhow::Result<Response<AsyncBody>>> + Send + 'static,
F: Fn(Request<AsyncBody>) -> Fut + Send + Sync + 'static,
{
Arc::new(HttpClientWithUrl {
@@ -385,7 +385,7 @@ impl HttpClient for FakeHttpClient {
fn send(
&self,
req: Request<AsyncBody>,
- ) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
+ ) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
let future = (self.handler)(req);
future
}
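The signature changes in this file are purely cosmetic: `anyhow::Result<T>` is a type alias for `Result<T, anyhow::Error>`, so rewriting `BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>` as `BoxFuture<'static, anyhow::Result<Response<AsyncBody>>>` changes spelling only. A two-line sketch of the equivalence:

```rust
// The two return types below are identical; anyhow::Result<T> is an alias.
fn spelled_out() -> Result<String, anyhow::Error> { Ok("ok".into()) }
fn aliased() -> anyhow::Result<String> { Ok("ok".into()) }
```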
@@ -222,11 +222,11 @@ impl SerializableItem for ImageView {
item_id: ItemId,
window: &mut Window,
cx: &mut App,
- ) -> Task<gpui::Result<Entity<Self>>> {
+ ) -> Task<anyhow::Result<Entity<Self>>> {
window.spawn(cx, async move |cx| {
let image_path = IMAGE_VIEWER
.get_image_path(item_id, workspace_id)?
- .ok_or_else(|| anyhow::anyhow!("No image path found"))?;
+ .context("No image path found")?;
let (worktree, relative_path) = project
.update(cx, |project, cx| {
@@ -256,7 +256,7 @@ impl SerializableItem for ImageView {
alive_items: Vec<ItemId>,
_window: &mut Window,
cx: &mut App,
- ) -> Task<gpui::Result<()>> {
+ ) -> Task<anyhow::Result<()>> {
delete_unloaded_items(
alive_items,
workspace_id,
@@ -273,7 +273,7 @@ impl SerializableItem for ImageView {
_closing: bool,
_window: &mut Window,
cx: &mut Context<Self>,
- ) -> Option<Task<gpui::Result<()>>> {
+ ) -> Option<Task<anyhow::Result<()>>> {
let workspace_id = workspace.database_id()?;
let image_path = self.image_item.read(cx).abs_path(cx)?;
@@ -28,10 +28,7 @@ impl Settings for ImageViewerSettings {
type FileContent = Self;
- fn load(
- sources: SettingsSources<Self::FileContent>,
- _: &mut App,
- ) -> Result<Self, anyhow::Error> {
+ fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
SettingsSources::<Self::FileContent>::json_merge_with(
[sources.default]
.into_iter()
@@ -12,7 +12,7 @@ use std::path::PathBuf;
use std::sync::{Arc, LazyLock};
use std::time::{Duration, Instant};
-use anyhow::{Context, Result, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use collections::{HashSet, VecDeque};
use fs::Fs;
@@ -2,7 +2,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
use derive_more::{Deref, Display};
@@ -66,7 +66,7 @@ impl IndexedDocsStore {
let registry = IndexedDocsRegistry::global(cx);
registry
.get_provider_store(provider.clone())
- .ok_or_else(|| anyhow!("no indexed docs store found for {provider}"))
+ .with_context(|| format!("no indexed docs store found for {provider}"))
}
pub fn new(
@@ -285,7 +285,7 @@ impl IndexedDocsDatabase {
let txn = env.read_txn()?;
entries
.get(&txn, &key)?
- .ok_or_else(|| anyhow!("no docs found for {key}"))
+ .with_context(|| format!("no docs found for {key}"))
})
}
@@ -1,7 +1,7 @@
use std::ops::Range;
use std::str::FromStr as _;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use gpui::http_client::http::{HeaderMap, HeaderValue};
use gpui::{App, Context, Entity, SharedString};
use language::Buffer;
@@ -69,15 +69,15 @@ impl EditPredictionUsage {
pub fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
let limit = headers
.get(EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME)
- .ok_or_else(|| {
- anyhow!("missing {EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME:?} header")
+ .with_context(|| {
+ format!("missing {EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME:?} header")
})?;
let limit = UsageLimit::from_str(limit.to_str()?)?;
let amount = headers
.get(EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME)
- .ok_or_else(|| {
- anyhow!("missing {EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME:?} header")
+ .with_context(|| {
+ format!("missing {EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME:?} header")
})?;
let amount = amount.to_str()?.parse::<i32>()?;
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use client::ZED_URL_SCHEME;
use gpui::{AppContext as _, AsyncApp, Context, PromptLevel, Window, actions};
use release_channel::ReleaseChannel;
@@ -55,11 +55,8 @@ async fn install_script(cx: &AsyncApp) -> Result<PathBuf> {
.output()
.await?
.status;
- if status.success() {
- Ok(link_path.into())
- } else {
- Err(anyhow!("error running osascript"))
- }
+ anyhow::ensure!(status.success(), "error running osascript");
+ Ok(link_path.into())
}
pub async fn register_zed_scheme(cx: &AsyncApp) -> anyhow::Result<()> {
@@ -17,7 +17,7 @@ use crate::{
task_context::RunnableRange,
text_diff::text_diff,
};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_watch as watch;
pub use clock::ReplicaId;
use clock::{AGENT_REPLICA_ID, Lamport};
@@ -816,13 +816,11 @@ impl Buffer {
message: proto::BufferState,
file: Option<Arc<dyn File>>,
) -> Result<Self> {
- let buffer_id = BufferId::new(message.id)
- .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
+ let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
let mut this = Self::build(buffer, file, capability);
this.text.set_line_ending(proto::deserialize_line_ending(
- rpc::proto::LineEnding::from_i32(message.line_ending)
- .ok_or_else(|| anyhow!("missing line_ending"))?,
+ rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
));
this.saved_version = proto::deserialize_version(&message.saved_version);
this.saved_mtime = message.saved_mtime.map(|time| time.into());
@@ -24,7 +24,7 @@ pub mod buffer_tests;
pub use crate::language_settings::EditPredictionsMode;
use crate::language_settings::SoftWrap;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use collections::{HashMap, HashSet, IndexSet};
use fs::Fs;
@@ -368,9 +368,7 @@ pub trait LspAdapter: 'static + Send + Sync {
}
}
- if !binary_options.allow_binary_download {
- return Err(anyhow!("downloading language servers disabled"));
- }
+ anyhow::ensure!(binary_options.allow_binary_download, "downloading language servers disabled");
if let Some(cached_binary) = cached_binary.as_ref() {
return Ok(cached_binary.clone());
@@ -1296,17 +1294,13 @@ impl Language {
}
pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
grammar.highlights_query = Some(Query::new(&grammar.ts_language, source)?);
Ok(self)
}
pub fn with_runnable_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut extra_captures = Vec::with_capacity(query.capture_names().len());
@@ -1329,9 +1323,7 @@ impl Language {
}
pub fn with_outline_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut item_capture_ix = None;
let mut name_capture_ix = None;
@@ -1368,9 +1360,7 @@ impl Language {
}
pub fn with_text_object_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut text_objects_by_capture_ix = Vec::new();
@@ -1388,9 +1378,7 @@ impl Language {
}
pub fn with_embedding_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut item_capture_ix = None;
let mut name_capture_ix = None;
@@ -1421,9 +1409,7 @@ impl Language {
}
pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut open_capture_ix = None;
let mut close_capture_ix = None;
@@ -1458,9 +1444,7 @@ impl Language {
}
pub fn with_indents_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut indent_capture_ix = None;
let mut start_capture_ix = None;
@@ -1488,9 +1472,7 @@ impl Language {
}
pub fn with_injection_query(mut self, source: &str) -> Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut language_capture_ix = None;
let mut injection_language_capture_ix = None;
@@ -1508,18 +1490,14 @@ impl Language {
language_capture_ix = match (language_capture_ix, injection_language_capture_ix) {
(None, Some(ix)) => Some(ix),
(Some(_), Some(_)) => {
- return Err(anyhow!(
- "both language and injection.language captures are present"
- ));
+ anyhow::bail!("both language and injection.language captures are present");
}
_ => language_capture_ix,
};
content_capture_ix = match (content_capture_ix, injection_content_capture_ix) {
(None, Some(ix)) => Some(ix),
(Some(_), Some(_)) => {
- return Err(anyhow!(
- "both content and injection.content captures are present"
- ));
+ anyhow::bail!("both content and injection.content captures are present")
}
_ => content_capture_ix,
};
@@ -1553,10 +1531,7 @@ impl Language {
pub fn with_override_query(mut self, source: &str) -> anyhow::Result<Self> {
let query = {
- let grammar = self
- .grammar
- .as_ref()
- .ok_or_else(|| anyhow!("no grammar for language"))?;
+ let grammar = self.grammar.as_ref().context("no grammar for language")?;
Query::new(&grammar.ts_language, source)?
};
@@ -1607,10 +1582,10 @@ impl Language {
.values()
.any(|entry| entry.name == *referenced_name)
{
- Err(anyhow!(
+ anyhow::bail!(
"language {:?} has overrides in config not in query: {referenced_name:?}",
self.config.name
- ))?;
+ );
}
}
@@ -1633,9 +1608,7 @@ impl Language {
self.config.brackets.disabled_scopes_by_bracket_ix.clear();
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
grammar.override_config = Some(OverrideConfig {
query,
values: override_configs_by_id,
@@ -1644,9 +1617,7 @@ impl Language {
}
pub fn with_redaction_query(mut self, source: &str) -> anyhow::Result<Self> {
- let grammar = self
- .grammar_mut()
- .ok_or_else(|| anyhow!("cannot mutate grammar"))?;
+ let grammar = self.grammar_mut().context("cannot mutate grammar")?;
let query = Query::new(&grammar.ts_language, source)?;
let mut redaction_capture_ix = None;
@@ -2190,18 +2161,16 @@ pub fn point_from_lsp(point: lsp::Position) -> Unclipped<PointUtf16> {
}
pub fn range_to_lsp(range: Range<PointUtf16>) -> Result<lsp::Range> {
- if range.start > range.end {
- Err(anyhow!(
- "Inverted range provided to an LSP request: {:?}-{:?}",
- range.start,
- range.end
- ))
- } else {
- Ok(lsp::Range {
- start: point_to_lsp(range.start),
- end: point_to_lsp(range.end),
- })
- }
+ anyhow::ensure!(
+ range.start <= range.end,
+ "Inverted range provided to an LSP request: {:?}-{:?}",
+ range.start,
+ range.end
+ );
+ Ok(lsp::Range {
+ start: point_to_lsp(range.start),
+ end: point_to_lsp(range.end),
+ })
}
pub fn range_from_lsp(range: lsp::Range) -> Range<Unclipped<PointUtf16>> {
@@ -873,15 +873,13 @@ impl LanguageRegistry {
}
}
Err(e) => {
- log::error!("failed to load language {name}:\n{:?}", e);
+ log::error!("failed to load language {name}:\n{e:?}");
let mut state = this.state.write();
state.mark_language_loaded(id);
if let Some(mut txs) = state.loading_languages.remove(&id) {
for tx in txs.drain(..) {
let _ = tx.send(Err(anyhow!(
- "failed to load language {}: {}",
- name,
- e
+ "failed to load language {name}: {e}",
)));
}
}
@@ -944,7 +942,7 @@ impl LanguageRegistry {
let grammar_name = wasm_path
.file_stem()
.and_then(OsStr::to_str)
- .ok_or_else(|| anyhow!("invalid grammar filename"))?;
+ .context("invalid grammar filename")?;
anyhow::Ok(with_parser(|parser| {
let mut store = parser.take_wasm_store().unwrap();
let grammar = store.load_language(grammar_name, &wasm_bytes);
@@ -970,7 +968,7 @@ impl LanguageRegistry {
}
}
} else {
- tx.send(Err(Arc::new(anyhow!("no such grammar {}", name))))
+ tx.send(Err(Arc::new(anyhow!("no such grammar {name}"))))
.ok();
}
@@ -1,7 +1,7 @@
//! Handles conversions of `language` items to and from the [`rpc`] protocol.
use crate::{CursorShape, Diagnostic, diagnostic_set::DiagnosticEntry};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use clock::ReplicaId;
use lsp::{DiagnosticSeverity, LanguageServerId};
use rpc::proto;
@@ -259,10 +259,7 @@ pub fn deserialize_anchor_range(range: proto::AnchorRange) -> Result<Range<Ancho
/// Deserializes an [`crate::Operation`] from the RPC representation.
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
Ok(
- match message
- .variant
- .ok_or_else(|| anyhow!("missing operation variant"))?
- {
+ match message.variant.context("missing operation variant")? {
proto::operation::Variant::Edit(edit) => {
crate::Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
}
@@ -312,7 +309,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operati
line_mode: message.line_mode,
cursor_shape: deserialize_cursor_shape(
proto::CursorShape::from_i32(message.cursor_shape)
- .ok_or_else(|| anyhow!("Missing cursor shape"))?,
+ .context("Missing cursor shape")?,
),
}
}
@@ -510,11 +507,7 @@ pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
/// Deserializes a [`Transaction`] from the RPC representation.
pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
Ok(Transaction {
- id: deserialize_timestamp(
- transaction
- .id
- .ok_or_else(|| anyhow!("missing transaction id"))?,
- ),
+ id: deserialize_timestamp(transaction.id.context("missing transaction id")?),
edit_ids: transaction
.edit_ids
.into_iter()
@@ -4,6 +4,7 @@ mod syntax_map_tests;
use crate::{
Grammar, InjectionConfig, Language, LanguageId, LanguageRegistry, QUERY_CURSORS, with_parser,
};
+use anyhow::Context as _;
use collections::HashMap;
use futures::FutureExt;
use std::{
@@ -1246,7 +1247,7 @@ fn parse_text(
old_tree.as_ref(),
None,
)
- .ok_or_else(|| anyhow::anyhow!("failed to parse"))
+ .context("failed to parse")
})
}
@@ -8,7 +8,7 @@ mod telemetry;
#[cfg(any(test, feature = "test-support"))]
pub mod fake_provider;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use client::Client;
use futures::FutureExt;
use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
@@ -122,12 +122,16 @@ impl RequestUsage {
pub fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
let limit = headers
.get(MODEL_REQUESTS_USAGE_LIMIT_HEADER_NAME)
- .ok_or_else(|| anyhow!("missing {MODEL_REQUESTS_USAGE_LIMIT_HEADER_NAME:?} header"))?;
+ .with_context(|| {
+ format!("missing {MODEL_REQUESTS_USAGE_LIMIT_HEADER_NAME:?} header")
+ })?;
let limit = UsageLimit::from_str(limit.to_str()?)?;
let amount = headers
.get(MODEL_REQUESTS_USAGE_AMOUNT_HEADER_NAME)
- .ok_or_else(|| anyhow!("missing {MODEL_REQUESTS_USAGE_AMOUNT_HEADER_NAME:?} header"))?;
+ .with_context(|| {
+ format!("missing {MODEL_REQUESTS_USAGE_AMOUNT_HEADER_NAME:?} header")
+ })?;
let amount = amount.to_str()?.parse::<i32>()?;
Ok(Self { limit, amount })
@@ -403,7 +403,7 @@ impl AnthropicModel {
};
async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?;
+ let api_key = api_key.context("Missing Anthropic API Key")?;
let request =
anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
request.await.context("failed to stream completion")
@@ -365,10 +365,10 @@ struct BedrockModel {
}
impl BedrockModel {
- fn get_or_init_client(&self, cx: &AsyncApp) -> Result<&BedrockClient, anyhow::Error> {
+ fn get_or_init_client(&self, cx: &AsyncApp) -> anyhow::Result<&BedrockClient> {
self.client
.get_or_try_init_blocking(|| {
- let Ok((auth_method, credentials, endpoint, region, settings)) =
+ let (auth_method, credentials, endpoint, region, settings) =
cx.read_entity(&self.state, |state, _cx| {
let auth_method = state
.settings
@@ -390,10 +390,7 @@ impl BedrockModel {
region,
state.settings.clone(),
)
- })
- else {
- return Err(anyhow!("App state dropped"));
- };
+ })?;
let mut config_builder = aws_config::defaults(BehaviorVersion::latest())
.stalled_stream_protection(StalledStreamProtectionConfig::disabled())
@@ -438,13 +435,11 @@ impl BedrockModel {
}
let config = self.handler.block_on(config_builder.load());
- Ok(BedrockClient::new(&config))
+ anyhow::Ok(BedrockClient::new(&config))
})
- .map_err(|err| anyhow!("Failed to initialize Bedrock client: {err}"))?;
+ .context("initializing Bedrock client")?;
- self.client
- .get()
- .ok_or_else(|| anyhow!("Bedrock client not initialized"))
+ self.client.get().context("Bedrock client not initialized")
}
fn stream_completion(
@@ -544,7 +539,10 @@ impl LanguageModel for BedrockModel {
region
}) else {
- return async move { Err(anyhow!("App State Dropped")) }.boxed();
+ return async move {
+ anyhow::bail!("App State Dropped");
+ }
+ .boxed();
};
let model_id = match self.model.cross_region_inference_id(&region) {
@@ -720,7 +718,7 @@ pub fn into_bedrock(
BedrockToolChoice::Any(BedrockAnyToolChoice::builder().build())
}
Some(LanguageModelToolChoice::None) => {
- return Err(anyhow!("LanguageModelToolChoice::None is not supported"));
+ anyhow::bail!("LanguageModelToolChoice::None is not supported");
}
};
let tool_config: BedrockToolConfig = BedrockToolConfig::builder()
@@ -615,7 +615,7 @@ impl CloudLanguageModel {
}
}
- return Err(anyhow!("Forbidden"));
+ anyhow::bail!("Forbidden");
} else if status.as_u16() >= 500 && status.as_u16() < 600 {
// If we encounter an error in the 500 range, retry after a delay.
// We've seen at least these in the wild from API providers:
@@ -626,10 +626,10 @@ impl CloudLanguageModel {
if retries_remaining == 0 {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- return Err(anyhow!(
+ anyhow::bail!(
"cloud language model completion failed after {} retries with status {status}: {body}",
Self::MAX_RETRIES
- ));
+ );
}
Timer::after(retry_delay).await;
@@ -251,7 +251,7 @@ impl DeepSeekLanguageModel {
};
let future = self.request_limiter.stream(async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing DeepSeek API Key"))?;
+ let api_key = api_key.context("Missing DeepSeek API Key")?;
let request =
deepseek::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
let response = request.await?;
@@ -355,7 +355,7 @@ impl LanguageModel for DeepSeekLanguageModel {
response
.choices
.first()
- .ok_or_else(|| anyhow!("Empty response"))
+ .context("Empty response")
.map(|choice| {
choice
.delta
@@ -279,7 +279,7 @@ impl GoogleLanguageModel {
};
async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing Google API key"))?;
+ let api_key = api_key.context("Missing Google API key")?;
let request = google_ai::stream_generate_content(
http_client.as_ref(),
&api_url,
@@ -351,7 +351,7 @@ impl LanguageModel for GoogleLanguageModel {
let api_url = settings.api_url.clone();
async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing Google API key"))?;
+ let api_key = api_key.context("Missing Google API key")?;
let response = google_ai::count_tokens(
http_client.as_ref(),
&api_url,
@@ -277,7 +277,7 @@ impl MistralLanguageModel {
};
let future = self.request_limiter.stream(async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing Mistral API Key"))?;
+ let api_key = api_key.context("Missing Mistral API Key")?;
let request =
mistral::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
let response = request.await?;
@@ -265,7 +265,7 @@ impl OpenAiLanguageModel {
};
let future = self.request_limiter.stream(async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing OpenAI API Key"))?;
+ let api_key = api_key.context("Missing OpenAI API Key")?;
let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
let response = request.await?;
Ok(response)
@@ -1,7 +1,7 @@
mod active_buffer_language;
pub use active_buffer_language::ActiveBufferLanguage;
-use anyhow::anyhow;
+use anyhow::Context as _;
use editor::Editor;
use file_finder::file_finder_settings::FileFinderSettings;
use file_icons::FileIcons;
@@ -192,12 +192,8 @@ impl PickerDelegate for LanguageSelectorDelegate {
let buffer = self.buffer.downgrade();
cx.spawn_in(window, async move |_, cx| {
let language = language.await?;
- let project = project
- .upgrade()
- .ok_or_else(|| anyhow!("project was dropped"))?;
- let buffer = buffer
- .upgrade()
- .ok_or_else(|| anyhow!("buffer was dropped"))?;
+ let project = project.upgrade().context("project was dropped")?;
+ let buffer = buffer.upgrade().context("buffer was dropped")?;
project.update(cx, |project, cx| {
project.set_language_for_buffer(&buffer, language, cx);
})
@@ -1,4 +1,4 @@
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use futures::StreamExt;
use gpui::{App, AsyncApp};
@@ -54,7 +54,7 @@ impl super::LspAdapter for CLspAdapter {
.assets
.iter()
.find(|asset| asset.name == asset_name)
- .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
+ .with_context(|| format!("no asset found matching {asset_name:?}"))?;
let version = GitHubLspBinaryVersion {
name: release.tag_name,
url: asset.browser_download_url.clone(),
@@ -80,12 +80,11 @@ impl super::LspAdapter for CLspAdapter {
.await
.context("error downloading release")?;
let mut file = File::create(&zip_path).await?;
- if !response.status().is_success() {
- Err(anyhow!(
- "download failed with status {}",
- response.status().to_string()
- ))?;
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "download failed with status {}",
+ response.status().to_string()
+ );
futures::io::copy(response.body_mut(), &mut file).await?;
let unzip_status = util::command::new_smol_command("unzip")
@@ -94,10 +93,7 @@ impl super::LspAdapter for CLspAdapter {
.output()
.await?
.status;
- if !unzip_status.success() {
- Err(anyhow!("failed to unzip clangd archive"))?;
- }
-
+ anyhow::ensure!(unzip_status.success(), "failed to unzip clangd archive");
remove_matching(&container_dir, |entry| entry != version_dir).await;
}
@@ -339,20 +335,17 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServ
last_clangd_dir = Some(entry.path());
}
}
- let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let clangd_dir = last_clangd_dir.context("no cached binary")?;
let clangd_bin = clangd_dir.join("bin/clangd");
- if clangd_bin.exists() {
- Ok(LanguageServerBinary {
- path: clangd_bin,
- env: None,
- arguments: vec![],
- })
- } else {
- Err(anyhow!(
- "missing clangd binary in directory {:?}",
- clangd_dir
- ))
- }
+ anyhow::ensure!(
+ clangd_bin.exists(),
+ "missing clangd binary in directory {clangd_dir:?}"
+ );
+ Ok(LanguageServerBinary {
+ path: clangd_bin,
+ env: None,
+ arguments: vec![],
+ })
})
.await
.log_err()
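
The clangd hunk above is one of many in this change that collapse an `if !cond { Err(anyhow!(...)) } else { Ok(...) }` branch into `anyhow::ensure!` followed by straight-line code. A small self-contained sketch of that shape (hypothetical paths, not the actual adapter code):

```rust
use anyhow::Result;
use std::path::{Path, PathBuf};

fn cached_clangd(clangd_dir: &Path) -> Result<PathBuf> {
    let clangd_bin = clangd_dir.join("bin/clangd");
    // Early-returns a formatted error unless the condition holds.
    anyhow::ensure!(
        clangd_bin.exists(),
        "missing clangd binary in directory {clangd_dir:?}"
    );
    Ok(clangd_bin)
}

fn main() {
    assert!(cached_clangd(Path::new("/nonexistent")).is_err());
}
```
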
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use futures::StreamExt;
use gpui::AsyncApp;
@@ -149,20 +149,17 @@ async fn get_cached_server_binary(
last_version_dir = Some(entry.path());
}
}
- let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let last_version_dir = last_version_dir.context("no cached binary")?;
let server_path = last_version_dir.join(SERVER_PATH);
- if server_path.exists() {
- Ok(LanguageServerBinary {
- path: node.binary_path().await?,
- env: None,
- arguments: server_binary_arguments(&server_path),
- })
- } else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- last_version_dir
- ))
- }
+ anyhow::ensure!(
+ server_path.exists(),
+ "missing executable in directory {last_version_dir:?}"
+ );
+ Ok(LanguageServerBinary {
+ path: node.binary_path().await?,
+ env: None,
+ arguments: server_binary_arguments(&server_path),
+ })
})
.await
.log_err()
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use collections::HashMap;
use futures::StreamExt;
@@ -107,7 +107,7 @@ impl super::LspAdapter for GoLspAdapter {
delegate.show_notification(NOTIFICATION_MESSAGE, cx);
})?
}
- return Err(anyhow!("cannot install gopls"));
+ anyhow::bail!("cannot install gopls");
}
Ok(())
}))
@@ -167,10 +167,9 @@ impl super::LspAdapter for GoLspAdapter {
String::from_utf8_lossy(&install_output.stdout),
String::from_utf8_lossy(&install_output.stderr)
);
-
- return Err(anyhow!(
+ anyhow::bail!(
"failed to install gopls with `go install`. Is `go` installed and in the PATH? Check logs for more information."
- ));
+ );
}
let installed_binary_path = gobin_dir.join(BINARY);
@@ -405,15 +404,12 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServ
}
}
- if let Some(path) = last_binary_path {
- Ok(LanguageServerBinary {
- path,
- arguments: server_binary_arguments(),
- env: None,
- })
- } else {
- Err(anyhow!("no cached binary"))
- }
+ let path = last_binary_path.context("no cached binary")?;
+ anyhow::Ok(LanguageServerBinary {
+ path,
+ arguments: server_binary_arguments(),
+ env: None,
+ })
})
.await
.log_err()
@@ -1,4 +1,4 @@
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
@@ -321,20 +321,17 @@ async fn get_cached_server_binary(
}
}
- let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let last_version_dir = last_version_dir.context("no cached binary")?;
let server_path = last_version_dir.join(SERVER_PATH);
- if server_path.exists() {
- Ok(LanguageServerBinary {
- path: node.binary_path().await?,
- env: None,
- arguments: server_binary_arguments(&server_path),
- })
- } else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- last_version_dir
- ))
- }
+ anyhow::ensure!(
+ server_path.exists(),
+ "missing executable in directory {last_version_dir:?}"
+ );
+ Ok(LanguageServerBinary {
+ path: node.binary_path().await?,
+ env: None,
+ arguments: server_binary_arguments(&server_path),
+ })
})
.await
.log_err()
@@ -430,7 +427,7 @@ impl LspAdapter for NodeVersionAdapter {
.http_client()
.get(&version.url, Default::default(), true)
.await
- .map_err(|err| anyhow!("error downloading release: {}", err))?;
+ .context("downloading release")?;
if version.url.ends_with(".zip") {
node_runtime::extract_zip(
&destination_container_path,
@@ -488,7 +485,7 @@ async fn get_cached_version_server_binary(container_dir: PathBuf) -> Option<Lang
}
anyhow::Ok(LanguageServerBinary {
- path: last.ok_or_else(|| anyhow!("no cached binary"))?,
+ path: last.context("no cached binary")?,
env: None,
arguments: Default::default(),
})
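
The `anyhow::Ok(...)` seen above is the crate's type-annotated `Ok` helper: inside a closure or async block with no written return type, it pins the error type to `anyhow::Error` without a turbofish. A tiny sketch of why it is needed (the closure here is illustrative):

```rust
use anyhow::Context as _;

fn main() -> anyhow::Result<()> {
    let doubled = |s: &str| {
        let n: i64 = s.trim().parse().context("not an integer")?;
        // Plain `Ok(...)` would leave the closure's error type ambiguous;
        // `anyhow::Ok` fixes it to `anyhow::Error`.
        anyhow::Ok(n * 2)
    };
    assert_eq!(doubled("21")?, 42);
    Ok(())
}
```
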
@@ -1,4 +1,4 @@
-use anyhow::ensure;
+use anyhow::{Context as _, ensure};
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
@@ -883,11 +883,11 @@ impl PyLspAdapter {
async fn ensure_venv(delegate: &dyn LspAdapterDelegate) -> Result<Arc<Path>> {
let python_path = Self::find_base_python(delegate)
.await
- .ok_or_else(|| anyhow!("Could not find Python installation for PyLSP"))?;
+ .context("Could not find Python installation for PyLSP")?;
let work_dir = delegate
.language_server_download_dir(&Self::SERVER_NAME)
.await
- .ok_or_else(|| anyhow!("Could not get working directory for PyLSP"))?;
+ .context("Could not get working directory for PyLSP")?;
let mut path = PathBuf::from(work_dir.as_ref());
path.push("pylsp-venv");
if !path.exists() {
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_trait::async_trait;
use collections::HashMap;
@@ -974,7 +974,7 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServ
}
anyhow::Ok(LanguageServerBinary {
- path: last.ok_or_else(|| anyhow!("no cached binary"))?,
+ path: last.context("no cached binary")?,
env: None,
arguments: Default::default(),
})
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use collections::HashMap;
use futures::StreamExt;
@@ -198,20 +198,17 @@ async fn get_cached_server_binary(
last_version_dir = Some(entry.path());
}
}
- let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let last_version_dir = last_version_dir.context("no cached binary")?;
let server_path = last_version_dir.join(SERVER_PATH);
- if server_path.exists() {
- Ok(LanguageServerBinary {
- path: node.binary_path().await?,
- env: None,
- arguments: server_binary_arguments(&server_path),
- })
- } else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- last_version_dir
- ))
- }
+ anyhow::ensure!(
+ server_path.exists(),
+ "missing executable in directory {last_version_dir:?}"
+ );
+ Ok(LanguageServerBinary {
+ path: node.binary_path().await?,
+ env: None,
+ arguments: server_binary_arguments(&server_path),
+ })
})
.await
.log_err()
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
@@ -315,10 +315,7 @@ async fn get_cached_ts_server_binary(
arguments: typescript_server_binary_arguments(&old_server_path),
})
} else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- container_dir
- ))
+ anyhow::bail!("missing executable in directory {container_dir:?}")
}
})
.await
@@ -491,7 +488,7 @@ impl LspAdapter for EsLintLspAdapter {
.http_client()
.get(&version.url, Default::default(), true)
.await
- .map_err(|err| anyhow!("error downloading release: {}", err))?;
+ .context("downloading release")?;
match Self::GITHUB_ASSET_KIND {
AssetKind::TarGz => {
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
@@ -529,7 +526,7 @@ impl LspAdapter for EsLintLspAdapter {
}
let mut dir = fs::read_dir(&destination_path).await?;
- let first = dir.next().await.ok_or(anyhow!("missing first file"))??;
+ let first = dir.next().await.context("missing first file")??;
let repo_root = destination_path.join("vscode-eslint");
fs::rename(first.path(), &repo_root).await?;
@@ -580,9 +577,10 @@ impl LspAdapter for EsLintLspAdapter {
#[cfg(target_os = "windows")]
async fn handle_symlink(src_dir: PathBuf, dest_dir: PathBuf) -> Result<()> {
- if fs::metadata(&src_dir).await.is_err() {
- return Err(anyhow!("Directory {} not present.", src_dir.display()));
- }
+ anyhow::ensure!(
+ fs::metadata(&src_dir).await.is_ok(),
+ "Directory {src_dir:?} is not present"
+ );
if fs::metadata(&dest_dir).await.is_ok() {
fs::remove_file(&dest_dir).await?;
}
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use async_trait::async_trait;
use collections::HashMap;
use gpui::AsyncApp;
@@ -284,18 +284,15 @@ async fn get_cached_ts_server_binary(
) -> Option<LanguageServerBinary> {
maybe!(async {
let server_path = container_dir.join(VtslsLspAdapter::SERVER_PATH);
- if server_path.exists() {
- Ok(LanguageServerBinary {
- path: node.binary_path().await?,
- env: None,
- arguments: typescript_server_binary_arguments(&server_path),
- })
- } else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- container_dir
- ))
- }
+ anyhow::ensure!(
+ server_path.exists(),
+ "missing executable in directory {container_dir:?}"
+ );
+ Ok(LanguageServerBinary {
+ path: node.binary_path().await?,
+ env: None,
+ arguments: typescript_server_binary_arguments(&server_path),
+ })
})
.await
.log_err()
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use futures::StreamExt;
use gpui::AsyncApp;
@@ -173,20 +173,17 @@ async fn get_cached_server_binary(
last_version_dir = Some(entry.path());
}
}
- let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let last_version_dir = last_version_dir.context("no cached binary")?;
let server_path = last_version_dir.join(SERVER_PATH);
- if server_path.exists() {
- Ok(LanguageServerBinary {
- path: node.binary_path().await?,
- env: None,
- arguments: server_binary_arguments(&server_path),
- })
- } else {
- Err(anyhow!(
- "missing executable in directory {:?}",
- last_version_dir
- ))
- }
+ anyhow::ensure!(
+ server_path.exists(),
+ "missing executable in directory {last_version_dir:?}"
+ );
+ Ok(LanguageServerBinary {
+ path: node.binary_path().await?,
+ env: None,
+ arguments: server_binary_arguments(&server_path),
+ })
})
.await
.log_err()
@@ -1,7 +1,7 @@
pub mod proto;
pub mod token;
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use async_trait::async_trait;
use prost::Message;
use reqwest::header::CONTENT_TYPE;
@@ -79,12 +79,12 @@ impl LiveKitClient {
Ok(Res::decode(response.bytes().await?)?)
} else {
log::error!("Response {}: {:?}", url, response.status());
- Err(anyhow!(
+ anyhow::bail!(
"POST {} failed with status code {:?}, {:?}",
url,
response.status(),
response.text().await
- ))
+ );
}
}
}
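
As in the LiveKit client hunk above, `return Err(anyhow!(...))` becomes `anyhow::bail!(...)`, which expands to the same early return but keeps error branches to a single statement. A minimal sketch under assumed inputs (the status code and body are stand-ins, not the real client types):

```rust
use anyhow::Result;

fn check_response(status: u16, body: &str) -> Result<()> {
    if status >= 400 {
        // Expands to `return Err(anyhow!(...))`.
        anyhow::bail!("POST failed with status code {status}: {body}");
    }
    Ok(())
}

fn main() {
    assert!(check_response(200, "ok").is_ok());
    assert!(check_response(500, "boom").is_err());
}
```
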
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use std::{
@@ -74,9 +74,7 @@ pub fn create(
video_grant: VideoGrant,
) -> Result<String> {
if video_grant.room_join.is_some() && identity.is_none() {
- Err(anyhow!(
- "identity is required for room_join grant, but it is none"
- ))?;
+ anyhow::bail!("identity is required for room_join grant, but it is none");
}
let now = SystemTime::now();
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use collections::HashMap;
use futures::{SinkExt, channel::mpsc};
use gpui::{App, AsyncApp, ScreenCaptureSource, ScreenCaptureStream, Task};
@@ -160,7 +160,7 @@ impl LocalParticipant {
})?
.await?
.map(LocalTrackPublication)
- .map_err(|error| anyhow::anyhow!("failed to publish track: {error}"))
+ .context("publishing a track")
}
pub async fn unpublish_track(
@@ -172,7 +172,7 @@ impl LocalParticipant {
Tokio::spawn(cx, async move { participant.unpublish_track(&sid).await })?
.await?
.map(LocalTrackPublication)
- .map_err(|error| anyhow::anyhow!("failed to unpublish track: {error}"))
+ .context("unpublishing a track")
}
}
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
use futures::channel::mpsc::UnboundedSender;
@@ -365,14 +365,14 @@ fn default_device(input: bool) -> Result<(cpal::Device, cpal::SupportedStreamCon
if input {
device = cpal::default_host()
.default_input_device()
- .ok_or_else(|| anyhow!("no audio input device available"))?;
+ .context("no audio input device available")?;
config = device
.default_input_config()
.context("failed to get default input config")?;
} else {
device = cpal::default_host()
.default_output_device()
- .ok_or_else(|| anyhow!("no audio output device available"))?;
+ .context("no audio output device available")?;
config = device
.default_output_config()
.context("failed to get default output config")?;
@@ -493,10 +493,7 @@ fn create_buffer_pool(
]);
pixel_buffer_pool::CVPixelBufferPool::new(None, Some(&buffer_attributes)).map_err(|cv_return| {
- anyhow!(
- "failed to create pixel buffer pool: CVReturn({})",
- cv_return
- )
+ anyhow::anyhow!("failed to create pixel buffer pool: CVReturn({cv_return})",)
})
}
@@ -707,7 +704,7 @@ mod macos {
}
impl super::DeviceChangeListenerApi for CoreAudioDefaultDeviceChangeListener {
- fn new(input: bool) -> gpui::Result<Self> {
+ fn new(input: bool) -> anyhow::Result<Self> {
let (tx, rx) = futures::channel::mpsc::unbounded();
let callback = Box::new(PropertyListenerCallbackWrapper(Box::new(move || {
@@ -1,7 +1,7 @@
use crate::{AudioStream, Participant, RemoteTrack, RoomEvent, TrackPublication};
use crate::mock_client::{participant::*, publication::*, track::*};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use collections::{BTreeMap, HashMap, HashSet, btree_map::Entry as BTreeEntry, hash_map::Entry};
use gpui::{App, AsyncApp, BackgroundExecutor};
@@ -69,7 +69,7 @@ impl TestServer {
e.insert(server.clone());
Ok(server)
} else {
- Err(anyhow!("a server with url {:?} already exists", url))
+ anyhow::bail!("a server with url {url:?} already exists");
}
}
@@ -77,7 +77,7 @@ impl TestServer {
Ok(SERVERS
.lock()
.get(url)
- .ok_or_else(|| anyhow!("no server found for url"))?
+ .context("no server found for url")?
.clone())
}
@@ -85,7 +85,7 @@ impl TestServer {
SERVERS
.lock()
.remove(&self.url)
- .ok_or_else(|| anyhow!("server with url {:?} does not exist", self.url))?;
+ .with_context(|| format!("server with url {:?} does not exist", self.url))?;
Ok(())
}
@@ -103,7 +103,7 @@ impl TestServer {
e.insert(Default::default());
Ok(())
} else {
- Err(anyhow!("room {:?} already exists", room))
+ anyhow::bail!("{room:?} already exists");
}
}
@@ -113,7 +113,7 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
server_rooms
.remove(&room)
- .ok_or_else(|| anyhow!("room {:?} does not exist", room))?;
+ .with_context(|| format!("room {room:?} does not exist"))?;
Ok(())
}
@@ -176,11 +176,7 @@ impl TestServer {
e.insert(client_room);
Ok(identity)
} else {
- Err(anyhow!(
- "{:?} attempted to join room {:?} twice",
- identity,
- room_name
- ))
+ anyhow::bail!("{identity:?} attempted to join room {room_name:?} twice");
}
}
@@ -193,13 +189,9 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
- room.client_rooms.remove(&identity).ok_or_else(|| {
- anyhow!(
- "{:?} attempted to leave room {:?} before joining it",
- identity,
- room_name
- )
+ .with_context(|| format!("room {room_name:?} does not exist"))?;
+ room.client_rooms.remove(&identity).with_context(|| {
+ format!("{identity:?} attempted to leave room {room_name:?} before joining it")
})?;
Ok(())
}
@@ -247,14 +239,10 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
- room.client_rooms.remove(&identity).ok_or_else(|| {
- anyhow!(
- "participant {:?} did not join room {:?}",
- identity,
- room_name
- )
- })?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
+ room.client_rooms
+ .remove(&identity)
+ .with_context(|| format!("participant {identity:?} did not join room {room_name:?}"))?;
Ok(())
}
@@ -269,7 +257,7 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
room.participant_permissions
.insert(ParticipantIdentity(identity), permission);
Ok(())
@@ -308,7 +296,7 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
let can_publish = room
.participant_permissions
@@ -317,9 +305,7 @@ impl TestServer {
.or(claims.video.can_publish)
.unwrap_or(true);
- if !can_publish {
- return Err(anyhow!("user is not allowed to publish"));
- }
+ anyhow::ensure!(can_publish, "user is not allowed to publish");
let sid: TrackSid = format!("TR_{}", nanoid::nanoid!(17)).try_into().unwrap();
let server_track = Arc::new(TestServerVideoTrack {
@@ -374,7 +360,7 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
let can_publish = room
.participant_permissions
@@ -383,9 +369,7 @@ impl TestServer {
.or(claims.video.can_publish)
.unwrap_or(true);
- if !can_publish {
- return Err(anyhow!("user is not allowed to publish"));
- }
+ anyhow::ensure!(can_publish, "user is not allowed to publish");
let sid: TrackSid = format!("TR_{}", nanoid::nanoid!(17)).try_into().unwrap();
let server_track = Arc::new(TestServerAudioTrack {
@@ -443,7 +427,7 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
if let Some(track) = room
.audio_tracks
.iter_mut()
@@ -513,11 +497,11 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
let client_room = room
.client_rooms
.get(&identity)
- .ok_or_else(|| anyhow!("not a participant in room"))?;
+ .context("not a participant in room")?;
Ok(room
.video_tracks
.iter()
@@ -536,11 +520,11 @@ impl TestServer {
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
- .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ .with_context(|| format!("room {room_name} does not exist"))?;
let client_room = room
.client_rooms
.get(&identity)
- .ok_or_else(|| anyhow!("not a participant in room"))?;
+ .context("not a participant in room")?;
Ok(room
.audio_tracks
.iter()
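
Throughout the test-server hunks above, messages that interpolate runtime values use `.with_context(|| format!(...))` rather than `.context(...)`: the closure defers building the string to the error path. A standalone sketch of that distinction (the room map is a stand-in, not the real `TestServer` state):

```rust
use anyhow::{Context as _, Result};
use std::collections::HashMap;

fn lookup<'a>(rooms: &'a HashMap<String, u32>, room_name: &str) -> Result<&'a u32> {
    rooms
        .get(room_name)
        // `format!` runs only when the lookup fails.
        .with_context(|| format!("room {room_name:?} does not exist"))
}

fn main() -> Result<()> {
    let rooms = HashMap::from([("lobby".to_string(), 3_u32)]);
    assert_eq!(*lookup(&rooms, "lobby")?, 3);
    assert!(lookup(&rooms, "attic").is_err());
    Ok(())
}
```
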
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http};
use serde::{Deserialize, Serialize};
@@ -25,7 +25,7 @@ impl TryFrom<String> for Role {
"assistant" => Ok(Self::Assistant),
"system" => Ok(Self::System),
"tool" => Ok(Self::Tool),
- _ => Err(anyhow!("invalid role '{value}'")),
+ _ => anyhow::bail!("invalid role '{value}'"),
}
}
}
@@ -253,11 +253,11 @@ pub async fn complete(
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to API: {} {}",
response.status(),
body_str
- ))
+ );
}
}
@@ -304,12 +304,11 @@ pub async fn stream_chat_completion(
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
-
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to LM Studio API: {} {}",
response.status(),
body,
- ))
+ );
}
}
@@ -331,17 +330,15 @@ pub async fn get_models(
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- if response.status().is_success() {
- let response: ListModelsResponse =
- serde_json::from_str(&body).context("Unable to parse LM Studio models response")?;
- Ok(response.data)
- } else {
- Err(anyhow!(
- "Failed to connect to LM Studio API: {} {}",
- response.status(),
- body,
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "Failed to connect to LM Studio API: {} {}",
+ response.status(),
+ body,
+ );
+ let response: ListModelsResponse =
+ serde_json::from_str(&body).context("Unable to parse LM Studio models response")?;
+ Ok(response.data)
}
/// Sends an empty request to LM Studio to trigger loading the model
@@ -367,11 +364,10 @@ pub async fn preload_model(client: Arc<dyn HttpClient>, api_url: &str, model: &s
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
-
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to LM Studio API: {} {}",
response.status(),
body,
- ))
+ );
}
}
@@ -1,7 +1,7 @@
use std::str;
use std::sync::Arc;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::HashMap;
use futures::{
AsyncBufReadExt, AsyncRead, AsyncReadExt as _,
@@ -35,7 +35,7 @@ where
}
if reader.read_until(b'\n', buffer).await? == 0 {
- return Err(anyhow!("cannot read LSP message headers"));
+ anyhow::bail!("cannot read LSP message headers");
}
}
}
@@ -82,7 +82,7 @@ impl LspStdoutHandler {
.split('\n')
.find(|line| line.starts_with(CONTENT_LEN_HEADER))
.and_then(|line| line.strip_prefix(CONTENT_LEN_HEADER))
- .ok_or_else(|| anyhow!("invalid LSP message header {headers:?}"))?
+ .with_context(|| format!("invalid LSP message header {headers:?}"))?
.trim_end()
.parse()?;
@@ -352,7 +352,7 @@ impl LanguageServer {
let stdout = server.stdout.take().unwrap();
let stderr = server.stderr.take().unwrap();
let root_uri = Url::from_file_path(&working_dir)
- .map_err(|_| anyhow!("{} is not a valid URI", working_dir.display()))?;
+ .map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?;
let server = Self::new_internal(
server_id,
server_name,
@@ -11,7 +11,7 @@ pub mod core_media {
CMItemIndex, CMSampleTimingInfo, CMTime, CMTimeMake, CMVideoCodecType,
kCMSampleAttachmentKey_NotSync, kCMTimeInvalid, kCMVideoCodecType_H264,
};
- use anyhow::{Result, anyhow};
+ use anyhow::Result;
use core_foundation::{
array::{CFArray, CFArrayRef},
base::{CFTypeID, OSStatus, TCFType},
@@ -69,12 +69,11 @@ pub mod core_media {
index as CMItemIndex,
&mut timing_info,
);
-
- if result == 0 {
- Ok(timing_info)
- } else {
- Err(anyhow!("error getting sample timing info, code {}", result))
- }
+ anyhow::ensure!(
+ result == 0,
+ "error getting sample timing info, code {result}"
+ );
+ Ok(timing_info)
}
}
@@ -153,11 +152,8 @@ pub mod core_media {
ptr::null_mut(),
ptr::null_mut(),
);
- if result == 0 {
- Ok(std::slice::from_raw_parts(bytes, len))
- } else {
- Err(anyhow!("error getting parameter set, code: {}", result))
- }
+ anyhow::ensure!(result == 0, "error getting parameter set, code: {result}");
+ Ok(std::slice::from_raw_parts(bytes, len))
}
}
}
@@ -231,7 +227,7 @@ pub mod core_video {
kCVPixelFormatType_32BGRA, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8Planar,
};
- use anyhow::{Result, anyhow};
+ use anyhow::Result;
use core_foundation::{
base::kCFAllocatorDefault, dictionary::CFDictionaryRef, mach_port::CFAllocatorRef,
};
@@ -267,11 +263,11 @@ pub mod core_video {
&mut this,
)
};
- if result == kCVReturnSuccess {
- unsafe { Ok(CVMetalTextureCache::wrap_under_create_rule(this)) }
- } else {
- Err(anyhow!("could not create texture cache, code: {}", result))
- }
+ anyhow::ensure!(
+ result == kCVReturnSuccess,
+ "could not create texture cache, code: {result}"
+ );
+ unsafe { Ok(CVMetalTextureCache::wrap_under_create_rule(this)) }
}
/// # Safety
@@ -300,11 +296,11 @@ pub mod core_video {
&mut this,
)
};
- if result == kCVReturnSuccess {
- unsafe { Ok(CVMetalTexture::wrap_under_create_rule(this)) }
- } else {
- Err(anyhow!("could not create texture, code: {}", result))
- }
+ anyhow::ensure!(
+ result == kCVReturnSuccess,
+ "could not create texture, code: {result}"
+ );
+ unsafe { Ok(CVMetalTexture::wrap_under_create_rule(this)) }
}
}
@@ -14,7 +14,7 @@
//!
//! You only need to write replacement logic for x-1 to x because you can be certain that, internally, every user will be at x-1, regardless of their on disk state.
-use anyhow::{Context, Result};
+use anyhow::{Context as _, Result};
use std::{cmp::Reverse, ops::Range, sync::LazyLock};
use streaming_iterator::StreamingIterator;
use tree_sitter::{Query, QueryMatch};
@@ -26,7 +26,7 @@ impl TryFrom<String> for Role {
"assistant" => Ok(Self::Assistant),
"system" => Ok(Self::System),
"tool" => Ok(Self::Tool),
- _ => Err(anyhow!("invalid role '{value}'")),
+ _ => anyhow::bail!("invalid role '{value}'"),
}
}
}
@@ -84,7 +84,7 @@ impl Model {
"mistral-small-latest" => Ok(Self::MistralSmallLatest),
"open-mistral-nemo" => Ok(Self::OpenMistralNemo),
"open-codestral-mamba" => Ok(Self::OpenCodestralMamba),
- _ => Err(anyhow!("invalid model id")),
+ invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
}
}
@@ -363,10 +363,10 @@ pub async fn stream_completion(
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to Mistral API: {} {}",
response.status(),
body,
- ))
+ );
}
}
@@ -1,6 +1,6 @@
mod archive;
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow, bail};
pub use archive::extract_zip;
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
@@ -157,7 +157,7 @@ impl NodeRuntime {
info.dist_tags
.latest
.or_else(|| info.versions.pop())
- .ok_or_else(|| anyhow!("no version found for npm package {}", name))
+ .with_context(|| format!("no version found for npm package {name}"))
}
pub async fn npm_install_packages(
@@ -411,13 +411,14 @@ impl NodeRuntimeTrait for ManagedNodeRuntime {
let npm_file = self.installation_path.join(Self::NPM_PATH);
let env_path = path_with_node_binary_prepended(&node_binary).unwrap_or_default();
- if smol::fs::metadata(&node_binary).await.is_err() {
- return Err(anyhow!("missing node binary file"));
- }
-
- if smol::fs::metadata(&npm_file).await.is_err() {
- return Err(anyhow!("missing npm file"));
- }
+ anyhow::ensure!(
+ smol::fs::metadata(&node_binary).await.is_ok(),
+ "missing node binary file"
+ );
+ anyhow::ensure!(
+ smol::fs::metadata(&npm_file).await.is_ok(),
+ "missing npm file"
+ );
let node_ca_certs = env::var(NODE_CA_CERTS_ENV_VAR).unwrap_or_else(|_| String::new());
@@ -443,22 +444,20 @@ impl NodeRuntimeTrait for ManagedNodeRuntime {
let mut output = attempt().await;
if output.is_err() {
output = attempt().await;
- if output.is_err() {
- return Err(anyhow!(
- "failed to launch npm subcommand {subcommand} subcommand\nerr: {:?}",
- output.err()
- ));
- }
+ anyhow::ensure!(
+ output.is_ok(),
+ "failed to launch npm subcommand {subcommand} subcommand\nerr: {:?}",
+ output.err()
+ );
}
if let Ok(output) = &output {
- if !output.status.success() {
- return Err(anyhow!(
- "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}",
- String::from_utf8_lossy(&output.stdout),
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}",
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr)
+ );
}
output.map_err(|e| anyhow!("{e}"))
@@ -559,14 +558,12 @@ impl NodeRuntimeTrait for SystemNodeRuntime {
.args(args);
configure_npm_command(&mut command, directory, proxy);
let output = command.output().await?;
- if !output.status.success() {
- return Err(anyhow!(
- "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}",
- String::from_utf8_lossy(&output.stdout),
- String::from_utf8_lossy(&output.stderr)
- ));
- }
-
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}",
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr)
+ );
Ok(output)
}
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http};
use serde::{Deserialize, Serialize};
@@ -242,11 +242,11 @@ pub async fn complete(
Ok(response_message)
} else {
let body_str = std::str::from_utf8(&body)?;
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to API: {} {}",
response.status(),
body_str
- ))
+ );
}
}
@@ -276,12 +276,11 @@ pub async fn stream_chat_completion(
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
-
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to Ollama API: {} {}",
response.status(),
body,
- ))
+ );
}
}
@@ -303,18 +302,15 @@ pub async fn get_models(
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- if response.status().is_success() {
- let response: LocalModelsResponse =
- serde_json::from_str(&body).context("Unable to parse Ollama tag listing")?;
-
- Ok(response.models)
- } else {
- Err(anyhow!(
- "Failed to connect to Ollama API: {} {}",
- response.status(),
- body,
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "Failed to connect to Ollama API: {} {}",
+ response.status(),
+ body,
+ );
+ let response: LocalModelsResponse =
+ serde_json::from_str(&body).context("Unable to parse Ollama tag listing")?;
+ Ok(response.models)
}
/// Fetch details of a model, used to determine model capabilities
@@ -332,16 +328,14 @@ pub async fn show_model(client: &dyn HttpClient, api_url: &str, model: &str) ->
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- if response.status().is_success() {
- let details: ModelShow = serde_json::from_str(body.as_str())?;
- Ok(details)
- } else {
- Err(anyhow!(
- "Failed to connect to Ollama API: {} {}",
- response.status(),
- body,
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "Failed to connect to Ollama API: {} {}",
+ response.status(),
+ body,
+ );
+ let details: ModelShow = serde_json::from_str(body.as_str())?;
+ Ok(details)
}
/// Sends an empty request to Ollama to trigger loading the model
@@ -366,12 +360,11 @@ pub async fn preload_model(client: Arc<dyn HttpClient>, api_url: &str, model: &s
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
-
- Err(anyhow!(
+ anyhow::bail!(
"Failed to connect to Ollama API: {} {}",
response.status(),
body,
- ))
+ );
}
}
@@ -37,7 +37,7 @@ impl TryFrom<String> for Role {
"assistant" => Ok(Self::Assistant),
"system" => Ok(Self::System),
"tool" => Ok(Self::Tool),
- _ => Err(anyhow!("invalid role '{value}'")),
+ _ => anyhow::bail!("invalid role '{value}'"),
}
}
}
@@ -118,7 +118,7 @@ impl Model {
"o3-mini" => Ok(Self::O3Mini),
"o3" => Ok(Self::O3),
"o4-mini" => Ok(Self::O4Mini),
- _ => Err(anyhow!("invalid model id")),
+ invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
}
}
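
The role and model-id parsers above use `anyhow::bail!` directly in a match arm; because the macro diverges, it can sit beside arms that evaluate to `Ok(...)`, and binding the fallthrough value lets the message echo the rejected input. A minimal sketch (the accepted strings are illustrative):

```rust
use anyhow::Result;

fn validate_role(value: &str) -> Result<()> {
    match value {
        "user" | "assistant" | "system" | "tool" => Ok(()),
        // `bail!` diverges, so this arm type-checks against the `Ok(())` arms.
        invalid => anyhow::bail!("invalid role '{invalid}'"),
    }
}

fn main() {
    assert!(validate_role("tool").is_ok());
    assert!(validate_role("wizard").is_err());
}
```
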
@@ -491,16 +491,15 @@ pub async fn complete(
}
match serde_json::from_str::<OpenAiResponse>(&body) {
- Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
+ Ok(response) if !response.error.message.is_empty() => anyhow::bail!(
"Failed to connect to OpenAI API: {}",
response.error.message,
- )),
-
- _ => Err(anyhow!(
+ ),
+ _ => anyhow::bail!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
- )),
+ ),
}
}
}
@@ -541,16 +540,15 @@ pub async fn complete_text(
}
match serde_json::from_str::<OpenAiResponse>(&body) {
- Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
+ Ok(response) if !response.error.message.is_empty() => anyhow::bail!(
"Failed to connect to OpenAI API: {}",
response.error.message,
- )),
-
- _ => Err(anyhow!(
+ ),
+ _ => anyhow::bail!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
- )),
+ ),
}
}
}
@@ -672,11 +670,11 @@ pub async fn stream_completion(
response.error.message,
)),
- _ => Err(anyhow!(
+ _ => anyhow::bail!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
- )),
+ ),
}
}
}
@@ -732,16 +730,14 @@ pub fn embed<'a>(
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- if response.status().is_success() {
- let response: OpenAiEmbeddingResponse =
- serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
- Ok(response)
- } else {
- Err(anyhow!(
- "error during embedding, status: {:?}, body: {:?}",
- response.status(),
- body
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "error during embedding, status: {:?}, body: {:?}",
+ response.status(),
+ body
+ );
+ let response: OpenAiEmbeddingResponse =
+ serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
+ Ok(response)
}
}
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use collections::{HashMap, HashSet};
use fs::Fs;
use gpui::{AsyncApp, Entity};
@@ -421,7 +421,7 @@ impl Prettier {
prettier_parser = prettier_parser.or_else(|| buffer_language.and_then(|language| language.prettier_parser_name()));
if prettier_parser.is_none() {
log::error!("Formatting unsaved file with prettier failed. No prettier parser configured for language {buffer_language:?}");
- return Err(anyhow!("Cannot determine prettier parser for unsaved file"));
+ anyhow::bail!("Cannot determine prettier parser for unsaved file");
}
}
@@ -58,7 +58,7 @@ struct RemoteBufferStore {
project_id: u64,
loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
remote_buffer_listeners:
- HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
+ HashMap<BufferId, Vec<oneshot::Sender<anyhow::Result<Entity<Buffer>>>>>,
worktree_store: Entity<WorktreeStore>,
}
@@ -152,11 +152,7 @@ impl RemoteBufferStore {
capability: Capability,
cx: &mut Context<BufferStore>,
) -> Result<Option<Entity<Buffer>>> {
- match envelope
- .payload
- .variant
- .ok_or_else(|| anyhow!("missing variant"))?
- {
+ match envelope.payload.variant.context("missing variant")? {
proto::create_buffer_for_peer::Variant::State(mut state) => {
let buffer_id = BufferId::new(state.id)?;
@@ -168,8 +164,8 @@ impl RemoteBufferStore {
.worktree_store
.read(cx)
.worktree_for_id(worktree_id, cx)
- .ok_or_else(|| {
- anyhow!("no worktree found for id {}", file.worktree_id)
+ .with_context(|| {
+ format!("no worktree found for id {}", file.worktree_id)
})?;
buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
as Arc<dyn language::File>);
@@ -197,8 +193,8 @@ impl RemoteBufferStore {
.loading_remote_buffers_by_id
.get(&buffer_id)
.cloned()
- .ok_or_else(|| {
- anyhow!(
+ .with_context(|| {
+ format!(
"received chunk for buffer {} without initial state",
chunk.buffer_id
)
@@ -341,10 +337,7 @@ impl RemoteBufferStore {
});
cx.spawn(async move |this, cx| {
- let response = request
- .await?
- .transaction
- .ok_or_else(|| anyhow!("missing transaction"))?;
+ let response = request.await?.transaction.context("missing transaction")?;
this.update(cx, |this, cx| {
this.deserialize_project_transaction(response, push_to_history, cx)
})?
@@ -913,8 +906,8 @@ impl BufferStore {
if is_remote {
return Ok(());
} else {
- debug_panic!("buffer {} was already registered", remote_id);
- Err(anyhow!("buffer {} was already registered", remote_id))?;
+ debug_panic!("buffer {remote_id} was already registered");
+ anyhow::bail!("buffer {remote_id} was already registered");
}
}
entry.insert(open_buffer);
@@ -963,7 +956,7 @@ impl BufferStore {
pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
self.get(buffer_id)
- .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
+ .with_context(|| format!("unknown buffer id {buffer_id}"))
}
pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
@@ -1279,9 +1272,9 @@ impl BufferStore {
capability: Capability,
cx: &mut Context<Self>,
) -> Result<()> {
- let Some(remote) = self.as_remote_mut() else {
- return Err(anyhow!("buffer store is not a remote"));
- };
+ let remote = self
+ .as_remote_mut()
+ .context("buffer store is not a remote")?;
if let Some(buffer) =
remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
@@ -1303,12 +1296,12 @@ impl BufferStore {
this.update(&mut cx, |this, cx| {
let payload = envelope.payload.clone();
if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
- let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
+ let file = payload.file.context("invalid file")?;
let worktree = this
.worktree_store
.read(cx)
.worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
- .ok_or_else(|| anyhow!("no such worktree"))?;
+ .context("no such worktree")?;
let file = File::from_proto(file, worktree, cx)?;
let old_file = buffer.update(cx, |buffer, cx| {
let old_file = buffer.file().cloned();
@@ -1445,7 +1438,7 @@ impl BufferStore {
let mtime = envelope.payload.mtime.clone().map(|time| time.into());
let line_ending = deserialize_line_ending(
proto::LineEnding::from_i32(envelope.payload.line_ending)
- .ok_or_else(|| anyhow!("missing line ending"))?,
+ .context("missing line ending")?,
);
this.update(&mut cx, |this, cx| {
if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
@@ -1495,7 +1488,7 @@ impl BufferStore {
let buffer_id = BufferId::new(*buffer_id)?;
buffers.insert(this.get_existing(buffer_id)?);
}
- Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
+ anyhow::Ok(this.reload_buffers(buffers, false, cx))
})??;
let project_transaction = reload.await?;
@@ -233,9 +233,10 @@ impl ContextServerStore {
}
pub fn stop_server(&mut self, id: &ContextServerId, cx: &mut Context<Self>) -> Result<()> {
- let Some(state) = self.servers.remove(id) else {
- return Err(anyhow::anyhow!("Context server not found"));
- };
+ let state = self
+ .servers
+ .remove(id)
+ .context("Context server not found")?;
let server = state.server();
let configuration = state.configuration();
@@ -336,9 +337,10 @@ impl ContextServerStore {
}
fn remove_server(&mut self, id: &ContextServerId, cx: &mut Context<Self>) -> Result<()> {
- let Some(state) = self.servers.remove(id) else {
- return Err(anyhow::anyhow!("Context server not found"));
- };
+ let state = self
+ .servers
+ .remove(id)
+ .context("Context server not found")?;
drop(state);
cx.emit(Event::ServerStatusChanged {
server_id: id.clone(),
@@ -1097,7 +1099,7 @@ mod tests {
self.tx
.unbounded_send(response.to_string())
- .map_err(|e| anyhow::anyhow!("Failed to send message: {}", e))?;
+ .context("sending a message")?;
}
}
}
@@ -1,7 +1,7 @@
//! Module for managing breakpoints in a project.
//!
//! Breakpoints are separate from a session because they're not associated with any particular debug session. They can also be set up without a session running.
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
pub use breakpoints_in_file::{BreakpointSessionState, BreakpointWithPosition};
use breakpoints_in_file::{BreakpointsInFile, StatefulBreakpoint};
use collections::{BTreeMap, HashMap};
@@ -219,7 +219,7 @@ impl BreakpointStore {
})
.ok()
.flatten()
- .ok_or_else(|| anyhow!("Invalid project path"))?
+ .context("Invalid project path")?
.await?;
breakpoints.update(&mut cx, move |this, cx| {
@@ -272,25 +272,25 @@ impl BreakpointStore {
.update(&mut cx, |this, cx| {
this.project_path_for_absolute_path(message.payload.path.as_ref(), cx)
})?
- .ok_or_else(|| anyhow!("Could not resolve provided abs path"))?;
+ .context("Could not resolve provided abs path")?;
let buffer = this
.update(&mut cx, |this, cx| {
this.buffer_store().read(cx).get_by_path(&path, cx)
})?
- .ok_or_else(|| anyhow!("Could not find buffer for a given path"))?;
+ .context("Could not find buffer for a given path")?;
let breakpoint = message
.payload
.breakpoint
- .ok_or_else(|| anyhow!("Breakpoint not present in RPC payload"))?;
+ .context("Breakpoint not present in RPC payload")?;
let position = language::proto::deserialize_anchor(
breakpoint
.position
.clone()
- .ok_or_else(|| anyhow!("Anchor not present in RPC payload"))?,
+ .context("Anchor not present in RPC payload")?,
)
- .ok_or_else(|| anyhow!("Anchor deserialization failed"))?;
- let breakpoint = Breakpoint::from_proto(breakpoint)
- .ok_or_else(|| anyhow!("Could not deserialize breakpoint"))?;
+ .context("Anchor deserialization failed")?;
+ let breakpoint =
+ Breakpoint::from_proto(breakpoint).context("Could not deserialize breakpoint")?;
breakpoints.update(&mut cx, |this, cx| {
this.toggle_breakpoint(
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::{Ok, Result, anyhow};
+use anyhow::{Context as _, Ok, Result};
use dap::{
Capabilities, ContinueArguments, ExceptionFilterOptions, InitializeRequestArguments,
InitializeRequestArgumentsPathFormat, NextArguments, SetVariableResponse, SourceBreakpoint,
@@ -1766,7 +1766,7 @@ impl DapCommand for LocationsCommand {
source: response
.source
.map(<dap::Source as ProtoConversion>::from_proto)
- .ok_or_else(|| anyhow!("Missing `source` field in Locations proto"))?,
+ .context("Missing `source` field in Locations proto")?,
line: response.line,
column: response.column,
end_line: response.end_line,
@@ -237,9 +237,7 @@ impl DapStore {
let binary = DebugAdapterBinary::from_proto(response)?;
let mut ssh_command = ssh_client.update(cx, |ssh, _| {
anyhow::Ok(SshCommand {
- arguments: ssh
- .ssh_args()
- .ok_or_else(|| anyhow!("SSH arguments not found"))?,
+ arguments: ssh.ssh_args().context("SSH arguments not found")?,
})
})??;
@@ -316,10 +314,10 @@ impl DapStore {
return Ok(result);
}
- Err(anyhow!(
+ anyhow::bail!(
"None of the locators for task `{}` completed successfully",
build_command.label
- ))
+ )
})
} else {
Task::ready(Err(anyhow!(
@@ -735,7 +733,7 @@ impl DapStore {
let task = envelope
.payload
.build_command
- .ok_or_else(|| anyhow!("missing definition"))?;
+ .context("missing definition")?;
let build_task = SpawnInTerminal::from_proto(task);
let locator = envelope.payload.locator;
let request = this
@@ -753,10 +751,7 @@ impl DapStore {
mut cx: AsyncApp,
) -> Result<proto::DebugAdapterBinary> {
let definition = DebugTaskDefinition::from_proto(
- envelope
- .payload
- .definition
- .ok_or_else(|| anyhow!("missing definition"))?,
+ envelope.payload.definition.context("missing definition")?,
)?;
let (tx, mut rx) = mpsc::unbounded();
let session_id = envelope.payload.session_id;
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use dap::{DapLocator, DebugRequest, adapters::DebugAdapterName};
use gpui::SharedString;
@@ -90,11 +90,10 @@ impl DapLocator for CargoLocator {
}
async fn run(&self, build_config: SpawnInTerminal) -> Result<DebugRequest> {
- let Some(cwd) = build_config.cwd.clone() else {
- return Err(anyhow!(
- "Couldn't get cwd from debug config which is needed for locators"
- ));
- };
+ let cwd = build_config
+ .cwd
+ .clone()
+ .context("Couldn't get cwd from debug config which is needed for locators")?;
let builder = ShellBuilder::new(true, &build_config.shell).non_interactive();
let (program, args) = builder.build(
"cargo".into(),
@@ -119,9 +118,7 @@ impl DapLocator for CargoLocator {
}
let status = child.status().await?;
- if !status.success() {
- return Err(anyhow::anyhow!("Cargo command failed"));
- }
+ anyhow::ensure!(status.success(), "Cargo command failed");
let executables = output
.lines()
@@ -133,9 +130,10 @@ impl DapLocator for CargoLocator {
.map(String::from)
})
.collect::<Vec<_>>();
- if executables.is_empty() {
- return Err(anyhow!("Couldn't get executable in cargo locator"));
- };
+ anyhow::ensure!(
+ !executables.is_empty(),
+ "Couldn't get executable in cargo locator"
+ );
let is_test = build_config.args.first().map_or(false, |arg| arg == "test");
let mut test_name = None;
@@ -161,7 +159,7 @@ impl DapLocator for CargoLocator {
};
let Some(executable) = executable.or_else(|| executables.first().cloned()) else {
- return Err(anyhow!("Couldn't get executable in cargo locator"));
+ anyhow::bail!("Couldn't get executable in cargo locator");
};
let args = test_name.into_iter().collect();
@@ -12,7 +12,7 @@ use super::dap_command::{
TerminateThreadsCommand, ThreadsCommand, VariablesCommand,
};
use super::dap_store::DapStore;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet, IndexMap, IndexSet};
use dap::adapters::{DebugAdapterBinary, DebugAdapterName};
use dap::messages::Response;
@@ -487,8 +487,7 @@ impl Mode {
match self {
Mode::Running(debug_adapter_client) => debug_adapter_client.request(request),
Mode::Building => Task::ready(Err(anyhow!(
- "no adapter running to send request: {:?}",
- request
+ "no adapter running to send request: {request:?}"
))),
}
}
@@ -1736,7 +1735,7 @@ impl Session {
anyhow::Ok(
task.await
.map(|response| response.targets)
- .ok_or_else(|| anyhow!("failed to fetch completions"))?,
+ .context("failed to fetch completions")?,
)
})
}
@@ -976,7 +976,7 @@ impl GitStore {
return cx.spawn(async move |cx| {
let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
- .map_err(|_| anyhow!("no permalink available"))
+ .context("no permalink available")
});
// TODO remote case
@@ -997,23 +997,20 @@ impl GitStore {
RepositoryState::Local { backend, .. } => {
let origin_url = backend
.remote_url(&remote)
- .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
+ .with_context(|| format!("remote \"{remote}\" not found"))?;
- let sha = backend
- .head_sha()
- .await
- .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
+ let sha = backend.head_sha().await.context("reading HEAD SHA")?;
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
let (provider, remote) =
parse_git_remote_url(provider_registry, &origin_url)
- .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
+ .context("parsing Git remote URL")?;
- let path = repo_path
- .to_str()
- .ok_or_else(|| anyhow!("failed to convert path to string"))?;
+ let path = repo_path.to_str().with_context(|| {
+ format!("converting repo path {repo_path:?} to string")
+ })?;
Ok(provider.build_permalink(
remote,
@@ -1966,7 +1963,7 @@ impl GitStore {
let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
debug_panic!("no askpass found");
- return Err(anyhow::anyhow!("no askpass found"));
+ anyhow::bail!("no askpass found");
};
let response = askpass.ask_password(envelope.payload.prompt).await?;
@@ -2035,7 +2032,7 @@ impl GitStore {
let buffer = this.buffer_store.read(cx).get(buffer_id)?;
Some(this.open_unstaged_diff(buffer, cx))
})?
- .ok_or_else(|| anyhow!("no such buffer"))?
+ .context("missing buffer")?
.await?;
this.update(&mut cx, |this, _| {
let shared_diffs = this
@@ -2059,7 +2056,7 @@ impl GitStore {
let buffer = this.buffer_store.read(cx).get(buffer_id)?;
Some(this.open_uncommitted_diff(buffer, cx))
})?
- .ok_or_else(|| anyhow!("no such buffer"))?
+ .context("missing buffer")?
.await?;
this.update(&mut cx, |this, _| {
let shared_diffs = this
@@ -3915,7 +3912,7 @@ impl Repository {
self.send_job(None, |repo, _cx| async move {
match repo {
RepositoryState::Local { backend, .. } => backend.checkpoint().await,
- RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
+ RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}
@@ -3929,7 +3926,7 @@ impl Repository {
RepositoryState::Local { backend, .. } => {
backend.restore_checkpoint(checkpoint).await
}
- RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
+ RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}
@@ -3984,7 +3981,7 @@ impl Repository {
RepositoryState::Local { backend, .. } => {
backend.compare_checkpoints(left, right).await
}
- RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
+ RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}
@@ -4001,7 +3998,7 @@ impl Repository {
.diff_checkpoints(base_checkpoint, target_checkpoint)
.await
}
- RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
+ RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}
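A minimal sketch of the `Context` trait usage that the surrounding hunks switch to (illustrative only; the function names here are made up): `.context(...)` turns a `None` or an `Err` into an `anyhow::Error` carrying the given message, while `.with_context(...)` builds that message lazily, only on the error path.

```rust
use anyhow::{Context as _, Result};

// Hypothetical stand-ins for the real lookups; for illustration only.
fn head_sha(sha: Option<&str>) -> Result<String> {
    // Option<T> -> Result<T, anyhow::Error> with a fixed message.
    let sha = sha.context("reading HEAD SHA")?;
    Ok(sha.to_owned())
}

fn remote_url(name: &str, url: Option<&str>) -> Result<String> {
    // The closure only runs when `url` is None, so the format! costs nothing on success.
    let url = url.with_context(|| format!("remote \"{name}\" not found"))?;
    Ok(url.to_owned())
}

fn main() {
    assert!(head_sha(None).is_err());
    assert_eq!(
        remote_url("origin", Some("git@example.com:a/b")).unwrap(),
        "git@example.com:a/b"
    );
}
```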
@@ -4064,7 +4061,7 @@ impl Repository {
cx.spawn(async move |_, cx| {
let environment = project_environment
.upgrade()
- .ok_or_else(|| anyhow!("missing project environment"))?
+ .context("missing project environment")?
.update(cx, |project_environment, cx| {
project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
})?
@@ -4076,7 +4073,7 @@ impl Repository {
let backend = cx
.background_spawn(async move {
fs.open_repo(&dot_git_abs_path)
- .ok_or_else(|| anyhow!("failed to build repository"))
+ .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
})
.await?;
@@ -4215,8 +4212,7 @@ impl Repository {
buffer_id: buffer_id.to_proto(),
})
.await?;
- let mode =
- Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
+ let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
let bases = match mode {
Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
Mode::IndexAndHead => DiffBasesChange::SetEach {
@@ -4353,7 +4349,7 @@ fn get_permalink_in_rust_registry_src(
let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
- .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
+ .context("parsing package.repository field of manifest")?;
let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
let permalink = provider.build_permalink(
remote,
@@ -4597,7 +4593,7 @@ fn status_from_proto(
let Some(variant) = status.and_then(|status| status.variant) else {
let code = proto::GitStatus::from_i32(simple_status)
- .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
+ .with_context(|| format!("Invalid git status code: {simple_status}"))?;
let result = match code {
proto::GitStatus::Added => TrackedStatus {
worktree_status: StatusCode::Added,
@@ -4619,7 +4615,7 @@ fn status_from_proto(
index_status: StatusCode::Unmodified,
}
.into(),
- _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
+ _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
};
return Ok(result);
};
@@ -4631,12 +4627,12 @@ fn status_from_proto(
let [first_head, second_head] =
[unmerged.first_head, unmerged.second_head].map(|head| {
let code = proto::GitStatus::from_i32(head)
- .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
+ .with_context(|| format!("Invalid git status code: {head}"))?;
let result = match code {
proto::GitStatus::Added => UnmergedStatusCode::Added,
proto::GitStatus::Updated => UnmergedStatusCode::Updated,
proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
- _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
+ _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
};
Ok(result)
});
@@ -4651,7 +4647,7 @@ fn status_from_proto(
let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
.map(|status| {
let code = proto::GitStatus::from_i32(status)
- .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
+ .with_context(|| format!("Invalid git status code: {status}"))?;
let result = match code {
proto::GitStatus::Modified => StatusCode::Modified,
proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
@@ -4660,7 +4656,7 @@ fn status_from_proto(
proto::GitStatus::Renamed => StatusCode::Renamed,
proto::GitStatus::Copied => StatusCode::Copied,
proto::GitStatus::Unmodified => StatusCode::Unmodified,
- _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
+ _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
};
Ok(result)
});
@@ -2,7 +2,7 @@ use crate::{
Project, ProjectEntryId, ProjectItem, ProjectPath,
worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::{HashMap, HashSet, hash_map};
use futures::{StreamExt, channel::oneshot};
use gpui::{
@@ -128,7 +128,7 @@ impl ImageItem {
let file_metadata = fs
.metadata(image_path.as_path())
.await?
- .ok_or_else(|| anyhow!("failed to load image metadata"))?;
+ .context("failed to load image metadata")?;
Ok(ImageMetadata {
width,
@@ -223,7 +223,7 @@ impl ProjectItem for ImageItem {
project: &Entity<Project>,
path: &ProjectPath,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<Task<anyhow::Result<Entity<Self>>>> {
if is_image_file(&project, &path, cx) {
Some(cx.spawn({
let path = path.clone();
@@ -702,7 +702,7 @@ fn create_gpui_image(content: Vec<u8>) -> anyhow::Result<Arc<gpui::Image>> {
image::ImageFormat::Gif => gpui::ImageFormat::Gif,
image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
- _ => Err(anyhow::anyhow!("Image format not supported"))?,
+ format => anyhow::bail!("Image format {format:?} not supported"),
},
content,
)))
@@ -7,7 +7,7 @@ use crate::{
PrepareRenameResponse, ProjectTransaction, ResolveState,
lsp_store::{LocalLspStore, LspStore},
};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use async_trait::async_trait;
use client::proto::{self, PeerId};
use clock::Global;
@@ -48,9 +48,7 @@ pub fn lsp_formatting_options(settings: &LanguageSettings) -> lsp::FormattingOpt
pub(crate) fn file_path_to_lsp_url(path: &Path) -> Result<lsp::Url> {
match lsp::Url::from_file_path(path) {
Ok(url) => Ok(url),
- Err(()) => Err(anyhow!(
- "Invalid file path provided to LSP request: {path:?}"
- )),
+ Err(()) => anyhow::bail!("Invalid file path provided to LSP request: {path:?}"),
}
}
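A minimal sketch of the `anyhow::bail!` conversion shown above (illustrative only): because `bail!` expands to `return Err(anyhow!(...))`, it diverges and can replace an `Err(...)` match arm while the other arms keep producing a plain value.

```rust
use anyhow::Result;

#[derive(Debug, PartialEq)]
enum Kind {
    Quickfix,
    Refactor,
}

// Hypothetical parser, for illustration only.
fn parse_kind(kind: &str) -> Result<Kind> {
    let kind = match kind {
        "quickfix" => Kind::Quickfix,
        "refactor" => Kind::Refactor,
        other => anyhow::bail!("Invalid code action kind {other}"),
    };
    Ok(kind)
}

fn main() {
    assert_eq!(parse_kind("quickfix").unwrap(), Kind::Quickfix);
    assert!(parse_kind("bogus").is_err());
}
```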
@@ -293,7 +291,7 @@ impl LspCommand for PrepareRename {
Some(lsp::OneOf::Left(true)) => Ok(LspParamsOrResponse::Response(
PrepareRenameResponse::OnlyUnpreparedRenameSupported,
)),
- _ => Err(anyhow!("Rename not supported")),
+ _ => anyhow::bail!("Rename not supported"),
}
}
@@ -359,7 +357,7 @@ impl LspCommand for PrepareRename {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -422,9 +420,9 @@ impl LspCommand for PrepareRename {
) {
Ok(PrepareRenameResponse::Success(start..end))
} else {
- Err(anyhow!(
+ anyhow::bail!(
"Missing start or end position in remote project PrepareRenameResponse"
- ))
+ );
}
} else if message.only_unprepared_rename_supported {
Ok(PrepareRenameResponse::OnlyUnpreparedRenameSupported)
@@ -508,7 +506,7 @@ impl LspCommand for PerformRename {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -543,9 +541,7 @@ impl LspCommand for PerformRename {
_: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<ProjectTransaction> {
- let message = message
- .transaction
- .ok_or_else(|| anyhow!("missing transaction"))?;
+ let message = message.transaction.context("missing transaction")?;
lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.buffer_store().update(cx, |buffer_store, cx| {
@@ -622,7 +618,7 @@ impl LspCommand for GetDefinition {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -721,7 +717,7 @@ impl LspCommand for GetDeclaration {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -813,7 +809,7 @@ impl LspCommand for GetImplementation {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -912,7 +908,7 @@ impl LspCommand for GetTypeDefinition {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -963,7 +959,7 @@ fn language_server_for_buffer(
.map(|(adapter, server)| (adapter.clone(), server.clone()))
})
})?
- .ok_or_else(|| anyhow!("no language server found for buffer"))
+ .context("no language server found for buffer")
}
pub async fn location_links_from_proto(
@@ -997,11 +993,11 @@ pub fn location_link_from_proto(
let start = origin
.start
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing origin start"))?;
+ .context("missing origin start")?;
let end = origin
.end
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing origin end"))?;
+ .context("missing origin end")?;
buffer
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
@@ -1013,7 +1009,7 @@ pub fn location_link_from_proto(
None => None,
};
- let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
+ let target = link.target.context("missing target")?;
let buffer_id = BufferId::new(target.buffer_id)?;
let buffer = lsp_store
.update(cx, |lsp_store, cx| {
@@ -1023,11 +1019,11 @@ pub fn location_link_from_proto(
let start = target
.start
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target start"))?;
+ .context("missing target start")?;
let end = target
.end
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target end"))?;
+ .context("missing target end")?;
buffer
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
@@ -1337,7 +1333,7 @@ impl LspCommand for GetReferences {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -1393,11 +1389,11 @@ impl LspCommand for GetReferences {
let start = location
.start
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target start"))?;
+ .context("missing target start")?;
let end = location
.end
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target end"))?;
+ .context("missing target end")?;
target_buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
@@ -1494,7 +1490,7 @@ impl LspCommand for GetDocumentHighlights {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -1540,11 +1536,11 @@ impl LspCommand for GetDocumentHighlights {
let start = highlight
.start
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target start"))?;
+ .context("missing target start")?;
let end = highlight
.end
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target end"))?;
+ .context("missing target end")?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
@@ -1723,19 +1719,15 @@ impl LspCommand for GetDocumentSymbols {
let kind =
unsafe { mem::transmute::<i32, lsp::SymbolKind>(serialized_symbol.kind) };
- let start = serialized_symbol
- .start
- .ok_or_else(|| anyhow!("invalid start"))?;
- let end = serialized_symbol
- .end
- .ok_or_else(|| anyhow!("invalid end"))?;
+ let start = serialized_symbol.start.context("invalid start")?;
+ let end = serialized_symbol.end.context("invalid end")?;
let selection_start = serialized_symbol
.selection_start
- .ok_or_else(|| anyhow!("invalid selection start"))?;
+ .context("invalid selection start")?;
let selection_end = serialized_symbol
.selection_end
- .ok_or_else(|| anyhow!("invalid selection end"))?;
+ .context("invalid selection end")?;
Ok(DocumentSymbol {
name: serialized_symbol.name,
@@ -1993,7 +1985,7 @@ impl LspCommand for GetHover {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -2329,7 +2321,7 @@ impl LspCommand for GetCompletions {
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
})
})
- .ok_or_else(|| anyhow!("invalid position"))??;
+ .context("invalid position")??;
Ok(Self {
position,
context: CompletionContext {
@@ -2597,11 +2589,11 @@ impl LspCommand for GetCodeActions {
let start = message
.start
.and_then(language::proto::deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid start"))?;
+ .context("invalid start")?;
let end = message
.end
.and_then(language::proto::deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid end"))?;
+ .context("invalid end")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -2767,7 +2759,7 @@ impl LspCommand for OnTypeFormatting {
let position = message
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
+ .context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -3576,15 +3568,13 @@ impl LspCommand for LinkedEditingRange {
buffer: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<Self> {
- let position = message
- .position
- .ok_or_else(|| anyhow!("invalid position"))?;
+ let position = message.position.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})?
.await?;
- let position = deserialize_anchor(position).ok_or_else(|| anyhow!("invalid position"))?;
+ let position = deserialize_anchor(position).context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([position]))?
.await?;
@@ -1204,7 +1204,7 @@ impl LocalLspStore {
buffer.finalize_last_transaction();
let transaction_id = buffer
.start_transaction()
- .ok_or_else(|| anyhow!("transaction already open"))?;
+ .context("transaction already open")?;
let transaction = buffer
.get_transaction(transaction_id)
.expect("transaction started")
@@ -1862,14 +1862,14 @@ impl LocalLspStore {
let capabilities = &language_server.capabilities();
let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
if range_formatting_provider.map_or(false, |provider| provider == &OneOf::Left(false)) {
- return Err(anyhow!(
+ anyhow::bail!(
"{} language server does not support range formatting",
language_server.name()
- ));
+ );
}
let uri = lsp::Url::from_file_path(abs_path)
- .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
+ .map_err(|()| anyhow!("failed to convert abs path to uri"))?;
let text_document = lsp::TextDocumentIdentifier::new(uri);
let lsp_edits = {
@@ -1934,7 +1934,7 @@ impl LocalLspStore {
zlog::info!(logger => "Formatting via LSP");
let uri = lsp::Url::from_file_path(abs_path)
- .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
+ .map_err(|()| anyhow!("failed to convert abs path to uri"))?;
let text_document = lsp::TextDocumentIdentifier::new(uri);
let capabilities = &language_server.capabilities();
@@ -2026,10 +2026,7 @@ impl LocalLspStore {
.stderr(smol::process::Stdio::piped())
.spawn()?;
- let stdin = child
- .stdin
- .as_mut()
- .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
+ let stdin = child.stdin.as_mut().context("failed to acquire stdin")?;
let text = buffer
.handle
.update(cx, |buffer, _| buffer.as_rope().clone())?;
@@ -2039,14 +2036,13 @@ impl LocalLspStore {
stdin.flush().await?;
let output = child.output().await?;
- if !output.status.success() {
- return Err(anyhow!(
- "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
- output.status.code(),
- String::from_utf8_lossy(&output.stdout),
- String::from_utf8_lossy(&output.stderr),
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
+ output.status.code(),
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr),
+ );
let stdout = String::from_utf8(output.stdout)?;
Ok(Some(
@@ -2570,9 +2566,7 @@ impl LocalLspStore {
// We detect this case and treat it as if the version was `None`.
return Ok(buffer.read(cx).text_snapshot());
} else {
- return Err(anyhow!(
- "no snapshots found for buffer {buffer_id} and server {server_id}"
- ));
+ anyhow::bail!("no snapshots found for buffer {buffer_id} and server {server_id}");
};
let found_snapshot = snapshots
@@ -2617,7 +2611,7 @@ impl LocalLspStore {
push_to_history: bool,
project_transaction: &mut ProjectTransaction,
cx: &mut AsyncApp,
- ) -> Result<(), anyhow::Error> {
+ ) -> anyhow::Result<()> {
for mut action in actions {
Self::try_resolve_code_action(language_server, &mut action)
.await
@@ -2846,7 +2840,7 @@ impl LocalLspStore {
let abs_path = op
.uri
.to_file_path()
- .map_err(|_| anyhow!("can't convert URI to path"))?;
+ .map_err(|()| anyhow!("can't convert URI to path"))?;
if let Some(parent_path) = abs_path.parent() {
fs.create_dir(parent_path).await?;
@@ -2871,11 +2865,11 @@ impl LocalLspStore {
let source_abs_path = op
.old_uri
.to_file_path()
- .map_err(|_| anyhow!("can't convert URI to path"))?;
+ .map_err(|()| anyhow!("can't convert URI to path"))?;
let target_abs_path = op
.new_uri
.to_file_path()
- .map_err(|_| anyhow!("can't convert URI to path"))?;
+ .map_err(|()| anyhow!("can't convert URI to path"))?;
fs.rename(
&source_abs_path,
&target_abs_path,
@@ -2893,7 +2887,7 @@ impl LocalLspStore {
let abs_path = op
.uri
.to_file_path()
- .map_err(|_| anyhow!("can't convert URI to path"))?;
+ .map_err(|()| anyhow!("can't convert URI to path"))?;
let options = op
.options
.map(|options| fs::RemoveOptions {
@@ -3042,12 +3036,10 @@ impl LocalLspStore {
adapter: Arc<CachedLspAdapter>,
cx: &mut AsyncApp,
) -> Result<lsp::ApplyWorkspaceEditResponse> {
- let this = this
- .upgrade()
- .ok_or_else(|| anyhow!("project project closed"))?;
+        let this = this.upgrade().context("project closed")?;
let language_server = this
.update(cx, |this, _| this.language_server_for_id(server_id))?
- .ok_or_else(|| anyhow!("language server not found"))?;
+ .context("language server not found")?;
let transaction = Self::deserialize_workspace_edit(
this.clone(),
params.edit,
@@ -4372,13 +4364,13 @@ impl LspStore {
err
);
log::warn!("{message}");
- anyhow!(message)
+ anyhow::anyhow!(message)
})?;
let response = request
.response_from_lsp(
response,
- this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
+ this.upgrade().context("no app context")?,
buffer_handle,
language_server.server_id(),
cx.clone(),
@@ -4591,7 +4583,7 @@ impl LspStore {
.request(request)
.await?
.transaction
- .ok_or_else(|| anyhow!("missing transaction"))?;
+ .context("missing transaction")?;
buffer_store
.update(cx, |buffer_store, cx| {
@@ -4613,7 +4605,7 @@ impl LspStore {
if let Some(edit) = action.lsp_action.edit() {
if edit.changes.is_some() || edit.document_changes.is_some() {
return LocalLspStore::deserialize_workspace_edit(
- this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
+ this.upgrade().context("no app present")?,
edit.clone(),
push_to_history,
lsp_adapter.clone(),
@@ -5715,7 +5707,7 @@ impl LspStore {
LspCommand::response_from_proto(
lsp_request,
response,
- project.upgrade().ok_or_else(|| anyhow!("No project"))?,
+ project.upgrade().context("No project")?,
buffer_handle.clone(),
cx.clone(),
)
@@ -6525,7 +6517,7 @@ impl LspStore {
mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
filter: F,
cx: &mut Context<Self>,
- ) -> Result<(), anyhow::Error> {
+ ) -> anyhow::Result<()> {
let Some((worktree, relative_path)) =
self.worktree_store.read(cx).find_worktree(&abs_path, cx)
else {
@@ -6730,7 +6722,7 @@ impl LspStore {
let abs_path = abs_path
.to_file_path()
- .map_err(|_| anyhow!("can't convert URI to path"))?;
+ .map_err(|()| anyhow!("can't convert URI to path"))?;
let p = abs_path.clone();
let yarn_worktree = lsp_store
.update(cx, move |lsp_store, cx| match lsp_store.as_local() {
@@ -7094,12 +7086,8 @@ impl LspStore {
mut cx: AsyncApp,
) -> Result<proto::ApplyCodeActionResponse> {
let sender_id = envelope.original_sender_id().unwrap_or_default();
- let action = Self::deserialize_code_action(
- envelope
- .payload
- .action
- .ok_or_else(|| anyhow!("invalid action"))?,
- )?;
+ let action =
+ Self::deserialize_code_action(envelope.payload.action.context("invalid action")?)?;
let apply_code_action = this.update(&mut cx, |this, cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?;
@@ -7198,7 +7186,7 @@ impl LspStore {
)
})
})?
- .ok_or_else(|| anyhow!("worktree not found"))?;
+ .context("worktree not found")?;
let (old_abs_path, new_abs_path) = {
let root_path = worktree.update(&mut cx, |this, _| this.abs_path())?;
let new_path = PathBuf::from_proto(envelope.payload.new_path.clone());
@@ -7288,10 +7276,7 @@ impl LspStore {
envelope: TypedEnvelope<proto::StartLanguageServer>,
mut cx: AsyncApp,
) -> Result<()> {
- let server = envelope
- .payload
- .server
- .ok_or_else(|| anyhow!("invalid server"))?;
+ let server = envelope.payload.server.context("invalid server")?;
this.update(&mut cx, |this, cx| {
let server_id = LanguageServerId(server.id as usize);
@@ -7322,11 +7307,7 @@ impl LspStore {
this.update(&mut cx, |this, cx| {
let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
- match envelope
- .payload
- .variant
- .ok_or_else(|| anyhow!("invalid variant"))?
- {
+ match envelope.payload.variant.context("invalid variant")? {
proto::update_language_server::Variant::WorkStart(payload) => {
this.on_lsp_work_start(
language_server_id,
@@ -7903,11 +7884,11 @@ impl LspStore {
let completion = this
.read_with(&cx, |this, cx| {
let id = LanguageServerId(envelope.payload.language_server_id as usize);
- let Some(server) = this.language_server_for_id(id) else {
- return Err(anyhow!("No language server {id}"));
- };
+ let server = this
+ .language_server_for_id(id)
+ .with_context(|| format!("No language server {id}"))?;
- Ok(cx.background_spawn(async move {
+ anyhow::Ok(cx.background_spawn(async move {
let can_resolve = server
.capabilities()
.completion_provider
@@ -7994,8 +7975,8 @@ impl LspStore {
.payload
.position
.and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid position"))?;
- Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
+ .context("invalid position")?;
+ anyhow::Ok(this.apply_on_type_formatting(
buffer,
position,
envelope.payload.trigger.clone(),
@@ -8114,18 +8095,12 @@ impl LspStore {
mut cx: AsyncApp,
) -> Result<proto::OpenBufferForSymbolResponse> {
let peer_id = envelope.original_sender_id().unwrap_or_default();
- let symbol = envelope
- .payload
- .symbol
- .ok_or_else(|| anyhow!("invalid symbol"))?;
+ let symbol = envelope.payload.symbol.context("invalid symbol")?;
let symbol = Self::deserialize_symbol(symbol)?;
let symbol = this.update(&mut cx, |this, _| {
let signature = this.symbol_signature(&symbol.path);
- if signature == symbol.signature {
- Ok(symbol)
- } else {
- Err(anyhow!("invalid symbol signature"))
- }
+ anyhow::ensure!(signature == symbol.signature, "invalid symbol signature");
+ Ok(symbol)
})??;
let buffer = this
.update(&mut cx, |this, cx| {
@@ -8268,10 +8243,7 @@ impl LspStore {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?;
let completion = Self::deserialize_completion(
- envelope
- .payload
- .completion
- .ok_or_else(|| anyhow!("invalid completion"))?,
+ envelope.payload.completion.context("invalid completion")?,
)?;
anyhow::Ok((buffer, completion))
})??;
@@ -8365,10 +8337,7 @@ impl LspStore {
let ranges = match &target {
LspFormatTarget::Buffers => None,
LspFormatTarget::Ranges(ranges) => {
- let Some(ranges) = ranges.get(&id) else {
- return Err(anyhow!("No format ranges provided for buffer"));
- };
- Some(ranges.clone())
+ Some(ranges.get(&id).context("No format ranges provided for buffer")?.clone())
}
};
@@ -8498,17 +8467,20 @@ impl LspStore {
buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?);
}
let kind = match envelope.payload.kind.as_str() {
- "" => Ok(CodeActionKind::EMPTY),
- "quickfix" => Ok(CodeActionKind::QUICKFIX),
- "refactor" => Ok(CodeActionKind::REFACTOR),
- "refactor.extract" => Ok(CodeActionKind::REFACTOR_EXTRACT),
- "refactor.inline" => Ok(CodeActionKind::REFACTOR_INLINE),
- "refactor.rewrite" => Ok(CodeActionKind::REFACTOR_REWRITE),
- "source" => Ok(CodeActionKind::SOURCE),
- "source.organizeImports" => Ok(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
- "source.fixAll" => Ok(CodeActionKind::SOURCE_FIX_ALL),
- _ => Err(anyhow!("Invalid code action kind")),
- }?;
+ "" => CodeActionKind::EMPTY,
+ "quickfix" => CodeActionKind::QUICKFIX,
+ "refactor" => CodeActionKind::REFACTOR,
+ "refactor.extract" => CodeActionKind::REFACTOR_EXTRACT,
+ "refactor.inline" => CodeActionKind::REFACTOR_INLINE,
+ "refactor.rewrite" => CodeActionKind::REFACTOR_REWRITE,
+ "source" => CodeActionKind::SOURCE,
+ "source.organizeImports" => CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
+ "source.fixAll" => CodeActionKind::SOURCE_FIX_ALL,
+ _ => anyhow::bail!(
+ "Invalid code action kind {}",
+ envelope.payload.kind.as_str()
+ ),
+ };
anyhow::Ok(this.apply_code_action_kind(buffers, kind, false, cx))
})??;
@@ -8778,7 +8750,7 @@ impl LspStore {
let abs_path = params
.uri
.to_file_path()
- .map_err(|_| anyhow!("URI is not a file"))?;
+ .map_err(|()| anyhow!("URI is not a file"))?;
let mut diagnostics = Vec::default();
let mut primary_diagnostic_group_ids = HashMap::default();
let mut sources_by_group_id = HashMap::default();
@@ -9320,12 +9292,8 @@ impl LspStore {
path: Arc::<Path>::from_proto(serialized_symbol.path),
};
- let start = serialized_symbol
- .start
- .ok_or_else(|| anyhow!("invalid start"))?;
- let end = serialized_symbol
- .end
- .ok_or_else(|| anyhow!("invalid end"))?;
+ let start = serialized_symbol.start.context("invalid start")?;
+ let end = serialized_symbol.end.context("invalid end")?;
Ok(CoreSymbol {
language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
source_worktree_id,
@@ -10307,15 +10275,14 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
.output()
.await?;
- if output.status.success() {
- return Ok(());
- }
- Err(anyhow!(
+ anyhow::ensure!(
+ output.status.success(),
"{}, stdout: {:?}, stderr: {:?}",
output.status,
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
- ))
+ );
+ Ok(())
}
fn update_status(&self, server_name: LanguageServerName, status: language::BinaryStatus) {
@@ -761,8 +761,7 @@ pub(super) async fn format_with_prettier(
.log_err();
Some(Err(anyhow!(
- "{} failed to spawn: {error:#}",
- prettier_description
+ "{prettier_description} failed to spawn: {error:#}"
)))
}
}
@@ -2022,7 +2022,7 @@ impl Project {
worktree.expand_all_for_entry(entry_id, cx)
});
Some(cx.spawn(async move |this, cx| {
- task.ok_or_else(|| anyhow!("no task"))?.await?;
+ task.context("no task")?.await?;
this.update(cx, |_, cx| {
cx.emit(Event::ExpandedAllForEntry(worktree_id, entry_id));
})?;
@@ -2031,9 +2031,10 @@ impl Project {
}
pub fn shared(&mut self, project_id: u64, cx: &mut Context<Self>) -> Result<()> {
- if !matches!(self.client_state, ProjectClientState::Local) {
- return Err(anyhow!("project was already shared"));
- }
+ anyhow::ensure!(
+ matches!(self.client_state, ProjectClientState::Local),
+ "project was already shared"
+ );
self.client_subscriptions.extend([
self.client
@@ -2151,9 +2152,10 @@ impl Project {
}
fn unshare_internal(&mut self, cx: &mut App) -> Result<()> {
- if self.is_via_collab() {
- return Err(anyhow!("attempted to unshare a remote project"));
- }
+ anyhow::ensure!(
+ !self.is_via_collab(),
+ "attempted to unshare a remote project"
+ );
if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
self.client_state = ProjectClientState::Local;
@@ -2189,7 +2191,7 @@ impl Project {
.ok();
Ok(())
} else {
- Err(anyhow!("attempted to unshare an unshared project"))
+ anyhow::bail!("attempted to unshare an unshared project");
}
}
@@ -2431,7 +2433,7 @@ impl Project {
if let Some(buffer) = self.buffer_for_id(id, cx) {
Task::ready(Ok(buffer))
} else if self.is_local() || self.is_via_ssh() {
- Task::ready(Err(anyhow!("buffer {} does not exist", id)))
+ Task::ready(Err(anyhow!("buffer {id} does not exist")))
} else if let Some(project_id) = self.remote_id() {
let request = self.client.request(proto::OpenBufferById {
project_id,
@@ -2521,9 +2523,7 @@ impl Project {
let weak_project = cx.entity().downgrade();
cx.spawn(async move |_, cx| {
let image_item = open_image_task.await?;
- let project = weak_project
- .upgrade()
- .ok_or_else(|| anyhow!("Project dropped"))?;
+ let project = weak_project.upgrade().context("Project dropped")?;
let metadata = ImageItem::load_image_metadata(image_item.clone(), project, cx).await?;
image_item.update(cx, |image_item, cx| {
@@ -4272,7 +4272,7 @@ impl Project {
.payload
.collaborator
.take()
- .ok_or_else(|| anyhow!("empty collaborator"))?;
+ .context("empty collaborator")?;
let collaborator = Collaborator::from_proto(collaborator)?;
this.update(&mut cx, |this, cx| {
@@ -4296,16 +4296,16 @@ impl Project {
let old_peer_id = envelope
.payload
.old_peer_id
- .ok_or_else(|| anyhow!("missing old peer id"))?;
+ .context("missing old peer id")?;
let new_peer_id = envelope
.payload
.new_peer_id
- .ok_or_else(|| anyhow!("missing new peer id"))?;
+ .context("missing new peer id")?;
this.update(&mut cx, |this, cx| {
let collaborator = this
.collaborators
.remove(&old_peer_id)
- .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
+ .context("received UpdateProjectCollaborator for unknown peer")?;
let is_host = collaborator.is_host;
this.collaborators.insert(new_peer_id, collaborator);
@@ -4336,14 +4336,11 @@ impl Project {
mut cx: AsyncApp,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
- let peer_id = envelope
- .payload
- .peer_id
- .ok_or_else(|| anyhow!("invalid peer id"))?;
+ let peer_id = envelope.payload.peer_id.context("invalid peer id")?;
let replica_id = this
.collaborators
.remove(&peer_id)
- .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
+ .with_context(|| format!("unknown peer {peer_id:?}"))?
.replica_id;
this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.forget_shared_buffers_for(&peer_id);
@@ -4557,11 +4554,7 @@ impl Project {
) -> Result<proto::FindSearchCandidatesResponse> {
let peer_id = envelope.original_sender_id()?;
let message = envelope.payload;
- let query = SearchQuery::from_proto(
- message
- .query
- .ok_or_else(|| anyhow!("missing query field"))?,
- )?;
+ let query = SearchQuery::from_proto(message.query.context("missing query field")?)?;
let results = this.update(&mut cx, |this, cx| {
this.find_search_candidate_buffers(&query, message.limit as _, cx)
})?;
@@ -4639,13 +4632,10 @@ impl Project {
.file()
.map(|f| f.is_private())
.unwrap_or_default();
- if is_private {
- Err(anyhow!(ErrorCode::UnsharedItem))
- } else {
- Ok(proto::OpenBufferResponse {
- buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
- })
- }
+ anyhow::ensure!(!is_private, ErrorCode::UnsharedItem);
+ Ok(proto::OpenBufferResponse {
+ buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
+ })
})?
}
@@ -450,10 +450,7 @@ impl WorktreeStore {
})
.collect::<HashMap<_, _>>();
- let (client, project_id) = self
- .upstream_client()
- .clone()
- .ok_or_else(|| anyhow!("invalid project"))?;
+ let (client, project_id) = self.upstream_client().clone().context("invalid project")?;
for worktree in worktrees {
if let Some(old_worktree) =
@@ -916,7 +913,7 @@ impl WorktreeStore {
let worktree = this.update(&mut cx, |this, cx| {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
this.worktree_for_id(worktree_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
+ .context("worktree not found")
})??;
Worktree::handle_create_entry(worktree, envelope.payload, cx).await
}
@@ -929,7 +926,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
+ .context("worktree not found")
})??;
Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
}
@@ -942,7 +939,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
- .ok_or_else(|| anyhow!("worktree not found"))
+ .context("worktree not found")
})??;
Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
}
@@ -955,7 +952,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
.update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
- .ok_or_else(|| anyhow!("invalid request"))?;
+ .context("invalid request")?;
Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
}
@@ -967,7 +964,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
.update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
- .ok_or_else(|| anyhow!("invalid request"))?;
+ .context("invalid request")?;
Worktree::handle_expand_all_for_entry(worktree, envelope.payload, cx).await
}
}
@@ -1,7 +1,7 @@
mod project_panel_settings;
mod utils;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use client::{ErrorCode, ErrorExt};
use collections::{BTreeSet, HashMap, hash_map};
use command_palette_hooks::CommandPaletteFilter;
@@ -603,7 +603,7 @@ impl ProjectPanel {
Some(serialization_key) => cx
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
.await
- .map_err(|e| anyhow!("Failed to load project panel: {}", e))
+ .context("loading project panel")
.log_err()
.flatten()
.map(|panel| serde_json::from_str::<SerializedProjectPanel>(&panel))
@@ -2304,7 +2304,7 @@ impl ProjectPanel {
project_panel
.project
.update(cx, |project, cx| project.delete_entry(entry_id, true, cx))
- .ok_or_else(|| anyhow!("no such entry"))
+ .context("no such entry")
})??
.await?;
}
@@ -4465,34 +4465,30 @@ impl ProjectPanel {
skip_ignored: bool,
cx: &mut Context<Self>,
) -> Result<()> {
- if let Some(worktree) = project.read(cx).worktree_for_entry(entry_id, cx) {
- let worktree = worktree.read(cx);
- if skip_ignored
- && worktree
- .entry_for_id(entry_id)
- .map_or(true, |entry| entry.is_ignored && !entry.is_always_included)
- {
- return Err(anyhow!(
- "can't reveal an ignored entry in the project panel"
- ));
- }
-
- let worktree_id = worktree.id();
- self.expand_entry(worktree_id, entry_id, cx);
- self.update_visible_entries(Some((worktree_id, entry_id)), cx);
- self.marked_entries.clear();
- self.marked_entries.insert(SelectedEntry {
- worktree_id,
- entry_id,
- });
- self.autoscroll(cx);
- cx.notify();
- Ok(())
- } else {
- Err(anyhow!(
- "can't reveal a non-existent entry in the project panel"
- ))
+ let worktree = project
+ .read(cx)
+ .worktree_for_entry(entry_id, cx)
+ .context("can't reveal a non-existent entry in the project panel")?;
+ let worktree = worktree.read(cx);
+ if skip_ignored
+ && worktree
+ .entry_for_id(entry_id)
+ .map_or(true, |entry| entry.is_ignored && !entry.is_always_included)
+ {
+ anyhow::bail!("can't reveal an ignored entry in the project panel");
}
+
+ let worktree_id = worktree.id();
+ self.expand_entry(worktree_id, entry_id, cx);
+ self.update_visible_entries(Some((worktree_id, entry_id)), cx);
+ self.marked_entries.clear();
+ self.marked_entries.insert(SelectedEntry {
+ worktree_id,
+ entry_id,
+ });
+ self.autoscroll(cx);
+ cx.notify();
+ Ok(())
}
fn find_active_indent_guide(
@@ -5268,7 +5268,7 @@ impl project::ProjectItem for TestProjectItem {
_project: &Entity<Project>,
path: &ProjectPath,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<Task<anyhow::Result<Entity<Self>>>> {
let path = path.clone();
Some(cx.spawn(async move |cx| cx.new(|_| Self { path })))
}
@@ -139,7 +139,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
});
});
})?;
- Ok::<_, anyhow::Error>(())
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
cx.emit(DismissEvent);
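A minimal sketch of the `anyhow::Ok` change above (illustrative only): when the last expression of a closure or async block is `Ok(...)`, the error type is ambiguous, and `anyhow::Ok(value)` pins it to `anyhow::Error` without the `Ok::<_, anyhow::Error>(value)` turbofish.

```rust
use anyhow::Result;

fn main() -> Result<()> {
    let parsed = (|| {
        // `?` needs the closure's error type to be known; anyhow::Ok fixes it.
        let n: i32 = "42".parse()?;
        anyhow::Ok(n)
    })()?;
    assert_eq!(parsed, 42);
    Ok(())
}
```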
@@ -1,6 +1,6 @@
mod prompts;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use collections::HashMap;
use futures::FutureExt as _;
@@ -266,10 +266,7 @@ impl PromptStore {
let bodies = self.bodies;
cx.background_spawn(async move {
let txn = env.read_txn()?;
- let mut prompt = bodies
- .get(&txn, &id)?
- .ok_or_else(|| anyhow!("prompt not found"))?
- .into();
+ let mut prompt = bodies.get(&txn, &id)?.context("prompt not found")?.into();
LineEnding::normalize(&mut prompt);
Ok(prompt)
})
@@ -124,7 +124,7 @@ impl ErrorExt for anyhow::Error {
if let Some(rpc_error) = self.downcast_ref::<RpcError>() {
rpc_error.cloned()
} else {
- anyhow::anyhow!("{}", self)
+ anyhow::anyhow!("{self}")
}
}
}
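A minimal sketch of the inline format-argument style used in several hunks above (illustrative only): `"{error}"` captures the variable directly instead of passing it as a positional `"{}"` argument, and both forms render identically.

```rust
fn main() {
    let request_id = 7;
    let error = "timed out";
    let positional = format!("request {} failed: {}", request_id, error);
    let inline = format!("request {request_id} failed: {error}");
    assert_eq!(positional, inline);
}
```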
@@ -1,5 +1,5 @@
use crate::{Envelope, PeerId};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use serde::Serialize;
use std::{
any::{Any, TypeId},
@@ -201,7 +201,7 @@ pub struct TypedEnvelope<T> {
impl<T> TypedEnvelope<T> {
pub fn original_sender_id(&self) -> Result<PeerId> {
self.original_sender_id
- .ok_or_else(|| anyhow!("missing original_sender_id"))
+ .context("missing original_sender_id")
}
}
@@ -1,7 +1,7 @@
use std::collections::BTreeSet;
use std::{path::PathBuf, sync::Arc, time::Duration};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use auto_update::AutoUpdater;
use editor::Editor;
use extension_host::ExtensionStore;
@@ -484,15 +484,14 @@ impl remote::SshClientDelegate for SshClientDelegate {
cx,
)
.await
- .map_err(|e| {
- anyhow!(
- "Failed to download remote server binary (version: {}, os: {}, arch: {}): {}",
+ .with_context(|| {
+ format!(
+ "Downloading remote server binary (version: {}, os: {}, arch: {})",
version
.map(|v| format!("{}", v))
.unwrap_or("unknown".to_string()),
platform.os,
platform.arch,
- e
)
})?;
Ok(binary_path)
@@ -100,7 +100,7 @@ macro_rules! shell_script {
fn parse_port_number(port_str: &str) -> Result<u16> {
port_str
.parse()
- .map_err(|e| anyhow!("Invalid port number: {}: {}", port_str, e))
+ .with_context(|| format!("parsing port number: {port_str}"))
}
fn parse_port_forward_spec(spec: &str) -> Result<SshPortForwardOption> {
@@ -151,9 +151,7 @@ impl SshConnectionOptions {
"-w",
];
- let mut tokens = shlex::split(input)
- .ok_or_else(|| anyhow!("invalid input"))?
- .into_iter();
+ let mut tokens = shlex::split(input).context("invalid input")?.into_iter();
'outer: while let Some(arg) = tokens.next() {
if ALLOWED_OPTS.contains(&(&arg as &str)) {
@@ -369,14 +367,12 @@ impl SshSocket {
async fn run_command(&self, program: &str, args: &[&str]) -> Result<String> {
let output = self.ssh_command(program, args).output().await?;
- if output.status.success() {
- Ok(String::from_utf8_lossy(&output.stdout).to_string())
- } else {
- Err(anyhow!(
- "failed to run command: {}",
- String::from_utf8_lossy(&output.stderr)
- ))
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to run command: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(String::from_utf8_lossy(&output.stdout).to_string())
}
fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command {
@@ -727,13 +723,13 @@ impl SshRemoteClient {
.map(|state| state.can_reconnect())
.unwrap_or(false);
if !can_reconnect {
+ log::info!("aborting reconnect, because not in state that allows reconnecting");
let error = if let Some(state) = lock.as_ref() {
format!("invalid state, cannot reconnect while in state {state}")
} else {
"no state set".to_string()
};
- log::info!("aborting reconnect, because not in state that allows reconnecting");
- return Err(anyhow!(error));
+ anyhow::bail!(error);
}
let state = lock.take().unwrap();
@@ -1363,14 +1359,13 @@ impl RemoteConnection for SshRemoteConnection {
cx.background_spawn(async move {
let output = output.await?;
- if !output.status.success() {
- return Err(anyhow!(
- "failed to upload directory {} -> {}: {}",
- src_path.display(),
- dest_path.display(),
- String::from_utf8_lossy(&output.stderr)
- ));
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to upload directory {} -> {}: {}",
+ src_path.display(),
+ dest_path.display(),
+ String::from_utf8_lossy(&output.stderr)
+ );
Ok(())
})
@@ -1446,7 +1441,7 @@ impl SshRemoteConnection {
_delegate: Arc<dyn SshClientDelegate>,
_cx: &mut AsyncApp,
) -> Result<Self> {
- Err(anyhow!("ssh is not supported on this platform"))
+ anyhow::bail!("ssh is not supported on this platform");
}
#[cfg(unix)]
@@ -1506,10 +1501,10 @@ impl SshRemoteConnection {
match result {
AskPassResult::CancelledByUser => {
master_process.kill().ok();
- Err(anyhow!("SSH connection canceled"))?
+ anyhow::bail!("SSH connection canceled")
}
AskPassResult::Timedout => {
- Err(anyhow!("connecting to host timed out"))?
+ anyhow::bail!("connecting to host timed out")
}
}
}
@@ -1531,7 +1526,7 @@ impl SshRemoteConnection {
"failed to connect: {}",
String::from_utf8_lossy(&output).trim()
);
- Err(anyhow!(error_message))?;
+ anyhow::bail!(error_message);
}
drop(askpass);
@@ -1566,15 +1561,15 @@ impl SshRemoteConnection {
async fn platform(&self) -> Result<SshPlatform> {
let uname = self.socket.run_command("sh", &["-c", "uname -sm"]).await?;
let Some((os, arch)) = uname.split_once(" ") else {
- Err(anyhow!("unknown uname: {uname:?}"))?
+ anyhow::bail!("unknown uname: {uname:?}")
};
let os = match os.trim() {
"Darwin" => "macos",
"Linux" => "linux",
- _ => Err(anyhow!(
+ _ => anyhow::bail!(
"Prebuilt remote servers are not yet available for {os:?}. See https://zed.dev/docs/remote-development"
- ))?,
+ ),
};
// exclude armv5,6,7 as they are 32-bit.
let arch = if arch.starts_with("armv8")
@@ -1586,9 +1581,9 @@ impl SshRemoteConnection {
} else if arch.starts_with("x86") {
"x86_64"
} else {
- Err(anyhow!(
+ anyhow::bail!(
"Prebuilt remote servers are not yet available for {arch:?}. See https://zed.dev/docs/remote-development"
- ))?
+ )
};
Ok(SshPlatform { os, arch })
@@ -1940,16 +1935,14 @@ impl SshRemoteConnection {
.output()
.await?;
- if output.status.success() {
- Ok(())
- } else {
- Err(anyhow!(
- "failed to upload file {} -> {}: {}",
- src_path.display(),
- dest_path.display(),
- String::from_utf8_lossy(&output.stderr)
- ))
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to upload file {} -> {}: {}",
+ src_path.display(),
+ dest_path.display(),
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(())
}
#[cfg(debug_assertions)]
@@ -1967,9 +1960,10 @@ impl SshRemoteConnection {
.stderr(Stdio::inherit())
.output()
.await?;
- if !output.status.success() {
- Err(anyhow!("Failed to run command: {:?}", command))?;
- }
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to run command: {command:?}"
+ );
Ok(())
}
@@ -2242,8 +2236,7 @@ impl ChannelClient {
async move {
let response = response.await?;
log::debug!("ssh request finish. name:{}", T::NAME);
- T::Response::from_envelope(response)
- .ok_or_else(|| anyhow!("received a response of the wrong type"))
+ T::Response::from_envelope(response).context("received a response of the wrong type")
}
}
@@ -2263,7 +2256,7 @@ impl ChannelClient {
},
async {
smol::Timer::after(timeout).await;
- Err(anyhow!("Timeout detected"))
+ anyhow::bail!("Timeout detected")
},
)
.await
@@ -2277,7 +2270,7 @@ impl ChannelClient {
},
async {
smol::Timer::after(timeout).await;
- Err(anyhow!("Timeout detected"))
+ anyhow::bail!("Timeout detected")
},
)
.await
@@ -2307,8 +2300,8 @@ impl ChannelClient {
};
async move {
if let Err(error) = &result {
- log::error!("failed to send message: {}", error);
- return Err(anyhow!("failed to send message: {}", error));
+ log::error!("failed to send message: {error}");
+ anyhow::bail!("failed to send message: {error}");
}
let response = rx.await.context("connection lost")?.0;
@@ -1,5 +1,5 @@
use ::proto::{FromProto, ToProto};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
@@ -368,7 +368,7 @@ impl HeadlessProject {
let mut parent = path
.parent()
.ok_or(e)
- .map_err(|_| anyhow!("{:?} does not exist", path))?;
+ .with_context(|| format!("{path:?} does not exist"))?;
if parent == Path::new("") {
parent = util::paths::home_dir();
}
@@ -558,11 +558,7 @@ impl HeadlessProject {
mut cx: AsyncApp,
) -> Result<proto::FindSearchCandidatesResponse> {
let message = envelope.payload;
- let query = SearchQuery::from_proto(
- message
- .query
- .ok_or_else(|| anyhow!("missing query field"))?,
- )?;
+ let query = SearchQuery::from_proto(message.query.context("missing query field")?)?;
let results = this.update(&mut cx, |this, cx| {
this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
@@ -333,7 +333,7 @@ fn start_server(
break;
};
if let Err(error) = incoming_tx.unbounded_send(message) {
- log::error!("failed to send message to application: {:?}. exiting.", error);
+ log::error!("failed to send message to application: {error:?}. exiting.");
return Err(anyhow!(error));
}
}
@@ -390,8 +390,7 @@ fn init_paths() -> anyhow::Result<()> {
]
.iter()
{
- std::fs::create_dir_all(path)
- .map_err(|e| anyhow!("Could not create directory {:?}: {}", path, e))?;
+ std::fs::create_dir_all(path).with_context(|| format!("creating directory {path:?}"))?;
}
Ok(())
}
@@ -542,7 +541,7 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
if is_reconnecting {
if !server_running {
log::error!("attempted to reconnect, but no server running");
- return Err(anyhow!(ProxyLaunchError::ServerNotRunning));
+ anyhow::bail!(ProxyLaunchError::ServerNotRunning);
}
} else {
if let Some(pid) = server_pid {
@@ -573,19 +572,20 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
let mut stream = smol::net::unix::UnixStream::connect(&server_paths.stderr_socket).await?;
let mut stderr_buffer = vec![0; 2048];
loop {
- match stream.read(&mut stderr_buffer).await {
- Ok(0) => {
+ match stream
+ .read(&mut stderr_buffer)
+ .await
+ .context("reading stderr")?
+ {
+ 0 => {
let error =
std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "stderr closed");
Err(anyhow!(error))?;
}
- Ok(n) => {
+ n => {
stderr.write_all(&mut stderr_buffer[..n]).await?;
stderr.flush().await?;
}
- Err(error) => {
- Err(anyhow!("error reading stderr: {error:?}"))?;
- }
}
}
});
@@ -868,7 +868,7 @@ fn read_proxy_settings(cx: &mut Context<HeadlessProject>) -> Option<Url> {
}
fn daemonize() -> Result<ControlFlow<()>> {
- match fork::fork().map_err(|e| anyhow::anyhow!("failed to call fork with error code {}", e))? {
+ match fork::fork().map_err(|e| anyhow!("failed to call fork with error code {e}"))? {
fork::Fork::Parent(_) => {
return Ok(ControlFlow::Break(()));
}
@@ -351,14 +351,7 @@ impl RunningKernel for NativeRunningKernel {
fn force_shutdown(&mut self, _window: &mut Window, _cx: &mut App) -> Task<anyhow::Result<()>> {
self._process_status_task.take();
self.request_tx.close_channel();
-
- Task::ready(match self.process.kill() {
- Ok(_) => Ok(()),
- Err(error) => Err(anyhow::anyhow!(
- "Failed to kill the kernel process: {}",
- error
- )),
- })
+ Task::ready(self.process.kill().context("killing the kernel process"))
}
}
@@ -54,7 +54,7 @@ pub async fn launch_remote_kernel(
if !response.status().is_success() {
let mut body = String::new();
response.into_body().read_to_string(&mut body).await?;
- return Err(anyhow::anyhow!("Failed to launch kernel: {}", body));
+ anyhow::bail!("Failed to launch kernel: {body}");
}
let mut body = String::new();
@@ -79,36 +79,31 @@ pub async fn list_remote_kernelspecs(
let response = http_client.send(request).await?;
- if response.status().is_success() {
- let mut body = response.into_body();
-
- let mut body_bytes = Vec::new();
- body.read_to_end(&mut body_bytes).await?;
-
- let kernel_specs: KernelSpecsResponse = serde_json::from_slice(&body_bytes)?;
-
- let remote_kernelspecs = kernel_specs
- .kernelspecs
- .into_iter()
- .map(|(name, spec)| RemoteKernelSpecification {
- name: name.clone(),
- url: remote_server.base_url.clone(),
- token: remote_server.token.clone(),
- kernelspec: spec.spec,
- })
- .collect::<Vec<RemoteKernelSpecification>>();
-
- if remote_kernelspecs.is_empty() {
- Err(anyhow::anyhow!("No kernel specs found"))
- } else {
- Ok(remote_kernelspecs.clone())
- }
- } else {
- Err(anyhow::anyhow!(
- "Failed to fetch kernel specs: {}",
- response.status()
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "Failed to fetch kernel specs: {}",
+ response.status()
+ );
+ let mut body = response.into_body();
+
+ let mut body_bytes = Vec::new();
+ body.read_to_end(&mut body_bytes).await?;
+
+ let kernel_specs: KernelSpecsResponse = serde_json::from_slice(&body_bytes)?;
+
+ let remote_kernelspecs = kernel_specs
+ .kernelspecs
+ .into_iter()
+ .map(|(name, spec)| RemoteKernelSpecification {
+ name: name.clone(),
+ url: remote_server.base_url.clone(),
+ token: remote_server.token.clone(),
+ kernelspec: spec.spec,
+ })
+ .collect::<Vec<RemoteKernelSpecification>>();
+
+ anyhow::ensure!(!remote_kernelspecs.is_empty(), "No kernel specs found");
+    Ok(remote_kernelspecs)
}
impl PartialEq for RemoteKernelSpecification {
@@ -288,14 +283,12 @@ impl RunningKernel for RemoteRunningKernel {
let response = http_client.send(request).await?;
- if response.status().is_success() {
- Ok(())
- } else {
- Err(anyhow::anyhow!(
- "Failed to shutdown kernel: {}",
- response.status()
- ))
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "Failed to shutdown kernel: {}",
+ response.status()
+ );
+ Ok(())
})
}
}
@@ -565,7 +565,7 @@ impl project::ProjectItem for NotebookItem {
project: &Entity<Project>,
path: &ProjectPath,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<Task<anyhow::Result<Entity<Self>>>> {
let path = path.clone();
let project = project.clone();
let fs = project.read(cx).fs().clone();
@@ -575,7 +575,7 @@ impl project::ProjectItem for NotebookItem {
Some(cx.spawn(async move |cx| {
let abs_path = project
.read_with(cx, |project, cx| project.absolute_path(&path, cx))?
- .ok_or_else(|| anyhow::anyhow!("Failed to find the absolute path"))?;
+ .with_context(|| format!("finding the absolute path of {path:?}"))?;
// todo: watch for changes to the file
let file_content = fs.load(&abs_path.as_path()).await?;
@@ -51,8 +51,8 @@ impl ImageView {
image::ImageFormat::WebP => ImageFormat::Webp,
image::ImageFormat::Tiff => ImageFormat::Tiff,
image::ImageFormat::Bmp => ImageFormat::Bmp,
- _ => {
- return Err(anyhow::anyhow!("unsupported image format"));
+ format => {
+ anyhow::bail!("unsupported image format {format:?}");
}
};
@@ -107,7 +107,7 @@ pub fn run(
let kernel_specification = store
.read(cx)
.active_kernelspec(project_path.worktree_id, Some(language.clone()), cx)
- .ok_or_else(|| anyhow::anyhow!("No kernel found for language: {}", language.name()))?;
+ .with_context(|| format!("No kernel found for language: {}", language.name()))?;
let fs = store.read(cx).fs().clone();
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use collections::HashMap;
use command_palette_hooks::CommandPaletteFilter;
use gpui::{App, Context, Entity, EntityId, Global, Subscription, Task, prelude::*};
@@ -125,7 +125,7 @@ impl ReplStore {
cx.spawn(async move |this, cx| {
let kernel_specifications = kernel_specifications
.await
- .map_err(|e| anyhow::anyhow!("Failed to get python kernelspecs: {:?}", e))?;
+ .context("getting python kernelspecs")?;
this.update(cx, |this, cx| {
this.kernel_specifications_for_worktree
@@ -6,6 +6,7 @@ use crate::{
kernels::{Kernel, KernelSpecification, NativeRunningKernel},
outputs::{ExecutionStatus, ExecutionView},
};
+use anyhow::Context as _;
use collections::{HashMap, HashSet};
use editor::{
Anchor, AnchorRangeExt as _, Editor, MultiBuffer, ToPoint,
@@ -57,13 +58,8 @@ impl EditorBlock {
on_close: CloseBlockFn,
cx: &mut Context<Session>,
) -> anyhow::Result<Self> {
- let editor = editor
- .upgrade()
- .ok_or_else(|| anyhow::anyhow!("editor is not open"))?;
- let workspace = editor
- .read(cx)
- .workspace()
- .ok_or_else(|| anyhow::anyhow!("workspace dropped"))?;
+ let editor = editor.upgrade().context("editor is not open")?;
+ let workspace = editor.read(cx).workspace().context("workspace dropped")?;
let execution_view = cx.new(|cx| ExecutionView::new(status, workspace.downgrade(), cx));
@@ -220,7 +220,7 @@ impl http_client::HttpClient for ReqwestClient {
req: http::Request<http_client::AsyncBody>,
) -> futures::future::BoxFuture<
'static,
- Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>,
+ anyhow::Result<http_client::Response<http_client::AsyncBody>>,
> {
let (parts, body) = req.into_parts();
@@ -4,12 +4,8 @@ use futures::{SinkExt as _, StreamExt as _};
pub struct Connection {
pub(crate) tx:
Box<dyn 'static + Send + Unpin + futures::Sink<WebSocketMessage, Error = anyhow::Error>>,
- pub(crate) rx: Box<
- dyn 'static
- + Send
- + Unpin
- + futures::Stream<Item = Result<WebSocketMessage, anyhow::Error>>,
- >,
+ pub(crate) rx:
+ Box<dyn 'static + Send + Unpin + futures::Stream<Item = anyhow::Result<WebSocketMessage>>>,
}
impl Connection {
@@ -19,7 +15,7 @@ impl Connection {
+ Send
+ Unpin
+ futures::Sink<WebSocketMessage, Error = anyhow::Error>
- + futures::Stream<Item = Result<WebSocketMessage, anyhow::Error>>,
+ + futures::Stream<Item = anyhow::Result<WebSocketMessage>>,
{
let (tx, rx) = stream.split();
Self {
@@ -28,7 +24,7 @@ impl Connection {
}
}
- pub async fn send(&mut self, message: WebSocketMessage) -> Result<(), anyhow::Error> {
+ pub async fn send(&mut self, message: WebSocketMessage) -> anyhow::Result<()> {
self.tx.send(message).await
}
@@ -56,7 +52,7 @@ impl Connection {
executor: gpui::BackgroundExecutor,
) -> (
Box<dyn Send + Unpin + futures::Sink<WebSocketMessage, Error = anyhow::Error>>,
- Box<dyn Send + Unpin + futures::Stream<Item = Result<WebSocketMessage, anyhow::Error>>>,
+ Box<dyn Send + Unpin + futures::Stream<Item = anyhow::Result<WebSocketMessage>>>,
) {
use anyhow::anyhow;
use futures::channel::mpsc;
@@ -2,7 +2,6 @@
pub use ::proto::*;
-use anyhow::anyhow;
use async_tungstenite::tungstenite::Message as WebSocketMessage;
use futures::{SinkExt as _, StreamExt as _};
use proto::Message as _;
@@ -40,7 +39,7 @@ impl<S> MessageStream<S>
where
S: futures::Sink<WebSocketMessage, Error = anyhow::Error> + Unpin,
{
- pub async fn write(&mut self, message: Message) -> Result<(), anyhow::Error> {
+ pub async fn write(&mut self, message: Message) -> anyhow::Result<()> {
#[cfg(any(test, feature = "test-support"))]
const COMPRESSION_LEVEL: i32 = -7;
@@ -81,9 +80,9 @@ where
impl<S> MessageStream<S>
where
- S: futures::Stream<Item = Result<WebSocketMessage, anyhow::Error>> + Unpin,
+ S: futures::Stream<Item = anyhow::Result<WebSocketMessage>> + Unpin,
{
- pub async fn read(&mut self) -> Result<(Message, Instant), anyhow::Error> {
+ pub async fn read(&mut self) -> anyhow::Result<(Message, Instant)> {
while let Some(bytes) = self.stream.next().await {
let received_at = Instant::now();
match bytes? {
@@ -102,7 +101,7 @@ where
_ => {}
}
}
- Err(anyhow!("connection closed"))
+ anyhow::bail!("connection closed");
}
}
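As in the `read` loop above, `anyhow::bail!(msg)` expands to `return Err(anyhow!(msg))`, so it can stand in both for explicit `return Err(...)` statements and for a trailing `Err(...)` expression, provided an early return is acceptable at that point. A small sketch of the two spellings (the `drain_*` functions are hypothetical):

```rust
use anyhow::{anyhow, Result};

// Old style: construct and return the error as the tail expression.
fn drain_old(mut items: Vec<u32>) -> Result<u32> {
    while let Some(item) = items.pop() {
        if item != 0 {
            return Ok(item);
        }
    }
    Err(anyhow!("no non-zero items"))
}

// New style: bail! expands to `return Err(anyhow!(...))`.
fn drain_new(mut items: Vec<u32>) -> Result<u32> {
    while let Some(item) = items.pop() {
        if item != 0 {
            return Ok(item);
        }
    }
    anyhow::bail!("no non-zero items")
}

fn main() {
    assert!(drain_old(vec![0]).is_err());
    assert!(drain_new(vec![0]).is_err());
    assert_eq!(drain_new(vec![0, 7]).unwrap(), 7);
}
```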
@@ -113,7 +112,7 @@ mod tests {
#[gpui::test]
async fn test_buffer_size() {
let (tx, rx) = futures::channel::mpsc::unbounded();
- let mut sink = MessageStream::new(tx.sink_map_err(|_| anyhow!("")));
+ let mut sink = MessageStream::new(tx.sink_map_err(|_| anyhow::anyhow!("")));
sink.write(Message::Envelope(Envelope {
payload: Some(envelope::Payload::UpdateWorktree(UpdateWorktree {
root_name: "abcdefg".repeat(10),
@@ -197,7 +197,7 @@ impl Peer {
}
_ = create_timer(WRITE_TIMEOUT).fuse() => {
tracing::trace!(%connection_id, "outgoing rpc message: writing timed out");
- Err(anyhow!("timed out writing message"))?;
+ anyhow::bail!("timed out writing message");
}
}
}
@@ -217,7 +217,7 @@ impl Peer {
}
_ = create_timer(WRITE_TIMEOUT).fuse() => {
tracing::trace!(%connection_id, "keepalive interval: pinging timed out");
- Err(anyhow!("timed out sending keepalive"))?;
+ anyhow::bail!("timed out sending keepalive");
}
}
}
@@ -240,7 +240,7 @@ impl Peer {
},
_ = create_timer(WRITE_TIMEOUT).fuse() => {
tracing::trace!(%connection_id, "incoming rpc message: processing timed out");
- Err(anyhow!("timed out processing incoming message"))?
+ anyhow::bail!("timed out processing incoming message");
}
}
}
@@ -248,7 +248,7 @@ impl Peer {
},
_ = receive_timeout => {
tracing::trace!(%connection_id, "receive timeout: delay between messages too long");
- Err(anyhow!("delay between messages too long"))?
+ anyhow::bail!("delay between messages too long");
}
}
}
@@ -441,7 +441,7 @@ impl Peer {
sender_id: receiver_id.into(),
original_sender_id: response.original_sender_id,
payload: T::Response::from_envelope(response)
- .ok_or_else(|| anyhow!("received response of the wrong type"))?,
+ .context("received response of the wrong type")?,
received_at,
})
}
@@ -465,18 +465,17 @@ impl Peer {
.response_channels
.lock()
.as_mut()
- .ok_or_else(|| anyhow!("connection was closed"))?
+ .context("connection was closed")?
.insert(envelope.id, tx);
connection
.outgoing_tx
.unbounded_send(Message::Envelope(envelope))
- .map_err(|_| anyhow!("connection was closed"))?;
+ .context("connection was closed")?;
Ok(())
});
async move {
send?;
- let (response, received_at, _barrier) =
- rx.await.map_err(|_| anyhow!("connection was closed"))?;
+ let (response, received_at, _barrier) = rx.await.context("connection was closed")?;
if let Some(proto::envelope::Payload::Error(error)) = &response.payload {
return Err(RpcError::from_proto(error, type_name));
}
@@ -496,14 +495,14 @@ impl Peer {
stream_response_channels
.lock()
.as_mut()
- .ok_or_else(|| anyhow!("connection was closed"))?
+ .context("connection was closed")?
.insert(message_id, tx);
connection
.outgoing_tx
.unbounded_send(Message::Envelope(
request.into_envelope(message_id, None, None),
))
- .map_err(|_| anyhow!("connection was closed"))?;
+ .context("connection was closed")?;
Ok((message_id, stream_response_channels))
});
@@ -530,7 +529,7 @@ impl Peer {
} else {
Some(
T::Response::from_envelope(response)
- .ok_or_else(|| anyhow!("received response of the wrong type")),
+ .context("received response of the wrong type"),
)
}
}
@@ -662,7 +661,7 @@ impl Peer {
let connections = self.connections.read();
let connection = connections
.get(&connection_id)
- .ok_or_else(|| anyhow!("no such connection: {}", connection_id))?;
+ .with_context(|| format!("no such connection: {connection_id}"))?;
Ok(connection.clone())
}
}
@@ -1,4 +1,4 @@
-use anyhow::anyhow;
+use anyhow::Context;
use collections::HashMap;
use futures::{
Future, FutureExt as _,
@@ -190,7 +190,7 @@ impl AnyProtoClient {
let response = self.0.request(envelope, T::NAME);
async move {
T::Response::from_envelope(response.await?)
- .ok_or_else(|| anyhow!("received response of the wrong type"))
+ .context("received response of the wrong type")
}
}
@@ -3,7 +3,7 @@ use crate::{
embedding::{Embedding, EmbeddingProvider, TextToEmbed},
indexing::{IndexingEntryHandle, IndexingEntrySet},
};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::Bound;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
@@ -422,7 +422,7 @@ impl EmbeddingIndex {
.context("failed to create read transaction")?;
Ok(db
.get(&tx, &db_key_for_path(&path))?
- .ok_or_else(|| anyhow!("no such path"))?
+ .context("no such path")?
.chunks
.clone())
})
@@ -282,11 +282,10 @@ impl ProjectIndex {
.collect();
let query_embeddings = embedding_provider.embed(&queries[..]).await?;
- if query_embeddings.len() != queries.len() {
- return Err(anyhow!(
- "The number of query embeddings does not match the number of queries"
- ));
- }
+ anyhow::ensure!(
+ query_embeddings.len() == queries.len(),
+ "The number of query embeddings does not match the number of queries"
+ );
let mut results_by_worker = Vec::new();
for _ in 0..cx.background_executor().num_cpus() {
@@ -264,7 +264,6 @@ impl Drop for SemanticDb {
#[cfg(test)]
mod tests {
use super::*;
- use anyhow::anyhow;
use chunking::Chunk;
use embedding_index::{ChunkedFile, EmbeddingIndex};
use feature_flags::FeatureFlagAppExt;
@@ -446,15 +445,15 @@ mod tests {
cx.executor().allow_parking();
let provider = Arc::new(TestEmbeddingProvider::new(3, |text| {
- if text.contains('g') {
- Err(anyhow!("cannot embed text containing a 'g' character"))
- } else {
- Ok(Embedding::new(
- ('a'..='z')
- .map(|char| text.chars().filter(|c| *c == char).count() as f32)
- .collect(),
- ))
- }
+ anyhow::ensure!(
+ !text.contains('g'),
+ "cannot embed text containing a 'g' character"
+ );
+ Ok(Embedding::new(
+ ('a'..='z')
+ .map(|char| text.chars().filter(|c| *c == char).count() as f32)
+ .collect(),
+ ))
}));
let (indexing_progress_tx, _) = channel::unbounded();
@@ -543,7 +543,7 @@ impl SummaryIndex {
.find(|model| &model.id() == &summary_model_id)
else {
return cx.background_spawn(async move {
- Err(anyhow!("Couldn't find the preferred summarization model ({:?}) in the language registry's available models", summary_model_id))
+ anyhow::bail!("Couldn't find the preferred summarization model ({summary_model_id:?}) in the language registry's available models")
});
};
let utf8_path = path.to_string_lossy();
@@ -7,7 +7,7 @@ use std::{
str::FromStr,
};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use serde::{Deserialize, Serialize, de::Error};
/// A [semantic version](https://semver.org/) number.
@@ -54,15 +54,15 @@ impl FromStr for SemanticVersion {
let mut components = s.trim().split('.');
let major = components
.next()
- .ok_or_else(|| anyhow!("missing major version number"))?
+ .context("missing major version number")?
.parse()?;
let minor = components
.next()
- .ok_or_else(|| anyhow!("missing minor version number"))?
+ .context("missing minor version number")?
.parse()?;
let patch = components
.next()
- .ok_or_else(|| anyhow!("missing patch version number"))?
+ .context("missing patch version number")?
.parse()?;
Ok(Self {
major,
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use collections::{BTreeMap, HashMap, IndexMap};
use fs::Fs;
use gpui::{
@@ -154,12 +154,12 @@ impl KeymapFile {
pub fn load_asset(asset_path: &str, cx: &App) -> anyhow::Result<Vec<KeyBinding>> {
match Self::load(asset_str::<SettingsAssets>(asset_path).as_ref(), cx) {
KeymapFileLoadResult::Success { key_bindings } => Ok(key_bindings),
- KeymapFileLoadResult::SomeFailedToLoad { error_message, .. } => Err(anyhow!(
- "Error loading built-in keymap \"{asset_path}\": {error_message}",
- )),
- KeymapFileLoadResult::JsonParseFailure { error } => Err(anyhow!(
- "JSON parse error in built-in keymap \"{asset_path}\": {error}"
- )),
+ KeymapFileLoadResult::SomeFailedToLoad { error_message, .. } => {
+ anyhow::bail!("Error loading built-in keymap \"{asset_path}\": {error_message}")
+ }
+ KeymapFileLoadResult::JsonParseFailure { error } => {
+ anyhow::bail!("JSON parse error in built-in keymap \"{asset_path}\": {error}")
+ }
}
}
@@ -173,14 +173,14 @@ impl KeymapFile {
key_bindings,
error_message,
..
- } if key_bindings.is_empty() => Err(anyhow!(
- "Error loading built-in keymap \"{asset_path}\": {error_message}",
- )),
+ } if key_bindings.is_empty() => {
+ anyhow::bail!("Error loading built-in keymap \"{asset_path}\": {error_message}")
+ }
KeymapFileLoadResult::Success { key_bindings, .. }
| KeymapFileLoadResult::SomeFailedToLoad { key_bindings, .. } => Ok(key_bindings),
- KeymapFileLoadResult::JsonParseFailure { error } => Err(anyhow!(
- "JSON parse error in built-in keymap \"{asset_path}\": {error}"
- )),
+ KeymapFileLoadResult::JsonParseFailure { error } => {
+ anyhow::bail!("JSON parse error in built-in keymap \"{asset_path}\": {error}")
+ }
}
}
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::{BTreeMap, HashMap, btree_map, hash_map};
use ec4rs::{ConfigParser, PropertiesSource, Section};
use fs::Fs;
@@ -635,13 +635,10 @@ impl SettingsStore {
cx: &mut App,
) -> Result<()> {
let settings: Value = parse_json_with_comments(default_settings_content)?;
- if settings.is_object() {
- self.raw_default_settings = settings;
- self.recompute_values(None, cx)?;
- Ok(())
- } else {
- Err(anyhow!("settings must be an object"))
- }
+ anyhow::ensure!(settings.is_object(), "settings must be an object");
+ self.raw_default_settings = settings;
+ self.recompute_values(None, cx)?;
+ Ok(())
}
/// Sets the user settings via a JSON string.
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use smallvec::SmallVec;
use std::{collections::BTreeMap, ops::Range};
@@ -114,7 +114,7 @@ fn parse_tabstop<'a>(
if source.starts_with('}') {
source = &source[1..];
} else {
- return Err(anyhow!("expected a closing brace"));
+ anyhow::bail!("expected a closing brace");
}
} else {
let (index, rest) = parse_int(source)?;
@@ -137,9 +137,7 @@ fn parse_int(source: &str) -> Result<(usize, &str)> {
let len = source
.find(|c: char| !c.is_ascii_digit())
.unwrap_or(source.len());
- if len == 0 {
- return Err(anyhow!("expected an integer"));
- }
+ anyhow::ensure!(len > 0, "expected an integer");
let (prefix, suffix) = source.split_at(len);
Ok((prefix.parse()?, suffix))
}
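`anyhow::ensure!(cond, msg)` is the guard-clause counterpart used in the hunk above: when the condition is false it returns `Err(anyhow!(msg))`, which is exactly what the hand-written `if ... { return Err(anyhow!(...)) }` blocks were spelling out. A sketch under the same assumptions (the `parse_port` helper is invented for the example):

```rust
use anyhow::Result;

// Hypothetical parser showing ensure! as a guard clause.
fn parse_port(input: &str) -> Result<u16> {
    anyhow::ensure!(!input.trim().is_empty(), "expected a port number");
    let port: u16 = input.trim().parse()?;
    anyhow::ensure!(port != 0, "port 0 is reserved");
    Ok(port)
}

fn main() {
    assert_eq!(parse_port("8080").unwrap(), 8080);
    assert!(parse_port("  ").is_err());
    assert!(parse_port("0").is_err());
}
```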
@@ -180,11 +178,10 @@ fn parse_choices<'a>(
Some(_) => {
let chunk_end = source.find([',', '|', '\\']);
- if chunk_end.is_none() {
- return Err(anyhow!(
- "Placeholder choice doesn't contain closing pipe-character '|'"
- ));
- }
+ anyhow::ensure!(
+ chunk_end.is_some(),
+ "Placeholder choice doesn't contain closing pipe-character '|'"
+ );
let (chunk, rest) = source.split_at(chunk_end.unwrap());
@@ -6,7 +6,7 @@ use std::{
ptr,
};
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use libsqlite3_sys::*;
pub struct Connection {
@@ -199,11 +199,7 @@ impl Connection {
)
};
- Err(anyhow!(
- "Sqlite call failed with code {} and message: {:?}",
- code as isize,
- message
- ))
+ anyhow::bail!("Sqlite call failed with code {code} and message: {message:?}")
}
}
@@ -6,7 +6,7 @@
use std::ffi::CString;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use indoc::{formatdoc, indoc};
use libsqlite3_sys::sqlite3_exec;
@@ -69,14 +69,14 @@ impl Connection {
// Migration already run. Continue
continue;
} else {
- return Err(anyhow!(formatdoc! {"
- Migration changed for {} at step {}
+ anyhow::bail!(formatdoc! {"
+ Migration changed for {domain} at step {index}
Stored migration:
- {}
+ {completed_migration}
Proposed migration:
- {}", domain, index, completed_migration, migration}));
+ {migration}"});
}
}
@@ -78,7 +78,7 @@ mod tests {
assert!(
connection
- .with_savepoint("second", || -> Result<Option<()>, anyhow::Error> {
+ .with_savepoint("second", || -> anyhow::Result<Option<()>> {
connection.exec_bound("INSERT INTO text(text, idx) VALUES (?, ?)")?((
save2_text, 2,
))?;
@@ -2,7 +2,7 @@ use std::ffi::{CStr, CString, c_int};
use std::marker::PhantomData;
use std::{ptr, slice, str};
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use libsqlite3_sys::*;
use crate::bindable::{Bind, Column};
@@ -126,7 +126,7 @@ impl<'a> Statement<'a> {
if any_succeed {
Ok(())
} else {
- Err(anyhow!("Failed to bind parameters"))
+ anyhow::bail!("Failed to bind parameters")
}
}
@@ -261,7 +261,7 @@ impl<'a> Statement<'a> {
SQLITE_TEXT => Ok(SqlType::Text),
SQLITE_BLOB => Ok(SqlType::Blob),
SQLITE_NULL => Ok(SqlType::Null),
- _ => Err(anyhow!("Column type returned was incorrect ")),
+ _ => anyhow::bail!("Column type returned was incorrect"),
}
}
@@ -282,7 +282,7 @@ impl<'a> Statement<'a> {
self.step()
}
}
- SQLITE_MISUSE => Err(anyhow!("Statement step returned SQLITE_MISUSE")),
+ SQLITE_MISUSE => anyhow::bail!("Statement step returned SQLITE_MISUSE"),
_other_error => {
self.connection.last_error()?;
unreachable!("Step returned error code and last error failed to catch it");
@@ -328,16 +328,16 @@ impl<'a> Statement<'a> {
callback: impl FnOnce(&mut Statement) -> Result<R>,
) -> Result<R> {
println!("{:?}", std::any::type_name::<R>());
- if this.step()? != StepResult::Row {
- return Err(anyhow!("single called with query that returns no rows."));
- }
+ anyhow::ensure!(
+ this.step()? == StepResult::Row,
+ "single called with a query that returns no rows."
+ );
let result = callback(this)?;
- if this.step()? != StepResult::Done {
- return Err(anyhow!(
- "single called with a query that returns more than one row."
- ));
- }
+ anyhow::ensure!(
+ this.step()? == StepResult::Done,
+ "single called with a query that returns more than one row."
+ );
Ok(result)
}
@@ -366,11 +366,10 @@ impl<'a> Statement<'a> {
.map(|r| Some(r))
.context("Failed to parse row result")?;
- if this.step().context("Second step call")? != StepResult::Done {
- return Err(anyhow!(
- "maybe called with a query that returns more than one row."
- ));
- }
+ anyhow::ensure!(
+ this.step().context("Second step call")? == StepResult::Done,
+ "maybe called with a query that returns more than one row."
+ );
Ok(result)
}
@@ -1,6 +1,6 @@
use std::borrow::Cow;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use gpui::{AssetSource, SharedString};
use rust_embed::RustEmbed;
@@ -19,7 +19,7 @@ impl AssetSource for Assets {
fn load(&self, path: &str) -> Result<Option<Cow<'static, [u8]>>> {
Self::get(path)
.map(|f| f.data)
- .ok_or_else(|| anyhow!("could not find asset at path \"{}\"", path))
+ .with_context(|| format!("could not find asset at path {path:?}"))
.map(Some)
}
@@ -2,7 +2,6 @@ use std::str::FromStr;
use std::sync::OnceLock;
use crate::stories::*;
-use anyhow::anyhow;
use clap::ValueEnum;
use clap::builder::PossibleValue;
use gpui::AnyView;
@@ -90,7 +89,7 @@ impl FromStr for StorySelector {
return Ok(Self::Component(component_story));
}
- Err(anyhow!("story not found for '{raw_story_name}'"))
+ anyhow::bail!("story not found for '{raw_story_name}'")
}
}
@@ -129,7 +129,7 @@ impl Render for StoryWrapper {
}
}
-fn load_embedded_fonts(cx: &App) -> gpui::Result<()> {
+fn load_embedded_fonts(cx: &App) -> anyhow::Result<()> {
let font_paths = cx.asset_source().list("fonts")?;
let mut embedded_fonts = Vec::new();
for font_path in font_paths {
@@ -91,7 +91,7 @@ impl SupermavenAdminApi {
if error.message == "User not found" {
return Ok(None);
} else {
- return Err(anyhow!("Supermaven API error: {}", error.message));
+ anyhow::bail!("Supermaven API error: {}", error.message);
}
} else if response.status().is_server_error() {
let error: SupermavenApiError = serde_json::from_slice(&body)?;
@@ -155,7 +155,7 @@ impl SupermavenAdminApi {
if error.message == "User not found" {
return Ok(());
} else {
- return Err(anyhow!("Supermaven API error: {}", error.message));
+ anyhow::bail!("Supermaven API error: {}", error.message);
}
} else if response.status().is_server_error() {
let error: SupermavenApiError = serde_json::from_slice(&body)?;
@@ -204,7 +204,7 @@ pub async fn latest_release(
if response.status().is_client_error() || response.status().is_server_error() {
let body_str = std::str::from_utf8(&body)?;
let error: SupermavenApiError = serde_json::from_str(body_str)?;
- return Err(anyhow!("Supermaven API error: {}", error.message));
+ anyhow::bail!("Supermaven API error: {}", error.message);
}
serde_json::from_slice::<SupermavenDownloadResponse>(&body)
@@ -239,13 +239,13 @@ pub async fn get_supermaven_agent_path(client: Arc<dyn HttpClient>) -> Result<Pa
"macos" => "darwin",
"windows" => "windows",
"linux" => "linux",
- _ => return Err(anyhow!("unsupported platform")),
+ unsupported => anyhow::bail!("unsupported platform {unsupported}"),
};
let arch = match std::env::consts::ARCH {
"x86_64" => "amd64",
"aarch64" => "arm64",
- _ => return Err(anyhow!("unsupported architecture")),
+ unsupported => anyhow::bail!("unsupported architecture {unsupported}"),
};
let download_info = latest_release(client.clone(), platform, arch).await?;
@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{Context as _, Result};
use collections::FxHashMap;
use gpui::SharedString;
use schemars::{JsonSchema, r#gen::SchemaSettings};
@@ -147,9 +147,7 @@ impl DebugRequest {
}
pub fn from_proto(val: proto::DebugRequest) -> Result<DebugRequest> {
- let request = val
- .request
- .ok_or_else(|| anyhow::anyhow!("Missing debug request"))?;
+ let request = val.request.context("Missing debug request")?;
match request {
proto::debug_request::Request::DebugLaunchRequest(proto::DebugLaunchRequest {
program,
@@ -1,4 +1,4 @@
-use anyhow::{Context, bail};
+use anyhow::{Context as _, bail};
use collections::{HashMap, HashSet};
use schemars::{JsonSchema, r#gen::SchemaSettings};
use serde::{Deserialize, Serialize};
@@ -1,6 +1,6 @@
use std::path::PathBuf;
-use anyhow::anyhow;
+use anyhow::Context as _;
use collections::HashMap;
use gpui::SharedString;
use serde::Deserialize;
@@ -53,9 +53,9 @@ impl VsCodeDebugTaskDefinition {
request: match self.request {
Request::Launch => {
let cwd = self.cwd.map(|cwd| PathBuf::from(replacer.replace(&cwd)));
- let program = self.program.ok_or_else(|| {
- anyhow!("vscode debug launch configuration does not define a program")
- })?;
+ let program = self
+ .program
+ .context("vscode debug launch configuration does not define a program")?;
let program = replacer.replace(&program);
let args = self
.args
@@ -1668,7 +1668,7 @@ impl SerializableItem for TerminalView {
alive_items: Vec<workspace::ItemId>,
_window: &mut Window,
cx: &mut App,
- ) -> Task<gpui::Result<()>> {
+ ) -> Task<anyhow::Result<()>> {
delete_unloaded_items(alive_items, workspace_id, "terminals", &TERMINAL_DB, cx)
}
@@ -1679,7 +1679,7 @@ impl SerializableItem for TerminalView {
_closing: bool,
_: &mut Window,
cx: &mut Context<Self>,
- ) -> Option<Task<gpui::Result<()>>> {
+ ) -> Option<Task<anyhow::Result<()>>> {
let terminal = self.terminal().read(cx);
if terminal.task().is_some() {
return None;
@@ -11,7 +11,7 @@ mod tests;
mod undo_map;
pub use anchor::*;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use clock::LOCAL_BRANCH_REPLICA_ID;
pub use clock::ReplicaId;
use collections::{HashMap, HashSet};
@@ -1586,7 +1586,7 @@ impl Buffer {
async move {
for mut future in futures {
if future.recv().await.is_none() {
- Err(anyhow!("gave up waiting for edits"))?;
+ anyhow::bail!("gave up waiting for edits");
}
}
Ok(())
@@ -1615,7 +1615,7 @@ impl Buffer {
async move {
for mut future in futures {
if future.recv().await.is_none() {
- Err(anyhow!("gave up waiting for anchors"))?;
+ anyhow::bail!("gave up waiting for anchors");
}
}
Ok(())
@@ -1635,7 +1635,7 @@ impl Buffer {
async move {
if let Some(mut rx) = rx {
if rx.recv().await.is_none() {
- Err(anyhow!("gave up waiting for version"))?;
+ anyhow::bail!("gave up waiting for version");
}
}
Ok(())
@@ -288,7 +288,7 @@ impl TryFrom<StaticColorScaleSet> for ColorScaleSet {
type Error = anyhow::Error;
fn try_from(value: StaticColorScaleSet) -> Result<Self, Self::Error> {
- fn to_color_scale(scale: StaticColorScale) -> Result<ColorScale, anyhow::Error> {
+ fn to_color_scale(scale: StaticColorScale) -> anyhow::Result<ColorScale> {
scale
.into_iter()
.map(|color| Rgba::try_from(color).map(Hsla::from))
@@ -1,6 +1,6 @@
use std::borrow::Cow;
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use gpui::{AssetSource, SharedString};
use rust_embed::RustEmbed;
@@ -14,7 +14,7 @@ impl AssetSource for Assets {
fn load(&self, path: &str) -> Result<Option<Cow<'static, [u8]>>> {
Self::get(path)
.map(|f| f.data)
- .ok_or_else(|| anyhow!("could not find asset at path \"{}\"", path))
+ .with_context(|| format!("could not find asset at path {path:?}"))
.map(Some)
}
@@ -34,7 +34,7 @@ pub trait PathExt {
}
#[cfg(windows)]
{
- use anyhow::anyhow;
+ use anyhow::Context as _;
use tendril::fmt::{Format, WTF8};
WTF8::validate(bytes)
.then(|| {
@@ -43,7 +43,7 @@ pub trait PathExt {
OsStr::from_encoded_bytes_unchecked(bytes)
}))
})
- .ok_or_else(|| anyhow!("Invalid WTF-8 sequence: {bytes:?}"))
+ .with_context(|| format!("Invalid WTF-8 sequence: {bytes:?}"))
}
}
}
@@ -27,7 +27,7 @@ use std::{
use unicase::UniCase;
#[cfg(unix)]
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
pub use take_until::*;
#[cfg(any(test, feature = "test-support"))]
@@ -335,9 +335,7 @@ pub fn load_login_shell_environment() -> Result<()> {
)
.output()
.context("failed to spawn login shell to source login environment variables")?;
- if !output.status.success() {
- Err(anyhow!("login shell exited with error"))?;
- }
+ anyhow::ensure!(output.status.success(), "login shell exited with error");
let stdout = String::from_utf8_lossy(&output.stdout);
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use collections::HashMap;
use command_palette_hooks::CommandInterceptResult;
use editor::{
@@ -675,10 +675,10 @@ impl Position {
let Some(Mark::Local(anchors)) =
vim.get_mark(&name.to_string(), editor, window, cx)
else {
- return Err(anyhow!("mark {} not set", name));
+ anyhow::bail!("mark {name} not set");
};
let Some(mark) = anchors.last() else {
- return Err(anyhow!("mark {} contains empty anchors", name));
+ anyhow::bail!("mark {name} contains empty anchors");
};
mark.to_point(&snapshot.buffer_snapshot)
.row
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use client::Client;
use futures::AsyncReadExt as _;
use gpui::{App, AppContext, Context, Entity, Subscription, Task};
@@ -96,9 +96,9 @@ async fn perform_web_search(
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- return Err(anyhow!(
+ anyhow::bail!(
"error performing web search.\nStatus: {:?}\nBody: {body}",
response.status(),
- ));
+ );
}
}
@@ -1316,7 +1316,7 @@ pub mod test {
_project: &Entity<Project>,
_path: &ProjectPath,
_cx: &mut App,
- ) -> Option<Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<Task<anyhow::Result<Entity<Self>>>> {
None
}
fn entry_id(&self, _: &App) -> Option<ProjectEntryId> {
@@ -1,4 +1,5 @@
use crate::{SuppressNotification, Toast, Workspace};
+use anyhow::Context as _;
use gpui::{
AnyView, App, AppContext as _, AsyncWindowContext, ClickEvent, ClipboardItem, Context,
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, Render, ScrollHandle,
@@ -239,9 +240,9 @@ impl LanguageServerPrompt {
});
potential_future? // App Closed
- .ok_or_else(|| anyhow::anyhow!("Response already sent"))?
+ .context("Response already sent")?
.await
- .ok_or_else(|| anyhow::anyhow!("Stream already closed"))?;
+ .context("Stream already closed")?;
this.update(cx, |_, cx| cx.emit(DismissEvent))?;
@@ -3,7 +3,7 @@ use crate::{
pane_group::element::pane_axis,
workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical},
};
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use call::{ActiveCall, ParticipantLocation};
use collections::HashMap;
use gpui::{
@@ -58,7 +58,7 @@ impl PaneGroup {
self.root = Member::new_axis(old_pane.clone(), new_pane.clone(), direction);
Ok(())
} else {
- Err(anyhow!("Pane not found"))
+ anyhow::bail!("Pane not found");
}
}
Member::Axis(axis) => axis.split(old_pane, new_pane, direction),
@@ -538,7 +538,7 @@ impl PaneAxis {
}
}
}
- Err(anyhow!("Pane not found"))
+ anyhow::bail!("Pane not found");
}
fn remove(&mut self, pane_to_remove: &Entity<Pane>) -> Result<Option<Member>> {
@@ -579,7 +579,7 @@ impl PaneAxis {
Ok(None)
}
} else {
- Err(anyhow!("Pane not found"))
+ anyhow::bail!("Pane not found");
}
}
@@ -8,7 +8,7 @@ use std::{
sync::Arc,
};
-use anyhow::{Context, Result, anyhow, bail};
+use anyhow::{Context as _, Result, bail};
use client::DevServerProjectId;
use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size};
@@ -181,7 +181,7 @@ impl Column for BreakpointStateWrapper<'_> {
match state {
0 => Ok((BreakpointState::Enabled.into(), start_index + 1)),
1 => Ok((BreakpointState::Disabled.into(), start_index + 1)),
- _ => Err(anyhow::anyhow!("Invalid BreakpointState discriminant")),
+ _ => anyhow::bail!("Invalid BreakpointState discriminant {state}"),
}
}
}
@@ -914,7 +914,7 @@ impl WorkspaceDb {
log::debug!("Inserting SSH project at host {host}");
self.insert_ssh_project(host, port, paths, user)
.await?
- .ok_or_else(|| anyhow!("failed to insert ssh project"))
+ .context("failed to insert ssh project")
}
}
@@ -1244,7 +1244,7 @@ impl WorkspaceDb {
*axis,
flex_string,
))?
- .ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;
+ .context("Couldn't retrieve group_id from inserted pane_group")?;
for (position, group) in children.iter().enumerate() {
Self::save_pane_group(conn, workspace_id, group, Some((group_id, position)))?
@@ -1270,7 +1270,7 @@ impl WorkspaceDb {
VALUES (?, ?, ?)
RETURNING pane_id
))?((workspace_id, pane.active, pane.pinned_count))?
- .ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?;
+ .context("Could not retrieve inserted pane_id")?;
let (parent_id, order) = parent.unzip();
conn.exec_bound(sql!(
@@ -1296,7 +1296,7 @@ impl Workspace {
) -> Task<
anyhow::Result<(
WindowHandle<Workspace>,
- Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>,
+ Vec<Option<anyhow::Result<Box<dyn ItemHandle>>>>,
)>,
> {
let project_handle = Project::local(
@@ -2187,7 +2187,7 @@ impl Workspace {
}
*keystrokes.borrow_mut() = Default::default();
- Err(anyhow!("over 100 keystrokes passed to send_keystrokes"))
+ anyhow::bail!("over 100 keystrokes passed to send_keystrokes");
})
.detach_and_log_err(cx);
}
@@ -2324,7 +2324,7 @@ impl Workspace {
pane: Option<WeakEntity<Pane>>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>> {
+ ) -> Task<Vec<Option<anyhow::Result<Box<dyn ItemHandle>>>>> {
log::info!("open paths {abs_paths:?}");
let fs = self.app_state.fs.clone();
@@ -3076,7 +3076,7 @@ impl Workspace {
focus_item: bool,
window: &mut Window,
cx: &mut App,
- ) -> Task<Result<Box<dyn ItemHandle>, anyhow::Error>> {
+ ) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
self.open_path_preview(path, pane, focus_item, false, true, window, cx)
}
@@ -3089,7 +3089,7 @@ impl Workspace {
activate: bool,
window: &mut Window,
cx: &mut App,
- ) -> Task<Result<Box<dyn ItemHandle>, anyhow::Error>> {
+ ) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
let pane = pane.unwrap_or_else(|| {
self.last_active_center_pane.clone().unwrap_or_else(|| {
self.panes
@@ -3127,7 +3127,7 @@ impl Workspace {
path: impl Into<ProjectPath>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Result<Box<dyn ItemHandle>, anyhow::Error>> {
+ ) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
self.split_path_preview(path, false, None, window, cx)
}
@@ -3138,7 +3138,7 @@ impl Workspace {
split_direction: Option<SplitDirection>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Result<Box<dyn ItemHandle>, anyhow::Error>> {
+ ) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
let pane = self.last_active_center_pane.clone().unwrap_or_else(|| {
self.panes
.first()
@@ -3178,7 +3178,7 @@ impl Workspace {
))
})
})
- .map(|option| option.ok_or_else(|| anyhow!("pane was dropped")))?
+ .map(|option| option.context("pane was dropped"))?
})
}
@@ -3938,12 +3938,12 @@ impl Workspace {
let state = this
.follower_states
.get_mut(&leader_id)
- .ok_or_else(|| anyhow!("following interrupted"))?;
+ .context("following interrupted")?;
state.active_view_id = response
.active_view
.as_ref()
.and_then(|view| ViewId::from_proto(view.id.clone()?).ok());
- Ok::<_, anyhow::Error>(())
+ anyhow::Ok(())
})??;
if let Some(view) = response.active_view {
Self::add_view_from_leader(this.clone(), leader_peer_id, &view, cx).await?;
@@ -4286,7 +4286,7 @@ impl Workspace {
update: proto::UpdateFollowers,
cx: &mut AsyncWindowContext,
) -> Result<()> {
- match update.variant.ok_or_else(|| anyhow!("invalid update"))? {
+ match update.variant.context("invalid update")? {
proto::update_followers::Variant::CreateView(view) => {
let view_id = ViewId::from_proto(view.id.clone().context("invalid view id")?)?;
let should_add_view = this.update(cx, |this, _| {
@@ -4328,12 +4328,8 @@ impl Workspace {
}
}
proto::update_followers::Variant::UpdateView(update_view) => {
- let variant = update_view
- .variant
- .ok_or_else(|| anyhow!("missing update view variant"))?;
- let id = update_view
- .id
- .ok_or_else(|| anyhow!("missing update view id"))?;
+ let variant = update_view.variant.context("missing update view variant")?;
+ let id = update_view.id.context("missing update view id")?;
let mut tasks = Vec::new();
this.update_in(cx, |this, window, cx| {
let project = this.project.clone();
@@ -4368,7 +4364,7 @@ impl Workspace {
let this = this.upgrade().context("workspace dropped")?;
let Some(id) = view.id.clone() else {
- return Err(anyhow!("no id for view"));
+ anyhow::bail!("no id for view");
};
let id = ViewId::from_proto(id)?;
let panel_id = view.panel_id.and_then(proto::PanelId::from_i32);
@@ -4395,18 +4391,16 @@ impl Workspace {
existing_item
} else {
let variant = view.variant.clone();
- if variant.is_none() {
- Err(anyhow!("missing view variant"))?;
- }
+ anyhow::ensure!(variant.is_some(), "missing view variant");
let task = cx.update(|window, cx| {
FollowableViewRegistry::from_state_proto(this.clone(), id, variant, window, cx)
})?;
let Some(task) = task else {
- return Err(anyhow!(
+ anyhow::bail!(
"failed to construct view from leader (maybe from a different version of zed?)"
- ));
+ );
};
let mut new_item = task.await?;
@@ -5099,7 +5093,7 @@ impl Workspace {
) -> Result<()> {
self.serializable_items_tx
.unbounded_send(item)
- .map_err(|err| anyhow!("failed to send serializable item over channel: {}", err))
+ .map_err(|err| anyhow!("failed to send serializable item over channel: {err}"))
}
pub(crate) fn load_workspace(
@@ -6298,7 +6292,7 @@ impl ViewId {
creator: message
.creator
.map(CollaboratorId::PeerId)
- .ok_or_else(|| anyhow!("creator is missing"))?,
+ .context("creator is missing")?,
id: message.id,
})
}
@@ -6440,7 +6434,7 @@ async fn join_channel_internal(
// this loop will terminate within client::CONNECTION_TIMEOUT seconds.
'outer: loop {
let Some(status) = client_status.recv().await else {
- return Err(anyhow!("error connecting"));
+ anyhow::bail!("error connecting");
};
match status {
@@ -6662,7 +6656,7 @@ pub fn open_paths(
) -> Task<
anyhow::Result<(
WindowHandle<Workspace>,
- Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>,
+ Vec<Option<anyhow::Result<Box<dyn ItemHandle>>>>,
)>,
> {
let abs_paths = abs_paths.to_vec();
@@ -6824,7 +6818,7 @@ pub fn create_and_open_local_file(
.await;
let item = items.pop().flatten();
- item.ok_or_else(|| anyhow!("path {path:?} is not a file"))?
+ item.with_context(|| format!("path {path:?} is not a file"))?
})
}
@@ -6945,9 +6939,7 @@ async fn open_ssh_project_inner(
}
if project_paths_to_open.is_empty() {
- return Err(project_path_errors
- .pop()
- .unwrap_or_else(|| anyhow!("no paths given")));
+ return Err(project_path_errors.pop().context("no paths given")?);
}
cx.update_window(window.into(), |_, window, cx| {
@@ -7053,7 +7045,7 @@ pub fn join_in_room_project(
let active_call = cx.update(|cx| ActiveCall::global(cx))?;
let room = active_call
.read_with(cx, |call, _| call.room().cloned())?
- .ok_or_else(|| anyhow!("not in a call"))?;
+ .context("not in a call")?;
let project = room
.update(cx, |room, cx| {
room.join_project(
@@ -9351,7 +9343,7 @@ mod tests {
_project: &Entity<Project>,
path: &ProjectPath,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<Task<anyhow::Result<Entity<Self>>>> {
if path.path.extension().unwrap() == "png" {
Some(cx.spawn(async move |cx| cx.new(|_| TestPngItem {})))
} else {
@@ -9426,7 +9418,7 @@ mod tests {
_project: &Entity<Project>,
path: &ProjectPath,
cx: &mut App,
- ) -> Option<Task<gpui::Result<Entity<Self>>>> {
+ ) -> Option<Task<anyhow::Result<Entity<Self>>>> {
if path.path.extension().unwrap() == "ipynb" {
Some(cx.spawn(async move |cx| cx.new(|_| TestIpynbItem {})))
} else {
@@ -252,13 +252,7 @@ impl WorkDirectory {
match self {
WorkDirectory::InProject { relative_path } => Ok(path
.strip_prefix(relative_path)
- .map_err(|_| {
- anyhow!(
- "could not relativize {:?} against {:?}",
- path,
- relative_path
- )
- })?
+ .map_err(|_| anyhow!("could not relativize {path:?} against {relative_path:?}"))?
.into()),
WorkDirectory::AboveProject {
location_in_repo, ..
@@ -1093,7 +1087,7 @@ impl Worktree {
),
)
})?;
- task.ok_or_else(|| anyhow!("invalid entry"))?.await?;
+ task.context("invalid entry")?.await?;
Ok(proto::ProjectEntryResponse {
entry: None,
worktree_scan_id: scan_id as u64,
@@ -1108,7 +1102,7 @@ impl Worktree {
let task = this.update(&mut cx, |this, cx| {
this.expand_entry(ProjectEntryId::from_proto(request.entry_id), cx)
})?;
- task.ok_or_else(|| anyhow!("no such entry"))?.await?;
+ task.context("no such entry")?.await?;
let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
Ok(proto::ExpandProjectEntryResponse {
worktree_scan_id: scan_id as u64,
@@ -1123,7 +1117,7 @@ impl Worktree {
let task = this.update(&mut cx, |this, cx| {
this.expand_all_for_entry(ProjectEntryId::from_proto(request.entry_id), cx)
})?;
- task.ok_or_else(|| anyhow!("no such entry"))?.await?;
+ task.context("no such entry")?.await?;
let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
Ok(proto::ExpandAllForProjectEntryResponse {
worktree_scan_id: scan_id as u64,
@@ -1487,9 +1481,7 @@ impl LocalWorktree {
let abs_path = abs_path?;
let content = fs.load_bytes(&abs_path).await?;
- let worktree = worktree
- .upgrade()
- .ok_or_else(|| anyhow!("worktree was dropped"))?;
+ let worktree = worktree.upgrade().context("worktree was dropped")?;
let file = match entry.await? {
Some(entry) => File::for_entry(entry, worktree),
None => {
@@ -1544,9 +1536,7 @@ impl LocalWorktree {
}
let text = fs.load(&abs_path).await?;
- let worktree = this
- .upgrade()
- .ok_or_else(|| anyhow!("worktree was dropped"))?;
+ let worktree = this.upgrade().context("worktree was dropped")?;
let file = match entry.await? {
Some(entry) => File::for_entry(entry, worktree),
None => {
@@ -1683,7 +1673,7 @@ impl LocalWorktree {
.refresh_entry(path.clone(), None, cx)
})?
.await?;
- let worktree = this.upgrade().ok_or_else(|| anyhow!("worktree dropped"))?;
+ let worktree = this.upgrade().context("worktree dropped")?;
if let Some(entry) = entry {
Ok(File::for_entry(entry, worktree))
} else {
@@ -1930,17 +1920,17 @@ impl LocalWorktree {
)
.await
.with_context(|| {
- anyhow!("Failed to copy file from {source:?} to {target:?}")
+ format!("Failed to copy file from {source:?} to {target:?}")
})?;
}
- Ok::<(), anyhow::Error>(())
+ anyhow::Ok(())
})
.await
.log_err();
let mut refresh = cx.read_entity(
&this.upgrade().with_context(|| "Dropped worktree")?,
|this, _| {
- Ok::<postage::barrier::Receiver, anyhow::Error>(
+ anyhow::Ok::<postage::barrier::Receiver>(
this.as_local()
.with_context(|| "Worktree is not local")?
.refresh_entries_for_paths(paths_to_refresh.clone()),
@@ -1950,7 +1940,7 @@ impl LocalWorktree {
cx.background_spawn(async move {
refresh.next().await;
- Ok::<(), anyhow::Error>(())
+ anyhow::Ok(())
})
.await
.log_err();
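`anyhow::Ok(value)` in the hunk above is a plain function equivalent to `Ok::<_, anyhow::Error>(value)`; its purpose is to pin the error type of a closure or async block whose `?` operators would otherwise leave it ambiguous, which is why it can replace the `Ok::<(), anyhow::Error>(())` tail expressions. A minimal sketch (the closure here is illustrative only):

```rust
use anyhow::Result;

fn main() -> Result<()> {
    let sum_parsed = |inputs: &[&str]| {
        let mut total = 0u64;
        for input in inputs {
            // `?` converts ParseIntError into the closure's error type.
            total += input.parse::<u64>()?;
        }
        // anyhow::Ok(total) == Ok::<_, anyhow::Error>(total); the tail
        // expression fixes the closure's return type to anyhow::Result<u64>.
        anyhow::Ok(total)
    };

    assert_eq!(sum_parsed(&["1", "2", "3"])?, 6);
    assert!(sum_parsed(&["oops"]).is_err());
    Ok(())
}
```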
@@ -2040,7 +2030,7 @@ impl LocalWorktree {
let new_entry = this.update(cx, |this, _| {
this.entry_for_path(path)
.cloned()
- .ok_or_else(|| anyhow!("failed to read path after update"))
+ .context("reading path after update")
})??;
Ok(Some(new_entry))
})
@@ -2301,7 +2291,7 @@ impl RemoteWorktree {
paths_to_copy: Vec<Arc<Path>>,
local_fs: Arc<dyn Fs>,
cx: &Context<Worktree>,
- ) -> Task<Result<Vec<ProjectEntryId>, anyhow::Error>> {
+ ) -> Task<anyhow::Result<Vec<ProjectEntryId>>> {
let client = self.client.clone();
let worktree_id = self.id().to_proto();
let project_id = self.project_id;
@@ -2424,7 +2414,7 @@ impl Snapshot {
.components()
.any(|component| !matches!(component, std::path::Component::Normal(_)))
{
- return Err(anyhow!("invalid path"));
+ anyhow::bail!("invalid path");
}
if path.file_name().is_some() {
Ok(self.abs_path.as_path().join(path))
@@ -3402,15 +3392,12 @@ impl File {
worktree: Entity<Worktree>,
cx: &App,
) -> Result<Self> {
- let worktree_id = worktree
- .read(cx)
- .as_remote()
- .ok_or_else(|| anyhow!("not remote"))?
- .id();
+ let worktree_id = worktree.read(cx).as_remote().context("not remote")?.id();
- if worktree_id.to_proto() != proto.worktree_id {
- return Err(anyhow!("worktree id does not match file"));
- }
+ anyhow::ensure!(
+ worktree_id.to_proto() == proto.worktree_id,
+ "worktree id does not match file"
+ );
let disk_state = if proto.is_deleted {
DiskState::Deleted
@@ -5559,7 +5546,7 @@ impl CreatedEntry {
fn parse_gitfile(content: &str) -> anyhow::Result<&Path> {
let path = content
.strip_prefix("gitdir:")
- .ok_or_else(|| anyhow!("failed to parse gitfile content {content:?}"))?;
+ .with_context(|| format!("parsing gitfile content {content:?}"))?;
Ok(Path::new(path.trim()))
}
@@ -4,7 +4,7 @@
mod reliability;
mod zed;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use clap::{Parser, command};
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
use client::{Client, ProxySettings, UserStore, parse_zed_link};
@@ -1073,7 +1073,7 @@ fn parse_url_arg(arg: &str, cx: &App) -> Result<String> {
{
Ok(arg.into())
} else {
- Err(anyhow!("error parsing path argument: {}", error))
+ anyhow::bail!("error parsing path argument: {error}")
}
}
}
@@ -421,7 +421,7 @@ fn initialize_panels(
workspace.update_in(cx, |workspace, window, cx| {
workspace.add_panel(debug_panel, window, cx);
})?;
- Result::<_, anyhow::Error>::Ok(())
+ anyhow::Ok(())
},
)
.detach()
@@ -951,7 +951,7 @@ impl SerializableItem for ComponentPreview {
item_id: ItemId,
window: &mut Window,
cx: &mut App,
- ) -> Task<gpui::Result<Entity<Self>>> {
+ ) -> Task<anyhow::Result<Entity<Self>>> {
let deserialized_active_page =
match COMPONENT_PREVIEW_DB.get_active_page(item_id, workspace_id) {
Ok(page) => {
@@ -1009,7 +1009,7 @@ impl SerializableItem for ComponentPreview {
alive_items: Vec<ItemId>,
_window: &mut Window,
cx: &mut App,
- ) -> Task<gpui::Result<()>> {
+ ) -> Task<anyhow::Result<()>> {
delete_unloaded_items(
alive_items,
workspace_id,
@@ -1026,7 +1026,7 @@ impl SerializableItem for ComponentPreview {
_closing: bool,
_window: &mut Window,
cx: &mut Context<Self>,
- ) -> Option<Task<gpui::Result<()>>> {
+ ) -> Option<Task<anyhow::Result<()>>> {
let active_page = self.active_page_id(cx);
let workspace_id = self.workspace_id?;
Some(cx.background_spawn(async move {
@@ -1,6 +1,6 @@
use crate::handle_open_request;
use crate::restorable_workspace_locations;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use cli::{CliRequest, CliResponse, ipc::IpcSender};
use cli::{IpcHandshake, ipc};
use client::parse_zed_link;
@@ -74,13 +74,14 @@ impl OpenRequest {
let url = url::Url::parse(file)?;
let host = url
.host()
- .ok_or_else(|| anyhow!("missing host in ssh url: {}", file))?
+ .with_context(|| format!("missing host in ssh url: {file}"))?
.to_string();
let username = Some(url.username().to_string()).filter(|s| !s.is_empty());
let port = url.port();
- if !self.open_paths.is_empty() {
- return Err(anyhow!("cannot open both local and ssh paths"));
- }
+ anyhow::ensure!(
+ self.open_paths.is_empty(),
+ "cannot open both local and ssh paths"
+ );
let mut connection_options = SshSettings::get_global(cx).connection_options_for(
host.clone(),
port,
@@ -90,9 +91,10 @@ impl OpenRequest {
connection_options.password = Some(password.to_string());
}
if let Some(ssh_connection) = &self.ssh_connection {
- if *ssh_connection != connection_options {
- return Err(anyhow!("cannot open multiple ssh connections"));
- }
+ anyhow::ensure!(
+ *ssh_connection == connection_options,
+ "cannot open multiple ssh connections"
+ );
}
self.ssh_connection = Some(connection_options);
self.parse_file_path(url.path());
@@ -123,7 +125,7 @@ impl OpenRequest {
}
}
}
- Err(anyhow!("invalid zed url: {}", request_path))
+ anyhow::bail!("invalid zed url: {request_path}")
}
}
@@ -141,7 +143,7 @@ impl OpenListener {
pub fn open_urls(&self, urls: Vec<String>) {
self.0
.unbounded_send(urls)
- .map_err(|_| anyhow!("no listener for open requests"))
+ .context("no listener for open requests")
.log_err();
}
}
@@ -191,7 +193,7 @@ fn connect_to_cli(
break;
}
}
- Ok::<_, anyhow::Error>(())
+ anyhow::Ok(())
});
Ok((async_request_rx, response_tx))
@@ -401,9 +403,7 @@ async fn open_workspaces(
}
}
- if errored {
- return Err(anyhow!("failed to open a workspace"));
- }
+ anyhow::ensure!(!errored, "failed to open a workspace");
}
Ok(())
@@ -14,7 +14,7 @@ use license_detection::LICENSE_FILES_TO_CHECK;
pub use license_detection::is_license_eligible_for_data_collection;
pub use rate_completion_modal::*;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use arrayvec::ArrayVec;
use client::{Client, UserStore};
use collections::{HashMap, HashSet, VecDeque};
@@ -788,11 +788,12 @@ and then another
.get(MINIMUM_REQUIRED_VERSION_HEADER_NAME)
.and_then(|version| SemanticVersion::from_str(version.to_str().ok()?).ok())
{
- if app_version < minimum_required_version {
- return Err(anyhow!(ZedUpdateRequiredError {
+ anyhow::ensure!(
+ app_version >= minimum_required_version,
+ ZedUpdateRequiredError {
minimum_version: minimum_required_version
- }));
- }
+ }
+ );
}
if response.status().is_success() {
@@ -812,11 +813,11 @@ and then another
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
- return Err(anyhow!(
+ anyhow::bail!(
"error predicting edits.\nStatus: {:?}\nBody: {}",
response.status(),
body
- ));
+ );
}
}
}
@@ -1,4 +1,4 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
pub struct EnvFilter {
pub level_global: Option<log::LevelFilter>,
@@ -14,9 +14,7 @@ pub fn parse(filter: &str) -> Result<EnvFilter> {
for directive in filter.split(',') {
match directive.split_once('=') {
Some((name, level)) => {
- if level.contains('=') {
- return Err(anyhow!("Invalid directive: {}", directive));
- }
+ anyhow::ensure!(!level.contains('='), "Invalid directive: {directive}");
let level = parse_level(level.trim())?;
directive_names.push(name.trim().trim_end_matches(".rs").to_string());
directive_levels.push(level);
@@ -27,9 +25,7 @@ pub fn parse(filter: &str) -> Result<EnvFilter> {
directive_levels.push(log::LevelFilter::max() /* Enable all levels */);
continue;
};
- if max_level.is_some() {
- return Err(anyhow!("Cannot set multiple max levels"));
- }
+ anyhow::ensure!(max_level.is_none(), "Cannot set multiple max levels");
max_level.replace(level);
}
};
@@ -61,7 +57,7 @@ fn parse_level(level: &str) -> Result<log::LevelFilter> {
if level.eq_ignore_ascii_case("OFF") || level.eq_ignore_ascii_case("NONE") {
return Ok(log::LevelFilter::Off);
}
- Err(anyhow!("Invalid level: {}", level))
+ anyhow::bail!("Invalid level: {level}")
}
#[cfg(test)]
@@ -234,10 +234,7 @@ fn rotate_log_file<PathRef>(
.map(|err| anyhow::anyhow!(err)),
};
if let Some(err) = rotation_error {
- eprintln!(
- "Log file rotation failed. Truncating log file anyways: {}",
- err,
- );
+ eprintln!("Log file rotation failed. Truncating log file anyways: {err}");
}
_ = file.set_len(0);
@@ -1,6 +1,6 @@
use std::process::Command;
-use anyhow::{Context, Result, bail};
+use anyhow::{Context as _, Result, bail};
use clap::Parser;
#[derive(Parser)]
@@ -1,6 +1,6 @@
use std::path::{Path, PathBuf};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
use clap::Parser;
use crate::workspace::load_workspace;
@@ -17,7 +17,7 @@ pub fn run_licenses(_args: LicensesArgs) -> Result<()> {
let crate_dir = package
.manifest_path
.parent()
- .ok_or_else(|| anyhow!("no crate directory for {}", package.name))?;
+ .with_context(|| format!("no crate directory for {}", package.name))?;
if let Some(license_file) = first_license_file(crate_dir, LICENSE_FILES) {
if !license_file.is_symlink() {
@@ -2,7 +2,7 @@ use std::collections::BTreeMap;
use std::fs;
use std::path::Path;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use cargo_toml::{Dependency, Manifest};
use clap::Parser;
@@ -78,5 +78,5 @@ fn read_cargo_toml(path: impl AsRef<Path>) -> Result<Manifest> {
let path = path.as_ref();
let cargo_toml_bytes = fs::read(path)?;
Manifest::from_slice(&cargo_toml_bytes)
- .with_context(|| anyhow!("failed to read Cargo.toml at {path:?}"))
+ .with_context(|| format!("reading Cargo.toml at {path:?}"))
}