diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs
index ec88dc5125b177c6788fed35401e174b4462c2af..9f6a9625d1d63050d3c2f534f1d9288d277711ab 100644
--- a/crates/acp_thread/src/acp_thread.rs
+++ b/crates/acp_thread/src/acp_thread.rs
@@ -375,18 +375,16 @@ impl ToolCall {
             })
             .ok()??;
         let buffer = buffer.await.log_err()?;
-        let position = buffer
-            .update(cx, |buffer, _| {
-                let snapshot = buffer.snapshot();
-                if let Some(row) = location.line {
-                    let column = snapshot.indent_size_for_line(row).len;
-                    let point = snapshot.clip_point(Point::new(row, column), Bias::Left);
-                    snapshot.anchor_before(point)
-                } else {
-                    Anchor::min_for_buffer(snapshot.remote_id())
-                }
-            })
-            .ok()?;
+        let position = buffer.update(cx, |buffer, _| {
+            let snapshot = buffer.snapshot();
+            if let Some(row) = location.line {
+                let column = snapshot.indent_size_for_line(row).len;
+                let point = snapshot.clip_point(Point::new(row, column), Bias::Left);
+                snapshot.anchor_before(point)
+            } else {
+                Anchor::min_for_buffer(snapshot.remote_id())
+            }
+        });
 
         Some(ResolvedLocation { buffer, position })
     }
@@ -1803,7 +1801,7 @@ impl AcpThread {
             .ok();
 
         let old_checkpoint = git_store
-            .update(cx, |git, cx| git.checkpoint(cx))?
+            .update(cx, |git, cx| git.checkpoint(cx))
             .await
             .context("failed to get old checkpoint")
             .log_err();
@@ -1983,7 +1981,7 @@ impl AcpThread {
             rewind.await?;
             if let Some(checkpoint) = checkpoint {
                 git_store
-                    .update(cx, |git, cx| git.restore_checkpoint(checkpoint, cx))?
+                    .update(cx, |git, cx| git.restore_checkpoint(checkpoint, cx))
                     .await?;
             }
 
@@ -2001,7 +1999,7 @@ impl AcpThread {
 
         let telemetry = ActionLogTelemetry::from(&*self);
         cx.spawn(async move |this, cx| {
-            cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
+            cx.update(|cx| truncate.run(id.clone(), cx)).await?;
             this.update(cx, |this, cx| {
                 if let Some((ix, _)) = this.user_message_mut(&id) {
                     // Collect all terminals from entries that will be removed
@@ -2060,7 +2058,7 @@ impl AcpThread {
         let equal = git_store
             .update(cx, |git, cx| {
                 git.compare_checkpoints(old_checkpoint.clone(), new_checkpoint, cx)
-            })?
+            })
             .await
             .unwrap_or(true);
 
@@ -2119,17 +2117,14 @@ impl AcpThread {
         let project = self.project.clone();
         let action_log = self.action_log.clone();
         cx.spawn(async move |this, cx| {
-            let load = project
-                .update(cx, |project, cx| {
-                    let path = project
-                        .project_path_for_absolute_path(&path, cx)
-                        .ok_or_else(|| {
-                            acp::Error::resource_not_found(Some(path.display().to_string()))
-                        })?;
-                    Ok(project.open_buffer(path, cx))
-                })
-                .map_err(|e| acp::Error::internal_error().data(e.to_string()))
-                .flatten()?;
+            let load = project.update(cx, |project, cx| {
+                let path = project
+                    .project_path_for_absolute_path(&path, cx)
+                    .ok_or_else(|| {
+                        acp::Error::resource_not_found(Some(path.display().to_string()))
+                    })?;
+                Ok::<_, acp::Error>(project.open_buffer(path, cx))
+            })?;
 
             let buffer = load.await?;
 
@@ -2148,9 +2143,9 @@ impl AcpThread {
             } else {
                 action_log.update(cx, |action_log, cx| {
                     action_log.buffer_read(buffer.clone(), cx);
-                })?;
+                });
 
-                let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
+                let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                 this.update(cx, |this, _| {
                     this.shared_buffers.insert(buffer.clone(), snapshot.clone());
                 })?;
@@ -2179,7 +2174,7 @@ impl AcpThread {
                         }),
                         cx,
                     );
-                })?;
+                });
 
                 Ok(snapshot.text_for_range(start..end).collect::<String>())
             })
@@ -2200,7 +2195,7 @@ impl AcpThread {
                     .context("invalid path")?;
                 anyhow::Ok(project.open_buffer(path, cx))
             });
-            let buffer = load??.await?;
+            let buffer = load?.await?;
             let snapshot = this.update(cx, |this, cx| {
                 this.shared_buffers
                     .get(&buffer)
@@ -2235,7 +2230,7 @@ impl AcpThread {
                     }),
                     cx,
                 );
-            })?;
+            });
 
             let format_on_save = cx.update(|cx| {
                 action_log.update(cx, |action_log, cx| {
@@ -2257,7 +2252,7 @@ impl AcpThread {
                     action_log.buffer_edited(buffer.clone(), cx);
                 });
                 format_on_save
-            })?;
+            });
 
             if format_on_save {
                 let format_task = project.update(cx, |project, cx| {
@@ -2268,16 +2263,16 @@ impl AcpThread {
                         FormatTrigger::Save,
                         cx,
                     )
-                })?;
+                });
                 format_task.await.log_err();
 
                 action_log.update(cx, |action_log, cx| {
                     action_log.buffer_edited(buffer.clone(), cx);
-                })?;
+                });
             }
 
             project
-                .update(cx, |project, cx| project.save_buffer(buffer, cx))?
+                .update(cx, |project, cx| project.save_buffer(buffer, cx))
                 .await
         })
     }
@@ -2323,7 +2318,7 @@ impl AcpThread {
                 project
                     .remote_client()
                    .and_then(|r| r.read(cx).default_system_shell())
-            })?
+            })
             .unwrap_or_else(|| get_default_system_shell_preferring_bash());
 
         let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell), is_windows)
@@ -2341,10 +2336,10 @@ impl AcpThread {
                     },
                     cx,
                 )
-            })?
+            })
             .await?;
 
-        cx.new(|cx| {
+        anyhow::Ok(cx.new(|cx| {
             Terminal::new(
                 terminal_id,
                 &format!("{} {}", command, args.join(" ")),
@@ -2354,7 +2349,7 @@ impl AcpThread {
                 language_registry,
                 cx,
             )
-        })
+        }))
     }
 }
 
diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs
index f65893a87d963b2b87258b278ee4f63054e5f0a1..35b16c4e4b2660fb7d9200a6960bb6dd3dae5c7a 100644
--- a/crates/acp_thread/src/diff.rs
+++ b/crates/acp_thread/src/diff.rs
@@ -35,7 +35,7 @@ impl Diff {
             .await
             .log_err();
 
-        buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?;
+        buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx));
 
         let diff = build_buffer_diff(
             old_text.unwrap_or("".into()).into(),
@@ -45,31 +45,29 @@ impl Diff {
         )
         .await?;
 
-        multibuffer
-            .update(cx, |multibuffer, cx| {
-                let hunk_ranges = {
-                    let buffer = buffer.read(cx);
-                    diff.read(cx)
-                        .snapshot(cx)
-                        .hunks_intersecting_range(
-                            Anchor::min_for_buffer(buffer.remote_id())
-                                ..Anchor::max_for_buffer(buffer.remote_id()),
-                            buffer,
-                        )
-                        .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
-                        .collect::<Vec<_>>()
-                };
-
-                multibuffer.set_excerpts_for_path(
-                    PathKey::for_buffer(&buffer, cx),
-                    buffer.clone(),
-                    hunk_ranges,
-                    multibuffer_context_lines(cx),
-                    cx,
-                );
-                multibuffer.add_diff(diff, cx);
-            })
-            .log_err();
+        multibuffer.update(cx, |multibuffer, cx| {
+            let hunk_ranges = {
+                let buffer = buffer.read(cx);
+                diff.read(cx)
+                    .snapshot(cx)
+                    .hunks_intersecting_range(
+                        Anchor::min_for_buffer(buffer.remote_id())
+                            ..Anchor::max_for_buffer(buffer.remote_id()),
+                        buffer,
+                    )
+                    .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
+                    .collect::<Vec<_>>()
+            };
+
+            multibuffer.set_excerpts_for_path(
+                PathKey::for_buffer(&buffer, cx),
+                buffer.clone(),
+                hunk_ranges,
+                multibuffer_context_lines(cx),
+                cx,
+            );
+            multibuffer.add_diff(diff, cx);
+        });
 
         anyhow::Ok(())
     }
@@ -206,8 +204,8 @@ impl PendingDiff {
         let buffer_diff = self.diff.clone();
         let base_text = self.base_text.clone();
         self.update_diff = cx.spawn(async move |diff, cx| {
-            let text_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot())?;
-            let language = buffer.read_with(cx, |buffer, _| buffer.language().cloned())?;
+            let text_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());
+            let language = buffer.read_with(cx, |buffer, _| buffer.language().cloned());
             let update = buffer_diff
                 .update(cx, |diff, cx| {
                     diff.update_diff(
@@ -217,7 +215,7 @@ impl PendingDiff {
                         language,
                         cx,
                     )
-                })?
+                })
                 .await;
             let (task1, task2) = buffer_diff.update(cx, |diff, cx| {
                 let task1 = diff.set_snapshot(update.clone(), &text_snapshot, cx);
@@ -226,7 +224,7 @@ impl PendingDiff {
                     .unwrap()
                     .update(cx, |diff, cx| diff.set_snapshot(update, &text_snapshot, cx));
                 (task1, task2)
-            })?;
+            });
             task1.await;
             task2.await;
             diff.update(cx, |diff, cx| {
@@ -374,36 +372,37 @@ async fn build_buffer_diff(
     language_registry: Option<Arc<LanguageRegistry>>,
     cx: &mut AsyncApp,
 ) -> Result<Entity<BufferDiff>> {
-    let language = cx.update(|cx| buffer.read(cx).language().cloned())?;
-    let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
+    let language = cx.update(|cx| buffer.read(cx).language().cloned());
+    let text_snapshot = cx.update(|cx| buffer.read(cx).text_snapshot());
+    let buffer = cx.update(|cx| buffer.read(cx).snapshot());
 
-    let secondary_diff = cx.new(|cx| BufferDiff::new(&buffer, cx))?;
+    let secondary_diff = cx.new(|cx| BufferDiff::new(&buffer, cx));
     let update = secondary_diff
         .update(cx, |secondary_diff, cx| {
             secondary_diff.update_diff(
-                buffer.text.clone(),
+                text_snapshot.clone(),
                 Some(old_text),
                 true,
                 language.clone(),
                 cx,
             )
-        })?
+        })
        .await;
 
     secondary_diff
         .update(cx, |secondary_diff, cx| {
             secondary_diff.language_changed(language.clone(), language_registry.clone(), cx);
             secondary_diff.set_snapshot(update.clone(), &buffer, cx)
-        })?
+        })
         .await;
 
-    let diff = cx.new(|cx| BufferDiff::new(&buffer, cx))?;
+    let diff = cx.new(|cx| BufferDiff::new(&buffer, cx));
     diff.update(cx, |diff, cx| {
         diff.language_changed(language, language_registry, cx);
         diff.set_secondary_diff(secondary_diff);
         diff.set_snapshot(update.clone(), &buffer, cx)
-    })?
+    })
     .await;
 
     Ok(diff)
 }
diff --git a/crates/acp_thread/src/terminal.rs b/crates/acp_thread/src/terminal.rs
index 76582aa2ae053eee521a410e28ecbcfc174b0b4d..61da3c27b551cbdb156a86f120a8a7af4799af96 100644
--- a/crates/acp_thread/src/terminal.rs
+++ b/crates/acp_thread/src/terminal.rs
@@ -205,7 +205,7 @@ pub async fn create_terminal_entity(
                 project.environment().update(cx, |env, cx| {
                     env.directory_environment(dir.clone().into(), cx)
                 })
-            })?
+            })
             .await
             .unwrap_or_default()
     } else {
@@ -225,11 +225,9 @@ pub async fn create_terminal_entity(
                 .remote_client()
                 .and_then(|r| r.read(cx).default_system_shell())
                 .map(Shell::Program)
-        })?
+        })
         .unwrap_or_else(|| Shell::Program(get_default_system_shell_preferring_bash()));
 
-    let is_windows = project
-        .read_with(cx, |project, cx| project.path_style(cx).is_windows())
-        .unwrap_or(cfg!(windows));
+    let is_windows = project.read_with(cx, |project, cx| project.path_style(cx).is_windows());
     let (task_command, task_args) = task::ShellBuilder::new(&shell, is_windows)
         .redirect_stdin_to_dev_null()
         .build(Some(command.clone()), &args);
@@ -246,6 +244,6 @@ pub async fn create_terminal_entity(
             },
             cx,
         )
-    })?
+    })
     .await
 }
diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs
index 801765bb88646ca0083619557b02e5e1081f93e6..404fb3616dd4139d878e17bd9bbf847471902098 100644
--- a/crates/action_log/src/action_log.rs
+++ b/crates/action_log/src/action_log.rs
@@ -198,7 +198,7 @@ impl ActionLog {
                 .ok();
             let buffer_repo = git_store.read_with(cx, |git_store, cx| {
                 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
-            })?;
+            });
 
             let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
             let _repo_subscription =
@@ -214,7 +214,7 @@ impl ActionLog {
                             }
                         }
                     }))
-                })?
+                })
             } else {
                 None
             };
@@ -394,54 +394,51 @@ impl ActionLog {
                     buffer.read(cx).language().cloned(),
                 ))
             })??;
-            let update = diff.update(cx, |diff, cx| {
-                diff.update_diff(
-                    buffer_snapshot.clone(),
-                    Some(new_base_text),
-                    true,
-                    language,
-                    cx,
-                )
-            });
-            let mut unreviewed_edits = Patch::default();
-            if let Ok(update) = update {
-                let update = update.await;
-
-                diff.update(cx, |diff, cx| {
-                    diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
-                })?
+            let update = diff
+                .update(cx, |diff, cx| {
+                    diff.update_diff(
+                        buffer_snapshot.clone(),
+                        Some(new_base_text),
+                        true,
+                        language,
+                        cx,
+                    )
+                })
                .await;
-                let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?;
-
-                unreviewed_edits = cx
-                    .background_spawn({
-                        let buffer_snapshot = buffer_snapshot.clone();
-                        let new_diff_base = new_diff_base.clone();
-                        async move {
-                            let mut unreviewed_edits = Patch::default();
-                            for hunk in diff_snapshot.hunks_intersecting_range(
-                                Anchor::min_for_buffer(buffer_snapshot.remote_id())
-                                    ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
-                                &buffer_snapshot,
-                            ) {
-                                let old_range = new_diff_base
-                                    .offset_to_point(hunk.diff_base_byte_range.start)
-                                    ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
-                                let new_range = hunk.range.start..hunk.range.end;
-                                unreviewed_edits.push(point_to_row_edit(
-                                    Edit {
-                                        old: old_range,
-                                        new: new_range,
-                                    },
-                                    &new_diff_base,
-                                    buffer_snapshot.as_rope(),
-                                ));
-                            }
-                            unreviewed_edits
+            diff.update(cx, |diff, cx| {
+                diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
+            })
+            .await;
+            let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));
+
+            let unreviewed_edits = cx
+                .background_spawn({
+                    let buffer_snapshot = buffer_snapshot.clone();
+                    let new_diff_base = new_diff_base.clone();
+                    async move {
+                        let mut unreviewed_edits = Patch::default();
+                        for hunk in diff_snapshot.hunks_intersecting_range(
+                            Anchor::min_for_buffer(buffer_snapshot.remote_id())
+                                ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
+                            &buffer_snapshot,
+                        ) {
+                            let old_range = new_diff_base
+                                .offset_to_point(hunk.diff_base_byte_range.start)
+                                ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
+                            let new_range = hunk.range.start..hunk.range.end;
+                            unreviewed_edits.push(point_to_row_edit(
+                                Edit {
+                                    old: old_range,
+                                    new: new_range,
+                                },
+                                &new_diff_base,
+                                buffer_snapshot.as_rope(),
+                            ));
                         }
-                    })
-                    .await;
-            }
+                        unreviewed_edits
+                    }
+                })
+                .await;
             this.update(cx, |this, cx| {
                 let tracked_buffer = this
                     .tracked_buffers
diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs
index b537fabc9b7102f0d9cfab42370a21983a941f19..dbc7f4b19b40d83ab6630b8ba0cb01539eaea40d 100644
--- a/crates/activity_indicator/src/activity_indicator.rs
+++ b/crates/activity_indicator/src/activity_indicator.rs
@@ -246,7 +246,7 @@ impl ActivityIndicator {
                         cx,
                     );
                     buffer.set_capability(language::Capability::ReadOnly, cx);
-                })?;
+                });
                 workspace.update_in(cx, |workspace, window, cx| {
                     workspace.add_item_to_active_pane(
                         Box::new(cx.new(|cx| {
diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs
index cbab898871b3e715cdfc34366cdf9c6470814e1d..90aecf47fad2d8c3562eff63e6dbf852c306a61c 100644
--- a/crates/agent/src/agent.rs
+++ b/crates/agent/src/agent.rs
@@ -250,10 +250,10 @@ impl NativeAgent {
         log::debug!("Creating new NativeAgent");
 
         let project_context = cx
-            .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))?
+ .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx)) .await; - cx.new(|cx| { + Ok(cx.new(|cx| { let context_server_store = project.read(cx).context_server_store(); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); @@ -295,7 +295,7 @@ impl NativeAgent { fs, _subscriptions: subscriptions, } - }) + })) } fn register_session( @@ -512,10 +512,12 @@ impl NativeAgent { let buffer_task = project.update(cx, |project, cx| project.open_buffer(project_path, cx)); let rope_task = cx.spawn(async move |cx| { - buffer_task.await?.read_with(cx, |buffer, cx| { + let buffer = buffer_task.await?; + let (project_entry_id, rope) = buffer.read_with(cx, |buffer, cx| { let project_entry_id = buffer.entry_id(cx).context("buffer has no file")?; anyhow::Ok((project_entry_id, buffer.as_rope().clone())) - })? + })?; + anyhow::Ok((project_entry_id, rope)) }); // Build a string from the rope on a background thread. cx.background_spawn(async move { @@ -761,10 +763,10 @@ impl NativeAgent { let thread = task.await?; let acp_thread = this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?; - let events = thread.update(cx, |thread, cx| thread.replay(cx))?; + let events = thread.update(cx, |thread, cx| thread.replay(cx)); cx.update(|cx| { NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx) - })? + }) .await?; Ok(acp_thread) }) @@ -811,7 +813,7 @@ impl NativeAgent { }; let db_thread = db_thread.await; database.save_thread(id, db_thread).await.log_err(); - history.update(cx, |history, cx| history.reload(cx)).ok(); + history.update(cx, |history, cx| history.reload(cx)); }); } @@ -849,7 +851,7 @@ impl NativeAgent { path_style, cx, ); - })?; + }); for message in prompt.messages { let context_server::types::PromptMessage { role, content } = message; @@ -866,13 +868,11 @@ impl NativeAgent { true, cx, ); - anyhow::Ok(()) - })??; + })?; thread.update(cx, |thread, cx| { thread.push_acp_user_block(id, [block], path_style, cx); - anyhow::Ok(()) - })??; + }); } context_server::types::Role::Assistant => { acp_thread.update(cx, |acp_thread, cx| { @@ -882,13 +882,11 @@ impl NativeAgent { true, cx, ); - anyhow::Ok(()) - })??; + })?; thread.update(cx, |thread, cx| { thread.push_acp_agent_block(block, cx); - anyhow::Ok(()) - })??; + }); } } @@ -902,11 +900,11 @@ impl NativeAgent { // Resume if MCP prompt did not end with a user message thread.resume(cx) } - })??; + })?; cx.update(|cx| { NativeAgentConnection::handle_thread_events(response_stream, acp_thread, cx) - })? 
+ }) .await }) } @@ -1187,33 +1185,30 @@ impl acp_thread::AgentConnection for NativeAgentConnection { log::debug!("Starting thread creation in async context"); // Create Thread - let thread = agent.update( - cx, - |agent, cx: &mut gpui::Context| -> Result<_> { - // Fetch default model from registry settings - let registry = LanguageModelRegistry::read_global(cx); - // Log available models for debugging - let available_count = registry.available_models(cx).count(); - log::debug!("Total available models: {}", available_count); - - let default_model = registry.default_model().and_then(|default_model| { - agent - .models - .model_from_id(&LanguageModels::model_id(&default_model.model)) - }); - Ok(cx.new(|cx| { - Thread::new( - project.clone(), - agent.project_context.clone(), - agent.context_server_registry.clone(), - agent.templates.clone(), - default_model, - cx, - ) - })) - }, - )??; - agent.update(cx, |agent, cx| agent.register_session(thread, cx)) + let thread = agent.update(cx, |agent, cx| { + // Fetch default model from registry settings + let registry = LanguageModelRegistry::read_global(cx); + // Log available models for debugging + let available_count = registry.available_models(cx).count(); + log::debug!("Total available models: {}", available_count); + + let default_model = registry.default_model().and_then(|default_model| { + agent + .models + .model_from_id(&LanguageModels::model_id(&default_model.model)) + }); + cx.new(|cx| { + Thread::new( + project.clone(), + agent.project_context.clone(), + agent.context_server_registry.clone(), + agent.templates.clone(), + default_model, + cx, + ) + }) + }); + Ok(agent.update(cx, |agent, cx| agent.register_session(thread, cx))) }) } @@ -1446,7 +1441,7 @@ impl ThreadEnvironment for AcpThreadEnvironment { let terminal = task?.await?; let (drop_tx, drop_rx) = oneshot::channel(); - let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone())?; + let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone()); cx.spawn(async move |cx| { drop_rx.await.ok(); @@ -1471,17 +1466,19 @@ pub struct AcpTerminalHandle { impl TerminalHandle for AcpTerminalHandle { fn id(&self, cx: &AsyncApp) -> Result { - self.terminal.read_with(cx, |term, _cx| term.id().clone()) + Ok(self.terminal.read_with(cx, |term, _cx| term.id().clone())) } fn wait_for_exit(&self, cx: &AsyncApp) -> Result>> { - self.terminal - .read_with(cx, |term, _cx| term.wait_for_exit()) + Ok(self + .terminal + .read_with(cx, |term, _cx| term.wait_for_exit())) } fn current_output(&self, cx: &AsyncApp) -> Result { - self.terminal - .read_with(cx, |term, cx| term.current_output(cx)) + Ok(self + .terminal + .read_with(cx, |term, cx| term.current_output(cx))) } fn kill(&self, cx: &AsyncApp) -> Result<()> { @@ -1489,13 +1486,14 @@ impl TerminalHandle for AcpTerminalHandle { self.terminal.update(cx, |terminal, cx| { terminal.kill(cx); }); - })?; + }); Ok(()) } fn was_stopped_by_user(&self, cx: &AsyncApp) -> Result { - self.terminal - .read_with(cx, |term, _cx| term.was_stopped_by_user()) + Ok(self + .terminal + .read_with(cx, |term, _cx| term.was_stopped_by_user())) } } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 5ea04729a49afae944c5e7ca88ad67791e18b6f3..bc027bd55a19b1afd78f44963e1e3c9e7f6e5d04 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -114,8 +114,8 @@ impl EditAgent { let (events_tx, events_rx) = mpsc::unbounded(); let conversation = conversation.clone(); let output = cx.spawn(async move 
|cx| { - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; - let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?; + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + let path = cx.update(|cx| snapshot.resolve_file_path(true, cx)); let prompt = CreateFilePromptTemplate { path, edit_description, @@ -148,7 +148,7 @@ impl EditAgent { let this = self.clone(); let task = cx.spawn(async move |cx| { this.action_log - .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx))?; + .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); this.overwrite_with_chunks_internal(buffer, parse_rx, output_events_tx, cx) .await?; parse_task.await @@ -182,7 +182,7 @@ impl EditAgent { Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()), )) .ok(); - })?; + }); while let Some(event) = parse_rx.next().await { match event? { @@ -203,7 +203,7 @@ impl EditAgent { ) }); buffer.read(cx).remote_id() - })?; + }); output_events_tx .unbounded_send(EditAgentOutputEvent::Edited( Anchor::min_max_range_for_buffer(buffer_id), @@ -231,8 +231,8 @@ impl EditAgent { let conversation = conversation.clone(); let edit_format = self.edit_format; let output = cx.spawn(async move |cx| { - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; - let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?; + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + let path = cx.update(|cx| snapshot.resolve_file_path(true, cx)); let prompt = match edit_format { EditFormat::XmlTags => EditFileXmlPromptTemplate { path, @@ -263,7 +263,7 @@ impl EditAgent { cx: &mut AsyncApp, ) -> Result { self.action_log - .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))?; + .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, self.edit_format, cx); let mut edit_events = edit_events.peekable(); @@ -274,7 +274,7 @@ impl EditAgent { continue; }; - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); // Resolve the old text in the background, updating the agent // location as we keep refining which range it corresponds to. @@ -292,7 +292,7 @@ impl EditAgent { }), cx, ); - })?; + }); output_events .unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range)) .ok(); @@ -375,7 +375,7 @@ impl EditAgent { ); }); (min_edit_start, max_edit_end) - })?; + }); output_events .unbounded_send(EditAgentOutputEvent::Edited(min_edit_start..max_edit_end)) .ok(); diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index 01c81e0103a2d3624c7e8eb9b9c587726fcc4876..0726a9b3e92b2167548c1dac61036158cfff0f12 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1473,9 +1473,9 @@ impl EditAgentTest { .provider(&selected_model.provider) .expect("Provider not found"); provider.authenticate(cx) - })? 
+ }) .await?; - cx.update(|cx| { + Ok(cx.update(|cx| { let models = LanguageModelRegistry::read_global(cx); let model = models .available_models(cx) @@ -1485,7 +1485,7 @@ impl EditAgentTest { }) .unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0)); model - }) + })) } async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result { diff --git a/crates/agent/src/history_store.rs b/crates/agent/src/history_store.rs index e08ba5a3b762c81a4a9be1fccd6207938d0ab45c..1ddeb7bda32d9fe636962b2281c41e83393a4322 100644 --- a/crates/agent/src/history_store.rs +++ b/crates/agent/src/history_store.rs @@ -43,7 +43,7 @@ pub fn load_agent_thread( cx.spawn(async move |cx| { let (agent, _) = connection.await?; let agent = agent.downcast::().unwrap(); - cx.update(|cx| agent.load_thread(session_id, cx))?.await + cx.update(|cx| agent.load_thread(session_id, cx)).await }) } diff --git a/crates/agent/src/outline.rs b/crates/agent/src/outline.rs index 77af4849ffd19c1630331f5c755ff372cb69aeba..b8b5068e06d8cf23808e24e8fddb93103420cd74 100644 --- a/crates/agent/src/outline.rs +++ b/crates/agent/src/outline.rs @@ -25,13 +25,13 @@ pub async fn get_buffer_content_or_outline( path: Option<&str>, cx: &AsyncApp, ) -> Result { - let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len())?; + let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len()); if file_size > AUTO_OUTLINE_SIZE { // For large files, use outline instead of full content // Wait until the buffer has been fully parsed, so we can read its outline buffer - .read_with(cx, |buffer, _| buffer.parsing_idle())? + .read_with(cx, |buffer, _| buffer.parsing_idle()) .await; let outline_items = buffer.read_with(cx, |buffer, _| { @@ -42,7 +42,7 @@ pub async fn get_buffer_content_or_outline( .into_iter() .map(|item| item.to_point(&snapshot)) .collect::>() - })?; + }); // If no outline exists, fall back to first 1KB so the agent has some context if outline_items.is_empty() { @@ -55,7 +55,7 @@ pub async fn get_buffer_content_or_outline( } else { format!("# First 1KB of file (file too large to show full content, and no outline available)\n\n{content}") } - })?; + }); return Ok(BufferContent { text, @@ -76,7 +76,7 @@ pub async fn get_buffer_content_or_outline( }) } else { // File is small enough, return full content - let text = buffer.read_with(cx, |buffer, _| buffer.text())?; + let text = buffer.read_with(cx, |buffer, _| buffer.text()); Ok(BufferContent { text, is_outline: false, diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 9c70cad9665652cfc2f45c2d79da4a2c65834175..d54130240c89f76a44ed71e7e4ebc5c65ac4aa2d 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2413,7 +2413,7 @@ where cx.spawn(async move |cx| { let input = serde_json::from_value(input)?; let output = cx - .update(|cx| self.0.clone().run(input, event_stream, cx))? 
+ .update(|cx| self.0.clone().run(input, event_stream, cx)) .await?; let raw_output = serde_json::to_value(&output)?; Ok(AgentToolOutput { @@ -2650,7 +2650,7 @@ impl ToolCallEventStream { .get_or_insert_default() .set_always_allow_tool_actions(true); }); - })?; + }); } Ok(()) diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 3b01b2feb7dd36615a8ba7c63d81a81694e0d268..30f2be95ef66e27a6802205a1bee5707dcee443d 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -403,10 +403,7 @@ pub fn get_prompt( arguments: HashMap, cx: &mut AsyncApp, ) -> Task> { - let server = match cx.update(|cx| server_store.read(cx).get_running_server(server_id)) { - Ok(server) => server, - Err(error) => return Task::ready(Err(error)), - }; + let server = cx.update(|cx| server_store.read(cx).get_running_server(server_id)); let Some(server) = server else { return Task::ready(Err(anyhow::anyhow!("Context server not found"))); }; diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 01a77f5d811127b3df470ec73fbc91ff7c26fd52..2b7482000856bf929b1f20ced02e39b2e55ec04c 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -115,19 +115,19 @@ impl AgentTool for DeletePathTool { cx.spawn(async move |cx| { while let Some(path) = paths_rx.next().await { if let Ok(buffer) = project - .update(cx, |project, cx| project.open_buffer(path, cx))? + .update(cx, |project, cx| project.open_buffer(path, cx)) .await { action_log.update(cx, |action_log, cx| { action_log.will_delete_buffer(buffer.clone(), cx) - })?; + }); } } let deletion_task = project .update(cx, |project, cx| { project.delete_file(project_path, false, cx) - })? + }) .with_context(|| { format!("Couldn't delete {path} because that path isn't in this project.") })?; diff --git a/crates/agent/src/tools/diagnostics_tool.rs b/crates/agent/src/tools/diagnostics_tool.rs index f07ec4cfe6903ec454eb39a7afc7748327e026ec..ea98f1830d07874d847d93fa532b0b6b3806a1d9 100644 --- a/crates/agent/src/tools/diagnostics_tool.rs +++ b/crates/agent/src/tools/diagnostics_tool.rs @@ -105,7 +105,7 @@ impl AgentTool for DiagnosticsTool { cx.spawn(async move |cx| { let mut output = String::new(); let buffer = buffer.await?; - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); for (_, group) in snapshot.diagnostic_groups(None) { let entry = &group.entries[group.primary_ix]; diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 3acb7f5951f3ca4b682dcabc62a0d54c35ab08d6..72bb051e867d783ff8188210bb4ed5539be2bb8f 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -301,7 +301,7 @@ impl AgentTool for EditFileTool { let buffer = project .update(cx, |project, cx| { project.open_buffer(project_path.clone(), cx) - })? 
+ }) .await?; // Check if the file has been modified since the agent last read it @@ -357,7 +357,7 @@ impl AgentTool for EditFileTool { } } - let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?; + let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); event_stream.update_diff(diff.clone()); let _finalize_diff = util::defer({ let diff = diff.downgrade(); @@ -367,7 +367,7 @@ impl AgentTool for EditFileTool { } }); - let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let old_text = cx .background_spawn({ let old_snapshot = old_snapshot.clone(); @@ -399,9 +399,9 @@ impl AgentTool for EditFileTool { match event { EditAgentOutputEvent::Edited(range) => { if !emitted_location { - let line = buffer.update(cx, |buffer, _cx| { + let line = Some(buffer.update(cx, |buffer, _cx| { range.start.to_point(&buffer.snapshot()).row - }).ok(); + })); if let Some(abs_path) = abs_path.clone() { event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path).line(line)])); } @@ -411,7 +411,7 @@ impl AgentTool for EditFileTool { EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true, EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges, EditAgentOutputEvent::ResolvingEditRange(range) => { - diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx))?; + diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx)); // if !emitted_location { // let line = buffer.update(cx, |buffer, _cx| { // range.start.to_point(&buffer.snapshot()).row @@ -428,23 +428,21 @@ impl AgentTool for EditFileTool { } // If format_on_save is enabled, format the buffer - let format_on_save_enabled = buffer - .read_with(cx, |buffer, cx| { - let settings = language_settings::language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); - settings.format_on_save != FormatOnSave::Off - }) - .unwrap_or(false); + let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { + let settings = language_settings::language_settings( + buffer.language().map(|l| l.name()), + buffer.file(), + cx, + ); + settings.format_on_save != FormatOnSave::Off + }); let edit_agent_output = output.await?; if format_on_save_enabled { action_log.update(cx, |log, cx| { log.buffer_edited(buffer.clone(), cx); - })?; + }); let format_task = project.update(cx, |project, cx| { project.format( @@ -454,30 +452,30 @@ impl AgentTool for EditFileTool { FormatTrigger::Save, cx, ) - })?; + }); format_task.await.log_err(); } project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) .await?; action_log.update(cx, |log, cx| { log.buffer_edited(buffer.clone(), cx); - })?; + }); // Update the recorded read time after a successful edit so consecutive edits work if let Some(abs_path) = abs_path.as_ref() { if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { buffer.file().and_then(|file| file.disk_state().mtime()) - })? 
{ + }) { self.thread.update(cx, |thread, _| { thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime); })?; } } - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let (new_text, unified_diff) = cx .background_spawn({ let new_snapshot = new_snapshot.clone(); diff --git a/crates/agent/src/tools/grep_tool.rs b/crates/agent/src/tools/grep_tool.rs index 8b83382f7ef517fcccf06ffa2e990c9ec53d539c..20c94ba6902af2a01d3a80062eb9fa803d8c25b0 100644 --- a/crates/agent/src/tools/grep_tool.rs +++ b/crates/agent/src/tools/grep_tool.rs @@ -191,7 +191,7 @@ impl AgentTool for GrepTool { continue; } - let Ok((Some(path), mut parse_status)) = buffer.read_with(cx, |buffer, cx| { + let (Some(path), mut parse_status) = buffer.read_with(cx, |buffer, cx| { (buffer.file().map(|file| file.full_path(cx)), buffer.parse_status()) }) else { continue; @@ -200,20 +200,21 @@ impl AgentTool for GrepTool { // Check if this file should be excluded based on its worktree settings if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| { project.find_project_path(&path, cx) - }) - && cx.update(|cx| { + }) { + if cx.update(|cx| { let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); worktree_settings.is_path_excluded(&project_path.path) || worktree_settings.is_path_private(&project_path.path) - }).unwrap_or(false) { + }) { continue; } + } while *parse_status.borrow() != ParseStatus::Idle { parse_status.changed().await?; } - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let mut ranges = ranges .into_iter() diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index b4b0f3274e0626beea58eadde3158673dc0e2e20..2fa6efa9cdffa229fd1c2c447345220e460d286f 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -167,14 +167,14 @@ impl AgentTool for ReadFileTool { self.project.update(cx, |project, cx| { project.open_image(project_path.clone(), cx) }) - })? + }) .await?; let image = - image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image))?; + image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image)); let language_model_image = cx - .update(|cx| LanguageModelImage::from_image(image, cx))? + .update(|cx| LanguageModelImage::from_image(image, cx)) .await .context("processing image")?; @@ -197,21 +197,21 @@ impl AgentTool for ReadFileTool { project.update(cx, |project, cx| { project.open_buffer(project_path.clone(), cx) }) - })? + }) .await?; if buffer.read_with(cx, |buffer, _| { buffer .file() .as_ref() .is_none_or(|file| !file.disk_state().exists()) - })? { + }) { anyhow::bail!("{file_path} not found"); } // Record the file read time and mtime if let Some(mtime) = buffer.read_with(cx, |buffer, _| { buffer.file().and_then(|file| file.disk_state().mtime()) - })? 
{ + }) { self.thread .update(cx, |thread, _| { thread.file_read_times.insert(abs_path.to_path_buf(), mtime); @@ -239,11 +239,11 @@ impl AgentTool for ReadFileTool { let start = buffer.anchor_before(Point::new(start_row, 0)); let end = buffer.anchor_before(Point::new(end_row, 0)); buffer.text_for_range(start..end).collect::() - })?; + }); action_log.update(cx, |log, cx| { log.buffer_read(buffer.clone(), cx); - })?; + }); Ok(result.into()) } else { @@ -257,7 +257,7 @@ impl AgentTool for ReadFileTool { action_log.update(cx, |log, cx| { log.buffer_read(buffer.clone(), cx); - })?; + }); if buffer_content.is_outline { Ok(formatdoc! {" @@ -297,7 +297,7 @@ impl AgentTool for ReadFileTool { acp::ToolCallContent::Content(acp::Content::new(markdown)), ])); } - })?; + }); result }) diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index f5723f6ee3ee46144152dd3ed2939ab2cfaca9c0..eb2a027c723c80e4225380af7397e51b1af68d2b 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -74,49 +74,29 @@ impl AgentTool for RestoreFileFromDiskTool { let mut clean_paths: Vec = Vec::new(); let mut not_found_paths: Vec = Vec::new(); let mut open_errors: Vec<(PathBuf, String)> = Vec::new(); - let mut dirty_check_errors: Vec<(PathBuf, String)> = Vec::new(); + let dirty_check_errors: Vec<(PathBuf, String)> = Vec::new(); let mut reload_errors: Vec = Vec::new(); for path in input_paths { - let project_path = - project.read_with(cx, |project, cx| project.find_project_path(&path, cx)); - - let project_path = match project_path { - Ok(Some(project_path)) => project_path, - Ok(None) => { - not_found_paths.push(path); - continue; - } - Err(error) => { - open_errors.push((path, error.to_string())); - continue; - } + let Some(project_path) = + project.read_with(cx, |project, cx| project.find_project_path(&path, cx)) + else { + not_found_paths.push(path); + continue; }; let open_buffer_task = project.update(cx, |project, cx| project.open_buffer(project_path, cx)); - let buffer = match open_buffer_task { - Ok(task) => match task.await { - Ok(buffer) => buffer, - Err(error) => { - open_errors.push((path, error.to_string())); - continue; - } - }, + let buffer = match open_buffer_task.await { + Ok(buffer) => buffer, Err(error) => { open_errors.push((path, error.to_string())); continue; } }; - let is_dirty = match buffer.read_with(cx, |buffer, _| buffer.is_dirty()) { - Ok(is_dirty) => is_dirty, - Err(error) => { - dirty_check_errors.push((path, error.to_string())); - continue; - } - }; + let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty()); if is_dirty { buffers_to_reload.insert(buffer); @@ -131,15 +111,8 @@ impl AgentTool for RestoreFileFromDiskTool { project.reload_buffers(buffers_to_reload, true, cx) }); - match reload_task { - Ok(task) => { - if let Err(error) = task.await { - reload_errors.push(error.to_string()); - } - } - Err(error) => { - reload_errors.push(error.to_string()); - } + if let Err(error) = reload_task.await { + reload_errors.push(error.to_string()); } } diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index 429352200109c52303c9f6f94a28a49136af1a61..dab69433bb515614bed2d3509f9d71b7c1c9ef0c 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -71,49 +71,29 @@ impl AgentTool for SaveFileTool { let mut clean_paths: Vec = Vec::new(); let mut not_found_paths: Vec = 
Vec::new(); let mut open_errors: Vec<(PathBuf, String)> = Vec::new(); - let mut dirty_check_errors: Vec<(PathBuf, String)> = Vec::new(); + let dirty_check_errors: Vec<(PathBuf, String)> = Vec::new(); let mut save_errors: Vec<(String, String)> = Vec::new(); for path in input_paths { - let project_path = - project.read_with(cx, |project, cx| project.find_project_path(&path, cx)); - - let project_path = match project_path { - Ok(Some(project_path)) => project_path, - Ok(None) => { - not_found_paths.push(path); - continue; - } - Err(error) => { - open_errors.push((path, error.to_string())); - continue; - } + let Some(project_path) = + project.read_with(cx, |project, cx| project.find_project_path(&path, cx)) + else { + not_found_paths.push(path); + continue; }; let open_buffer_task = project.update(cx, |project, cx| project.open_buffer(project_path, cx)); - let buffer = match open_buffer_task { - Ok(task) => match task.await { - Ok(buffer) => buffer, - Err(error) => { - open_errors.push((path, error.to_string())); - continue; - } - }, + let buffer = match open_buffer_task.await { + Ok(buffer) => buffer, Err(error) => { open_errors.push((path, error.to_string())); continue; } }; - let is_dirty = match buffer.read_with(cx, |buffer, _| buffer.is_dirty()) { - Ok(is_dirty) => is_dirty, - Err(error) => { - dirty_check_errors.push((path, error.to_string())); - continue; - } - }; + let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty()); if is_dirty { buffers_to_save.insert(buffer); @@ -125,30 +105,19 @@ impl AgentTool for SaveFileTool { // Save each buffer individually since there's no batch save API. for buffer in buffers_to_save { - let path_for_buffer = match buffer.read_with(cx, |buffer, _| { - buffer - .file() - .map(|file| file.path().to_rel_path_buf()) - .map(|path| path.as_rel_path().as_unix_str().to_owned()) - }) { - Ok(path) => path.unwrap_or_else(|| "".to_string()), - Err(error) => { - save_errors.push(("".to_string(), error.to_string())); - continue; - } - }; + let path_for_buffer = buffer + .read_with(cx, |buffer, _| { + buffer + .file() + .map(|file| file.path().to_rel_path_buf()) + .map(|path| path.as_rel_path().as_unix_str().to_owned()) + }) + .unwrap_or_else(|| "".to_string()); let save_task = project.update(cx, |project, cx| project.save_buffer(buffer, cx)); - match save_task { - Ok(task) => { - if let Err(error) = task.await { - save_errors.push((path_for_buffer, error.to_string())); - } - } - Err(error) => { - save_errors.push((path_for_buffer, error.to_string())); - } + if let Err(error) = save_task.await { + save_errors.push((path_for_buffer, error.to_string())); } } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 06d6ef3963bfeec7fd69513328eeda1c76d43edc..0c8fd1da2e0281bf2feac054d3094eb212efde47 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -111,7 +111,7 @@ impl AcpConnection { is_remote: bool, cx: &mut AsyncApp, ) -> Result { - let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?; + let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone()); let builder = ShellBuilder::new(&shell, cfg!(windows)).non_interactive(); let mut child = builder.build_std_command(Some(command.path.display().to_string()), &command.args); @@ -133,13 +133,13 @@ impl AcpConnection { let sessions = Rc::new(RefCell::new(HashMap::default())); - let (release_channel, version) = cx.update(|cx| { + let (release_channel, version): (Option<&str>, String) = cx.update(|cx| { ( 
release_channel::ReleaseChannel::try_global(cx) .map(|release_channel| release_channel.display_name()), release_channel::AppVersion::global(cx).to_string(), ) - })?; + }); let client = ClientDelegate { sessions: sessions.clone(), @@ -191,7 +191,7 @@ impl AcpConnection { AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| { registry.set_active_connection(server_name.clone(), &connection, cx) }); - })?; + }); let response = connection .initialize( @@ -343,7 +343,7 @@ impl AgentConnection for AcpConnection { } })?; - let use_config_options = cx.update(|cx| cx.has_flag::())?; + let use_config_options = cx.update(|cx| cx.has_flag::()); // Config options take precedence over legacy modes/models let (modes, models, config_options) = if use_config_options && let Some(opts) = response.config_options { @@ -532,8 +532,8 @@ impl AgentConnection for AcpConnection { } let session_id = response.session_id; - let action_log = cx.new(|_| ActionLog::new(project.clone()))?; - let thread = cx.new(|cx| { + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let thread: Entity = cx.new(|cx| { AcpThread::new( self.server_name.clone(), self.clone(), @@ -544,7 +544,7 @@ impl AgentConnection for AcpConnection { watch::Receiver::constant(self.agent_capabilities.prompt_capabilities.clone()), cx, ) - })?; + }); let session = AcpSession { @@ -1104,8 +1104,7 @@ impl acp::Client for ClientDelegate { cx, ) })?; - let terminal_id = - terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?; + let terminal_id = terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone()); Ok(acp::CreateTerminalResponse::new(terminal_id)) } diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index 7e3371467afb35715cad7d92d20ab6df55170028..bdd3a5a384e3002552e3cd8e2566c75c342e244a 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -47,7 +47,7 @@ impl AgentServer for Gemini { extra_env.insert("SURFACE".to_owned(), "zed".to_owned()); if let Some(api_key) = cx - .update(GoogleLanguageModelProvider::api_key_for_gemini_cli)? + .update(GoogleLanguageModelProvider::api_key_for_gemini_cli) .await .ok() { diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 99023bb0d5d5c5dab5e781c197f3678633bb5b8e..9be8ca32e0fb186f8129ecb0364cbb3d84434bad 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -466,9 +466,9 @@ impl MessageEditor { } } }); - Ok((chunks, all_tracked_buffers)) + anyhow::Ok((chunks, all_tracked_buffers)) })?; - result + Ok(result) }) } @@ -678,28 +678,24 @@ impl MessageEditor { .update(cx, |project, cx| { project.project_path_for_absolute_path(&file_path, cx) }) - .map_err(|e| e.to_string())? .ok_or_else(|| "project path not found".to_string())?; let buffer = project .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .map_err(|e| e.to_string())? 
.await .map_err(|e| e.to_string())?; - buffer - .update(cx, |buffer, cx| { - let start = Point::new(*line_range.start(), 0) - .min(buffer.max_point()); - let end = Point::new(*line_range.end() + 1, 0) - .min(buffer.max_point()); - let content = buffer.text_for_range(start..end).collect(); - Mention::Text { - content, - tracked_buffers: vec![cx.entity()], - } - }) - .map_err(|e| e.to_string()) + Ok(buffer.update(cx, |buffer, cx| { + let start = + Point::new(*line_range.start(), 0).min(buffer.max_point()); + let end = Point::new(*line_range.end() + 1, 0) + .min(buffer.max_point()); + let content = buffer.text_for_range(start..end).collect(); + Mention::Text { + content, + tracked_buffers: vec![cx.entity()], + } + })) } }) .shared(); diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 94ef65503c72589ef7496c872e1310be0568ab09..067d23a6df87d551866c51d7b97f37c54825da52 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -985,7 +985,7 @@ impl AcpThreadView { ); }); } - })?; + }); anyhow::Ok(()) }) @@ -1019,7 +1019,7 @@ impl AcpThreadView { history_store .update(&mut cx.clone(), |store, cx| { store.save_thread(session_id.clone(), db_thread, cx) - })? + }) .await?; let thread_metadata = agent::DbThreadMetadata { @@ -1652,18 +1652,18 @@ impl AcpThreadView { .iter() .take(entry_ix) .any(|entry| entry.diffs().next().is_some()) - })?; + }); if has_earlier_edits { thread.update(cx, |thread, cx| { thread.action_log().update(cx, |action_log, cx| { action_log.keep_all_edits(None, cx); }); - })?; + }); } thread - .update(cx, |thread, cx| thread.rewind(user_message_id, cx))? + .update(cx, |thread, cx| thread.rewind(user_message_id, cx)) .await?; this.update_in(cx, |this, window, cx| { this.send_impl(message_editor, window, cx); @@ -2140,7 +2140,7 @@ impl AcpThreadView { }) }); - if let Ok(Some(resolve_task)) = resolved_node_runtime { + if let Some(resolve_task) = resolved_node_runtime { if let Ok(node_path) = resolve_task.await { task.command = Some(node_path.to_string_lossy().to_string()); } @@ -2159,11 +2159,11 @@ impl AcpThreadView { task.allow_concurrent_runs = true; task.hide = task::HideStrategy::Always; - let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| { - terminal_panel.spawn_task(&task, window, cx) - })?; - - let terminal = terminal.await?; + let terminal = terminal_panel + .update_in(cx, |terminal_panel, window, cx| { + terminal_panel.spawn_task(&task, window, cx) + })? + .await?; if check_exit_code { // For extension-based auth, wait for the process to exit and check exit code @@ -2214,7 +2214,7 @@ impl AcpThreadView { } } _ = exit_status => { - if !previous_attempt && project.read_with(cx, |project, _| project.is_via_remote_server())? && login.label.contains("gemini") { + if !previous_attempt && project.read_with(cx, |project, _| project.is_via_remote_server()) && login.label.contains("gemini") { return cx.update(|window, cx| Self::spawn_external_agent_login(login, workspace, project.clone(), true, false, window, cx))?.await } return Err(anyhow!("exited before logging in")); @@ -5667,14 +5667,14 @@ impl AcpThreadView { let markdown_language = markdown_language_task.await?; let buffer = project - .update(cx, |project, cx| project.create_buffer(false, cx))? 
+ .update(cx, |project, cx| project.create_buffer(false, cx)) .await?; buffer.update(cx, |buffer, cx| { buffer.set_text(markdown, cx); buffer.set_language(Some(markdown_language), cx); buffer.set_capability(language::Capability::ReadWrite, cx); - })?; + }); workspace.update_in(cx, |workspace, window, cx| { let buffer = cx diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 9abe5c2f4f4f7fc1c4e2ae92390cc4cb74528151..4e10dc0478625e2b534460b29d2efbc9a925cdce 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -821,7 +821,8 @@ impl AgentConfiguration { } }, ) - }) + }); + anyhow::Ok(()) } }) .detach_and_log_err(cx); @@ -1304,7 +1305,7 @@ fn show_unable_to_uninstall_extension_with_context_server( .context_servers .remove(&context_server_id.0); }); - })?; + }); anyhow::Ok(()) } }) diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index eb7c4646eab178222cf7e1d5c02740f2bdc576a0..db4968edf7e2be9e28695c4e1d0dce28444a73c5 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -262,7 +262,7 @@ fn save_provider_to_settings( let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes()); cx.spawn(async move |cx| { task.await - .map_err(|_| "Failed to write API key to keychain")?; + .map_err(|_| SharedString::from("Failed to write API key to keychain"))?; cx.update(|cx| { update_settings_file(fs, cx, |settings, _cx| { settings @@ -278,8 +278,7 @@ fn save_provider_to_settings( }, ); }); - }) - .ok(); + }); Ok(()) }) } diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index a7b955b81ef3a7edccca98f15fa73bb40787a2c9..1bc9548aa6b327f57c01a36eda5f851e669a7741 100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -1091,8 +1091,8 @@ impl CompletionProvider for PromptCompletio ) } }) - .collect() - })?; + .collect::>() + }); Ok(vec![CompletionResponse { completions, @@ -1469,26 +1469,19 @@ pub(crate) fn search_symbols( let Some(symbols) = symbols_task.await.log_err() else { return Vec::new(); }; - let Some((visible_match_candidates, external_match_candidates)): Option<(Vec<_>, Vec<_>)> = - project - .update(cx, |project, cx| { - symbols - .iter() - .enumerate() - .map(|(id, symbol)| { - StringMatchCandidate::new(id, symbol.label.filter_text()) - }) - .partition(|candidate| match &symbols[candidate.id].path { - SymbolLocation::InProject(project_path) => project - .entry_for_path(project_path, cx) - .is_some_and(|e| !e.is_ignored), - SymbolLocation::OutsideProject { .. } => false, - }) - }) - .log_err() - else { - return Vec::new(); - }; + let (visible_match_candidates, external_match_candidates): (Vec<_>, Vec<_>) = project + .update(cx, |project, cx| { + symbols + .iter() + .enumerate() + .map(|(id, symbol)| StringMatchCandidate::new(id, symbol.label.filter_text())) + .partition(|candidate| match &symbols[candidate.id].path { + SymbolLocation::InProject(project_path) => project + .entry_for_path(project_path, cx) + .is_some_and(|e| !e.is_ignored), + SymbolLocation::OutsideProject { .. 
} => false, + }) + }); const MAX_MATCHES: usize = 100; let mut visible_matches = cx.background_executor().block(fuzzy::match_strings( diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index b3c14c5a0ec332f66c300023759db9f09b94dc6f..de2f1c360452d4698438968f8df5bc0e90c11388 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -300,7 +300,7 @@ impl InlineAssistant { if let Some(error) = configuration_error() { if let ConfigurationError::ProviderNotAuthenticated(provider) = error { cx.spawn(async move |_, cx| { - cx.update(|cx| provider.authenticate(cx))?.await?; + cx.update(|cx| provider.authenticate(cx)).await?; anyhow::Ok(()) }) .detach_and_log_err(cx); @@ -1633,7 +1633,7 @@ impl EditorInlineAssists { let editor = editor.upgrade().context("editor was dropped")?; cx.update_global(|assistant: &mut InlineAssistant, cx| { assistant.update_editor_highlights(&editor, cx); - })?; + }); } Ok(()) } @@ -1978,7 +1978,7 @@ impl CodeActionProvider for AssistantCodeActionProvider { let multibuffer_snapshot = multibuffer.read(cx); multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range) }) - })? + }) .context("invalid range")?; let prompt_store = prompt_store.await.ok(); diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index ea4b82e3037393ef1c73ea5608d61b0f0e5de054..8160280be078412cf86e7f761b637451b4022ccb 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -94,7 +94,7 @@ impl MentionSet { let content = if full_mention_content && let MentionUri::Directory { abs_path } = &mention_uri { - cx.update(|cx| full_mention_for_directory(&project, abs_path, cx))? + cx.update(|cx| full_mention_for_directory(&project, abs_path, cx)) .await? } else { task.await.map_err(|e| anyhow!("{e}"))? @@ -180,9 +180,7 @@ impl MentionSet { let image = cx .spawn(async move |_, cx| { let image = image_task.await.map_err(|e| e.to_string())?; - let image = image - .update(cx, |image, _| image.image.clone()) - .map_err(|e| e.to_string())?; + let image = image.update(cx, |image, _| image.image.clone()); Ok(image) }) .shared(); @@ -291,10 +289,10 @@ impl MentionSet { let task = project.update(cx, |project, cx| project.open_image(project_path, cx)); return cx.spawn(async move |_, cx| { let image = task.await?; - let image = image.update(cx, |image, _| image.image.clone())?; + let image = image.update(cx, |image, _| image.image.clone()); let format = image.format; let image = cx - .update(|cx| LanguageModelImage::from_image(image, cx))? + .update(|cx| LanguageModelImage::from_image(image, cx)) .await; if let Some(image) = image { Ok(Mention::Image(MentionImage { @@ -365,8 +363,8 @@ impl MentionSet { content, tracked_buffers: vec![cx.entity()], } - })?; - anyhow::Ok(mention) + }); + Ok(mention) }) } @@ -493,9 +491,9 @@ impl MentionSet { let agent = agent.downcast::().unwrap(); let summary = agent .0 - .update(cx, |agent, cx| agent.thread_summary(id, cx))? 
+ .update(cx, |agent, cx| agent.thread_summary(id, cx)) .await?; - anyhow::Ok(Mention::Text { + Ok(Mention::Text { content: summary.to_string(), tracked_buffers: Vec::new(), }) @@ -512,7 +510,7 @@ impl MentionSet { }); cx.spawn(async move |_, cx| { let text_thread = text_thread_task.await?; - let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx))?; + let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx)); Ok(Mention::Text { content: xml, tracked_buffers: Vec::new(), @@ -580,8 +578,8 @@ pub(crate) fn paste_images_as_context( }) .ok(); for image in images { - let Ok((excerpt_id, text_anchor, multibuffer_anchor)) = - editor.update_in(cx, |message_editor, window, cx| { + let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor + .update_in(cx, |message_editor, window, cx| { let snapshot = message_editor.snapshot(window, cx); let (excerpt_id, _, buffer_snapshot) = snapshot.buffer_snapshot().as_singleton().unwrap(); @@ -599,6 +597,7 @@ pub(crate) fn paste_images_as_context( ); (*excerpt_id, text_anchor, multibuffer_anchor) }) + .ok() else { break; }; @@ -607,12 +606,10 @@ pub(crate) fn paste_images_as_context( let Some(start_anchor) = multibuffer_anchor else { continue; }; - let Ok(end_anchor) = editor.update(cx, |editor, cx| { + let end_anchor = editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len) - }) else { - continue; - }; + }); let image = Arc::new(image); let Ok(Some((crease_id, tx))) = cx.update(|window, cx| { insert_crease_for_mention( @@ -648,23 +645,17 @@ pub(crate) fn paste_images_as_context( }) .shared(); - mention_set - .update(cx, |mention_set, _cx| { - mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone()) - }) - .ok(); + mention_set.update(cx, |mention_set, _cx| { + mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone()) + }); if task.await.notify_async_err(cx).is_none() { - editor - .update(cx, |editor, cx| { - editor.edit([(start_anchor..end_anchor, "")], cx); - }) - .ok(); - mention_set - .update(cx, |mention_set, _cx| { - mention_set.remove_mention(&crease_id) - }) - .ok(); + editor.update(cx, |editor, cx| { + editor.edit([(start_anchor..end_anchor, "")], cx); + }); + mention_set.update(cx, |mention_set, _cx| { + mention_set.remove_mention(&crease_id) + }); } } })) @@ -822,42 +813,44 @@ fn full_mention_for_directory( cx.spawn(async move |cx| { let file_paths = worktree.read_with(cx, |worktree, _cx| { collect_files_in_path(worktree, &directory_path) - })?; + }); let descendants_future = cx.update(|cx| { - futures::future::join_all(file_paths.into_iter().map(|(worktree_path, full_path)| { - let rel_path = worktree_path - .strip_prefix(&directory_path) - .log_err() - .map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into()); - - let open_task = project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - let project_path = ProjectPath { - worktree_id, - path: worktree_path, - }; - buffer_store.open_buffer(project_path, cx) - }) - }); - - cx.spawn(async move |cx| { - let buffer = open_task.await.log_err()?; - let buffer_content = outline::get_buffer_content_or_outline( - buffer.clone(), - Some(&full_path), - &cx, - ) - .await - .ok()?; + futures::future::join_all(file_paths.into_iter().map( + |(worktree_path, full_path): (Arc, String)| { + let rel_path = worktree_path + .strip_prefix(&directory_path) + .log_err() + .map_or_else(|| 
worktree_path.clone(), |rel_path| rel_path.into()); + + let open_task = project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + let project_path = ProjectPath { + worktree_id, + path: worktree_path, + }; + buffer_store.open_buffer(project_path, cx) + }) + }); - Some((rel_path, full_path, buffer_content.text, buffer)) - }) - })) - })?; + cx.spawn(async move |cx| { + let buffer = open_task.await.log_err()?; + let buffer_content = outline::get_buffer_content_or_outline( + buffer.clone(), + Some(&full_path), + &cx, + ) + .await + .ok()?; + + Some((rel_path, full_path, buffer_content.text, buffer)) + }) + }, + )) + }); let contents = cx .background_spawn(async move { - let (contents, tracked_buffers) = descendants_future + let (contents, tracked_buffers): (Vec<_>, Vec<_>) = descendants_future .await .into_iter() .flatten() diff --git a/crates/assistant_slash_commands/src/default_command.rs b/crates/assistant_slash_commands/src/default_command.rs index 01eff881cff0f07db9bf34e25853432e413ed79f..4ded6c846cf8e6f006bbf24497c5368b61c7a742 100644 --- a/crates/assistant_slash_commands/src/default_command.rs +++ b/crates/assistant_slash_commands/src/default_command.rs @@ -56,7 +56,7 @@ impl SlashCommand for DefaultSlashCommand { let store = PromptStore::global(cx); cx.spawn(async move |cx| { let store = store.await?; - let prompts = store.read_with(cx, |store, _cx| store.default_prompt_metadata())?; + let prompts = store.read_with(cx, |store, _cx| store.default_prompt_metadata()); let mut text = String::new(); text.push('\n'); diff --git a/crates/assistant_slash_commands/src/diagnostics_command.rs b/crates/assistant_slash_commands/src/diagnostics_command.rs index 3b3e3f7b895d50b36c3981bf4ee442b09bfdf33f..60f6d7c1a6e559028fd211f12d7408721ffcdea7 100644 --- a/crates/assistant_slash_commands/src/diagnostics_command.rs +++ b/crates/assistant_slash_commands/src/diagnostics_command.rs @@ -300,7 +300,7 @@ fn collect_diagnostics( .await .log_err() { - let snapshot = cx.read_entity(&buffer, |buffer, _| buffer.snapshot())?; + let snapshot = cx.read_entity(&buffer, |buffer, _| buffer.snapshot()); collect_buffer_diagnostics(&mut output, &snapshot, options.include_warnings); } diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index ae4e8363b40d520b9ea33e5cba5ffa68d783ab04..ff6514e3359a5d6d8c569079b8e0e7a88a080e77 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -369,7 +369,7 @@ fn collect_files( }; if let Some(buffer) = open_buffer_task.await.log_err() { let mut output = SlashCommandOutput::default(); - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); append_buffer_to_output( &snapshot, Some(path_including_worktree_name.display(path_style).as_ref()), diff --git a/crates/assistant_slash_commands/src/prompt_command.rs b/crates/assistant_slash_commands/src/prompt_command.rs index bbd6d3e3ad201c06940d6dc986616f61c8e15547..961dd266ad6bd8e91ccd04f3e74ab9534750f37b 100644 --- a/crates/assistant_slash_commands/src/prompt_command.rs +++ b/crates/assistant_slash_commands/src/prompt_command.rs @@ -47,7 +47,7 @@ impl SlashCommand for PromptSlashCommand { let cancellation_flag = Arc::new(AtomicBool::default()); let prompts: Vec = store .await? - .read_with(cx, |store, cx| store.search(query, cancellation_flag, cx))? 
+ .read_with(cx, |store, cx| store.search(query, cancellation_flag, cx)) .await; Ok(prompts .into_iter() @@ -91,7 +91,7 @@ impl SlashCommand for PromptSlashCommand { .id_for_title(&title) .with_context(|| format!("no prompt found with title {:?}", title))?; anyhow::Ok(store.load(prompt_id, cx)) - })?? + })? .await?; anyhow::Ok(body) } diff --git a/crates/assistant_text_thread/src/text_thread.rs b/crates/assistant_text_thread/src/text_thread.rs index 5ec72eb0814f9ac09aba36f52d6f011af5b47249..4001dca1f512187ba02b645f932a54dbafe62ce6 100644 --- a/crates/assistant_text_thread/src/text_thread.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -1263,7 +1263,7 @@ impl TextThread { } let token_count = cx - .update(|cx| model.model.count_tokens(request, cx))? + .update(|cx| model.model.count_tokens(request, cx)) .await?; this.update(cx, |this, cx| { this.token_count = Some(token_count); diff --git a/crates/assistant_text_thread/src/text_thread_store.rs b/crates/assistant_text_thread/src/text_thread_store.rs index 483baa73134334162ea30d269a1f955dd8fe023a..2e859c42ef2f9ed86404b80dabf56be71acacfa4 100644 --- a/crates/assistant_text_thread/src/text_thread_store.rs +++ b/crates/assistant_text_thread/src/text_thread_store.rs @@ -124,7 +124,7 @@ impl TextThreadStore { this.register_context_server_handlers(cx); this.reload(cx).detach_and_log_err(cx); this - })?; + }); Ok(this) }) @@ -166,7 +166,8 @@ impl TextThreadStore { }) .collect(); cx.notify(); - }) + }); + Ok(()) } async fn handle_open_context( @@ -196,7 +197,7 @@ impl TextThreadStore { .read(cx) .serialize_ops(&TextThreadVersion::default(), cx), ) - })??; + })?; let operations = operations.await; Ok(proto::OpenContextResponse { context: Some(proto::Context { operations }), @@ -224,7 +225,7 @@ impl TextThreadStore { .read(cx) .serialize_ops(&TextThreadVersion::default(), cx), )) - })??; + })?; let operations = operations.await; Ok(proto::CreateContextResponse { context_id: context_id.to_proto(), @@ -245,7 +246,7 @@ impl TextThreadStore { text_thread.update(cx, |text_thread, cx| text_thread.apply_ops([operation], cx)); } Ok(()) - })? + }) } async fn handle_synchronize_contexts( @@ -290,7 +291,7 @@ impl TextThreadStore { anyhow::Ok(proto::SynchronizeContextsResponse { contexts: local_versions, }) - })? 
+ }) } fn handle_project_shared(&mut self, cx: &mut Context) { @@ -416,7 +417,7 @@ impl TextThreadStore { Some(project), cx, ) - })?; + }); let operations = cx .background_spawn(async move { context_proto @@ -426,7 +427,7 @@ impl TextThreadStore { .collect::>>() }) .await?; - text_thread.update(cx, |context, cx| context.apply_ops(operations, cx))?; + text_thread.update(cx, |context, cx| context.apply_ops(operations, cx)); this.update(cx, |this, cx| { if let Some(existing_context) = this.loaded_text_thread_for_id(&context_id, cx) { existing_context @@ -473,7 +474,7 @@ impl TextThreadStore { Some(project), cx, ) - })?; + }); this.update(cx, |this, cx| { if let Some(existing_context) = this.loaded_text_thread_for_path(&path, cx) { existing_context @@ -580,7 +581,7 @@ impl TextThreadStore { Some(project), cx, ) - })?; + }); let operations = cx .background_spawn(async move { context_proto @@ -590,7 +591,7 @@ impl TextThreadStore { .collect::>>() }) .await?; - text_thread.update(cx, |context, cx| context.apply_ops(operations, cx))?; + text_thread.update(cx, |context, cx| context.apply_ops(operations, cx)); this.update(cx, |this, cx| { if let Some(existing_context) = this.loaded_text_thread_for_id(&text_thread_id, cx) { diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs index 2c1f770530107733c9d9f3bbf39be5cc9d9ff05e..49239320facdd71b47b709b67bab32b5f0aba9ac 100644 --- a/crates/audio/src/audio.rs +++ b/crates/audio/src/audio.rs @@ -303,9 +303,9 @@ pub struct VoipParts { #[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))] impl VoipParts { pub fn new(cx: &AsyncApp) -> anyhow::Result { - let (apm, replays) = cx.try_read_default_global::(|audio, _| { + let (apm, replays) = cx.read_default_global::(|audio, _| { (Arc::clone(&audio.echo_canceller), audio.replays.clone()) - })?; + }); let legacy_audio_compatible = AudioSettings::try_read_global(cx, |settings| settings.legacy_audio_compatible) .unwrap_or(true); diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index d9758109a57d3853d008d5a9c03d716fe3cc5e2c..9e02fdb4937e64272dc741320866f0d5f0fad7b5 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -436,7 +436,7 @@ impl AutoUpdater { .0 .clone() .context("auto-update not initialized") - })??; + })?; set_status("Fetching remote server release", cx); let release = Self::get_release_asset( @@ -456,7 +456,7 @@ impl AutoUpdater { let version_path = platform_dir.join(format!("{}.gz", release.version)); smol::fs::create_dir_all(&platform_dir).await.ok(); - let client = this.read_with(cx, |this, _| this.client.http_client())?; + let client = this.read_with(cx, |this, _| this.client.http_client()); if smol::fs::metadata(&version_path).await.is_err() { log::info!( @@ -482,7 +482,7 @@ impl AutoUpdater { .0 .clone() .context("auto-update not initialized") - })??; + })?; let release = Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx) @@ -500,7 +500,7 @@ impl AutoUpdater { arch: &str, cx: &mut AsyncApp, ) -> Result { - let client = this.read_with(cx, |this, _| this.client.clone())?; + let client = this.read_with(cx, |this, _| this.client.clone()); let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() { ( @@ -563,7 +563,7 @@ impl AutoUpdater { this.status.clone(), ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable), ) - })?; + }); Self::check_dependencies()?; @@ -571,12 +571,12 @@ impl AutoUpdater { this.status = 
AutoUpdateStatus::Checking; log::info!("Auto Update: checking for updates"); cx.notify(); - })?; + }); let fetched_release_data = Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?; let fetched_version = fetched_release_data.clone().version; - let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full())); + let app_commit_sha = Ok(cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full()))); let newer_version = Self::check_if_fetched_version_is_newer( release_channel, app_commit_sha, @@ -586,7 +586,7 @@ impl AutoUpdater { )?; let Some(newer_version) = newer_version else { - return this.update(cx, |this, cx| { + this.update(cx, |this, cx| { let status = match previous_status { AutoUpdateStatus::Updated { .. } => previous_status, _ => AutoUpdateStatus::Idle, @@ -594,6 +594,7 @@ impl AutoUpdater { this.status = status; cx.notify(); }); + return Ok(()); }; this.update(cx, |this, cx| { @@ -601,7 +602,7 @@ impl AutoUpdater { version: newer_version.clone(), }; cx.notify(); - })?; + }); let installer_dir = InstallerDir::new().await?; let target_path = Self::target_path(&installer_dir).await?; @@ -612,11 +613,11 @@ impl AutoUpdater { version: newer_version.clone(), }; cx.notify(); - })?; + }); let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?; if let Some(new_binary_path) = new_binary_path { - cx.update(|cx| cx.set_restart_path(new_binary_path))?; + cx.update(|cx| cx.set_restart_path(new_binary_path)); } this.update(cx, |this, cx| { @@ -626,7 +627,8 @@ impl AutoUpdater { version: newer_version, }; cx.notify(); - }) + }); + Ok(()) } fn check_if_fetched_version_is_newer( @@ -807,9 +809,9 @@ async fn install_release_linux( downloaded_tar_gz: PathBuf, cx: &AsyncApp, ) -> Result> { - let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?; + let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name()); let home_dir = PathBuf::from(env::var("HOME").context("no HOME env var set")?); - let running_app_path = cx.update(|cx| cx.app_path())??; + let running_app_path = cx.update(|cx| cx.app_path())?; let extracted = temp_dir.path().join("zed"); fs::create_dir_all(&extracted) @@ -874,7 +876,7 @@ async fn install_release_macos( downloaded_dmg: PathBuf, cx: &AsyncApp, ) -> Result> { - let running_app_path = cx.update(|cx| cx.app_path())??; + let running_app_path = cx.update(|cx| cx.app_path())?; let running_app_filename = running_app_path .file_name() .with_context(|| format!("invalid running app path {running_app_path:?}"))?; diff --git a/crates/auto_update_ui/src/auto_update_ui.rs b/crates/auto_update_ui/src/auto_update_ui.rs index 210951904c733c476582d879f435e4e74e03d15b..41485e5396331ea71b0fa20e0cf38c9b76c0a82e 100644 --- a/crates/auto_update_ui/src/auto_update_ui.rs +++ b/crates/auto_update_ui/src/auto_update_ui.rs @@ -207,8 +207,8 @@ pub fn notify_if_app_was_updated(cx: &mut App) { updater .set_should_show_update_notification(false, cx) .detach_and_log_err(cx); - }) - })?; + }); + }); } anyhow::Ok(()) }) diff --git a/crates/call/src/call_impl/mod.rs b/crates/call/src/call_impl/mod.rs index b4fcdb2552a440f897504910a99f30a182134c09..08d3a28e10787ada3664c970ab52ea968ca54860 100644 --- a/crates/call/src/call_impl/mod.rs +++ b/crates/call/src/call_impl/mod.rs @@ -112,24 +112,24 @@ impl ActiveCall { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let user_store = this.read_with(&cx, |this, _| this.user_store.clone())?; + let user_store = this.read_with(&cx, |this, _| 
this.user_store.clone()); let call = IncomingCall { room_id: envelope.payload.room_id, participants: user_store .update(&mut cx, |user_store, cx| { user_store.get_users(envelope.payload.participant_user_ids, cx) - })? + }) .await?, calling_user: user_store .update(&mut cx, |user_store, cx| { user_store.get_user(envelope.payload.calling_user_id, cx) - })? + }) .await?, initial_project: envelope.payload.initial_project, }; this.update(&mut cx, |this, _| { *this.incoming_call.0.borrow_mut() = Some(call); - })?; + }); Ok(proto::Ack {}) } @@ -147,7 +147,7 @@ impl ActiveCall { { incoming_call.take(); } - })?; + }); Ok(()) } @@ -187,7 +187,7 @@ impl ActiveCall { let initial_project_id = if let Some(initial_project) = initial_project { Some( - room.update(cx, |room, cx| room.share_project(initial_project, cx))? + room.update(cx, |room, cx| room.share_project(initial_project, cx)) .await?, ) } else { @@ -196,7 +196,7 @@ impl ActiveCall { room.update(cx, move |room, cx| { room.call(called_user_id, initial_project_id, cx) - })? + }) .await?; anyhow::Ok(()) @@ -216,7 +216,7 @@ impl ActiveCall { user_store, cx, ) - })? + }) .await?; this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))? diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index ccc8c067c25a91aa44c01911be89c21f0ea9367c..a2e1ac2fcc2779f2340dd35d5800749cb6bfcbb2 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -185,13 +185,13 @@ impl Room { room.local_participant.role = participant.role() } room - })?; + }); let initial_project_id = if let Some(initial_project) = initial_project { let initial_project_id = room .update(cx, |room, cx| { room.share_project(initial_project.clone(), cx) - })? + }) .await?; Some(initial_project_id) } else { @@ -202,7 +202,7 @@ impl Room { .update(cx, |room, cx| { room.leave_when_empty = true; room.call(called_user_id, initial_project_id, cx) - })? + }) .await; match did_join { Ok(()) => Ok(room), @@ -286,12 +286,12 @@ impl Room { user_store, cx, ) - })?; + }); room.update(&mut cx, |room, cx| { room.leave_when_empty = room.channel_id.is_none(); room.apply_room_update(room_proto, cx)?; anyhow::Ok(()) - })??; + })?; Ok(room) } @@ -379,7 +379,7 @@ impl Room { .update(cx, |this, cx| { this.status = RoomStatus::Rejoining; cx.notify(); - })?; + }); // Wait for client to re-establish a connection to the server. let executor = cx.background_executor().clone(); @@ -390,15 +390,11 @@ impl Room { log::info!("client reconnected, attempting to rejoin room"); let Some(this) = this.upgrade() else { break }; - match this.update(cx, |this, cx| this.rejoin(cx)) { - Ok(task) => { - if task.await.log_err().is_some() { - return true; - } else { - remaining_attempts -= 1; - } - } - Err(_app_dropped) => return false, + let task = this.update(cx, |this, cx| this.rejoin(cx)); + if task.await.log_err().is_some() { + return true; + } else { + remaining_attempts -= 1; } } else if client_status.borrow().is_signed_out() { return false; @@ -437,7 +433,7 @@ impl Room { // we leave the room and return an error. 
if let Some(this) = this.upgrade() { log::info!("reconnection failed, leaving room"); - this.update(cx, |this, cx| this.leave(cx))?.await?; + this.update(cx, |this, cx| this.leave(cx)).await?; } anyhow::bail!("can't reconnect to room: client failed to re-establish connection"); } @@ -665,7 +661,7 @@ impl Room { mut cx: AsyncApp, ) -> Result<()> { let room = envelope.payload.room.context("invalid room")?; - this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))? + this.update(&mut cx, |this, cx| this.apply_room_update(room, cx)) } fn apply_room_update(&mut self, room: proto::Room, cx: &mut Context) -> Result<()> { @@ -1203,7 +1199,7 @@ impl Room { cx.spawn(async move |this, cx| { let response = request.await?; - project.update(cx, |project, cx| project.shared(response.project_id, cx))??; + project.update(cx, |project, cx| project.shared(response.project_id, cx))?; // If the user's location is in this project, it changes from UnsharedProject to SharedProject. this.update(cx, |this, cx| { diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index efa0850753887c2116ee7916727a870a3528b627..c4966443978e1eaf86192171de4c765cac41d5c7 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -71,8 +71,8 @@ impl ChannelBuffer { capability, base_text, ) - })?; - buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?; + }); + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); let subscription = client.subscribe_to_entity(channel.id.0)?; @@ -93,7 +93,7 @@ impl ChannelBuffer { }; this.replace_collaborators(response.collaborators, cx); this - })?) + })) } fn release(&mut self, _: &mut App) { @@ -168,7 +168,7 @@ impl ChannelBuffer { cx.notify(); this.buffer .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) - })?; + }); Ok(()) } @@ -182,7 +182,8 @@ impl ChannelBuffer { this.replace_collaborators(message.payload.collaborators, cx); cx.emit(ChannelBufferEvent::CollaboratorsChanged); cx.notify(); - }) + }); + Ok(()) } fn on_buffer_update( diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index e983d03e0d6758f681de9e4a3e6fd13dc7075b01..62e34210ebac2dd0e017b415adb094857bb11025 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -170,17 +170,14 @@ impl ChannelStore { match status { client::Status::Connected { .. } => { this.update(cx, |this, cx| this.handle_connect(cx)) - .ok()? 
.await .log_err()?; } client::Status::SignedOut | client::Status::UpgradeRequired => { - this.update(cx, |this, cx| this.handle_disconnect(false, cx)) - .ok(); + this.update(cx, |this, cx| this.handle_disconnect(false, cx)); } _ => { - this.update(cx, |this, cx| this.handle_disconnect(true, cx)) - .ok(); + this.update(cx, |this, cx| this.handle_disconnect(true, cx)); } } } @@ -204,7 +201,7 @@ impl ChannelStore { while let Some(update_channels) = update_channels_rx.next().await { if let Some(this) = this.upgrade() { let update_task = this - .update(cx, |this, cx| this.update_channels(update_channels, cx))?; + .update(cx, |this, cx| this.update_channels(update_channels, cx)); if let Some(update_task) = update_task { update_task.await.log_err(); } @@ -814,7 +811,7 @@ impl ChannelStore { this.update_channels_tx .unbounded_send(message.payload) .unwrap(); - })?; + }); Ok(()) } @@ -841,7 +838,8 @@ impl ChannelStore { .set_role(role) } } - }) + }); + Ok(()) } fn handle_connect(&mut self, cx: &mut Context) -> Task> { @@ -965,8 +963,7 @@ impl ChannelStore { buffer.update(cx, |buffer, cx| buffer.disconnect(cx)); } } - }) - .ok(); + }); } }) }); diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 0d2f36169780751200abc80f88f089d423977984..2731462a5a4dd5c9da173c637fee773a99925e89 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -343,7 +343,7 @@ impl ClientCredentialsProvider { } fn server_url(&self, cx: &AsyncApp) -> Result { - cx.update(|cx| ClientSettings::get_global(cx).server_url.clone()) + Ok(cx.update(|cx| ClientSettings::get_global(cx).server_url.clone())) } /// Reads the credentials from the provider. @@ -934,10 +934,10 @@ impl Client { let connect_task = cx.update({ let cloud_client = self.cloud_client.clone(); move |cx| cloud_client.connect(cx) - })??; + })?; let connection = connect_task.await?; - let (mut messages, task) = cx.update(|cx| connection.spawn(cx))?; + let (mut messages, task) = cx.update(|cx| connection.spawn(cx)); task.detach(); cx.spawn({ @@ -977,8 +977,7 @@ impl Client { } }) .detach(); - }) - .log_err(); + }); let credentials = self.sign_in(try_provider, cx).await?; @@ -1003,8 +1002,7 @@ impl Client { } }) .detach_and_log_err(cx); - }) - .log_err(); + }); Ok(()) } @@ -1249,14 +1247,8 @@ impl Client { credentials: &Credentials, cx: &AsyncApp, ) -> Task> { - let release_channel = cx - .update(|cx| ReleaseChannel::try_global(cx)) - .ok() - .flatten(); - let app_version = cx - .update(|cx| AppVersion::global(cx).to_string()) - .ok() - .unwrap_or_default(); + let release_channel = cx.update(|cx| ReleaseChannel::try_global(cx)); + let app_version = cx.update(|cx| AppVersion::global(cx).to_string()); let http = self.http.clone(); let proxy = http.proxy().cloned(); @@ -1293,7 +1285,7 @@ impl Client { None => Box::new(TcpStream::connect(rpc_host).await?), }) } - })? 
+ }) .await?; log::info!("connected to rpc endpoint {}", rpc_url); @@ -1361,12 +1353,12 @@ impl Client { let (open_url_tx, open_url_rx) = oneshot::channel::(); cx.update(|cx| { cx.spawn(async move |cx| { - let url = open_url_rx.await?; - cx.update(|cx| cx.open_url(&url)) + if let Ok(url) = open_url_rx.await { + cx.update(|cx| cx.open_url(&url)); + } }) - .detach_and_log_err(cx); - }) - .log_err(); + .detach(); + }); let credentials = background .clone() @@ -1468,7 +1460,7 @@ impl Client { }) .await?; - cx.update(|cx| cx.activate(true))?; + cx.update(|cx| cx.activate(true)); Ok(credentials) }) } @@ -1687,8 +1679,7 @@ impl Client { for handler in self.message_to_client_handlers.lock().iter() { handler(&message, cx); } - }) - .ok(); + }); } pub fn telemetry(&self) -> &Arc { @@ -2101,7 +2092,7 @@ mod tests { let (done_tx2, done_rx2) = smol::channel::unbounded(); AnyProtoClient::from(client.clone()).add_entity_message_handler( move |entity: Entity, _: TypedEnvelope, cx| { - match entity.read_with(&cx, |entity, _| entity.id).unwrap() { + match entity.read_with(&cx, |entity, _| entity.id) { 1 => done_tx1.try_send(()).unwrap(), 2 => done_tx2.try_send(()).unwrap(), _ => unreachable!(), diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 1cf682d2059a9ee29f2782ab10412ccfbe387e22..d162ab4ca6ae77ba796ebb8d6a720d398092d943 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -256,7 +256,7 @@ impl UserStore { } else { anyhow::Ok(()) } - })??; + })?; this.update(cx, |_, cx| cx.notify())?; } @@ -299,7 +299,7 @@ impl UserStore { _: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { - this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts))?; + this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts)); Ok(()) } @@ -312,7 +312,7 @@ impl UserStore { this.update_contacts_tx .unbounded_send(UpdateContacts::Update(message.payload)) .unwrap(); - })?; + }); Ok(()) } @@ -353,7 +353,7 @@ impl UserStore { let mut incoming_requests = Vec::new(); for request in message.incoming_requests { incoming_requests.push({ - this.update(cx, |this, cx| this.get_user(request.requester_id, cx))? + this.update(cx, |this, cx| this.get_user(request.requester_id, cx)) .await? }); } @@ -361,7 +361,7 @@ impl UserStore { let mut outgoing_requests = Vec::new(); for requested_user_id in message.outgoing_requests { outgoing_requests.push( - this.update(cx, |this, cx| this.get_user(requested_user_id, cx))? + this.update(cx, |this, cx| this.get_user(requested_user_id, cx)) .await?, ); } @@ -428,7 +428,7 @@ impl UserStore { } cx.notify(); - })?; + }); Ok(()) }) @@ -798,7 +798,7 @@ impl UserStore { this.read_with(cx, |this, _cx| { this.client.upgrade().map(|client| client.cloud_client()) }) - })?? + })? .ok_or(anyhow::anyhow!("Failed to get Cloud client"))?; let response = cloud_client.get_authenticated_user().await?; @@ -806,7 +806,7 @@ impl UserStore { this.update(cx, |this, cx| { this.update_authenticated_user(response, cx); }) - })??; + })?; } } @@ -914,7 +914,7 @@ impl Contact { let user = user_store .update(cx, |user_store, cx| { user_store.get_user(contact.user_id, cx) - })? 
+ }) .await?; Ok(Self { user, diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs index 9cf2fab80b78ba06c6a2523013e2f73934f50052..b8a609b847a99fe5db187549cf1aa5efd96343af 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -283,7 +283,7 @@ impl EditPredictionDelegate for CodestralEditPredictionDelegate { let edits: Arc<[(Range, Arc)]> = vec![(cursor_position..cursor_position, completion_text.into())].into(); let edit_preview = buffer - .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))? + .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx)) .await; this.update(cx, |this, cx| { diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index acdffacd0dec1c31a4729737feb4f241aaae3c51..a2d0955e04b0ccdc88b83e7178292d30e1039446 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -800,7 +800,6 @@ impl RandomizedTest for ProjectCollaborationTest { assert!( buffer .read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() }) - .expect("App should not be dropped") .observed_all(&requested_version) ); anyhow::Ok(()) diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index 8959c6ccbe88d1f3f78fb29009904244624d9999..0e6ecaae6b89cd4ba34abc85a2bc6941b1b085a3 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -173,7 +173,7 @@ impl ChannelView { }; buffer.set_language(Some(markdown), cx); }) - })?; + }); cx.new_window_entity(|window, cx| { let mut this = Self::new( diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 8ed0b81f32d907235cb30a2b3b45f1e5d52cc528..138c8d857f3685b68f6053880c8f1a8d067abe82 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2179,7 +2179,7 @@ impl CollabPanel { cx.spawn_in(window, async move |this, cx| { if answer.await? == 0 { channel_store - .update(cx, |channels, _| channels.remove_channel(channel_id))? + .update(cx, |channels, _| channels.remove_channel(channel_id)) .await .notify_async_err(cx); this.update_in(cx, |_, window, cx| cx.focus_self(window)) @@ -2213,7 +2213,7 @@ impl CollabPanel { cx.spawn_in(window, async move |_, cx| { if answer.await? == 0 { user_store - .update(cx, |store, cx| store.remove_contact(user_id, cx))? 
+ .update(cx, |store, cx| store.remove_contact(user_id, cx)) .await .notify_async_err(cx); } diff --git a/crates/collab_ui/src/notifications/incoming_call_notification.rs b/crates/collab_ui/src/notifications/incoming_call_notification.rs index 0bf71ce61439f17135b7ef2ebc651a36d985e342..592f39f67679f94199afb86123ee389336cb9347 100644 --- a/crates/collab_ui/src/notifications/incoming_call_notification.rs +++ b/crates/collab_ui/src/notifications/incoming_call_notification.rs @@ -24,27 +24,20 @@ pub fn init(app_state: &Arc, cx: &mut App) { } if let Some(incoming_call) = incoming_call { - let unique_screens = cx.update(|cx| cx.displays()).unwrap(); + let unique_screens = cx.update(|cx| cx.displays()); let window_size = gpui::Size { width: px(400.), height: px(72.), }; for screen in unique_screens { - if let Some(options) = cx - .update(|cx| notification_window_options(screen, window_size, cx)) - .log_err() - { - let window = cx - .open_window(options, |_, cx| { - cx.new(|_| { - IncomingCallNotification::new( - incoming_call.clone(), - app_state.clone(), - ) - }) - }) - .unwrap(); + let options = + cx.update(|cx| notification_window_options(screen, window_size, cx)); + if let Ok(window) = cx.open_window(options, |_, cx| { + cx.new(|_| { + IncomingCallNotification::new(incoming_call.clone(), app_state.clone()) + }) + }) { notification_windows.push(window); } } @@ -88,8 +81,7 @@ impl IncomingCallNotificationState { ) .detach_and_log_err(cx); } - }) - .log_err(); + }); } anyhow::Ok(()) }) diff --git a/crates/context_server/src/listener.rs b/crates/context_server/src/listener.rs index b71d59d760242d7f927e35dc1fef2351b462af32..ad70c6d32e1ef65dcc85ea5c49f0aeacdc2c1098 100644 --- a/crates/context_server/src/listener.rs +++ b/crates/context_server/src/listener.rs @@ -222,16 +222,12 @@ impl McpServer { } else if let Some(handler) = handlers.borrow().get(&request.method.as_ref()) { let outgoing_tx = outgoing_tx.clone(); - if let Some(task) = cx - .update(|cx| handler(request_id, request.params, cx)) - .log_err() - { - cx.spawn(async move |_| { - let response = task.await; - outgoing_tx.unbounded_send(response).ok(); - }) - .detach(); - } + let task = cx.update(|cx| handler(request_id, request.params, cx)); + cx.spawn(async move |_| { + let response = task.await; + outgoing_tx.unbounded_send(response).ok(); + }) + .detach(); } else { Self::send_err( request_id, diff --git a/crates/context_server/src/transport/stdio_transport.rs b/crates/context_server/src/transport/stdio_transport.rs index 2632dfce62292fcda552bce967b541c3949d7052..c3af1aa8745a074ad545cad0518d2ffea2822b65 100644 --- a/crates/context_server/src/transport/stdio_transport.rs +++ b/crates/context_server/src/transport/stdio_transport.rs @@ -31,7 +31,7 @@ impl StdioTransport { working_directory: &Option, cx: &AsyncApp, ) -> Result { - let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?; + let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone()); let builder = ShellBuilder::new(&shell, cfg!(windows)).non_interactive(); let mut command = builder.build_smol_command(Some(binary.executable.display().to_string()), &binary.args); diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index a6963296f5c0ce0395698d2952618123c103ff55..c86c249a6788027ef2550390a60e91529a222a20 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -546,7 +546,7 @@ impl Copilot { let mut params = server.default_initialize_params(false, cx); params.initialization_options = 
Some(editor_info_json); server.initialize(params, configuration.into(), cx) - })? + }) .await?; this.update(cx, |_, cx| notify_did_change_config_to_server(&server, cx))? diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index 52a3631791ecaf4e1f7b2bc935be37816f2b25de..085959b59f97c0c17f4a4044b71c158b703cc515 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -624,8 +624,6 @@ impl CopilotChat { ) -> Result<(Arc, ApiToken, CopilotChatConfiguration)> { let this = cx .update(|cx| Self::global(cx)) - .ok() - .flatten() .context("Copilot chat is not enabled")?; let (oauth_token, api_token, client, configuration) = this.read_with(cx, |this, _| { @@ -635,7 +633,7 @@ impl CopilotChat { this.client.clone(), this.configuration.clone(), ) - })?; + }); let oauth_token = oauth_token.context("No OAuth token available")?; @@ -648,7 +646,7 @@ impl CopilotChat { this.update(cx, |this, cx| { this.api_token = Some(token.clone()); cx.notify(); - })?; + }); token } }; diff --git a/crates/copilot/src/copilot_edit_prediction_delegate.rs b/crates/copilot/src/copilot_edit_prediction_delegate.rs index 33fdf86f7da9222cbaa881292de18c4f4370ae38..fe26979f655418f74efc29c6c0ad0757895261ef 100644 --- a/crates/copilot/src/copilot_edit_prediction_delegate.rs +++ b/crates/copilot/src/copilot_edit_prediction_delegate.rs @@ -75,14 +75,12 @@ impl EditPredictionDelegate for CopilotEditPredictionDelegate { let completions = copilot .update(cx, |copilot, cx| { copilot.completions(&buffer, cursor_position, cx) - })? + }) .await?; if let Some(mut completion) = completions.into_iter().next() - && let Some((trimmed_range, trimmed_text, snapshot)) = cx - .update(|cx| trim_completion(&completion, cx)) - .ok() - .flatten() + && let Some((trimmed_range, trimmed_text, snapshot)) = + cx.update(|cx| trim_completion(&completion, cx)) { let preview = buffer .update(cx, |this, cx| { @@ -90,7 +88,7 @@ impl EditPredictionDelegate for CopilotEditPredictionDelegate { Arc::from([(trimmed_range.clone(), trimmed_text.clone())].as_slice()), cx, ) - })? + }) .await; this.update(cx, |this, cx| { this.pending_refresh = None; diff --git a/crates/credentials_provider/src/credentials_provider.rs b/crates/credentials_provider/src/credentials_provider.rs index 2c8dd6fc812aaeffd6c06c88ee2adceabdbb27a3..249b8333e114223aa558cd33637fd103294a8f8d 100644 --- a/crates/credentials_provider/src/credentials_provider.rs +++ b/crates/credentials_provider/src/credentials_provider.rs @@ -92,7 +92,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { url: &'a str, cx: &'a AsyncApp, ) -> Pin)>>> + 'a>> { - async move { cx.update(|cx| cx.read_credentials(url))?.await }.boxed_local() + async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local() } fn write_credentials<'a>( @@ -103,7 +103,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { cx: &'a AsyncApp, ) -> Pin> + 'a>> { async move { - cx.update(move |cx| cx.write_credentials(url, username, password))? 
+ cx.update(move |cx| cx.write_credentials(url, username, password)) .await } .boxed_local() @@ -114,7 +114,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { url: &'a str, cx: &'a AsyncApp, ) -> Pin> + 'a>> { - async move { cx.update(move |cx| cx.delete_credentials(url))?.await }.boxed_local() + async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local() } } diff --git a/crates/dap/src/transport.rs b/crates/dap/src/transport.rs index 73ebda657099d401164678220313ba4f7a7d5dc2..3e616fb3f451d5a63ba69d1220f1d06f21cce957 100644 --- a/crates/dap/src/transport.rs +++ b/crates/dap/src/transport.rs @@ -178,9 +178,7 @@ impl TransportDelegate { self.tasks.lock().clear(); let log_dap_communications = - cx.update(|cx| DebuggerSettings::get_global(cx).log_dap_communications) - .with_context(|| "Failed to get Debugger Setting log dap communications error in transport::start_handlers. Defaulting to false") - .unwrap_or(false); + cx.update(|cx| DebuggerSettings::get_global(cx).log_dap_communications); let connect = self.transport.lock().connect(); let (input, output) = connect.await?; @@ -550,10 +548,9 @@ impl TcpTransport { process = Some(p); }; - let timeout = connection_args.timeout.unwrap_or_else(|| { - cx.update(|cx| DebuggerSettings::get_global(cx).timeout) - .unwrap_or(20000u64) - }); + let timeout = connection_args + .timeout + .unwrap_or_else(|| cx.update(|cx| DebuggerSettings::get_global(cx).timeout)); log::info!( "Debug adapter has connected to TCP server {}:{}", diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index 317ce8b4c65e441f1fc4041706989532aa150204..8ff43f35ad9f5be2f4fbf975c229f3e9f8d96334 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -155,7 +155,7 @@ impl LogStore { if let Some(this) = this.upgrade() { this.update(cx, |this, cx| { this.add_debug_adapter_message(message, cx); - })?; + }); } smol::future::yield_now().await; @@ -170,7 +170,7 @@ impl LogStore { if let Some(this) = this.upgrade() { this.update(cx, |this, cx| { this.add_debug_adapter_log(message, cx); - })?; + }); } smol::future::yield_now().await; @@ -902,10 +902,10 @@ impl DapLogView { let language = language.await.ok(); buffer.update(cx, |buffer, cx| { buffer.set_language(language, cx); - }) + }); } }) - .detach_and_log_err(cx); + .detach(); }); self.editor = editor; diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 0b91b9f28559ac6d7f991b7a0b9822820004148d..86e8ccc496e0a75684d9128b782ccb7899f8399b 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -268,7 +268,7 @@ impl DebugPanel { dap_store .update(cx, |dap_store, cx| { dap_store.boot_session(session.clone(), definition, worktree, cx) - })? + }) .await } }); @@ -286,7 +286,7 @@ impl DebugPanel { .unbounded_send(format!("error: {:#}", error)) .ok(); session.shutdown(cx) - })? + }) .await; } anyhow::Ok(()) @@ -404,7 +404,7 @@ impl DebugPanel { session.boot(binary, worktree, dap_store_handle.downgrade(), cx) }); (session, task) - })?; + }); Self::register_session(this.clone(), session.clone(), true, cx).await?; if let Err(error) = task.await { @@ -418,7 +418,7 @@ impl DebugPanel { )) .ok(); session.shutdown(cx) - })? 
+ }) .await; return Err(error); @@ -466,11 +466,10 @@ impl DebugPanel { session.boot(binary, worktree, dap_store_handle.downgrade(), cx) }); (session, task) - })?; + }); // Focus child sessions if the parent has never emitted a stopped event; // this improves our JavaScript experience, as it always spawns a "main" session that then spawns subsessions. - let parent_ever_stopped = - parent_session.update(cx, |this, _| this.has_ever_stopped())?; + let parent_ever_stopped = parent_session.update(cx, |this, _| this.has_ever_stopped()); Self::register_session(this, session, !parent_ever_stopped, cx).await?; task.await }) @@ -517,7 +516,7 @@ impl DebugPanel { return; } } - session.update(cx, |session, cx| session.shutdown(cx)).ok(); + session.update(cx, |session, cx| session.shutdown(cx)); this.update(cx, |this, cx| { this.retain_sessions(|other| entity_id != other.entity_id()); if let Some(active_session_id) = this @@ -1443,7 +1442,7 @@ async fn register_session_inner( session: Entity, cx: &mut AsyncWindowContext, ) -> Result> { - let adapter_name = session.read_with(cx, |session, _| session.adapter())?; + let adapter_name = session.read_with(cx, |session, _| session.adapter()); this.update_in(cx, |_, window, cx| { cx.subscribe_in( &session, diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index 68e391562b57d530a21624b0626173eeb7a67c16..1ea8bcca76eac61702acd9c31b703bf33383d552 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -185,7 +185,7 @@ impl NewProcessModal { .collect::>(); let Some(task_inventory) = task_store - .update(cx, |task_store, _| task_store.task_inventory().cloned())? + .update(cx, |task_store, _| task_store.task_inventory().cloned()) else { return Ok(()); }; @@ -194,7 +194,7 @@ impl NewProcessModal { .update(cx, |task_inventory, cx| { task_inventory .used_and_current_resolved_tasks(task_contexts.clone(), cx) - })? + }) .await; if let Ok(task) = debug_picker.update(cx, |picker, cx| { diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index 422207d3cbf4880e0c8e3c02e01dbe373800ea62..d2f0162d348dcab365f71575aa663a2b8212ee48 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -1113,7 +1113,7 @@ impl RunningState { task_with_shell.clone(), cx, ) - })?.await?; + }).await?; let terminal_view = cx.new_window_entity(|window, cx| { TerminalView::new( @@ -1135,7 +1135,7 @@ impl RunningState { })?; let exit_status = terminal - .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))? + .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx)) .await .context("Failed to wait for completed task")?; @@ -1302,7 +1302,7 @@ impl RunningState { .pid() .map(|pid| pid.as_u32()) .context("Terminal was spawned but PID was not available") - })? + }) }); cx.background_spawn(async move { anyhow::Ok(sender.send(terminal_task.await).await?) 
}) diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index f154757429a2bbfe153ee40c2c513dd06f05aa03..352acbd530d937d8300528000693ab76099ca991 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -151,7 +151,7 @@ impl BreakpointList { .update(cx, |this, cx| this.find_or_create_worktree(path, false, cx)); cx.spawn_in(window, async move |this, cx| { let (worktree, relative_path) = task.await?; - let worktree_id = worktree.read_with(cx, |this, _| this.id())?; + let worktree_id = worktree.read_with(cx, |this, _| this.id()); let item = this .update_in(cx, |this, window, cx| { this.workspace.update(cx, |this, cx| { diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index 4dffb57a792cb5a5ed6bcc8003a8fa6f3b9af9de..b460f532548d8a71fafb031ff5c77323d60f046c 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -428,7 +428,7 @@ impl StackFrameList { .await?; let position = buffer.read_with(cx, |this, _| { this.snapshot().anchor_after(PointUtf16::new(row, 0)) - })?; + }); this.update_in(cx, |this, window, cx| { this.workspace.update(cx, |workspace, cx| { let project_path = buffer diff --git a/crates/debugger_ui/src/stack_trace_view.rs b/crates/debugger_ui/src/stack_trace_view.rs index 70b88d203e4ff8017127eee2ad6ff0a81df74c69..1d274ba63da839a4a7e9da9cae4cacdc1d872aa5 100644 --- a/crates/debugger_ui/src/stack_trace_view.rs +++ b/crates/debugger_ui/src/stack_trace_view.rs @@ -183,13 +183,13 @@ impl StackTraceView { .await?; let project_path = ProjectPath { - worktree_id: worktree.read_with(cx, |tree, _| tree.id())?, + worktree_id: worktree.read_with(cx, |tree, _| tree.id()), path: relative_path, }; if let Some(buffer) = this .read_with(cx, |this, _| this.project.clone())? - .update(cx, |project, cx| project.open_buffer(project_path, cx))? + .update(cx, |project, cx| project.open_buffer(project_path, cx)) .await .log_err() { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index afa3ebf42abd2968e779596aa6131b5f5a0c4c94..806e164a68aa9d80adc8ad23e6ce9363970c768a 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -360,7 +360,7 @@ impl ProjectDiagnosticsEditor { }; if let Some(buffer) = project_handle - .update(cx, |project, cx| project.open_buffer(path.clone(), cx))? + .update(cx, |project, cx| project.open_buffer(path.clone(), cx)) .await .log_err() { @@ -1088,9 +1088,8 @@ async fn heuristic_syntactic_expand( return Some(node_row_range); } else if node_name.ends_with("statement") || node_name.ends_with("declaration") { // Expand to the nearest dedent or blank line for statements and declarations. 
- let tab_size = cx - .update(|cx| snapshot.settings_at(node_range.start, cx).tab_size.get()) - .ok()?; + let tab_size = + cx.update(|cx| snapshot.settings_at(node_range.start, cx).tab_size.get()); let indent_level = snapshot .line_indent_for_row(node_range.start.row) .len(tab_size); diff --git a/crates/edit_prediction/src/capture_example.rs b/crates/edit_prediction/src/capture_example.rs index b176dfe0f215bf0ad9ccd1d63634d41ffd0ab66e..f234e480b6d0b467e6b0e368b8d5b36949968311 100644 --- a/crates/edit_prediction/src/capture_example.rs +++ b/crates/edit_prediction/src/capture_example.rs @@ -153,19 +153,19 @@ async fn collect_snapshots( .filter(|path| path.worktree_id == worktree_id)?; let relative_path: Arc = project_path.path.as_std_path().into(); Some((project_path, relative_path)) - })? { + }) { if let hash_map::Entry::Vacant(entry) = snapshots_by_path.entry(relative_path) { let buffer = project .update(cx, |project, cx| { project.open_buffer(project_path.clone(), cx) - })? + }) .await?; let diff = git_store .update(cx, |git_store, cx| { git_store.open_uncommitted_diff(buffer.clone(), cx) - })? + }) .await?; - let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?; + let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx)); entry.insert((stored_event.old_snapshot.clone(), diff_snapshot)); } } diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 6867deb4a666db806b7052cb9e644efb5267394a..f24d5b77e1ab93cb6746626b02a11f467650fa31 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1841,7 +1841,7 @@ impl EditPredictionStore { worktree_id: file.worktree_id(cx), path: file.path().clone(), }) - })?; + }); let buffer_task = project.update(cx, |project, cx| { let (path, _, _) = project @@ -1862,7 +1862,7 @@ impl EditPredictionStore { })?; Some(project.open_buffer(path, cx)) - })?; + }); if let Some(buffer_task) = buffer_task { let closest_buffer = buffer_task.await?; @@ -1874,7 +1874,7 @@ impl EditPredictionStore { .into_iter() .min_by_key(|entry| entry.diagnostic.severity) .map(|entry| entry.range.start) - })? + }) .map(|position| (closest_buffer, position)); } } @@ -1973,8 +1973,7 @@ impl EditPredictionStore { }) }, ); - }) - .ok(); + }); } Err(err) } diff --git a/crates/edit_prediction/src/prediction.rs b/crates/edit_prediction/src/prediction.rs index c63640ccd0e1815b32f736e8a0fee8d75d124df1..af0f87dfbef5d484ab80ce81013312f430cfad80 100644 --- a/crates/edit_prediction/src/prediction.rs +++ b/crates/edit_prediction/src/prediction.rs @@ -49,16 +49,14 @@ impl EditPredictionResult { }; } - let Some((edits, snapshot, edit_preview_task)) = edited_buffer - .read_with(cx, |buffer, cx| { + let Some((edits, snapshot, edit_preview_task)) = + edited_buffer.read_with(cx, |buffer, cx| { let new_snapshot = buffer.snapshot(); let edits: Arc<[_]> = interpolate_edits(&edited_buffer_snapshot, &new_snapshot, &edits)?.into(); Some((edits.clone(), new_snapshot, buffer.preview_edits(edits, cx))) }) - .ok() - .flatten() else { return Self { id, diff --git a/crates/edit_prediction/src/udiff.rs b/crates/edit_prediction/src/udiff.rs index 517d193678a3478b9c74b5b2a6c55b32a91b5b5b..f52b2e4bf5daeef1ac6a926147dd166e2ea8ce59 100644 --- a/crates/edit_prediction/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -32,7 +32,7 @@ pub async fn apply_diff( cx: &mut AsyncApp, ) -> Result { let worktree = project - .read_with(cx, |project, cx| project.visible_worktrees(cx).next())? 
+ .read_with(cx, |project, cx| project.visible_worktrees(cx).next()) .context("project has no worktree")?; let paths: Vec<_> = diff_str @@ -65,7 +65,7 @@ pub async fn apply_diff( } else { None } - })?; + }); if let Some(delete_task) = delete_task { delete_task.await?; @@ -79,20 +79,20 @@ pub async fn apply_diff( let buffer = match included_files.entry(path.to_string()) { Entry::Occupied(entry) => entry.get().clone(), Entry::Vacant(entry) => { - let buffer = if status == FileStatus::Created { + let buffer: Entity = if status == FileStatus::Created { project - .update(cx, |project, cx| project.create_buffer(true, cx))? + .update(cx, |project, cx| project.create_buffer(true, cx)) .await? } else { let project_path = project .update(cx, |project, cx| { project.find_project_path(path.as_ref(), cx) - })? + }) .with_context(|| format!("no such path: {}", path))?; project .update(cx, |project, cx| { project.open_buffer(project_path, cx) - })? + }) .await? }; entry.insert(buffer.clone()); @@ -111,7 +111,7 @@ pub async fn apply_diff( .with_context(|| format!("Diff:\n{diff_str}"))?, ); anyhow::Ok(()) - })??; + })?; } DiffEvent::FileEnd { renamed_to } => { let buffer = current_file @@ -135,14 +135,14 @@ pub async fn apply_diff( new_project_path, cx, )) - })?? + })? .await?; } let edits = mem::take(&mut edits); buffer.update(cx, |buffer, cx| { buffer.edit(edits, None, cx); - })?; + }); } } } @@ -174,7 +174,7 @@ pub async fn refresh_worktree_entries( .as_local() .unwrap() .refresh_entries_for_paths(rel_paths) - })? + }) .recv() .await; } diff --git a/crates/edit_prediction/src/zeta1.rs b/crates/edit_prediction/src/zeta1.rs index d4ee8f661ba05a5dd09628250e1df59f8895203c..8575102f638d8a0f1a1cca7ba1d89b20ea33f818 100644 --- a/crates/edit_prediction/src/zeta1.rs +++ b/crates/edit_prediction/src/zeta1.rs @@ -177,8 +177,7 @@ pub(crate) fn request_prediction_with_zeta1( }) }, ); - }) - .ok(); + }); } return Err(err); diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 84da5e3516cd0f3853d2c0a1be21553f83f46b46..34504d5299f3bc7740f8c4d35d5e9de3d66e7791 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -11,7 +11,7 @@ use edit_prediction::{ EditPredictionStore, zeta2::{zeta2_output_for_patch, zeta2_prompt_input}, }; -use gpui::AsyncApp; +use gpui::{AsyncApp, Entity}; use std::sync::Arc; use zeta_prompt::format_zeta_prompt; @@ -46,36 +46,37 @@ pub async fn run_format_prompt( step_progress.set_substatus("formatting zeta2 prompt"); - let ep_store = cx.update(|cx| { + let ep_store: Entity = cx.update(|cx| { EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized") - })??; + })?; let state = example.state.as_ref().context("state must be set")?; - let snapshot = state.buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + let snapshot = state.buffer.read_with(&cx, |buffer, _| buffer.snapshot()); let project = state.project.clone(); - let (_, input) = ep_store.update(&mut cx, |ep_store, cx| { - let events = ep_store - .edit_history_for_project(&project, cx) - .into_iter() - .map(|e| e.event) - .collect(); - anyhow::Ok(zeta2_prompt_input( - &snapshot, - example - .context - .as_ref() - .context("context must be set")? - .files - .clone(), - events, - example.spec.cursor_path.clone(), - example - .buffer - .as_ref() - .context("buffer must be set")? 
- .cursor_offset, - )) - })??; + let (_, input) = + ep_store.update(&mut cx, |ep_store: &mut EditPredictionStore, cx| { + let events = ep_store + .edit_history_for_project(&project, cx) + .into_iter() + .map(|e| e.event) + .collect(); + anyhow::Ok(zeta2_prompt_input( + &snapshot, + example + .context + .as_ref() + .context("context must be set")? + .files + .clone(), + events, + example.spec.cursor_path.clone(), + example + .buffer + .as_ref() + .context("buffer must be set")? + .cursor_offset, + )) + })?; let prompt = format_zeta_prompt(&input); let expected_output = zeta2_output_for_patch( &input, diff --git a/crates/edit_prediction_cli/src/load_project.rs b/crates/edit_prediction_cli/src/load_project.rs index 0fbb80979a15b7e82bef71cb7b3877a7287ae513..794c0d02d20ef4f17dd50a5d054a11e852589b79 100644 --- a/crates/edit_prediction_cli/src/load_project.rs +++ b/crates/edit_prediction_cli/src/load_project.rs @@ -36,7 +36,7 @@ pub async fn run_load_project( let (buffer, cursor_position) = cursor_position(example, &project, &open_buffers, &mut cx).await?; buffer - .read_with(&cx, |buffer, _| buffer.parsing_idle())? + .read_with(&cx, |buffer, _| buffer.parsing_idle()) .await; let (example_buffer, language_name) = buffer.read_with(&cx, |buffer, _cx| { let cursor_point = cursor_position.to_point(&buffer); @@ -64,7 +64,7 @@ pub async fn run_load_project( }, language_name, ) - })?; + }); progress.set_info(language_name, InfoStyle::Normal); @@ -84,7 +84,7 @@ async fn cursor_position( open_buffers: &OpenedBuffers, cx: &mut AsyncApp, ) -> Result<(Entity, Anchor)> { - let language_registry = project.read_with(cx, |project, _| project.languages().clone())?; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let result = language_registry .load_language_for_file_path(&example.spec.cursor_path) .await; @@ -101,14 +101,14 @@ async fn cursor_position( buffer.clone() } else { // Since the worktree scanner is disabled, manually refresh entries for the cursor path. - if let Some(worktree) = project.read_with(cx, |project, cx| project.worktrees(cx).next())? { + if let Some(worktree) = project.read_with(cx, |project, cx| project.worktrees(cx).next()) { refresh_worktree_entries(&worktree, [&*example.spec.cursor_path], cx).await?; } let cursor_path = project .read_with(cx, |project, cx| { project.find_project_path(&example.spec.cursor_path, cx) - })? + }) .with_context(|| { format!( "failed to find cursor path {}", @@ -117,13 +117,13 @@ async fn cursor_position( })?; project - .update(cx, |project, cx| project.open_buffer(cursor_path, cx))? + .update(cx, |project, cx| project.open_buffer(cursor_path, cx)) .await? }; let (cursor_excerpt, cursor_offset_within_excerpt) = example.spec.cursor_excerpt()?; - let excerpt_offset = cursor_buffer.read_with(cx, |buffer, _cx| { + let excerpt_offset = cursor_buffer.read_with(&*cx, |buffer, _cx| { let text = buffer.text(); let mut matches = text.match_indices(&cursor_excerpt); @@ -139,11 +139,11 @@ async fn cursor_position( &example.spec.name ); Ok(excerpt_offset) - })??; + })?; let cursor_offset = excerpt_offset + cursor_offset_within_excerpt; let cursor_anchor = - cursor_buffer.read_with(cx, |buffer, _| buffer.anchor_after(cursor_offset))?; + cursor_buffer.read_with(&*cx, |buffer, _| buffer.anchor_after(cursor_offset)); Ok((cursor_buffer, cursor_anchor)) } @@ -155,7 +155,7 @@ async fn setup_project( cx: &mut AsyncApp, ) -> Result> { let ep_store = cx - .update(|cx| EditPredictionStore::try_global(cx))? 
+ .update(|cx| EditPredictionStore::try_global(cx)) .context("Store should be initialized at init")?; let worktree_path = setup_worktree(example, step_progress).await?; @@ -163,16 +163,13 @@ async fn setup_project( if let Some(project) = app_state.project_cache.get(&example.spec.repository_url) { ep_store.update(cx, |ep_store, _| { ep_store.clear_history_for_project(&project); - })?; - let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?; + }); + let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone()); let buffers = buffer_store.read_with(cx, |buffer_store, _| { buffer_store.buffers().collect::>() - })?; + }); for buffer in buffers { - buffer - .update(cx, |buffer, cx| buffer.reload(cx))? - .await - .ok(); + buffer.update(cx, |buffer, cx| buffer.reload(cx)).await.ok(); } return Ok(project); } @@ -188,20 +185,20 @@ async fn setup_project( false, cx, ) - })?; + }); project .update(cx, |project, cx| { project.disable_worktree_scanner(cx); project.create_worktree(&worktree_path, true, cx) - })? + }) .await?; app_state .project_cache .insert(example.spec.repository_url.clone(), project.clone()); - let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?; + let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone()); cx.subscribe(&buffer_store, { let project = project.clone(); move |_, event, cx| match event { @@ -210,7 +207,7 @@ async fn setup_project( } _ => {} } - })? + }) .detach(); Ok(project) diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index a74ddaa515dbd7168424d16c9a3038b2dd88ef2d..8a9500f96967415171f4627ffc7c6ce40f355c66 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -78,9 +78,9 @@ pub async fn run_prediction( .await; } - let ep_store = cx.update(|cx| { - EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized") - })??; + let ep_store = cx + .update(|cx| EditPredictionStore::try_global(cx)) + .context("EditPredictionStore not initialized")?; ep_store.update(&mut cx, |store, _cx| { let model = match provider { @@ -93,7 +93,7 @@ pub async fn run_prediction( } }; store.set_edit_prediction_model(model); - })?; + }); step_progress.set_substatus("configuring model"); let state = example.state.as_ref().context("state must be set")?; let run_dir = RUN_DIR.join(&example.spec.name); @@ -101,8 +101,7 @@ pub async fn run_prediction( let updated_example = Arc::new(Mutex::new(example.clone())); let current_run_ix = Arc::new(AtomicUsize::new(0)); - let mut debug_rx = - ep_store.update(&mut cx, |store, cx| store.debug_info(&state.project, cx))?; + let mut debug_rx = ep_store.update(&mut cx, |store, cx| store.debug_info(&state.project, cx)); let debug_task = cx.background_spawn({ let updated_example = updated_example.clone(); let current_run_ix = current_run_ix.clone(); @@ -185,7 +184,7 @@ pub async fn run_prediction( cloud_llm_client::PredictEditsRequestTrigger::Cli, cx, ) - })? 
+ }) .await?; let actual_patch = prediction @@ -219,7 +218,7 @@ pub async fn run_prediction( ep_store.update(&mut cx, |store, _| { store.remove_project(&state.project); - })?; + }); debug_task.await?; *example = Arc::into_inner(updated_example) diff --git a/crates/edit_prediction_cli/src/retrieve_context.rs b/crates/edit_prediction_cli/src/retrieve_context.rs index c993cd493df91d0238a6c3c7e7038b4f31568287..8ccfcae9fe17542b99e81df6168484fb1bcd55b0 100644 --- a/crates/edit_prediction_cli/src/retrieve_context.rs +++ b/crates/edit_prediction_cli/src/retrieve_context.rs @@ -34,19 +34,19 @@ pub async fn run_context_retrieval( let _lsp_handle = project.update(&mut cx, |project, cx| { project.register_buffer_with_language_servers(&state.buffer, cx) - })?; + }); wait_for_language_servers_to_start(&project, &state.buffer, &step_progress, &mut cx).await?; - let ep_store = cx.update(|cx| { - EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized") - })??; + let ep_store = cx + .update(|cx| EditPredictionStore::try_global(cx)) + .context("EditPredictionStore not initialized")?; let mut events = ep_store.update(&mut cx, |store, cx| { store.register_buffer(&state.buffer, &project, cx); store.set_use_context(true); store.refresh_context(&project, &state.buffer, state.cursor_position, cx); store.debug_info(&project, cx) - })?; + }); while let Some(event) = events.next().await { match event { @@ -58,7 +58,7 @@ pub async fn run_context_retrieval( } let context_files = - ep_store.update(&mut cx, |store, cx| store.context_for_project(&project, cx))?; + ep_store.update(&mut cx, |store, cx| store.context_for_project(&project, cx)); let excerpt_count: usize = context_files.iter().map(|f| f.excerpts.len()).sum(); step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal); @@ -75,10 +75,10 @@ async fn wait_for_language_servers_to_start( step_progress: &Arc, cx: &mut AsyncApp, ) -> anyhow::Result<()> { - let lsp_store = project.read_with(cx, |project, _| project.lsp_store())?; + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); - let (language_server_ids, mut starting_language_server_ids) = buffer - .update(cx, |buffer, cx| { + let (language_server_ids, mut starting_language_server_ids) = + buffer.update(cx, |buffer, cx| { lsp_store.update(cx, |lsp_store, cx| { let ids = lsp_store.language_servers_for_local_buffer(buffer, cx); let starting_ids = ids @@ -88,8 +88,7 @@ async fn wait_for_language_servers_to_start( .collect::>(); (ids, starting_ids) }) - }) - .unwrap_or_default(); + }); step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len())); @@ -164,7 +163,7 @@ async fn wait_for_language_servers_to_start( ]; project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) .await?; let mut pending_language_server_ids = lsp_store.read_with(cx, |lsp_store, _| { @@ -178,7 +177,7 @@ async fn wait_for_language_servers_to_start( .is_some_and(|status| status.has_pending_diagnostic_updates) }) .collect::>() - })?; + }); while !pending_language_server_ids.is_empty() { futures::select! 
{ language_server_id = rx.next() => { diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index 15576a835d9b4b0781b1e3979edbed443fa40f62..5bbfa4ed9d92e7462922c9b9df3b0c134ca1dc96 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -197,7 +197,7 @@ impl RelatedExcerptStore { DefinitionTask::CacheMiss(task) => { let locations = task.await.log_err()??; let duration = start_time.elapsed(); - cx.update(|cx| { + Some(cx.update(|cx| { ( identifier, Arc::new(CacheEntry { @@ -210,8 +210,7 @@ impl RelatedExcerptStore { }), Some(duration), ) - }) - .ok() + })) } } }) @@ -280,12 +279,12 @@ async fn rebuild_related_files( if let hash_map::Entry::Vacant(e) = snapshots.entry(definition.buffer.entity_id()) { definition .buffer - .read_with(cx, |buffer, _| buffer.parsing_idle())? + .read_with(cx, |buffer, _| buffer.parsing_idle()) .await; e.insert( definition .buffer - .read_with(cx, |buffer, _| buffer.snapshot())?, + .read_with(cx, |buffer, _| buffer.snapshot()), ); } let worktree_id = definition.path.worktree_id; @@ -296,7 +295,7 @@ async fn rebuild_related_files( if let Some(worktree) = project.worktree_for_id(worktree_id, cx) { e.insert(worktree.read(cx).root_name().as_unix_str().to_string()); } - })?; + }); } } } diff --git a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs index 92d66d2bec3a7a3b35678f1d4da92fae6b071633..a148f08c11f25c0cf419a9e2fe5dd741eeb01105 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs @@ -188,7 +188,7 @@ impl EditPredictionContextView { for (path, buffer, ranges) in paths { multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); } - })?; + }); editor.update_in(cx, |editor, window, cx| { editor.move_to_beginning(&Default::default(), window, cx); diff --git a/crates/edit_prediction_ui/src/edit_prediction_ui.rs b/crates/edit_prediction_ui/src/edit_prediction_ui.rs index ed4a750494fffc45549be6023f113f953f5d91e8..73af36c82fa36ed0c55b7796d52fd5b23c8698d6 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_ui.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_ui.rs @@ -168,16 +168,19 @@ fn capture_example_as_markdown( fs.create_dir(&dir).await.ok(); let mut path = dir.join(&example_spec.name.replace(' ', "--").replace(':', "-")); path.set_extension("md"); - project.update(cx, |project, cx| project.open_local_buffer(&path, cx)) + project + .update(cx, |project, cx| project.open_local_buffer(&path, cx)) + .await? } else { - project.update(cx, |project, cx| project.create_buffer(false, cx)) - }? - .await?; + project + .update(cx, |project, cx| project.create_buffer(false, cx)) + .await? 
+ }; buffer.update(cx, |buffer, cx| { buffer.set_text(example_spec.to_markdown(), cx); buffer.set_language(Some(markdown_language), cx); - })?; + }); workspace_entity.update_in(cx, |workspace, window, cx| { workspace.add_item_to_active_pane( Box::new( diff --git a/crates/editor/src/blink_manager.rs b/crates/editor/src/blink_manager.rs index d99cf6a7d59d40383e572f4638b17edbf3d0da53..1b3baa999352fb37cf36603db4860e622b8549c9 100644 --- a/crates/editor/src/blink_manager.rs +++ b/crates/editor/src/blink_manager.rs @@ -74,8 +74,7 @@ impl BlinkManager { cx.spawn(async move |this, cx| { Timer::after(interval).await; if let Some(this) = this.upgrade() { - this.update(cx, |this, cx| this.blink_cursors(epoch, cx)) - .ok(); + this.update(cx, |this, cx| this.blink_cursors(epoch, cx)); } }) .detach(); diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 49936580987188e13ba88f919288f65371951f4b..892fdfb586013f69e38fdff20fe625dfe82f6c4c 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -45,10 +45,10 @@ pub fn switch_source_header( .map(|file| file.path()) .map(|path| path.display(PathStyle::local()).to_string()) .unwrap_or_else(|| "Unknown".to_string()) - })?; + }); let switch_source_header = if let Some((client, project_id)) = upstream_client { - let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id()); let request = proto::LspExtSwitchSourceHeader { project_id, buffer_id: buffer_id.to_proto(), @@ -67,7 +67,7 @@ pub fn switch_source_header( project::lsp_store::lsp_ext_command::SwitchSourceHeader, cx, ) - })? + }) .await .with_context(|| { format!("Switch source/header LSP request for path \"{source_file}\" failed") diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 7b114820d252f7cd4c5a1ccac2ae10fc73cfb092..ccc87686c8f202b2fde46b04323ab74bf9083ab7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5512,12 +5512,10 @@ impl Editor { Some(cx.spawn_in(window, async move |editor, cx| { if let Some(transaction) = on_type_formatting.await? { if push_to_client_history { - buffer - .update(cx, |buffer, _| { - buffer.push_transaction(transaction, Instant::now()); - buffer.finalize_last_transaction(); - }) - .ok(); + buffer.update(cx, |buffer, _| { + buffer.push_transaction(transaction, Instant::now()); + buffer.finalize_last_transaction(); + }); } editor.update(cx, |editor, cx| { editor.refresh_document_highlights(cx); @@ -6306,7 +6304,7 @@ impl Editor { let project_transaction = lsp_store .update(cx, |lsp_store, cx| { lsp_store.apply_code_action(buffer_handle, command, false, cx) - })? + }) .await .context("applying post-completion command")?; if let Some(workspace) = editor.read_with(cx, |editor, _| editor.workspace())? 
{ @@ -6713,7 +6711,7 @@ impl Editor { .all(|range| { excerpt_range.start <= range.start && excerpt_range.end >= range.end }) - })?; + }); if all_edits_within_excerpt { return Ok(()); @@ -6741,7 +6739,7 @@ impl Editor { } multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx); multibuffer - })?; + }); workspace.update_in(cx, |workspace, window, cx| { let project = workspace.project().clone(); @@ -7101,13 +7099,9 @@ impl Editor { .timer(Duration::from_millis(debounce)) .await; - let highlights = if let Some(highlights) = cx - .update(|cx| { - provider.document_highlights(&cursor_buffer, cursor_buffer_position, cx) - }) - .ok() - .flatten() - { + let highlights = if let Some(highlights) = cx.update(|cx| { + provider.document_highlights(&cursor_buffer, cursor_buffer_position, cx) + }) { highlights.await.log_err() } else { None @@ -16346,9 +16340,7 @@ impl Editor { return; }; - let hide_runnables = project - .update(cx, |project, _| project.is_via_collab()) - .unwrap_or(true); + let hide_runnables = project.update(cx, |project, _| project.is_via_collab()); if hide_runnables { return; } @@ -16531,11 +16523,9 @@ impl Editor { let mut templates_with_tags = Vec::new(); if let Some(inventory) = inventory { for RunnableTag(tag) in tags { - let Ok(new_tasks) = inventory.update(cx, |inventory, cx| { + let new_tasks = inventory.update(cx, |inventory, cx| { inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx) - }) else { - return templates_with_tags; - }; + }); templates_with_tags.extend(new_tasks.await.into_iter().filter( move |(_, template)| { template.tags.iter().any(|source_tag| source_tag == &tag) @@ -17621,7 +17611,7 @@ impl Editor { .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left); target_buffer.anchor_after(target_start) ..target_buffer.anchor_before(target_end) - })?; + }); Location { buffer: target_buffer_handle, range, @@ -17720,7 +17710,7 @@ impl Editor { }); (locations, current_location_index) - })?; + }); let Some(current_location_index) = current_location_index else { // This indicates something has gone wrong, because we already @@ -18459,27 +18449,27 @@ impl Editor { } }; - buffer - .update(cx, |buffer, cx| { - if let Some(transaction) = transaction - && !buffer.is_singleton() - { - buffer.push_transaction(&transaction.0, cx); - } - cx.notify(); - }) - .ok(); + buffer.update(cx, |buffer, cx| { + if let Some(transaction) = transaction + && !buffer.is_singleton() + { + buffer.push_transaction(&transaction.0, cx); + } + cx.notify(); + }); if let Some(transaction_id_now) = - buffer.read_with(cx, |b, cx| b.last_transaction_id(cx))? 
+ buffer.read_with(cx, |b, cx| b.last_transaction_id(cx)) { let has_new_transaction = transaction_id_prev != Some(transaction_id_now); if has_new_transaction { - _ = editor.update(cx, |editor, _| { - editor - .selection_history - .insert_transaction(transaction_id_now, selections_prev); - }); + editor + .update(cx, |editor, _| { + editor + .selection_history + .insert_transaction(transaction_id_now, selections_prev); + }) + .ok(); } } @@ -18527,17 +18517,15 @@ impl Editor { } transaction = apply_action.log_err().fuse() => transaction, }; - buffer - .update(cx, |buffer, cx| { - // check if we need this - if let Some(transaction) = transaction - && !buffer.is_singleton() - { - buffer.push_transaction(&transaction.0, cx); - } - cx.notify(); - }) - .ok(); + buffer.update(cx, |buffer, cx| { + // check if we need this + if let Some(transaction) = transaction + && !buffer.is_singleton() + { + buffer.push_transaction(&transaction.0, cx); + } + cx.notify(); + }); Ok(()) }) } @@ -18831,10 +18819,8 @@ impl Editor { if let Some(debounce) = debounce { cx.background_executor().timer(debounce).await; } - let Some(snapshot) = editor.upgrade().and_then(|editor| { - editor - .update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) - .ok() + let Some(snapshot) = editor.upgrade().map(|editor| { + editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) }) else { return; }; @@ -21217,19 +21203,14 @@ impl Editor { anyhow::Result::<()>::Err(err).log_err(); if let Some(workspace) = workspace { - workspace - .update(cx, |workspace, cx| { - struct OpenPermalinkToLine; + workspace.update(cx, |workspace, cx| { + struct OpenPermalinkToLine; - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - message, - ), - cx, - ) - }) - .ok(); + workspace.show_toast( + Toast::new(NotificationId::unique::(), message), + cx, + ) + }); } } }) @@ -24025,20 +24006,15 @@ fn update_uncommitted_diff_for_buffer( }); cx.spawn(async move |cx| { let diffs = future::join_all(tasks).await; - if editor - .read_with(cx, |editor, _cx| editor.temporary_diff_override) - .unwrap_or(false) - { + if editor.read_with(cx, |editor, _cx| editor.temporary_diff_override) { return; } - buffer - .update(cx, |buffer, cx| { - for diff in diffs.into_iter().flatten() { - buffer.add_diff(diff, cx); - } - }) - .ok(); + buffer.update(cx, |buffer, cx| { + for diff in diffs.into_iter().flatten() { + buffer.add_diff(diff, cx); + } + }); }) } @@ -25081,7 +25057,7 @@ impl SemanticsProvider for Entity { snapshot.anchor_before(range.start) ..snapshot.anchor_after(range.end), ) - })? 
+ }) } }) }) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 4f60989cd47e3fd070aa2d2e309ac7e0f162b9c7..466184bcec6b29dd7fd0cc04ae6d13b78bd65eb3 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -18251,7 +18251,7 @@ async fn test_on_type_formatting_is_applied_after_autoindent(cx: &mut TestAppCon "fn c() {\n d()\n .\n}\n", "OnTypeFormatting should triggered after autoindent applied" ) - })?; + }); Ok(Some(vec![])) } diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index d1338c3cbd3540914b23a53410fd5c823e1285c8..0546ae6143c51b6e552a783899d56fc85922b48a 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -533,9 +533,9 @@ impl GitBlame { }) }) .collect::>>() - })??; + })?; let provider_registry = - cx.update(|cx| GitHostingProviderRegistry::default_global(cx))?; + cx.update(|cx| GitHostingProviderRegistry::default_global(cx)); let (results, errors) = cx .background_spawn({ async move { diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 1c00acbfa9f1a69cbe01c45758db5a0cd4fee757..5ee81909a1c665b847f3aef05cdc495e918f4c6e 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -475,7 +475,7 @@ pub(crate) fn find_url( ) -> Option<(Range, String)> { const LIMIT: usize = 2048; - let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()).ok()?; + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); let offset = position.to_offset(&snapshot); let mut token_start = offset; @@ -535,9 +535,7 @@ pub(crate) fn find_url_from_range( ) -> Option { const LIMIT: usize = 2048; - let Ok(snapshot) = buffer.read_with(&cx, |buffer, _| buffer.snapshot()) else { - return None; - }; + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); let start_offset = range.start.to_offset(&snapshot); let end_offset = range.end.to_offset(&snapshot); @@ -595,7 +593,7 @@ pub(crate) async fn find_file( cx: &mut AsyncWindowContext, ) -> Option<(Range, ResolvedPath)> { let project = project?; - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()).ok()?; + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let scope = snapshot.language_scope_at(position); let (range, candidate_file_path) = surrounding_filename(&snapshot, position)?; let candidate_len = candidate_file_path.len(); @@ -610,7 +608,6 @@ pub(crate) async fn find_file( .update(cx, |project, cx| { project.resolve_path_in_buffer(candidate_file_path, buffer, cx) }) - .ok()? 
.await .filter(|s| s.is_file()) } diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 9c718cff4e0eeebd08fea9c20404ddb8f599988a..f72194ba25fea15a865f22380eb646bf511ce59b 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -165,7 +165,7 @@ pub fn hover_at_inlay( this.hover_state.diagnostic_popover = None; })?; - let language_registry = project.read_with(cx, |p, _| p.languages().clone())?; + let language_registry = project.read_with(cx, |p, _| p.languages().clone()); let blocks = vec![inlay_hover.tooltip]; let parsed_content = parse_blocks(&blocks, Some(&language_registry), None, cx).await; diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 4578e3b5fd99700b9a981785501be363876e6c06..163c4a6bb01acfff0a64b738067dbea7ba0d2a95 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -398,7 +398,7 @@ async fn update_editor_from_message( .into_iter() .map(|id| BufferId::new(id).map(|id| project.open_buffer_by_id(id, cx))) .collect::>>() - })??; + })?; let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?; // Update the editor's excerpts. @@ -892,7 +892,7 @@ impl Item for Editor { project .update(cx, |project, cx| { project.save_buffers(buffers_to_save.clone(), cx) - })? + }) .await?; } @@ -938,15 +938,13 @@ impl Item for Editor { this.update(cx, |editor, cx| { editor.request_autoscroll(Autoscroll::fit(), cx) })?; - buffer - .update(cx, |buffer, cx| { - if let Some(transaction) = transaction - && !buffer.is_singleton() - { - buffer.push_transaction(&transaction.0, cx); - } - }) - .ok(); + buffer.update(cx, |buffer, cx| { + if let Some(transaction) = transaction + && !buffer.is_singleton() + { + buffer.push_transaction(&transaction.0, cx); + } + }); Ok(()) }) } @@ -1103,7 +1101,7 @@ impl SerializableItem for Editor { let project = project.clone(); async move |cx| { let language_registry = - project.read_with(cx, |project, _| project.languages().clone())?; + project.read_with(cx, |project, _| project.languages().clone()); let language = if let Some(language_name) = language { // We don't fail here, because we'd rather not set the language if the name changed @@ -1118,7 +1116,7 @@ impl SerializableItem for Editor { // First create the empty buffer let buffer = project - .update(cx, |project, cx| project.create_buffer(true, cx))? + .update(cx, |project, cx| project.create_buffer(true, cx)) .await .context("Failed to create buffer while deserializing editor")?; @@ -1132,7 +1130,7 @@ impl SerializableItem for Editor { if let Some(entry) = buffer.peek_undo_stack() { buffer.forget_transaction(entry.transaction_id()); } - })?; + }); cx.update(|window, cx| { cx.new(|cx| { @@ -1187,7 +1185,7 @@ impl SerializableItem for Editor { if let Some(entry) = buffer.peek_undo_stack() { buffer.forget_transaction(entry.transaction_id()); } - })?; + }); } cx.update(|window, cx| { @@ -1229,7 +1227,7 @@ impl SerializableItem for Editor { .. } => window.spawn(cx, async move |cx| { let buffer = project - .update(cx, |project, cx| project.create_buffer(true, cx))? 
+ .update(cx, |project, cx| project.create_buffer(true, cx)) .await .context("Failed to create buffer")?; diff --git a/crates/editor/src/jsx_tag_auto_close.rs b/crates/editor/src/jsx_tag_auto_close.rs index 1d808c968d579569fb595a5a1a0ddaa4dbc718b3..8d8f8d43b1ad1d374b130a9b4bc83297e4a76214 100644 --- a/crates/editor/src/jsx_tag_auto_close.rs +++ b/crates/editor/src/jsx_tag_auto_close.rs @@ -443,7 +443,7 @@ pub(crate) fn handle_from( }; } - let buffer_snapshot = buffer.read_with(cx, |buf, _| buf.snapshot()).ok()?; + let buffer_snapshot = buffer.read_with(cx, |buf, _| buf.snapshot()); let Some(edit_behavior_state) = should_auto_close(&buffer_snapshot, &edited_ranges, &jsx_tag_auto_close_config) @@ -567,11 +567,9 @@ pub(crate) fn handle_from( } } - buffer - .update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - }) - .ok()?; + buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); if any_selections_need_update { let multi_buffer_snapshot = this diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index ff3096961d646a2a98458319d927a4e2723d0602..b3f2136acdc3bfed36956f1c23d76ad4b7934c3c 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -99,9 +99,7 @@ pub(super) fn refresh_linked_ranges( let cx = cx.to_async(); let highlights = async move { let edits = linked_edits_task.await.log_err()?; - let snapshot = cx - .read_entity(&buffer, |buffer, _| buffer.snapshot()) - .ok()?; + let snapshot = cx.read_entity(&buffer, |buffer, _| buffer.snapshot()); let buffer_id = snapshot.remote_id(); // Find the range containing our current selection. diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index 37cc734ab1ef0a0b677b3e405ff70b461d349a1c..058a297a974bdb210bdb90b4e3320809f6870641 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -60,27 +60,22 @@ async fn lsp_task_context( buffer: &Entity, cx: &mut AsyncApp, ) -> Option { - let (worktree_store, environment) = project - .read_with(cx, |project, _| { - (project.worktree_store(), project.environment().clone()) - }) - .ok()?; + let (worktree_store, environment) = project.read_with(cx, |project, _| { + (project.worktree_store(), project.environment().clone()) + }); - let worktree_abs_path = cx - .update(|cx| { - let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx)); + let worktree_abs_path = cx.update(|cx| { + let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx)); - worktree_id - .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx)) - .and_then(|worktree| worktree.read(cx).root_dir()) - }) - .ok()?; + worktree_id + .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx)) + .and_then(|worktree| worktree.read(cx).root_dir()) + }); let project_env = environment .update(cx, |environment, cx| { environment.buffer_environment(buffer, &worktree_store, cx) }) - .ok()? 
.await; Some(TaskContext { @@ -126,19 +121,18 @@ pub fn lsp_tasks( let source_kind = match buffer.update(cx, |buffer, _| { buffer.language().map(|language| language.name()) }) { - Ok(Some(language_name)) => TaskSourceKind::Lsp { + Some(language_name) => TaskSourceKind::Lsp { server: server_id, language_name: SharedString::from(language_name), }, - Ok(None) => continue, - Err(_) => return Vec::new(), + None => continue, }; let id_base = source_kind.to_id_base(); let lsp_buffer_context = lsp_task_context(&project, &buffer, cx) .await .unwrap_or_default(); - if let Ok(runnables_task) = project.update(cx, |project, cx| { + let runnables_task = project.update(cx, |project, cx| { let buffer_id = buffer.read(cx).remote_id(); project.request_lsp( buffer, @@ -149,8 +143,8 @@ pub fn lsp_tasks( }, cx, ) - }) && let Some(new_runnables) = runnables_task.await.log_err() - { + }); + if let Some(new_runnables) = runnables_task.await.log_err() { new_lsp_tasks.extend(new_runnables.runnables.into_iter().filter_map( |(location, runnable)| { let resolved_task = diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index f548db75ad5d8cfe32a59a798b6d23931c34f215..70139214a6e9d5b4e8760ddbcbf579b26dcde51c 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -81,7 +81,7 @@ pub fn go_to_parent_module( let upstream_client = lsp_store.read(cx).upstream_client(); cx.spawn_in(window, async move |editor, cx| { let location_links = if let Some((client, project_id)) = upstream_client { - let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id()); let request = proto::LspExtGoToParentModule { project_id, @@ -103,7 +103,7 @@ pub fn go_to_parent_module( .collect::>() .context("go to parent module via collab")? } else { - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { @@ -113,7 +113,7 @@ pub fn go_to_parent_module( project::lsp_store::lsp_ext_command::GoToParentModule { position }, cx, ) - })? + }) .await .context("go to parent module")? }; @@ -161,7 +161,7 @@ pub fn expand_macro_recursively( let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client(); cx.spawn_in(window, async move |_editor, cx| { let macro_expansion = if let Some((client, project_id)) = upstream_client { - let buffer_id = buffer.update(cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.update(cx, |buffer, _| buffer.remote_id()); let request = proto::LspExtExpandMacro { project_id, buffer_id: buffer_id.to_proto(), @@ -176,7 +176,7 @@ pub fn expand_macro_recursively( expansion: response.expansion, } } else { - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { @@ -186,7 +186,7 @@ pub fn expand_macro_recursively( ExpandMacro { position }, cx, ) - })? + }) .await .context("expand macro")? }; @@ -200,7 +200,7 @@ pub fn expand_macro_recursively( } let buffer = project - .update(cx, |project, cx| project.create_buffer(false, cx))? 
+ .update(cx, |project, cx| project.create_buffer(false, cx)) .await?; workspace.update_in(cx, |workspace, window, cx| { buffer.update(cx, |buffer, cx| { @@ -252,7 +252,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client(); cx.spawn_in(window, async move |_editor, cx| { let docs_urls = if let Some((client, project_id)) = upstream_client { - let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id()); let request = proto::LspExtOpenDocs { project_id, buffer_id: buffer_id.to_proto(), @@ -267,7 +267,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu local: response.local, } } else { - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { @@ -277,7 +277,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu project::lsp_store::lsp_ext_command::OpenDocs { position }, cx, ) - })? + }) .await .context("open docs")? }; @@ -303,7 +303,8 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu if let Some(web_url) = docs_urls.web { cx.open_url(&web_url); } - }) + }); + anyhow::Ok(()) }) .detach_and_log_err(cx); } diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index 80633696b7d5e655bb7db3627568b881642cf62c..ac773bab786223fb073a5a87118ec0304e1781bf 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -150,7 +150,7 @@ fn main() { registry.set_default_model(Some(agent_model.clone()), cx); }); judge_model - })?; + }); let mut examples = Vec::new(); @@ -210,7 +210,8 @@ fn main() { if examples.is_empty() { eprintln!("Filter matched no examples"); - return cx.update(|cx| cx.quit()); + cx.update(|cx| cx.quit()); + return anyhow::Ok(()); } let mut repo_urls = HashSet::default(); @@ -294,7 +295,7 @@ fn main() { let result = async { example.setup().await?; let run_output = cx - .update(|cx| example.run(app_state.clone(), cx))? 
+ .update(|cx| example.run(app_state.clone(), cx)) .await?; let judge_output = judge_example( example.clone(), @@ -328,7 +329,8 @@ fn main() { app_state.client.telemetry().flush_events().await; - cx.update(|cx| cx.quit()) + cx.update(|cx| cx.quit()); + anyhow::Ok(()) }) .detach_and_log_err(cx); }); diff --git a/crates/eval/src/example.rs b/crates/eval/src/example.rs index b6f93bcc875a849bbdec60c60b595fbcd1b8c5d8..32c649e98c3abdda091fdb895a087eda685c41cc 100644 --- a/crates/eval/src/example.rs +++ b/crates/eval/src/example.rs @@ -221,7 +221,7 @@ impl ExampleContext { } else { thread.proceed(cx) } - })??; + })?; let task = self.app.background_spawn(async move { let mut messages = Vec::new(); @@ -357,11 +357,13 @@ impl ExampleContext { } pub fn edits(&self) -> HashMap, FileEdits> { - self.agent_thread - .read_with(&self.app, |thread, cx| { - let action_log = thread.action_log().read(cx); - HashMap::from_iter(action_log.changed_buffers(cx).into_iter().map( - |(buffer, diff)| { + self.agent_thread.read_with(&self.app, |thread, cx| { + let action_log = thread.action_log().read(cx); + HashMap::from_iter( + action_log + .changed_buffers(cx) + .into_iter() + .map(|(buffer, diff)| { let snapshot = buffer.read(cx).snapshot(); let file = snapshot.file().unwrap(); @@ -381,10 +383,9 @@ impl ExampleContext { .collect(); (file.path().clone(), FileEdits { hunks }) - }, - )) - }) - .unwrap() + }), + ) + }) } pub fn agent_thread(&self) -> Entity { @@ -393,16 +394,14 @@ impl ExampleContext { } impl AppContext for ExampleContext { - type Result = anyhow::Result; - fn new( &mut self, build_entity: impl FnOnce(&mut gpui::Context) -> T, - ) -> Self::Result> { + ) -> Entity { self.app.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> gpui::Reservation { self.app.reserve_entity() } @@ -410,7 +409,7 @@ impl AppContext for ExampleContext { &mut self, reservation: gpui::Reservation, build_entity: impl FnOnce(&mut gpui::Context) -> T, - ) -> Self::Result> { + ) -> Entity { self.app.insert_entity(reservation, build_entity) } @@ -418,25 +417,21 @@ impl AppContext for ExampleContext { &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut gpui::Context) -> R, - ) -> Self::Result + ) -> R where T: 'static, { self.app.update_entity(handle, update) } - fn as_mut<'a, T>(&'a mut self, handle: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, handle: &Entity) -> gpui::GpuiBorrow<'a, T> where T: 'static, { self.app.as_mut(handle) } - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { @@ -471,7 +466,7 @@ impl AppContext for ExampleContext { self.app.background_spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: gpui::Global, { diff --git a/crates/eval/src/examples/code_block_citations.rs b/crates/eval/src/examples/code_block_citations.rs index c8ba75e99f019b0b0609743b10573bae712f82cd..4fe7aa81124ca3fa8f84cd5145e83bd710fdf461 100644 --- a/crates/eval/src/examples/code_block_citations.rs +++ b/crates/eval/src/examples/code_block_citations.rs @@ -62,39 +62,29 @@ impl Example for CodeBlockCitations { cx.assert(citation.contains("/"), format!("Slash in {citation:?}",)) { let path_range = PathWithRange::new(citation); - let path = cx - .agent_thread() - .update(cx, |thread, cx| { - thread - .project() - 
.read(cx) - .find_project_path(path_range.path.as_ref(), cx) - }) - .ok() - .flatten(); + let path = cx.agent_thread().update(cx, |thread, cx| { + thread + .project() + .read(cx) + .find_project_path(path_range.path.as_ref(), cx) + }); if let Ok(path) = cx.assert_some(path, format!("Valid path: {citation:?}")) { let buffer_text = { - let buffer = match cx.agent_thread().update(cx, |thread, cx| { - thread - .project() - .update(cx, |project, cx| project.open_buffer(path, cx)) - }) { - Ok(buffer_task) => buffer_task.await.ok(), - Err(err) => { - cx.assert( - false, - format!("Expected Ok(buffer), not {err:?}"), - ) - .ok(); - break; - } - }; + let buffer = cx + .agent_thread() + .update(cx, |thread, cx| { + thread + .project() + .update(cx, |project, cx| project.open_buffer(path, cx)) + }) + .await + .ok(); let Ok(buffer_text) = cx.assert_some( - buffer.and_then(|buffer| { - buffer.read_with(cx, |buffer, _| buffer.text()).ok() + buffer.map(|buffer| { + buffer.read_with(cx, |buffer, _| buffer.text()) }), "Reading buffer text succeeded", ) else { diff --git a/crates/eval/src/examples/file_change_notification.rs b/crates/eval/src/examples/file_change_notification.rs index 41ce10cd2240f2e81812a51b2ec581422c102c41..10683ec6509cece9d8d26039ff36ff458bdf418a 100644 --- a/crates/eval/src/examples/file_change_notification.rs +++ b/crates/eval/src/examples/file_change_notification.rs @@ -29,7 +29,7 @@ impl Example for FileChangeNotificationExample { .read(cx) .find_project_path("README", cx) .expect("README file should exist in this repo") - })?; + }); let buffer = { cx.agent_thread() @@ -37,7 +37,7 @@ impl Example for FileChangeNotificationExample { thread .project() .update(cx, |project, cx| project.open_buffer(project_path, cx)) - })? + }) .await? }; @@ -45,7 +45,7 @@ impl Example for FileChangeNotificationExample { thread.action_log().update(cx, |action_log, cx| { action_log.buffer_read(buffer.clone(), cx); }); - })?; + }); // Start conversation (specific message is not important) cx.prompt_with_max_turns("Find all files in this repo", 1) @@ -54,7 +54,7 @@ impl Example for FileChangeNotificationExample { // Edit the README buffer - the model should get a notification on next turn buffer.update(cx, |buffer, cx| { buffer.edit([(0..buffer.len(), "Surprise!")], None, cx); - })?; + }); // Run for some more turns. // The model shouldn't thank us for letting it know about the file change. diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index c6affb557d0f16ed6c8da80d39b37807bf048c60..27a5447027cb3d89c7a5dca1f0a68b8382081533 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -220,7 +220,7 @@ impl ExampleInstance { worktree .update(cx, |worktree, _cx| { worktree.as_local().unwrap().scan_complete() - })? + }) .await; struct LanguageServerState { @@ -233,39 +233,40 @@ impl ExampleInstance { let lsp = if let Some(language_server) = &meta.language_server { // Open a file that matches the language to cause LSP to start. 
- let language_file = worktree.read_with(cx, |worktree, _cx| { - worktree - .files(false, 0) - .find_map(|e| { - if e.path.clone().extension() - == Some(&language_server.file_extension) - { - Some(ProjectPath { - worktree_id: worktree.id(), - path: e.path.clone(), - }) - } else { - None - } - }) - .context("Failed to find a file for example language") - })??; + let language_file = worktree + .read_with(cx, |worktree, _cx| { + worktree + .files(false, 0) + .find_map(|e| { + if e.path.clone().extension() + == Some(&language_server.file_extension) + { + Some(ProjectPath { + worktree_id: worktree.id(), + path: e.path.clone(), + }) + } else { + None + } + }) + .context("Failed to find a file for example language") + })?; let open_language_file_buffer_task = project.update(cx, |project, cx| { project.open_buffer(language_file.clone(), cx) - })?; + }); let language_file_buffer = open_language_file_buffer_task.await?; let lsp_open_handle = project.update(cx, |project, cx| { project.register_buffer_with_language_servers(&language_file_buffer, cx) - })?; + }); wait_for_lang_server(&project, &language_file_buffer, this.log_prefix.clone(), cx).await?; diagnostic_summary_before = project.read_with(cx, |project, cx| { - project.diagnostic_summary(false, cx) - })?; + project.diagnostic_summary(false, cx) + }); diagnostics_before = query_lsp_diagnostics(project.clone(), cx).await?; if diagnostics_before.is_some() && language_server.allow_preexisting_diagnostics { @@ -337,7 +338,7 @@ impl ExampleInstance { }); thread - }).unwrap(); + }); let mut example_cx = ExampleContext::new( meta.clone(), @@ -371,13 +372,13 @@ impl ExampleInstance { .update(|cx| { let project = project.clone(); cx.spawn(async move |cx| query_lsp_diagnostics(project, cx).await) - })? + }) .await?; println!("{}Got diagnostics", this.log_prefix); diagnostic_summary_after = project.read_with(cx, |project, cx| { - project.diagnostic_summary(false, cx) - })?; + project.diagnostic_summary(false, cx) + }); } @@ -389,7 +390,7 @@ impl ExampleInstance { fs::write(this.run_directory.join("diagnostics_after.txt"), diagnostics_after)?; } - thread.update(cx, |thread, _cx| { + Ok(thread.update(cx, |thread, _cx| { RunOutput { repository_diff, diagnostic_summary_before, @@ -401,7 +402,7 @@ impl ExampleInstance { thread_markdown: thread.to_markdown(), programmatic_assertions: example_cx.assertions, } - }) + })) }) } @@ -614,17 +615,19 @@ struct EvalTerminalHandle { impl agent::TerminalHandle for EvalTerminalHandle { fn id(&self, cx: &AsyncApp) -> Result { - self.terminal.read_with(cx, |term, _cx| term.id().clone()) + Ok(self.terminal.read_with(cx, |term, _cx| term.id().clone())) } fn wait_for_exit(&self, cx: &AsyncApp) -> Result>> { - self.terminal - .read_with(cx, |term, _cx| term.wait_for_exit()) + Ok(self + .terminal + .read_with(cx, |term, _cx| term.wait_for_exit())) } fn current_output(&self, cx: &AsyncApp) -> Result { - self.terminal - .read_with(cx, |term, cx| term.current_output(cx)) + Ok(self + .terminal + .read_with(cx, |term, cx| term.current_output(cx))) } fn kill(&self, cx: &AsyncApp) -> Result<()> { @@ -632,13 +635,14 @@ impl agent::TerminalHandle for EvalTerminalHandle { self.terminal.update(cx, |terminal, cx| { terminal.kill(cx); }); - })?; + }); Ok(()) } fn was_stopped_by_user(&self, cx: &AsyncApp) -> Result { - self.terminal - .read_with(cx, |term, _cx| term.was_stopped_by_user()) + Ok(self + .terminal + .read_with(cx, |term, _cx| term.was_stopped_by_user())) } } @@ -653,7 +657,7 @@ impl agent::ThreadEnvironment for 
EvalThreadEnvironment { let project = self.project.clone(); cx.spawn(async move |cx| { let language_registry = - project.read_with(cx, |project, _cx| project.languages().clone())?; + project.read_with(cx, |project, _cx| project.languages().clone()); let id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); let terminal = acp_thread::create_terminal_entity(command, &[], vec![], cwd.clone(), &project, cx) @@ -668,7 +672,7 @@ impl agent::ThreadEnvironment for EvalThreadEnvironment { language_registry, cx, ) - })?; + }); Ok(Rc::new(EvalTerminalHandle { terminal }) as Rc) }) } @@ -899,25 +903,20 @@ pub fn wait_for_lang_server( let (mut tx, mut rx) = mpsc::channel(1); - let lsp_store = project - .read_with(cx, |project, _| project.lsp_store()) - .unwrap(); - - let has_lang_server = buffer - .update(cx, |buffer, cx| { - lsp_store.update(cx, |lsp_store, cx| { - lsp_store - .running_language_servers_for_local_buffer(buffer, cx) - .next() - .is_some() - }) + let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); + + let has_lang_server = buffer.update(cx, |buffer, cx| { + lsp_store.update(cx, |lsp_store, cx| { + lsp_store + .running_language_servers_for_local_buffer(buffer, cx) + .next() + .is_some() }) - .unwrap_or(false); + }); if has_lang_server { project .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .unwrap() .detach(); } @@ -984,7 +983,7 @@ pub async fn query_lsp_diagnostics( .filter(|(_, _, summary)| summary.error_count > 0 || summary.warning_count > 0) .map(|(project_path, _, _)| project_path) .collect::>() - })?; + }); if paths_with_diagnostics.is_empty() { return Ok(None); @@ -993,9 +992,9 @@ pub async fn query_lsp_diagnostics( let mut output = String::new(); for project_path in paths_with_diagnostics { let buffer = project - .update(cx, |project, cx| project.open_buffer(project_path, cx))? + .update(cx, |project, cx| project.open_buffer(project_path, cx)) .await?; - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); for (_, group) in snapshot.diagnostic_groups(None) { let entry = &group.entries[group.primary_ix]; diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs index c3a290a55a8f901553d6d2b2542d8af8bcd1665c..290dbb6fd40fc3c15dcb210c767b9102b7117544 100644 --- a/crates/extension_host/src/headless_host.rs +++ b/crates/extension_host/src/headless_host.rs @@ -245,8 +245,7 @@ impl HeadlessExtensionStore { cx, )); } - }) - .ok(); + }); let _ = join_all(removal_tasks).await; fs.remove_dir( @@ -304,7 +303,7 @@ impl HeadlessExtensionStore { let missing_extensions = extension_store .update(&mut cx, |extension_store, cx| { extension_store.sync_extensions(requested_extensions.collect(), cx) - })? + }) .await?; Ok(proto::SyncExtensionsResponse { @@ -343,7 +342,7 @@ impl HeadlessExtensionStore { PathBuf::from(envelope.payload.tmp_dir), cx, ) - })? + }) .await?; Ok(proto::Ack {}) diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index 0d041c64f4c88735a2bcdf275f005c38aa37e49e..6499dcf2f7a726339b19c962c3db367ec67a987f 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -681,11 +681,11 @@ impl WasmHost { // Run wasi-dependent operations on tokio. // wasmtime_wasi internally uses tokio for I/O operations. 
let (extension_task, manifest, work_dir, tx, zed_api_version) = - gpui_tokio::Tokio::spawn(cx, load_extension(zed_api_version, component))?.await??; + gpui_tokio::Tokio::spawn(cx, load_extension(zed_api_version, component)).await??; // Run the extension message loop on tokio since extension // calls may invoke wasi functions that require a tokio runtime. - let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task)?); + let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task)); Ok(WasmExtension { manifest, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index a7a20f6dc7f1dbedddf34a13032887adf5b61a6e..708583410a3f0b3f6949d08a28d82722ec275f11 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -471,7 +471,7 @@ impl ExtensionImports for WasmState { } .boxed_local() }) - .await? + .await .to_wasmtime_result() } diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs index 8b3f8e86b71e959eade1e5d3710ce66b5b2d3008..faa2d7395042bd05f8e47df2288094eb0bb7a53a 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs @@ -1004,7 +1004,7 @@ impl ExtensionImports for WasmState { } .boxed_local() }) - .await? + .await .to_wasmtime_result() } diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 3dd4803ce17adb053f86f29e3724d58d479136c6..79ac0653b656f5ed73ae457bf8e72e0d3f4fbbe8 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -13,7 +13,7 @@ use editor::{Editor, EditorElement, EditorStyle}; use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore}; use fuzzy::{StringMatchCandidate, match_strings}; use gpui::{ - Action, App, ClipboardItem, Context, Corner, Entity, EventEmitter, Flatten, Focusable, + Action, App, ClipboardItem, Context, Corner, Entity, EventEmitter, Focusable, InteractiveElement, KeyContext, ParentElement, Point, Render, Styled, Task, TextStyle, UniformListScrollHandle, WeakEntity, Window, actions, point, uniform_list, }; @@ -131,25 +131,22 @@ pub fn init(cx: &mut App) { let workspace_handle = cx.entity().downgrade(); window .spawn(cx, async move |cx| { - let extension_path = - match Flatten::flatten(prompt.await.map_err(|e| e.into())) { - Ok(Some(mut paths)) => paths.pop()?, - Ok(None) => return None, - Err(err) => { - workspace_handle - .update(cx, |workspace, cx| { - workspace.show_portal_error(err.to_string(), cx); - }) - .ok(); - return None; - } - }; + let extension_path = match prompt.await.map_err(anyhow::Error::from) { + Ok(Some(mut paths)) => paths.pop()?, + Ok(None) => return None, + Err(err) => { + workspace_handle + .update(cx, |workspace, cx| { + workspace.show_portal_error(err.to_string(), cx); + }) + .ok(); + return None; + } + }; - let install_task = store - .update(cx, |store, cx| { - store.install_dev_extension(extension_path, cx) - }) - .ok()?; + let install_task = store.update(cx, |store, cx| { + store.install_dev_extension(extension_path, cx) + }); match install_task.await { Ok(_) => {} diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 1bfd41fa2709e4c46b5177bee6851d91dc86bccb..ef81ccb8c48b1a496df923f0fcd12b0375084ac1 100644 --- a/crates/file_finder/src/file_finder.rs +++ 
b/crates/file_finder/src/file_finder.rs @@ -1225,35 +1225,27 @@ impl FileFinderDelegate { let query_path = Path::new(query.path_query()); let mut path_matches = Vec::new(); - let abs_file_exists = if let Ok(task) = project.update(cx, |this, cx| { - this.resolve_abs_file_path(query.path_query(), cx) - }) { - task.await.is_some() - } else { - false - }; + let abs_file_exists = project + .update(cx, |this, cx| { + this.resolve_abs_file_path(query.path_query(), cx) + }) + .await + .is_some(); if abs_file_exists { - let update_result = project - .update(cx, |project, cx| { - if let Some((worktree, relative_path)) = - project.find_worktree(query_path, cx) - { - path_matches.push(ProjectPanelOrdMatch(PathMatch { - score: 1.0, - positions: Vec::new(), - worktree_id: worktree.read(cx).id().to_usize(), - path: relative_path, - path_prefix: RelPath::empty().into(), - is_dir: false, // File finder doesn't support directories - distance_to_relative_ancestor: usize::MAX, - })); - } - }) - .log_err(); - if update_result.is_none() { - return abs_file_exists; - } + project.update(cx, |project, cx| { + if let Some((worktree, relative_path)) = project.find_worktree(query_path, cx) { + path_matches.push(ProjectPanelOrdMatch(PathMatch { + score: 1.0, + positions: Vec::new(), + worktree_id: worktree.read(cx).id().to_usize(), + path: relative_path, + path_prefix: RelPath::empty().into(), + is_dir: false, // File finder doesn't support directories + distance_to_relative_ancestor: usize::MAX, + })); + } + }); } picker diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index ca0a0b24d4e33f6e3f977a4451d9dae5ecbddb92..63434612957e028f7fc92c526cd5c9672d771853 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -744,8 +744,6 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter { .context("could not find git binary path") .log_err() }) - .ok() - .flatten() } else { None }; diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index a23d116f4371362ac1e809be844f207d2b496edc..7d8e63081ba56c4b3814c4cdde9539491d5b6971 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -397,7 +397,7 @@ impl BranchListDelegate { cx.spawn(async move |_, cx| { repo.update(cx, |repo, _| { repo.create_branch(new_branch_name, base_branch) - })? + }) .await??; Ok(()) @@ -444,11 +444,11 @@ impl BranchListDelegate { Entry::Branch { branch, .. } => match branch.remote_name() { Some(remote_name) => { is_remote = true; - repo.update(cx, |repo, _| repo.remove_remote(remote_name.to_string()))? + repo.update(cx, |repo, _| repo.remove_remote(remote_name.to_string())) .await? } None => { - repo.update(cx, |repo, _| repo.delete_branch(branch.name().to_string()))? + repo.update(cx, |repo, _| repo.delete_branch(branch.name().to_string())) .await? } }, @@ -763,7 +763,7 @@ impl PickerDelegate for BranchListDelegate { let branch = branch.clone(); cx.spawn(async move |_, cx| { - repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string()))? 
+ repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string())) .await??; anyhow::Ok(()) @@ -1433,7 +1433,6 @@ mod tests { cx.spawn(async move |mut cx| { for branch in branch_names { repo.update(&mut cx, |repo, _| repo.create_branch(branch, None)) - .unwrap() .await .unwrap() .unwrap(); @@ -1498,7 +1497,6 @@ mod tests { repo.update(&mut cx, |repo, _| { repo.create_remote(branch, String::from("test")) }) - .unwrap() .await .unwrap() .unwrap(); diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 51c8403a0a41698053075d6f6f2cb6eed05fc72c..77d9519602006dbc7ef3ac78ddbd55db7fb34e25 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -236,7 +236,7 @@ impl CommitView { .repo_path_to_project_path(&file.path, cx) .map(|path| path.worktree_id) .or(first_worktree_id) - })? + }) .context("project has no worktrees")?; let short_sha = commit_sha.get(0..7).unwrap_or(&commit_sha); let file_name = file @@ -555,7 +555,7 @@ impl CommitView { return Err(anyhow::anyhow!("Stash has changed, not applying")); } Ok(repo.stash_apply(Some(stash), cx)) - })?; + }); match result { Ok(task) => task.await?, @@ -582,7 +582,7 @@ impl CommitView { return Err(anyhow::anyhow!("Stash has changed, pop aborted")); } Ok(repo.stash_pop(Some(stash), cx)) - })?; + }); match result { Ok(task) => task.await?, @@ -609,7 +609,7 @@ impl CommitView { return Err(anyhow::anyhow!("Stash has changed, drop aborted")); } Ok(repo.stash_drop(Some(stash), cx)) - })?; + }); match result { Ok(task) => task.await??, @@ -673,7 +673,7 @@ impl CommitView { workspace .panel::(cx) .and_then(|p| p.read(cx).active_repository.clone()) - })?; + }); let Some(repo) = repo else { return Ok(()); @@ -752,7 +752,7 @@ async fn build_buffer( let line_ending = LineEnding::detect(&text); LineEnding::normalize(&mut text); let text = Rope::from(text); - let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?; + let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx)); let language = if let Some(language) = language { language_registry .load_language(&language) @@ -772,7 +772,7 @@ async fn build_buffer( let mut buffer = Buffer::build(buffer, Some(blob), Capability::ReadWrite); buffer.set_language_async(language, cx); buffer - })?; + }); Ok(buffer) } @@ -786,10 +786,10 @@ async fn build_buffer_diff( LineEnding::normalize(old_text); } - let language = cx.update(|cx| buffer.read(cx).language().cloned())?; - let buffer = cx.update(|cx| buffer.read(cx).snapshot())?; + let language = cx.update(|cx| buffer.read(cx).language().cloned()); + let buffer = cx.update(|cx| buffer.read(cx).snapshot()); - let diff = cx.new(|cx| BufferDiff::new(&buffer.text, cx))?; + let diff = cx.new(|cx| BufferDiff::new(&buffer.text, cx)); let update = diff .update(cx, |diff, cx| { @@ -800,13 +800,13 @@ async fn build_buffer_diff( language.clone(), cx, ) - })? + }) .await; diff.update(cx, |diff, cx| { diff.language_changed(language, Some(language_registry.clone()), cx); diff.set_snapshot(update, &buffer.text, cx) - })? 
+ }) .await; Ok(diff) diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 813e63ab8c96e736cf0cc126526a683b418c2137..6a7432f838dd2711c2f0bc70d0bf6dd41f3da367 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -487,24 +487,16 @@ pub(crate) fn resolve_conflict( else { return; }; - let Some(save) = project - .update(cx, |project, cx| { - if multibuffer.read(cx).all_diff_hunks_expanded() { - project.save_buffer(buffer.clone(), cx) - } else { - Task::ready(Ok(())) - } - }) - .ok() - else { - return; - }; + let save = project.update(cx, |project, cx| { + if multibuffer.read(cx).all_diff_hunks_expanded() { + project.save_buffer(buffer.clone(), cx) + } else { + Task::ready(Ok(())) + } + }); if save.await.log_err().is_none() { let open_path = maybe!({ - let path = buffer - .read_with(cx, |buffer, cx| buffer.project_path(cx)) - .ok() - .flatten()?; + let path = buffer.read_with(cx, |buffer, cx| buffer.project_path(cx))?; workspace .update_in(cx, |workspace, window, cx| { workspace.open_path_preview(path, None, false, false, false, window, cx) diff --git a/crates/git_ui/src/file_diff_view.rs b/crates/git_ui/src/file_diff_view.rs index a07abd832cbf25f48cdff25750bfa20c40626474..048aa82cb58b04dee88df81f425583b129d52b75 100644 --- a/crates/git_ui/src/file_diff_view.rs +++ b/crates/git_ui/src/file_diff_view.rs @@ -47,12 +47,12 @@ impl FileDiffView { window.spawn(cx, async move |cx| { let project = workspace.update(cx, |workspace, _| workspace.project().clone())?; let old_buffer = project - .update(cx, |project, cx| project.open_local_buffer(&old_path, cx))? + .update(cx, |project, cx| project.open_local_buffer(&old_path, cx)) .await?; let new_buffer = project - .update(cx, |project, cx| project.open_local_buffer(&new_path, cx))? + .update(cx, |project, cx| project.open_local_buffer(&new_path, cx)) .await?; - let languages = project.update(cx, |project, _| project.languages().clone())?; + let languages = project.update(cx, |project, _| project.languages().clone()); let buffer_diff = build_buffer_diff(&old_buffer, &new_buffer, languages, cx).await?; @@ -151,7 +151,7 @@ impl FileDiffView { new_snapshot.text.clone(), cx, ) - })? + }) .await .ok(); log::trace!("finish recalculating"); @@ -168,10 +168,10 @@ async fn build_buffer_diff( language_registry: Arc, cx: &mut AsyncApp, ) -> Result> { - let old_buffer_snapshot = old_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; - let new_buffer_snapshot = new_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let old_buffer_snapshot = old_buffer.read_with(cx, |buffer, _| buffer.snapshot()); + let new_buffer_snapshot = new_buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot.text, cx))?; + let diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot.text, cx)); let update = diff .update(cx, |diff, cx| { @@ -182,7 +182,7 @@ async fn build_buffer_diff( new_buffer_snapshot.language().cloned(), cx, ) - })? + }) .await; diff.update(cx, |diff, cx| { @@ -192,7 +192,7 @@ async fn build_buffer_diff( cx, ); diff.set_snapshot(update, &new_buffer_snapshot.text, cx) - })? 
+ }) .await; Ok(diff) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 41ace67b4df001551b07c89a3c2b99c18d6859de..5f98b547e1eee58df24fb3ce4f2d852abf0d8c70 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1280,7 +1280,7 @@ impl GitPanel { .ok_or_else(|| anyhow::anyhow!("Failed to open file"))?; if let Some(active_editor) = item.downcast::() { if let Some(diff_task) = - active_editor.update(cx, |editor, _cx| editor.wait_for_diff_to_load())? + active_editor.update(cx, |editor, _cx| editor.wait_for_diff_to_load()) { diff_task.await; } @@ -1298,7 +1298,8 @@ impl GitPanel { cx, ); }) - })?; + }) + .log_err(); } anyhow::Ok(()) @@ -1385,7 +1386,7 @@ impl GitPanel { let gitignore_abs_path = repo_root.join(".gitignore"); - let buffer = project + let buffer: Entity = project .update(cx, |project, cx| { project.open_local_buffer(gitignore_abs_path, cx) })? @@ -1397,7 +1398,7 @@ impl GitPanel { if existing_content .lines() - .any(|line| line.trim() == file_path_str) + .any(|line: &str| line.trim() == file_path_str) { return; } @@ -1413,7 +1414,7 @@ impl GitPanel { buffer.edit([(insert_position..insert_position, new_entry)], None, cx); should_save = true; - })?; + }); if should_save { project @@ -1935,7 +1936,7 @@ impl GitPanel { cx.spawn({ async move |this, cx| { let stash_task = active_repository - .update(cx, |repo, cx| repo.stash_pop(None, cx))? + .update(cx, |repo, cx| repo.stash_pop(None, cx)) .await; this.update(cx, |this, cx| { stash_task @@ -1958,7 +1959,7 @@ impl GitPanel { cx.spawn({ async move |this, cx| { let stash_task = active_repository - .update(cx, |repo, cx| repo.stash_apply(None, cx))? + .update(cx, |repo, cx| repo.stash_apply(None, cx)) .await; this.update(cx, |this, cx| { stash_task @@ -1981,7 +1982,7 @@ impl GitPanel { cx.spawn({ async move |this, cx| { let stash_task = active_repository - .update(cx, |repo, cx| repo.stash_all(cx))? + .update(cx, |repo, cx| repo.stash_all(cx)) .await; this.update(cx, |this, cx| { stash_task @@ -2278,7 +2279,7 @@ impl GitPanel { stage_task.await?; let commit_task = active_repository.update(cx, |repo, cx| { repo.commit(message.into(), None, options, askpass, cx) - })?; + }); commit_task.await? }) }; @@ -2322,7 +2323,7 @@ impl GitPanel { repo.update(cx, |repo, cx| { repo.reset("HEAD^".to_string(), ResetMode::Soft, cx) - })? + }) .await??; Ok(Some(prior_head)) @@ -2362,7 +2363,7 @@ impl GitPanel { let repo = repo.context("No active repository")?; let pushed_to: Vec = repo - .update(&mut cx, |repo, _| repo.check_for_pushed_commits())? 
+ .update(&mut cx, |repo, _| repo.check_for_pushed_commits()) .await??; if pushed_to.is_empty() { @@ -2533,41 +2534,37 @@ impl GitPanel { repo_work_dir: &Arc, cx: &mut AsyncApp, ) -> Option { - let rules_path = cx - .update(|cx| { - for worktree in project.read(cx).worktrees(cx) { - let worktree_abs_path = worktree.read(cx).abs_path(); - if !worktree_abs_path.starts_with(&repo_work_dir) { - continue; - } + let rules_path = cx.update(|cx| { + for worktree in project.read(cx).worktrees(cx) { + let worktree_abs_path = worktree.read(cx).abs_path(); + if !worktree_abs_path.starts_with(&repo_work_dir) { + continue; + } - let worktree_snapshot = worktree.read(cx).snapshot(); - for rules_name in RULES_FILE_NAMES { - if let Ok(rel_path) = RelPath::unix(rules_name) { - if let Some(entry) = worktree_snapshot.entry_for_path(rel_path) { - if entry.is_file() { - return Some(ProjectPath { - worktree_id: worktree.read(cx).id(), - path: entry.path.clone(), - }); - } + let worktree_snapshot = worktree.read(cx).snapshot(); + for rules_name in RULES_FILE_NAMES { + if let Ok(rel_path) = RelPath::unix(rules_name) { + if let Some(entry) = worktree_snapshot.entry_for_path(rel_path) { + if entry.is_file() { + return Some(ProjectPath { + worktree_id: worktree.read(cx).id(), + path: entry.path.clone(), + }); } } } } - None - }) - .ok()??; + } + None + })?; let buffer = project .update(cx, |project, cx| project.open_buffer(rules_path, cx)) - .ok()? .await .ok()?; let content = buffer .read_with(cx, |buffer, _| buffer.text()) - .ok()? .trim() .to_string(); @@ -2591,12 +2588,11 @@ impl GitPanel { } let load = async { - let store = cx.update(|cx| PromptStore::global(cx)).ok()?.await.ok()?; + let store = cx.update(|cx| PromptStore::global(cx)).await.ok()?; store .update(cx, |s, cx| { s.load(PromptId::BuiltIn(BuiltInPrompt::CommitMessage), cx) }) - .ok()? .await .ok() }; @@ -2653,9 +2649,9 @@ impl GitPanel { } else { None } - })? { + }) { task.await.log_err(); - }; + } let mut diff_text = match diff.await { Ok(result) => match result { @@ -2772,7 +2768,6 @@ impl GitPanel { let repo = repo?; let remotes = repo .update(cx, |repo, _| repo.get_remotes(None, false)) - .ok()? .await .ok()? .log_err()?; @@ -2827,7 +2822,7 @@ impl GitPanel { }; let fetch = repo.update(cx, |repo, cx| { repo.fetch(fetch_options.clone(), askpass, cx) - })?; + }); let remote_message = fetch.await?; this.update(cx, |this, cx| { @@ -2983,7 +2978,7 @@ impl GitPanel { let pull = repo.update(cx, |repo, cx| { repo.pull(branch_name, remote.name.clone(), rebase, askpass, cx) - })?; + }); let remote_message = pull.await?; @@ -3069,7 +3064,7 @@ impl GitPanel { askpass_delegate, cx, ) - })?; + }); let remote_output = push.await?; @@ -3199,7 +3194,7 @@ impl GitPanel { Some(current_branch.name().to_string()) }; anyhow::Ok(repo.get_remotes(current_branch, is_push)) - })?? + })? 
.await??; let current_remotes: Vec<_> = current_remotes @@ -5630,7 +5625,7 @@ impl GitPanelMessageTooltip { git_panel.load_commit_details(sha.to_string(), cx), git_panel.workspace.clone(), ) - })?; + }); let details = details.await?; let provider_registry = cx .update(|_, app| GitHostingProviderRegistry::default_global(app)) diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index d414d53283be4440b0ef892f1f861a3e39c9424c..5cb99689e0c23304ec21c1b4fe935ac543299a75 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -354,7 +354,7 @@ impl RenameBranchModal { match repo .update(cx, |repo, _| { repo.rename_branch(current_branch, new_name.clone()) - })? + }) .await { Ok(Ok(_)) => Ok(()), diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index 6d0a9d291e4a8c7096c525b9b401e54e599b0b53..a688c250a63e09deab03677efda92e34a38aebc4 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -246,7 +246,7 @@ impl StashListDelegate { }; cx.spawn(async move |_, cx| { - repo.update(cx, |repo, cx| repo.stash_drop(Some(stash_index), cx))? + repo.update(cx, |repo, cx| repo.stash_drop(Some(stash_index), cx)) .await??; Ok(()) }) @@ -281,7 +281,7 @@ impl StashListDelegate { }; cx.spawn(async move |_, cx| { - repo.update(cx, |repo, cx| repo.stash_pop(Some(stash_index), cx))? + repo.update(cx, |repo, cx| repo.stash_pop(Some(stash_index), cx)) .await?; Ok(()) }) @@ -297,7 +297,7 @@ impl StashListDelegate { }; cx.spawn(async move |_, cx| { - repo.update(cx, |repo, cx| repo.stash_apply(Some(stash_index), cx))? + repo.update(cx, |repo, cx| repo.stash_apply(Some(stash_index), cx)) .await?; Ok(()) }) diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 1f80e2ace7fc67d1ffcefe220ac95a94b04b088f..e76c1a3fbc656cff07b62681a1a2e09d291a13f8 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -256,10 +256,10 @@ async fn update_diff_buffer( clipboard_buffer: &Entity, cx: &mut AsyncApp, ) -> Result<()> { - let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot()); let language = source_buffer_snapshot.language().cloned(); - let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot()); let base_text = base_buffer_snapshot.text(); let update = diff @@ -271,12 +271,12 @@ async fn update_diff_buffer( language, cx, ) - })? + }) .await; diff.update(cx, |diff, cx| { diff.set_snapshot(update, &source_buffer_snapshot.text, cx) - })? + }) .await; Ok(()) } diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index 40ec67b3b9b92268a56acc892f8cbfb46e6209ec..94c37378cca301f84abcaf3ccc12b238bdfb8f37 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -260,7 +260,7 @@ impl WorktreeListDelegate { repo.update(cx, |repo, _| { repo.create_worktree(branch.clone(), path.clone(), commit) - })? 
+ }) .await??; let new_worktree_path = path.join(branch); @@ -444,7 +444,7 @@ async fn open_remote_worktree( return Ok(()); }; - let new_project = cx.update(|cx| { + let new_project: Entity = cx.update(|cx| { project::Project::remote( session, app_state.client.clone(), @@ -455,7 +455,7 @@ async fn open_remote_worktree( true, cx, ) - })?; + }); let window_to_use = if replace_current_window { workspace_window @@ -463,12 +463,12 @@ async fn open_remote_worktree( let workspace_position = cx .update(|cx| { workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx) - })? + }) .await .context("fetching workspace position from db")?; let mut options = - cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx))?; + cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx)); options.window_bounds = workspace_position.window_bounds; cx.open_window(options, |window, cx| { diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index c021ba499720393beb4bd33d53a41f124712d4d3..74f697bbeb7dd2b59cba3e46af97a66ea4457baf 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -2235,8 +2235,6 @@ impl App { } impl AppContext for App { - type Result = T; - /// Builds an entity that is owned by the application. /// /// The given function will be invoked with a [`Context`] and must return an object representing the entity. An @@ -2258,7 +2256,7 @@ impl AppContext for App { }) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> Reservation { Reservation(self.entities.reserve()) } @@ -2266,7 +2264,7 @@ impl AppContext for App { &mut self, reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + ) -> Entity { self.update(|cx| { let slot = reservation.0; let entity = build_entity(&mut Context::new_context(cx, slot.downgrade())); @@ -2299,11 +2297,7 @@ impl AppContext for App { GpuiBorrow::new(handle.clone(), self) } - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { @@ -2348,7 +2342,7 @@ impl AppContext for App { self.background_executor.spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index 805dfced162cd27f0cc785a8282ae3b802c2873a..ed202f6b86b6edd812d50f7fa975fbd5def9ae62 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -1,9 +1,10 @@ use crate::{ AnyView, AnyWindowHandle, App, AppCell, AppContext, BackgroundExecutor, BorrowAppContext, - Entity, EventEmitter, Focusable, ForegroundExecutor, Global, PromptButton, PromptLevel, Render, - Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle, + Entity, EventEmitter, Focusable, ForegroundExecutor, Global, GpuiBorrow, PromptButton, + PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window, + WindowHandle, }; -use anyhow::{Context as _, anyhow}; +use anyhow::Context as _; use derive_more::{Deref, DerefMut}; use futures::channel::oneshot; use std::{future::Future, rc::Weak}; @@ -12,7 +13,10 @@ use super::{Context, WeakEntity}; /// An async-friendly version of [App] with a static lifetime so it can be held across `await` points in async code. 
/// You're provided with an instance when calling [App::spawn], and you can also create one with [App::to_async]. -/// Internally, this holds a weak reference to an `App`, so its methods are fallible to protect against cases where the [App] is dropped. +/// +/// Internally, this holds a weak reference to an `App`. Methods will panic if the app has been dropped, +/// but this should not happen in practice when using foreground tasks spawned via `cx.spawn()`, +/// as the executor checks if the app is alive before running each task. #[derive(Clone)] pub struct AsyncApp { pub(crate) app: Weak, @@ -20,64 +24,61 @@ pub struct AsyncApp { pub(crate) foreground_executor: ForegroundExecutor, } -impl AppContext for AsyncApp { - type Result = Result; +impl AsyncApp { + fn app(&self) -> std::rc::Rc { + self.app + .upgrade() + .expect("app was released before async operation completed") + } +} - fn new( - &mut self, - build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { - let app = self.app.upgrade().context("app was released")?; +impl AppContext for AsyncApp { + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { + let app = self.app(); let mut app = app.borrow_mut(); - Ok(app.new(build_entity)) + app.new(build_entity) } - fn reserve_entity(&mut self) -> Result> { - let app = self.app.upgrade().context("app was released")?; + fn reserve_entity(&mut self) -> Reservation { + let app = self.app(); let mut app = app.borrow_mut(); - Ok(app.reserve_entity()) + app.reserve_entity() } fn insert_entity( &mut self, reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Result> { - let app = self.app.upgrade().context("app was released")?; + ) -> Entity { + let app = self.app(); let mut app = app.borrow_mut(); - Ok(app.insert_entity(reservation, build_entity)) + app.insert_entity(reservation, build_entity) } fn update_entity( &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> Self::Result { - let app = self.app.upgrade().context("app was released")?; + ) -> R { + let app = self.app(); let mut app = app.borrow_mut(); - Ok(app.update_entity(handle, update)) + app.update_entity(handle, update) } - fn as_mut<'a, T>(&'a mut self, _handle: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, _handle: &Entity) -> GpuiBorrow<'a, T> where T: 'static, { - Err(anyhow!( - "Cannot as_mut with an async context. Try calling update() first" - )) + panic!("Cannot as_mut with an async context. Try calling update() first") } - fn read_entity( - &self, - handle: &Entity, - callback: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, callback: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { - let app = self.app.upgrade().context("app was released")?; + let app = self.app(); let lock = app.borrow(); - Ok(lock.read_entity(handle, callback)) + lock.read_entity(handle, callback) } fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result @@ -109,23 +110,22 @@ impl AppContext for AsyncApp { self.background_executor.spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { - let app = self.app.upgrade().context("app was released")?; + let app = self.app(); let mut lock = app.borrow_mut(); - Ok(lock.update(|this| this.read_global(callback))) + lock.update(|this| this.read_global(callback)) } } impl AsyncApp { /// Schedules all windows in the application to be redrawn. 
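To make the new AsyncApp contract concrete before the `refresh` change below, here is a minimal sketch of a spawned task under these semantics; `Counter`, `count`, and `bump_later` are illustrative names for this sketch only, not part of the diff:

    use gpui::{App, Entity};

    struct Counter {
        count: usize,
    }

    fn bump_later(counter: Entity<Counter>, cx: &mut App) {
        cx.spawn(async move |cx| {
            // With the context methods infallible, no `?` is needed here: if the
            // App had been dropped, the foreground executor would not have run
            // this task in the first place.
            let count = counter.update(cx, |counter, cx| {
                counter.count += 1;
                cx.notify();
                counter.count
            });
            println!("count is now {count}");
        })
        .detach();
    }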
- pub fn refresh(&self) -> Result<()> { - let app = self.app.upgrade().context("app was released")?; + pub fn refresh(&self) { + let app = self.app(); let mut lock = app.borrow_mut(); lock.refresh_windows(); - Ok(()) } /// Get an executor which can be used to spawn futures in the background. @@ -139,10 +139,10 @@ impl AsyncApp { } /// Invoke the given function in the context of the app, then flush any effects produced during its invocation. - pub fn update(&self, f: impl FnOnce(&mut App) -> R) -> Result { - let app = self.app.upgrade().context("app was released")?; + pub fn update(&self, f: impl FnOnce(&mut App) -> R) -> R { + let app = self.app(); let mut lock = app.borrow_mut(); - Ok(lock.update(f)) + lock.update(f) } /// Arrange for the given callback to be invoked whenever the given entity emits an event of a given type. @@ -150,16 +150,15 @@ impl AsyncApp { pub fn subscribe( &mut self, entity: &Entity, - mut on_event: impl FnMut(Entity, &Event, &mut App) + 'static, - ) -> Result + on_event: impl FnMut(Entity, &Event, &mut App) + 'static, + ) -> Subscription where T: 'static + EventEmitter, Event: 'static, { - let app = self.app.upgrade().context("app was released")?; + let app = self.app(); let mut lock = app.borrow_mut(); - let subscription = lock.subscribe(entity, on_event); - Ok(subscription) + lock.subscribe(entity, on_event) } /// Open a window with the given options based on the root view returned by the given function. @@ -171,7 +170,7 @@ impl AsyncApp { where V: 'static + Render, { - let app = self.app.upgrade().context("app was released")?; + let app = self.app(); let mut lock = app.borrow_mut(); lock.open_window(options, build_root_view) } @@ -189,61 +188,50 @@ impl AsyncApp { } /// Determine whether global state of the specified type has been assigned. - /// Returns an error if the `App` has been dropped. - pub fn has_global(&self) -> Result { - let app = self.app.upgrade().context("app was released")?; + pub fn has_global(&self) -> bool { + let app = self.app(); let app = app.borrow_mut(); - Ok(app.has_global::()) + app.has_global::() } /// Reads the global state of the specified type, passing it to the given callback. /// /// Panics if no global state of the specified type has been assigned. - /// Returns an error if the `App` has been dropped. - pub fn read_global(&self, read: impl FnOnce(&G, &App) -> R) -> Result { - let app = self.app.upgrade().context("app was released")?; + pub fn read_global(&self, read: impl FnOnce(&G, &App) -> R) -> R { + let app = self.app(); let app = app.borrow_mut(); - Ok(read(app.global(), &app)) + read(app.global(), &app) } /// Reads the global state of the specified type, passing it to the given callback. /// /// Similar to [`AsyncApp::read_global`], but returns an error instead of panicking - /// if no state of the specified type has been assigned. - /// - /// Returns an error if no state of the specified type has been assigned the `App` has been dropped. pub fn try_read_global(&self, read: impl FnOnce(&G, &App) -> R) -> Option { - let app = self.app.upgrade()?; + let app = self.app(); let app = app.borrow_mut(); Some(read(app.try_global()?, &app)) } /// Reads the global state of the specified type, passing it to the given callback. /// A default value is assigned if a global of this type has not yet been assigned. - /// - /// # Errors - /// If the app has ben dropped this returns an error. 
- pub fn try_read_default_global( + pub fn read_default_global( &self, read: impl FnOnce(&G, &App) -> R, - ) -> Result { - let app = self.app.upgrade().context("app was released")?; + ) -> R { + let app = self.app(); let mut app = app.borrow_mut(); app.update(|cx| { cx.default_global::(); }); - Ok(read(app.try_global().context("app was released")?, &app)) + read(app.global(), &app) } /// A convenience method for [`App::update_global`](BorrowAppContext::update_global) /// for updating the global state of the specified type. - pub fn update_global( - &self, - update: impl FnOnce(&mut G, &mut App) -> R, - ) -> Result { - let app = self.app.upgrade().context("app was released")?; + pub fn update_global(&self, update: impl FnOnce(&mut G, &mut App) -> R) -> R { + let app = self.app(); let mut app = app.borrow_mut(); - Ok(app.update(|cx| cx.update_global(update))) + app.update(|cx| cx.update_global(update)) } /// Run something using this entity and cx, when the returned struct is dropped @@ -359,54 +347,41 @@ impl AsyncWindowContext { } impl AppContext for AsyncWindowContext { - type Result = Result; - - fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Result> + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity where T: 'static, { - self.app - .update_window(self.window, |_, _, cx| cx.new(build_entity)) + self.app.new(build_entity) } - fn reserve_entity(&mut self) -> Result> { - self.app - .update_window(self.window, |_, _, cx| cx.reserve_entity()) + fn reserve_entity(&mut self) -> Reservation { + self.app.reserve_entity() } fn insert_entity( &mut self, reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { - self.app.update_window(self.window, |_, _, cx| { - cx.insert_entity(reservation, build_entity) - }) + ) -> Entity { + self.app.insert_entity(reservation, build_entity) } fn update_entity( &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> Result { - self.app - .update_window(self.window, |_, _, cx| cx.update_entity(handle, update)) + ) -> R { + self.app.update_entity(handle, update) } - fn as_mut<'a, T>(&'a mut self, _: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, _: &Entity) -> GpuiBorrow<'a, T> where T: 'static, { - Err(anyhow!( - "Cannot use as_mut() from an async context, call `update`" - )) + panic!("Cannot use as_mut() from an async context, call `update`") } - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { @@ -438,7 +413,7 @@ impl AppContext for AsyncWindowContext { self.app.background_executor.spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { @@ -447,6 +422,8 @@ impl AppContext for AsyncWindowContext { } impl VisualContext for AsyncWindowContext { + type Result = Result; + fn window_handle(&self) -> AnyWindowHandle { self.window } @@ -454,7 +431,7 @@ impl VisualContext for AsyncWindowContext { fn new_window_entity( &mut self, build_entity: impl FnOnce(&mut Window, &mut Context) -> T, - ) -> Self::Result> { + ) -> Result> { self.app.update_window(self.window, |_, window, cx| { cx.new(|cx| build_entity(window, cx)) }) @@ -464,7 +441,7 @@ impl VisualContext for AsyncWindowContext { &mut self, view: &Entity, update: impl FnOnce(&mut T, &mut Window, &mut Context) -> R, - ) -> Self::Result { 
+ ) -> Result { self.app.update_window(self.window, |_, window, cx| { view.update(cx, |entity, cx| update(entity, window, cx)) }) @@ -473,7 +450,7 @@ impl VisualContext for AsyncWindowContext { fn replace_root_view( &mut self, build_view: impl FnOnce(&mut Window, &mut Context) -> V, - ) -> Self::Result> + ) -> Result> where V: 'static + Render, { @@ -482,7 +459,7 @@ impl VisualContext for AsyncWindowContext { }) } - fn focus(&mut self, view: &Entity) -> Self::Result<()> + fn focus(&mut self, view: &Entity) -> Result<()> where V: Focusable, { diff --git a/crates/gpui/src/app/context.rs b/crates/gpui/src/app/context.rs index b780ca426c15c99030f24ee48bde978ad38526e7..aa482ccd07136f2823b364292dcf0d4a18e98039 100644 --- a/crates/gpui/src/app/context.rs +++ b/crates/gpui/src/app/context.rs @@ -753,8 +753,6 @@ impl Context<'_, T> { } impl AppContext for Context<'_, T> { - type Result = U; - #[inline] fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> U) -> Entity { self.app.new(build_entity) @@ -770,7 +768,7 @@ impl AppContext for Context<'_, T> { &mut self, reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> U, - ) -> Self::Result> { + ) -> Entity { self.app.insert_entity(reservation, build_entity) } @@ -784,7 +782,7 @@ impl AppContext for Context<'_, T> { } #[inline] - fn as_mut<'a, E>(&'a mut self, handle: &Entity) -> Self::Result> + fn as_mut<'a, E>(&'a mut self, handle: &Entity) -> super::GpuiBorrow<'a, E> where E: 'static, { @@ -792,11 +790,7 @@ impl AppContext for Context<'_, T> { } #[inline] - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&U, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&U, &App) -> R) -> R where U: 'static, { @@ -832,7 +826,7 @@ impl AppContext for Context<'_, T> { } #[inline] - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index 8c1bdfa1cee509dcbc061200cb651ce5d3bf4fcd..6cf18a69177286048860d3c711b19eda038874b7 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -431,11 +431,7 @@ impl Entity { /// Read the entity referenced by this handle with the given function. #[inline] - pub fn read_with( - &self, - cx: &C, - f: impl FnOnce(&T, &App) -> R, - ) -> C::Result { + pub fn read_with(&self, cx: &C, f: impl FnOnce(&T, &App) -> R) -> R { cx.read_entity(self, f) } @@ -445,18 +441,18 @@ impl Entity { &self, cx: &mut C, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> C::Result { + ) -> R { cx.update_entity(self, update) } /// Updates the entity referenced by this handle with the given function. #[inline] - pub fn as_mut<'a, C: AppContext>(&self, cx: &'a mut C) -> C::Result> { + pub fn as_mut<'a, C: AppContext>(&self, cx: &'a mut C) -> GpuiBorrow<'a, T> { cx.as_mut(self) } /// Updates the entity referenced by this handle with the given function. - pub fn write(&self, cx: &mut C, value: T) -> C::Result<()> { + pub fn write(&self, cx: &mut C, value: T) { self.update(cx, |entity, cx| { *entity = value; cx.notify(); @@ -465,7 +461,7 @@ impl Entity { /// Updates the entity referenced by this handle with the given function if /// the referenced entity still exists, within a visual context that has a window. - /// Returns an error if the entity has been released. + /// Returns an error if the window has been closed. 
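The split in fallibility is now carried by the handle type rather than the context: a strong `Entity` read or update hands back the closure's value directly, while `WeakEntity` keeps returning `Result` because the entity itself may have been released. A small sketch under that assumption, reusing a hypothetical `Counter` type:

    use anyhow::Result;
    use gpui::{App, Entity, WeakEntity};

    struct Counter {
        count: usize,
    }

    fn total(strong: &Entity<Counter>, weak: &WeakEntity<Counter>, cx: &App) -> Result<usize> {
        // A strong handle keeps the entity alive, so this read cannot fail any more.
        let a = strong.read_with(cx, |counter, _app| counter.count);
        // A weak handle can dangle, so the single remaining `?` covers that case.
        let b = weak.read_with(cx, |counter, _app| counter.count)?;
        Ok(a + b)
    }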
#[inline] pub fn update_in( &self, @@ -749,13 +745,9 @@ impl WeakEntity { ) -> Result where C: AppContext, - Result>: crate::Flatten, { - crate::Flatten::flatten( - self.upgrade() - .context("entity released") - .map(|this| cx.update_entity(&this, update)), - ) + let entity = self.upgrade().context("entity released")?; + Ok(cx.update_entity(&entity, update)) } /// Updates the entity referenced by this handle with the given function if @@ -768,14 +760,13 @@ impl WeakEntity { ) -> Result where C: VisualContext, - Result>: crate::Flatten, { let window = cx.window_handle(); - let this = self.upgrade().context("entity released")?; + let entity = self.upgrade().context("entity released")?; - crate::Flatten::flatten(window.update(cx, |_, window, cx| { - this.update(cx, |entity, cx| update(entity, window, cx)) - })) + window.update(cx, |_, window, cx| { + entity.update(cx, |entity, cx| update(entity, window, cx)) + }) } /// Reads the entity referenced by this handle with the given function if @@ -784,13 +775,9 @@ impl WeakEntity { pub fn read_with(&self, cx: &C, read: impl FnOnce(&T, &App) -> R) -> Result where C: AppContext, - Result>: crate::Flatten, { - crate::Flatten::flatten( - self.upgrade() - .context("entity released") - .map(|this| cx.read_entity(&this, read)), - ) + let entity = self.upgrade().context("entity released")?; + Ok(cx.read_entity(&entity, read)) } /// Create a new weak entity that can never be upgraded. diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 33e9365dece4da99c6733e2a2ac250a785dde05f..f710a19ed58630a5d6b37dfb76c682bb6fd6de67 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -33,17 +33,12 @@ pub struct TestAppContext { } impl AppContext for TestAppContext { - type Result = T; - - fn new( - &mut self, - build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { let mut app = self.app.borrow_mut(); app.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> crate::Reservation { let mut app = self.app.borrow_mut(); app.reserve_entity() } @@ -52,7 +47,7 @@ impl AppContext for TestAppContext { &mut self, reservation: crate::Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + ) -> Entity { let mut app = self.app.borrow_mut(); app.insert_entity(reservation, build_entity) } @@ -61,23 +56,19 @@ impl AppContext for TestAppContext { &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> Self::Result { + ) -> R { let mut app = self.app.borrow_mut(); app.update_entity(handle, update) } - fn as_mut<'a, T>(&'a mut self, _: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, _: &Entity) -> super::GpuiBorrow<'a, T> where T: 'static, { panic!("Cannot use as_mut with a test app context. 
Try calling update() first") } - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { @@ -112,7 +103,7 @@ impl AppContext for TestAppContext { self.background_executor.spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { @@ -918,16 +909,11 @@ impl VisualTestContext { } impl AppContext for VisualTestContext { - type Result = ::Result; - - fn new( - &mut self, - build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { self.cx.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> crate::Reservation { self.cx.reserve_entity() } @@ -935,7 +921,7 @@ impl AppContext for VisualTestContext { &mut self, reservation: crate::Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + ) -> Entity { self.cx.insert_entity(reservation, build_entity) } @@ -943,25 +929,21 @@ impl AppContext for VisualTestContext { &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> Self::Result + ) -> R where T: 'static, { self.cx.update_entity(handle, update) } - fn as_mut<'a, T>(&'a mut self, handle: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, handle: &Entity) -> super::GpuiBorrow<'a, T> where T: 'static, { self.cx.as_mut(handle) } - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { @@ -993,7 +975,7 @@ impl AppContext for VisualTestContext { self.cx.background_spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { @@ -1002,6 +984,8 @@ impl AppContext for VisualTestContext { } impl VisualContext for VisualTestContext { + type Result = T; + /// Get the underlying window handle underlying this context. 
fn window_handle(&self) -> AnyWindowHandle { self.window @@ -1010,30 +994,30 @@ impl VisualContext for VisualTestContext { fn new_window_entity( &mut self, build_entity: impl FnOnce(&mut Window, &mut Context) -> T, - ) -> Self::Result> { + ) -> Entity { self.window .update(&mut self.cx, |_, window, cx| { cx.new(|cx| build_entity(window, cx)) }) - .unwrap() + .expect("window was unexpectedly closed") } fn update_window_entity( &mut self, view: &Entity, update: impl FnOnce(&mut V, &mut Window, &mut Context) -> R, - ) -> Self::Result { + ) -> R { self.window .update(&mut self.cx, |_, window, cx| { view.update(cx, |v, cx| update(v, window, cx)) }) - .unwrap() + .expect("window was unexpectedly closed") } fn replace_root_view( &mut self, build_view: impl FnOnce(&mut Window, &mut Context) -> V, - ) -> Self::Result> + ) -> Entity where V: 'static + Render, { @@ -1041,15 +1025,15 @@ impl VisualContext for VisualTestContext { .update(&mut self.cx, |_, window, cx| { window.replace_root(cx, build_view) }) - .unwrap() + .expect("window was unexpectedly closed") } - fn focus(&mut self, view: &Entity) -> Self::Result<()> { + fn focus(&mut self, view: &Entity) { self.window .update(&mut self.cx, |_, window, cx| { view.read(cx).focus_handle(cx).focus(window, cx) }) - .unwrap() + .expect("window was unexpectedly closed") } } diff --git a/crates/gpui/src/app/visual_test_context.rs b/crates/gpui/src/app/visual_test_context.rs index 50a7ed6c2853379ce054ebf5277bccdeeb7b4c56..08b167431975440bcf7b78593000b87cd67d105a 100644 --- a/crates/gpui/src/app/visual_test_context.rs +++ b/crates/gpui/src/app/visual_test_context.rs @@ -366,17 +366,12 @@ impl Default for VisualTestAppContext { } impl AppContext for VisualTestAppContext { - type Result = T; - - fn new( - &mut self, - build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { let mut app = self.app.borrow_mut(); app.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> crate::Reservation { let mut app = self.app.borrow_mut(); app.reserve_entity() } @@ -385,7 +380,7 @@ impl AppContext for VisualTestAppContext { &mut self, reservation: crate::Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result> { + ) -> Entity { let mut app = self.app.borrow_mut(); app.insert_entity(reservation, build_entity) } @@ -394,23 +389,19 @@ impl AppContext for VisualTestAppContext { &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> Self::Result { + ) -> R { let mut app = self.app.borrow_mut(); app.update_entity(handle, update) } - fn as_mut<'a, T>(&'a mut self, _: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, _: &Entity) -> crate::GpuiBorrow<'a, T> where T: 'static, { panic!("Cannot use as_mut with a visual test app context. 
Try calling update() first") } - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static, { @@ -445,7 +436,7 @@ impl AppContext for VisualTestAppContext { self.background_executor.spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global, { diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index e5c726f58e117b76e2dbb2976089d5788baa848e..99401e40b7c07fbdf35c2602984f84e8dc38ba1a 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -118,23 +118,16 @@ pub use window::*; /// The context trait, allows the different contexts in GPUI to be used /// interchangeably for certain operations. pub trait AppContext { - /// The result type for this context, used for async contexts that - /// can't hold a direct reference to the application context. - type Result; - /// Create a new entity in the app context. #[expect( clippy::wrong_self_convention, reason = "`App::new` is an ubiquitous function for creating entities" )] - fn new( - &mut self, - build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result>; + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity; /// Reserve a slot for a entity to be inserted later. /// The returned [Reservation] allows you to obtain the [EntityId] for the future entity. - fn reserve_entity(&mut self) -> Self::Result>; + fn reserve_entity(&mut self) -> Reservation; /// Insert a new entity in the app context based on a [Reservation] previously obtained from [`reserve_entity`]. /// @@ -143,28 +136,24 @@ pub trait AppContext { &mut self, reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> T, - ) -> Self::Result>; + ) -> Entity; /// Update a entity in the app context. fn update_entity( &mut self, handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, - ) -> Self::Result + ) -> R where T: 'static; /// Update a entity in the app context. - fn as_mut<'a, T>(&'a mut self, handle: &Entity) -> Self::Result> + fn as_mut<'a, T>(&'a mut self, handle: &Entity) -> GpuiBorrow<'a, T> where T: 'static; /// Read a entity from the app context. - fn read_entity( - &self, - handle: &Entity, - read: impl FnOnce(&T, &App) -> R, - ) -> Self::Result + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R where T: 'static; @@ -188,7 +177,7 @@ pub trait AppContext { R: Send + 'static; /// Read a global from this app context - fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R where G: Global; } @@ -207,6 +196,9 @@ impl Reservation { /// This trait is used for the different visual contexts in GPUI that /// require a window to be present. pub trait VisualContext: AppContext { + /// The result type for window operations. + type Result; + /// Returns the handle of the window associated with this context. fn window_handle(&self) -> AnyWindowHandle; @@ -284,24 +276,6 @@ where } } -/// A flatten equivalent for anyhow `Result`s. -pub trait Flatten { - /// Convert this type into a simple `Result`. - fn flatten(self) -> Result; -} - -impl Flatten for Result> { - fn flatten(self) -> Result { - self? - } -} - -impl Flatten for Result { - fn flatten(self) -> Result { - self - } -} - /// Information about the GPU GPUI is running on. 
#[derive(Default, Debug, serde::Serialize, serde::Deserialize, Clone)] pub struct GpuSpecs { diff --git a/crates/gpui/src/platform/app_menu.rs b/crates/gpui/src/platform/app_menu.rs index 39e7556b2d210f85fb9fda573244c9f031a92e2c..b1e0d82bb9f6d4ee265d047f562e088a8e48c1db 100644 --- a/crates/gpui/src/platform/app_menu.rs +++ b/crates/gpui/src/platform/app_menu.rs @@ -1,5 +1,4 @@ use crate::{Action, App, Platform, SharedString}; -use util::ResultExt; /// A menu of the application, either a main menu or a submenu pub struct Menu { @@ -263,14 +262,18 @@ pub(crate) fn init_app_menus(platform: &dyn Platform, cx: &App) { platform.on_will_open_app_menu(Box::new({ let cx = cx.to_async(); move || { - cx.update(|cx| cx.clear_pending_keystrokes()).ok(); + if let Some(app) = cx.app.upgrade() { + app.borrow_mut().update(|cx| cx.clear_pending_keystrokes()); + } } })); platform.on_validate_app_menu_command(Box::new({ let cx = cx.to_async(); move |action| { - cx.update(|cx| cx.is_action_available(action)) + cx.app + .upgrade() + .map(|app| app.borrow_mut().update(|cx| cx.is_action_available(action))) .unwrap_or(false) } })); @@ -278,7 +281,9 @@ pub(crate) fn init_app_menus(platform: &dyn Platform, cx: &App) { platform.on_app_menu_action(Box::new({ let cx = cx.to_async(); move |action| { - cx.update(|cx| cx.dispatch_action(action)).log_err(); + if let Some(app) = cx.app.upgrade() { + app.borrow_mut().update(|cx| cx.dispatch_action(action)); + } } })); } diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index e9e9e9e654e6eef45492f149784c378c493999dc..db7ab92428a239c924da400f7075aab3abeb0a1d 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -681,7 +681,7 @@ impl Platform for MacPlatform { } self.background_executor() - .spawn(async { crate::Flatten::flatten(done_rx.await.map_err(|e| anyhow!(e))) }) + .spawn(async { done_rx.await.map_err(|e| anyhow!(e))? }) } fn on_open_urls(&self, callback: Box)>) { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 20a46622387505049fc72a9ec1ee955e38e18002..94ae5f7a8ce8b2de8d44c328321283ee5f45843b 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -4955,11 +4955,11 @@ impl WindowHandle { where C: AppContext, { - crate::Flatten::flatten(cx.update_window(self.any_handle, |root_view, _, _| { + cx.update_window(self.any_handle, |root_view, _, _| { root_view .downcast::() .map_err(|_| anyhow!("the type of the window's root view has changed")) - })) + })? } /// Updates the root view of this window. 
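Window-scoped contexts are the one place fallibility remains, since a window can close while the app lives on. The sketch below, with a hypothetical `TitleView`, shows the resulting asymmetry from an `AsyncWindowContext`:

    use anyhow::Result;
    use gpui::{AsyncWindowContext, Entity, VisualContext};

    struct TitleView {
        title: String,
    }

    fn retitle(view: Entity<TitleView>, cx: &mut AsyncWindowContext) -> Result<()> {
        // AppContext methods on the async window context are infallible now...
        let old = view.read_with(cx, |view, _app| view.title.clone());
        // ...while VisualContext methods still report a closed window as an error.
        cx.update_window_entity(&view, |view, _window, cx| {
            view.title = format!("{old} (updated)");
            cx.notify();
        })?;
        Ok(())
    }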
diff --git a/crates/gpui_macros/src/derive_app_context.rs b/crates/gpui_macros/src/derive_app_context.rs index d2dc250d0239769f6834860a128c2653546a926e..46f9e58409895735c1c75f631175718b28027c14 100644 --- a/crates/gpui_macros/src/derive_app_context.rs +++ b/crates/gpui_macros/src/derive_app_context.rs @@ -21,16 +21,14 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { impl #impl_generics gpui::AppContext for #type_name #type_generics #where_clause { - type Result = T; - fn new( &mut self, build_entity: impl FnOnce(&mut gpui::Context<'_, T>) -> T, - ) -> Self::Result> { + ) -> gpui::Entity { self.#app_variable.new(build_entity) } - fn reserve_entity(&mut self) -> Self::Result> { + fn reserve_entity(&mut self) -> gpui::Reservation { self.#app_variable.reserve_entity() } @@ -38,7 +36,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { &mut self, reservation: gpui::Reservation, build_entity: impl FnOnce(&mut gpui::Context<'_, T>) -> T, - ) -> Self::Result> { + ) -> gpui::Entity { self.#app_variable.insert_entity(reservation, build_entity) } @@ -46,7 +44,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { &mut self, handle: &gpui::Entity, update: impl FnOnce(&mut T, &mut gpui::Context<'_, T>) -> R, - ) -> Self::Result + ) -> R where T: 'static, { @@ -56,7 +54,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { fn as_mut<'y, 'z, T>( &'y mut self, handle: &'z gpui::Entity, - ) -> Self::Result> + ) -> gpui::GpuiBorrow<'y, T> where T: 'static, { @@ -67,7 +65,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { &self, handle: &gpui::Entity, read: impl FnOnce(&T, &gpui::App) -> R, - ) -> Self::Result + ) -> R where T: 'static, { @@ -99,7 +97,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream { self.#app_variable.background_spawn(future) } - fn read_global(&self, callback: impl FnOnce(&G, &gpui::App) -> R) -> Self::Result + fn read_global(&self, callback: impl FnOnce(&G, &gpui::App) -> R) -> R where G: gpui::Global, { diff --git a/crates/gpui_macros/src/derive_visual_context.rs b/crates/gpui_macros/src/derive_visual_context.rs index b827e753d9678efba01d3fdd77f8e66ea62b6bbd..a639b6d2d6e9eda9f5f9a167a2366b243b4d5011 100644 --- a/crates/gpui_macros/src/derive_visual_context.rs +++ b/crates/gpui_macros/src/derive_visual_context.rs @@ -28,6 +28,8 @@ pub fn derive_visual_context(input: TokenStream) -> TokenStream { impl #impl_generics gpui::VisualContext for #type_name #type_generics #where_clause { + type Result = T; + fn window_handle(&self) -> gpui::AnyWindowHandle { self.#window_variable.window_handle() } @@ -36,33 +38,33 @@ pub fn derive_visual_context(input: TokenStream) -> TokenStream { &mut self, entity: &gpui::Entity, update: impl FnOnce(&mut T, &mut gpui::Window, &mut gpui::Context) -> R, - ) -> Self::Result { + ) -> R { gpui::AppContext::update_entity(self.#app_variable, entity, |entity, cx| update(entity, self.#window_variable, cx)) } fn new_window_entity( &mut self, build_entity: impl FnOnce(&mut gpui::Window, &mut gpui::Context<'_, T>) -> T, - ) -> Self::Result> { + ) -> gpui::Entity { gpui::AppContext::new(self.#app_variable, |cx| build_entity(self.#window_variable, cx)) } fn replace_root_view( &mut self, build_view: impl FnOnce(&mut gpui::Window, &mut gpui::Context) -> V, - ) -> Self::Result> + ) -> gpui::Entity where V: 'static + gpui::Render, { self.#window_variable.replace_root(self.#app_variable, build_view) } - fn focus(&mut self, entity: &gpui::Entity) -> Self::Result<()> + fn focus(&mut self, 
entity: &gpui::Entity) where V: gpui::Focusable, { let focus_handle = gpui::Focusable::focus_handle(entity, self.#app_variable); - self.#window_variable.focus(&focus_handle, self.#app_variable) + self.#window_variable.focus(&focus_handle, self.#app_variable); } } }; diff --git a/crates/gpui_tokio/src/gpui_tokio.rs b/crates/gpui_tokio/src/gpui_tokio.rs index 9cfa1493af49ee95210edb9669a6ca89095f42cd..f6e1e0643b2c2a377c1bb8bd042f94c06d382709 100644 --- a/crates/gpui_tokio/src/gpui_tokio.rs +++ b/crates/gpui_tokio/src/gpui_tokio.rs @@ -56,7 +56,7 @@ pub struct Tokio {} impl Tokio { /// Spawns the given future on Tokio's thread pool, and returns it via a GPUI task /// Note that the Tokio task will be cancelled if the GPUI task is dropped - pub fn spawn(cx: &C, f: Fut) -> C::Result>> + pub fn spawn(cx: &C, f: Fut) -> Task> where C: AppContext, Fut: Future + Send + 'static, @@ -78,7 +78,7 @@ impl Tokio { /// Spawns the given future on Tokio's thread pool, and returns it via a GPUI task /// Note that the Tokio task will be cancelled if the GPUI task is dropped - pub fn spawn_result(cx: &C, f: Fut) -> C::Result>> + pub fn spawn_result(cx: &C, f: Fut) -> Task> where C: AppContext, Fut: Future> + Send + 'static, diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index f268588b58ccd07c1ecc367aeaaaf80c4f0996cf..852c9ec95bca95eb76d0af72c21f041b31d60e54 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -234,10 +234,10 @@ impl SerializableItem for ImageView { let (worktree, relative_path) = project .update(cx, |project, cx| { project.find_or_create_worktree(image_path.clone(), false, cx) - })? + }) .await .context("Path not found")?; - let worktree_id = worktree.update(cx, |worktree, _cx| worktree.id())?; + let worktree_id = worktree.update(cx, |worktree, _cx| worktree.id()); let project_path = ProjectPath { worktree_id, @@ -245,7 +245,7 @@ impl SerializableItem for ImageView { }; let image_item = project - .update(cx, |project, cx| project.open_image(project_path, cx))? + .update(cx, |project, cx| project.open_image(project_path, cx)) .await?; cx.update( diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index 9b145e920e48605f19f566ca14a7caf63aff8f0a..18588d80e49fe81bc9be19a634c7288e699b1d64 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -86,7 +86,7 @@ impl DivInspector { // Create Rust style buffer without adding it to the project / buffer_store, so that // Rust Analyzer doesn't get started for it. let rust_language_result = languages.language_for_name("Rust").await; - let rust_style_buffer = rust_language_result.and_then(|rust_language| { + let rust_style_buffer = rust_language_result.map(|rust_language| { cx.new(|cx| Buffer::local("", cx).with_language_async(rust_language, cx)) }); @@ -462,16 +462,16 @@ impl DivInspector { cx: &mut AsyncWindowContext, ) -> Result> { let worktree = project - .update(cx, |project, cx| project.create_worktree(path, false, cx))? + .update(cx, |project, cx| project.create_worktree(path, false, cx)) .await?; let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath { worktree_id: worktree.id(), path: RelPath::empty().into(), - })?; + }); let buffer = project - .update(cx, |project, cx| project.open_path(project_path, cx))? + .update(cx, |project, cx| project.open_path(project_path, cx)) .await? 
.1; diff --git a/crates/install_cli/src/install_cli_binary.rs b/crates/install_cli/src/install_cli_binary.rs index 414bdabc7090be4372ff984949809839bbd3ee05..095ed3cd315c49d38909a12608cd5607723abb3a 100644 --- a/crates/install_cli/src/install_cli_binary.rs +++ b/crates/install_cli/src/install_cli_binary.rs @@ -17,7 +17,7 @@ actions!( ); async fn install_script(cx: &AsyncApp) -> Result { - let cli_path = cx.update(|cx| cx.path_for_auxiliary_executable("cli"))??; + let cli_path = cx.update(|cx| cx.path_for_auxiliary_executable("cli"))?; let link_path = Path::new("/usr/local/bin/zed"); let bin_dir_path = link_path.parent().unwrap(); diff --git a/crates/install_cli/src/register_zed_scheme.rs b/crates/install_cli/src/register_zed_scheme.rs index 819287c5d0bcd15e531e21b417c7e5d4a4b4ece5..5dac3ef5d8ecd5e6dfd5fae4073440b340e2545e 100644 --- a/crates/install_cli/src/register_zed_scheme.rs +++ b/crates/install_cli/src/register_zed_scheme.rs @@ -10,6 +10,5 @@ actions!( ); pub async fn register_zed_scheme(cx: &AsyncApp) -> anyhow::Result<()> { - cx.update(|cx| cx.register_url_scheme(ZED_URL_SCHEME))? - .await + cx.update(|cx| cx.register_url_scheme(ZED_URL_SCHEME)).await } diff --git a/crates/json_schema_store/src/json_schema_store.rs b/crates/json_schema_store/src/json_schema_store.rs index ca3fb20f0115b7db5f9cc984a0ca1719b59f4b05..f728e844e1feb624c666fdc1f99a549e73081698 100644 --- a/crates/json_schema_store/src/json_schema_store.rs +++ b/crates/json_schema_store/src/json_schema_store.rs @@ -79,7 +79,6 @@ fn handle_schema_request( ) -> Task> { let languages = lsp_store.read_with(cx, |lsp_store, _| lsp_store.languages.clone()); cx.spawn(async move |cx| { - let languages = languages?; let schema = resolve_schema_request(&languages, lsp_store, uri, cx).await?; serde_json::to_string(&schema).context("Failed to serialize schema") }) @@ -136,7 +135,7 @@ pub async fn resolve_schema_request_inner( local, &worktree, cx, )) }) - })? + }) .context(concat!( "Failed to create adapter delegate - ", "either LSP store is not in local mode or no worktree is available" @@ -190,16 +189,16 @@ pub async fn resolve_schema_request_inner( lsp_adapter_names: &lsp_adapter_names, }, ) - })? + }) } - "keymap" => cx.update(settings::KeymapFile::generate_json_schema_for_registered_actions)?, + "keymap" => cx.update(settings::KeymapFile::generate_json_schema_for_registered_actions), "action" => { let normalized_action_name = rest.context("No Action name provided")?; let action_name = denormalize_action_name(normalized_action_name); let mut generator = settings::KeymapFile::action_schema_generator(); let schema = cx // PERF: cx.action_schema_by_name(action_name, &mut generator) - .update(|cx| cx.action_schemas(&mut generator))? 
+ .update(|cx| cx.action_schemas(&mut generator)) .into_iter() .find_map(|(name, schema)| (name == action_name).then_some(schema)) .flatten(); @@ -209,7 +208,7 @@ pub async fn resolve_schema_request_inner( "debug_tasks" => { let adapter_schemas = cx.read_global::(|dap_registry, _| { dap_registry.adapters_schema() - })?; + }); task::DebugTaskFile::generate_json_schema(&adapter_schemas) } "package_json" => package_json_schema(), diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 160d32ce5cd118f49c2353cf85bcb4ebad8325b0..141cae7986ffb4436162cc2e7d62f4853cb97f83 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -2318,20 +2318,18 @@ impl KeybindingEditorModal { .await; let language = load_keybind_context_language(workspace, cx).await; - editor_entity - .update(cx, |editor, cx| { - if let Some(buffer) = editor.buffer().read(cx).as_singleton() { - buffer.update(cx, |buffer, cx| { - buffer.set_language(Some(language), cx); - }); - } - editor.set_completion_provider(Some(std::rc::Rc::new( - KeyContextCompletionProvider { contexts }, - ))); - }) - .context("Failed to load completions for keybinding context") + editor_entity.update(cx, |editor, cx| { + if let Some(buffer) = editor.buffer().read(cx).as_singleton() { + buffer.update(cx, |buffer, cx| { + buffer.set_language(Some(language), cx); + }); + } + editor.set_completion_provider(Some(std::rc::Rc::new( + KeyContextCompletionProvider { contexts }, + ))); + }); }) - .detach_and_log_err(cx); + .detach(); input }); diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 43496a6061ba7ea8086008939ece8674c3301d0d..05c64ef7461bd0f4ae243acbf1c74a6975a32352 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -2,9 +2,9 @@ use anthropic::{ ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, CountTokensRequest, Event, ResponseContent, ToolResultContent, ToolResultPart, Usage, }; -use anyhow::{Result, anyhow}; +use anyhow::Result; use collections::{BTreeMap, HashMap}; -use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture, stream::BoxStream}; +use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task}; use http_client::HttpClient; use language_model::{ @@ -444,12 +444,10 @@ impl AnthropicModel { > { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = AnthropicLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped").into())).boxed(); - }; + }); let beta_headers = self.model.beta_headers(); @@ -1020,13 +1018,9 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn({ let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { - // We don't log an error, because "not signed in" is also an error. - let _ = task.await; - } + let task = state.update(cx, |state, cx| state.authenticate(cx)); + // We don't log an error, because "not signed in" is also an error. 
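The same infallibility shows up throughout the provider code that follows: the `let Ok(..) = state.read_with(..) else { return ..."App state dropped"... }` guards collapse into a plain destructuring. A sketch of the simplified shape, with a hypothetical `ProviderState`:

    use gpui::{AsyncApp, Entity};

    struct ProviderState {
        api_url: String,
        api_key: Option<String>,
    }

    fn connection_info(state: &Entity<ProviderState>, cx: &AsyncApp) -> (Option<String>, String) {
        // Previously this returned Result and every caller needed a let-else
        // fallback for a dropped app; now the tuple comes back directly.
        state.read_with(cx, |state, _app| (state.api_key.clone(), state.api_url.clone()))
    }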
+ let _ = task.await; this.update(cx, |this, cx| { this.load_credentials_task = None; cx.notify(); @@ -1056,7 +1050,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -1069,7 +1063,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? + .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 1ded1c401a7ecbb4a304d5a15c1003b3dac74b10..894ec6ad987b613c4d8e4fbcb42afe966a39576f 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -527,7 +527,7 @@ impl BedrockModel { let endpoint = state.settings.as_ref().and_then(|s| s.endpoint.clone()); let region = state.get_region(); (state.auth.clone(), endpoint, region) - })?; + }); let mut config_builder = aws_config::defaults(BehaviorVersion::latest()) .stalled_stream_protection(StalledStreamProtectionConfig::disabled()) @@ -597,10 +597,8 @@ impl BedrockModel { return futures::future::ready(Err(anyhow!("App state dropped"))).boxed(); }; - match Tokio::spawn(cx, bedrock::stream_completion(runtime_client, request)) { - Ok(res) => async { res.await.map_err(|err| anyhow!(err))? }.boxed(), - Err(err) => futures::future::ready(Err(anyhow!(err))).boxed(), - } + let task = Tokio::spawn(cx, bedrock::stream_completion(runtime_client, request)); + async move { task.await.map_err(|err| anyhow!(err))? }.boxed() } } @@ -670,11 +668,9 @@ impl LanguageModel for BedrockModel { LanguageModelCompletionError, >, > { - let Ok((region, allow_global)) = cx.read_entity(&self.state, |state, _cx| { + let (region, allow_global) = cx.read_entity(&self.state, |state, _cx| { (state.get_region(), state.get_allow_global()) - }) else { - return async move { Err(anyhow::anyhow!("App State Dropped").into()) }.boxed(); - }; + }); let model_id = match self.model.cross_region_inference_id(®ion, allow_global) { Ok(s) => s, @@ -1194,10 +1190,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn({ let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log an error, because "not signed in" is also an error. let _ = task.await; } @@ -1273,7 +1266,7 @@ impl ConfigurationView { }; state.set_static_credentials(credentials, cx) - })? 
+ }) .await }) .detach_and_log_err(cx); @@ -1290,7 +1283,7 @@ impl ConfigurationView { .update(cx, |editor, cx| editor.set_text("", window, cx)); let state = self.state.clone(); - cx.spawn(async move |_, cx| state.update(cx, |state, cx| state.reset_auth(cx))?.await) + cx.spawn(async move |_, cx| state.update(cx, |state, cx| state.reset_auth(cx)).await) .detach_and_log_err(cx); } diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 65a42740eb9a8aff830d7544ed5aa972c6697d88..354052e3af04b18dedb41f4a057bfdfb228c5996 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -755,7 +755,7 @@ impl LanguageModel for CloudLanguageModel { let prompt_id = request.prompt_id.clone(); let intent = request.intent; let mode = request.mode; - let app_version = cx.update(|cx| AppVersion::global(cx)).ok(); + let app_version = Some(cx.update(|cx| AppVersion::global(cx))); let thinking_allowed = request.thinking_allowed; let provider_name = provider_name(&self.model.provider); match self.model.provider { diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 9d8a7eb13985e9ba0891ec3562e822626a335b02..ea623d2cf24f26ce32e8d1fd309ac747e469096e 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -3,7 +3,7 @@ use collections::{BTreeMap, HashMap}; use deepseek::DEEPSEEK_API_URL; use futures::Stream; -use futures::{FutureExt, StreamExt, future, future::BoxFuture, stream::BoxStream}; +use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; use language_model::{ @@ -203,12 +203,10 @@ impl DeepSeekLanguageModel { ) -> BoxFuture<'static, Result>>> { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = DeepSeekLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let Some(api_key) = api_key else { @@ -540,10 +538,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn({ let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { let _ = task.await; } @@ -571,7 +566,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn(async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -584,7 +579,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn(async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 8b3babedcd8360042a24b6b4414169798648b7b6..2f5b3b3701d51e4f4faadae0f8ef83f8bf6b5b2f 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -1,7 +1,7 @@ -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result}; use collections::BTreeMap; use credentials_provider::CredentialsProvider; -use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture}; +use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use google_ai::{ FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction, ThinkingConfig, UsageMetadata, @@ -256,12 +256,10 @@ impl GoogleLanguageModel { > { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = GoogleLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); async move { let api_key = api_key.context("Missing Google API key")?; @@ -771,10 +769,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log an error, because "not signed in" is also an error. let _ = task.await; } @@ -807,7 +802,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -820,7 +815,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 237b64ac7d0ed728b057f6b553ad2a2a1ebae1db..3d4a6f6f86ea0e92f1ec1acb1db8fdd7b203f64c 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ b/crates/language_models/src/provider/lmstudio.rs @@ -376,12 +376,10 @@ impl LmStudioLanguageModel { Result>>, > { let http_client = self.http_client.clone(); - let Ok(api_url) = cx.update(|cx| { + let api_url = cx.update(|cx| { let settings = &AllLanguageModelSettings::get_global(cx).lmstudio; settings.api_url.clone() - }) else { - return futures::future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let request = lmstudio::stream_chat_completion(http_client.as_ref(), &api_url, request); @@ -644,10 +642,7 @@ impl ConfigurationView { let loading_models_task = Some(cx.spawn({ let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { task.await.log_err(); } this.update(cx, |this, cx| { diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index eb8d56b5df01dba7cf35732ae45a8dc586549045..cb4f2bf63fb18ffeb730201739f5168af4df7dd9 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -1,7 +1,7 @@ use anyhow::{Result, anyhow}; use collections::BTreeMap; -use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture, stream::BoxStream}; +use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window}; use http_client::HttpClient; use language_model::{ @@ -272,12 +272,10 @@ impl MistralLanguageModel { > { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = MistralLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let Some(api_key) = api_key else { @@ -754,10 +752,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log an error, because "not signed in" is also an error. let _ = task.await; } @@ -790,7 +785,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -803,7 +798,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 3c35841d83bcc37918debd1d365e2f6a00ff0476..ec8517ef1b250058155ac53c60406325b35ef86a 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -479,12 +479,10 @@ impl LanguageModel for OllamaLanguageModel { let request = self.to_ollama_request(request); let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = OllamaLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let stream = @@ -645,7 +643,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -658,7 +656,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? + .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index abd7d7ee36b807c4ff2844f610a8fe60ee464519..04248ffc3c7a465d04bca0ffa11ca53e39a24880 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -1,7 +1,7 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; use futures::Stream; -use futures::{FutureExt, StreamExt, future, future::BoxFuture}; +use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; use language_model::{ @@ -215,12 +215,10 @@ impl OpenAiLanguageModel { { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = OpenAiLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let provider = PROVIDER_NAME; @@ -249,12 +247,10 @@ impl OpenAiLanguageModel { { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = OpenAiLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let provider = PROVIDER_NAME; let future = self.request_limiter.stream(async move { @@ -1206,10 +1202,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log 
an error, because "not signed in" is also an error. let _ = task.await; } @@ -1241,7 +1234,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -1254,7 +1247,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? + .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index dc512356d83dc8b3b601891b3116d249c70bc785..2bc6b0dc284ea66960c9557d3e1253a4e59afd62 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -1,6 +1,6 @@ -use anyhow::{Result, anyhow}; +use anyhow::Result; use convert_case::{Case, Casing}; -use futures::{FutureExt, StreamExt, future, future::BoxFuture}; +use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; use language_model::{ @@ -214,15 +214,13 @@ impl OpenAiCompatibleLanguageModel { > { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, _cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, _cx| { let api_url = &state.settings.api_url; ( state.api_key_state.key(api_url), state.settings.api_url.clone(), ) - }) else { - return future::ready(Err(anyhow!("App state dropped").into())).boxed(); - }; + }); let provider = self.provider_name.clone(); let future = self.request_limiter.stream(async move { @@ -251,15 +249,13 @@ impl OpenAiCompatibleLanguageModel { { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, _cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, _cx| { let api_url = &state.settings.api_url; ( state.api_key_state.key(api_url), state.settings.api_url.clone(), ) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let provider = self.provider_name.clone(); let future = self.request_limiter.stream(async move { @@ -428,10 +424,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log an error, because "not signed in" is also an error. let _ = task.await; } @@ -463,7 +456,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -476,7 +469,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 9d36c7791cbd0332bfcb97b07fece2a8a6ae3b18..273b45ea23f76936a41584c9c58cd3c73c5c4967 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -1,6 +1,6 @@ use anyhow::{Result, anyhow}; use collections::HashMap; -use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture}; +use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; use http_client::HttpClient; use language_model::{ @@ -273,12 +273,10 @@ impl OpenRouterLanguageModel { >, > { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = OpenRouterLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped").into())).boxed(); - }; + }); async move { let Some(api_key) = api_key else { @@ -752,10 +750,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { let _ = task.await; } @@ -787,7 +782,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -800,7 +795,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 66ac0991c6f9bec44afa486ab7b2531ddd4eee54..3b324e46927f5864d83a5e4b74c46f5e39e8ab3a 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -1,6 +1,6 @@ -use anyhow::{Result, anyhow}; +use anyhow::Result; use collections::BTreeMap; -use futures::{FutureExt, StreamExt, future, future::BoxFuture}; +use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; use language_model::{ @@ -198,12 +198,10 @@ impl VercelLanguageModel { { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = VercelLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped"))).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let provider = PROVIDER_NAME; @@ -379,10 +377,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log an error, because "not signed in" is also an error. let _ = task.await; } @@ -414,7 +409,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -427,7 +422,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index 1da0bd4a06cba9ce956498747428db85eb9e8b60..06564224dea9621d594e5cf3f4a84093f1620446 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -1,6 +1,6 @@ -use anyhow::{Result, anyhow}; +use anyhow::Result; use collections::BTreeMap; -use futures::{FutureExt, StreamExt, future, future::BoxFuture}; +use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window}; use http_client::HttpClient; use language_model::{ @@ -207,12 +207,10 @@ impl XAiLanguageModel { > { let http_client = self.http_client.clone(); - let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| { + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { let api_url = XAiLanguageModelProvider::api_url(cx); (state.api_key_state.key(&api_url), api_url) - }) else { - return future::ready(Err(anyhow!("App state dropped").into())).boxed(); - }; + }); let future = self.request_limiter.stream(async move { let provider = PROVIDER_NAME; @@ -382,10 +380,7 @@ impl ConfigurationView { let load_credentials_task = Some(cx.spawn_in(window, { let state = state.clone(); async move |this, cx| { - if let Some(task) = state - .update(cx, |state, cx| state.authenticate(cx)) - .log_err() - { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { // We don't log an error, because "not signed in" is also an error. let _ = task.await; } @@ -417,7 +412,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))? + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) .await }) .detach_and_log_err(cx); @@ -430,7 +425,7 @@ impl ConfigurationView { let state = self.state.clone(); cx.spawn_in(window, async move |_, cx| { state - .update(cx, |state, cx| state.set_api_key(None, cx))? 
+ .update(cx, |state, cx| state.set_api_key(None, cx)) .await }) .detach_and_log_err(cx); diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 991cce50baf82b2604e510a0eeb2eac4af1578dd..f826c429134073668bd0ab7d6f93567b42f6e62b 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -205,7 +205,8 @@ impl PickerDelegate for LanguageSelectorDelegate { let buffer = buffer.upgrade().context("buffer was dropped")?; project.update(cx, |project, cx| { project.set_language_for_buffer(&buffer, language, cx); - }) + }); + anyhow::Ok(()) }) .detach_and_log_err(cx); } diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index de27c34ce9c6e840758b884ab0c05e04dfa8c1e5..603818640d3af38edd0679743c2b5cf2fb3ebec0 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -334,7 +334,7 @@ impl LanguageServerState { cx, ); buffer.set_capability(language::Capability::ReadOnly, cx); - })?; + }); workspace.update(cx, |workspace, cx| { window_handle.update(cx, |_, window, cx| { diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index 2b2575912ae4543d2bf3cbd0c6b667ace7c82e91..43212cd63818ead409b180babfe1ebda2359001f 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -569,10 +569,10 @@ impl LspLogView { let language = language.await.ok(); buffer.update(cx, |buffer, cx| { buffer.set_language(language, cx); - }) + }); } }) - .detach_and_log_err(cx); + .detach(); }); self.editor = editor; diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index ca6bbd827e1c58beb13244d61e69d5c14a29c89d..c5c89a0c66431380cf9f500a23b74a19230f3046 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -162,7 +162,7 @@ impl LspAdapter for CssLspAdapter { let project_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &self.name(), cx) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(override_options) = project_options { merge_json_value_into(override_options, &mut default_config); diff --git a/crates/languages/src/eslint.rs b/crates/languages/src/eslint.rs index fd4133d7ebcafc2553e25c876eb9fb1c6257ebc1..943034652de852b2c39b4887218c3c8e28f329e1 100644 --- a/crates/languages/src/eslint.rs +++ b/crates/languages/src/eslint.rs @@ -237,7 +237,7 @@ impl LspAdapter for EsLintLspAdapter { cx, ) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(override_options) = override_options { let working_directories = override_options.get("workingDirectories").and_then(|wd| { diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 130e142076b8c6ec0393e4f0d617c3a522b2ef22..ed6b456b1c74d0ef1e0611e0017d240a0158a4f0 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -71,7 +71,7 @@ impl LspInstaller for GoLspAdapter { { cx.update(|cx| { delegate.show_notification(NOTIFICATION_MESSAGE, cx); - })? + }); } anyhow::bail!( "Could not install the Go language server `gopls`, because `go` was not found." 
diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 5e0f4907ef09973ad5d7b4f67c19ced1f1ddf05e..aaddfcca1b71d1348ac031081f11ed184c4819c0 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -58,10 +58,9 @@ impl ContextProvider for JsonTaskProvider { let contents = file .worktree .update(cx, |this, cx| this.load_file(&file.path, cx)) - .ok()? .await .ok()?; - let path = cx.update(|cx| file.abs_path(cx)).ok()?.as_path().into(); + let path = cx.update(|cx| file.abs_path(cx)).as_path().into(); let task_templates = if is_package_json { let package_json = serde_json_lenient::from_str::< @@ -273,11 +272,11 @@ impl LspAdapter for JsonLspAdapter { "schemas": schemas } }) - })?; + }); let project_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &self.name(), cx) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(override_options) = project_options { merge_json_value_into(override_options, &mut config); diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index f7e100ed750e743bf69fe379acd13bfce98b7484..7e7b83b3cb5ce82c614d9ba7cd9faba6d2f3a17b 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -326,7 +326,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime ) .log_err(); }); - })?; + }); prev_language_settings = language_settings; } } diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 7ea5821f539b20e8fd341489f4e4cee21f40b53c..2ec6bef8f2893aa3d42d64d7c09c7e9cb18682c4 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -249,7 +249,7 @@ impl LspAdapter for TyLspAdapter { .update(|cx| { language_server_settings(delegate.as_ref(), &self.name(), cx) .and_then(|s| s.settings.clone()) - })? 
+ }) .unwrap_or_else(|| json!({})); if let Some(toolchain) = toolchain.and_then(|toolchain| { serde_json::from_value::(toolchain.as_json).ok() @@ -574,7 +574,7 @@ impl LspAdapter for PyrightLspAdapter { _: Option, cx: &mut AsyncApp, ) -> Result { - cx.update(move |cx| { + Ok(cx.update(move |cx| { let mut user_settings = language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) @@ -636,7 +636,7 @@ impl LspAdapter for PyrightLspAdapter { } user_settings - }) + })) } } @@ -1703,7 +1703,7 @@ impl LspAdapter for PyLspAdapter { _: Option, cx: &mut AsyncApp, ) -> Result { - cx.update(move |cx| { + Ok(cx.update(move |cx| { let mut user_settings = language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) @@ -1761,7 +1761,7 @@ impl LspAdapter for PyLspAdapter { )])); user_settings - }) + })) } } @@ -1995,7 +1995,7 @@ impl LspAdapter for BasedPyrightLspAdapter { _: Option, cx: &mut AsyncApp, ) -> Result { - cx.update(move |cx| { + Ok(cx.update(move |cx| { let mut user_settings = language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) @@ -2070,7 +2070,7 @@ impl LspAdapter for BasedPyrightLspAdapter { } user_settings - }) + })) } } diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index b4b6f76cec28d5d21c31ea67aa72ead6814eae7d..72e4684ce0a0242e5381c118a9748e3d9718341d 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -156,7 +156,7 @@ impl LspAdapter for TailwindLspAdapter { language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() - })?; + }); if tailwind_user_settings.get("emmetCompletions").is_none() { tailwind_user_settings["emmetCompletions"] = Value::Bool(true); diff --git a/crates/languages/src/tailwindcss.rs b/crates/languages/src/tailwindcss.rs index 0ea3c45ffaff3969d6a7145ceaa178dc4f4a990f..016c2956591a5140ab4b2d8313711382fee47d30 100644 --- a/crates/languages/src/tailwindcss.rs +++ b/crates/languages/src/tailwindcss.rs @@ -163,7 +163,7 @@ impl LspAdapter for TailwindCssLspAdapter { let project_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &self.name(), cx) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(override_options) = project_options { merge_json_value_into(override_options, &mut default_config); diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 9822ab6f3fc9bcce73e854ae4958f43e23006b39..2b2fb19c629f85c6b51eba64d154b43e716f6827 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -835,7 +835,7 @@ impl LspAdapter for TypeScriptLspAdapter { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(options) = override_options { return Ok(options); } diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 7106929c4ad3845d9aca06e0c5206a5d1de9b02c..7ed170daa39135f14b084bc4b1535f272c325d47 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -291,7 +291,7 @@ impl LspAdapter for VtslsLspAdapter { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &SERVER_NAME, cx) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(override_options) = override_options { merge_json_value_into(override_options, &mut 
default_workspace_configuration) diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 6c1d8bc2d9e74578868dc687ec76a3b95790c5a9..64d110bc4475474642d97a0c0c43de6495978ff0 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -147,7 +147,7 @@ impl LspAdapter for YamlLspAdapter { AllLanguageSettings::get(Some(location), cx) .language(Some(location), Some(&"YAML".into()), cx) .tab_size - })?; + }); let mut options = serde_json::json!({ "[yaml]": {"editor.tabSize": tab_size}, @@ -157,7 +157,7 @@ impl LspAdapter for YamlLspAdapter { let project_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) - })?; + }); if let Some(override_options) = project_options { merge_json_value_into(override_options, &mut options); } diff --git a/crates/livekit_client/examples/test_app.rs b/crates/livekit_client/examples/test_app.rs index a4d815aa9be6a84df95083ae979691c109a668cb..2366d0f8a6cae3be9ed6a6a68b7b8372c74db264 100644 --- a/crates/livekit_client/examples/test_app.rs +++ b/crates/livekit_client/examples/test_app.rs @@ -144,7 +144,6 @@ impl LivekitWindow { ) .unwrap() }) - .unwrap() } fn handle_room_event(&mut self, event: RoomEvent, window: &mut Window, cx: &mut Context) { diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 5d31f802c81678478fdb907c479e5cb63cba0487..6fc1d3415a493e7e1989472616015916a82cf818 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -60,7 +60,7 @@ impl Room { config.connector = Some(connector); let (room, mut events) = Tokio::spawn(cx, async move { livekit::Room::connect(&url, &token, config).await - })? + }) .await??; let (mut tx, rx) = mpsc::unbounded(); @@ -189,7 +189,7 @@ impl LocalParticipant { let participant = self.0.clone(); Tokio::spawn(cx, async move { participant.publish_track(track, options).await - })? + }) .await? .map(LocalTrackPublication) .context("publishing a track") @@ -201,7 +201,7 @@ impl LocalParticipant { cx: &mut AsyncApp, ) -> Result { let participant = self.0.clone(); - Tokio::spawn(cx, async move { participant.unpublish_track(&sid).await })? + Tokio::spawn(cx, async move { participant.unpublish_track(&sid).await }) .await? .map(LocalTrackPublication) .context("unpublishing a track") diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index 1956f480cfa17038884a635bc37c0cacda1b24ec..b9a1f66772434f640455dcff70f1d5cf5f56c6f3 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -478,7 +478,7 @@ pub(crate) async fn capture_local_video_track( width: metadata.resolution.width.0 as u32, height: metadata.resolution.height.0 as u32, }) - })? + }) .await?; let capture_stream = capture_source diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index 7cae74a7293694ebedd603ded656af00201c7366..f30a018811db822db49c9f05a2e5be3096ec0229 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -79,16 +79,13 @@ impl NotificationStore { let this = this.upgrade()?; match status { client::Status::Connected { .. } => { - if let Some(task) = this - .update(cx, |this, cx| this.handle_connect(cx)) - .log_err()? 
- { + if let Some(task) = this.update(cx, |this, cx| this.handle_connect(cx)) { task.await.log_err()?; } } - _ => this - .update(cx, |this, cx| this.handle_disconnect(cx)) - .log_err()?, + _ => { + this.update(cx, |this, cx| this.handle_disconnect(cx)); + } } } Some(()) @@ -161,7 +158,7 @@ impl NotificationStore { .context("Notification store was dropped while loading notifications")?; let response = request.await?; - this.update(cx, |this, _| this.loaded_all_notifications = response.done)?; + this.update(cx, |this, _| this.loaded_all_notifications = response.done); Self::add_notifications( this, response.notifications, @@ -212,8 +209,8 @@ impl NotificationStore { ) -> Result<()> { this.update(&mut cx, |this, cx| { this.splice_notifications([(envelope.payload.notification_id, None)], false, cx); - Ok(()) - })? + }); + Ok(()) } async fn add_notifications( @@ -259,10 +256,10 @@ impl NotificationStore { } } - let user_store = this.read_with(cx, |this, _| this.user_store.clone())?; + let user_store = this.read_with(cx, |this, _| this.user_store.clone()); user_store - .update(cx, |store, cx| store.get_users(user_ids, cx))? + .update(cx, |store, cx| store.get_users(user_ids, cx)) .await?; this.update(cx, |this, cx| { if options.clear_old { @@ -285,8 +282,7 @@ impl NotificationStore { options.is_new, cx, ); - }) - .log_err(); + }); Ok(()) } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index b43456264ca1c89014f154b07f59b4ddd44d6d39..d4ef31200542cfeb07a05c6c3705d5d4a1d10cbd 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -2084,7 +2084,7 @@ impl OutlinePanel { let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); Some((worktree, entry)) }) - })?, + }), PanelEntry::Outline(outline_entry) => { let (buffer_id, excerpt_id) = outline_entry.ids(); outline_panel.update(cx, |outline_panel, cx| { diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index bc4ce609a1fd39e4303c5fd048a0c8605b3a3ddc..b4130b3c75e22c29108019b27665fb83a59bb0f5 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -329,7 +329,7 @@ impl Prettier { settings: Default::default(), }; executor.spawn(server.initialize(params, configuration.into(), cx)) - })? + }) .await .context("prettier server initialization")?; Ok(Self::Real(RealPrettier { @@ -475,7 +475,7 @@ impl Prettier { ignore_path, }, }) - })? + }) .context("building prettier request")?; let response = local @@ -483,7 +483,7 @@ impl Prettier { .request::(params) .await .into_response()?; - let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(response.text, cx))?; + let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(response.text, cx)); Ok(diff_task.await) } #[cfg(any(test, feature = "test-support"))] @@ -520,7 +520,7 @@ impl Prettier { } None => panic!("Should not format buffer without a language with prettier"), } - })?? + })? .await), } } diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 077e4d387d50cbb685bdee551612d7a5545e1ac4..52ce237b2d525037988c627ccc39e83dfd91f8a8 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -760,7 +760,7 @@ impl AgentServerStore { new_version_available_tx, &mut cx.to_async(), )) - })?? + })? 
.await?; Ok(proto::AgentServerCommand { path: command.path.to_string_lossy().into_owned(), @@ -840,7 +840,7 @@ impl AgentServerStore { .collect(); cx.emit(AgentServersUpdated); Ok(()) - })? + }) } async fn handle_external_extension_agents_updated( @@ -889,7 +889,7 @@ impl AgentServerStore { this.reregister_agents(cx); cx.emit(AgentServersUpdated); Ok(()) - })? + }) } async fn handle_loading_status_updated( @@ -904,7 +904,8 @@ impl AgentServerStore { { status_tx.send(envelope.payload.status.into()).ok(); } - }) + }); + Ok(()) } async fn handle_new_version_available( @@ -921,7 +922,8 @@ impl AgentServerStore { .send(Some(envelope.payload.version)) .ok(); } - }) + }); + Ok(()) } pub fn get_extension_id_for_agent( diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index a8fd7e2dd194d4d0eac6be25fa3f290852d6ff5f..5a6d005d17cd2eee856cec42b8fbb2278eae6a9e 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -148,7 +148,7 @@ impl RemoteBufferStore { buffer_handle.update(cx, |buffer, cx| { buffer.did_save(version.clone(), mtime, cx); - })?; + }); Ok(()) }) @@ -274,14 +274,14 @@ impl RemoteBufferStore { buffer .update(cx, |buffer, _| { buffer.wait_for_edits(transaction.edit_ids.iter().copied()) - })? + }) .await?; if push_to_history { buffer.update(cx, |buffer, _| { buffer.push_transaction(transaction.clone(), Instant::now()); buffer.finalize_last_transaction(); - })?; + }); } } @@ -422,7 +422,8 @@ impl LocalBufferStore { buffer.file_updated(new_file, cx); } buffer.did_save(version.clone(), mtime, cx); - }) + }); + Ok(()) }) } @@ -625,7 +626,7 @@ impl LocalBufferStore { let path = path.clone(); let buffer = match load_file.await { Ok(loaded) => { - let reservation = cx.reserve_entity::()?; + let reservation = cx.reserve_entity::(); let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); let text_buffer = cx .background_spawn(async move { @@ -638,7 +639,7 @@ impl LocalBufferStore { buffer.set_encoding(loaded.encoding); buffer.set_has_bom(loaded.has_bom); buffer - })? + }) } Err(error) if is_not_found_error(&error) => cx.new(|cx| { let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); @@ -655,7 +656,7 @@ impl LocalBufferStore { })), Capability::ReadWrite, ) - })?, + }), Err(e) => return Err(e), }; this.update(cx, |this, cx| { @@ -703,7 +704,7 @@ impl LocalBufferStore { ) -> Task>> { cx.spawn(async move |buffer_store, cx| { let buffer = - cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?; + cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx)); buffer_store.update(cx, |buffer_store, cx| { buffer_store.add_buffer(buffer.clone(), cx).log_err(); if !project_searchable { @@ -725,7 +726,7 @@ impl LocalBufferStore { cx.spawn(async move |_, cx| { let mut project_transaction = ProjectTransaction::default(); for buffer in buffers { - let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx))?.await?; + let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx)).await?; buffer.update(cx, |buffer, cx| { if let Some(transaction) = transaction { if !push_to_history { @@ -733,7 +734,7 @@ impl LocalBufferStore { } project_transaction.0.insert(cx.entity(), transaction); } - })?; + }); } Ok(project_transaction) @@ -1179,7 +1180,7 @@ impl BufferStore { } } Ok(proto::Ack {}) - })? + }) } pub fn register_shared_lsp_handle( @@ -1348,7 +1349,7 @@ impl BufferStore { .log_err(); } Ok(()) - })? 
+ }) } pub async fn handle_save_buffer( @@ -1365,32 +1366,32 @@ impl BufferStore { .map(|(_, project_id)| *project_id) .context("project is not shared")?, )) - })??; + })?; buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&envelope.payload.version)) - })? + }) .await?; - let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id()); if let Some(new_path) = envelope.payload.new_path && let Some(new_path) = ProjectPath::from_proto(new_path) { this.update(&mut cx, |this, cx| { this.save_buffer_as(buffer.clone(), new_path, cx) - })? + }) .await?; } else { - this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))? + this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx)) .await?; } - buffer.read_with(&cx, |buffer, _| proto::BufferSaved { + Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved { project_id, buffer_id: buffer_id.into(), version: serialize_version(buffer.saved_version()), mtime: buffer.saved_mtime().map(|time| time.into()), - }) + })) } pub async fn handle_close_buffer( @@ -1415,7 +1416,8 @@ impl BufferStore { peer_id, buffer_id ) - }) + }); + Ok(()) } pub async fn handle_buffer_saved( @@ -1443,7 +1445,8 @@ impl BufferStore { }) .log_err(); } - }) + }); + Ok(()) } pub async fn handle_buffer_reloaded( @@ -1476,7 +1479,8 @@ impl BufferStore { }) .log_err(); } - }) + }); + Ok(()) } pub fn reload_buffers( @@ -1507,12 +1511,12 @@ impl BufferStore { buffers.insert(this.get_existing(buffer_id)?); } anyhow::Ok(this.reload_buffers(buffers, false, cx)) - })??; + })?; let project_transaction = reload.await?; let project_transaction = this.update(&mut cx, |this, cx| { this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) - })?; + }); Ok(proto::ReloadBuffersResponse { transaction: Some(project_transaction), }) @@ -1546,9 +1550,9 @@ impl BufferStore { return anyhow::Ok(()); }; - let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx))?; + let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx)); let operations = operations.await; - let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx))?; + let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx)); let initial_state = proto::CreateBufferForPeer { project_id, diff --git a/crates/project/src/connection_manager.rs b/crates/project/src/connection_manager.rs index 253d5d32a17104bbaeb9edbb34f94fab57bd3914..4cb1edb1dc68a06b070b3ecbcb893d9a469bad3b 100644 --- a/crates/project/src/connection_manager.rs +++ b/crates/project/src/connection_manager.rs @@ -166,15 +166,11 @@ impl Manager { log::info!("client reconnected, attempting to rejoin projects"); let Some(this) = this.upgrade() else { break }; - match this.update(cx, |this, cx| this.reconnected(cx)) { - Ok(task) => { - if task.await.log_err().is_some() { - return true; - } else { - remaining_attempts -= 1; - } - } - Err(_app_dropped) => return false, + let task = this.update(cx, |this, cx| this.reconnected(cx)); + if task.await.log_err().is_some() { + return true; + } else { + remaining_attempts -= 1; } } else if client_status.borrow().is_signed_out() { return false; @@ -215,7 +211,7 @@ impl Manager { // we leave the room and return an error. 
if let Some(this) = this.upgrade() { log::info!("reconnection failed, disconnecting projects"); - this.update(cx, |this, cx| this.connection_lost(cx))?; + this.update(cx, |this, cx| this.connection_lost(cx)); } Ok(()) diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 03612a593a8fb4e08579b3b1effba5728ba2627c..78fa5214da160cf1c45862b0ff3e15414dd561f0 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -136,10 +136,8 @@ impl ContextServerConfiguration { enabled: _, settings, } => { - let descriptor = cx - .update(|cx| registry.read(cx).context_server_descriptor(&id.0)) - .ok() - .flatten()?; + let descriptor = + cx.update(|cx| registry.read(cx).context_server_descriptor(&id.0))?; match descriptor.command(worktree_store, cx).await { Ok(command) => { @@ -350,17 +348,15 @@ impl ContextServerStore { .update(cx, |this, _| { this.context_server_settings.get(&server.id().0).cloned() }) - .ok() - .flatten() .context("Failed to get context server settings")?; if !settings.enabled() { - return Ok(()); + return anyhow::Ok(()); } let (registry, worktree_store) = this.update(cx, |this, _| { (this.registry.clone(), this.worktree_store.clone()) - })?; + }); let configuration = ContextServerConfiguration::from_settings( settings, server.id(), @@ -373,7 +369,8 @@ impl ContextServerStore { this.update(cx, |this, cx| { this.run_server(server, Arc::new(configuration), cx) - }) + }); + Ok(()) }) .detach_and_log_err(cx); } @@ -611,9 +608,7 @@ impl ContextServerStore { ) })?; - for (id, _) in - registry.read_with(cx, |registry, _| registry.context_server_descriptors())? - { + for (id, _) in registry.read_with(cx, |registry, _| registry.context_server_descriptors()) { configured_servers .entry(id) .or_insert(ContextServerSettings::default_extension()); diff --git a/crates/project/src/context_server_store/extension.rs b/crates/project/src/context_server_store/extension.rs index ca5cacf3b549523dee8b85242bea86653eecbf7a..6ad8bd806c8eafffbbccf21293b3612191cce48e 100644 --- a/crates/project/src/context_server_store/extension.rs +++ b/crates/project/src/context_server_store/extension.rs @@ -38,14 +38,14 @@ fn extension_project( worktree_store: Entity, cx: &mut AsyncApp, ) -> Result> { - worktree_store.update(cx, |worktree_store, cx| { + Ok(worktree_store.update(cx, |worktree_store, cx| { Arc::new(ExtensionProject { worktree_ids: worktree_store .visible_worktrees(cx) .map(|worktree| worktree.read(cx).id().to_proto()) .collect(), }) - }) + })) } impl registry::ContextServerDescriptor for ContextServerDescriptor { diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index 15c3a2586ed222a169c8f3140b04247b08add9e6..7da8a180887a6f8ee9107201b1ee2c2d650cce20 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -223,8 +223,6 @@ impl BreakpointStore { .update(cx, |this, cx| this.open_buffer(path, cx)), ) }) - .ok() - .flatten() .context("Invalid project path")? .await?; @@ -263,7 +261,7 @@ impl BreakpointStore { .collect(); cx.notify(); - })?; + }); Ok(()) } @@ -278,12 +276,12 @@ impl BreakpointStore { this.worktree_store .read(cx) .project_path_for_absolute_path(message.payload.path.as_ref(), cx) - })? + }) .context("Could not resolve provided abs path")?; let buffer = this .update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_by_path(&path) - })? 
+ }) .context("Could not find buffer for a given path")?; let breakpoint = message .payload @@ -309,7 +307,7 @@ impl BreakpointStore { BreakpointEditAction::Toggle, cx, ); - })?; + }); Ok(proto::Ack {}) } @@ -809,7 +807,7 @@ impl BreakpointStore { log::error!("Todo: Serialized breakpoints which do not have buffer (yet)"); continue; }; - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let mut breakpoints_for_file = this.update(cx, |_, cx| BreakpointsInFile::new(buffer, cx))?; diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 4a588e7c436f5f29fffd953b8fce988daa4655d8..bb7a3c339d12fc6a0f94990365f96584b63ba855 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -323,7 +323,7 @@ impl DapStore { if let Some(c) = binary.connection { let host = Ipv4Addr::LOCALHOST; let port; - if remote.read_with(cx, |remote, _cx| remote.shares_network_interface())? { + if remote.read_with(cx, |remote, _cx| remote.shares_network_interface()) { port = c.port; port_forwarding = None; } else { @@ -348,7 +348,7 @@ impl DapStore { binary.cwd.map(|path| path.display().to_string()), port_forwarding, ) - })??; + })?; Ok(DebugAdapterBinary { command: Some(command.program), @@ -530,7 +530,7 @@ impl DapStore { session .update(cx, |session, cx| { session.boot(binary, worktree, dap_store, cx) - })? + }) .await } }) @@ -583,7 +583,7 @@ impl DapStore { } else { Task::ready(HashMap::default()) } - })? + }) .await; Ok(()) @@ -691,16 +691,14 @@ impl DapStore { }); } VariableLookupKind::Expression => { - let Ok(eval_task) = session.read_with(cx, |session, _| { + let eval_task = session.read_with(cx, |session, _| { session.state.request_dap(EvaluateCommand { expression: inline_value_location.variable_name.clone(), frame_id: Some(stack_frame_id), source: None, context: Some(EvaluateArgumentsContext::Variables), }) - }) else { - continue; - }; + }); if let Some(response) = eval_task.await.log_err() { inlay_hints.push(InlayHint { @@ -816,7 +814,7 @@ impl DapStore { let request = this .update(&mut cx, |this, cx| { this.run_debug_locator(&locator, build_task, cx) - })? + }) .await?; Ok(request.to_proto()) @@ -846,8 +844,7 @@ impl DapStore { }) .ok(); } - }) - .ok(); + }); } } }) @@ -858,7 +855,7 @@ impl DapStore { this.worktree_store .read(cx) .worktree_for_id(WorktreeId::from_proto(envelope.payload.worktree_id), cx) - })? + }) .context("Failed to find worktree with a given ID")?; let binary = this .update(&mut cx, |this, cx| { @@ -869,7 +866,7 @@ impl DapStore { tx, cx, ) - })? 
+ }) .await?; Ok(binary.to_proto()) } @@ -890,7 +887,8 @@ impl DapStore { .unbounded_send(envelope.payload.message) .ok(); }) - }) + }); + Ok(()) } pub fn sync_adapter_options( diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 1bc41df4bd89b4a32b71ed4f0bec0a61e729f998..49e86a03dde26470a723909ddd28c518d0dcde4a 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -209,9 +209,8 @@ impl RunningMode { } }); - let client = if let Some(client) = parent_session - .and_then(|session| cx.update(|cx| session.read(cx).adapter_client()).ok()) - .flatten() + let client = if let Some(client) = + parent_session.and_then(|session| cx.update(|cx| session.read(cx).adapter_client())) { client .create_child_connection(session_id, binary.clone(), message_handler, cx) @@ -466,7 +465,7 @@ impl RunningMode { })?; initialized_rx.await?; let errors_by_path = cx - .update(|cx| this.send_source_breakpoints(false, &breakpoint_store, cx))? + .update(|cx| this.send_source_breakpoints(false, &breakpoint_store, cx)) .await; dap_store.update(cx, |_, cx| { @@ -2858,7 +2857,7 @@ impl Session { let mut console_output = self.console_output(cx); let task = cx.spawn(async move |this, cx| { let forward_ports_process = if remote_client - .read_with(cx, |client, _| client.shares_network_interface())? + .read_with(cx, |client, _| client.shares_network_interface()) { request.other.insert( "proxyUri".into(), @@ -2890,7 +2889,7 @@ impl Session { .spawn() .context("spawning port forwarding process")?; anyhow::Ok(child) - })??; + })?; Some(child) }; diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 2f0dc3f06bf13ad1f4d455e1a3857fafb02710c6..451b1d2457325df29d504aad99dd732d9df864fe 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -696,15 +696,15 @@ impl GitStore { cx: &mut Context, ) -> Task>> { cx.spawn(async move |this, cx| { - let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); let content = match oid { None => None, Some(oid) => Some( - repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))? + repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx)) .await?, ), }; - let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?; + let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx)); buffer_diff .update(cx, |buffer_diff, cx| { @@ -714,14 +714,14 @@ impl GitStore { buffer_snapshot.text, cx, ) - })? + }) .await?; let unstaged_diff = this .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))? 
.await?; buffer_diff.update(cx, |buffer_diff, _| { buffer_diff.set_secondary_diff(unstaged_diff); - })?; + }); this.update(cx, |_, cx| { cx.subscribe(&buffer_diff, Self::on_buffer_diff_event) @@ -1111,7 +1111,7 @@ impl GitStore { } let file_path = file.worktree.read(cx).absolutize(&file.path); return cx.spawn(async move |cx| { - let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?; + let provider_registry = cx.update(GitHostingProviderRegistry::default_global); get_permalink_in_rust_registry_src(provider_registry, file_path, selection) .context("no permalink available") }); @@ -1138,7 +1138,7 @@ impl GitStore { let sha = backend.head_sha().await.context("reading HEAD SHA")?; let provider_registry = - cx.update(GitHostingProviderRegistry::default_global)?; + cx.update(GitHostingProviderRegistry::default_global); let (provider, remote) = parse_git_remote_url(provider_registry, &origin_url) @@ -1226,8 +1226,7 @@ impl GitStore { for (repo, paths) in paths_by_git_repo { repo.update(cx, |repo, cx| { repo.paths_changed(paths, downstream.clone(), cx); - }) - .ok(); + }); } }) .detach(); @@ -1493,7 +1492,7 @@ impl GitStore { let diff_bases_change = repo .update(cx, |repo, cx| { repo.load_committed_text(buffer_id, repo_path, cx) - })? + }) .await?; diff_state.update(cx, |diff_state, cx| { @@ -1503,7 +1502,8 @@ impl GitStore { Some(diff_bases_change), cx, ); - }) + }); + anyhow::Ok(()) } .await .log_err(); @@ -1759,7 +1759,7 @@ impl GitStore { client.send(update).log_err(); } Ok(()) - })? + }) } async fn handle_remove_repository( @@ -1780,7 +1780,8 @@ impl GitStore { cx.emit(GitStoreEvent::ActiveRepositoryChanged(None)); } cx.emit(GitStoreEvent::RepositoryRemoved(id)); - }) + }); + Ok(()) } async fn handle_git_init( @@ -1790,7 +1791,7 @@ impl GitStore { ) -> Result { let path: Arc = PathBuf::from(envelope.payload.abs_path).into(); let name = envelope.payload.fallback_branch_name; - cx.update(|cx| this.read(cx).git_init(path, name, cx))? + cx.update(|cx| this.read(cx).git_init(path, name, cx)) .await?; Ok(proto::Ack {}) @@ -1804,7 +1805,7 @@ impl GitStore { let path: Arc = PathBuf::from(envelope.payload.abs_path).into(); let repo_name = envelope.payload.remote_repo; let result = cx - .update(|cx| this.read(cx).git_clone(repo_name, path, cx))? + .update(|cx| this.read(cx).git_clone(repo_name, path, cx)) .await; Ok(proto::GitCloneResponse { @@ -1833,7 +1834,7 @@ impl GitStore { let remote_output = repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.fetch(fetch_options, askpass, cx) - })? + }) .await??; Ok(proto::RemoteMessageResponse { @@ -1882,7 +1883,7 @@ impl GitStore { askpass, cx, ) - })? + }) .await??; Ok(proto::RemoteMessageResponse { stdout: remote_output.stdout, @@ -1913,7 +1914,7 @@ impl GitStore { let remote_message = repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.pull(branch_name, remote_name, rebase, askpass, cx) - })? + }) .await??; Ok(proto::RemoteMessageResponse { @@ -1940,7 +1941,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.stage_entries(entries, cx) - })? + }) .await?; Ok(proto::Ack {}) } @@ -1963,7 +1964,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.unstage_entries(entries, cx) - })? + }) .await?; Ok(proto::Ack {}) @@ -1987,7 +1988,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.stash_entries(entries, cx) - })? 
+ }) .await?; Ok(proto::Ack {}) @@ -2005,7 +2006,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.stash_pop(stash_index, cx) - })? + }) .await?; Ok(proto::Ack {}) @@ -2023,7 +2024,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.stash_apply(stash_index, cx) - })? + }) .await?; Ok(proto::Ack {}) @@ -2041,7 +2042,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.stash_drop(stash_index, cx) - })? + }) .await??; Ok(proto::Ack {}) @@ -2064,7 +2065,7 @@ impl GitStore { None, cx, ) - })? + }) .await??; Ok(proto::Ack {}) } @@ -2080,7 +2081,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.run_hook(hook, cx) - })? + }) .await??; Ok(proto::Ack {}) } @@ -2119,7 +2120,7 @@ impl GitStore { askpass, cx, ) - })? + }) .await??; Ok(proto::Ack {}) } @@ -2138,7 +2139,7 @@ impl GitStore { let remotes = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.get_remotes(branch_name, is_push) - })? + }) .await??; Ok(proto::GetRemotesResponse { @@ -2162,7 +2163,7 @@ impl GitStore { let worktrees = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.worktrees() - })? + }) .await??; Ok(proto::GitWorktreesResponse { @@ -2187,7 +2188,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.create_worktree(name, directory, commit) - })? + }) .await??; Ok(proto::Ack {}) @@ -2202,7 +2203,7 @@ impl GitStore { let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branches = repository_handle - .update(&mut cx, |repository_handle, _| repository_handle.branches())? + .update(&mut cx, |repository_handle, _| repository_handle.branches()) .await??; Ok(proto::GitBranchesResponse { @@ -2223,7 +2224,7 @@ impl GitStore { let branch = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.default_branch() - })? + }) .await?? .map(Into::into); @@ -2241,7 +2242,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.create_branch(branch_name, None) - })? + }) .await??; Ok(proto::Ack {}) @@ -2259,7 +2260,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.change_branch(branch_name) - })? + }) .await??; Ok(proto::Ack {}) @@ -2278,7 +2279,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.rename_branch(branch, new_name) - })? + }) .await??; Ok(proto::Ack {}) @@ -2297,7 +2298,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.create_remote(remote_name, remote_url) - })? + }) .await??; Ok(proto::Ack {}) @@ -2315,7 +2316,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.delete_branch(branch_name) - })? + }) .await??; Ok(proto::Ack {}) @@ -2333,7 +2334,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.remove_remote(remote_name) - })? + }) .await??; Ok(proto::Ack {}) @@ -2350,7 +2351,7 @@ impl GitStore { let commit = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.show(envelope.payload.commit) - })? 
+ }) .await??; Ok(proto::GitCommitDetails { sha: commit.sha.into(), @@ -2372,7 +2373,7 @@ impl GitStore { let commit_diff = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.load_commit_diff(envelope.payload.commit) - })? + }) .await??; Ok(proto::LoadCommitDiffResponse { files: commit_diff @@ -2401,7 +2402,7 @@ impl GitStore { let file_history = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.file_history_paginated(path, skip, limit) - })? + }) .await??; Ok(proto::GitFileHistoryResponse { @@ -2437,7 +2438,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.reset(envelope.payload.commit, mode, cx) - })? + }) .await??; Ok(proto::Ack {}) } @@ -2459,7 +2460,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.checkout_files(&envelope.payload.commit, paths, cx) - })? + }) .await?; Ok(proto::Ack {}) } @@ -2474,10 +2475,10 @@ impl GitStore { let buffer = repository .update(&mut cx, |repository, cx| { repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx) - })? + }) .await?; - let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?; + let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id()); this.update(&mut cx, |this, cx| { this.buffer_store.update(cx, |buffer_store, cx| { buffer_store @@ -2488,7 +2489,7 @@ impl GitStore { ) .detach_and_log_err(cx); }) - })?; + }); Ok(proto::OpenBufferResponse { buffer_id: buffer_id.to_proto(), @@ -2503,7 +2504,7 @@ impl GitStore { let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); let repository = Self::repository_for_request(&this, repository_id, &mut cx)?; - let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?; + let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone()); let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else { debug_panic!("no askpass found"); anyhow::bail!("no askpass found"); @@ -2535,7 +2536,7 @@ impl GitStore { let branches = repository_handle .update(&mut cx, |repository_handle, _| { repository_handle.check_for_pushed_commits() - })? + }) .await??; Ok(proto::CheckForPushedCommitsResponse { pushed_to: branches @@ -2560,7 +2561,7 @@ impl GitStore { let mut diff = repository_handle .update(&mut cx, |repository_handle, cx| { repository_handle.diff(diff_type, cx) - })? + }) .await??; const ONE_MB: usize = 1_000_000; if diff.len() > ONE_MB { @@ -2592,7 +2593,7 @@ impl GitStore { .update(&mut cx, |this, cx| { let repository = this.repositories().get(&repository_id)?; Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx))) - })? + }) .context("missing repository")? .await??; @@ -2633,7 +2634,7 @@ impl GitStore { .update(&mut cx, |this, cx| { let repository = this.repositories().get(&repository_id)?; Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx))) - })? + }) .context("missing repository")? .await?; Ok(proto::GetBlobContentResponse { content }) @@ -2649,7 +2650,7 @@ impl GitStore { .update(&mut cx, |this, cx| { let buffer = this.buffer_store.read(cx).get(buffer_id)?; Some(this.open_unstaged_diff(buffer, cx)) - })? + }) .context("missing buffer")? 
.await?; this.update(&mut cx, |this, _| { @@ -2658,8 +2659,8 @@ impl GitStore { .entry(request.original_sender_id.unwrap_or(request.sender_id)) .or_default(); shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone()); - })?; - let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?; + }); + let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx)); Ok(proto::OpenUnstagedDiffResponse { staged_text }) } @@ -2673,7 +2674,7 @@ impl GitStore { .update(&mut cx, |this, cx| { let buffer = this.buffer_store.read(cx).get(buffer_id)?; Some(this.open_uncommitted_diff(buffer, cx)) - })? + }) .context("missing buffer")? .await?; this.update(&mut cx, |this, _| { @@ -2682,8 +2683,8 @@ impl GitStore { .entry(request.original_sender_id.unwrap_or(request.sender_id)) .or_default(); shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone()); - })?; - diff.read_with(&cx, |diff, cx| { + }); + Ok(diff.read_with(&cx, |diff, cx| { use proto::open_uncommitted_diff_response::Mode; let unstaged_diff = diff.secondary_diff(); @@ -2721,7 +2722,7 @@ impl GitStore { staged_text, mode: mode.into(), } - }) + })) } async fn handle_update_diff_bases( @@ -2739,7 +2740,8 @@ impl GitStore { diff_state.handle_base_texts_updated(buffer, request.payload, cx); }) } - }) + }); + Ok(()) } async fn handle_blame_buffer( @@ -2751,16 +2753,16 @@ impl GitStore { let version = deserialize_version(&envelope.payload.version); let buffer = this.read_with(&cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(version.clone()) - })? + }) .await?; let blame = this .update(&mut cx, |this, cx| { this.blame_buffer(&buffer, Some(version), cx) - })? + }) .await?; Ok(serialize_blame_buffer_response(blame)) } @@ -2781,11 +2783,11 @@ impl GitStore { }; let buffer = this.read_with(&cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; let permalink = this .update(&mut cx, |this, cx| { this.get_permalink_to_line(&buffer, selection, cx) - })? + }) .await?; Ok(proto::GetPermalinkToLineResponse { permalink: permalink.to_string(), @@ -2802,7 +2804,7 @@ impl GitStore { .get(&id) .context("missing repository handle") .cloned() - })? + }) } pub fn repo_snapshots(&self, cx: &App) -> HashMap { @@ -3106,7 +3108,7 @@ impl BufferGitState { language.clone(), cx, ) - })? + }) .await, ); } @@ -3129,7 +3131,7 @@ impl BufferGitState { language.clone(), cx, ) - })? + }) .await, ) } @@ -3172,7 +3174,7 @@ impl BufferGitState { diff.language_changed(language.clone(), language_registry.clone(), cx); } diff.set_snapshot(new_unstaged_diff, &buffer, cx) - })?; + }); Some(task.await) } else { None @@ -3195,7 +3197,7 @@ impl BufferGitState { true, cx, ) - })? + }) .await; } @@ -3210,7 +3212,7 @@ impl BufferGitState { this.head_changed = false; this.language_changed = false; *this.recalculating_tx.borrow_mut() = false; - })?; + }); } Ok(()) @@ -3244,8 +3246,7 @@ fn make_remote_delegate( anyhow::Ok(()) }) .detach_and_log_err(cx); - }) - .log_err(); + }); }) } @@ -3674,7 +3675,7 @@ impl Repository { }) .collect::>() }) - })??; + })?; let buffer_diff_base_changes = cx .background_spawn(async move { @@ -3919,7 +3920,7 @@ impl Repository { RepositoryState::Local(..) => { this.update(&mut cx, |_, cx| { Self::open_local_commit_buffer(languages, buffer_store, cx) - })? 
+ }) .await } RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { @@ -3932,18 +3933,18 @@ impl Repository { let buffer = buffer_store .update(&mut cx, |buffer_store, cx| { buffer_store.wait_for_remote_buffer(buffer_id, cx) - })? + }) .await?; if let Some(language_registry) = languages { let git_commit_language = language_registry.language_for_name("Git Commit").await?; buffer.update(&mut cx, |buffer, cx| { buffer.set_language(Some(git_commit_language), cx); - })?; + }); } this.update(&mut cx, |this, _| { this.commit_message_buffer = Some(buffer.clone()); - })?; + }); Ok(buffer) } } @@ -3959,14 +3960,14 @@ impl Repository { ) -> Task>> { cx.spawn(async move |repository, cx| { let buffer = buffer_store - .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))? + .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx)) .await?; if let Some(language_registry) = language_registry { let git_commit_language = language_registry.language_for_name("Git Commit").await?; buffer.update(cx, |buffer, cx| { buffer.set_language(Some(git_commit_language), cx); - })?; + }); } repository.update(cx, |repository, _| { @@ -4969,20 +4970,22 @@ impl Repository { .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx)) .ok() .flatten(); - git_store.update(&mut cx, |git_store, cx| { - let buffer_id = git_store - .buffer_store - .read(cx) - .get_by_path(&project_path?)? - .read(cx) - .remote_id(); - let diff_state = git_store.diffs.get(&buffer_id)?; - diff_state.update(cx, |diff_state, _| { - diff_state.hunk_staging_operation_count_as_of_write = - hunk_staging_operation_count; - }); - Some(()) - })?; + git_store + .update(&mut cx, |git_store, cx| { + let buffer_id = git_store + .buffer_store + .read(cx) + .get_by_path(&project_path?)? + .read(cx) + .remote_id(); + let diff_state = git_store.diffs.get(&buffer_id)?; + diff_state.update(cx, |diff_state, _| { + diff_state.hunk_staging_operation_count_as_of_write = + hunk_staging_operation_count; + }); + Some(()) + }) + .context("Git store dropped")?; } Ok(()) }, @@ -5596,7 +5599,7 @@ impl Repository { this.snapshot.clone(), backend.clone(), ) - })? + }) .await?; this.update(&mut cx, |this, cx| { this.snapshot = snapshot.clone(); @@ -5604,7 +5607,7 @@ impl Repository { for event in events { cx.emit(event); } - })?; + }); if let Some(updates_tx) = updates_tx { updates_tx .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot)) @@ -5624,7 +5627,7 @@ impl Repository { cx.spawn(async move |_, cx| { let state = state.await.map_err(|err| anyhow::anyhow!(err))?; if let Some(git_hosting_provider_registry) = - cx.update(|cx| GitHostingProviderRegistry::try_global(cx))? + cx.update(|cx| GitHostingProviderRegistry::try_global(cx)) { git_hosting_providers::register_additional_providers( git_hosting_provider_registry, diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index 71bee30b99dfa89694885d34e20849869df123f0..c933df1be3a7497295202574e404a1c501086a49 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -138,7 +138,7 @@ impl ImageItem { .abs_path(cx) .context("absolutizing image file path")?; anyhow::Ok((fs, image_path)) - })??; + })?; let image_bytes = fs.load_bytes(&image_path).await?; Self::compute_metadata_from_bytes(&image_bytes) @@ -234,7 +234,7 @@ impl ProjectItem for ImageItem { let project = project.clone(); async move |cx| { project - .update(cx, |project, cx| project.open_image(path, cx))? 
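// Not every accessor becomes infallible in this diff: the hunk above keeps a Result
// on `git_store.update(...)` and adds `.context("Git store dropped")?`, presumably
// because that handle can outlive its target (a weak handle). A hypothetical sketch
// of that split, with stand-in types only:
struct Strong<T>(T);
struct WeakRef<T>(Option<T>); // stand-in: the target may already be gone

impl<T> Strong<T> {
    fn update<R>(&mut self, f: impl FnOnce(&mut T) -> R) -> R {
        f(&mut self.0) // always succeeds, no `?` at the call site
    }
}

impl<T> WeakRef<T> {
    fn update<R>(&mut self, f: impl FnOnce(&mut T) -> R) -> Result<R, &'static str> {
        match self.0.as_mut() {
            Some(target) => Ok(f(target)),
            None => Err("target dropped"), // caller still needs `?` or `.context(...)`
        }
    }
}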
+ .update(cx, |project, cx| project.open_image(path, cx)) .await } })) @@ -628,9 +628,9 @@ impl ImageStoreImpl for Entity { image, image_metadata: None, reload_task: None, - })?; + }); - let image_id = cx.read_entity(&entity, |model, _| model.id)?; + let image_id = cx.read_entity(&entity, |model, _| model.id); this.update(cx, |this, cx| { image_store.update(cx, |image_store, cx| { @@ -649,7 +649,7 @@ impl ImageStoreImpl for Entity { } anyhow::Ok(()) - })??; + })?; Ok(entity) }) @@ -662,7 +662,7 @@ impl ImageStoreImpl for Entity { ) -> Task> { cx.spawn(async move |_, cx| { for image in images { - if let Some(rec) = image.update(cx, |image, cx| image.reload(cx))? { + if let Some(rec) = image.update(cx, |image, cx| image.reload(cx)) { rec.await? } } @@ -709,7 +709,7 @@ impl ImageStoreImpl for Entity { remote_store .update(cx, |remote_store, cx| { remote_store.wait_for_remote_image(image_id, cx) - })? + }) .await }) } diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 04baa687916e44ccfead8fd8074c24ae270634a6..e5b91094fe670b8ed740134fc5abcebe500caf81 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -371,7 +371,7 @@ impl LspCommand for PrepareRename { Ok(PrepareRenameResponse::Success(range)) } None => Ok(PrepareRenameResponse::InvalidPosition), - })? + }) } fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename { @@ -398,11 +398,11 @@ impl LspCommand for PrepareRename { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -449,7 +449,7 @@ impl LspCommand for PrepareRename { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; if let (Some(start), Some(end)) = ( message.start.and_then(deserialize_anchor), @@ -556,10 +556,10 @@ impl LspCommand for PerformRename { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), new_name: message.new_name, push_to_history: false, }) @@ -593,7 +593,7 @@ impl LspCommand for PerformRename { lsp_store.buffer_store().update(cx, |buffer_store, cx| { buffer_store.deserialize_project_transaction(message, self.push_to_history, cx) }) - })? + }) .await } @@ -671,10 +671,10 @@ impl LspCommand for GetDefinitions { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -774,10 +774,10 @@ impl LspCommand for GetDeclarations { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -876,10 +876,10 @@ impl LspCommand for GetImplementations { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? 
+ }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -975,10 +975,10 @@ impl LspCommand for GetTypeDefinitions { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -1021,7 +1021,7 @@ fn language_server_for_buffer( .language_server_for_local_buffer(buffer, server_id, cx) .map(|(adapter, server)| (adapter.clone(), server.clone())) }) - })? + }) .context("no language server found for buffer") } @@ -1051,7 +1051,7 @@ pub fn location_link_from_proto( let buffer = lsp_store .update(cx, |lsp_store, cx| { lsp_store.wait_for_remote_buffer(buffer_id, cx) - })? + }) .await?; let start = origin .start @@ -1062,7 +1062,7 @@ pub fn location_link_from_proto( .and_then(deserialize_anchor) .context("missing origin end")?; buffer - .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))? + .update(cx, |buffer, _| buffer.wait_for_anchors([start, end])) .await?; Some(Location { buffer, @@ -1077,7 +1077,7 @@ pub fn location_link_from_proto( let buffer = lsp_store .update(cx, |lsp_store, cx| { lsp_store.wait_for_remote_buffer(buffer_id, cx) - })? + }) .await?; let start = target .start @@ -1088,7 +1088,7 @@ pub fn location_link_from_proto( .and_then(deserialize_anchor) .context("missing target end")?; buffer - .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))? + .update(cx, |buffer, _| buffer.wait_for_anchors([start, end])) .await?; let target = Location { buffer, @@ -1137,7 +1137,7 @@ pub async fn location_links_from_lsp( let target_buffer_handle = lsp_store .update(&mut cx, |this, cx| { this.open_local_buffer_via_lsp(target_uri, language_server.server_id(), cx) - })? + }) .await?; cx.update(|cx| { @@ -1169,7 +1169,7 @@ pub async fn location_links_from_lsp( origin: origin_location, target: target_location, }) - })?; + }); } Ok(definitions) } @@ -1192,10 +1192,10 @@ pub async fn location_link_from_lsp( let target_buffer_handle = lsp_store .update(cx, |lsp_store, cx| { lsp_store.open_local_buffer_via_lsp(target_uri, language_server.server_id(), cx) - })? + }) .await?; - cx.update(|cx| { + Ok(cx.update(|cx| { let origin_location = origin_range.map(|origin_range| { let origin_buffer = buffer.read(cx); let origin_start = @@ -1224,7 +1224,7 @@ pub async fn location_link_from_lsp( origin: origin_location, target: target_location, } - }) + })) } pub fn location_links_to_proto( @@ -1341,7 +1341,7 @@ impl LspCommand for GetReferences { language_server.server_id(), cx, ) - })? + }) .await?; target_buffer_handle @@ -1356,7 +1356,7 @@ impl LspCommand for GetReferences { range: target_buffer.anchor_after(target_start) ..target_buffer.anchor_before(target_end), }); - })?; + }); } } @@ -1387,10 +1387,10 @@ impl LspCommand for GetReferences { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -1434,7 +1434,7 @@ impl LspCommand for GetReferences { let target_buffer = project .update(&mut cx, |this, cx| { this.wait_for_remote_buffer(buffer_id, cx) - })? 
+ }) .await?; let start = location .start @@ -1445,7 +1445,7 @@ impl LspCommand for GetReferences { .and_then(deserialize_anchor) .context("missing target end")?; target_buffer - .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))? + .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end])) .await?; locations.push(Location { buffer: target_buffer, @@ -1502,7 +1502,7 @@ impl LspCommand for GetDocumentHighlights { _: LanguageServerId, cx: AsyncApp, ) -> Result> { - buffer.read_with(&cx, |buffer, _| { + Ok(buffer.read_with(&cx, |buffer, _| { let mut lsp_highlights = lsp_highlights.unwrap_or_default(); lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end))); lsp_highlights @@ -1520,7 +1520,7 @@ impl LspCommand for GetDocumentHighlights { } }) .collect() - }) + })) } fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDocumentHighlights { @@ -1547,10 +1547,10 @@ impl LspCommand for GetDocumentHighlights { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -1595,7 +1595,7 @@ impl LspCommand for GetDocumentHighlights { .and_then(deserialize_anchor) .context("missing target end")?; buffer - .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))? + .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end])) .await?; let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) { Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT, @@ -1711,7 +1711,7 @@ impl LspCommand for GetDocumentSymbols { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self) } @@ -1856,14 +1856,14 @@ impl LspCommand for GetSignatureHelp { let Some(message) = message else { return Ok(None); }; - cx.update(|cx| { + Ok(cx.update(|cx| { SignatureHelp::new( message, Some(lsp_store.read(cx).languages.clone()), Some(id), cx, ) - }) + })) } fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest { @@ -1885,10 +1885,10 @@ impl LspCommand for GetSignatureHelp { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&payload.version)) - })? + }) .await .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?; - let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); Ok(Self { position: payload .position @@ -1918,7 +1918,7 @@ impl LspCommand for GetSignatureHelp { _: Entity, cx: AsyncApp, ) -> Result { - cx.update(|cx| { + Ok(cx.update(|cx| { response .signature_help .map(proto_to_lsp_signature) @@ -1930,7 +1930,7 @@ impl LspCommand for GetSignatureHelp { cx, ) }) - }) + })) } fn buffer_id_from_proto(message: &Self::ProtoRequest) -> Result { @@ -1991,7 +1991,7 @@ impl LspCommand for GetHover { buffer.anchor_after(token_start)..buffer.anchor_before(token_end) }), ) - })?; + }); fn hover_blocks_from_marked_string(marked_string: lsp::MarkedString) -> Option { let block = match marked_string { @@ -2064,10 +2064,10 @@ impl LspCommand for GetHover { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? 
+ }) .await?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -2141,7 +2141,7 @@ impl LspCommand for GetHover { return Ok(None); } - let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned())?; + let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned()); let range = if let (Some(start), Some(end)) = (message.start, message.end) { language::proto::deserialize_anchor(start) .and_then(|start| language::proto::deserialize_anchor(end).map(|end| start..end)) @@ -2152,7 +2152,7 @@ impl LspCommand for GetHover { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_anchors([range.start, range.end]) - })? + }) .await?; } @@ -2238,7 +2238,7 @@ impl LspCommand for GetCompletions { let language_server_adapter = lsp_store .read_with(&cx, |lsp_store, _| { lsp_store.language_server_adapter_for_id(server_id) - })? + }) .with_context(|| format!("no language server with id {server_id}"))?; let lsp_defaults = response_list @@ -2355,7 +2355,7 @@ impl LspCommand for GetCompletions { completion_edits.push(edit); true }); - })?; + }); // If completions were filtered out due to errors that may be transient, mark the result // incomplete so that it is re-queried. @@ -2420,7 +2420,7 @@ impl LspCommand for GetCompletions { ) -> Result { let version = deserialize_version(&message.version); buffer - .update(&mut cx, |buffer, _| buffer.wait_for_version(version))? + .update(&mut cx, |buffer, _| buffer.wait_for_version(version)) .await?; let position = message .position @@ -2430,7 +2430,7 @@ impl LspCommand for GetCompletions { buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left) }) }) - .context("invalid position")??; + .context("invalid position")?; Ok(Self { position, context: CompletionContext { @@ -2471,7 +2471,7 @@ impl LspCommand for GetCompletions { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; let completions = message @@ -2644,7 +2644,7 @@ impl LspCommand for GetCodeActions { .with_context(|| { format!("Missing the language server that just returned a response {server_id}") }) - })??; + })?; let server_capabilities = language_server.capabilities(); let available_commands = server_capabilities @@ -2719,7 +2719,7 @@ impl LspCommand for GetCodeActions { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { @@ -2754,7 +2754,7 @@ impl LspCommand for GetCodeActions { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; message .actions @@ -2889,17 +2889,17 @@ impl LspCommand for OnTypeFormatting { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? 
+ }) .await?; let options = buffer.update(&mut cx, |buffer, cx| { lsp_formatting_options( language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx).as_ref(), ) - })?; + }); Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), trigger: message.trigger.clone(), options, push_to_history: false, @@ -2959,7 +2959,7 @@ impl InlayHints { } else { buffer.anchor_after(position) } - })?; + }); let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id) .await .context("lsp to project inlay hint conversion")?; @@ -3421,7 +3421,7 @@ impl LspCommand for InlayHints { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self { range: start..end }) @@ -3453,7 +3453,7 @@ impl LspCommand for InlayHints { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; let mut hints = Vec::new(); @@ -3510,7 +3510,7 @@ impl LspCommand for GetCodeLens { server_id: LanguageServerId, cx: AsyncApp, ) -> anyhow::Result> { - let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); let language_server = cx.update(|cx| { lsp_store .read(cx) @@ -3518,7 +3518,7 @@ impl LspCommand for GetCodeLens { .with_context(|| { format!("Missing the language server that just returned a response {server_id}") }) - })??; + })?; let server_capabilities = language_server.capabilities(); let available_commands = server_capabilities .execute_command_provider @@ -3566,7 +3566,7 @@ impl LspCommand for GetCodeLens { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; Ok(Self) } @@ -3597,7 +3597,7 @@ impl LspCommand for GetCodeLens { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; message .lens_actions @@ -3663,7 +3663,7 @@ impl LspCommand for LinkedEditingRange { if let Some(lsp::LinkedEditingRanges { mut ranges, .. }) = message { ranges.sort_by_key(|range| range.start); - buffer.read_with(&cx, |buffer, _| { + Ok(buffer.read_with(&cx, |buffer, _| { ranges .into_iter() .map(|range| { @@ -3673,7 +3673,7 @@ impl LspCommand for LinkedEditingRange { buffer.anchor_before(start)..buffer.anchor_after(end) }) .collect() - }) + })) } else { Ok(vec![]) } @@ -3698,11 +3698,11 @@ impl LspCommand for LinkedEditingRange { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; let position = deserialize_anchor(position).context("invalid position")?; buffer - .update(&mut cx, |buffer, _| buffer.wait_for_anchors([position]))? + .update(&mut cx, |buffer, _| buffer.wait_for_anchors([position])) .await?; Ok(Self { position }) } @@ -3736,7 +3736,7 @@ impl LspCommand for LinkedEditingRange { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(deserialize_version(&message.version)) - })? + }) .await?; let items: Vec> = message .items @@ -3751,7 +3751,7 @@ impl LspCommand for LinkedEditingRange { buffer .update(&mut cx, |buffer, _| { buffer.wait_for_anchors([range.start, range.end]) - })? + }) .await?; } Ok(items) @@ -4105,7 +4105,7 @@ impl LspCommand for GetDocumentDiagnostics { }) .transpose()? 
.with_context(|| format!("missing url on buffer {}", buffer.remote_id())) - })??; + })?; let mut pulled_diagnostics = HashMap::default(); match message { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 81e6d5291b858bf7c1305ffee6523bfd421f9821..d1969e2b85ce8c437e7e6d7c8d034ff5bc179e84 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -508,7 +508,7 @@ impl LocalLspStore { language_server.default_initialize_params(pull_diagnostics, cx); params.initialization_options = initialization_options; adapter.adapter.prepare_initialize_params(params, cx) - })??; + })?; Self::setup_lsp_messages( lsp_store.clone(), @@ -527,16 +527,14 @@ impl LocalLspStore { Arc::new(did_change_configuration_params.clone()), cx, ) - })? + }) .await .inspect_err(|_| { if let Some(lsp_store) = lsp_store.upgrade() { - lsp_store - .update(cx, |lsp_store, cx| { - lsp_store.cleanup_lsp_data(server_id); - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) - }) - .ok(); + lsp_store.update(cx, |lsp_store, cx| { + lsp_store.cleanup_lsp_data(server_id); + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) + }); } })?; @@ -776,8 +774,7 @@ impl LocalLspStore { cx, ) .log_err(); - }) - .ok(); + }); } } }) @@ -1150,8 +1147,7 @@ impl LocalLspStore { disk_based_diagnostics_progress_token.clone(), cx, ); - }) - .ok(); + }); } } }) @@ -1168,8 +1164,7 @@ impl LocalLspStore { LanguageServerLogType::Log(params.typ), params.message, )); - }) - .ok(); + }); } } }) @@ -1189,8 +1184,7 @@ impl LocalLspStore { }, params.message, )); - }) - .ok(); + }); } } }) @@ -1440,7 +1434,7 @@ impl LocalLspStore { let transaction_id = buffer.push_empty_transaction(cx.background_executor().now()); buffer.finalize_last_transaction(); anyhow::Ok(transaction_id) - })??; + })?; let result = Self::format_buffer_locally( lsp_store.clone(), @@ -1471,7 +1465,7 @@ impl LocalLspStore { project_transaction .0 .insert(cx.entity(), formatting_transaction); - })?; + }); result?; } @@ -1521,7 +1515,7 @@ impl LocalLspStore { buffer.merge_transactions(transaction_id, formatting_transaction_id); } Ok(()) - })? + }) } // handle whitespace formatting @@ -1529,7 +1523,7 @@ impl LocalLspStore { zlog::trace!(logger => "removing trailing whitespace"); let diff = buffer .handle - .read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx))? 
+ .read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx)) .await; extend_formatting_transaction(buffer, formatting_transaction_id, cx, |buffer, cx| { buffer.apply_diff(diff, cx); @@ -2054,22 +2048,20 @@ impl LocalLspStore { transaction_id_project_transaction, formatting_transaction_id, ); - })?; + }); } if !project_transaction_command.0.is_empty() { let mut extra_buffers = String::new(); for buffer in project_transaction_command.0.keys() { - buffer - .read_with(cx, |b, cx| { - if let Some(path) = b.project_path(cx) { - if !extra_buffers.is_empty() { - extra_buffers.push_str(", "); - } - extra_buffers.push_str(path.path.as_unix_str()); + buffer.read_with(cx, |b, cx| { + if let Some(path) = b.project_path(cx) { + if !extra_buffers.is_empty() { + extra_buffers.push_str(", "); } - }) - .ok(); + extra_buffers.push_str(path.path.as_unix_str()); + } + }); } zlog::warn!( logger => @@ -2192,7 +2184,7 @@ impl LocalLspStore { } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) { let _timer = zlog::time!(logger => "format-range"); let buffer_start = lsp::Position::new(0, 0); - let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()))?; + let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16())); language_server .request::(lsp::DocumentRangeFormattingParams { text_document: text_document.clone(), @@ -2236,7 +2228,7 @@ impl LocalLspStore { worktree_path.pop(); } Some(worktree_path) - })?; + }); let mut child = util::command::new_smol_command(command); @@ -2267,7 +2259,7 @@ impl LocalLspStore { let stdin = child.stdin.as_mut().context("failed to acquire stdin")?; let text = buffer .handle - .read_with(cx, |buffer, _| buffer.as_rope().clone())?; + .read_with(cx, |buffer, _| buffer.as_rope().clone()); for chunk in text.chunks() { stdin.write_all(chunk.as_bytes()).await?; } @@ -2286,7 +2278,7 @@ impl LocalLspStore { Ok(Some( buffer .handle - .update(cx, |buffer, cx| buffer.diff(stdout, cx))? + .update(cx, |buffer, cx| buffer.diff(stdout, cx)) .await, )) } @@ -2965,7 +2957,7 @@ impl LocalLspStore { None, cx, ) - })? + }) .await?; let transaction = buffer_to_edit.update(cx, |buffer, cx| { @@ -2984,7 +2976,7 @@ impl LocalLspStore { } else { None } - })?; + }); Ok(transaction) } @@ -3077,7 +3069,7 @@ impl LocalLspStore { language_server: Arc, cx: &mut AsyncApp, ) -> Result { - let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone())?; + let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone()); let mut operations = Vec::new(); if let Some(document_changes) = edit.document_changes { @@ -3182,7 +3174,7 @@ impl LocalLspStore { language_server.server_id(), cx, ) - })? + }) .await?; let edits = this @@ -3268,7 +3260,7 @@ impl LocalLspStore { op.text_document.version, cx, ) - })? + }) .await?; let transaction = buffer_to_edit.update(cx, |buffer, cx| { @@ -3286,7 +3278,7 @@ impl LocalLspStore { buffer.forget_transaction(transaction_id) } }) - })?; + }); if let Some(transaction) = transaction { project_transaction.0.insert(buffer_to_edit, transaction); } @@ -3305,7 +3297,7 @@ impl LocalLspStore { ) -> Result { let this = this.upgrade().context("project project closed")?; let language_server = this - .read_with(cx, |this, _| this.language_server_for_id(server_id))? 
+ .read_with(cx, |this, _| this.language_server_for_id(server_id)) .context("language server not found")?; let transaction = Self::deserialize_workspace_edit( this.clone(), @@ -3325,7 +3317,7 @@ impl LocalLspStore { .last_workspace_edits_by_language_server .insert(server_id, transaction); } - })?; + }); Ok(lsp::ApplyWorkspaceEditResponse { applied: true, failed_change: None, @@ -4456,8 +4448,7 @@ impl LspStore { } } }); - }) - .ok(); + }); } this.update(cx, |this, cx| { @@ -4503,8 +4494,7 @@ impl LspStore { for buffer in buffers_with_unknown_injections { buffer.update(cx, |buffer, cx| buffer.reparse(cx, false)); } - }) - .ok(); + }); } } }) @@ -5067,7 +5057,7 @@ impl LspStore { buffer_store .update(cx, |buffer_store, cx| { buffer_store.deserialize_project_transaction(response, push_to_history, cx) - })? + }) .await }) } else if self.mode.is_local() { @@ -5172,7 +5162,7 @@ impl LspStore { .map(|buffer| { buffer.read_with(cx, |buffer, _| buffer.remote_id().into()) }) - .collect::>()?, + .collect(), }) .await .and_then(|result| result.transaction.context("missing transaction")); @@ -5188,7 +5178,7 @@ impl LspStore { push_to_history, cx, ) - })? + }) .await }) } else { @@ -5521,7 +5511,7 @@ impl LspStore { buffer .update(cx, |buffer, _| { buffer.wait_for_edits(Some(position.timestamp)) - })? + }) .await?; this.update(cx, |this, cx| { let position = position.to_point_utf16(buffer.read(cx)); @@ -5567,7 +5557,7 @@ impl LspStore { cx.spawn(async move |this, cx| { if let Some(waiter) = - buffer.update(cx, |buffer, _| buffer.wait_for_autoindent_applied())? + buffer.update(cx, |buffer, _| buffer.wait_for_autoindent_applied()) { waiter.await?; } @@ -5585,7 +5575,7 @@ impl LspStore { cx, ) }) - })?? + })? .await }) } @@ -6715,13 +6705,13 @@ impl LspStore { buffer_handle .update(cx, |buffer, _| { buffer.wait_for_edits(transaction.edit_ids.iter().copied()) - })? + }) .await?; if push_to_history { buffer_handle.update(cx, |buffer, _| { buffer.push_transaction(transaction.clone(), Instant::now()); buffer.finalize_last_transaction(); - })?; + }); } Ok(Some(transaction)) } else { @@ -6820,7 +6810,7 @@ impl LspStore { None }; Ok(transaction) - })? + }) } else { Ok(None) } @@ -7200,7 +7190,7 @@ impl LspStore { })) .await; - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let mut has_errors = false; let inlay_hints = inlay_hints .into_iter() @@ -7462,13 +7452,9 @@ impl LspStore { .map_err(Arc::new); let fetched_colors = match fetched_colors { Ok(fetched_colors) => { - if Some(true) - == buffer - .update(cx, |buffer, _| { - buffer.version() != buffer_version_queried_for - }) - .ok() - { + if buffer.update(cx, |buffer, _| { + buffer.version() != buffer_version_queried_for + }) { return Ok(DocumentColors::default()); } fetched_colors @@ -7888,8 +7874,8 @@ impl LspStore { range: range_from_lsp(symbol_location.range), }) }) - .collect() - })?; + .collect::>() + }); populate_labels_for_symbols( core_symbols, @@ -8735,8 +8721,8 @@ impl LspStore { }) })? .await?; - let worktree_root = worktree.read_with(cx, |worktree, _| worktree.abs_path())?; - let source_ws = if worktree.read_with(cx, |worktree, _| worktree.is_local())? 
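// The buffer_ids hunk above shows the same change rippling through iterator code:
// when the per-item read was fallible, the surrounding collect had to gather into a
// Result; once it is plain, an ordinary collect suffices. Hypothetical stand-ins,
// not the project's real types:
fn ids_old(reads: Vec<Result<u64, String>>) -> Result<Vec<u64>, String> {
    reads.into_iter().collect::<Result<Vec<_>, _>>() // old: Result-of-Vec
}

fn ids_new(reads: Vec<u64>) -> Vec<u64> {
    reads.into_iter().collect() // new: no error layer left to thread through
}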
{ + let worktree_root = worktree.read_with(cx, |worktree, _| worktree.abs_path()); + let source_ws = if worktree.read_with(cx, |worktree, _| worktree.is_local()) { lsp_store .update(cx, |lsp_store, cx| { if let Some(local) = lsp_store.as_local_mut() { @@ -8766,7 +8752,7 @@ impl LspStore { (worktree, relative_path, source_ws) }; let project_path = ProjectPath { - worktree_id: worktree.read_with(cx, |worktree, _| worktree.id())?, + worktree_id: worktree.read_with(cx, |worktree, _| worktree.id()), path: relative_path, }; let buffer = lsp_store @@ -8793,7 +8779,7 @@ impl LspStore { if is_read_only { buffer.set_capability(Capability::ReadOnly, cx); } - })?; + }); } Ok(buffer) }) @@ -8877,7 +8863,7 @@ impl LspStore { let buffer_id = GetCompletions::buffer_id_from_proto(&envelope.payload)?; let buffer_handle = this.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; let request = GetCompletions::from_proto( envelope.payload, this.clone(), @@ -8894,7 +8880,7 @@ impl LspStore { let response = this .update(&mut cx, |this, cx| { this.request_lsp(buffer_handle.clone(), server_to_query, request, cx) - })? + }) .await?; this.update(&mut cx, |this, cx| { Ok(GetCompletions::response_to_proto( @@ -8904,7 +8890,7 @@ impl LspStore { &buffer_handle.read(cx).version(), cx, )) - })? + }) } async fn handle_lsp_command( @@ -8920,7 +8906,7 @@ impl LspStore { let buffer_id = T::buffer_id_from_proto(&envelope.payload)?; let buffer_handle = this.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; let request = T::from_proto( envelope.payload, this.clone(), @@ -8936,7 +8922,7 @@ impl LspStore { request, cx, ) - })? + }) .await?; this.update(&mut cx, |this, cx| { Ok(T::response_to_proto( @@ -8946,7 +8932,7 @@ impl LspStore { &buffer_handle.read(cx).version(), cx, )) - })? + }) } async fn handle_lsp_query( @@ -9104,11 +9090,11 @@ impl LspStore { let version = deserialize_version(get_document_diagnostics.buffer_version()); let buffer = lsp_store.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; buffer .update(&mut cx, |buffer, _| { buffer.wait_for_version(version.clone()) - })? 
+ }) .await?; lsp_store.update(&mut cx, |lsp_store, cx| { let lsp_data = lsp_store.latest_lsp_data(&buffer, cx); @@ -9127,12 +9113,10 @@ impl LspStore { existing_queries.insert( lsp_request_id, cx.spawn(async move |lsp_store, cx| { - let diagnostics_pull = lsp_store - .update(cx, |lsp_store, cx| { - lsp_store.pull_diagnostics_for_buffer(buffer, cx) - }) - .ok(); - if let Some(diagnostics_pull) = diagnostics_pull { + let diagnostics_pull = lsp_store.update(cx, |lsp_store, cx| { + lsp_store.pull_diagnostics_for_buffer(buffer, cx) + }); + if let Ok(diagnostics_pull) = diagnostics_pull { match diagnostics_pull.await { Ok(()) => {} Err(e) => log::error!("Failed to pull diagnostics: {e:#}"), @@ -9140,7 +9124,7 @@ impl LspStore { } }), ); - })?; + }); } Request::InlayHints(inlay_hints) => { let query_start = inlay_hints @@ -9188,7 +9172,7 @@ impl LspStore { if let Some((upstream_client, _)) = lsp_store.upstream_client() { upstream_client.handle_lsp_response(envelope.clone()); } - })?; + }); Ok(()) } @@ -9204,7 +9188,7 @@ impl LspStore { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?; anyhow::Ok(this.apply_code_action(buffer, action, false, cx)) - })??; + })?; let project_transaction = apply_code_action.await?; let project_transaction = this.update(&mut cx, |this, cx| { @@ -9215,7 +9199,7 @@ impl LspStore { cx, ) }) - })?; + }); Ok(proto::ApplyCodeActionResponse { transaction: Some(project_transaction), }) @@ -9268,7 +9252,7 @@ impl LspStore { }); Ok(()) - })??; + })?; Ok(proto::Ack {}) } @@ -9298,13 +9282,13 @@ impl LspStore { new_worktree, entry.clone(), )) - })? + }) .context("worktree not found")?; let (old_abs_path, old_worktree_id) = old_worktree.read_with(&cx, |worktree, _| { (worktree.absolutize(&old_entry.path), worktree.id()) - })?; + }); let new_abs_path = - new_worktree.read_with(&cx, |worktree, _| worktree.absolutize(&new_path))?; + new_worktree.read_with(&cx, |worktree, _| worktree.absolutize(&new_path)); let _transaction = Self::will_rename_entry( this.downgrade(), @@ -9328,8 +9312,7 @@ impl LspStore { &new_abs_path, old_entry.is_dir(), ); - }) - .ok(); + }); response } @@ -9421,7 +9404,7 @@ impl LspStore { cx.emit(LspStoreEvent::DiagnosticsUpdated { server_id, paths }); } Ok(()) - })? + }) } async fn handle_start_language_server( @@ -9464,7 +9447,7 @@ impl LspStore { server.worktree_id.map(WorktreeId::from_proto), )); cx.notify(); - })?; + }); Ok(()) } @@ -9543,7 +9526,7 @@ impl LspStore { } Ok(()) - })? 
+ }) } async fn handle_language_server_log( @@ -9566,7 +9549,8 @@ impl LspStore { log_type, message, )); - }) + }); + Ok(()) } async fn handle_lsp_ext_cancel_flycheck( @@ -9581,7 +9565,7 @@ impl LspStore { } else { None } - })?; + }); if let Some(task) = task { task.context("handling lsp ext cancel flycheck")?; } @@ -9623,7 +9607,7 @@ impl LspStore { )?; } anyhow::Ok(()) - })??; + })?; Ok(proto::Ack {}) } @@ -9634,15 +9618,13 @@ impl LspStore { cx: AsyncApp, ) -> Result { let server_id = LanguageServerId(envelope.payload.language_server_id as usize); - lsp_store - .read_with(&cx, |lsp_store, _| { - if let Some(server) = lsp_store.language_server_for_id(server_id) { - Some(server.notify::(())) - } else { - None - } - }) - .context("handling lsp ext clear flycheck")?; + lsp_store.read_with(&cx, |lsp_store, _| { + if let Some(server) = lsp_store.language_server_for_id(server_id) { + Some(server.notify::(())) + } else { + None + } + }); Ok(proto::Ack {}) } @@ -10161,7 +10143,7 @@ impl LspStore { anyhow::Ok(lsp_completion) } })) - })?? + })? .await?; let mut documentation_is_markdown = false; @@ -10188,7 +10170,7 @@ impl LspStore { let buffer_snapshot = this.update(&mut cx, |this, cx| { let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?; anyhow::Ok(buffer.read(cx).snapshot()) - })??; + })?; if let Some(text_edit) = completion.text_edit.as_ref() { let edit = parse_completion_text_edit(text_edit, &buffer_snapshot); @@ -10238,7 +10220,7 @@ impl LspStore { envelope.payload.trigger.clone(), cx, )) - })??; + })?; let transaction = on_type_formatting .await? @@ -10257,7 +10239,7 @@ impl LspStore { server_id: LanguageServerId::from_proto(envelope.payload.server_id), request_id: envelope.payload.request_id.map(|id| id as usize), }); - })?; + }); Ok(proto::Ack {}) } @@ -10269,7 +10251,7 @@ impl LspStore { let server_id = LanguageServerId::from_proto(envelope.payload.server_id); lsp_store.update(&mut cx, |lsp_store, _| { lsp_store.pull_workspace_diagnostics(server_id); - })?; + }); Ok(proto::Ack {}) } @@ -10281,7 +10263,7 @@ impl LspStore { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let buffer = lsp_store.update(&mut cx, |lsp_store, cx| { lsp_store.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; let color = envelope .payload @@ -10316,7 +10298,7 @@ impl LspStore { LanguageServerId(envelope.payload.server_id as usize), cx, ) - })? + }) .await .context("resolving color presentation")?; @@ -10351,7 +10333,7 @@ impl LspStore { let buffer = lsp_store.update(&mut cx, |lsp_store, cx| { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; lsp_store.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; let response_hint = lsp_store .update(&mut cx, |lsp_store, cx| { lsp_store.resolve_inlay_hint( @@ -10360,7 +10342,7 @@ impl LspStore { LanguageServerId(envelope.payload.language_server_id as usize), cx, ) - })? + }) .await .context("inlay hints fetch")?; Ok(proto::ResolveInlayHintResponse { @@ -10375,7 +10357,7 @@ impl LspStore { ) -> Result { this.update(&mut cx, |_, cx| { cx.emit(LspStoreEvent::RefreshCodeLens); - })?; + }); Ok(proto::Ack {}) } @@ -10397,7 +10379,7 @@ impl LspStore { anyhow::ensure!(&new_signature == signature, "invalid symbol signature"); } Ok(()) - })??; + })?; let buffer = this .update(&mut cx, |this, cx| { this.open_buffer_for_symbol( @@ -10413,7 +10395,7 @@ impl LspStore { }, cx, ) - })? 
+ }) .await?; this.update(&mut cx, |this, cx| { @@ -10433,7 +10415,7 @@ impl LspStore { let buffer_id = buffer.read(cx).remote_id().to_proto(); Ok(proto::OpenBufferForSymbolResponse { buffer_id }) } - })? + }) } fn symbol_signature(&self, abs_path: &Path) -> [u8; 32] { @@ -10451,7 +10433,7 @@ impl LspStore { let symbols = this .update(&mut cx, |this, cx| { this.symbols(&envelope.payload.query, cx) - })? + }) .await?; Ok(proto::GetProjectSymbolsResponse { @@ -10488,7 +10470,7 @@ impl LspStore { .collect(), cx, ); - })?; + }); Ok(proto::Ack {}) } @@ -10533,7 +10515,7 @@ impl LspStore { ) .detach_and_log_err(cx); } - })?; + }); Ok(proto::Ack {}) } @@ -10565,7 +10547,7 @@ impl LspStore { } } anyhow::Ok(()) - })??; + })?; Ok(proto::Ack {}) } @@ -10597,7 +10579,7 @@ impl LspStore { envelope.payload.completion.context("invalid completion")?, )?; anyhow::Ok((buffer, completion)) - })??; + })?; let apply_additional_edits = this.update(&mut cx, |this, cx| { this.apply_additional_edits_for_completion( @@ -10618,7 +10600,7 @@ impl LspStore { false, cx, ) - })?; + }); Ok(proto::ApplyCompletionAdditionalEditsResponse { transaction: apply_additional_edits @@ -10741,8 +10723,8 @@ impl LspStore { trigger: trigger as i32, buffer_ids: buffers .iter() - .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().into())) - .collect::>()?, + .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())) + .collect(), }) .await .and_then(|result| result.transaction.context("missing transaction")); @@ -10763,7 +10745,7 @@ impl LspStore { push_to_history, cx, ) - })? + }) .await }) } else { @@ -10786,7 +10768,7 @@ impl LspStore { } let trigger = FormatTrigger::from_proto(envelope.payload.trigger); anyhow::Ok(this.format(buffers, LspFormatTarget::Buffers, false, trigger, cx)) - })??; + })?; let project_transaction = format.await?; let project_transaction = this.update(&mut cx, |this, cx| { @@ -10797,7 +10779,7 @@ impl LspStore { cx, ) }) - })?; + }); Ok(proto::FormatBuffersResponse { transaction: Some(project_transaction), }) @@ -10831,7 +10813,7 @@ impl LspStore { ), }; anyhow::Ok(this.apply_code_action_kind(buffers, kind, false, cx)) - })??; + })?; let project_transaction = format.await?; let project_transaction = this.update(&mut cx, |this, cx| { @@ -10842,7 +10824,7 @@ impl LspStore { cx, ) }) - })?; + }); Ok(proto::ApplyCodeActionKindResponse { transaction: Some(project_transaction), }) @@ -11060,18 +11042,16 @@ impl LspStore { }; cx.spawn(async move |lsp_store, cx| { stop_task.await; - lsp_store - .update(cx, |lsp_store, cx| { - for buffer in buffers { - lsp_store.register_buffer_with_language_servers( - &buffer, - only_restart_servers.clone(), - true, - cx, - ); - } - }) - .ok() + lsp_store.update(cx, |lsp_store, cx| { + for buffer in buffers { + lsp_store.register_buffer_with_language_servers( + &buffer, + only_restart_servers.clone(), + true, + cx, + ); + } + }) }) .detach(); } @@ -12976,9 +12956,9 @@ impl LspStore { let version = deserialize_version(proto_request.buffer_version()); let buffer = lsp_store.update(cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; buffer - .update(cx, |buffer, _| buffer.wait_for_version(version))? 
+ .update(cx, |buffer, _| buffer.wait_for_version(version)) .await?; lsp_store.update(cx, |lsp_store, cx| { let buffer_snapshot = buffer.read(cx).snapshot(); @@ -13010,7 +12990,7 @@ impl LspStore { } } anyhow::Ok(()) - })??; + })?; Ok(()) } @@ -13034,11 +13014,11 @@ impl LspStore { let version = deserialize_version(proto_request.buffer_version()); let buffer = lsp_store.update(cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) - })??; + })?; buffer - .update(cx, |buffer, _| buffer.wait_for_version(version.clone()))? + .update(cx, |buffer, _| buffer.wait_for_version(version.clone())) .await?; - let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version())?; + let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version()); let request = T::from_proto(proto_request, lsp_store.clone(), buffer.clone(), cx.clone()).await?; let key = LspKey { @@ -13114,7 +13094,7 @@ impl LspStore { .ok(); }), ); - })?; + }); Ok(()) } diff --git a/crates/project/src/lsp_store/log_store.rs b/crates/project/src/lsp_store/log_store.rs index 92f8fecadd0236e899ef16781e55405dfe05f282..877cf44c4a0b511c89172e6cf87f857d200ed178 100644 --- a/crates/project/src/lsp_store/log_store.rs +++ b/crates/project/src/lsp_store/log_store.rs @@ -229,7 +229,7 @@ impl LogStore { if let Some(log_store) = log_store.upgrade() { log_store.update(cx, |log_store, cx| { log_store.on_io(server_id, io_kind, &message, cx); - })?; + }); } } anyhow::Ok(()) diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index 5066143244da890a63ead6650cb61fdb71d3964a..270db67576f0a02155997757a01d489d44ef1766 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -122,7 +122,7 @@ impl LspCommand for ExpandMacro { .and_then(deserialize_anchor) .context("invalid position")?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -256,7 +256,7 @@ impl LspCommand for OpenDocs { .and_then(deserialize_anchor) .context("invalid position")?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } @@ -469,7 +469,7 @@ impl LspCommand for GoToParentModule { .and_then(deserialize_anchor) .context("bad request with bad position")?; Ok(Self { - position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?, + position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)), }) } diff --git a/crates/project/src/lsp_store/rust_analyzer_ext.rs b/crates/project/src/lsp_store/rust_analyzer_ext.rs index 4d5f134e5f1682d53df3a0ab3f55a4b3676518f8..72c90e4596ebdc482bef00748dcce470076f434b 100644 --- a/crates/project/src/lsp_store/rust_analyzer_ext.rs +++ b/crates/project/src/lsp_store/rust_analyzer_ext.rs @@ -124,7 +124,7 @@ pub fn cancel_flycheck( Ok(()) } }) - .context("lsp ext cancel flycheck")??; + .context("lsp ext cancel flycheck")?; }; anyhow::Ok(()) }) @@ -157,8 +157,7 @@ pub fn run_flycheck( if let Some((client, project_id)) = upstream_client { let buffer_id = buffer - .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())) - .transpose()?; + .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())); let request = proto::LspExtRunFlycheck { project_id, buffer_id, @@ -182,7 +181,7 @@ pub fn run_flycheck( Ok(()) } }) 
- .context("lsp ext run flycheck")??; + .context("lsp ext run flycheck")?; }; anyhow::Ok(()) }) @@ -231,7 +230,7 @@ pub fn clear_flycheck( Ok(()) } }) - .context("lsp ext clear flycheck")??; + .context("lsp ext clear flycheck")?; }; anyhow::Ok(()) }) @@ -242,34 +241,32 @@ fn find_rust_analyzer_server( buffer: Option<&Entity>, cx: &mut AsyncApp, ) -> Option { - project - .read_with(cx, |project, cx| { - buffer - .and_then(|buffer| { - project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx) - }) - // If no rust-analyzer found for the current buffer (e.g. `settings.json`), fall back to the project lookup - // and use project's rust-analyzer if it's the only one. - .or_else(|| { - let rust_analyzer_servers = project - .lsp_store() - .read(cx) - .language_server_statuses - .iter() - .filter_map(|(server_id, server_status)| { - if server_status.name == RUST_ANALYZER_NAME { - Some(*server_id) - } else { - None - } - }) - .collect::>(); - if rust_analyzer_servers.len() == 1 { - rust_analyzer_servers.first().copied() - } else { - None - } - }) - }) - .ok()? + project.read_with(cx, |project, cx| { + buffer + .and_then(|buffer| { + project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx) + }) + // If no rust-analyzer found for the current buffer (e.g. `settings.json`), fall back to the project lookup + // and use project's rust-analyzer if it's the only one. + .or_else(|| { + let rust_analyzer_servers = project + .lsp_store() + .read(cx) + .language_server_statuses + .iter() + .filter_map(|(server_id, server_status)| { + if server_status.name == RUST_ANALYZER_NAME { + Some(*server_id) + } else { + None + } + }) + .collect::>(); + if rust_analyzer_servers.len() == 1 { + rust_analyzer_servers.first().copied() + } else { + None + } + }) + }) } diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index a8b6fe37701d85d06d837a0a5e494e2a294777ec..0a39b36c87bdaf0dd04c08387eceee8c23eb2f39 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -134,7 +134,7 @@ impl PrettierStore { { Ok(ControlFlow::Break(())) => None, Ok(ControlFlow::Continue(None)) => { - let default_instance = lsp_store + let default_task = lsp_store .update(cx, |lsp_store, cx| { lsp_store .prettiers_per_worktree @@ -147,8 +147,9 @@ impl PrettierStore { cx, ) }) - .ok()?; - Some((None, default_instance?.log_err().await?)) + .ok()??; + let default_instance = default_task.await.ok()?; + Some((None, default_instance)) } Ok(ControlFlow::Continue(Some(prettier_dir))) => { lsp_store @@ -162,21 +163,22 @@ impl PrettierStore { .ok()?; if let Some(prettier_task) = lsp_store .update(cx, |lsp_store, cx| { - lsp_store.prettier_instances.get_mut(&prettier_dir).map( - |existing_instance| { + lsp_store + .prettier_instances + .get_mut(&prettier_dir) + .and_then(|existing_instance| { existing_instance.prettier_task( &node, Some(&prettier_dir), Some(worktree_id), cx, ) - }, - ) + }) }) .ok()? 
{ log::debug!("Found already started prettier in {prettier_dir:?}"); - return Some((Some(prettier_dir), prettier_task?.await.log_err()?)); + return Some((Some(prettier_dir), prettier_task.await.log_err()?)); } log::info!("Found prettier in {prettier_dir:?}, starting."); @@ -735,12 +737,9 @@ pub(super) async fn format_with_prettier( match prettier_task.await { Ok(prettier) => { - let buffer_path = buffer - .update(cx, |buffer, cx| { - File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) - }) - .ok() - .flatten(); + let buffer_path = buffer.update(cx, |buffer, cx| { + File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) + }); let format_result = prettier .format(buffer, buffer_path, ignore_dir, cx) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 818909f90f030857bfbbdd9e754f6cad6aec56f7..07d55cf5e1a7d8a23cd66cb97d7f943167bcbc86 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1587,16 +1587,16 @@ impl Project { response.payload.project_id, path_style, ) - })?; + }); let buffer_store = cx.new(|cx| { BufferStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) - })?; + }); let image_store = cx.new(|cx| { ImageStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) - })?; + }); let environment = - cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, true, cx))?; + cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, true, cx)); let breakpoint_store = cx.new(|_| { BreakpointStore::remote( remote_id, @@ -1604,7 +1604,7 @@ impl Project { buffer_store.clone(), worktree_store.clone(), ) - })?; + }); let dap_store = cx.new(|cx| { DapStore::new_collab( remote_id, @@ -1614,7 +1614,7 @@ impl Project { fs.clone(), cx, ) - })?; + }); let lsp_store = cx.new(|cx| { LspStore::new_remote( @@ -1625,7 +1625,7 @@ impl Project { remote_id, cx, ) - })?; + }); let task_store = cx.new(|cx| { if run_tasks { @@ -1640,7 +1640,7 @@ impl Project { } else { TaskStore::Noop } - })?; + }); let settings_observer = cx.new(|cx| { SettingsObserver::new_remote( @@ -1650,7 +1650,7 @@ impl Project { None, cx, ) - })?; + }); let git_store = cx.new(|cx| { GitStore::remote( @@ -1661,9 +1661,9 @@ impl Project { remote_id, cx, ) - })?; + }); - let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx))?; + let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx)); let replica_id = ReplicaId::new(response.payload.replica_id as u16); let project = cx.new(|cx| { @@ -1750,19 +1750,17 @@ impl Project { project.add_worktree(&worktree, cx); } project - })?; + }); let weak_project = project.downgrade(); - lsp_store - .update(&mut cx, |lsp_store, cx| { - lsp_store.set_language_server_statuses_from_proto( - weak_project, - response.payload.language_servers, - response.payload.language_server_capabilities, - cx, - ); - }) - .ok(); + lsp_store.update(&mut cx, |lsp_store, cx| { + lsp_store.set_language_server_statuses_from_proto( + weak_project, + response.payload.language_servers, + response.payload.language_server_capabilities, + cx, + ); + }); let subscriptions = subscriptions .into_iter() @@ -1799,14 +1797,14 @@ impl Project { .map(|peer| peer.user_id) .collect(); user_store - .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))? 
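// The format_with_prettier hunk above drops `.ok().flatten()`: when the accessor
// returned a Result wrapping an Option, callers squashed both layers to get the
// Option; with the Result gone, the closure's Option comes back as-is.
// Hypothetical stand-ins:
fn path_old(read: Result<Option<&'static str>, String>) -> Option<&'static str> {
    read.ok().flatten() // before: two layers to squash
}

fn path_new(read: Option<&'static str>) -> Option<&'static str> {
    read // after: the Option is already the closure's return value
}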
+ .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx)) .await?; project.update(&mut cx, |this, cx| { this.set_collaborators_from_proto(response.payload.collaborators, cx)?; this.client_subscriptions.extend(subscriptions); anyhow::Ok(()) - })??; + })?; Ok(project) } @@ -1860,34 +1858,28 @@ impl Project { let languages = LanguageRegistry::test(cx.background_executor().clone()); let clock = Arc::new(FakeSystemClock::new()); let http_client = http_client::FakeHttpClient::with_404_response(); - let client = cx - .update(|cx| client::Client::new(clock, http_client.clone(), cx)) - .unwrap(); - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)).unwrap(); - let project = cx - .update(|cx| { - Project::local( - client, - node_runtime::NodeRuntime::unavailable(), - user_store, - Arc::new(languages), - fs, - None, - false, - cx, - ) - }) - .unwrap(); + let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx)); + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + let project = cx.update(|cx| { + Project::local( + client, + node_runtime::NodeRuntime::unavailable(), + user_store, + Arc::new(languages), + fs, + None, + false, + cx, + ) + }); for path in root_paths { - let (tree, _) = project + let (tree, _): (Entity, _) = project .update(cx, |project, cx| { project.find_or_create_worktree(path, true, cx) }) - .unwrap() .await .unwrap(); tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) - .unwrap() .await; } project @@ -2314,7 +2306,7 @@ impl Project { let lsp_store = self.lsp_store().downgrade(); cx.spawn(async move |project, cx| { let (old_abs_path, new_abs_path) = { - let root_path = worktree.read_with(cx, |this, _| this.abs_path())?; + let root_path = worktree.read_with(cx, |this, _| this.abs_path()); let new_abs_path = if is_root_entry { root_path .parent() @@ -2338,7 +2330,7 @@ impl Project { let entry = worktree_store .update(cx, |worktree_store, cx| { worktree_store.rename_entry(entry_id, new_path.clone(), cx) - })? + }) .await?; project @@ -2755,7 +2747,7 @@ impl Project { let buffer = task.await?; let project_entry_id = buffer.read_with(cx, |buffer, _cx| { File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id()) - })?; + }); Ok((project_entry_id, buffer)) }) @@ -2955,7 +2947,7 @@ impl Project { // Check if metadata already exists (e.g., for remote images) let needs_metadata = - cx.read_entity(&image_item, |item, _| item.image_metadata.is_none())?; + cx.read_entity(&image_item, |item, _| item.image_metadata.is_none()); if needs_metadata { let project = weak_project.upgrade().context("Project dropped")?; @@ -2964,7 +2956,7 @@ impl Project { image_item.update(cx, |image_item, cx| { image_item.image_metadata = Some(metadata); cx.emit(ImageItemEvent::MetadataUpdated); - })?; + }); } Ok(image_item) @@ -4372,20 +4364,18 @@ impl Project { path: &RelPath, cx: &mut AsyncApp, ) -> Option { - worktree - .read_with(cx, |worktree, _| { - worktree.entry_for_path(path).map(|entry| { - let project_path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - ResolvedPath::ProjectPath { - project_path, - is_dir: entry.is_dir(), - } - }) + worktree.read_with(cx, |worktree, _| { + worktree.entry_for_path(path).map(|entry| { + let project_path = ProjectPath { + worktree_id: worktree.id(), + path: entry.path.clone(), + }; + ResolvedPath::ProjectPath { + project_path, + is_dir: entry.is_dir(), + } }) - .ok()? 
+ }) } pub fn list_directory( @@ -4653,7 +4643,7 @@ impl Project { this.disconnected_from_host(cx); } Ok(()) - })? + }) } async fn handle_add_collaborator( @@ -4676,7 +4666,7 @@ impl Project { cx.emit(Event::CollaboratorJoined(collaborator.peer_id)); this.collaborators .insert(collaborator.peer_id, collaborator); - })?; + }); Ok(()) } @@ -4720,7 +4710,7 @@ impl Project { new_peer_id, }); Ok(()) - })? + }) } async fn handle_remove_collaborator( @@ -4747,7 +4737,7 @@ impl Project { cx.emit(Event::CollaboratorLeft(peer_id)); Ok(()) - })? + }) } async fn handle_update_project( @@ -4769,7 +4759,7 @@ impl Project { this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?; } Ok(()) - })? + }) } async fn handle_toast( @@ -4783,7 +4773,7 @@ impl Project { message: envelope.payload.message, }); Ok(()) - })? + }) } async fn handle_language_server_prompt_request( @@ -4811,7 +4801,7 @@ impl Project { })); anyhow::Ok(()) - })??; + })?; // We drop `this` to avoid holding a reference in this future for too // long. @@ -4843,7 +4833,7 @@ impl Project { notification_id: envelope.payload.notification_id.into(), }); Ok(()) - })? + }) } // Collab sends UpdateWorktree protos as messages @@ -4861,7 +4851,7 @@ impl Project { }); } Ok(()) - })? + }) } async fn handle_update_buffer_from_remote_server( @@ -4877,7 +4867,7 @@ impl Project { .detach_and_log_err(cx); } this.buffer_store.clone() - })?; + }); BufferStore::handle_update_buffer(buffer_store, envelope, cx).await } @@ -4886,12 +4876,12 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - if this.read_with(&cx, |project, _| project.is_via_collab())? { + if this.read_with(&cx, |project, _| project.is_via_collab()) { return Ok(proto::Ack {}); } let trusted_worktrees = cx - .update(|cx| TrustedWorktrees::try_get_global(cx))? + .update(|cx| TrustedWorktrees::try_get_global(cx)) .context("missing trusted worktrees")?; trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| { trusted_worktrees.trust( @@ -4904,7 +4894,7 @@ impl Project { .collect(), cx, ); - })?; + }); Ok(proto::Ack {}) } @@ -4913,12 +4903,12 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - if this.read_with(&cx, |project, _| project.is_via_collab())? { + if this.read_with(&cx, |project, _| project.is_via_collab()) { return Ok(proto::Ack {}); } let trusted_worktrees = cx - .update(|cx| TrustedWorktrees::try_get_global(cx))? + .update(|cx| TrustedWorktrees::try_get_global(cx)) .context("missing trusted worktrees")?; trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| { let worktree_store = this.read(cx).worktree_store().downgrade(); @@ -4930,7 +4920,7 @@ impl Project { .map(PathTrust::Worktree) .collect::>(); trusted_worktrees.restrict(worktree_store, restricted_paths, cx); - })?; + }); Ok(proto::Ack {}) } @@ -4947,7 +4937,7 @@ impl Project { .detach_and_log_err(cx); } this.buffer_store.clone() - })?; + }); BufferStore::handle_update_buffer(buffer_store, envelope, cx).await } @@ -4996,7 +4986,7 @@ impl Project { cx, ) }) - })? 
+ }) } async fn handle_toggle_lsp_logs( @@ -5018,7 +5008,7 @@ impl Project { enabled: envelope.payload.enabled, toggled_log_kind, }) - })?; + }); Ok(()) } @@ -5032,7 +5022,7 @@ impl Project { this.buffer_store.update(cx, |this, cx| { this.handle_synchronize_buffers(envelope, cx, client) }) - })??; + })?; Ok(response) } @@ -5044,12 +5034,12 @@ impl Project { ) -> Result { let peer_id = envelope.original_sender_id()?; let message = envelope.payload; - let path_style = this.read_with(&cx, |this, cx| this.path_style(cx))?; + let path_style = this.read_with(&cx, |this, cx| this.path_style(cx)); let query = SearchQuery::from_proto(message.query.context("missing query field")?, path_style)?; let results = this.update(&mut cx, |this, cx| { this.search_impl(query, cx).matching_buffers(cx) - })?; + }); let mut response = proto::FindSearchCandidatesResponse { buffer_ids: Vec::new(), @@ -5059,7 +5049,7 @@ impl Project { this.update(&mut cx, |this, cx| { let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx); response.buffer_ids.push(buffer_id.to_proto()); - })?; + }); } Ok(response) @@ -5073,7 +5063,7 @@ impl Project { let peer_id = envelope.original_sender_id()?; let buffer_id = BufferId::new(envelope.payload.id)?; let buffer = this - .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))? + .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx)) .await?; Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx) } @@ -5089,7 +5079,7 @@ impl Project { let open_buffer = this .update(&mut cx, |this, cx| { this.open_buffer(ProjectPath { worktree_id, path }, cx) - })? + }) .await?; Project::respond_to_open_buffer_request(this, open_buffer, peer_id, &mut cx) } @@ -5100,7 +5090,7 @@ impl Project { mut cx: AsyncApp, ) -> Result { let buffer = this - .update(&mut cx, |this, cx| this.create_buffer(true, cx))? + .update(&mut cx, |this, cx| this.create_buffer(true, cx)) .await?; let peer_id = envelope.original_sender_id()?; @@ -5123,7 +5113,7 @@ impl Project { Ok(proto::OpenBufferResponse { buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(), }) - })? + }) } fn create_buffer_for_peer( @@ -5149,9 +5139,7 @@ impl Project { this.image_store.update(cx, |image_store, cx| { image_store.handle_create_image_for_peer(envelope, cx) }) - })? - .log_err(); - Ok(()) + }) } fn synchronize_remote_buffers(&mut self, cx: &mut Context) -> Task> { @@ -5480,7 +5468,7 @@ impl Project { }; cx.spawn(async move |cx| { let file = worktree - .update(cx, |worktree, cx| worktree.load_file(&rel_path, cx))? + .update(cx, |worktree, cx| worktree.load_file(&rel_path, cx)) .await .context("Failed to load settings file")?; @@ -5488,7 +5476,7 @@ impl Project { let new_text = cx.read_global::(|store, cx| { store.new_text_for_update(file.text, move |settings| update(settings, cx)) - })?; + }); worktree .update(cx, |worktree, cx| { let line_ending = text::LineEnding::detect(&new_text); @@ -5500,7 +5488,7 @@ impl Project { has_bom, cx, ) - })? 
+ }) .await .context("Failed to write settings file")?; diff --git a/crates/project/src/project_search.rs b/crates/project/src/project_search.rs index 2efe0b73688a090629766ce9be28ef70a8b10e3b..33c8b38f76f80c73499293ace2df2ce61cdb8cb2 100644 --- a/crates/project/src/project_search.rs +++ b/crates/project/src/project_search.rs @@ -189,10 +189,7 @@ impl Search { let query = Arc::new(query); let (candidate_searcher, tasks) = match self.kind { SearchKind::OpenBuffersOnly => { - let Ok(open_buffers) = cx.update(|cx| self.all_loaded_buffers(&query, cx)) - else { - return; - }; + let open_buffers = cx.update(|cx| self.all_loaded_buffers(&query, cx)); let fill_requests = cx .background_spawn(async move { for buffer in open_buffers { @@ -259,16 +256,14 @@ impl Search { }); let weak_buffer_store = self.buffer_store.downgrade(); let buffer_store = self.buffer_store; - let Ok(guard) = cx.update(|cx| { + let guard = cx.update(|cx| { Project::retain_remotely_created_models_impl( &models, &buffer_store, &self.worktree_store, cx, ) - }) else { - return; - }; + }); let issue_remote_buffers_request = cx .spawn(async move |cx| { @@ -387,7 +382,7 @@ impl Search { let (mut snapshot, worktree_settings) = worktree .read_with(cx, |this, _| { Some((this.snapshot(), this.as_local()?.settings())) - })? + }) .context("The worktree is not local")?; if query.include_ignored() { // Pre-fetch all of the ignored directories as they're going to be searched. @@ -409,11 +404,11 @@ impl Search { .map(|path| local.add_path_prefix_to_scan(path).into_future()) .collect::>(); Some(barrier) - })?; + }); if let Some(barriers) = barrier { futures::future::join_all(barriers).await; } - snapshot = worktree.read_with(cx, |this, _| this.snapshot())?; + snapshot = worktree.read_with(cx, |this, _| this.snapshot()); } let tx = tx.clone(); let results = results.clone(); @@ -483,7 +478,7 @@ impl Search { .into_iter() .map(|path| this.open_buffer(path, cx)) .collect::>() - })?; + }); while let Some(buffer) = buffers.next().await { if let Some(buffer) = buffer.log_err() { @@ -508,7 +503,7 @@ impl Search { ) { _ = maybe!(async move { while let Ok(buffer) = rx.recv().await { - let snapshot = buffer.read_with(&mut cx, |this, _| this.snapshot())?; + let snapshot = buffer.read_with(&mut cx, |this, _| this.snapshot()); let (tx, rx) = oneshot::channel(); find_all_matches_tx.send((buffer, snapshot, tx)).await?; results.send(rx).await?; diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 50a2b675d282ee5a5549082fceadaa2bf49e8d51..1a97cecbd735cfa8433866e17c5ac0d91382f4ae 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -863,7 +863,7 @@ impl SettingsObserver { )], cx, ); - })?; + }); Ok(()) } @@ -878,7 +878,7 @@ impl SettingsObserver { .result() .context("setting new user settings")?; anyhow::Ok(()) - })??; + })?; Ok(()) } @@ -1191,7 +1191,7 @@ impl SettingsObserver { return; }; if let Some(user_tasks_content) = user_tasks_content { - let Ok(()) = task_store.update(cx, |task_store, cx| { + task_store.update(cx, |task_store, cx| { task_store .update_user_tasks( TaskSettingsLocation::Global(&file_path), @@ -1199,20 +1199,16 @@ impl SettingsObserver { cx, ) .log_err(); - }) else { - return; - }; + }); } while let Some(user_tasks_content) = user_tasks_file_rx.next().await { - let Ok(result) = task_store.update(cx, |task_store, cx| { + let result = task_store.update(cx, |task_store, cx| { task_store.update_user_tasks( TaskSettingsLocation::Global(&file_path), 
Some(&user_tasks_content), cx, ) - }) else { - break; - }; + }); weak_entry .update(cx, |_, cx| match result { @@ -1246,7 +1242,7 @@ impl SettingsObserver { return; }; if let Some(user_tasks_content) = user_tasks_content { - let Ok(()) = task_store.update(cx, |task_store, cx| { + task_store.update(cx, |task_store, cx| { task_store .update_user_debug_scenarios( TaskSettingsLocation::Global(&file_path), @@ -1254,20 +1250,16 @@ impl SettingsObserver { cx, ) .log_err(); - }) else { - return; - }; + }); } while let Some(user_tasks_content) = user_tasks_file_rx.next().await { - let Ok(result) = task_store.update(cx, |task_store, cx| { + let result = task_store.update(cx, |task_store, cx| { task_store.update_user_debug_scenarios( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }) else { - break; - }; + }); weak_entry .update(cx, |_, cx| match result { diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 4f4939491f19ebe6d32b82f780c4a9988a66c1d4..7a63b68422bbfc98684f32ebc637151de7d1d8d9 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -892,17 +892,13 @@ mod test_inventory { .unwrap_or_else(|| panic!("Failed to find task with name {task_name}")); let id_base = task_source_kind.to_id_base(); - inventory - .update(&mut cx, |inventory, _| { - inventory.task_scheduled( - task_source_kind.clone(), - task.resolve_task(&id_base, &TaskContext::default()) - .unwrap_or_else(|| { - panic!("Failed to resolve task with name {task_name}") - }), - ) - }) - .unwrap(); + inventory.update(&mut cx, |inventory, _| { + inventory.task_scheduled( + task_source_kind.clone(), + task.resolve_task(&id_base, &TaskContext::default()) + .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")), + ) + }); }) } @@ -925,17 +921,13 @@ mod test_inventory { .find(|(_, task)| task.label == task_name) .unwrap_or_else(|| panic!("Failed to find task with name {task_name}")); let id_base = task_source_kind.to_id_base(); - inventory - .update(&mut cx, |inventory, _| { - inventory.task_scheduled( - task_source_kind.clone(), - task.resolve_task(&id_base, &TaskContext::default()) - .unwrap_or_else(|| { - panic!("Failed to resolve task with name {task_name}") - }), - ); - }) - .unwrap(); + inventory.update(&mut cx, |inventory, _| { + inventory.task_scheduled( + task_source_kind.clone(), + task.resolve_task(&id_base, &TaskContext::default()) + .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")), + ); + }); }) } diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 462b164e83b6d7dd91c11edc8482290079019bf3..7aec460aeb9917eb9c1c58668ece4a10033a7ac9 100644 --- a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -84,7 +84,7 @@ impl TaskStore { anyhow::bail!("empty task store cannot handle task context requests") } }) - })??; + })?; let buffer_store = buffer_store .upgrade() .context("no buffer store when handling task context request")?; @@ -115,7 +115,7 @@ impl TaskStore { .with_context(|| format!("no local buffer with id {buffer_id}")), ) } - })? 
+ }) .await?; let location = Location { @@ -143,7 +143,7 @@ impl TaskStore { variables }; store.task_context_for_location(captured_variables, location, cx) - })?; + }); let task_context = context_task.await.unwrap_or_default(); Ok(proto::TaskContext { project_env: task_context.project_env.into_iter().collect(), @@ -319,7 +319,6 @@ fn local_task_context_for_location( .update(cx, |environment, cx| { environment.buffer_environment(&location.buffer, &worktree_store, cx) }) - .ok()? .await; let mut task_variables = cx @@ -335,7 +334,6 @@ fn local_task_context_for_location( cx, ) }) - .ok()? .await .log_err()?; // Remove all custom entries starting with _, as they're not intended for use by the end user. @@ -376,15 +374,12 @@ fn remote_task_context_for_location( cx, ) }) - .ok()? .await .log_err() .unwrap_or_default(); remote_context.extend(captured_variables); - let buffer_id = cx - .update(|cx| location.buffer.read(cx).remote_id().to_proto()) - .ok()?; + let buffer_id = cx.update(|cx| location.buffer.read(cx).remote_id().to_proto()); let context_task = upstream_client.request(proto::TaskContextForLocation { project_id, location: Some(proto::Location { @@ -472,7 +467,7 @@ fn combine_task_variables( toolchain_store.clone(), cx, ) - })? + }) .await .context("building basic default context")?; captured_variables.extend(baseline); @@ -491,7 +486,7 @@ fn combine_task_variables( toolchain_store, cx, ) - })? + }) .await .context("building provider context")?, ); diff --git a/crates/project/src/telemetry_snapshot.rs b/crates/project/src/telemetry_snapshot.rs index 5f9155371d74887af25d6e7481848444c6f25112..6212b448835350537b881e6262f689b9e4fe1de4 100644 --- a/crates/project/src/telemetry_snapshot.rs +++ b/crates/project/src/telemetry_snapshot.rs @@ -59,12 +59,7 @@ impl TelemetryWorktreeSnapshot { (path, snapshot) }); - let Ok((worktree_path, _snapshot)) = worktree_info else { - return TelemetryWorktreeSnapshot { - worktree_path: String::new(), - git_state: None, - }; - }; + let (worktree_path, _snapshot) = worktree_info; let git_state = git_store .update(cx, |git_store, cx| { @@ -78,8 +73,6 @@ impl TelemetryWorktreeSnapshot { }) .cloned() }) - .ok() - .flatten() .map(|repo| { repo.update(cx, |repo, _| { let current_branch = @@ -111,10 +104,7 @@ impl TelemetryWorktreeSnapshot { }); let git_state = match git_state { - Some(git_state) => match git_state.ok() { - Some(git_state) => git_state.await.ok(), - None => None, - }, + Some(receiver) => receiver.await.ok(), None => None, }; diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index c19f7b46c499d1a0368102a0d491d354ce26a800..f928871f1eb56bdbc38109d8509acb9776edb994 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -129,9 +129,9 @@ impl Project { .await .ok(); let lister = language?.toolchain_lister()?; - return cx - .update(|cx| lister.activation_script(&toolchain, shell_kind, cx)) - .ok(); + return Some( + cx.update(|cx| lister.activation_script(&toolchain, shell_kind, cx)), + ); } None }) @@ -345,9 +345,9 @@ impl Project { .await .ok(); let lister = language?.toolchain_lister()?; - return cx - .update(|cx| lister.activation_script(&toolchain, shell_kind, cx)) - .ok(); + return Some( + cx.update(|cx| lister.activation_script(&toolchain, shell_kind, cx)), + ); } None }) diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 7afc70827f85e1a1bafcad436409936876fd3b45..0820e4506e5c6b8d51c2732c64afcb21566350dd 100644 --- 
a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -270,7 +270,7 @@ impl ToolchainStore { RelPath::empty().into() }; Ok(this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx)) - })?? + })? .await; Ok(proto::Ack {}) } @@ -292,7 +292,7 @@ impl ToolchainStore { language_name, cx, ) - })? + }) .await; Ok(proto::ActiveToolchainResponse { @@ -322,7 +322,7 @@ impl ToolchainStore { language_name, cx, )) - })?? + })? .await; let has_values = toolchains.is_some(); let groups = if let Some(Toolchains { toolchains, .. }) = &toolchains { @@ -380,7 +380,7 @@ impl ToolchainStore { let language_name = LanguageName::from_proto(envelope.payload.language_name); let path = PathBuf::from(envelope.payload.abs_path); this.resolve_toolchain(path, language_name, cx) - })? + }) .await; let response = match toolchain { Ok(toolchain) => { @@ -539,9 +539,7 @@ impl LocalToolchainStore { path: Arc::from(RelPath::empty()), worktree_id, }); - let abs_path = worktree - .update(cx, |this, _| this.absolutize(&relative_path.path)) - .ok()?; + let abs_path = worktree.update(cx, |this, _| this.absolutize(&relative_path.path)); let project_env = environment .update(cx, |environment, cx| { @@ -551,7 +549,6 @@ impl LocalToolchainStore { cx, ) }) - .ok()? .await; cx.background_spawn(async move { @@ -613,7 +610,7 @@ impl LocalToolchainStore { path.as_path().into(), cx, ) - })? + }) .await; cx.background_spawn(async move { toolchain_lister diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 7b3ddb657cb10ae407538a9dde34504b36b8d4ea..83a7070aa97fb68794f44aa6e20c391957f34183 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -268,7 +268,7 @@ impl WorktreeStore { None, cx, ) - })? + }) .await }) } @@ -293,7 +293,7 @@ impl WorktreeStore { response.worktree_scan_id as usize, cx, ) - })? + }) .await .map(Some), None => Ok(None), @@ -419,7 +419,7 @@ impl WorktreeStore { cx, ) } - })? + }) .await? .map(CreatedEntry::Included) .unwrap_or_else(|| CreatedEntry::Excluded { @@ -448,13 +448,13 @@ impl WorktreeStore { response.worktree_scan_id as usize, cx, ) - })? + }) .await .map(CreatedEntry::Included), None => { let abs_path = new_worktree.read_with(cx, |worktree, _| { worktree.absolutize(&new_project_path.path) - })?; + }); Ok(CreatedEntry::Excluded { abs_path }) } } @@ -507,15 +507,13 @@ impl WorktreeStore { .ok() .flatten() { - trusted_worktrees - .update(cx, |trusted_worktrees, cx| { - trusted_worktrees.can_trust( - &worktree_store, - worktree.read(cx).id(), - cx, - ); - }) - .ok(); + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.can_trust( + &worktree_store, + worktree.read(cx).id(), + cx, + ); + }); } } Ok(worktree) @@ -559,7 +557,7 @@ impl WorktreeStore { if let Some(existing_worktree) = this.read_with(cx, |this, cx| { this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx) - })? 
{ + }) { return Ok(existing_worktree); } @@ -583,11 +581,11 @@ impl WorktreeStore { path_style, cx, ) - })?; + }); this.update(cx, |this, cx| { this.add(&worktree, cx); - })?; + }); Ok(worktree) }) } @@ -620,8 +618,7 @@ impl WorktreeStore { if visible { cx.update(|cx| { cx.add_recent_document(abs_path.as_path()); - }) - .log_err(); + }); } Ok(worktree) @@ -948,7 +945,7 @@ impl WorktreeStore { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); this.worktree_for_id(worktree_id, cx) .context("worktree not found") - })??; + })?; Worktree::handle_create_entry(worktree, envelope.payload, cx).await } @@ -982,7 +979,7 @@ impl WorktreeStore { scan_id, this.copy_entry(entry_id, new_project_path.into(), cx), )) - })??; + })?; let entry = entry.await?; Ok(proto::ProjectEntryResponse { entry: entry.as_ref().map(|entry| entry.into()), @@ -1008,7 +1005,7 @@ impl WorktreeStore { } this.worktree_for_entry(entry_id, cx) .context("worktree not found") - })??; + })?; Worktree::handle_delete_entry(worktree, envelope.payload, cx).await } @@ -1043,7 +1040,7 @@ impl WorktreeStore { scan_id, this.rename_entry(entry_id, (new_worktree_id, rel_path).into(), cx), )) - })??; + })?; Ok(proto::ProjectEntryResponse { entry: match &task.await? { CreatedEntry::Included(entry) => Some(entry.into()), @@ -1060,7 +1057,7 @@ impl WorktreeStore { ) -> Result { let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); let worktree = this - .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))? + .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx)) .context("invalid request")?; Worktree::handle_expand_entry(worktree, envelope.payload, cx).await } @@ -1072,7 +1069,7 @@ impl WorktreeStore { ) -> Result { let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); let worktree = this - .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))? + .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx)) .context("invalid request")?; Worktree::handle_expand_all_for_entry(worktree, envelope.payload, cx).await } diff --git a/crates/project_benchmarks/src/main.rs b/crates/project_benchmarks/src/main.rs index a20ed68ff4da729154980cfc74461e5f5b53b95c..02f810ad50323ba0fa5c07e8634569a09255f054 100644 --- a/crates/project_benchmarks/src/main.rs +++ b/crates/project_benchmarks/src/main.rs @@ -85,7 +85,7 @@ fn main() -> Result<(), anyhow::Error> { .into_iter() .map(|worktree| this.find_or_create_worktree(worktree, true, cx)) .collect::>() - })?; + }); let worktrees = futures::future::join_all(worktrees) .await @@ -94,7 +94,7 @@ fn main() -> Result<(), anyhow::Error> { for (worktree, _) in &worktrees { worktree - .update(cx, |this, _| this.as_local().unwrap().scan_complete())? + .update(cx, |this, _| this.as_local().unwrap().scan_complete()) .await; } println!("Worktrees loaded"); @@ -102,9 +102,7 @@ fn main() -> Result<(), anyhow::Error> { println!("Starting a project search"); let timer = std::time::Instant::now(); let mut first_match = None; - let matches = project - .update(cx, |this, cx| this.search(query, cx)) - .unwrap(); + let matches = project.update(cx, |this, cx| this.search(query, cx)); let mut matched_files = 0; let mut matched_chunks = 0; while let Ok(match_result) = matches.rx.recv().await { @@ -125,7 +123,7 @@ fn main() -> Result<(), anyhow::Error> { "Finished project search after {elapsed:?}. 
Matched {matched_files} files and {matched_chunks} excerpts" ); drop(project); - cx.update(|cx| cx.quit())?; + cx.update(|cx| cx.quit()); anyhow::Ok(()) }) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 896ffd0cfdf59c74c6571e4f3775a32ffbb04df6..a8623a62ed2dcb8749eafeadf117f9851327b6f6 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -3879,9 +3879,9 @@ impl ProjectPanel { let task = worktree.update(cx, |worktree, cx| { worktree.copy_external_entries(target_directory, paths, fs, cx) - })?; + }); - let opened_entries = task + let opened_entries: Vec<_> = task .await .with_context(|| "failed to copy external paths")?; this.update(cx, |this, cx| { diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index 6403ef357232c6b0fc920e745fc23bc517a18b4b..5e2c189ca8048f05371fa759961990ae97878d7f 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ b/crates/project_panel/src/project_panel_tests.rs @@ -8320,7 +8320,7 @@ impl project::ProjectItem for TestProjectItem { cx: &mut App, ) -> Option>>> { let path = path.clone(); - Some(cx.spawn(async move |cx| cx.new(|_| Self { path }))) + Some(cx.spawn(async move |cx| Ok(cx.new(|_| Self { path })))) } fn entry_id(&self, _: &App) -> Option { diff --git a/crates/prompt_store/src/prompt_store.rs b/crates/prompt_store/src/prompt_store.rs index 1f63acb1965428cf3dbc6b9b5739e249c13a9c31..8459ca6738f2fd84544212d19b454eea88527168 100644 --- a/crates/prompt_store/src/prompt_store.rs +++ b/crates/prompt_store/src/prompt_store.rs @@ -37,7 +37,7 @@ pub fn init(cx: &mut App) { .spawn(async move |cx| { prompt_store_task .await - .and_then(|prompt_store| cx.new(|_cx| prompt_store)) + .map(|prompt_store| cx.new(|_cx| prompt_store)) .map_err(Arc::new) }) .shared(); diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index 70e3a795784c22284c7591e68b1629a47867b79c..6148da270b1e9c181a8b0348835ce147331e47f4 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -611,7 +611,7 @@ pub fn connect( cx.spawn(async move |cx| { let connection = remote::connect(connection_options, delegate.clone(), cx).await?; - cx.update(|cx| remote::RemoteClient::new(unique_identifier, connection, rx, delegate, cx))? + cx.update(|cx| remote::RemoteClient::new(unique_identifier, connection, rx, delegate, cx)) .await }) } @@ -631,12 +631,12 @@ pub async fn open_remote_project( .update(|cx| { // todo: These paths are wrong they may have column and line information workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx) - })? + }) .await .context("fetching remote workspace position from db")?; let mut options = - cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx))?; + cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx)); options.window_bounds = workspace_position.window_bounds; cx.open_window(options, |window, cx| { @@ -755,7 +755,7 @@ pub async fn open_remote_project( paths.clone(), cx, ) - })? 
+ }) .await; window diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 70423196e8e0acd9ef6ba3430eb0b2374d1b8f40..1191af118357ccc0b3834735514ef6cd41f13479 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -273,11 +273,9 @@ impl ProjectPicker { .read_with(cx, |workspace, _| workspace.app_state().clone()) .ok()?; - let remote_connection = project - .read_with(cx, |project, cx| { - project.remote_client()?.read(cx).connection() - }) - .ok()??; + let remote_connection = project.read_with(cx, |project, cx| { + project.remote_client()?.read(cx).connection() + })?; let (paths, paths_with_positions) = determine_paths_with_positions(&remote_connection, paths).await; @@ -1020,7 +1018,7 @@ impl RemoteServerProjects { })?; let home_dir = project - .read_with(cx, |project, cx| project.resolve_abs_path("~", cx))? + .read_with(cx, |project, cx| project.resolve_abs_path("~", cx)) .await .and_then(|path| path.into_abs_path()) .map(|path| RemotePathBuf::new(path, path_style)) @@ -2113,17 +2111,13 @@ impl RemoteServerProjects { cx.spawn(async move |cx| { if confirmation.await.ok() == Some(0) { - remote_servers - .update(cx, |this, cx| { - this.delete_wsl_distro(index, cx); - }) - .ok(); - remote_servers - .update(cx, |this, cx| { - this.mode = Mode::default_mode(&this.ssh_config_servers, cx); - cx.notify(); - }) - .ok(); + remote_servers.update(cx, |this, cx| { + this.delete_wsl_distro(index, cx); + }); + remote_servers.update(cx, |this, cx| { + this.mode = Mode::default_mode(&this.ssh_config_servers, cx); + cx.notify(); + }); } anyhow::Ok(()) }) @@ -2269,17 +2263,13 @@ impl RemoteServerProjects { cx.spawn(async move |cx| { if confirmation.await.ok() == Some(0) { - remote_servers - .update(cx, |this, cx| { - this.delete_ssh_server(index, cx); - }) - .ok(); - remote_servers - .update(cx, |this, cx| { - this.mode = Mode::default_mode(&this.ssh_config_servers, cx); - cx.notify(); - }) - .ok(); + remote_servers.update(cx, |this, cx| { + this.delete_ssh_server(index, cx); + }); + remote_servers.update(cx, |this, cx| { + this.mode = Mode::default_mode(&this.ssh_config_servers, cx); + cx.notify(); + }); } anyhow::Ok(()) }) diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index f8c079f0d9a23f9c5c9cba15cf282e2a3376b7f8..3d6ccc97716e33284e507d35088f9b3b09bb8370 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -361,7 +361,7 @@ pub async fn connect( cx.update_default_global(|pool: &mut ConnectionPool, cx| { pool.connect(connection_options.clone(), delegate.clone(), cx) }) - })? + }) .await .map_err(|e| e.cloned()) } @@ -389,7 +389,7 @@ impl RemoteClient { "client", remote_connection.has_wsl_interop(), ) - })?; + }); let path_style = remote_connection.path_style(); let this = cx.new(|_| Self { @@ -398,7 +398,7 @@ impl RemoteClient { connection_options: remote_connection.connection_options(), path_style, state: Some(State::Connecting), - })?; + }); let io_task = remote_connection.start_proxy( unique_identifier, @@ -461,7 +461,7 @@ impl RemoteClient { multiplex_task, heartbeat_task, }); - })?; + }); Ok(Some(this)) }); @@ -621,7 +621,7 @@ impl RemoteClient { let remote_connection = cx .update_global(|pool: &mut ConnectionPool, cx| { pool.connect(connection_options, delegate.clone(), cx) - })? 
+ }) .await .map_err(|error| error.cloned())?; @@ -966,7 +966,6 @@ impl RemoteClient { panic!("missing test connection") } }) - .unwrap() .await .unwrap(); @@ -1128,7 +1127,7 @@ impl ConnectionPool { Err(Arc::new(error)) } } - })? + }) } }) .shared(); diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 58a03eed0828e4f0f0bc08ff647f70ec8c9994f2..3723001f9e0d04caa571530eab70cbb60155b6c2 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -66,7 +66,7 @@ impl DockerExecConnection { AppVersion::global(cx), AppCommitSha::try_global(cx), ) - })?; + }); let remote_platform = this.check_remote_platform().await?; this.path_style = match remote_platform.os { @@ -200,7 +200,7 @@ impl DockerExecConnection { ) } _ => Ok(Some(AppVersion::global(cx))), - })??; + })?; let tmp_path_gz = paths::remote_server_dir_relative().join( RelPath::unix(&format!( diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index d13e1c4934947e39b08e05eb32e2787548e621e1..ff4f07f420dce163c818999fcdfb058a490aed98 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -592,7 +592,7 @@ impl SshRemoteConnection { }; let (release_channel, version) = - cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx)))?; + cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx))); this.remote_binary_path = Some( this.ensure_server_binary(&delegate, release_channel, version, cx) .await?, @@ -668,7 +668,7 @@ impl SshRemoteConnection { ) } _ => Ok(Some(AppVersion::global(cx))), - })??; + })?; let tmp_path_gz = remote_server_dir_relative().join( RelPath::unix(&format!( diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 32dd9ebe8247bb4a0b631a79b1a93deb621e6ed1..bac263188d58730e720c12ed2805321fe41d206a 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -65,7 +65,7 @@ impl WslRemoteConnection { connection_options.user ); let (release_channel, version) = - cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx)))?; + cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx))); let mut this = Self { connection_options, @@ -210,7 +210,7 @@ impl WslRemoteConnection { let wanted_version = match release_channel { ReleaseChannel::Nightly | ReleaseChannel::Dev => None, - _ => Some(cx.update(|cx| AppVersion::global(cx))?), + _ => Some(cx.update(|cx| AppVersion::global(cx))), }; let src_path = delegate diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 2dd3927adfa3db221d565fe4f07596c38fedfeab..7c93b02c42d801bd2d9412cf95a914fc3ddc3435 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -443,7 +443,7 @@ impl HeadlessProject { mut cx: AsyncApp, ) -> Result { use client::ErrorCodeExt; - let fs = this.read_with(&cx, |this, _| this.fs.clone())?; + let fs = this.read_with(&cx, |this, _| this.fs.clone()); let path = PathBuf::from(shellexpand::tilde(&message.payload.path).to_string()); let canonicalized = match fs.canonicalize(&path).await { @@ -480,7 +480,7 @@ impl HeadlessProject { true, &mut cx, ) - })? 
+ }) .await?; let response = this.read_with(&cx, |_, cx| { @@ -489,7 +489,7 @@ impl HeadlessProject { worktree_id: worktree.id().to_proto(), canonicalized_path: canonicalized.to_string_lossy().into_owned(), } - })?; + }); // We spawn this asynchronously, so that we can send the response back // *before* `worktree_store.add()` can send out UpdateProject requests @@ -508,8 +508,7 @@ impl HeadlessProject { this.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(&worktree, cx); }); - }) - .log_err(); + }); }) .detach(); @@ -526,7 +525,7 @@ impl HeadlessProject { this.worktree_store.update(cx, |worktree_store, cx| { worktree_store.remove_worktree(worktree_id, cx); }); - })?; + }); Ok(proto::Ack {}) } @@ -542,16 +541,16 @@ impl HeadlessProject { let buffer = this.buffer_store.update(cx, |buffer_store, cx| { buffer_store.open_buffer(ProjectPath { worktree_id, path }, cx) }); - anyhow::Ok((buffer_store, buffer)) - })??; + (buffer_store, buffer) + }); let buffer = buffer.await?; - let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?; + let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id()); buffer_store.update(&mut cx, |buffer_store, cx| { buffer_store .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx) .detach_and_log_err(cx); - })?; + }); Ok(proto::OpenBufferResponse { buffer_id: buffer_id.to_proto(), @@ -571,21 +570,21 @@ impl HeadlessProject { let (worktree_store, session) = this.read_with(&cx, |this, _| { (this.worktree_store.clone(), this.session.clone()) - })?; + }); let worktree = worktree_store - .read_with(&cx, |store, cx| store.worktree_for_id(worktree_id, cx))? + .read_with(&cx, |store, cx| store.worktree_for_id(worktree_id, cx)) .context("worktree not found")?; let load_task = worktree.update(&mut cx, |worktree, cx| { worktree.load_binary_file(path.as_ref(), cx) - })?; + }); let loaded_file = load_task.await?; let content = loaded_file.content; let file = loaded_file.file; - let proto_file = worktree.read_with(&cx, |_worktree, cx| file.to_proto(cx))?; + let proto_file = worktree.read_with(&cx, |_worktree, cx| file.to_proto(cx)); let image_id = ImageId::from(NonZeroU64::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).unwrap()); @@ -629,9 +628,9 @@ impl HeadlessProject { mut cx: AsyncApp, ) -> Result { let trusted_worktrees = cx - .update(|cx| TrustedWorktrees::try_get_global(cx))? + .update(|cx| TrustedWorktrees::try_get_global(cx)) .context("missing trusted worktrees")?; - let worktree_store = this.read_with(&cx, |project, _| project.worktree_store.clone())?; + let worktree_store = this.read_with(&cx, |project, _| project.worktree_store.clone()); trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| { trusted_worktrees.trust( &worktree_store, @@ -643,7 +642,7 @@ impl HeadlessProject { .collect(), cx, ); - })?; + }); Ok(proto::Ack {}) } @@ -653,10 +652,9 @@ impl HeadlessProject { mut cx: AsyncApp, ) -> Result { let trusted_worktrees = cx - .update(|cx| TrustedWorktrees::try_get_global(cx))? 
+ .update(|cx| TrustedWorktrees::try_get_global(cx)) .context("missing trusted worktrees")?; - let worktree_store = - this.read_with(&cx, |project, _| project.worktree_store.downgrade())?; + let worktree_store = this.read_with(&cx, |project, _| project.worktree_store.downgrade()); trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| { let restricted_paths = envelope .payload @@ -666,7 +664,7 @@ impl HeadlessProject { .map(PathTrust::Worktree) .collect::>(); trusted_worktrees.restrict(worktree_store, restricted_paths, cx); - })?; + }); Ok(proto::Ack {}) } @@ -680,16 +678,16 @@ impl HeadlessProject { let buffer = this .buffer_store .update(cx, |buffer_store, cx| buffer_store.create_buffer(true, cx)); - anyhow::Ok((buffer_store, buffer)) - })??; + (buffer_store, buffer) + }); let buffer = buffer.await?; - let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?; + let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id()); buffer_store.update(&mut cx, |buffer_store, cx| { buffer_store .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx) .detach_and_log_err(cx); - })?; + }); Ok(proto::OpenBufferResponse { buffer_id: buffer_id.to_proto(), @@ -719,7 +717,7 @@ impl HeadlessProject { log_store.toggle_lsp_logs(server_id, envelope.payload.enabled, toggled_log_kind); }); anyhow::Ok(()) - })??; + })?; Ok(()) } @@ -735,7 +733,7 @@ impl HeadlessProject { this.worktree_store.update(cx, |worktree_store, cx| { worktree_store.find_or_create_worktree(settings_path, false, cx) }) - })? + }) .await?; let (buffer, buffer_store) = this.update(&mut cx, |this, cx| { @@ -750,7 +748,7 @@ impl HeadlessProject { }); (buffer, this.buffer_store.clone()) - })?; + }); let buffer = buffer.await?; @@ -770,7 +768,7 @@ impl HeadlessProject { }); buffer_id - })?; + }); Ok(proto::OpenBufferResponse { buffer_id: buffer_id.to_proto(), @@ -797,21 +795,21 @@ impl HeadlessProject { ) .into_handle(query, cx) .matching_buffers(cx) - })?; + }); let mut response = proto::FindSearchCandidatesResponse { buffer_ids: Vec::new(), }; - let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?; + let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone()); while let Ok(buffer) = results.rx.recv().await { - let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id())?; + let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id()); response.buffer_ids.push(buffer_id.to_proto()); buffer_store .update(&mut cx, |buffer_store, cx| { buffer_store.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx) - })? + }) .await?; } @@ -823,7 +821,7 @@ impl HeadlessProject { envelope: TypedEnvelope, cx: AsyncApp, ) -> Result { - let fs = cx.read_entity(&this, |this, _| this.fs.clone())?; + let fs = cx.read_entity(&this, |this, _| this.fs.clone()); let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string()); let check_info = envelope .payload @@ -855,7 +853,7 @@ impl HeadlessProject { envelope: TypedEnvelope, cx: AsyncApp, ) -> Result { - let fs = cx.read_entity(&this, |this, _| this.fs.clone())?; + let fs = cx.read_entity(&this, |this, _| this.fs.clone()); let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string()); let metadata = fs.metadata(&expanded).await?; @@ -942,7 +940,7 @@ impl HeadlessProject { this.environment.update(cx, |environment, cx| { environment.local_directory_environment(&shell, directory.into(), cx) }) - })? + }) .await .context("failed to get directory environment")? 
.into_iter() diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index 4073d3edae1aed2ad86bd214db0b39c2b0d1c2fe..d302451263b16ca4fe86a86fe19f07030538e538 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -242,7 +242,7 @@ fn start_server( // when calling quit, but it should be. cx.shutdown(); cx.quit(); - })?; + }); break; } _ = app_quit_rx.next().fuse() => { @@ -939,7 +939,7 @@ pub fn handle_settings_file_changes( }); cx.spawn(async move |cx| { while let Some(server_settings_content) = server_settings_file.next().await { - let result = cx.update_global(|store: &mut SettingsStore, cx| { + cx.update_global(|store: &mut SettingsStore, cx| { let result = store.set_server_settings(&server_settings_content, cx); if let Err(err) = &result { log::error!("Failed to load server settings: {err}"); @@ -947,9 +947,6 @@ pub fn handle_settings_file_changes( settings_changed(result.err(), cx); cx.refresh_windows(); }); - if result.is_err() { - break; // App dropped - } } }) .detach(); diff --git a/crates/repl/src/kernels/native_kernel.rs b/crates/repl/src/kernels/native_kernel.rs index 8630768decc6e788efdd3eaaadafc0c957e86d7e..6152958925fa023efe8f4e9a0c816b2793024281 100644 --- a/crates/repl/src/kernels/native_kernel.rs +++ b/crates/repl/src/kernels/native_kernel.rs @@ -294,15 +294,13 @@ impl NativeRunningKernel { if let Err(err) = result { log::error!("kernel: handling failed for {name}: {err:?}"); - session - .update(cx, |session, cx| { - session.kernel_errored( - format!("handling failed for {name}: {err}"), - cx, - ); - cx.notify(); - }) - .ok(); + session.update(cx, |session, cx| { + session.kernel_errored( + format!("handling failed for {name}: {err}"), + cx, + ); + cx.notify(); + }); } } } @@ -328,13 +326,11 @@ impl NativeRunningKernel { log::error!("{}", error_message); - session - .update(cx, |session, cx| { - session.kernel_errored(error_message, cx); + session.update(cx, |session, cx| { + session.kernel_errored(error_message, cx); - cx.notify(); - }) - .ok(); + cx.notify(); + }); }); anyhow::Ok(Box::new(Self { diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 07c6e9c8aa8c116cf5b4ec46ca07817cb8b4c36f..3dbbed46a4f5169d7fbee9a2782269e00ba41975 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -561,7 +561,7 @@ impl project::ProjectItem for NotebookItem { if path.path.extension().unwrap_or_default() == "ipynb" { Some(cx.spawn(async move |cx| { let abs_path = project - .read_with(cx, |project, cx| project.absolute_path(&path, cx))? + .read_with(cx, |project, cx| project.absolute_path(&path, cx)) .with_context(|| format!("finding the absolute path of {path:?}"))?; // todo: watch for changes to the file @@ -586,16 +586,16 @@ impl project::ProjectItem for NotebookItem { let id = project .update(cx, |project, cx| { project.entry_for_path(&path, cx).map(|entry| entry.id) - })? 
+ }) .context("Entry not found")?; - cx.new(|_| NotebookItem { + Ok(cx.new(|_| NotebookItem { path: abs_path, project_path: path, languages, notebook, id, - }) + })) })) } else { None diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index fc6af46782f26615aa0f5faeb7062ca03181ab9b..00ccb816bfd6275aabbdec7eb1247c43e46cc0df 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -82,29 +82,26 @@ pub fn open_rules_library( let store = PromptStore::global(cx); cx.spawn(async move |cx| { // We query windows in spawn so that all windows have been returned to GPUI - let existing_window = cx - .update(|cx| { - let existing_window = cx - .windows() - .into_iter() - .find_map(|window| window.downcast::()); - if let Some(existing_window) = existing_window { - existing_window - .update(cx, |rules_library, window, cx| { - if let Some(prompt_to_select) = prompt_to_select { - rules_library.load_rule(prompt_to_select, true, window, cx); - } - window.activate_window() - }) - .ok(); + let existing_window = cx.update(|cx| { + let existing_window = cx + .windows() + .into_iter() + .find_map(|window| window.downcast::()); + if let Some(existing_window) = existing_window { + existing_window + .update(cx, |rules_library, window, cx| { + if let Some(prompt_to_select) = prompt_to_select { + rules_library.load_rule(prompt_to_select, true, window, cx); + } + window.activate_window() + }) + .ok(); - Some(existing_window) - } else { - None - } - }) - .ok() - .flatten(); + Some(existing_window) + } else { + None + } + }); if let Some(existing_window) = existing_window { return Ok(existing_window); @@ -151,7 +148,7 @@ pub fn open_rules_library( }) }, ) - })? + }) }) } diff --git a/crates/session/src/session.rs b/crates/session/src/session.rs index fd45982bf40dc975857670ad29e28610f92617a6..8a3e1133d0b2bfaedd11128381e61bd486241cc9 100644 --- a/crates/session/src/session.rs +++ b/crates/session/src/session.rs @@ -67,7 +67,7 @@ impl AppSession { let _serialization_task = cx.spawn(async move |_, cx| { let mut current_window_stack = Vec::new(); loop { - if let Some(windows) = cx.update(|cx| window_stack(cx)).ok().flatten() + if let Some(windows) = cx.update(|cx| window_stack(cx)) && windows != current_window_stack { store_window_stack(&windows).await; diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index a9130a5d14de0c07578a8e70ec3299930689dd0f..fcf380d91f3f891f9f5ca225144ce9494c0594df 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -508,9 +508,9 @@ impl SettingsStore { update: impl 'static + Send + FnOnce(&mut SettingsContent, &App), ) { _ = self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| { - cx.read_global(|store: &SettingsStore, cx| { + Ok(cx.read_global(|store: &SettingsStore, cx| { store.new_text_for_update(old_text, |content| update(content, cx)) - }) + })) }); } @@ -520,9 +520,9 @@ impl SettingsStore { vscode_settings: VsCodeSettings, ) -> oneshot::Receiver> { self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| { - cx.read_global(|store: &SettingsStore, _cx| { + Ok(cx.read_global(|store: &SettingsStore, _cx| { store.get_vscode_edits(old_text, &vscode_settings) - }) + })) }) } diff --git a/crates/storybook/src/storybook.rs b/crates/storybook/src/storybook.rs index 42ca921e63b753c2bc938054e48708dfff6a04d5..89cf0155e12d717f92708b5656dc5c5c3ddbc4a5 100644 --- 
a/crates/storybook/src/storybook.rs
+++ b/crates/storybook/src/storybook.rs
@@ -154,8 +154,7 @@ pub fn init(cx: &mut App) {
 
 fn quit(_: &Quit, cx: &mut App) {
     cx.spawn(async move |cx| {
-        cx.update(|cx| cx.quit())?;
-        anyhow::Ok(())
+        cx.update(|cx| cx.quit());
    })
-    .detach_and_log_err(cx);
+    .detach();
 }
diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs
index 279626f197c1146506f84c9014c6cc2247892d94..1ea09d724ba5d986c5a75249868188b69ebe89a1 100644
--- a/crates/terminal/src/terminal.rs
+++ b/crates/terminal/src/terminal.rs
@@ -2751,24 +2751,17 @@ mod tests {
             })
             .detach();
         cx.background_spawn(async move {
-            #[cfg(target_os = "windows")]
-            {
-                let exit_status = completion_rx.recv().await.ok().flatten();
-                if let Some(exit_status) = exit_status {
-                    assert!(
-                        !exit_status.success(),
-                        "Wrong shell command should result in a failure"
-                    );
-                    assert_eq!(exit_status.code(), Some(1));
-                }
-            }
-            #[cfg(not(target_os = "windows"))]
-            {
-                let exit_status = completion_rx.recv().await.unwrap().unwrap();
+            // The channel may be closed if the terminal is dropped before sending
+            // the completion signal, which can happen with certain task scheduling orders.
+            let exit_status = completion_rx.recv().await.ok().flatten();
+            if let Some(exit_status) = exit_status {
                 assert!(
                     !exit_status.success(),
                     "Wrong shell command should result in a failure"
                 );
+                #[cfg(target_os = "windows")]
+                assert_eq!(exit_status.code(), Some(1));
+                #[cfg(not(target_os = "windows"))]
                 assert_eq!(exit_status.code(), None);
             }
         })
diff --git a/crates/terminal_view/src/persistence.rs b/crates/terminal_view/src/persistence.rs
index 8d6ef03fd714e6694aca12f3fe6a3a8bb166e84c..f2df1e0eec42d2531c8c83a4e9de187b4fda6742 100644
--- a/crates/terminal_view/src/persistence.rs
+++ b/crates/terminal_view/src/persistence.rs
@@ -136,7 +136,7 @@ pub(crate) fn deserialize_terminal_panel(
                 terminal_panel.center = PaneGroup::with_root(center_group);
                 terminal_panel.active_pane =
                     active_pane.unwrap_or_else(|| terminal_panel.center.first_pane());
-            })?;
+            });
         }
     }
 }
@@ -251,30 +251,27 @@ async fn deserialize_pane_group(
                .update(cx, |workspace, cx| default_working_directory(workspace, cx))
                .ok()
                .flatten();
-            let Some(terminal) = project
-                .update(cx, |project, cx| {
-                    project.create_terminal_shell(working_directory, cx)
-                })
-                .log_err()
-            else {
+            let terminal = project
+                .update(cx, |project, cx| {
+                    project.create_terminal_shell(working_directory, cx)
+                })
+                .await
+                .log_err();
+            let Some(terminal) = terminal else {
                 return;
             };
-
-            let terminal = terminal.await.log_err();
            pane.update_in(cx, |pane, window, cx| {
-                if let Some(terminal) = terminal {
-                    let terminal_view = Box::new(cx.new(|cx| {
-                        TerminalView::new(
-                            terminal,
-                            workspace.clone(),
-                            Some(workspace_id),
-                            project.downgrade(),
-                            window,
-                            cx,
-                        )
-                    }));
-                    pane.add_item(terminal_view, true, false, None, window, cx);
-                }
+                let terminal_view = Box::new(cx.new(|cx| {
+                    TerminalView::new(
+                        terminal,
+                        workspace.clone(),
+                        Some(workspace_id),
+                        project.downgrade(),
+                        window,
+                        cx,
+                    )
+                }));
+                pane.add_item(terminal_view, true, false, None, window, cx);
            })
            .ok();
        }
diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs
index 2c8779275ae57b708a3d6303ebc98bd7b9552b91..0b58400a86ff40849d2e67b0533802ab25f2a88f 100644
--- a/crates/terminal_view/src/terminal_panel.rs
+++ b/crates/terminal_view/src/terminal_panel.rs
@@ -281,11 +281,9 @@ impl TerminalPanel {
        };
 
        if let Some(workspace) = workspace.upgrade() {
-            workspace
-                .update(&mut cx, |workspace, _| {
-                    workspace.set_terminal_provider(TerminalProvider(terminal_panel.clone()))
-                })
- .ok(); + workspace.update(&mut cx, |workspace, _| { + workspace.set_terminal_provider(TerminalProvider(terminal_panel.clone())) + }); } // Since panels/docks are loaded outside from the workspace, we cleanup here, instead of through the workspace. @@ -487,7 +485,6 @@ impl TerminalPanel { ), None => project.create_terminal_shell(working_directory, cx), }) - .ok()? .await .log_err()?; @@ -781,7 +778,7 @@ impl TerminalPanel { })?; let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?; let terminal = project - .update(cx, |project, cx| project.create_terminal_task(task, cx))? + .update(cx, |project, cx| project.create_terminal_task(task, cx)) .await?; let result = workspace.update_in(cx, |workspace, window, cx| { let terminal_view = Box::new(cx.new(|cx| { @@ -840,7 +837,7 @@ impl TerminalPanel { })?; let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?; let terminal = project - .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))? + .update(cx, |project, cx| project.create_terminal_shell(cwd, cx)) .await; match terminal { @@ -914,15 +911,13 @@ impl TerminalPanel { .timer(Duration::from_millis(50)) .await; let terminal_panel = terminal_panel.upgrade()?; - let items = terminal_panel - .update(cx, |terminal_panel, cx| { - SerializedItems::WithSplits(serialize_pane_group( - &terminal_panel.center, - &terminal_panel.active_pane, - cx, - )) - }) - .ok()?; + let items = terminal_panel.update(cx, |terminal_panel, cx| { + SerializedItems::WithSplits(serialize_pane_group( + &terminal_panel.center, + &terminal_panel.active_pane, + cx, + )) + }); cx.background_spawn( async move { KEY_VALUE_STORE @@ -964,7 +959,7 @@ impl TerminalPanel { let new_terminal = project .update(cx, |project, cx| { project.create_terminal_task(spawn_task, cx) - })? 
+ }) .await?; terminal_to_replace.update_in(cx, |terminal_to_replace, window, cx| { terminal_to_replace.set_terminal(new_terminal.clone(), window, cx); @@ -1299,14 +1294,12 @@ async fn wait_for_terminals_tasks( terminals_for_task: Vec<(usize, Entity, Entity)>, cx: &mut AsyncApp, ) { - let pending_tasks = terminals_for_task.iter().filter_map(|(_, _, terminal)| { - terminal - .update(cx, |terminal_view, cx| { - terminal_view - .terminal() - .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx)) - }) - .ok() + let pending_tasks = terminals_for_task.iter().map(|(_, _, terminal)| { + terminal.update(cx, |terminal_view, cx| { + terminal_view + .terminal() + .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx)) + }) }); join_all(pending_tasks).await; } diff --git a/crates/terminal_view/src/terminal_path_like_target.rs b/crates/terminal_view/src/terminal_path_like_target.rs index fa401966459c5ce3bc14918bf5b64089de4c50ed..24056b4ab371d0db648ea9e37a64ff935deb62d6 100644 --- a/crates/terminal_view/src/terminal_path_like_target.rs +++ b/crates/terminal_view/src/terminal_path_like_target.rs @@ -362,8 +362,8 @@ fn possible_open_target( cx.spawn(async move |cx| { background_fs_checks_task.await.or_else(|| { for (worktree, worktree_paths_to_check) in worktree_paths_to_check { - let found_entry = worktree - .update(cx, |worktree, _| -> Option { + if let Some(found_entry) = + worktree.update(cx, |worktree, _| -> Option { let traversal = worktree.traverse_from_path(true, true, false, RelPath::empty()); for entry in traversal { @@ -387,8 +387,7 @@ fn possible_open_target( } None }) - .ok()?; - if let Some(found_entry) = found_entry { + { return Some(found_entry); } } diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index e7e60ff4b31dfbdd16b7de8841285d81fc311fc5..f6805b82153455694fbde0edb785db0a88d6592d 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1421,7 +1421,7 @@ impl SerializableItem for TerminalView { .flatten(); let terminal = project - .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))? 
+                .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))
                 .await?;
            cx.update(|window, cx| {
                cx.new(|cx| {
diff --git a/crates/theme/src/font_family_cache.rs b/crates/theme/src/font_family_cache.rs
index 411cf9b4d41359f4da4520061ac46c984bdd08f2..b4ab57e52d5c48c432bfc8f139df29824c997224 100644
--- a/crates/theme/src/font_family_cache.rs
+++ b/crates/theme/src/font_family_cache.rs
@@ -71,9 +71,7 @@ impl FontFamilyCache {
            return;
        }
 
-        let Ok(text_system) = cx.update(|cx| App::text_system(cx).clone()) else {
-            return;
-        };
+        let text_system = cx.update(|cx| App::text_system(cx).clone());
 
        let state = self.state.clone();
 
diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs
index 06f7d1cdf3e27f43bdb5013038b943b9e5193680..36af60e0f792f5146b9b573bb6a060a8461fe117 100644
--- a/crates/toolchain_selector/src/active_toolchain.rs
+++ b/crates/toolchain_selector/src/active_toolchain.rs
@@ -198,13 +198,10 @@ impl ActiveToolchain {
                    .or_else(|| toolchains.toolchains.first())
                    .cloned();
                if let Some(toolchain) = &default_choice {
-                    let worktree_root_path = project
-                        .read_with(cx, |this, cx| {
-                            this.worktree_for_id(worktree_id, cx)
-                                .map(|worktree| worktree.read(cx).abs_path())
-                        })
-                        .ok()
-                        .flatten()?;
+                    let worktree_root_path = project.read_with(cx, |this, cx| {
+                        this.worktree_for_id(worktree_id, cx)
+                            .map(|worktree| worktree.read(cx).abs_path())
+                    })?;
                    workspace::WORKSPACE_DB
                        .set_toolchain(
                            workspace_id,
@@ -225,7 +222,6 @@ impl ActiveToolchain {
                            cx,
                        )
                    })
-                    .ok()?
                    .await;
            }
 
diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs
index 36ef2b960a8abfe684628cea465b68e6eab5e463..62a1ad33eba6942a96a72e10671b992c303062eb 100644
--- a/crates/toolchain_selector/src/toolchain_selector.rs
+++ b/crates/toolchain_selector/src/toolchain_selector.rs
@@ -207,7 +207,7 @@ impl AddToolchainState {
            let toolchain = project
                .update(cx, |this, cx| {
                    this.resolve_toolchain(path.clone(), language_name, cx)
-                })?
+                })
                .await;
            let Ok(toolchain) = toolchain else {
                // Go back to the path input state
@@ -240,7 +240,7 @@ impl AddToolchainState {
            };
            let resolved_toolchain_path = project.read_with(cx, |this, cx| {
                this.find_project_path(&toolchain.path.as_ref(), cx)
-            })?;
+            });
 
            // Suggest a default scope based on the applicability.
            let scope = if let Some(project_path) = resolved_toolchain_path {
@@ -250,8 +250,6 @@ impl AddToolchainState {
                        this.worktree_for_id(root_path.worktree_id, cx)
                            .map(|worktree| worktree.read(cx).abs_path())
                    })
-                    .ok()
-                    .flatten()
                    .context("Could not find a worktree with a given worktree ID")?;
                ToolchainScope::Subproject(worktree_root_path, root_path.path)
            } else {
@@ -610,7 +608,7 @@ impl ToolchainSelector {
                        language_name.clone(),
                        cx,
                    )
-                })?
+                })
                .await;
            workspace
                .update_in(cx, |this, window, cx| {
@@ -788,7 +786,6 @@ impl ToolchainSelectorDelegate {
                .read_with(cx, |this, _| {
                    Project::toolchain_metadata(this.languages().clone(), language_name.clone())
                })
-                .ok()?
                .await?;
            let relative_path = this
                .update(cx, |this, cx| {
@@ -817,7 +814,6 @@ impl ToolchainSelectorDelegate {
                    cx,
                )
            })
-            .ok()?
.await?; let pretty_path = { if relative_path.is_empty() { diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 22bfc7ec8b1651c7155ec26fdc42059c1f72245f..2e639a0dd501d4e85c034f7d1e3a88851a85ff4d 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1982,7 +1982,7 @@ pub fn command_interceptor( let (range, query) = VimCommand::parse_range(&string[1..]); let action = match cx.update(|cx| commands(cx).get(cmd_idx)?.parse(&query, &range, cx)) { - Ok(Some(action)) => action, + Some(action) => action, _ => continue, }; results.push(CommandInterceptItem { diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 06126823aba5e36b361b0514698d950a430694ea..eaf5cfd99704c415039bf30f94435bb75c0f79cc 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -852,7 +852,8 @@ impl ItemHandle for Entity { close_item_task.await?; pane.update(cx, |pane, _cx| { pane.nav_history_mut().remove_item(item_id); - }) + }); + anyhow::Ok(()) } }) .detach_and_log_err(cx); diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 14119030f794b095ab419c9a68567fd600ae0420..1c973669d033c74f9d63c005ce65a7d1e8f86edd 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -248,12 +248,12 @@ impl LanguageServerPrompt { this.request.take().map(|request| request.respond(ix)) }); - potential_future? // App Closed + potential_future .context("Response already sent")? .await .context("Stream already closed")?; - this.update(cx, |_, cx| cx.emit(DismissEvent))?; + this.update(cx, |_, cx| cx.emit(DismissEvent)); anyhow::Ok(()) }) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index d1fefea4026305671b29bac07483fcb2ceb9da90..532c1869d1485c91f1047ccf74781b69c3620b87 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2099,7 +2099,7 @@ impl Pane { const DELETED_MESSAGE: &str = "This file has been deleted on disk since you started editing it. 
Do you want to recreate it?"; - let path_style = project.read_with(cx, |project, cx| project.path_style(cx))?; + let path_style = project.read_with(cx, |project, cx| project.path_style(cx)); if save_intent == SaveIntent::Skip { return Ok(true); }; @@ -2314,7 +2314,7 @@ impl Pane { .flatten(); let save_task = if let Some(project_path) = project_path { let (worktree, path) = project_path.await?; - let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?; + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); let new_path = ProjectPath { worktree_id, path }; pane.update_in(cx, |pane, window, cx| { diff --git a/crates/workspace/src/toast_layer.rs b/crates/workspace/src/toast_layer.rs index 515794554831dc62bdf8babf717ce1f372f37763..ef1832e6f29137d233a5d17db160de1f0c31d730 100644 --- a/crates/workspace/src/toast_layer.rs +++ b/crates/workspace/src/toast_layer.rs @@ -185,7 +185,7 @@ impl ToastLayer { cx.background_executor().timer(duration).await; if let Some(this) = this.upgrade() { - this.update(cx, |this, cx| this.hide_toast(cx)).ok(); + this.update(cx, |this, cx| this.hide_toast(cx)); } }); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index e82ab432b42ae637030532a243de72d5df2558d9..d5811f3528c1d9bbc65fc80b38e394d8b0a975ba 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -566,8 +566,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c Ok(Some(paths)) => { cx.update(|cx| { open_paths(&paths, app_state, OpenOptions::default(), cx).detach_and_log_err(cx) - }) - .ok(); + }); } Ok(None) => {} Err(err) => { @@ -583,8 +582,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c }) .ok(); } - }) - .ok(); + }); } }, ) @@ -685,7 +683,7 @@ impl ProjectItemRegistry { Ok(project_item) => { let project_item = project_item; let project_entry_id: Option = - project_item.read_with(cx, project::ProjectItem::entry_id)?; + project_item.read_with(cx, project::ProjectItem::entry_id); let build_workspace_item = Box::new( |pane: &mut Pane, window: &mut Window, cx: &mut Context| { Box::new(cx.new(|cx| { @@ -1653,11 +1651,9 @@ impl Workspace { if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) { paths_to_open = paths.ordered_paths().cloned().collect(); if !paths.is_lexicographically_ordered() { - project_handle - .update(cx, |project, cx| { - project.set_worktrees_reordered(true, cx); - }) - .log_err(); + project_handle.update(cx, |project, cx| { + project.set_worktrees_reordered(true, cx); + }); } } @@ -1669,7 +1665,7 @@ impl Workspace { if let Some((_, project_entry)) = cx .update(|cx| { Workspace::project_path_for_path(project_handle.clone(), &path, true, cx) - })? + }) .await .log_err() { @@ -1698,8 +1694,7 @@ impl Workspace { None } }) - })? - else { + }) else { // We did not find a worktree with a given path, but that's whatever. continue; }; @@ -1710,7 +1705,7 @@ impl Workspace { project_handle .update(cx, |this, cx| { this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx) - })? 
+ })
.await;
}
if let Some(workspace) = serialized_workspace.as_ref() {
@@ -1720,7 +1715,7 @@ impl Workspace {
this.add_toolchain(toolchain.clone(), scope.clone(), cx);
}
}
- })?;
+ });
}

let window = if let Some(window) = requesting_window {
@@ -1770,7 +1765,7 @@ impl Workspace {
};

// Use the serialized workspace to construct the new window
- let mut options = cx.update(|cx| (app_state.build_window_options)(display, cx))?;
+ let mut options = cx.update(|cx| (app_state.build_window_options)(display, cx));
options.window_bounds = window_bounds;
let centered_layout = serialized_workspace
.as_ref()
@@ -2501,7 +2496,7 @@ impl Workspace {
if let Some(active_call) = active_call
&& workspace_count == 1
- && active_call.read_with(cx, |call, _| call.room().is_some())?
+ && active_call.read_with(cx, |call, _| call.room().is_some())
{
if close_intent == CloseIntent::CloseWindow {
let answer = cx.update(|window, cx| {
@@ -2518,7 +2513,7 @@ impl Workspace {
return anyhow::Ok(false);
} else {
active_call
- .update(cx, |call, cx| call.hang_up(cx))?
+ .update(cx, |call, cx| call.hang_up(cx))
.await
.log_err();
}
@@ -2764,7 +2759,7 @@ impl Workspace {
},
cx,
)
- })?
+ })
.await?;
Ok(())
})
@@ -2969,7 +2964,7 @@ impl Workspace {
});
cx.spawn(async move |cx| {
let (worktree, path) = entry.await?;
- let worktree_id = worktree.read_with(cx, |t, _| t.id())?;
+ let worktree_id = worktree.read_with(cx, |t, _| t.id());
Ok((
worktree,
ProjectPath {
@@ -5034,7 +5029,7 @@ impl Workspace {
.get(&leader_id.into())
.context("stopped following")?;
anyhow::Ok(state.pane().clone())
- })??;
+ })?;
let existing_item = pane.update_in(cx, |pane, window, cx| {
let client = this.read(cx).client().clone();
pane.items().find_map(|item| {
@@ -5103,7 +5098,8 @@ impl Workspace {
);

Some(())
- })?;
+ })
+ .context("no follower state")?;

Ok(())
}
@@ -5861,7 +5857,7 @@ impl Workspace {
breakpoint_store
.with_serialized_breakpoints(serialized_workspace.breakpoints, cx)
})
- })?
+ })
.await;

// Clean up all the items that have _not_ been loaded. Our ItemIds aren't stable. That means
@@ -6074,7 +6070,7 @@ impl Workspace {
let clear_task = persistence::DB.clear_trusted_worktrees();
cx.spawn(async move |_, cx| {
if clear_task.await.log_err().is_some() {
- cx.update(|cx| reload(cx)).ok();
+ cx.update(|cx| reload(cx));
}
})
.detach();
@@ -7500,7 +7496,7 @@ impl WorkspaceStore {
});

Ok(response)
- })?
+ })
}

async fn handle_update_followers(
@@ -7524,7 +7520,7 @@ impl WorkspaceStore {
.is_ok()
});
Ok(())
- })?
+ })
}

pub fn workspaces(&self) -> &HashSet> {
@@ -7650,7 +7646,7 @@ async fn join_channel_internal(
None
};
(should_prompt, open_room)
- })?;
+ });

if let Some(room) = open_room {
let task = room.update(cx, |room, cx| {
@@ -7659,7 +7655,7 @@ async fn join_channel_internal(
}

None
- })?;
+ });
if let Some(task) = task {
task.await?;
}
@@ -7688,7 +7684,7 @@ async fn join_channel_internal(
}
}

- let client = cx.update(|cx| active_call.read(cx).client())?;
+ let client = cx.update(|cx| active_call.read(cx).client());

let mut client_status = client.status();

@@ -7719,14 +7715,14 @@ async fn join_channel_internal(
let room = active_call
.update(cx, |active_call, cx| {
active_call.join_channel(channel_id, cx)
- })?
+ })
.await?;

let Some(room) = room else {
return anyhow::Ok(true);
};

- room.update(cx, |room, _| room.room_update_completed())?
+ room.update(cx, |room, _| room.room_update_completed()) .await; let task = room.update(cx, |room, cx| { @@ -7768,7 +7764,7 @@ async fn join_channel_internal( } None - })?; + }); if let Some(task) = task { task.await?; return anyhow::Ok(true); @@ -7807,14 +7803,13 @@ pub fn join_channel( None, cx, ) - })? + }) .await?; if result.is_ok() { cx.update(|cx| { cx.dispatch_action(&OpenChannelNotes); - }) - .log_err(); + }); } active_window = Some(window_handle); @@ -7872,7 +7867,7 @@ pub async fn get_any_active_workspace( // find an existing workspace to focus and show call controls let active_window = activate_any_workspace_window(&mut cx); if active_window.is_none() { - cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx))? + cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx)) .await?; } activate_any_workspace_window(&mut cx).context("could not open zed") @@ -7897,8 +7892,6 @@ fn activate_any_workspace_window(cx: &mut AsyncApp) -> Option Vec> { @@ -7972,7 +7965,7 @@ pub fn open_paths( } } } - })?; + }); if open_options.open_new_workspace.is_none() && (existing.is_none() || open_options.prefer_focused_window) @@ -8002,7 +7995,7 @@ pub fn open_paths( break; } } - })?; + }); } } @@ -8042,7 +8035,7 @@ pub fn open_paths( None, cx, ) - })? + }) .await }; @@ -8159,7 +8152,7 @@ pub fn open_remote_project_with_new_connection( delegate, cx, ) - })? + }) .await? { Some(result) => result, @@ -8177,7 +8170,7 @@ pub fn open_remote_project_with_new_connection( true, cx, ) - })?; + }); open_remote_project_inner( project, @@ -8244,7 +8237,7 @@ async fn open_remote_project_inner( }; this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx) - })? + }) .await; } let mut project_paths_to_open = vec![]; @@ -8252,7 +8245,7 @@ async fn open_remote_project_inner( for path in paths { let result = cx - .update(|cx| Workspace::project_path_for_path(project.clone(), &path, true, cx))? + .update(|cx| Workspace::project_path_for_path(project.clone(), &path, true, cx)) .await; match result { Ok((_, project_path)) => { @@ -8369,9 +8362,9 @@ pub fn join_in_room_project( let workspace = if let Some(existing_workspace) = existing_workspace { existing_workspace } else { - let active_call = cx.update(|cx| ActiveCall::global(cx))?; + let active_call = cx.update(|cx| ActiveCall::global(cx)); let room = active_call - .read_with(cx, |call, _| call.room().cloned())? + .read_with(cx, |call, _| call.room().cloned()) .context("not in a call")?; let project = room .update(cx, |room, cx| { @@ -8381,7 +8374,7 @@ pub fn join_in_room_project( app_state.fs.clone(), cx, ) - })? + }) .await?; let window_bounds_override = window_bounds_env_override(); @@ -8393,7 +8386,7 @@ pub fn join_in_room_project( Workspace::new(Default::default(), project, app_state.clone(), window, cx) }) }) - })?? + })? }; workspace.update(cx, |workspace, window, cx| { @@ -8459,7 +8452,7 @@ pub fn reload(cx: &mut App) { if let Some(prompt) = prompt { let answer = prompt.await?; if answer != 0 { - return Ok(()); + return anyhow::Ok(()); } } @@ -8469,10 +8462,11 @@ pub fn reload(cx: &mut App) { workspace.prepare_to_close(CloseIntent::Quit, window, cx) }) && !should_close.await? 
{ - return Ok(()); + return anyhow::Ok(()); } } - cx.update(|cx| cx.restart()) + cx.update(|cx| cx.restart()); + anyhow::Ok(()) }) .detach_and_log_err(cx); } @@ -11600,7 +11594,7 @@ mod tests { cx: &mut App, ) -> Option>>> { if path.path.extension().unwrap() == "png" { - Some(cx.spawn(async move |cx| cx.new(|_| TestPngItem {}))) + Some(cx.spawn(async move |cx| Ok(cx.new(|_| TestPngItem {})))) } else { None } @@ -11675,7 +11669,7 @@ mod tests { cx: &mut App, ) -> Option>>> { if path.path.extension().unwrap() == "ipynb" { - Some(cx.spawn(async move |cx| cx.new(|_| TestIpynbItem {}))) + Some(cx.spawn(async move |cx| Ok(cx.new(|_| TestIpynbItem {})))) } else { None } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 55282632a1172ba87f02f946046c548ccaf9b342..2f8b618494fe8a206d0d62f8d0683fbff0738280 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -399,7 +399,7 @@ impl Worktree { None }; - cx.new(move |cx: &mut Context| { + Ok(cx.new(move |cx: &mut Context| { let mut snapshot = LocalSnapshot { ignores_by_parent_abs_path: Default::default(), global_gitignore: Default::default(), @@ -478,7 +478,7 @@ impl Worktree { }; worktree.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx); Worktree::Local(worktree) - }) + })) } pub fn remote( @@ -931,7 +931,7 @@ impl Worktree { cx, ), )) - })??; + })?; Ok(proto::ProjectEntryResponse { entry: match &entry.await? { CreatedEntry::Included(entry) => Some(entry.into()), @@ -955,8 +955,9 @@ impl Worktree { cx, ), ) - })?; - task.context("invalid entry")?.await?; + }); + task.ok_or_else(|| anyhow::anyhow!("invalid entry"))? + .await?; Ok(proto::ProjectEntryResponse { entry: None, worktree_scan_id: scan_id as u64, @@ -970,9 +971,10 @@ impl Worktree { ) -> Result { let task = this.update(&mut cx, |this, cx| { this.expand_entry(ProjectEntryId::from_proto(request.entry_id), cx) - })?; - task.context("no such entry")?.await?; - let scan_id = this.read_with(&cx, |this, _| this.scan_id())?; + }); + task.ok_or_else(|| anyhow::anyhow!("no such entry"))? + .await?; + let scan_id = this.read_with(&cx, |this, _| this.scan_id()); Ok(proto::ExpandProjectEntryResponse { worktree_scan_id: scan_id as u64, }) @@ -985,9 +987,10 @@ impl Worktree { ) -> Result { let task = this.update(&mut cx, |this, cx| { this.expand_all_for_entry(ProjectEntryId::from_proto(request.entry_id), cx) - })?; - task.context("no such entry")?.await?; - let scan_id = this.read_with(&cx, |this, _| this.scan_id())?; + }); + task.ok_or_else(|| anyhow::anyhow!("no such entry"))? 
+ .await?; + let scan_id = this.read_with(&cx, |this, _| this.scan_id()); Ok(proto::ExpandAllForProjectEntryResponse { worktree_scan_id: scan_id as u64, }) @@ -1137,8 +1140,7 @@ impl LocalWorktree { this.update_abs_path_and_refresh(new_path, cx); } } - }) - .ok(); + }); } }); self._background_scanner_tasks = vec![background_scanner, scan_state_updater]; @@ -1705,7 +1707,7 @@ impl LocalWorktree { .refresh_entries_for_paths(paths_to_refresh.clone()), ) }, - )??; + )?; cx.background_spawn(async move { refresh.next().await; @@ -1715,12 +1717,12 @@ impl LocalWorktree { .log_err(); let this = this.upgrade().with_context(|| "Dropped worktree")?; - cx.read_entity(&this, |this, _| { + Ok(cx.read_entity(&this, |this, _| { paths_to_refresh .iter() .filter_map(|path| Some(this.entry_for_path(path)?.id)) .collect() - }) + })) }) } diff --git a/crates/worktree_benchmarks/src/main.rs b/crates/worktree_benchmarks/src/main.rs index c1b76f9e3c483ec6c989cc255a11c5320d4b49f7..028c38fd583d344683ece9b34088c320494a8648 100644 --- a/crates/worktree_benchmarks/src/main.rs +++ b/crates/worktree_benchmarks/src/main.rs @@ -31,15 +31,13 @@ fn main() { ) .await .expect("Worktree initialization to succeed"); - let did_finish_scan = worktree - .update(cx, |this, _| this.as_local().unwrap().scan_complete()) - .unwrap(); + let did_finish_scan = + worktree.update(cx, |this, _| this.as_local().unwrap().scan_complete()); let start = std::time::Instant::now(); did_finish_scan.await; let elapsed = start.elapsed(); - let (files, directories) = worktree - .read_with(cx, |this, _| (this.file_count(), this.dir_count())) - .unwrap(); + let (files, directories) = + worktree.read_with(cx, |this, _| (this.file_count(), this.dir_count())); println!( "{:?} for {directories} directories and {files} files", elapsed diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 06dd2d282203e88739036aa192b0c0a4fb0c039b..99c97d1d45570d680356eab86b8891c9830d978a 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -128,7 +128,7 @@ fn files_not_created_on_launch(errors: HashMap>) { } fn fail_to_open_window_async(e: anyhow::Error, cx: &mut AsyncApp) { - cx.update(|cx| fail_to_open_window(e, cx)).log_err(); + cx.update(|cx| fail_to_open_window(e, cx)); } fn fail_to_open_window(e: anyhow::Error, _cx: &mut App) { @@ -797,8 +797,7 @@ fn main() { if let Some(request) = OpenRequest::parse(urls, cx).log_err() { handle_open_request(request, app_state.clone(), cx); } - }) - .ok(); + }); } }) .detach(); @@ -873,7 +872,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut history_store .update(&mut cx.clone(), |store, cx| { store.save_thread(session_id.clone(), db_thread, cx) - })? + }) .await?; let thread_metadata = agent::DbThreadMetadata { @@ -1125,7 +1124,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut None, cx, ) - })? 
+ }) .await?; } @@ -1223,9 +1222,8 @@ async fn installation_id() -> Result { async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp) -> Result<()> { if let Some(locations) = restorable_workspace_locations(cx, &app_state).await { - let use_system_window_tabs = cx - .update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs) - .unwrap_or(false); + let use_system_window_tabs = + cx.update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs); let mut results: Vec> = Vec::new(); let mut tasks = Vec::new(); @@ -1241,7 +1239,7 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp workspace::OpenOptions::default(), cx, ) - })?; + }); open_task.await.map(|_| ()) }); @@ -1259,7 +1257,7 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp cx.update(|cx| { RemoteSettings::get_global(cx) .fill_connection_options_from_settings(options) - })?; + }); } let task = cx.spawn(async move |cx| { recent_projects::open_remote_project( @@ -1300,24 +1298,20 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp }; // Try to find an active workspace to show the toast - let toast_shown = cx - .update(|cx| { - if let Some(window) = cx.active_window() - && let Some(workspace) = window.downcast::() - { - workspace - .update(cx, |workspace, _, cx| { - workspace.show_toast( - Toast::new(NotificationId::unique::<()>(), message), - cx, - ) - }) - .ok(); - return true; - } - false - }) - .unwrap_or(false); + let toast_shown = cx.update(|cx| { + if let Some(window) = cx.active_window() + && let Some(workspace) = window.downcast::() + { + workspace + .update(cx, |workspace, _, cx| { + workspace + .show_toast(Toast::new(NotificationId::unique::<()>(), message), cx) + }) + .ok(); + return true; + } + false + }); // If we couldn't show a toast (no windows opened successfully), // we've already logged the errors above, so the user can check logs @@ -1328,7 +1322,7 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp } } } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { - cx.update(|cx| show_onboarding_view(app_state, cx))?.await?; + cx.update(|cx| show_onboarding_view(app_state, cx)).await?; } else { cx.update(|cx| { workspace::open_new( @@ -1345,7 +1339,7 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp } }, ) - })? 
+ }) .await?; } @@ -1356,21 +1350,17 @@ pub(crate) async fn restorable_workspace_locations( cx: &mut AsyncApp, app_state: &Arc, ) -> Option> { - let mut restore_behavior = cx - .update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup) - .ok()?; + let mut restore_behavior = cx.update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup); let session_handle = app_state.session.clone(); - let (last_session_id, last_session_window_stack) = cx - .update(|cx| { - let session = session_handle.read(cx); + let (last_session_id, last_session_window_stack) = cx.update(|cx| { + let session = session_handle.read(cx); - ( - session.last_session_id().map(|id| id.to_string()), - session.last_session_window_stack(), - ) - }) - .ok()?; + ( + session.last_session_id().map(|id| id.to_string()), + session.last_session_window_stack(), + ) + }); if last_session_id.is_none() && matches!( @@ -1582,27 +1572,26 @@ fn load_user_themes_in_background(fs: Arc, cx: &mut App) { cx.spawn({ let fs = fs.clone(); async move |cx| { - if let Some(theme_registry) = cx.update(|cx| ThemeRegistry::global(cx)).log_err() { - let themes_dir = paths::themes_dir().as_ref(); - match fs - .metadata(themes_dir) - .await - .ok() - .flatten() - .map(|m| m.is_dir) - { - Some(is_dir) => { - anyhow::ensure!(is_dir, "Themes dir path {themes_dir:?} is not a directory") - } - None => { - fs.create_dir(themes_dir).await.with_context(|| { - format!("Failed to create themes dir at path {themes_dir:?}") - })?; - } + let theme_registry = cx.update(|cx| ThemeRegistry::global(cx)); + let themes_dir = paths::themes_dir().as_ref(); + match fs + .metadata(themes_dir) + .await + .ok() + .flatten() + .map(|m| m.is_dir) + { + Some(is_dir) => { + anyhow::ensure!(is_dir, "Themes dir path {themes_dir:?} is not a directory") + } + None => { + fs.create_dir(themes_dir).await.with_context(|| { + format!("Failed to create themes dir at path {themes_dir:?}") + })?; } - theme_registry.load_user_themes(themes_dir, fs).await?; - cx.update(GlobalTheme::reload_theme)?; } + theme_registry.load_user_themes(themes_dir, fs).await?; + cx.update(GlobalTheme::reload_theme); anyhow::Ok(()) } }) @@ -1619,15 +1608,16 @@ fn watch_themes(fs: Arc, cx: &mut App) { while let Some(paths) = events.next().await { for event in paths { - if fs.metadata(&event.path).await.ok().flatten().is_some() - && let Some(theme_registry) = - cx.update(|cx| ThemeRegistry::global(cx)).log_err() - && let Some(()) = theme_registry + if fs.metadata(&event.path).await.ok().flatten().is_some() { + let theme_registry = cx.update(|cx| ThemeRegistry::global(cx)); + if theme_registry .load_user_theme(&event.path, fs.clone()) .await .log_err() - { - cx.update(GlobalTheme::reload_theme).log_err(); + .is_some() + { + cx.update(GlobalTheme::reload_theme); + } } } } diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 0c65c59c362029ae4bab1f79e1f14f458b927670..537f40e9259984f460839465f5fcb07e0c65c189 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -92,12 +92,13 @@ impl TestWorkspace { false, cx, ) - })?; + }); - let add_worktree_task = project.update(cx, |project, cx| { - project.find_or_create_worktree(project_path, true, cx) - })?; - add_worktree_task.await?; + project + .update(cx, |project, cx| { + project.find_or_create_worktree(project_path, true, cx) + }) + .await?; let bounds = Bounds { origin: point(px(0.0), px(0.0)), @@ -118,7 +119,7 @@ impl TestWorkspace { }) }, ) - })??; + })?; cx.background_executor() 
.timer(std::time::Duration::from_millis(100)) @@ -257,7 +258,7 @@ fn main() { Ok(ws) => ws, Err(e) => { log::error!("Failed to create workspace: {}", e); - cx.update(|cx| cx.quit()).ok(); + cx.update(|cx| cx.quit()); std::process::exit(1); } }; @@ -265,19 +266,19 @@ fn main() { // Set up project panel if let Err(e) = setup_project_panel(&workspace, &mut cx).await { log::error!("Failed to setup project panel: {}", e); - cx.update(|cx| cx.quit()).ok(); + cx.update(|cx| cx.quit()); std::process::exit(1); } // Open main.rs in the editor if let Err(e) = open_file(&workspace, "src/main.rs", &mut cx).await { log::error!("Failed to open file: {}", e); - cx.update(|cx| cx.quit()).ok(); + cx.update(|cx| cx.quit()); std::process::exit(1); } // Request a window refresh to ensure all pending effects are processed - cx.refresh().ok(); + cx.refresh(); cx.background_executor() .timer(std::time::Duration::from_millis(500)) .await; @@ -306,11 +307,10 @@ fn main() { ws.close_panel::(window, cx); }) .ok(); - }) - .ok(); + }); // Refresh and wait for panel to close - cx.refresh().ok(); + cx.refresh(); cx.background_executor() .timer(std::time::Duration::from_millis(100)) .await; @@ -341,11 +341,11 @@ fn main() { } if any_failed { - cx.update(|cx| cx.quit()).ok(); + cx.update(|cx| cx.quit()); std::process::exit(1); } - cx.update(|cx| cx.quit()).ok(); + cx.update(|cx| cx.quit()); }) .detach(); }); @@ -371,7 +371,7 @@ async fn run_visual_test( update_baseline: bool, ) -> Result { // Capture the screenshot using direct texture capture (no ScreenCaptureKit needed) - let screenshot = cx.update(|cx| capture_screenshot(window, cx))??; + let screenshot = cx.update(|cx| capture_screenshot(window, cx))?; // Get paths let baseline_path = get_baseline_path(test_name); @@ -731,27 +731,28 @@ async fn run_agent_thread_view_test( false, cx, ) - })?; + }); // Add the test directory as a worktree - let add_worktree_task = project.update(cx, |project, cx| { - project.find_or_create_worktree(&project_path, true, cx) - })?; - let (worktree, _) = add_worktree_task.await?; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(&project_path, true, cx) + }) + .await?; // Wait for worktree to scan and find the image file - let worktree_name = worktree.read_with(cx, |wt, _| wt.root_name_str().to_string())?; + let worktree_name = worktree.read_with(cx, |wt, _| wt.root_name_str().to_string()); cx.background_executor() .timer(std::time::Duration::from_millis(100)) .await; // Create the necessary entities for the ReadFileTool - let action_log = cx.new(|_| action_log::ActionLog::new(project.clone()))?; - let context_server_registry = cx - .new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx))?; + let action_log = cx.new(|_| action_log::ActionLog::new(project.clone())); + let context_server_registry = + cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let fake_model = Arc::new(language_model::fake_provider::FakeLanguageModel::default()); - let project_context = cx.new(|_| prompt_store::ProjectContext::default())?; + let project_context = cx.new(|_| prompt_store::ProjectContext::default()); // Create the agent Thread let thread = cx.new(|cx| { @@ -763,7 +764,7 @@ async fn run_agent_thread_view_test( Some(fake_model), cx, ) - })?; + }); // Create the ReadFileTool let tool = Arc::new(agent::ReadFileTool::new( @@ -782,10 +783,9 @@ async fn run_agent_thread_view_test( start_line: None, end_line: None, }; - let run_task = 
cx.update(|cx| tool.clone().run(input, event_stream, cx))?; - // The tool runs async - wait for it - run_task.await?; + cx.update(|cx| tool.clone().run(input, event_stream, cx)) + .await?; // Collect the events from the tool execution let mut tool_content: Vec = Vec::new(); @@ -845,7 +845,7 @@ async fn run_agent_thread_view_test( cx.new(|cx| Workspace::new(None, project.clone(), app_state.clone(), window, cx)) }, ) - })??; + })?; cx.background_executor() .timer(std::time::Duration::from_millis(100)) @@ -873,7 +873,7 @@ async fn run_agent_thread_view_test( // Inject the stub server and open the stub thread workspace_window.update(cx, |_workspace, window, cx| { - panel.update(cx, |panel, cx| { + panel.update(cx, |panel: &mut AgentPanel, cx| { panel.open_external_thread_with_server(stub_agent.clone(), window, cx); }); })?; @@ -884,16 +884,20 @@ async fn run_agent_thread_view_test( // Get the thread view and send a message let thread_view = panel - .read_with(cx, |panel, _| panel.active_thread_view_for_tests().cloned())? + .read_with(cx, |panel, _| panel.active_thread_view_for_tests().cloned()) .ok_or_else(|| anyhow::anyhow!("No active thread view"))?; let thread = thread_view - .update(cx, |view, _cx| view.thread().cloned())? + .update(cx, |view: &mut agent_ui::acp::AcpThreadView, _cx| { + view.thread().cloned() + }) .ok_or_else(|| anyhow::anyhow!("Thread not available"))?; // Send the message to trigger the image response thread - .update(cx, |thread, cx| thread.send_raw("Show me the Zed logo", cx))? + .update(cx, |thread: &mut acp_thread::AcpThread, cx| { + thread.send_raw("Show me the Zed logo", cx) + }) .await?; cx.background_executor() @@ -902,7 +906,7 @@ async fn run_agent_thread_view_test( // Get the tool call ID for expanding later let tool_call_id = thread - .update(cx, |thread, _cx| { + .update(cx, |thread: &mut acp_thread::AcpThread, _cx| { thread.entries().iter().find_map(|entry| { if let acp_thread::AgentThreadEntry::ToolCall(tool_call) = entry { Some(tool_call.id.clone()) @@ -910,7 +914,7 @@ async fn run_agent_thread_view_test( None } }) - })? 
+ }) .ok_or_else(|| anyhow::anyhow!("Expected a ToolCall entry in thread for visual test"))?; // Refresh window for collapsed state @@ -935,9 +939,9 @@ async fn run_agent_thread_view_test( .await?; // Now expand the tool call so its content (the image) is visible - thread_view.update(cx, |view, cx| { + thread_view.update(cx, |view: &mut agent_ui::acp::AcpThreadView, cx| { view.expand_tool_call(tool_call_id, cx); - })?; + }); cx.background_executor() .timer(std::time::Duration::from_millis(100)) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 85d144f7714fdf62e7cd1557fb3b7ed9ef3603b2..3586a931a7d5edeeab2fa365e106c2d618571aff 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -34,8 +34,8 @@ use git_ui::project_diff::ProjectDiffToolbar; use gpui::{ Action, App, AppContext as _, AsyncWindowContext, Context, DismissEvent, Element, Entity, Focusable, KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, - Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowKind, WindowOptions, actions, - image_cache, point, px, retain_all, + Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowHandle, WindowKind, + WindowOptions, actions, image_cache, point, px, retain_all, }; use image_viewer::ImageInfo; use language::Capability; @@ -159,11 +159,7 @@ pub fn init(cx: &mut App) { cx.on_action(|_: &RestoreBanner, cx| title_bar::restore_banner(cx)); let flag = cx.wait_for_flag::(); cx.spawn(async |cx| { - if cx - .update(|cx| ReleaseChannel::global(cx) == ReleaseChannel::Dev) - .unwrap_or_default() - || flag.await - { + if cx.update(|cx| ReleaseChannel::global(cx) == ReleaseChannel::Dev) || flag.await { cx.update(|cx| { cx.on_action(|_: &TestPanic, _| panic!("Ran the TestPanic action")) .on_action(|_: &TestCrash, _| { @@ -174,8 +170,7 @@ pub fn init(cx: &mut App) { puts(0xabad1d3a as *const i8); } }); - }) - .ok(); + }); }; }) .detach(); @@ -559,8 +554,7 @@ fn initialize_file_watcher(window: &mut Window, cx: &mut Context) { cx.update(|cx| { cx.open_url("https://zed.dev/docs/linux#could-not-start-inotify"); cx.quit(); - }) - .ok(); + }); } }) .detach() @@ -590,8 +584,7 @@ fn initialize_file_watcher(window: &mut Window, cx: &mut Context) { cx.update(|cx| { cx.open_url("https://zed.dev/docs/windows"); cx.quit() - }) - .ok(); + }); } }) .detach() @@ -641,8 +634,7 @@ fn show_software_emulation_warning_if_needed( cx.update(|cx| { cx.open_url(open_url); cx.quit(); - }) - .ok(); + }); } }) .detach() @@ -1282,8 +1274,7 @@ fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context) { let content = format!("{}\n{}", message, detail.as_deref().unwrap_or("")); cx.update(|cx| { cx.write_to_clipboard(gpui::ClipboardItem::new_string(content)); - }) - .ok(); + }); } }) .detach(); @@ -1307,19 +1298,18 @@ fn quit(_: &Quit, cx: &mut App) { let should_confirm = WorkspaceSettings::get_global(cx).confirm_quit; cx.spawn(async move |cx| { - let mut workspace_windows = cx.update(|cx| { + let mut workspace_windows: Vec> = cx.update(|cx| { cx.windows() .into_iter() .filter_map(|window| window.downcast::()) .collect::>() - })?; + }); // If multiple windows have unsaved changes, and need a save prompt, // prompt in the active window before switching to a different window. 
cx.update(|cx| { workspace_windows.sort_by_key(|window| window.is_active(cx) == Some(false)); - }) - .log_err(); + }); if should_confirm && let Some(workspace) = workspace_windows.first() { let answer = workspace @@ -1351,12 +1341,13 @@ fn quit(_: &Quit, cx: &mut App) { workspace.prepare_to_close(CloseIntent::Quit, window, cx) }) .log_err() - && !should_close.await? { - return Ok(()); + if !should_close.await? { + return Ok(()); + } } } - cx.update(|cx| cx.quit())?; + cx.update(|cx| cx.quit()); anyhow::Ok(()) }) .detach_and_log_err(cx); @@ -1565,7 +1556,7 @@ pub fn handle_settings_file_changes( Either::Right(content) => (content, true), }; - let result = cx.update_global(|store: &mut SettingsStore, cx| { + cx.update_global(|store: &mut SettingsStore, cx| { let result = if is_user { store.set_user_settings(&content, cx) } else { @@ -1584,10 +1575,6 @@ pub fn handle_settings_file_changes( } cx.refresh_windows(); }); - - if result.is_err() { - break; // App dropped - } } }) .detach(); @@ -1699,8 +1686,7 @@ pub fn handle_keymap_file_changes( show_keymap_file_json_error(notification_id.clone(), &error, cx) } } - }) - .ok(); + }); } }) .detach(); @@ -1791,8 +1777,7 @@ fn show_markdown_app_notification( .primary_on_click_arc(primary_button_on_click) }) }) - }) - .ok(); + }); }) .detach(); } @@ -1929,9 +1914,9 @@ fn open_local_file( let file_exists = { let full_path = worktree.read_with(cx, |tree, _| { tree.abs_path().join(settings_relative_path.as_std_path()) - })?; + }); - let fs = project.read_with(cx, |project, _| project.fs().clone())?; + let fs = project.read_with(cx, |project, _| project.fs().clone()); fs.metadata(&full_path) .await @@ -1942,23 +1927,23 @@ fn open_local_file( if !file_exists { if let Some(dir_path) = settings_relative_path.parent() - && worktree.read_with(cx, |tree, _| tree.entry_for_path(dir_path).is_none())? + && worktree.read_with(cx, |tree, _| tree.entry_for_path(dir_path).is_none()) { project .update(cx, |project, cx| { project.create_entry((tree_id, dir_path), true, cx) - })? + }) .await .context("worktree was removed")?; } if worktree.read_with(cx, |tree, _| { tree.entry_for_path(settings_relative_path).is_none() - })? { + }) { project .update(cx, |project, cx| { project.create_entry((tree_id, settings_relative_path), false, cx) - })? + }) .await .context("worktree was removed")?; } diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index dc3ab74a70ae5b0a33d1fff7e01e3a0a208e34f2..6e92e5042324428375a2a35cb829e74581e28a6e 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -341,7 +341,7 @@ pub async fn open_paths_with_positions( .collect::>(); let (workspace, mut items) = cx - .update(|cx| workspace::open_paths(&paths, app_state, open_options, cx))? 
+ .update(|cx| workspace::open_paths(&paths, app_state, open_options, cx)) .await?; for diff_pair in diff_paths { @@ -349,9 +349,10 @@ pub async fn open_paths_with_positions( let new_path = Path::new(&diff_pair[1]).canonicalize()?; if let Ok(diff_view) = workspace.update(cx, |workspace, window, cx| { FileDiffView::open(old_path, new_path, workspace, window, cx) - }) && let Some(diff_view) = diff_view.await.log_err() - { - items.push(Some(Ok(Box::new(diff_view)))) + }) { + if let Some(diff_view) = diff_view.await.log_err() { + items.push(Some(Ok(Box::new(diff_view)))) + } } } @@ -421,8 +422,7 @@ pub async fn handle_cli_connection( responses.send(CliResponse::Exit { status: 1 }).log_err(); } }; - }) - .log_err(); + }); return; } @@ -476,8 +476,7 @@ async fn open_workspaces( if grouped_locations.is_empty() { // If we have no paths to open, show the welcome screen if this is the first launch if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { - cx.update(|cx| show_onboarding_view(app_state, cx).detach()) - .log_err(); + cx.update(|cx| show_onboarding_view(app_state, cx).detach()); } // If not the first launch, show an empty window with empty editor else { @@ -490,8 +489,7 @@ async fn open_workspaces( Editor::new_file(workspace, &Default::default(), window, cx) }) .detach(); - }) - .log_err(); + }); } } else { // If there are paths to open, open a workspace for each grouping of paths @@ -529,7 +527,7 @@ async fn open_workspaces( cx.update(|cx| { RemoteSettings::get_global(cx) .fill_connection_options_from_settings(options) - })?; + }); } cx.spawn(async move |cx| { open_remote_project( @@ -571,9 +569,7 @@ async fn open_local_workspace( let (open_new_workspace, replace_window) = if reuse { ( Some(true), - cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next()) - .ok() - .flatten(), + cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next()), ) } else { (open_new_workspace, None) @@ -637,14 +633,14 @@ async fn open_local_workspace( if wait { let (release_tx, release_rx) = oneshot::channel(); item_release_futures.push(release_rx); - subscriptions.push(cx.update(|cx| { + subscriptions.push(Ok(cx.update(|cx| { item.on_release( cx, Box::new(move |_| { release_tx.send(()).ok(); }), ) - })); + }))); } } Some(Err(err)) => {