Detailed changes
@@ -375,18 +375,16 @@ impl ToolCall {
})
.ok()??;
let buffer = buffer.await.log_err()?;
- let position = buffer
- .update(cx, |buffer, _| {
- let snapshot = buffer.snapshot();
- if let Some(row) = location.line {
- let column = snapshot.indent_size_for_line(row).len;
- let point = snapshot.clip_point(Point::new(row, column), Bias::Left);
- snapshot.anchor_before(point)
- } else {
- Anchor::min_for_buffer(snapshot.remote_id())
- }
- })
- .ok()?;
+ let position = buffer.update(cx, |buffer, _| {
+ let snapshot = buffer.snapshot();
+ if let Some(row) = location.line {
+ let column = snapshot.indent_size_for_line(row).len;
+ let point = snapshot.clip_point(Point::new(row, column), Bias::Left);
+ snapshot.anchor_before(point)
+ } else {
+ Anchor::min_for_buffer(snapshot.remote_id())
+ }
+ });
Some(ResolvedLocation { buffer, position })
}
@@ -1803,7 +1801,7 @@ impl AcpThread {
.ok();
let old_checkpoint = git_store
- .update(cx, |git, cx| git.checkpoint(cx))?
+ .update(cx, |git, cx| git.checkpoint(cx))
.await
.context("failed to get old checkpoint")
.log_err();
@@ -1983,7 +1981,7 @@ impl AcpThread {
rewind.await?;
if let Some(checkpoint) = checkpoint {
git_store
- .update(cx, |git, cx| git.restore_checkpoint(checkpoint, cx))?
+ .update(cx, |git, cx| git.restore_checkpoint(checkpoint, cx))
.await?;
}
@@ -2001,7 +1999,7 @@ impl AcpThread {
let telemetry = ActionLogTelemetry::from(&*self);
cx.spawn(async move |this, cx| {
- cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
+ cx.update(|cx| truncate.run(id.clone(), cx)).await?;
this.update(cx, |this, cx| {
if let Some((ix, _)) = this.user_message_mut(&id) {
// Collect all terminals from entries that will be removed
@@ -2060,7 +2058,7 @@ impl AcpThread {
let equal = git_store
.update(cx, |git, cx| {
git.compare_checkpoints(old_checkpoint.clone(), new_checkpoint, cx)
- })?
+ })
.await
.unwrap_or(true);
@@ -2119,17 +2117,14 @@ impl AcpThread {
let project = self.project.clone();
let action_log = self.action_log.clone();
cx.spawn(async move |this, cx| {
- let load = project
- .update(cx, |project, cx| {
- let path = project
- .project_path_for_absolute_path(&path, cx)
- .ok_or_else(|| {
- acp::Error::resource_not_found(Some(path.display().to_string()))
- })?;
- Ok(project.open_buffer(path, cx))
- })
- .map_err(|e| acp::Error::internal_error().data(e.to_string()))
- .flatten()?;
+ let load = project.update(cx, |project, cx| {
+ let path = project
+ .project_path_for_absolute_path(&path, cx)
+ .ok_or_else(|| {
+ acp::Error::resource_not_found(Some(path.display().to_string()))
+ })?;
+ Ok::<_, acp::Error>(project.open_buffer(path, cx))
+ })?;
let buffer = load.await?;
@@ -2148,9 +2143,9 @@ impl AcpThread {
} else {
action_log.update(cx, |action_log, cx| {
action_log.buffer_read(buffer.clone(), cx);
- })?;
+ });
- let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
+ let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
this.update(cx, |this, _| {
this.shared_buffers.insert(buffer.clone(), snapshot.clone());
})?;
@@ -2179,7 +2174,7 @@ impl AcpThread {
}),
cx,
);
- })?;
+ });
Ok(snapshot.text_for_range(start..end).collect::<String>())
})
@@ -2200,7 +2195,7 @@ impl AcpThread {
.context("invalid path")?;
anyhow::Ok(project.open_buffer(path, cx))
});
- let buffer = load??.await?;
+ let buffer = load?.await?;
let snapshot = this.update(cx, |this, cx| {
this.shared_buffers
.get(&buffer)
@@ -2235,7 +2230,7 @@ impl AcpThread {
}),
cx,
);
- })?;
+ });
let format_on_save = cx.update(|cx| {
action_log.update(cx, |action_log, cx| {
@@ -2257,7 +2252,7 @@ impl AcpThread {
action_log.buffer_edited(buffer.clone(), cx);
});
format_on_save
- })?;
+ });
if format_on_save {
let format_task = project.update(cx, |project, cx| {
@@ -2268,16 +2263,16 @@ impl AcpThread {
FormatTrigger::Save,
cx,
)
- })?;
+ });
format_task.await.log_err();
action_log.update(cx, |action_log, cx| {
action_log.buffer_edited(buffer.clone(), cx);
- })?;
+ });
}
project
- .update(cx, |project, cx| project.save_buffer(buffer, cx))?
+ .update(cx, |project, cx| project.save_buffer(buffer, cx))
.await
})
}
@@ -2323,7 +2318,7 @@ impl AcpThread {
project
.remote_client()
.and_then(|r| r.read(cx).default_system_shell())
- })?
+ })
.unwrap_or_else(|| get_default_system_shell_preferring_bash());
let (task_command, task_args) =
ShellBuilder::new(&Shell::Program(shell), is_windows)
@@ -2341,10 +2336,10 @@ impl AcpThread {
},
cx,
)
- })?
+ })
.await?;
- cx.new(|cx| {
+ anyhow::Ok(cx.new(|cx| {
Terminal::new(
terminal_id,
&format!("{} {}", command, args.join(" ")),
@@ -2354,7 +2349,7 @@ impl AcpThread {
language_registry,
cx,
)
- })
+ }))
}
});
@@ -35,7 +35,7 @@ impl Diff {
.await
.log_err();
- buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?;
+ buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx));
let diff = build_buffer_diff(
old_text.unwrap_or("".into()).into(),
@@ -45,31 +45,29 @@ impl Diff {
)
.await?;
- multibuffer
- .update(cx, |multibuffer, cx| {
- let hunk_ranges = {
- let buffer = buffer.read(cx);
- diff.read(cx)
- .snapshot(cx)
- .hunks_intersecting_range(
- Anchor::min_for_buffer(buffer.remote_id())
- ..Anchor::max_for_buffer(buffer.remote_id()),
- buffer,
- )
- .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
- .collect::<Vec<_>>()
- };
-
- multibuffer.set_excerpts_for_path(
- PathKey::for_buffer(&buffer, cx),
- buffer.clone(),
- hunk_ranges,
- multibuffer_context_lines(cx),
- cx,
- );
- multibuffer.add_diff(diff, cx);
- })
- .log_err();
+ multibuffer.update(cx, |multibuffer, cx| {
+ let hunk_ranges = {
+ let buffer = buffer.read(cx);
+ diff.read(cx)
+ .snapshot(cx)
+ .hunks_intersecting_range(
+ Anchor::min_for_buffer(buffer.remote_id())
+ ..Anchor::max_for_buffer(buffer.remote_id()),
+ buffer,
+ )
+ .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
+ .collect::<Vec<_>>()
+ };
+
+ multibuffer.set_excerpts_for_path(
+ PathKey::for_buffer(&buffer, cx),
+ buffer.clone(),
+ hunk_ranges,
+ multibuffer_context_lines(cx),
+ cx,
+ );
+ multibuffer.add_diff(diff, cx);
+ });
anyhow::Ok(())
}
@@ -206,8 +204,8 @@ impl PendingDiff {
let buffer_diff = self.diff.clone();
let base_text = self.base_text.clone();
self.update_diff = cx.spawn(async move |diff, cx| {
- let text_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot())?;
- let language = buffer.read_with(cx, |buffer, _| buffer.language().cloned())?;
+ let text_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());
+ let language = buffer.read_with(cx, |buffer, _| buffer.language().cloned());
let update = buffer_diff
.update(cx, |diff, cx| {
diff.update_diff(
@@ -217,7 +215,7 @@ impl PendingDiff {
language,
cx,
)
- })?
+ })
.await;
let (task1, task2) = buffer_diff.update(cx, |diff, cx| {
let task1 = diff.set_snapshot(update.clone(), &text_snapshot, cx);
@@ -226,7 +224,7 @@ impl PendingDiff {
.unwrap()
.update(cx, |diff, cx| diff.set_snapshot(update, &text_snapshot, cx));
(task1, task2)
- })?;
+ });
task1.await;
task2.await;
diff.update(cx, |diff, cx| {
@@ -374,36 +372,37 @@ async fn build_buffer_diff(
language_registry: Option<Arc<LanguageRegistry>>,
cx: &mut AsyncApp,
) -> Result<Entity<BufferDiff>> {
- let language = cx.update(|cx| buffer.read(cx).language().cloned())?;
- let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
+ let language = cx.update(|cx| buffer.read(cx).language().cloned());
+ let text_snapshot = cx.update(|cx| buffer.read(cx).text_snapshot());
+ let buffer = cx.update(|cx| buffer.read(cx).snapshot());
- let secondary_diff = cx.new(|cx| BufferDiff::new(&buffer, cx))?;
+ let secondary_diff = cx.new(|cx| BufferDiff::new(&buffer, cx));
let update = secondary_diff
.update(cx, |secondary_diff, cx| {
secondary_diff.update_diff(
- buffer.text.clone(),
+ text_snapshot.clone(),
Some(old_text),
true,
language.clone(),
cx,
)
- })?
+ })
.await;
secondary_diff
.update(cx, |secondary_diff, cx| {
secondary_diff.language_changed(language.clone(), language_registry.clone(), cx);
secondary_diff.set_snapshot(update.clone(), &buffer, cx)
- })?
+ })
.await;
- let diff = cx.new(|cx| BufferDiff::new(&buffer, cx))?;
+ let diff = cx.new(|cx| BufferDiff::new(&buffer, cx));
diff.update(cx, |diff, cx| {
diff.language_changed(language, language_registry, cx);
diff.set_secondary_diff(secondary_diff);
diff.set_snapshot(update.clone(), &buffer, cx)
- })?
+ })
.await;
Ok(diff)
}
@@ -205,7 +205,7 @@ pub async fn create_terminal_entity(
project.environment().update(cx, |env, cx| {
env.directory_environment(dir.clone().into(), cx)
})
- })?
+ })
.await
.unwrap_or_default()
} else {
@@ -225,11 +225,9 @@ pub async fn create_terminal_entity(
.remote_client()
.and_then(|r| r.read(cx).default_system_shell())
.map(Shell::Program)
- })?
+ })
.unwrap_or_else(|| Shell::Program(get_default_system_shell_preferring_bash()));
- let is_windows = project
- .read_with(cx, |project, cx| project.path_style(cx).is_windows())
- .unwrap_or(cfg!(windows));
+ let is_windows = project.read_with(cx, |project, cx| project.path_style(cx).is_windows());
let (task_command, task_args) = task::ShellBuilder::new(&shell, is_windows)
.redirect_stdin_to_dev_null()
.build(Some(command.clone()), &args);
@@ -246,6 +244,6 @@ pub async fn create_terminal_entity(
},
cx,
)
- })?
+ })
.await
}
@@ -198,7 +198,7 @@ impl ActionLog {
.ok();
let buffer_repo = git_store.read_with(cx, |git_store, cx| {
git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
- })?;
+ });
let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
let _repo_subscription =
@@ -214,7 +214,7 @@ impl ActionLog {
}
}
}))
- })?
+ })
} else {
None
};
@@ -394,54 +394,51 @@ impl ActionLog {
buffer.read(cx).language().cloned(),
))
})??;
- let update = diff.update(cx, |diff, cx| {
- diff.update_diff(
- buffer_snapshot.clone(),
- Some(new_base_text),
- true,
- language,
- cx,
- )
- });
- let mut unreviewed_edits = Patch::default();
- if let Ok(update) = update {
- let update = update.await;
-
- diff.update(cx, |diff, cx| {
- diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
- })?
+ let update = diff
+ .update(cx, |diff, cx| {
+ diff.update_diff(
+ buffer_snapshot.clone(),
+ Some(new_base_text),
+ true,
+ language,
+ cx,
+ )
+ })
.await;
- let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?;
-
- unreviewed_edits = cx
- .background_spawn({
- let buffer_snapshot = buffer_snapshot.clone();
- let new_diff_base = new_diff_base.clone();
- async move {
- let mut unreviewed_edits = Patch::default();
- for hunk in diff_snapshot.hunks_intersecting_range(
- Anchor::min_for_buffer(buffer_snapshot.remote_id())
- ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
- &buffer_snapshot,
- ) {
- let old_range = new_diff_base
- .offset_to_point(hunk.diff_base_byte_range.start)
- ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
- let new_range = hunk.range.start..hunk.range.end;
- unreviewed_edits.push(point_to_row_edit(
- Edit {
- old: old_range,
- new: new_range,
- },
- &new_diff_base,
- buffer_snapshot.as_rope(),
- ));
- }
- unreviewed_edits
+ diff.update(cx, |diff, cx| {
+ diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
+ })
+ .await;
+ let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));
+
+ let unreviewed_edits = cx
+ .background_spawn({
+ let buffer_snapshot = buffer_snapshot.clone();
+ let new_diff_base = new_diff_base.clone();
+ async move {
+ let mut unreviewed_edits = Patch::default();
+ for hunk in diff_snapshot.hunks_intersecting_range(
+ Anchor::min_for_buffer(buffer_snapshot.remote_id())
+ ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
+ &buffer_snapshot,
+ ) {
+ let old_range = new_diff_base
+ .offset_to_point(hunk.diff_base_byte_range.start)
+ ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
+ let new_range = hunk.range.start..hunk.range.end;
+ unreviewed_edits.push(point_to_row_edit(
+ Edit {
+ old: old_range,
+ new: new_range,
+ },
+ &new_diff_base,
+ buffer_snapshot.as_rope(),
+ ));
}
- })
- .await;
- }
+ unreviewed_edits
+ }
+ })
+ .await;
this.update(cx, |this, cx| {
let tracked_buffer = this
.tracked_buffers
@@ -246,7 +246,7 @@ impl ActivityIndicator {
cx,
);
buffer.set_capability(language::Capability::ReadOnly, cx);
- })?;
+ });
workspace.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
@@ -250,10 +250,10 @@ impl NativeAgent {
log::debug!("Creating new NativeAgent");
let project_context = cx
- .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))?
+ .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))
.await;
- cx.new(|cx| {
+ Ok(cx.new(|cx| {
let context_server_store = project.read(cx).context_server_store();
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx));
@@ -295,7 +295,7 @@ impl NativeAgent {
fs,
_subscriptions: subscriptions,
}
- })
+ }))
}
fn register_session(
@@ -512,10 +512,12 @@ impl NativeAgent {
let buffer_task =
project.update(cx, |project, cx| project.open_buffer(project_path, cx));
let rope_task = cx.spawn(async move |cx| {
- buffer_task.await?.read_with(cx, |buffer, cx| {
+ let buffer = buffer_task.await?;
+ let (project_entry_id, rope) = buffer.read_with(cx, |buffer, cx| {
let project_entry_id = buffer.entry_id(cx).context("buffer has no file")?;
anyhow::Ok((project_entry_id, buffer.as_rope().clone()))
- })?
+ })?;
+ anyhow::Ok((project_entry_id, rope))
});
// Build a string from the rope on a background thread.
cx.background_spawn(async move {
@@ -761,10 +763,10 @@ impl NativeAgent {
let thread = task.await?;
let acp_thread =
this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?;
- let events = thread.update(cx, |thread, cx| thread.replay(cx))?;
+ let events = thread.update(cx, |thread, cx| thread.replay(cx));
cx.update(|cx| {
NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx)
- })?
+ })
.await?;
Ok(acp_thread)
})
@@ -811,7 +813,7 @@ impl NativeAgent {
};
let db_thread = db_thread.await;
database.save_thread(id, db_thread).await.log_err();
- history.update(cx, |history, cx| history.reload(cx)).ok();
+ history.update(cx, |history, cx| history.reload(cx));
});
}
@@ -849,7 +851,7 @@ impl NativeAgent {
path_style,
cx,
);
- })?;
+ });
for message in prompt.messages {
let context_server::types::PromptMessage { role, content } = message;
@@ -866,13 +868,11 @@ impl NativeAgent {
true,
cx,
);
- anyhow::Ok(())
- })??;
+ })?;
thread.update(cx, |thread, cx| {
thread.push_acp_user_block(id, [block], path_style, cx);
- anyhow::Ok(())
- })??;
+ });
}
context_server::types::Role::Assistant => {
acp_thread.update(cx, |acp_thread, cx| {
@@ -882,13 +882,11 @@ impl NativeAgent {
true,
cx,
);
- anyhow::Ok(())
- })??;
+ })?;
thread.update(cx, |thread, cx| {
thread.push_acp_agent_block(block, cx);
- anyhow::Ok(())
- })??;
+ });
}
}
@@ -902,11 +900,11 @@ impl NativeAgent {
// Resume if MCP prompt did not end with a user message
thread.resume(cx)
}
- })??;
+ })?;
cx.update(|cx| {
NativeAgentConnection::handle_thread_events(response_stream, acp_thread, cx)
- })?
+ })
.await
})
}
@@ -1187,33 +1185,30 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
log::debug!("Starting thread creation in async context");
// Create Thread
- let thread = agent.update(
- cx,
- |agent, cx: &mut gpui::Context<NativeAgent>| -> Result<_> {
- // Fetch default model from registry settings
- let registry = LanguageModelRegistry::read_global(cx);
- // Log available models for debugging
- let available_count = registry.available_models(cx).count();
- log::debug!("Total available models: {}", available_count);
-
- let default_model = registry.default_model().and_then(|default_model| {
- agent
- .models
- .model_from_id(&LanguageModels::model_id(&default_model.model))
- });
- Ok(cx.new(|cx| {
- Thread::new(
- project.clone(),
- agent.project_context.clone(),
- agent.context_server_registry.clone(),
- agent.templates.clone(),
- default_model,
- cx,
- )
- }))
- },
- )??;
- agent.update(cx, |agent, cx| agent.register_session(thread, cx))
+ let thread = agent.update(cx, |agent, cx| {
+ // Fetch default model from registry settings
+ let registry = LanguageModelRegistry::read_global(cx);
+ // Log available models for debugging
+ let available_count = registry.available_models(cx).count();
+ log::debug!("Total available models: {}", available_count);
+
+ let default_model = registry.default_model().and_then(|default_model| {
+ agent
+ .models
+ .model_from_id(&LanguageModels::model_id(&default_model.model))
+ });
+ cx.new(|cx| {
+ Thread::new(
+ project.clone(),
+ agent.project_context.clone(),
+ agent.context_server_registry.clone(),
+ agent.templates.clone(),
+ default_model,
+ cx,
+ )
+ })
+ });
+ Ok(agent.update(cx, |agent, cx| agent.register_session(thread, cx)))
})
}
@@ -1446,7 +1441,7 @@ impl ThreadEnvironment for AcpThreadEnvironment {
let terminal = task?.await?;
let (drop_tx, drop_rx) = oneshot::channel();
- let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone())?;
+ let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone());
cx.spawn(async move |cx| {
drop_rx.await.ok();
@@ -1471,17 +1466,19 @@ pub struct AcpTerminalHandle {
impl TerminalHandle for AcpTerminalHandle {
fn id(&self, cx: &AsyncApp) -> Result<acp::TerminalId> {
- self.terminal.read_with(cx, |term, _cx| term.id().clone())
+ Ok(self.terminal.read_with(cx, |term, _cx| term.id().clone()))
}
fn wait_for_exit(&self, cx: &AsyncApp) -> Result<Shared<Task<acp::TerminalExitStatus>>> {
- self.terminal
- .read_with(cx, |term, _cx| term.wait_for_exit())
+ Ok(self
+ .terminal
+ .read_with(cx, |term, _cx| term.wait_for_exit()))
}
fn current_output(&self, cx: &AsyncApp) -> Result<acp::TerminalOutputResponse> {
- self.terminal
- .read_with(cx, |term, cx| term.current_output(cx))
+ Ok(self
+ .terminal
+ .read_with(cx, |term, cx| term.current_output(cx)))
}
fn kill(&self, cx: &AsyncApp) -> Result<()> {
@@ -1489,13 +1486,14 @@ impl TerminalHandle for AcpTerminalHandle {
self.terminal.update(cx, |terminal, cx| {
terminal.kill(cx);
});
- })?;
+ });
Ok(())
}
fn was_stopped_by_user(&self, cx: &AsyncApp) -> Result<bool> {
- self.terminal
- .read_with(cx, |term, _cx| term.was_stopped_by_user())
+ Ok(self
+ .terminal
+ .read_with(cx, |term, _cx| term.was_stopped_by_user()))
}
}
@@ -114,8 +114,8 @@ impl EditAgent {
let (events_tx, events_rx) = mpsc::unbounded();
let conversation = conversation.clone();
let output = cx.spawn(async move |cx| {
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
- let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ let path = cx.update(|cx| snapshot.resolve_file_path(true, cx));
let prompt = CreateFilePromptTemplate {
path,
edit_description,
@@ -148,7 +148,7 @@ impl EditAgent {
let this = self.clone();
let task = cx.spawn(async move |cx| {
this.action_log
- .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx))?;
+ .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
this.overwrite_with_chunks_internal(buffer, parse_rx, output_events_tx, cx)
.await?;
parse_task.await
@@ -182,7 +182,7 @@ impl EditAgent {
Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()),
))
.ok();
- })?;
+ });
while let Some(event) = parse_rx.next().await {
match event? {
@@ -203,7 +203,7 @@ impl EditAgent {
)
});
buffer.read(cx).remote_id()
- })?;
+ });
output_events_tx
.unbounded_send(EditAgentOutputEvent::Edited(
Anchor::min_max_range_for_buffer(buffer_id),
@@ -231,8 +231,8 @@ impl EditAgent {
let conversation = conversation.clone();
let edit_format = self.edit_format;
let output = cx.spawn(async move |cx| {
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
- let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ let path = cx.update(|cx| snapshot.resolve_file_path(true, cx));
let prompt = match edit_format {
EditFormat::XmlTags => EditFileXmlPromptTemplate {
path,
@@ -263,7 +263,7 @@ impl EditAgent {
cx: &mut AsyncApp,
) -> Result<EditAgentOutput> {
self.action_log
- .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))?;
+ .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, self.edit_format, cx);
let mut edit_events = edit_events.peekable();
@@ -274,7 +274,7 @@ impl EditAgent {
continue;
};
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
// Resolve the old text in the background, updating the agent
// location as we keep refining which range it corresponds to.
@@ -292,7 +292,7 @@ impl EditAgent {
}),
cx,
);
- })?;
+ });
output_events
.unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range))
.ok();
@@ -375,7 +375,7 @@ impl EditAgent {
);
});
(min_edit_start, max_edit_end)
- })?;
+ });
output_events
.unbounded_send(EditAgentOutputEvent::Edited(min_edit_start..max_edit_end))
.ok();
@@ -1473,9 +1473,9 @@ impl EditAgentTest {
.provider(&selected_model.provider)
.expect("Provider not found");
provider.authenticate(cx)
- })?
+ })
.await?;
- cx.update(|cx| {
+ Ok(cx.update(|cx| {
let models = LanguageModelRegistry::read_global(cx);
let model = models
.available_models(cx)
@@ -1485,7 +1485,7 @@ impl EditAgentTest {
})
.unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0));
model
- })
+ }))
}
async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result<EditEvalOutput> {
@@ -43,7 +43,7 @@ pub fn load_agent_thread(
cx.spawn(async move |cx| {
let (agent, _) = connection.await?;
let agent = agent.downcast::<crate::NativeAgentConnection>().unwrap();
- cx.update(|cx| agent.load_thread(session_id, cx))?.await
+ cx.update(|cx| agent.load_thread(session_id, cx)).await
})
}
@@ -25,13 +25,13 @@ pub async fn get_buffer_content_or_outline(
path: Option<&str>,
cx: &AsyncApp,
) -> Result<BufferContent> {
- let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len())?;
+ let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len());
if file_size > AUTO_OUTLINE_SIZE {
// For large files, use outline instead of full content
// Wait until the buffer has been fully parsed, so we can read its outline
buffer
- .read_with(cx, |buffer, _| buffer.parsing_idle())?
+ .read_with(cx, |buffer, _| buffer.parsing_idle())
.await;
let outline_items = buffer.read_with(cx, |buffer, _| {
@@ -42,7 +42,7 @@ pub async fn get_buffer_content_or_outline(
.into_iter()
.map(|item| item.to_point(&snapshot))
.collect::<Vec<_>>()
- })?;
+ });
// If no outline exists, fall back to first 1KB so the agent has some context
if outline_items.is_empty() {
@@ -55,7 +55,7 @@ pub async fn get_buffer_content_or_outline(
} else {
format!("# First 1KB of file (file too large to show full content, and no outline available)\n\n{content}")
}
- })?;
+ });
return Ok(BufferContent {
text,
@@ -76,7 +76,7 @@ pub async fn get_buffer_content_or_outline(
})
} else {
// File is small enough, return full content
- let text = buffer.read_with(cx, |buffer, _| buffer.text())?;
+ let text = buffer.read_with(cx, |buffer, _| buffer.text());
Ok(BufferContent {
text,
is_outline: false,
@@ -2413,7 +2413,7 @@ where
cx.spawn(async move |cx| {
let input = serde_json::from_value(input)?;
let output = cx
- .update(|cx| self.0.clone().run(input, event_stream, cx))?
+ .update(|cx| self.0.clone().run(input, event_stream, cx))
.await?;
let raw_output = serde_json::to_value(&output)?;
Ok(AgentToolOutput {
@@ -2650,7 +2650,7 @@ impl ToolCallEventStream {
.get_or_insert_default()
.set_always_allow_tool_actions(true);
});
- })?;
+ });
}
Ok(())
@@ -403,10 +403,7 @@ pub fn get_prompt(
arguments: HashMap<String, String>,
cx: &mut AsyncApp,
) -> Task<Result<context_server::types::PromptsGetResponse>> {
- let server = match cx.update(|cx| server_store.read(cx).get_running_server(server_id)) {
- Ok(server) => server,
- Err(error) => return Task::ready(Err(error)),
- };
+ let server = cx.update(|cx| server_store.read(cx).get_running_server(server_id));
let Some(server) = server else {
return Task::ready(Err(anyhow::anyhow!("Context server not found")));
};
@@ -115,19 +115,19 @@ impl AgentTool for DeletePathTool {
cx.spawn(async move |cx| {
while let Some(path) = paths_rx.next().await {
if let Ok(buffer) = project
- .update(cx, |project, cx| project.open_buffer(path, cx))?
+ .update(cx, |project, cx| project.open_buffer(path, cx))
.await
{
action_log.update(cx, |action_log, cx| {
action_log.will_delete_buffer(buffer.clone(), cx)
- })?;
+ });
}
}
let deletion_task = project
.update(cx, |project, cx| {
project.delete_file(project_path, false, cx)
- })?
+ })
.with_context(|| {
format!("Couldn't delete {path} because that path isn't in this project.")
})?;
@@ -105,7 +105,7 @@ impl AgentTool for DiagnosticsTool {
cx.spawn(async move |cx| {
let mut output = String::new();
let buffer = buffer.await?;
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix];
@@ -301,7 +301,7 @@ impl AgentTool for EditFileTool {
let buffer = project
.update(cx, |project, cx| {
project.open_buffer(project_path.clone(), cx)
- })?
+ })
.await?;
// Check if the file has been modified since the agent last read it
@@ -357,7 +357,7 @@ impl AgentTool for EditFileTool {
}
}
- let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?;
+ let diff = cx.new(|cx| Diff::new(buffer.clone(), cx));
event_stream.update_diff(diff.clone());
let _finalize_diff = util::defer({
let diff = diff.downgrade();
@@ -367,7 +367,7 @@ impl AgentTool for EditFileTool {
}
});
- let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
+ let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let old_text = cx
.background_spawn({
let old_snapshot = old_snapshot.clone();
@@ -399,9 +399,9 @@ impl AgentTool for EditFileTool {
match event {
EditAgentOutputEvent::Edited(range) => {
if !emitted_location {
- let line = buffer.update(cx, |buffer, _cx| {
+ let line = Some(buffer.update(cx, |buffer, _cx| {
range.start.to_point(&buffer.snapshot()).row
- }).ok();
+ }));
if let Some(abs_path) = abs_path.clone() {
event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path).line(line)]));
}
@@ -411,7 +411,7 @@ impl AgentTool for EditFileTool {
EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true,
EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges,
EditAgentOutputEvent::ResolvingEditRange(range) => {
- diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx))?;
+ diff.update(cx, |card, cx| card.reveal_range(range.clone(), cx));
// if !emitted_location {
// let line = buffer.update(cx, |buffer, _cx| {
// range.start.to_point(&buffer.snapshot()).row
@@ -428,23 +428,21 @@ impl AgentTool for EditFileTool {
}
// If format_on_save is enabled, format the buffer
- let format_on_save_enabled = buffer
- .read_with(cx, |buffer, cx| {
- let settings = language_settings::language_settings(
- buffer.language().map(|l| l.name()),
- buffer.file(),
- cx,
- );
- settings.format_on_save != FormatOnSave::Off
- })
- .unwrap_or(false);
+ let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| {
+ let settings = language_settings::language_settings(
+ buffer.language().map(|l| l.name()),
+ buffer.file(),
+ cx,
+ );
+ settings.format_on_save != FormatOnSave::Off
+ });
let edit_agent_output = output.await?;
if format_on_save_enabled {
action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx);
- })?;
+ });
let format_task = project.update(cx, |project, cx| {
project.format(
@@ -454,30 +452,30 @@ impl AgentTool for EditFileTool {
FormatTrigger::Save,
cx,
)
- })?;
+ });
format_task.await.log_err();
}
project
- .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
+ .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
.await?;
action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx);
- })?;
+ });
// Update the recorded read time after a successful edit so consecutive edits work
if let Some(abs_path) = abs_path.as_ref() {
if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
buffer.file().and_then(|file| file.disk_state().mtime())
- })? {
+ }) {
self.thread.update(cx, |thread, _| {
thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime);
})?;
}
}
- let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
+ let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let (new_text, unified_diff) = cx
.background_spawn({
let new_snapshot = new_snapshot.clone();
@@ -191,7 +191,7 @@ impl AgentTool for GrepTool {
continue;
}
- let Ok((Some(path), mut parse_status)) = buffer.read_with(cx, |buffer, cx| {
+ let (Some(path), mut parse_status) = buffer.read_with(cx, |buffer, cx| {
(buffer.file().map(|file| file.full_path(cx)), buffer.parse_status())
}) else {
continue;
@@ -200,20 +200,21 @@ impl AgentTool for GrepTool {
// Check if this file should be excluded based on its worktree settings
if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| {
project.find_project_path(&path, cx)
- })
- && cx.update(|cx| {
+ }) {
+ if cx.update(|cx| {
let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
worktree_settings.is_path_excluded(&project_path.path)
|| worktree_settings.is_path_private(&project_path.path)
- }).unwrap_or(false) {
+ }) {
continue;
}
+ }
while *parse_status.borrow() != ParseStatus::Idle {
parse_status.changed().await?;
}
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let mut ranges = ranges
.into_iter()
@@ -167,14 +167,14 @@ impl AgentTool for ReadFileTool {
self.project.update(cx, |project, cx| {
project.open_image(project_path.clone(), cx)
})
- })?
+ })
.await?;
let image =
- image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image))?;
+ image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image));
let language_model_image = cx
- .update(|cx| LanguageModelImage::from_image(image, cx))?
+ .update(|cx| LanguageModelImage::from_image(image, cx))
.await
.context("processing image")?;
@@ -197,21 +197,21 @@ impl AgentTool for ReadFileTool {
project.update(cx, |project, cx| {
project.open_buffer(project_path.clone(), cx)
})
- })?
+ })
.await?;
if buffer.read_with(cx, |buffer, _| {
buffer
.file()
.as_ref()
.is_none_or(|file| !file.disk_state().exists())
- })? {
+ }) {
anyhow::bail!("{file_path} not found");
}
// Record the file read time and mtime
if let Some(mtime) = buffer.read_with(cx, |buffer, _| {
buffer.file().and_then(|file| file.disk_state().mtime())
- })? {
+ }) {
self.thread
.update(cx, |thread, _| {
thread.file_read_times.insert(abs_path.to_path_buf(), mtime);
@@ -239,11 +239,11 @@ impl AgentTool for ReadFileTool {
let start = buffer.anchor_before(Point::new(start_row, 0));
let end = buffer.anchor_before(Point::new(end_row, 0));
buffer.text_for_range(start..end).collect::<String>()
- })?;
+ });
action_log.update(cx, |log, cx| {
log.buffer_read(buffer.clone(), cx);
- })?;
+ });
Ok(result.into())
} else {
@@ -257,7 +257,7 @@ impl AgentTool for ReadFileTool {
action_log.update(cx, |log, cx| {
log.buffer_read(buffer.clone(), cx);
- })?;
+ });
if buffer_content.is_outline {
Ok(formatdoc! {"
@@ -297,7 +297,7 @@ impl AgentTool for ReadFileTool {
acp::ToolCallContent::Content(acp::Content::new(markdown)),
]));
}
- })?;
+ });
result
})
@@ -74,49 +74,29 @@ impl AgentTool for RestoreFileFromDiskTool {
let mut clean_paths: Vec<PathBuf> = Vec::new();
let mut not_found_paths: Vec<PathBuf> = Vec::new();
let mut open_errors: Vec<(PathBuf, String)> = Vec::new();
- let mut dirty_check_errors: Vec<(PathBuf, String)> = Vec::new();
+ let dirty_check_errors: Vec<(PathBuf, String)> = Vec::new();
let mut reload_errors: Vec<String> = Vec::new();
for path in input_paths {
- let project_path =
- project.read_with(cx, |project, cx| project.find_project_path(&path, cx));
-
- let project_path = match project_path {
- Ok(Some(project_path)) => project_path,
- Ok(None) => {
- not_found_paths.push(path);
- continue;
- }
- Err(error) => {
- open_errors.push((path, error.to_string()));
- continue;
- }
+ let Some(project_path) =
+ project.read_with(cx, |project, cx| project.find_project_path(&path, cx))
+ else {
+ not_found_paths.push(path);
+ continue;
};
let open_buffer_task =
project.update(cx, |project, cx| project.open_buffer(project_path, cx));
- let buffer = match open_buffer_task {
- Ok(task) => match task.await {
- Ok(buffer) => buffer,
- Err(error) => {
- open_errors.push((path, error.to_string()));
- continue;
- }
- },
+ let buffer = match open_buffer_task.await {
+ Ok(buffer) => buffer,
Err(error) => {
open_errors.push((path, error.to_string()));
continue;
}
};
- let is_dirty = match buffer.read_with(cx, |buffer, _| buffer.is_dirty()) {
- Ok(is_dirty) => is_dirty,
- Err(error) => {
- dirty_check_errors.push((path, error.to_string()));
- continue;
- }
- };
+ let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty());
if is_dirty {
buffers_to_reload.insert(buffer);
@@ -131,15 +111,8 @@ impl AgentTool for RestoreFileFromDiskTool {
project.reload_buffers(buffers_to_reload, true, cx)
});
- match reload_task {
- Ok(task) => {
- if let Err(error) = task.await {
- reload_errors.push(error.to_string());
- }
- }
- Err(error) => {
- reload_errors.push(error.to_string());
- }
+ if let Err(error) = reload_task.await {
+ reload_errors.push(error.to_string());
}
}
@@ -71,49 +71,29 @@ impl AgentTool for SaveFileTool {
let mut clean_paths: Vec<PathBuf> = Vec::new();
let mut not_found_paths: Vec<PathBuf> = Vec::new();
let mut open_errors: Vec<(PathBuf, String)> = Vec::new();
- let mut dirty_check_errors: Vec<(PathBuf, String)> = Vec::new();
+ let dirty_check_errors: Vec<(PathBuf, String)> = Vec::new();
let mut save_errors: Vec<(String, String)> = Vec::new();
for path in input_paths {
- let project_path =
- project.read_with(cx, |project, cx| project.find_project_path(&path, cx));
-
- let project_path = match project_path {
- Ok(Some(project_path)) => project_path,
- Ok(None) => {
- not_found_paths.push(path);
- continue;
- }
- Err(error) => {
- open_errors.push((path, error.to_string()));
- continue;
- }
+ let Some(project_path) =
+ project.read_with(cx, |project, cx| project.find_project_path(&path, cx))
+ else {
+ not_found_paths.push(path);
+ continue;
};
let open_buffer_task =
project.update(cx, |project, cx| project.open_buffer(project_path, cx));
- let buffer = match open_buffer_task {
- Ok(task) => match task.await {
- Ok(buffer) => buffer,
- Err(error) => {
- open_errors.push((path, error.to_string()));
- continue;
- }
- },
+ let buffer = match open_buffer_task.await {
+ Ok(buffer) => buffer,
Err(error) => {
open_errors.push((path, error.to_string()));
continue;
}
};
- let is_dirty = match buffer.read_with(cx, |buffer, _| buffer.is_dirty()) {
- Ok(is_dirty) => is_dirty,
- Err(error) => {
- dirty_check_errors.push((path, error.to_string()));
- continue;
- }
- };
+ let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty());
if is_dirty {
buffers_to_save.insert(buffer);
@@ -125,30 +105,19 @@ impl AgentTool for SaveFileTool {
// Save each buffer individually since there's no batch save API.
for buffer in buffers_to_save {
- let path_for_buffer = match buffer.read_with(cx, |buffer, _| {
- buffer
- .file()
- .map(|file| file.path().to_rel_path_buf())
- .map(|path| path.as_rel_path().as_unix_str().to_owned())
- }) {
- Ok(path) => path.unwrap_or_else(|| "<unknown>".to_string()),
- Err(error) => {
- save_errors.push(("<unknown>".to_string(), error.to_string()));
- continue;
- }
- };
+ let path_for_buffer = buffer
+ .read_with(cx, |buffer, _| {
+ buffer
+ .file()
+ .map(|file| file.path().to_rel_path_buf())
+ .map(|path| path.as_rel_path().as_unix_str().to_owned())
+ })
+ .unwrap_or_else(|| "<unknown>".to_string());
let save_task = project.update(cx, |project, cx| project.save_buffer(buffer, cx));
- match save_task {
- Ok(task) => {
- if let Err(error) = task.await {
- save_errors.push((path_for_buffer, error.to_string()));
- }
- }
- Err(error) => {
- save_errors.push((path_for_buffer, error.to_string()));
- }
+ if let Err(error) = save_task.await {
+ save_errors.push((path_for_buffer, error.to_string()));
}
}
@@ -111,7 +111,7 @@ impl AcpConnection {
is_remote: bool,
cx: &mut AsyncApp,
) -> Result<Self> {
- let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?;
+ let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone());
let builder = ShellBuilder::new(&shell, cfg!(windows)).non_interactive();
let mut child =
builder.build_std_command(Some(command.path.display().to_string()), &command.args);
@@ -133,13 +133,13 @@ impl AcpConnection {
let sessions = Rc::new(RefCell::new(HashMap::default()));
- let (release_channel, version) = cx.update(|cx| {
+ let (release_channel, version): (Option<&str>, String) = cx.update(|cx| {
(
release_channel::ReleaseChannel::try_global(cx)
.map(|release_channel| release_channel.display_name()),
release_channel::AppVersion::global(cx).to_string(),
)
- })?;
+ });
let client = ClientDelegate {
sessions: sessions.clone(),
@@ -191,7 +191,7 @@ impl AcpConnection {
AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| {
registry.set_active_connection(server_name.clone(), &connection, cx)
});
- })?;
+ });
let response = connection
.initialize(
@@ -343,7 +343,7 @@ impl AgentConnection for AcpConnection {
}
})?;
- let use_config_options = cx.update(|cx| cx.has_flag::<AcpBetaFeatureFlag>())?;
+ let use_config_options = cx.update(|cx| cx.has_flag::<AcpBetaFeatureFlag>());
// Config options take precedence over legacy modes/models
let (modes, models, config_options) = if use_config_options && let Some(opts) = response.config_options {
@@ -532,8 +532,8 @@ impl AgentConnection for AcpConnection {
}
let session_id = response.session_id;
- let action_log = cx.new(|_| ActionLog::new(project.clone()))?;
- let thread = cx.new(|cx| {
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+ let thread: Entity<AcpThread> = cx.new(|cx| {
AcpThread::new(
self.server_name.clone(),
self.clone(),
@@ -544,7 +544,7 @@ impl AgentConnection for AcpConnection {
watch::Receiver::constant(self.agent_capabilities.prompt_capabilities.clone()),
cx,
)
- })?;
+ });
let session = AcpSession {
@@ -1104,8 +1104,7 @@ impl acp::Client for ClientDelegate {
cx,
)
})?;
- let terminal_id =
- terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?;
+ let terminal_id = terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone());
Ok(acp::CreateTerminalResponse::new(terminal_id))
}
@@ -47,7 +47,7 @@ impl AgentServer for Gemini {
extra_env.insert("SURFACE".to_owned(), "zed".to_owned());
if let Some(api_key) = cx
- .update(GoogleLanguageModelProvider::api_key_for_gemini_cli)?
+ .update(GoogleLanguageModelProvider::api_key_for_gemini_cli)
.await
.ok()
{
@@ -466,9 +466,9 @@ impl MessageEditor {
}
}
});
- Ok((chunks, all_tracked_buffers))
+ anyhow::Ok((chunks, all_tracked_buffers))
})?;
- result
+ Ok(result)
})
}
@@ -678,28 +678,24 @@ impl MessageEditor {
.update(cx, |project, cx| {
project.project_path_for_absolute_path(&file_path, cx)
})
- .map_err(|e| e.to_string())?
.ok_or_else(|| "project path not found".to_string())?;
let buffer = project
.update(cx, |project, cx| project.open_buffer(project_path, cx))
- .map_err(|e| e.to_string())?
.await
.map_err(|e| e.to_string())?;
- buffer
- .update(cx, |buffer, cx| {
- let start = Point::new(*line_range.start(), 0)
- .min(buffer.max_point());
- let end = Point::new(*line_range.end() + 1, 0)
- .min(buffer.max_point());
- let content = buffer.text_for_range(start..end).collect();
- Mention::Text {
- content,
- tracked_buffers: vec![cx.entity()],
- }
- })
- .map_err(|e| e.to_string())
+ Ok(buffer.update(cx, |buffer, cx| {
+ let start =
+ Point::new(*line_range.start(), 0).min(buffer.max_point());
+ let end = Point::new(*line_range.end() + 1, 0)
+ .min(buffer.max_point());
+ let content = buffer.text_for_range(start..end).collect();
+ Mention::Text {
+ content,
+ tracked_buffers: vec![cx.entity()],
+ }
+ }))
}
})
.shared();
@@ -985,7 +985,7 @@ impl AcpThreadView {
);
});
}
- })?;
+ });
anyhow::Ok(())
})
@@ -1019,7 +1019,7 @@ impl AcpThreadView {
history_store
.update(&mut cx.clone(), |store, cx| {
store.save_thread(session_id.clone(), db_thread, cx)
- })?
+ })
.await?;
let thread_metadata = agent::DbThreadMetadata {
@@ -1652,18 +1652,18 @@ impl AcpThreadView {
.iter()
.take(entry_ix)
.any(|entry| entry.diffs().next().is_some())
- })?;
+ });
if has_earlier_edits {
thread.update(cx, |thread, cx| {
thread.action_log().update(cx, |action_log, cx| {
action_log.keep_all_edits(None, cx);
});
- })?;
+ });
}
thread
- .update(cx, |thread, cx| thread.rewind(user_message_id, cx))?
+ .update(cx, |thread, cx| thread.rewind(user_message_id, cx))
.await?;
this.update_in(cx, |this, window, cx| {
this.send_impl(message_editor, window, cx);
@@ -2140,7 +2140,7 @@ impl AcpThreadView {
})
});
- if let Ok(Some(resolve_task)) = resolved_node_runtime {
+ if let Some(resolve_task) = resolved_node_runtime {
if let Ok(node_path) = resolve_task.await {
task.command = Some(node_path.to_string_lossy().to_string());
}
@@ -2159,11 +2159,11 @@ impl AcpThreadView {
task.allow_concurrent_runs = true;
task.hide = task::HideStrategy::Always;
- let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
- terminal_panel.spawn_task(&task, window, cx)
- })?;
-
- let terminal = terminal.await?;
+ let terminal = terminal_panel
+ .update_in(cx, |terminal_panel, window, cx| {
+ terminal_panel.spawn_task(&task, window, cx)
+ })?
+ .await?;
if check_exit_code {
// For extension-based auth, wait for the process to exit and check exit code
@@ -2214,7 +2214,7 @@ impl AcpThreadView {
}
}
_ = exit_status => {
- if !previous_attempt && project.read_with(cx, |project, _| project.is_via_remote_server())? && login.label.contains("gemini") {
+ if !previous_attempt && project.read_with(cx, |project, _| project.is_via_remote_server()) && login.label.contains("gemini") {
return cx.update(|window, cx| Self::spawn_external_agent_login(login, workspace, project.clone(), true, false, window, cx))?.await
}
return Err(anyhow!("exited before logging in"));
@@ -5667,14 +5667,14 @@ impl AcpThreadView {
let markdown_language = markdown_language_task.await?;
let buffer = project
- .update(cx, |project, cx| project.create_buffer(false, cx))?
+ .update(cx, |project, cx| project.create_buffer(false, cx))
.await?;
buffer.update(cx, |buffer, cx| {
buffer.set_text(markdown, cx);
buffer.set_language(Some(markdown_language), cx);
buffer.set_capability(language::Capability::ReadWrite, cx);
- })?;
+ });
workspace.update_in(cx, |workspace, window, cx| {
let buffer = cx
@@ -821,7 +821,8 @@ impl AgentConfiguration {
}
},
)
- })
+ });
+ anyhow::Ok(())
}
})
.detach_and_log_err(cx);
@@ -1304,7 +1305,7 @@ fn show_unable_to_uninstall_extension_with_context_server(
.context_servers
.remove(&context_server_id.0);
});
- })?;
+ });
anyhow::Ok(())
}
})
@@ -262,7 +262,7 @@ fn save_provider_to_settings(
let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes());
cx.spawn(async move |cx| {
task.await
- .map_err(|_| "Failed to write API key to keychain")?;
+ .map_err(|_| SharedString::from("Failed to write API key to keychain"))?;
cx.update(|cx| {
update_settings_file(fs, cx, |settings, _cx| {
settings
@@ -278,8 +278,7 @@ fn save_provider_to_settings(
},
);
});
- })
- .ok();
+ });
Ok(())
})
}
@@ -1091,8 +1091,8 @@ impl<T: PromptCompletionProviderDelegate> CompletionProvider for PromptCompletio
)
}
})
- .collect()
- })?;
+ .collect::<Vec<_>>()
+ });
Ok(vec![CompletionResponse {
completions,
@@ -1469,26 +1469,19 @@ pub(crate) fn search_symbols(
let Some(symbols) = symbols_task.await.log_err() else {
return Vec::new();
};
- let Some((visible_match_candidates, external_match_candidates)): Option<(Vec<_>, Vec<_>)> =
- project
- .update(cx, |project, cx| {
- symbols
- .iter()
- .enumerate()
- .map(|(id, symbol)| {
- StringMatchCandidate::new(id, symbol.label.filter_text())
- })
- .partition(|candidate| match &symbols[candidate.id].path {
- SymbolLocation::InProject(project_path) => project
- .entry_for_path(project_path, cx)
- .is_some_and(|e| !e.is_ignored),
- SymbolLocation::OutsideProject { .. } => false,
- })
- })
- .log_err()
- else {
- return Vec::new();
- };
+ let (visible_match_candidates, external_match_candidates): (Vec<_>, Vec<_>) = project
+ .update(cx, |project, cx| {
+ symbols
+ .iter()
+ .enumerate()
+ .map(|(id, symbol)| StringMatchCandidate::new(id, symbol.label.filter_text()))
+ .partition(|candidate| match &symbols[candidate.id].path {
+ SymbolLocation::InProject(project_path) => project
+ .entry_for_path(project_path, cx)
+ .is_some_and(|e| !e.is_ignored),
+ SymbolLocation::OutsideProject { .. } => false,
+ })
+ });
const MAX_MATCHES: usize = 100;
let mut visible_matches = cx.background_executor().block(fuzzy::match_strings(
@@ -300,7 +300,7 @@ impl InlineAssistant {
if let Some(error) = configuration_error() {
if let ConfigurationError::ProviderNotAuthenticated(provider) = error {
cx.spawn(async move |_, cx| {
- cx.update(|cx| provider.authenticate(cx))?.await?;
+ cx.update(|cx| provider.authenticate(cx)).await?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
@@ -1633,7 +1633,7 @@ impl EditorInlineAssists {
let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| {
assistant.update_editor_highlights(&editor, cx);
- })?;
+ });
}
Ok(())
}
@@ -1978,7 +1978,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let multibuffer_snapshot = multibuffer.read(cx);
multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range)
})
- })?
+ })
.context("invalid range")?;
let prompt_store = prompt_store.await.ok();
@@ -94,7 +94,7 @@ impl MentionSet {
let content = if full_mention_content
&& let MentionUri::Directory { abs_path } = &mention_uri
{
- cx.update(|cx| full_mention_for_directory(&project, abs_path, cx))?
+ cx.update(|cx| full_mention_for_directory(&project, abs_path, cx))
.await?
} else {
task.await.map_err(|e| anyhow!("{e}"))?
@@ -180,9 +180,7 @@ impl MentionSet {
let image = cx
.spawn(async move |_, cx| {
let image = image_task.await.map_err(|e| e.to_string())?;
- let image = image
- .update(cx, |image, _| image.image.clone())
- .map_err(|e| e.to_string())?;
+ let image = image.update(cx, |image, _| image.image.clone());
Ok(image)
})
.shared();
@@ -291,10 +289,10 @@ impl MentionSet {
let task = project.update(cx, |project, cx| project.open_image(project_path, cx));
return cx.spawn(async move |_, cx| {
let image = task.await?;
- let image = image.update(cx, |image, _| image.image.clone())?;
+ let image = image.update(cx, |image, _| image.image.clone());
let format = image.format;
let image = cx
- .update(|cx| LanguageModelImage::from_image(image, cx))?
+ .update(|cx| LanguageModelImage::from_image(image, cx))
.await;
if let Some(image) = image {
Ok(Mention::Image(MentionImage {
@@ -365,8 +363,8 @@ impl MentionSet {
content,
tracked_buffers: vec![cx.entity()],
}
- })?;
- anyhow::Ok(mention)
+ });
+ Ok(mention)
})
}
@@ -493,9 +491,9 @@ impl MentionSet {
let agent = agent.downcast::<agent::NativeAgentConnection>().unwrap();
let summary = agent
.0
- .update(cx, |agent, cx| agent.thread_summary(id, cx))?
+ .update(cx, |agent, cx| agent.thread_summary(id, cx))
.await?;
- anyhow::Ok(Mention::Text {
+ Ok(Mention::Text {
content: summary.to_string(),
tracked_buffers: Vec::new(),
})
@@ -512,7 +510,7 @@ impl MentionSet {
});
cx.spawn(async move |_, cx| {
let text_thread = text_thread_task.await?;
- let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx))?;
+ let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx));
Ok(Mention::Text {
content: xml,
tracked_buffers: Vec::new(),
@@ -580,8 +578,8 @@ pub(crate) fn paste_images_as_context(
})
.ok();
for image in images {
- let Ok((excerpt_id, text_anchor, multibuffer_anchor)) =
- editor.update_in(cx, |message_editor, window, cx| {
+ let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor
+ .update_in(cx, |message_editor, window, cx| {
let snapshot = message_editor.snapshot(window, cx);
let (excerpt_id, _, buffer_snapshot) =
snapshot.buffer_snapshot().as_singleton().unwrap();
@@ -599,6 +597,7 @@ pub(crate) fn paste_images_as_context(
);
(*excerpt_id, text_anchor, multibuffer_anchor)
})
+ .ok()
else {
break;
};
@@ -607,12 +606,10 @@ pub(crate) fn paste_images_as_context(
let Some(start_anchor) = multibuffer_anchor else {
continue;
};
- let Ok(end_anchor) = editor.update(cx, |editor, cx| {
+ let end_anchor = editor.update(cx, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len)
- }) else {
- continue;
- };
+ });
let image = Arc::new(image);
let Ok(Some((crease_id, tx))) = cx.update(|window, cx| {
insert_crease_for_mention(
@@ -648,23 +645,17 @@ pub(crate) fn paste_images_as_context(
})
.shared();
- mention_set
- .update(cx, |mention_set, _cx| {
- mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone())
- })
- .ok();
+ mention_set.update(cx, |mention_set, _cx| {
+ mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone())
+ });
if task.await.notify_async_err(cx).is_none() {
- editor
- .update(cx, |editor, cx| {
- editor.edit([(start_anchor..end_anchor, "")], cx);
- })
- .ok();
- mention_set
- .update(cx, |mention_set, _cx| {
- mention_set.remove_mention(&crease_id)
- })
- .ok();
+ editor.update(cx, |editor, cx| {
+ editor.edit([(start_anchor..end_anchor, "")], cx);
+ });
+ mention_set.update(cx, |mention_set, _cx| {
+ mention_set.remove_mention(&crease_id)
+ });
}
}
}))
@@ -822,42 +813,44 @@ fn full_mention_for_directory(
cx.spawn(async move |cx| {
let file_paths = worktree.read_with(cx, |worktree, _cx| {
collect_files_in_path(worktree, &directory_path)
- })?;
+ });
let descendants_future = cx.update(|cx| {
- futures::future::join_all(file_paths.into_iter().map(|(worktree_path, full_path)| {
- let rel_path = worktree_path
- .strip_prefix(&directory_path)
- .log_err()
- .map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into());
-
- let open_task = project.update(cx, |project, cx| {
- project.buffer_store().update(cx, |buffer_store, cx| {
- let project_path = ProjectPath {
- worktree_id,
- path: worktree_path,
- };
- buffer_store.open_buffer(project_path, cx)
- })
- });
-
- cx.spawn(async move |cx| {
- let buffer = open_task.await.log_err()?;
- let buffer_content = outline::get_buffer_content_or_outline(
- buffer.clone(),
- Some(&full_path),
- &cx,
- )
- .await
- .ok()?;
+ futures::future::join_all(file_paths.into_iter().map(
+ |(worktree_path, full_path): (Arc<RelPath>, String)| {
+ let rel_path = worktree_path
+ .strip_prefix(&directory_path)
+ .log_err()
+ .map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into());
+
+ let open_task = project.update(cx, |project, cx| {
+ project.buffer_store().update(cx, |buffer_store, cx| {
+ let project_path = ProjectPath {
+ worktree_id,
+ path: worktree_path,
+ };
+ buffer_store.open_buffer(project_path, cx)
+ })
+ });
- Some((rel_path, full_path, buffer_content.text, buffer))
- })
- }))
- })?;
+ cx.spawn(async move |cx| {
+ let buffer = open_task.await.log_err()?;
+ let buffer_content = outline::get_buffer_content_or_outline(
+ buffer.clone(),
+ Some(&full_path),
+ &cx,
+ )
+ .await
+ .ok()?;
+
+ Some((rel_path, full_path, buffer_content.text, buffer))
+ })
+ },
+ ))
+ });
let contents = cx
.background_spawn(async move {
- let (contents, tracked_buffers) = descendants_future
+ let (contents, tracked_buffers): (Vec<_>, Vec<_>) = descendants_future
.await
.into_iter()
.flatten()
@@ -56,7 +56,7 @@ impl SlashCommand for DefaultSlashCommand {
let store = PromptStore::global(cx);
cx.spawn(async move |cx| {
let store = store.await?;
- let prompts = store.read_with(cx, |store, _cx| store.default_prompt_metadata())?;
+ let prompts = store.read_with(cx, |store, _cx| store.default_prompt_metadata());
let mut text = String::new();
text.push('\n');
@@ -300,7 +300,7 @@ fn collect_diagnostics(
.await
.log_err()
{
- let snapshot = cx.read_entity(&buffer, |buffer, _| buffer.snapshot())?;
+ let snapshot = cx.read_entity(&buffer, |buffer, _| buffer.snapshot());
collect_buffer_diagnostics(&mut output, &snapshot, options.include_warnings);
}
@@ -369,7 +369,7 @@ fn collect_files(
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default();
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
append_buffer_to_output(
&snapshot,
Some(path_including_worktree_name.display(path_style).as_ref()),
@@ -47,7 +47,7 @@ impl SlashCommand for PromptSlashCommand {
let cancellation_flag = Arc::new(AtomicBool::default());
let prompts: Vec<PromptMetadata> = store
.await?
- .read_with(cx, |store, cx| store.search(query, cancellation_flag, cx))?
+ .read_with(cx, |store, cx| store.search(query, cancellation_flag, cx))
.await;
Ok(prompts
.into_iter()
@@ -91,7 +91,7 @@ impl SlashCommand for PromptSlashCommand {
.id_for_title(&title)
.with_context(|| format!("no prompt found with title {:?}", title))?;
anyhow::Ok(store.load(prompt_id, cx))
- })??
+ })?
.await?;
anyhow::Ok(body)
}
@@ -1263,7 +1263,7 @@ impl TextThread {
}
let token_count = cx
- .update(|cx| model.model.count_tokens(request, cx))?
+ .update(|cx| model.model.count_tokens(request, cx))
.await?;
this.update(cx, |this, cx| {
this.token_count = Some(token_count);
@@ -124,7 +124,7 @@ impl TextThreadStore {
this.register_context_server_handlers(cx);
this.reload(cx).detach_and_log_err(cx);
this
- })?;
+ });
Ok(this)
})
@@ -166,7 +166,8 @@ impl TextThreadStore {
})
.collect();
cx.notify();
- })
+ });
+ Ok(())
}
async fn handle_open_context(
@@ -196,7 +197,7 @@ impl TextThreadStore {
.read(cx)
.serialize_ops(&TextThreadVersion::default(), cx),
)
- })??;
+ })?;
let operations = operations.await;
Ok(proto::OpenContextResponse {
context: Some(proto::Context { operations }),
@@ -224,7 +225,7 @@ impl TextThreadStore {
.read(cx)
.serialize_ops(&TextThreadVersion::default(), cx),
))
- })??;
+ })?;
let operations = operations.await;
Ok(proto::CreateContextResponse {
context_id: context_id.to_proto(),
@@ -245,7 +246,7 @@ impl TextThreadStore {
text_thread.update(cx, |text_thread, cx| text_thread.apply_ops([operation], cx));
}
Ok(())
- })?
+ })
}
async fn handle_synchronize_contexts(
@@ -290,7 +291,7 @@ impl TextThreadStore {
anyhow::Ok(proto::SynchronizeContextsResponse {
contexts: local_versions,
})
- })?
+ })
}
fn handle_project_shared(&mut self, cx: &mut Context<Self>) {
@@ -416,7 +417,7 @@ impl TextThreadStore {
Some(project),
cx,
)
- })?;
+ });
let operations = cx
.background_spawn(async move {
context_proto
@@ -426,7 +427,7 @@ impl TextThreadStore {
.collect::<Result<Vec<_>>>()
})
.await?;
- text_thread.update(cx, |context, cx| context.apply_ops(operations, cx))?;
+ text_thread.update(cx, |context, cx| context.apply_ops(operations, cx));
this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_text_thread_for_id(&context_id, cx) {
existing_context
@@ -473,7 +474,7 @@ impl TextThreadStore {
Some(project),
cx,
)
- })?;
+ });
this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_text_thread_for_path(&path, cx) {
existing_context
@@ -580,7 +581,7 @@ impl TextThreadStore {
Some(project),
cx,
)
- })?;
+ });
let operations = cx
.background_spawn(async move {
context_proto
@@ -590,7 +591,7 @@ impl TextThreadStore {
.collect::<Result<Vec<_>>>()
})
.await?;
- text_thread.update(cx, |context, cx| context.apply_ops(operations, cx))?;
+ text_thread.update(cx, |context, cx| context.apply_ops(operations, cx));
this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_text_thread_for_id(&text_thread_id, cx)
{
@@ -303,9 +303,9 @@ pub struct VoipParts {
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
impl VoipParts {
pub fn new(cx: &AsyncApp) -> anyhow::Result<Self> {
- let (apm, replays) = cx.try_read_default_global::<Audio, _>(|audio, _| {
+ let (apm, replays) = cx.read_default_global::<Audio, _>(|audio, _| {
(Arc::clone(&audio.echo_canceller), audio.replays.clone())
- })?;
+ });
let legacy_audio_compatible =
AudioSettings::try_read_global(cx, |settings| settings.legacy_audio_compatible)
.unwrap_or(true);
@@ -436,7 +436,7 @@ impl AutoUpdater {
.0
.clone()
.context("auto-update not initialized")
- })??;
+ })?;
set_status("Fetching remote server release", cx);
let release = Self::get_release_asset(
@@ -456,7 +456,7 @@ impl AutoUpdater {
let version_path = platform_dir.join(format!("{}.gz", release.version));
smol::fs::create_dir_all(&platform_dir).await.ok();
- let client = this.read_with(cx, |this, _| this.client.http_client())?;
+ let client = this.read_with(cx, |this, _| this.client.http_client());
if smol::fs::metadata(&version_path).await.is_err() {
log::info!(
@@ -482,7 +482,7 @@ impl AutoUpdater {
.0
.clone()
.context("auto-update not initialized")
- })??;
+ })?;
let release =
Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx)
@@ -500,7 +500,7 @@ impl AutoUpdater {
arch: &str,
cx: &mut AsyncApp,
) -> Result<ReleaseAsset> {
- let client = this.read_with(cx, |this, _| this.client.clone())?;
+ let client = this.read_with(cx, |this, _| this.client.clone());
let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() {
(
@@ -563,7 +563,7 @@ impl AutoUpdater {
this.status.clone(),
ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable),
)
- })?;
+ });
Self::check_dependencies()?;
@@ -571,12 +571,12 @@ impl AutoUpdater {
this.status = AutoUpdateStatus::Checking;
log::info!("Auto Update: checking for updates");
cx.notify();
- })?;
+ });
let fetched_release_data =
Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?;
let fetched_version = fetched_release_data.clone().version;
- let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full()));
+ let app_commit_sha = Ok(cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full())));
let newer_version = Self::check_if_fetched_version_is_newer(
release_channel,
app_commit_sha,
@@ -586,7 +586,7 @@ impl AutoUpdater {
)?;
let Some(newer_version) = newer_version else {
- return this.update(cx, |this, cx| {
+ this.update(cx, |this, cx| {
let status = match previous_status {
AutoUpdateStatus::Updated { .. } => previous_status,
_ => AutoUpdateStatus::Idle,
@@ -594,6 +594,7 @@ impl AutoUpdater {
this.status = status;
cx.notify();
});
+ return Ok(());
};
this.update(cx, |this, cx| {
@@ -601,7 +602,7 @@ impl AutoUpdater {
version: newer_version.clone(),
};
cx.notify();
- })?;
+ });
let installer_dir = InstallerDir::new().await?;
let target_path = Self::target_path(&installer_dir).await?;
@@ -612,11 +613,11 @@ impl AutoUpdater {
version: newer_version.clone(),
};
cx.notify();
- })?;
+ });
let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?;
if let Some(new_binary_path) = new_binary_path {
- cx.update(|cx| cx.set_restart_path(new_binary_path))?;
+ cx.update(|cx| cx.set_restart_path(new_binary_path));
}
this.update(cx, |this, cx| {
@@ -626,7 +627,8 @@ impl AutoUpdater {
version: newer_version,
};
cx.notify();
- })
+ });
+ Ok(())
}
fn check_if_fetched_version_is_newer(
@@ -807,9 +809,9 @@ async fn install_release_linux(
downloaded_tar_gz: PathBuf,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
- let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?;
+ let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name());
let home_dir = PathBuf::from(env::var("HOME").context("no HOME env var set")?);
- let running_app_path = cx.update(|cx| cx.app_path())??;
+ let running_app_path = cx.update(|cx| cx.app_path())?;
let extracted = temp_dir.path().join("zed");
fs::create_dir_all(&extracted)
@@ -874,7 +876,7 @@ async fn install_release_macos(
downloaded_dmg: PathBuf,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
- let running_app_path = cx.update(|cx| cx.app_path())??;
+ let running_app_path = cx.update(|cx| cx.app_path())?;
let running_app_filename = running_app_path
.file_name()
.with_context(|| format!("invalid running app path {running_app_path:?}"))?;
@@ -207,8 +207,8 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
updater
.set_should_show_update_notification(false, cx)
.detach_and_log_err(cx);
- })
- })?;
+ });
+ });
}
anyhow::Ok(())
})
@@ -112,24 +112,24 @@ impl ActiveCall {
envelope: TypedEnvelope<proto::IncomingCall>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let user_store = this.read_with(&cx, |this, _| this.user_store.clone())?;
+ let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
let call = IncomingCall {
room_id: envelope.payload.room_id,
participants: user_store
.update(&mut cx, |user_store, cx| {
user_store.get_users(envelope.payload.participant_user_ids, cx)
- })?
+ })
.await?,
calling_user: user_store
.update(&mut cx, |user_store, cx| {
user_store.get_user(envelope.payload.calling_user_id, cx)
- })?
+ })
.await?,
initial_project: envelope.payload.initial_project,
};
this.update(&mut cx, |this, _| {
*this.incoming_call.0.borrow_mut() = Some(call);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -147,7 +147,7 @@ impl ActiveCall {
{
incoming_call.take();
}
- })?;
+ });
Ok(())
}
@@ -187,7 +187,7 @@ impl ActiveCall {
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
- room.update(cx, |room, cx| room.share_project(initial_project, cx))?
+ room.update(cx, |room, cx| room.share_project(initial_project, cx))
.await?,
)
} else {
@@ -196,7 +196,7 @@ impl ActiveCall {
room.update(cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx)
- })?
+ })
.await?;
anyhow::Ok(())
@@ -216,7 +216,7 @@ impl ActiveCall {
user_store,
cx,
)
- })?
+ })
.await?;
this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))?
@@ -185,13 +185,13 @@ impl Room {
room.local_participant.role = participant.role()
}
room
- })?;
+ });
let initial_project_id = if let Some(initial_project) = initial_project {
let initial_project_id = room
.update(cx, |room, cx| {
room.share_project(initial_project.clone(), cx)
- })?
+ })
.await?;
Some(initial_project_id)
} else {
@@ -202,7 +202,7 @@ impl Room {
.update(cx, |room, cx| {
room.leave_when_empty = true;
room.call(called_user_id, initial_project_id, cx)
- })?
+ })
.await;
match did_join {
Ok(()) => Ok(room),
@@ -286,12 +286,12 @@ impl Room {
user_store,
cx,
)
- })?;
+ });
room.update(&mut cx, |room, cx| {
room.leave_when_empty = room.channel_id.is_none();
room.apply_room_update(room_proto, cx)?;
anyhow::Ok(())
- })??;
+ })?;
Ok(room)
}
@@ -379,7 +379,7 @@ impl Room {
.update(cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
- })?;
+ });
// Wait for client to re-establish a connection to the server.
let executor = cx.background_executor().clone();
@@ -390,15 +390,11 @@ impl Room {
log::info!("client reconnected, attempting to rejoin room");
let Some(this) = this.upgrade() else { break };
- match this.update(cx, |this, cx| this.rejoin(cx)) {
- Ok(task) => {
- if task.await.log_err().is_some() {
- return true;
- } else {
- remaining_attempts -= 1;
- }
- }
- Err(_app_dropped) => return false,
+ let task = this.update(cx, |this, cx| this.rejoin(cx));
+ if task.await.log_err().is_some() {
+ return true;
+ } else {
+ remaining_attempts -= 1;
}
} else if client_status.borrow().is_signed_out() {
return false;
@@ -437,7 +433,7 @@ impl Room {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, leaving room");
- this.update(cx, |this, cx| this.leave(cx))?.await?;
+ this.update(cx, |this, cx| this.leave(cx)).await?;
}
anyhow::bail!("can't reconnect to room: client failed to re-establish connection");
}
@@ -665,7 +661,7 @@ impl Room {
mut cx: AsyncApp,
) -> Result<()> {
let room = envelope.payload.room.context("invalid room")?;
- this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
+ this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))
}
fn apply_room_update(&mut self, room: proto::Room, cx: &mut Context<Self>) -> Result<()> {
@@ -1203,7 +1199,7 @@ impl Room {
cx.spawn(async move |this, cx| {
let response = request.await?;
- project.update(cx, |project, cx| project.shared(response.project_id, cx))??;
+ project.update(cx, |project, cx| project.shared(response.project_id, cx))?;
// If the user's location is in this project, it changes from UnsharedProject to SharedProject.
this.update(cx, |this, cx| {
@@ -71,8 +71,8 @@ impl ChannelBuffer {
capability,
base_text,
)
- })?;
- buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
+ });
+ buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
let subscription = client.subscribe_to_entity(channel.id.0)?;
@@ -93,7 +93,7 @@ impl ChannelBuffer {
};
this.replace_collaborators(response.collaborators, cx);
this
- })?)
+ }))
}
fn release(&mut self, _: &mut App) {
@@ -168,7 +168,7 @@ impl ChannelBuffer {
cx.notify();
this.buffer
.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
- })?;
+ });
Ok(())
}
@@ -182,7 +182,8 @@ impl ChannelBuffer {
this.replace_collaborators(message.payload.collaborators, cx);
cx.emit(ChannelBufferEvent::CollaboratorsChanged);
cx.notify();
- })
+ });
+ Ok(())
}
fn on_buffer_update(
@@ -170,17 +170,14 @@ impl ChannelStore {
match status {
client::Status::Connected { .. } => {
this.update(cx, |this, cx| this.handle_connect(cx))
- .ok()?
.await
.log_err()?;
}
client::Status::SignedOut | client::Status::UpgradeRequired => {
- this.update(cx, |this, cx| this.handle_disconnect(false, cx))
- .ok();
+ this.update(cx, |this, cx| this.handle_disconnect(false, cx));
}
_ => {
- this.update(cx, |this, cx| this.handle_disconnect(true, cx))
- .ok();
+ this.update(cx, |this, cx| this.handle_disconnect(true, cx));
}
}
}
@@ -204,7 +201,7 @@ impl ChannelStore {
while let Some(update_channels) = update_channels_rx.next().await {
if let Some(this) = this.upgrade() {
let update_task = this
- .update(cx, |this, cx| this.update_channels(update_channels, cx))?;
+ .update(cx, |this, cx| this.update_channels(update_channels, cx));
if let Some(update_task) = update_task {
update_task.await.log_err();
}
@@ -814,7 +811,7 @@ impl ChannelStore {
this.update_channels_tx
.unbounded_send(message.payload)
.unwrap();
- })?;
+ });
Ok(())
}
@@ -841,7 +838,8 @@ impl ChannelStore {
.set_role(role)
}
}
- })
+ });
+ Ok(())
}
fn handle_connect(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
@@ -965,8 +963,7 @@ impl ChannelStore {
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
}
}
- })
- .ok();
+ });
}
})
});
@@ -343,7 +343,7 @@ impl ClientCredentialsProvider {
}
fn server_url(&self, cx: &AsyncApp) -> Result<String> {
- cx.update(|cx| ClientSettings::get_global(cx).server_url.clone())
+ Ok(cx.update(|cx| ClientSettings::get_global(cx).server_url.clone()))
}
/// Reads the credentials from the provider.
@@ -934,10 +934,10 @@ impl Client {
let connect_task = cx.update({
let cloud_client = self.cloud_client.clone();
move |cx| cloud_client.connect(cx)
- })??;
+ })?;
let connection = connect_task.await?;
- let (mut messages, task) = cx.update(|cx| connection.spawn(cx))?;
+ let (mut messages, task) = cx.update(|cx| connection.spawn(cx));
task.detach();
cx.spawn({
@@ -977,8 +977,7 @@ impl Client {
}
})
.detach();
- })
- .log_err();
+ });
let credentials = self.sign_in(try_provider, cx).await?;
@@ -1003,8 +1002,7 @@ impl Client {
}
})
.detach_and_log_err(cx);
- })
- .log_err();
+ });
Ok(())
}
@@ -1249,14 +1247,8 @@ impl Client {
credentials: &Credentials,
cx: &AsyncApp,
) -> Task<Result<Connection, EstablishConnectionError>> {
- let release_channel = cx
- .update(|cx| ReleaseChannel::try_global(cx))
- .ok()
- .flatten();
- let app_version = cx
- .update(|cx| AppVersion::global(cx).to_string())
- .ok()
- .unwrap_or_default();
+ let release_channel = cx.update(|cx| ReleaseChannel::try_global(cx));
+ let app_version = cx.update(|cx| AppVersion::global(cx).to_string());
let http = self.http.clone();
let proxy = http.proxy().cloned();
@@ -1293,7 +1285,7 @@ impl Client {
None => Box::new(TcpStream::connect(rpc_host).await?),
})
}
- })?
+ })
.await?;
log::info!("connected to rpc endpoint {}", rpc_url);
@@ -1361,12 +1353,12 @@ impl Client {
let (open_url_tx, open_url_rx) = oneshot::channel::<String>();
cx.update(|cx| {
cx.spawn(async move |cx| {
- let url = open_url_rx.await?;
- cx.update(|cx| cx.open_url(&url))
+ if let Ok(url) = open_url_rx.await {
+ cx.update(|cx| cx.open_url(&url));
+ }
})
- .detach_and_log_err(cx);
- })
- .log_err();
+ .detach();
+ });
let credentials = background
.clone()
@@ -1468,7 +1460,7 @@ impl Client {
})
.await?;
- cx.update(|cx| cx.activate(true))?;
+ cx.update(|cx| cx.activate(true));
Ok(credentials)
})
}
@@ -1687,8 +1679,7 @@ impl Client {
for handler in self.message_to_client_handlers.lock().iter() {
handler(&message, cx);
}
- })
- .ok();
+ });
}
pub fn telemetry(&self) -> &Arc<Telemetry> {
@@ -2101,7 +2092,7 @@ mod tests {
let (done_tx2, done_rx2) = smol::channel::unbounded();
AnyProtoClient::from(client.clone()).add_entity_message_handler(
move |entity: Entity<TestEntity>, _: TypedEnvelope<proto::JoinProject>, cx| {
- match entity.read_with(&cx, |entity, _| entity.id).unwrap() {
+ match entity.read_with(&cx, |entity, _| entity.id) {
1 => done_tx1.try_send(()).unwrap(),
2 => done_tx2.try_send(()).unwrap(),
_ => unreachable!(),
@@ -256,7 +256,7 @@ impl UserStore {
} else {
anyhow::Ok(())
}
- })??;
+ })?;
this.update(cx, |_, cx| cx.notify())?;
}
@@ -299,7 +299,7 @@ impl UserStore {
_: TypedEnvelope<proto::ShowContacts>,
mut cx: AsyncApp,
) -> Result<()> {
- this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts))?;
+ this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts));
Ok(())
}
@@ -312,7 +312,7 @@ impl UserStore {
this.update_contacts_tx
.unbounded_send(UpdateContacts::Update(message.payload))
.unwrap();
- })?;
+ });
Ok(())
}
@@ -353,7 +353,7 @@ impl UserStore {
let mut incoming_requests = Vec::new();
for request in message.incoming_requests {
incoming_requests.push({
- this.update(cx, |this, cx| this.get_user(request.requester_id, cx))?
+ this.update(cx, |this, cx| this.get_user(request.requester_id, cx))
.await?
});
}
@@ -361,7 +361,7 @@ impl UserStore {
let mut outgoing_requests = Vec::new();
for requested_user_id in message.outgoing_requests {
outgoing_requests.push(
- this.update(cx, |this, cx| this.get_user(requested_user_id, cx))?
+ this.update(cx, |this, cx| this.get_user(requested_user_id, cx))
.await?,
);
}
@@ -428,7 +428,7 @@ impl UserStore {
}
cx.notify();
- })?;
+ });
Ok(())
})
@@ -798,7 +798,7 @@ impl UserStore {
this.read_with(cx, |this, _cx| {
this.client.upgrade().map(|client| client.cloud_client())
})
- })??
+ })?
.ok_or(anyhow::anyhow!("Failed to get Cloud client"))?;
let response = cloud_client.get_authenticated_user().await?;
@@ -806,7 +806,7 @@ impl UserStore {
this.update(cx, |this, cx| {
this.update_authenticated_user(response, cx);
})
- })??;
+ })?;
}
}
@@ -914,7 +914,7 @@ impl Contact {
let user = user_store
.update(cx, |user_store, cx| {
user_store.get_user(contact.user_id, cx)
- })?
+ })
.await?;
Ok(Self {
user,
@@ -283,7 +283,7 @@ impl EditPredictionDelegate for CodestralEditPredictionDelegate {
let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
vec![(cursor_position..cursor_position, completion_text.into())].into();
let edit_preview = buffer
- .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
+ .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))
.await;
this.update(cx, |this, cx| {
@@ -800,7 +800,6 @@ impl RandomizedTest for ProjectCollaborationTest {
assert!(
buffer
.read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
- .expect("App should not be dropped")
.observed_all(&requested_version)
);
anyhow::Ok(())
@@ -173,7 +173,7 @@ impl ChannelView {
};
buffer.set_language(Some(markdown), cx);
})
- })?;
+ });
cx.new_window_entity(|window, cx| {
let mut this = Self::new(
@@ -2179,7 +2179,7 @@ impl CollabPanel {
cx.spawn_in(window, async move |this, cx| {
if answer.await? == 0 {
channel_store
- .update(cx, |channels, _| channels.remove_channel(channel_id))?
+ .update(cx, |channels, _| channels.remove_channel(channel_id))
.await
.notify_async_err(cx);
this.update_in(cx, |_, window, cx| cx.focus_self(window))
@@ -2213,7 +2213,7 @@ impl CollabPanel {
cx.spawn_in(window, async move |_, cx| {
if answer.await? == 0 {
user_store
- .update(cx, |store, cx| store.remove_contact(user_id, cx))?
+ .update(cx, |store, cx| store.remove_contact(user_id, cx))
.await
.notify_async_err(cx);
}
@@ -24,27 +24,20 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
}
if let Some(incoming_call) = incoming_call {
- let unique_screens = cx.update(|cx| cx.displays()).unwrap();
+ let unique_screens = cx.update(|cx| cx.displays());
let window_size = gpui::Size {
width: px(400.),
height: px(72.),
};
for screen in unique_screens {
- if let Some(options) = cx
- .update(|cx| notification_window_options(screen, window_size, cx))
- .log_err()
- {
- let window = cx
- .open_window(options, |_, cx| {
- cx.new(|_| {
- IncomingCallNotification::new(
- incoming_call.clone(),
- app_state.clone(),
- )
- })
- })
- .unwrap();
+ let options =
+ cx.update(|cx| notification_window_options(screen, window_size, cx));
+ if let Ok(window) = cx.open_window(options, |_, cx| {
+ cx.new(|_| {
+ IncomingCallNotification::new(incoming_call.clone(), app_state.clone())
+ })
+ }) {
notification_windows.push(window);
}
}
@@ -88,8 +81,7 @@ impl IncomingCallNotificationState {
)
.detach_and_log_err(cx);
}
- })
- .log_err();
+ });
}
anyhow::Ok(())
})
@@ -222,16 +222,12 @@ impl McpServer {
} else if let Some(handler) = handlers.borrow().get(&request.method.as_ref()) {
let outgoing_tx = outgoing_tx.clone();
- if let Some(task) = cx
- .update(|cx| handler(request_id, request.params, cx))
- .log_err()
- {
- cx.spawn(async move |_| {
- let response = task.await;
- outgoing_tx.unbounded_send(response).ok();
- })
- .detach();
- }
+ let task = cx.update(|cx| handler(request_id, request.params, cx));
+ cx.spawn(async move |_| {
+ let response = task.await;
+ outgoing_tx.unbounded_send(response).ok();
+ })
+ .detach();
} else {
Self::send_err(
request_id,
@@ -31,7 +31,7 @@ impl StdioTransport {
working_directory: &Option<PathBuf>,
cx: &AsyncApp,
) -> Result<Self> {
- let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?;
+ let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone());
let builder = ShellBuilder::new(&shell, cfg!(windows)).non_interactive();
let mut command =
builder.build_smol_command(Some(binary.executable.display().to_string()), &binary.args);
@@ -546,7 +546,7 @@ impl Copilot {
let mut params = server.default_initialize_params(false, cx);
params.initialization_options = Some(editor_info_json);
server.initialize(params, configuration.into(), cx)
- })?
+ })
.await?;
this.update(cx, |_, cx| notify_did_change_config_to_server(&server, cx))?
@@ -624,8 +624,6 @@ impl CopilotChat {
) -> Result<(Arc<dyn HttpClient>, ApiToken, CopilotChatConfiguration)> {
let this = cx
.update(|cx| Self::global(cx))
- .ok()
- .flatten()
.context("Copilot chat is not enabled")?;
let (oauth_token, api_token, client, configuration) = this.read_with(cx, |this, _| {
@@ -635,7 +633,7 @@ impl CopilotChat {
this.client.clone(),
this.configuration.clone(),
)
- })?;
+ });
let oauth_token = oauth_token.context("No OAuth token available")?;
@@ -648,7 +646,7 @@ impl CopilotChat {
this.update(cx, |this, cx| {
this.api_token = Some(token.clone());
cx.notify();
- })?;
+ });
token
}
};
@@ -75,14 +75,12 @@ impl EditPredictionDelegate for CopilotEditPredictionDelegate {
let completions = copilot
.update(cx, |copilot, cx| {
copilot.completions(&buffer, cursor_position, cx)
- })?
+ })
.await?;
if let Some(mut completion) = completions.into_iter().next()
- && let Some((trimmed_range, trimmed_text, snapshot)) = cx
- .update(|cx| trim_completion(&completion, cx))
- .ok()
- .flatten()
+ && let Some((trimmed_range, trimmed_text, snapshot)) =
+ cx.update(|cx| trim_completion(&completion, cx))
{
let preview = buffer
.update(cx, |this, cx| {
@@ -90,7 +88,7 @@ impl EditPredictionDelegate for CopilotEditPredictionDelegate {
Arc::from([(trimmed_range.clone(), trimmed_text.clone())].as_slice()),
cx,
)
- })?
+ })
.await;
this.update(cx, |this, cx| {
this.pending_refresh = None;
@@ -92,7 +92,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
url: &'a str,
cx: &'a AsyncApp,
) -> Pin<Box<dyn Future<Output = Result<Option<(String, Vec<u8>)>>> + 'a>> {
- async move { cx.update(|cx| cx.read_credentials(url))?.await }.boxed_local()
+ async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local()
}
fn write_credentials<'a>(
@@ -103,7 +103,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
cx: &'a AsyncApp,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
async move {
- cx.update(move |cx| cx.write_credentials(url, username, password))?
+ cx.update(move |cx| cx.write_credentials(url, username, password))
.await
}
.boxed_local()
@@ -114,7 +114,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
url: &'a str,
cx: &'a AsyncApp,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
- async move { cx.update(move |cx| cx.delete_credentials(url))?.await }.boxed_local()
+ async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local()
}
}
@@ -178,9 +178,7 @@ impl TransportDelegate {
self.tasks.lock().clear();
let log_dap_communications =
- cx.update(|cx| DebuggerSettings::get_global(cx).log_dap_communications)
- .with_context(|| "Failed to get Debugger Setting log dap communications error in transport::start_handlers. Defaulting to false")
- .unwrap_or(false);
+ cx.update(|cx| DebuggerSettings::get_global(cx).log_dap_communications);
let connect = self.transport.lock().connect();
let (input, output) = connect.await?;
@@ -550,10 +548,9 @@ impl TcpTransport {
process = Some(p);
};
- let timeout = connection_args.timeout.unwrap_or_else(|| {
- cx.update(|cx| DebuggerSettings::get_global(cx).timeout)
- .unwrap_or(20000u64)
- });
+ let timeout = connection_args
+ .timeout
+ .unwrap_or_else(|| cx.update(|cx| DebuggerSettings::get_global(cx).timeout));
log::info!(
"Debug adapter has connected to TCP server {}:{}",
@@ -155,7 +155,7 @@ impl LogStore {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
this.add_debug_adapter_message(message, cx);
- })?;
+ });
}
smol::future::yield_now().await;
@@ -170,7 +170,7 @@ impl LogStore {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
this.add_debug_adapter_log(message, cx);
- })?;
+ });
}
smol::future::yield_now().await;
@@ -902,10 +902,10 @@ impl DapLogView {
let language = language.await.ok();
buffer.update(cx, |buffer, cx| {
buffer.set_language(language, cx);
- })
+ });
}
})
- .detach_and_log_err(cx);
+ .detach();
});
self.editor = editor;
@@ -268,7 +268,7 @@ impl DebugPanel {
dap_store
.update(cx, |dap_store, cx| {
dap_store.boot_session(session.clone(), definition, worktree, cx)
- })?
+ })
.await
}
});
@@ -286,7 +286,7 @@ impl DebugPanel {
.unbounded_send(format!("error: {:#}", error))
.ok();
session.shutdown(cx)
- })?
+ })
.await;
}
anyhow::Ok(())
@@ -404,7 +404,7 @@ impl DebugPanel {
session.boot(binary, worktree, dap_store_handle.downgrade(), cx)
});
(session, task)
- })?;
+ });
Self::register_session(this.clone(), session.clone(), true, cx).await?;
if let Err(error) = task.await {
@@ -418,7 +418,7 @@ impl DebugPanel {
))
.ok();
session.shutdown(cx)
- })?
+ })
.await;
return Err(error);
@@ -466,11 +466,10 @@ impl DebugPanel {
session.boot(binary, worktree, dap_store_handle.downgrade(), cx)
});
(session, task)
- })?;
+ });
// Focus child sessions if the parent has never emitted a stopped event;
// this improves our JavaScript experience, as it always spawns a "main" session that then spawns subsessions.
- let parent_ever_stopped =
- parent_session.update(cx, |this, _| this.has_ever_stopped())?;
+ let parent_ever_stopped = parent_session.update(cx, |this, _| this.has_ever_stopped());
Self::register_session(this, session, !parent_ever_stopped, cx).await?;
task.await
})
@@ -517,7 +516,7 @@ impl DebugPanel {
return;
}
}
- session.update(cx, |session, cx| session.shutdown(cx)).ok();
+ session.update(cx, |session, cx| session.shutdown(cx));
this.update(cx, |this, cx| {
this.retain_sessions(|other| entity_id != other.entity_id());
if let Some(active_session_id) = this
@@ -1443,7 +1442,7 @@ async fn register_session_inner(
session: Entity<Session>,
cx: &mut AsyncWindowContext,
) -> Result<Entity<DebugSession>> {
- let adapter_name = session.read_with(cx, |session, _| session.adapter())?;
+ let adapter_name = session.read_with(cx, |session, _| session.adapter());
this.update_in(cx, |_, window, cx| {
cx.subscribe_in(
&session,
@@ -185,7 +185,7 @@ impl NewProcessModal {
.collect::<Vec<_>>();
let Some(task_inventory) = task_store
- .update(cx, |task_store, _| task_store.task_inventory().cloned())?
+ .update(cx, |task_store, _| task_store.task_inventory().cloned())
else {
return Ok(());
};
@@ -194,7 +194,7 @@ impl NewProcessModal {
.update(cx, |task_inventory, cx| {
task_inventory
.used_and_current_resolved_tasks(task_contexts.clone(), cx)
- })?
+ })
.await;
if let Ok(task) = debug_picker.update(cx, |picker, cx| {
@@ -1113,7 +1113,7 @@ impl RunningState {
task_with_shell.clone(),
cx,
)
- })?.await?;
+ }).await?;
let terminal_view = cx.new_window_entity(|window, cx| {
TerminalView::new(
@@ -1135,7 +1135,7 @@ impl RunningState {
})?;
let exit_status = terminal
- .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
+ .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))
.await
.context("Failed to wait for completed task")?;
@@ -1302,7 +1302,7 @@ impl RunningState {
.pid()
.map(|pid| pid.as_u32())
.context("Terminal was spawned but PID was not available")
- })?
+ })
});
cx.background_spawn(async move { anyhow::Ok(sender.send(terminal_task.await).await?) })
@@ -151,7 +151,7 @@ impl BreakpointList {
.update(cx, |this, cx| this.find_or_create_worktree(path, false, cx));
cx.spawn_in(window, async move |this, cx| {
let (worktree, relative_path) = task.await?;
- let worktree_id = worktree.read_with(cx, |this, _| this.id())?;
+ let worktree_id = worktree.read_with(cx, |this, _| this.id());
let item = this
.update_in(cx, |this, window, cx| {
this.workspace.update(cx, |this, cx| {
@@ -428,7 +428,7 @@ impl StackFrameList {
.await?;
let position = buffer.read_with(cx, |this, _| {
this.snapshot().anchor_after(PointUtf16::new(row, 0))
- })?;
+ });
this.update_in(cx, |this, window, cx| {
this.workspace.update(cx, |workspace, cx| {
let project_path = buffer
@@ -183,13 +183,13 @@ impl StackTraceView {
.await?;
let project_path = ProjectPath {
- worktree_id: worktree.read_with(cx, |tree, _| tree.id())?,
+ worktree_id: worktree.read_with(cx, |tree, _| tree.id()),
path: relative_path,
};
if let Some(buffer) = this
.read_with(cx, |this, _| this.project.clone())?
- .update(cx, |project, cx| project.open_buffer(project_path, cx))?
+ .update(cx, |project, cx| project.open_buffer(project_path, cx))
.await
.log_err()
{
@@ -360,7 +360,7 @@ impl ProjectDiagnosticsEditor {
};
if let Some(buffer) = project_handle
- .update(cx, |project, cx| project.open_buffer(path.clone(), cx))?
+ .update(cx, |project, cx| project.open_buffer(path.clone(), cx))
.await
.log_err()
{
@@ -1088,9 +1088,8 @@ async fn heuristic_syntactic_expand(
return Some(node_row_range);
} else if node_name.ends_with("statement") || node_name.ends_with("declaration") {
// Expand to the nearest dedent or blank line for statements and declarations.
- let tab_size = cx
- .update(|cx| snapshot.settings_at(node_range.start, cx).tab_size.get())
- .ok()?;
+ let tab_size =
+ cx.update(|cx| snapshot.settings_at(node_range.start, cx).tab_size.get());
let indent_level = snapshot
.line_indent_for_row(node_range.start.row)
.len(tab_size);
@@ -153,19 +153,19 @@ async fn collect_snapshots(
.filter(|path| path.worktree_id == worktree_id)?;
let relative_path: Arc<Path> = project_path.path.as_std_path().into();
Some((project_path, relative_path))
- })? {
+ }) {
if let hash_map::Entry::Vacant(entry) = snapshots_by_path.entry(relative_path) {
let buffer = project
.update(cx, |project, cx| {
project.open_buffer(project_path.clone(), cx)
- })?
+ })
.await?;
let diff = git_store
.update(cx, |git_store, cx| {
git_store.open_uncommitted_diff(buffer.clone(), cx)
- })?
+ })
.await?;
- let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?;
+ let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));
entry.insert((stored_event.old_snapshot.clone(), diff_snapshot));
}
}
@@ -1841,7 +1841,7 @@ impl EditPredictionStore {
worktree_id: file.worktree_id(cx),
path: file.path().clone(),
})
- })?;
+ });
let buffer_task = project.update(cx, |project, cx| {
let (path, _, _) = project
@@ -1862,7 +1862,7 @@ impl EditPredictionStore {
})?;
Some(project.open_buffer(path, cx))
- })?;
+ });
if let Some(buffer_task) = buffer_task {
let closest_buffer = buffer_task.await?;
@@ -1874,7 +1874,7 @@ impl EditPredictionStore {
.into_iter()
.min_by_key(|entry| entry.diagnostic.severity)
.map(|entry| entry.range.start)
- })?
+ })
.map(|position| (closest_buffer, position));
}
}
@@ -1973,8 +1973,7 @@ impl EditPredictionStore {
})
},
);
- })
- .ok();
+ });
}
Err(err)
}
@@ -49,16 +49,14 @@ impl EditPredictionResult {
};
}
- let Some((edits, snapshot, edit_preview_task)) = edited_buffer
- .read_with(cx, |buffer, cx| {
+ let Some((edits, snapshot, edit_preview_task)) =
+ edited_buffer.read_with(cx, |buffer, cx| {
let new_snapshot = buffer.snapshot();
let edits: Arc<[_]> =
interpolate_edits(&edited_buffer_snapshot, &new_snapshot, &edits)?.into();
Some((edits.clone(), new_snapshot, buffer.preview_edits(edits, cx)))
})
- .ok()
- .flatten()
else {
return Self {
id,
@@ -32,7 +32,7 @@ pub async fn apply_diff(
cx: &mut AsyncApp,
) -> Result<OpenedBuffers> {
let worktree = project
- .read_with(cx, |project, cx| project.visible_worktrees(cx).next())?
+ .read_with(cx, |project, cx| project.visible_worktrees(cx).next())
.context("project has no worktree")?;
let paths: Vec<_> = diff_str
@@ -65,7 +65,7 @@ pub async fn apply_diff(
} else {
None
}
- })?;
+ });
if let Some(delete_task) = delete_task {
delete_task.await?;
@@ -79,20 +79,20 @@ pub async fn apply_diff(
let buffer = match included_files.entry(path.to_string()) {
Entry::Occupied(entry) => entry.get().clone(),
Entry::Vacant(entry) => {
- let buffer = if status == FileStatus::Created {
+ let buffer: Entity<Buffer> = if status == FileStatus::Created {
project
- .update(cx, |project, cx| project.create_buffer(true, cx))?
+ .update(cx, |project, cx| project.create_buffer(true, cx))
.await?
} else {
let project_path = project
.update(cx, |project, cx| {
project.find_project_path(path.as_ref(), cx)
- })?
+ })
.with_context(|| format!("no such path: {}", path))?;
project
.update(cx, |project, cx| {
project.open_buffer(project_path, cx)
- })?
+ })
.await?
};
entry.insert(buffer.clone());
@@ -111,7 +111,7 @@ pub async fn apply_diff(
.with_context(|| format!("Diff:\n{diff_str}"))?,
);
anyhow::Ok(())
- })??;
+ })?;
}
DiffEvent::FileEnd { renamed_to } => {
let buffer = current_file
@@ -135,14 +135,14 @@ pub async fn apply_diff(
new_project_path,
cx,
))
- })??
+ })?
.await?;
}
let edits = mem::take(&mut edits);
buffer.update(cx, |buffer, cx| {
buffer.edit(edits, None, cx);
- })?;
+ });
}
}
}
@@ -174,7 +174,7 @@ pub async fn refresh_worktree_entries(
.as_local()
.unwrap()
.refresh_entries_for_paths(rel_paths)
- })?
+ })
.recv()
.await;
}
@@ -177,8 +177,7 @@ pub(crate) fn request_prediction_with_zeta1(
})
},
);
- })
- .ok();
+ });
}
return Err(err);
@@ -11,7 +11,7 @@ use edit_prediction::{
EditPredictionStore,
zeta2::{zeta2_output_for_patch, zeta2_prompt_input},
};
-use gpui::AsyncApp;
+use gpui::{AsyncApp, Entity};
use std::sync::Arc;
use zeta_prompt::format_zeta_prompt;
@@ -46,36 +46,37 @@ pub async fn run_format_prompt(
step_progress.set_substatus("formatting zeta2 prompt");
- let ep_store = cx.update(|cx| {
+ let ep_store: Entity<EditPredictionStore> = cx.update(|cx| {
EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized")
- })??;
+ })?;
let state = example.state.as_ref().context("state must be set")?;
- let snapshot = state.buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
+ let snapshot = state.buffer.read_with(&cx, |buffer, _| buffer.snapshot());
let project = state.project.clone();
- let (_, input) = ep_store.update(&mut cx, |ep_store, cx| {
- let events = ep_store
- .edit_history_for_project(&project, cx)
- .into_iter()
- .map(|e| e.event)
- .collect();
- anyhow::Ok(zeta2_prompt_input(
- &snapshot,
- example
- .context
- .as_ref()
- .context("context must be set")?
- .files
- .clone(),
- events,
- example.spec.cursor_path.clone(),
- example
- .buffer
- .as_ref()
- .context("buffer must be set")?
- .cursor_offset,
- ))
- })??;
+ let (_, input) =
+ ep_store.update(&mut cx, |ep_store: &mut EditPredictionStore, cx| {
+ let events = ep_store
+ .edit_history_for_project(&project, cx)
+ .into_iter()
+ .map(|e| e.event)
+ .collect();
+ anyhow::Ok(zeta2_prompt_input(
+ &snapshot,
+ example
+ .context
+ .as_ref()
+ .context("context must be set")?
+ .files
+ .clone(),
+ events,
+ example.spec.cursor_path.clone(),
+ example
+ .buffer
+ .as_ref()
+ .context("buffer must be set")?
+ .cursor_offset,
+ ))
+ })?;
let prompt = format_zeta_prompt(&input);
let expected_output = zeta2_output_for_patch(
&input,
@@ -36,7 +36,7 @@ pub async fn run_load_project(
let (buffer, cursor_position) =
cursor_position(example, &project, &open_buffers, &mut cx).await?;
buffer
- .read_with(&cx, |buffer, _| buffer.parsing_idle())?
+ .read_with(&cx, |buffer, _| buffer.parsing_idle())
.await;
let (example_buffer, language_name) = buffer.read_with(&cx, |buffer, _cx| {
let cursor_point = cursor_position.to_point(&buffer);
@@ -64,7 +64,7 @@ pub async fn run_load_project(
},
language_name,
)
- })?;
+ });
progress.set_info(language_name, InfoStyle::Normal);
@@ -84,7 +84,7 @@ async fn cursor_position(
open_buffers: &OpenedBuffers,
cx: &mut AsyncApp,
) -> Result<(Entity<Buffer>, Anchor)> {
- let language_registry = project.read_with(cx, |project, _| project.languages().clone())?;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let result = language_registry
.load_language_for_file_path(&example.spec.cursor_path)
.await;
@@ -101,14 +101,14 @@ async fn cursor_position(
buffer.clone()
} else {
// Since the worktree scanner is disabled, manually refresh entries for the cursor path.
- if let Some(worktree) = project.read_with(cx, |project, cx| project.worktrees(cx).next())? {
+ if let Some(worktree) = project.read_with(cx, |project, cx| project.worktrees(cx).next()) {
refresh_worktree_entries(&worktree, [&*example.spec.cursor_path], cx).await?;
}
let cursor_path = project
.read_with(cx, |project, cx| {
project.find_project_path(&example.spec.cursor_path, cx)
- })?
+ })
.with_context(|| {
format!(
"failed to find cursor path {}",
@@ -117,13 +117,13 @@ async fn cursor_position(
})?;
project
- .update(cx, |project, cx| project.open_buffer(cursor_path, cx))?
+ .update(cx, |project, cx| project.open_buffer(cursor_path, cx))
.await?
};
let (cursor_excerpt, cursor_offset_within_excerpt) = example.spec.cursor_excerpt()?;
- let excerpt_offset = cursor_buffer.read_with(cx, |buffer, _cx| {
+ let excerpt_offset = cursor_buffer.read_with(&*cx, |buffer, _cx| {
let text = buffer.text();
let mut matches = text.match_indices(&cursor_excerpt);
@@ -139,11 +139,11 @@ async fn cursor_position(
&example.spec.name
);
Ok(excerpt_offset)
- })??;
+ })?;
let cursor_offset = excerpt_offset + cursor_offset_within_excerpt;
let cursor_anchor =
- cursor_buffer.read_with(cx, |buffer, _| buffer.anchor_after(cursor_offset))?;
+ cursor_buffer.read_with(&*cx, |buffer, _| buffer.anchor_after(cursor_offset));
Ok((cursor_buffer, cursor_anchor))
}
@@ -155,7 +155,7 @@ async fn setup_project(
cx: &mut AsyncApp,
) -> Result<Entity<Project>> {
let ep_store = cx
- .update(|cx| EditPredictionStore::try_global(cx))?
+ .update(|cx| EditPredictionStore::try_global(cx))
.context("Store should be initialized at init")?;
let worktree_path = setup_worktree(example, step_progress).await?;
@@ -163,16 +163,13 @@ async fn setup_project(
if let Some(project) = app_state.project_cache.get(&example.spec.repository_url) {
ep_store.update(cx, |ep_store, _| {
ep_store.clear_history_for_project(&project);
- })?;
- let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?;
+ });
+ let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone());
let buffers = buffer_store.read_with(cx, |buffer_store, _| {
buffer_store.buffers().collect::<Vec<_>>()
- })?;
+ });
for buffer in buffers {
- buffer
- .update(cx, |buffer, cx| buffer.reload(cx))?
- .await
- .ok();
+ buffer.update(cx, |buffer, cx| buffer.reload(cx)).await.ok();
}
return Ok(project);
}
@@ -188,20 +185,20 @@ async fn setup_project(
false,
cx,
)
- })?;
+ });
project
.update(cx, |project, cx| {
project.disable_worktree_scanner(cx);
project.create_worktree(&worktree_path, true, cx)
- })?
+ })
.await?;
app_state
.project_cache
.insert(example.spec.repository_url.clone(), project.clone());
- let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?;
+ let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone());
cx.subscribe(&buffer_store, {
let project = project.clone();
move |_, event, cx| match event {
@@ -210,7 +207,7 @@ async fn setup_project(
}
_ => {}
}
- })?
+ })
.detach();
Ok(project)
@@ -78,9 +78,9 @@ pub async fn run_prediction(
.await;
}
- let ep_store = cx.update(|cx| {
- EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized")
- })??;
+ let ep_store = cx
+ .update(|cx| EditPredictionStore::try_global(cx))
+ .context("EditPredictionStore not initialized")?;
ep_store.update(&mut cx, |store, _cx| {
let model = match provider {
@@ -93,7 +93,7 @@ pub async fn run_prediction(
}
};
store.set_edit_prediction_model(model);
- })?;
+ });
step_progress.set_substatus("configuring model");
let state = example.state.as_ref().context("state must be set")?;
let run_dir = RUN_DIR.join(&example.spec.name);
@@ -101,8 +101,7 @@ pub async fn run_prediction(
let updated_example = Arc::new(Mutex::new(example.clone()));
let current_run_ix = Arc::new(AtomicUsize::new(0));
- let mut debug_rx =
- ep_store.update(&mut cx, |store, cx| store.debug_info(&state.project, cx))?;
+ let mut debug_rx = ep_store.update(&mut cx, |store, cx| store.debug_info(&state.project, cx));
let debug_task = cx.background_spawn({
let updated_example = updated_example.clone();
let current_run_ix = current_run_ix.clone();
@@ -185,7 +184,7 @@ pub async fn run_prediction(
cloud_llm_client::PredictEditsRequestTrigger::Cli,
cx,
)
- })?
+ })
.await?;
let actual_patch = prediction
@@ -219,7 +218,7 @@ pub async fn run_prediction(
ep_store.update(&mut cx, |store, _| {
store.remove_project(&state.project);
- })?;
+ });
debug_task.await?;
*example = Arc::into_inner(updated_example)
@@ -34,19 +34,19 @@ pub async fn run_context_retrieval(
let _lsp_handle = project.update(&mut cx, |project, cx| {
project.register_buffer_with_language_servers(&state.buffer, cx)
- })?;
+ });
wait_for_language_servers_to_start(&project, &state.buffer, &step_progress, &mut cx).await?;
- let ep_store = cx.update(|cx| {
- EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized")
- })??;
+ let ep_store = cx
+ .update(|cx| EditPredictionStore::try_global(cx))
+ .context("EditPredictionStore not initialized")?;
let mut events = ep_store.update(&mut cx, |store, cx| {
store.register_buffer(&state.buffer, &project, cx);
store.set_use_context(true);
store.refresh_context(&project, &state.buffer, state.cursor_position, cx);
store.debug_info(&project, cx)
- })?;
+ });
while let Some(event) = events.next().await {
match event {
@@ -58,7 +58,7 @@ pub async fn run_context_retrieval(
}
let context_files =
- ep_store.update(&mut cx, |store, cx| store.context_for_project(&project, cx))?;
+ ep_store.update(&mut cx, |store, cx| store.context_for_project(&project, cx));
let excerpt_count: usize = context_files.iter().map(|f| f.excerpts.len()).sum();
step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal);
@@ -75,10 +75,10 @@ async fn wait_for_language_servers_to_start(
step_progress: &Arc<StepProgress>,
cx: &mut AsyncApp,
) -> anyhow::Result<()> {
- let lsp_store = project.read_with(cx, |project, _| project.lsp_store())?;
+ let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
- let (language_server_ids, mut starting_language_server_ids) = buffer
- .update(cx, |buffer, cx| {
+ let (language_server_ids, mut starting_language_server_ids) =
+ buffer.update(cx, |buffer, cx| {
lsp_store.update(cx, |lsp_store, cx| {
let ids = lsp_store.language_servers_for_local_buffer(buffer, cx);
let starting_ids = ids
@@ -88,8 +88,7 @@ async fn wait_for_language_servers_to_start(
.collect::<HashSet<_>>();
(ids, starting_ids)
})
- })
- .unwrap_or_default();
+ });
step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len()));
@@ -164,7 +163,7 @@ async fn wait_for_language_servers_to_start(
];
project
- .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
+ .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
.await?;
let mut pending_language_server_ids = lsp_store.read_with(cx, |lsp_store, _| {
@@ -178,7 +177,7 @@ async fn wait_for_language_servers_to_start(
.is_some_and(|status| status.has_pending_diagnostic_updates)
})
.collect::<HashSet<_>>()
- })?;
+ });
while !pending_language_server_ids.is_empty() {
futures::select! {
language_server_id = rx.next() => {
@@ -197,7 +197,7 @@ impl RelatedExcerptStore {
DefinitionTask::CacheMiss(task) => {
let locations = task.await.log_err()??;
let duration = start_time.elapsed();
- cx.update(|cx| {
+ Some(cx.update(|cx| {
(
identifier,
Arc::new(CacheEntry {
@@ -210,8 +210,7 @@ impl RelatedExcerptStore {
}),
Some(duration),
)
- })
- .ok()
+ }))
}
}
})
@@ -280,12 +279,12 @@ async fn rebuild_related_files(
if let hash_map::Entry::Vacant(e) = snapshots.entry(definition.buffer.entity_id()) {
definition
.buffer
- .read_with(cx, |buffer, _| buffer.parsing_idle())?
+ .read_with(cx, |buffer, _| buffer.parsing_idle())
.await;
e.insert(
definition
.buffer
- .read_with(cx, |buffer, _| buffer.snapshot())?,
+ .read_with(cx, |buffer, _| buffer.snapshot()),
);
}
let worktree_id = definition.path.worktree_id;
@@ -296,7 +295,7 @@ async fn rebuild_related_files(
if let Some(worktree) = project.worktree_for_id(worktree_id, cx) {
e.insert(worktree.read(cx).root_name().as_unix_str().to_string());
}
- })?;
+ });
}
}
}
@@ -188,7 +188,7 @@ impl EditPredictionContextView {
for (path, buffer, ranges) in paths {
multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
}
- })?;
+ });
editor.update_in(cx, |editor, window, cx| {
editor.move_to_beginning(&Default::default(), window, cx);
@@ -168,16 +168,19 @@ fn capture_example_as_markdown(
fs.create_dir(&dir).await.ok();
let mut path = dir.join(&example_spec.name.replace(' ', "--").replace(':', "-"));
path.set_extension("md");
- project.update(cx, |project, cx| project.open_local_buffer(&path, cx))
+ project
+ .update(cx, |project, cx| project.open_local_buffer(&path, cx))
+ .await?
} else {
- project.update(cx, |project, cx| project.create_buffer(false, cx))
- }?
- .await?;
+ project
+ .update(cx, |project, cx| project.create_buffer(false, cx))
+ .await?
+ };
buffer.update(cx, |buffer, cx| {
buffer.set_text(example_spec.to_markdown(), cx);
buffer.set_language(Some(markdown_language), cx);
- })?;
+ });
workspace_entity.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(
Box::new(
@@ -74,8 +74,7 @@ impl BlinkManager {
cx.spawn(async move |this, cx| {
Timer::after(interval).await;
if let Some(this) = this.upgrade() {
- this.update(cx, |this, cx| this.blink_cursors(epoch, cx))
- .ok();
+ this.update(cx, |this, cx| this.blink_cursors(epoch, cx));
}
})
.detach();
@@ -45,10 +45,10 @@ pub fn switch_source_header(
.map(|file| file.path())
.map(|path| path.display(PathStyle::local()).to_string())
.unwrap_or_else(|| "Unknown".to_string())
- })?;
+ });
let switch_source_header = if let Some((client, project_id)) = upstream_client {
- let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id())?;
+ let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
let request = proto::LspExtSwitchSourceHeader {
project_id,
buffer_id: buffer_id.to_proto(),
@@ -67,7 +67,7 @@ pub fn switch_source_header(
project::lsp_store::lsp_ext_command::SwitchSourceHeader,
cx,
)
- })?
+ })
.await
.with_context(|| {
format!("Switch source/header LSP request for path \"{source_file}\" failed")
@@ -5512,12 +5512,10 @@ impl Editor {
Some(cx.spawn_in(window, async move |editor, cx| {
if let Some(transaction) = on_type_formatting.await? {
if push_to_client_history {
- buffer
- .update(cx, |buffer, _| {
- buffer.push_transaction(transaction, Instant::now());
- buffer.finalize_last_transaction();
- })
- .ok();
+ buffer.update(cx, |buffer, _| {
+ buffer.push_transaction(transaction, Instant::now());
+ buffer.finalize_last_transaction();
+ });
}
editor.update(cx, |editor, cx| {
editor.refresh_document_highlights(cx);
@@ -6306,7 +6304,7 @@ impl Editor {
let project_transaction = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.apply_code_action(buffer_handle, command, false, cx)
- })?
+ })
.await
.context("applying post-completion command")?;
if let Some(workspace) = editor.read_with(cx, |editor, _| editor.workspace())? {
@@ -6713,7 +6711,7 @@ impl Editor {
.all(|range| {
excerpt_range.start <= range.start && excerpt_range.end >= range.end
})
- })?;
+ });
if all_edits_within_excerpt {
return Ok(());
@@ -6741,7 +6739,7 @@ impl Editor {
}
multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx);
multibuffer
- })?;
+ });
workspace.update_in(cx, |workspace, window, cx| {
let project = workspace.project().clone();
@@ -7101,13 +7099,9 @@ impl Editor {
.timer(Duration::from_millis(debounce))
.await;
- let highlights = if let Some(highlights) = cx
- .update(|cx| {
- provider.document_highlights(&cursor_buffer, cursor_buffer_position, cx)
- })
- .ok()
- .flatten()
- {
+ let highlights = if let Some(highlights) = cx.update(|cx| {
+ provider.document_highlights(&cursor_buffer, cursor_buffer_position, cx)
+ }) {
highlights.await.log_err()
} else {
None
@@ -16346,9 +16340,7 @@ impl Editor {
return;
};
- let hide_runnables = project
- .update(cx, |project, _| project.is_via_collab())
- .unwrap_or(true);
+ let hide_runnables = project.update(cx, |project, _| project.is_via_collab());
if hide_runnables {
return;
}
@@ -16531,11 +16523,9 @@ impl Editor {
let mut templates_with_tags = Vec::new();
if let Some(inventory) = inventory {
for RunnableTag(tag) in tags {
- let Ok(new_tasks) = inventory.update(cx, |inventory, cx| {
+ let new_tasks = inventory.update(cx, |inventory, cx| {
inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx)
- }) else {
- return templates_with_tags;
- };
+ });
templates_with_tags.extend(new_tasks.await.into_iter().filter(
move |(_, template)| {
template.tags.iter().any(|source_tag| source_tag == &tag)
@@ -17621,7 +17611,7 @@ impl Editor {
.clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left);
target_buffer.anchor_after(target_start)
..target_buffer.anchor_before(target_end)
- })?;
+ });
Location {
buffer: target_buffer_handle,
range,
@@ -17720,7 +17710,7 @@ impl Editor {
});
(locations, current_location_index)
- })?;
+ });
let Some(current_location_index) = current_location_index else {
// This indicates something has gone wrong, because we already
@@ -18459,27 +18449,27 @@ impl Editor {
}
};
- buffer
- .update(cx, |buffer, cx| {
- if let Some(transaction) = transaction
- && !buffer.is_singleton()
- {
- buffer.push_transaction(&transaction.0, cx);
- }
- cx.notify();
- })
- .ok();
+ buffer.update(cx, |buffer, cx| {
+ if let Some(transaction) = transaction
+ && !buffer.is_singleton()
+ {
+ buffer.push_transaction(&transaction.0, cx);
+ }
+ cx.notify();
+ });
if let Some(transaction_id_now) =
- buffer.read_with(cx, |b, cx| b.last_transaction_id(cx))?
+ buffer.read_with(cx, |b, cx| b.last_transaction_id(cx))
{
let has_new_transaction = transaction_id_prev != Some(transaction_id_now);
if has_new_transaction {
- _ = editor.update(cx, |editor, _| {
- editor
- .selection_history
- .insert_transaction(transaction_id_now, selections_prev);
- });
+ editor
+ .update(cx, |editor, _| {
+ editor
+ .selection_history
+ .insert_transaction(transaction_id_now, selections_prev);
+ })
+ .ok();
}
}
@@ -18527,17 +18517,15 @@ impl Editor {
}
transaction = apply_action.log_err().fuse() => transaction,
};
- buffer
- .update(cx, |buffer, cx| {
- // check if we need this
- if let Some(transaction) = transaction
- && !buffer.is_singleton()
- {
- buffer.push_transaction(&transaction.0, cx);
- }
- cx.notify();
- })
- .ok();
+ buffer.update(cx, |buffer, cx| {
+ // check if we need this
+ if let Some(transaction) = transaction
+ && !buffer.is_singleton()
+ {
+ buffer.push_transaction(&transaction.0, cx);
+ }
+ cx.notify();
+ });
Ok(())
})
}
@@ -18831,10 +18819,8 @@ impl Editor {
if let Some(debounce) = debounce {
cx.background_executor().timer(debounce).await;
}
- let Some(snapshot) = editor.upgrade().and_then(|editor| {
- editor
- .update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx))
- .ok()
+ let Some(snapshot) = editor.upgrade().map(|editor| {
+ editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx))
}) else {
return;
};
@@ -21217,19 +21203,14 @@ impl Editor {
anyhow::Result::<()>::Err(err).log_err();
if let Some(workspace) = workspace {
- workspace
- .update(cx, |workspace, cx| {
- struct OpenPermalinkToLine;
+ workspace.update(cx, |workspace, cx| {
+ struct OpenPermalinkToLine;
- workspace.show_toast(
- Toast::new(
- NotificationId::unique::<OpenPermalinkToLine>(),
- message,
- ),
- cx,
- )
- })
- .ok();
+ workspace.show_toast(
+ Toast::new(NotificationId::unique::<OpenPermalinkToLine>(), message),
+ cx,
+ )
+ });
}
}
})
@@ -24025,20 +24006,15 @@ fn update_uncommitted_diff_for_buffer(
});
cx.spawn(async move |cx| {
let diffs = future::join_all(tasks).await;
- if editor
- .read_with(cx, |editor, _cx| editor.temporary_diff_override)
- .unwrap_or(false)
- {
+ if editor.read_with(cx, |editor, _cx| editor.temporary_diff_override) {
return;
}
- buffer
- .update(cx, |buffer, cx| {
- for diff in diffs.into_iter().flatten() {
- buffer.add_diff(diff, cx);
- }
- })
- .ok();
+ buffer.update(cx, |buffer, cx| {
+ for diff in diffs.into_iter().flatten() {
+ buffer.add_diff(diff, cx);
+ }
+ });
})
}
@@ -25081,7 +25057,7 @@ impl SemanticsProvider for Entity<Project> {
snapshot.anchor_before(range.start)
..snapshot.anchor_after(range.end),
)
- })?
+ })
}
})
})
@@ -18251,7 +18251,7 @@ async fn test_on_type_formatting_is_applied_after_autoindent(cx: &mut TestAppCon
"fn c() {\n d()\n .\n}\n",
"OnTypeFormatting should triggered after autoindent applied"
)
- })?;
+ });
Ok(Some(vec![]))
}
@@ -533,9 +533,9 @@ impl GitBlame {
})
})
.collect::<Result<Vec<_>>>()
- })??;
+ })?;
let provider_registry =
- cx.update(|cx| GitHostingProviderRegistry::default_global(cx))?;
+ cx.update(|cx| GitHostingProviderRegistry::default_global(cx));
let (results, errors) = cx
.background_spawn({
async move {
@@ -475,7 +475,7 @@ pub(crate) fn find_url(
) -> Option<(Range<text::Anchor>, String)> {
const LIMIT: usize = 2048;
- let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()).ok()?;
+ let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
let offset = position.to_offset(&snapshot);
let mut token_start = offset;
@@ -535,9 +535,7 @@ pub(crate) fn find_url_from_range(
) -> Option<String> {
const LIMIT: usize = 2048;
- let Ok(snapshot) = buffer.read_with(&cx, |buffer, _| buffer.snapshot()) else {
- return None;
- };
+ let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
let start_offset = range.start.to_offset(&snapshot);
let end_offset = range.end.to_offset(&snapshot);
@@ -595,7 +593,7 @@ pub(crate) async fn find_file(
cx: &mut AsyncWindowContext,
) -> Option<(Range<text::Anchor>, ResolvedPath)> {
let project = project?;
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()).ok()?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let scope = snapshot.language_scope_at(position);
let (range, candidate_file_path) = surrounding_filename(&snapshot, position)?;
let candidate_len = candidate_file_path.len();
@@ -610,7 +608,6 @@ pub(crate) async fn find_file(
.update(cx, |project, cx| {
project.resolve_path_in_buffer(candidate_file_path, buffer, cx)
})
- .ok()?
.await
.filter(|s| s.is_file())
}
@@ -165,7 +165,7 @@ pub fn hover_at_inlay(
this.hover_state.diagnostic_popover = None;
})?;
- let language_registry = project.read_with(cx, |p, _| p.languages().clone())?;
+ let language_registry = project.read_with(cx, |p, _| p.languages().clone());
let blocks = vec![inlay_hover.tooltip];
let parsed_content =
parse_blocks(&blocks, Some(&language_registry), None, cx).await;
@@ -398,7 +398,7 @@ async fn update_editor_from_message(
.into_iter()
.map(|id| BufferId::new(id).map(|id| project.open_buffer_by_id(id, cx)))
.collect::<Result<Vec<_>>>()
- })??;
+ })?;
let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?;
// Update the editor's excerpts.
@@ -892,7 +892,7 @@ impl Item for Editor {
project
.update(cx, |project, cx| {
project.save_buffers(buffers_to_save.clone(), cx)
- })?
+ })
.await?;
}
@@ -938,15 +938,13 @@ impl Item for Editor {
this.update(cx, |editor, cx| {
editor.request_autoscroll(Autoscroll::fit(), cx)
})?;
- buffer
- .update(cx, |buffer, cx| {
- if let Some(transaction) = transaction
- && !buffer.is_singleton()
- {
- buffer.push_transaction(&transaction.0, cx);
- }
- })
- .ok();
+ buffer.update(cx, |buffer, cx| {
+ if let Some(transaction) = transaction
+ && !buffer.is_singleton()
+ {
+ buffer.push_transaction(&transaction.0, cx);
+ }
+ });
Ok(())
})
}
@@ -1103,7 +1101,7 @@ impl SerializableItem for Editor {
let project = project.clone();
async move |cx| {
let language_registry =
- project.read_with(cx, |project, _| project.languages().clone())?;
+ project.read_with(cx, |project, _| project.languages().clone());
let language = if let Some(language_name) = language {
// We don't fail here, because we'd rather not set the language if the name changed
@@ -1118,7 +1116,7 @@ impl SerializableItem for Editor {
// First create the empty buffer
let buffer = project
- .update(cx, |project, cx| project.create_buffer(true, cx))?
+ .update(cx, |project, cx| project.create_buffer(true, cx))
.await
.context("Failed to create buffer while deserializing editor")?;
@@ -1132,7 +1130,7 @@ impl SerializableItem for Editor {
if let Some(entry) = buffer.peek_undo_stack() {
buffer.forget_transaction(entry.transaction_id());
}
- })?;
+ });
cx.update(|window, cx| {
cx.new(|cx| {
@@ -1187,7 +1185,7 @@ impl SerializableItem for Editor {
if let Some(entry) = buffer.peek_undo_stack() {
buffer.forget_transaction(entry.transaction_id());
}
- })?;
+ });
}
cx.update(|window, cx| {
@@ -1229,7 +1227,7 @@ impl SerializableItem for Editor {
..
} => window.spawn(cx, async move |cx| {
let buffer = project
- .update(cx, |project, cx| project.create_buffer(true, cx))?
+ .update(cx, |project, cx| project.create_buffer(true, cx))
.await
.context("Failed to create buffer")?;
@@ -443,7 +443,7 @@ pub(crate) fn handle_from(
};
}
- let buffer_snapshot = buffer.read_with(cx, |buf, _| buf.snapshot()).ok()?;
+ let buffer_snapshot = buffer.read_with(cx, |buf, _| buf.snapshot());
let Some(edit_behavior_state) =
should_auto_close(&buffer_snapshot, &edited_ranges, &jsx_tag_auto_close_config)
@@ -567,11 +567,9 @@ pub(crate) fn handle_from(
}
}
- buffer
- .update(cx, |buffer, cx| {
- buffer.edit(edits, None, cx);
- })
- .ok()?;
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, None, cx);
+ });
if any_selections_need_update {
let multi_buffer_snapshot = this
@@ -99,9 +99,7 @@ pub(super) fn refresh_linked_ranges(
let cx = cx.to_async();
let highlights = async move {
let edits = linked_edits_task.await.log_err()?;
- let snapshot = cx
- .read_entity(&buffer, |buffer, _| buffer.snapshot())
- .ok()?;
+ let snapshot = cx.read_entity(&buffer, |buffer, _| buffer.snapshot());
let buffer_id = snapshot.remote_id();
// Find the range containing our current selection.
@@ -60,27 +60,22 @@ async fn lsp_task_context(
buffer: &Entity<Buffer>,
cx: &mut AsyncApp,
) -> Option<TaskContext> {
- let (worktree_store, environment) = project
- .read_with(cx, |project, _| {
- (project.worktree_store(), project.environment().clone())
- })
- .ok()?;
+ let (worktree_store, environment) = project.read_with(cx, |project, _| {
+ (project.worktree_store(), project.environment().clone())
+ });
- let worktree_abs_path = cx
- .update(|cx| {
- let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx));
+ let worktree_abs_path = cx.update(|cx| {
+ let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx));
- worktree_id
- .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx))
- .and_then(|worktree| worktree.read(cx).root_dir())
- })
- .ok()?;
+ worktree_id
+ .and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx))
+ .and_then(|worktree| worktree.read(cx).root_dir())
+ });
let project_env = environment
.update(cx, |environment, cx| {
environment.buffer_environment(buffer, &worktree_store, cx)
})
- .ok()?
.await;
Some(TaskContext {
@@ -126,19 +121,18 @@ pub fn lsp_tasks(
let source_kind = match buffer.update(cx, |buffer, _| {
buffer.language().map(|language| language.name())
}) {
- Ok(Some(language_name)) => TaskSourceKind::Lsp {
+ Some(language_name) => TaskSourceKind::Lsp {
server: server_id,
language_name: SharedString::from(language_name),
},
- Ok(None) => continue,
- Err(_) => return Vec::new(),
+ None => continue,
};
let id_base = source_kind.to_id_base();
let lsp_buffer_context = lsp_task_context(&project, &buffer, cx)
.await
.unwrap_or_default();
- if let Ok(runnables_task) = project.update(cx, |project, cx| {
+ let runnables_task = project.update(cx, |project, cx| {
let buffer_id = buffer.read(cx).remote_id();
project.request_lsp(
buffer,
@@ -149,8 +143,8 @@ pub fn lsp_tasks(
},
cx,
)
- }) && let Some(new_runnables) = runnables_task.await.log_err()
- {
+ });
+ if let Some(new_runnables) = runnables_task.await.log_err() {
new_lsp_tasks.extend(new_runnables.runnables.into_iter().filter_map(
|(location, runnable)| {
let resolved_task =
@@ -81,7 +81,7 @@ pub fn go_to_parent_module(
let upstream_client = lsp_store.read(cx).upstream_client();
cx.spawn_in(window, async move |editor, cx| {
let location_links = if let Some((client, project_id)) = upstream_client {
- let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id())?;
+ let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
let request = proto::LspExtGoToParentModule {
project_id,
@@ -103,7 +103,7 @@ pub fn go_to_parent_module(
.collect::<anyhow::Result<_>>()
.context("go to parent module via collab")?
} else {
- let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
project
.update(cx, |project, cx| {
@@ -113,7 +113,7 @@ pub fn go_to_parent_module(
project::lsp_store::lsp_ext_command::GoToParentModule { position },
cx,
)
- })?
+ })
.await
.context("go to parent module")?
};
@@ -161,7 +161,7 @@ pub fn expand_macro_recursively(
let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client();
cx.spawn_in(window, async move |_editor, cx| {
let macro_expansion = if let Some((client, project_id)) = upstream_client {
- let buffer_id = buffer.update(cx, |buffer, _| buffer.remote_id())?;
+ let buffer_id = buffer.update(cx, |buffer, _| buffer.remote_id());
let request = proto::LspExtExpandMacro {
project_id,
buffer_id: buffer_id.to_proto(),
@@ -176,7 +176,7 @@ pub fn expand_macro_recursively(
expansion: response.expansion,
}
} else {
- let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
project
.update(cx, |project, cx| {
@@ -186,7 +186,7 @@ pub fn expand_macro_recursively(
ExpandMacro { position },
cx,
)
- })?
+ })
.await
.context("expand macro")?
};
@@ -200,7 +200,7 @@ pub fn expand_macro_recursively(
}
let buffer = project
- .update(cx, |project, cx| project.create_buffer(false, cx))?
+ .update(cx, |project, cx| project.create_buffer(false, cx))
.await?;
workspace.update_in(cx, |workspace, window, cx| {
buffer.update(cx, |buffer, cx| {
@@ -252,7 +252,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client();
cx.spawn_in(window, async move |_editor, cx| {
let docs_urls = if let Some((client, project_id)) = upstream_client {
- let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id())?;
+ let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
let request = proto::LspExtOpenDocs {
project_id,
buffer_id: buffer_id.to_proto(),
@@ -267,7 +267,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
local: response.local,
}
} else {
- let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
project
.update(cx, |project, cx| {
@@ -277,7 +277,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
project::lsp_store::lsp_ext_command::OpenDocs { position },
cx,
)
- })?
+ })
.await
.context("open docs")?
};
@@ -303,7 +303,8 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
if let Some(web_url) = docs_urls.web {
cx.open_url(&web_url);
}
- })
+ });
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
}
@@ -150,7 +150,7 @@ fn main() {
registry.set_default_model(Some(agent_model.clone()), cx);
});
judge_model
- })?;
+ });
let mut examples = Vec::new();
@@ -210,7 +210,8 @@ fn main() {
if examples.is_empty() {
eprintln!("Filter matched no examples");
- return cx.update(|cx| cx.quit());
+ cx.update(|cx| cx.quit());
+ return anyhow::Ok(());
}
let mut repo_urls = HashSet::default();
@@ -294,7 +295,7 @@ fn main() {
let result = async {
example.setup().await?;
let run_output = cx
- .update(|cx| example.run(app_state.clone(), cx))?
+ .update(|cx| example.run(app_state.clone(), cx))
.await?;
let judge_output = judge_example(
example.clone(),
@@ -328,7 +329,8 @@ fn main() {
app_state.client.telemetry().flush_events().await;
- cx.update(|cx| cx.quit())
+ cx.update(|cx| cx.quit());
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
});
@@ -221,7 +221,7 @@ impl ExampleContext {
} else {
thread.proceed(cx)
}
- })??;
+ })?;
let task = self.app.background_spawn(async move {
let mut messages = Vec::new();
@@ -357,11 +357,13 @@ impl ExampleContext {
}
pub fn edits(&self) -> HashMap<Arc<RelPath>, FileEdits> {
- self.agent_thread
- .read_with(&self.app, |thread, cx| {
- let action_log = thread.action_log().read(cx);
- HashMap::from_iter(action_log.changed_buffers(cx).into_iter().map(
- |(buffer, diff)| {
+ self.agent_thread.read_with(&self.app, |thread, cx| {
+ let action_log = thread.action_log().read(cx);
+ HashMap::from_iter(
+ action_log
+ .changed_buffers(cx)
+ .into_iter()
+ .map(|(buffer, diff)| {
let snapshot = buffer.read(cx).snapshot();
let file = snapshot.file().unwrap();
@@ -381,10 +383,9 @@ impl ExampleContext {
.collect();
(file.path().clone(), FileEdits { hunks })
- },
- ))
- })
- .unwrap()
+ }),
+ )
+ })
}
pub fn agent_thread(&self) -> Entity<Thread> {
@@ -393,16 +394,14 @@ impl ExampleContext {
}
impl AppContext for ExampleContext {
- type Result<T> = anyhow::Result<T>;
-
fn new<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut gpui::Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
self.app.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<gpui::Reservation<T>> {
+ fn reserve_entity<T: 'static>(&mut self) -> gpui::Reservation<T> {
self.app.reserve_entity()
}
@@ -410,7 +409,7 @@ impl AppContext for ExampleContext {
&mut self,
reservation: gpui::Reservation<T>,
build_entity: impl FnOnce(&mut gpui::Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
self.app.insert_entity(reservation, build_entity)
}
@@ -418,25 +417,21 @@ impl AppContext for ExampleContext {
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut gpui::Context<T>) -> R,
- ) -> Self::Result<R>
+ ) -> R
where
T: 'static,
{
self.app.update_entity(handle, update)
}
- fn as_mut<'a, T>(&'a mut self, handle: &Entity<T>) -> Self::Result<gpui::GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, handle: &Entity<T>) -> gpui::GpuiBorrow<'a, T>
where
T: 'static,
{
self.app.as_mut(handle)
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
@@ -471,7 +466,7 @@ impl AppContext for ExampleContext {
self.app.background_spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: gpui::Global,
{
@@ -62,39 +62,29 @@ impl Example for CodeBlockCitations {
cx.assert(citation.contains("/"), format!("Slash in {citation:?}",))
{
let path_range = PathWithRange::new(citation);
- let path = cx
- .agent_thread()
- .update(cx, |thread, cx| {
- thread
- .project()
- .read(cx)
- .find_project_path(path_range.path.as_ref(), cx)
- })
- .ok()
- .flatten();
+ let path = cx.agent_thread().update(cx, |thread, cx| {
+ thread
+ .project()
+ .read(cx)
+ .find_project_path(path_range.path.as_ref(), cx)
+ });
if let Ok(path) = cx.assert_some(path, format!("Valid path: {citation:?}"))
{
let buffer_text = {
- let buffer = match cx.agent_thread().update(cx, |thread, cx| {
- thread
- .project()
- .update(cx, |project, cx| project.open_buffer(path, cx))
- }) {
- Ok(buffer_task) => buffer_task.await.ok(),
- Err(err) => {
- cx.assert(
- false,
- format!("Expected Ok(buffer), not {err:?}"),
- )
- .ok();
- break;
- }
- };
+ let buffer = cx
+ .agent_thread()
+ .update(cx, |thread, cx| {
+ thread
+ .project()
+ .update(cx, |project, cx| project.open_buffer(path, cx))
+ })
+ .await
+ .ok();
let Ok(buffer_text) = cx.assert_some(
- buffer.and_then(|buffer| {
- buffer.read_with(cx, |buffer, _| buffer.text()).ok()
+ buffer.map(|buffer| {
+ buffer.read_with(cx, |buffer, _| buffer.text())
}),
"Reading buffer text succeeded",
) else {
@@ -29,7 +29,7 @@ impl Example for FileChangeNotificationExample {
.read(cx)
.find_project_path("README", cx)
.expect("README file should exist in this repo")
- })?;
+ });
let buffer = {
cx.agent_thread()
@@ -37,7 +37,7 @@ impl Example for FileChangeNotificationExample {
thread
.project()
.update(cx, |project, cx| project.open_buffer(project_path, cx))
- })?
+ })
.await?
};
@@ -45,7 +45,7 @@ impl Example for FileChangeNotificationExample {
thread.action_log().update(cx, |action_log, cx| {
action_log.buffer_read(buffer.clone(), cx);
});
- })?;
+ });
// Start conversation (specific message is not important)
cx.prompt_with_max_turns("Find all files in this repo", 1)
@@ -54,7 +54,7 @@ impl Example for FileChangeNotificationExample {
// Edit the README buffer - the model should get a notification on next turn
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..buffer.len(), "Surprise!")], None, cx);
- })?;
+ });
// Run for some more turns.
// The model shouldn't thank us for letting it know about the file change.
@@ -220,7 +220,7 @@ impl ExampleInstance {
worktree
.update(cx, |worktree, _cx| {
worktree.as_local().unwrap().scan_complete()
- })?
+ })
.await;
struct LanguageServerState {
@@ -233,39 +233,40 @@ impl ExampleInstance {
let lsp = if let Some(language_server) = &meta.language_server {
// Open a file that matches the language to cause LSP to start.
- let language_file = worktree.read_with(cx, |worktree, _cx| {
- worktree
- .files(false, 0)
- .find_map(|e| {
- if e.path.clone().extension()
- == Some(&language_server.file_extension)
- {
- Some(ProjectPath {
- worktree_id: worktree.id(),
- path: e.path.clone(),
- })
- } else {
- None
- }
- })
- .context("Failed to find a file for example language")
- })??;
+ let language_file = worktree
+ .read_with(cx, |worktree, _cx| {
+ worktree
+ .files(false, 0)
+ .find_map(|e| {
+ if e.path.clone().extension()
+ == Some(&language_server.file_extension)
+ {
+ Some(ProjectPath {
+ worktree_id: worktree.id(),
+ path: e.path.clone(),
+ })
+ } else {
+ None
+ }
+ })
+ .context("Failed to find a file for example language")
+ })?;
let open_language_file_buffer_task = project.update(cx, |project, cx| {
project.open_buffer(language_file.clone(), cx)
- })?;
+ });
let language_file_buffer = open_language_file_buffer_task.await?;
let lsp_open_handle = project.update(cx, |project, cx| {
project.register_buffer_with_language_servers(&language_file_buffer, cx)
- })?;
+ });
wait_for_lang_server(&project, &language_file_buffer, this.log_prefix.clone(), cx).await?;
diagnostic_summary_before = project.read_with(cx, |project, cx| {
- project.diagnostic_summary(false, cx)
- })?;
+ project.diagnostic_summary(false, cx)
+ });
diagnostics_before = query_lsp_diagnostics(project.clone(), cx).await?;
if diagnostics_before.is_some() && language_server.allow_preexisting_diagnostics {
@@ -337,7 +338,7 @@ impl ExampleInstance {
});
thread
- }).unwrap();
+ });
let mut example_cx = ExampleContext::new(
meta.clone(),
@@ -371,13 +372,13 @@ impl ExampleInstance {
.update(|cx| {
let project = project.clone();
cx.spawn(async move |cx| query_lsp_diagnostics(project, cx).await)
- })?
+ })
.await?;
println!("{}Got diagnostics", this.log_prefix);
diagnostic_summary_after = project.read_with(cx, |project, cx| {
- project.diagnostic_summary(false, cx)
- })?;
+ project.diagnostic_summary(false, cx)
+ });
}
@@ -389,7 +390,7 @@ impl ExampleInstance {
fs::write(this.run_directory.join("diagnostics_after.txt"), diagnostics_after)?;
}
- thread.update(cx, |thread, _cx| {
+ Ok(thread.update(cx, |thread, _cx| {
RunOutput {
repository_diff,
diagnostic_summary_before,
@@ -401,7 +402,7 @@ impl ExampleInstance {
thread_markdown: thread.to_markdown(),
programmatic_assertions: example_cx.assertions,
}
- })
+ }))
})
}
@@ -614,17 +615,19 @@ struct EvalTerminalHandle {
impl agent::TerminalHandle for EvalTerminalHandle {
fn id(&self, cx: &AsyncApp) -> Result<acp::TerminalId> {
- self.terminal.read_with(cx, |term, _cx| term.id().clone())
+ Ok(self.terminal.read_with(cx, |term, _cx| term.id().clone()))
}
fn wait_for_exit(&self, cx: &AsyncApp) -> Result<Shared<Task<acp::TerminalExitStatus>>> {
- self.terminal
- .read_with(cx, |term, _cx| term.wait_for_exit())
+ Ok(self
+ .terminal
+ .read_with(cx, |term, _cx| term.wait_for_exit()))
}
fn current_output(&self, cx: &AsyncApp) -> Result<acp::TerminalOutputResponse> {
- self.terminal
- .read_with(cx, |term, cx| term.current_output(cx))
+ Ok(self
+ .terminal
+ .read_with(cx, |term, cx| term.current_output(cx)))
}
fn kill(&self, cx: &AsyncApp) -> Result<()> {
@@ -632,13 +635,14 @@ impl agent::TerminalHandle for EvalTerminalHandle {
self.terminal.update(cx, |terminal, cx| {
terminal.kill(cx);
});
- })?;
+ });
Ok(())
}
fn was_stopped_by_user(&self, cx: &AsyncApp) -> Result<bool> {
- self.terminal
- .read_with(cx, |term, _cx| term.was_stopped_by_user())
+ Ok(self
+ .terminal
+ .read_with(cx, |term, _cx| term.was_stopped_by_user()))
}
}
@@ -653,7 +657,7 @@ impl agent::ThreadEnvironment for EvalThreadEnvironment {
let project = self.project.clone();
cx.spawn(async move |cx| {
let language_registry =
- project.read_with(cx, |project, _cx| project.languages().clone())?;
+ project.read_with(cx, |project, _cx| project.languages().clone());
let id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string());
let terminal =
acp_thread::create_terminal_entity(command, &[], vec![], cwd.clone(), &project, cx)
@@ -668,7 +672,7 @@ impl agent::ThreadEnvironment for EvalThreadEnvironment {
language_registry,
cx,
)
- })?;
+ });
Ok(Rc::new(EvalTerminalHandle { terminal }) as Rc<dyn agent::TerminalHandle>)
})
}
@@ -899,25 +903,20 @@ pub fn wait_for_lang_server(
let (mut tx, mut rx) = mpsc::channel(1);
- let lsp_store = project
- .read_with(cx, |project, _| project.lsp_store())
- .unwrap();
-
- let has_lang_server = buffer
- .update(cx, |buffer, cx| {
- lsp_store.update(cx, |lsp_store, cx| {
- lsp_store
- .running_language_servers_for_local_buffer(buffer, cx)
- .next()
- .is_some()
- })
+ let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
+
+ let has_lang_server = buffer.update(cx, |buffer, cx| {
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store
+ .running_language_servers_for_local_buffer(buffer, cx)
+ .next()
+ .is_some()
})
- .unwrap_or(false);
+ });
if has_lang_server {
project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
- .unwrap()
.detach();
}
@@ -984,7 +983,7 @@ pub async fn query_lsp_diagnostics(
.filter(|(_, _, summary)| summary.error_count > 0 || summary.warning_count > 0)
.map(|(project_path, _, _)| project_path)
.collect::<Vec<_>>()
- })?;
+ });
if paths_with_diagnostics.is_empty() {
return Ok(None);
@@ -993,9 +992,9 @@ pub async fn query_lsp_diagnostics(
let mut output = String::new();
for project_path in paths_with_diagnostics {
let buffer = project
- .update(cx, |project, cx| project.open_buffer(project_path, cx))?
+ .update(cx, |project, cx| project.open_buffer(project_path, cx))
.await?;
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix];
@@ -245,8 +245,7 @@ impl HeadlessExtensionStore {
cx,
));
}
- })
- .ok();
+ });
let _ = join_all(removal_tasks).await;
fs.remove_dir(
@@ -304,7 +303,7 @@ impl HeadlessExtensionStore {
let missing_extensions = extension_store
.update(&mut cx, |extension_store, cx| {
extension_store.sync_extensions(requested_extensions.collect(), cx)
- })?
+ })
.await?;
Ok(proto::SyncExtensionsResponse {
@@ -343,7 +342,7 @@ impl HeadlessExtensionStore {
PathBuf::from(envelope.payload.tmp_dir),
cx,
)
- })?
+ })
.await?;
Ok(proto::Ack {})
@@ -681,11 +681,11 @@ impl WasmHost {
// Run wasi-dependent operations on tokio.
// wasmtime_wasi internally uses tokio for I/O operations.
let (extension_task, manifest, work_dir, tx, zed_api_version) =
- gpui_tokio::Tokio::spawn(cx, load_extension(zed_api_version, component))?.await??;
+ gpui_tokio::Tokio::spawn(cx, load_extension(zed_api_version, component)).await??;
// Run the extension message loop on tokio since extension
// calls may invoke wasi functions that require a tokio runtime.
- let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task)?);
+ let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task));
Ok(WasmExtension {
manifest,
@@ -471,7 +471,7 @@ impl ExtensionImports for WasmState {
}
.boxed_local()
})
- .await?
+ .await
.to_wasmtime_result()
}
@@ -1004,7 +1004,7 @@ impl ExtensionImports for WasmState {
}
.boxed_local()
})
- .await?
+ .await
.to_wasmtime_result()
}
@@ -13,7 +13,7 @@ use editor::{Editor, EditorElement, EditorStyle};
use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore};
use fuzzy::{StringMatchCandidate, match_strings};
use gpui::{
- Action, App, ClipboardItem, Context, Corner, Entity, EventEmitter, Flatten, Focusable,
+ Action, App, ClipboardItem, Context, Corner, Entity, EventEmitter, Focusable,
InteractiveElement, KeyContext, ParentElement, Point, Render, Styled, Task, TextStyle,
UniformListScrollHandle, WeakEntity, Window, actions, point, uniform_list,
};
@@ -131,25 +131,22 @@ pub fn init(cx: &mut App) {
let workspace_handle = cx.entity().downgrade();
window
.spawn(cx, async move |cx| {
- let extension_path =
- match Flatten::flatten(prompt.await.map_err(|e| e.into())) {
- Ok(Some(mut paths)) => paths.pop()?,
- Ok(None) => return None,
- Err(err) => {
- workspace_handle
- .update(cx, |workspace, cx| {
- workspace.show_portal_error(err.to_string(), cx);
- })
- .ok();
- return None;
- }
- };
+ let extension_path = match prompt.await.map_err(anyhow::Error::from) {
+ Ok(Some(mut paths)) => paths.pop()?,
+ Ok(None) => return None,
+ Err(err) => {
+ workspace_handle
+ .update(cx, |workspace, cx| {
+ workspace.show_portal_error(err.to_string(), cx);
+ })
+ .ok();
+ return None;
+ }
+ };
- let install_task = store
- .update(cx, |store, cx| {
- store.install_dev_extension(extension_path, cx)
- })
- .ok()?;
+ let install_task = store.update(cx, |store, cx| {
+ store.install_dev_extension(extension_path, cx)
+ });
match install_task.await {
Ok(_) => {}
@@ -1225,35 +1225,27 @@ impl FileFinderDelegate {
let query_path = Path::new(query.path_query());
let mut path_matches = Vec::new();
- let abs_file_exists = if let Ok(task) = project.update(cx, |this, cx| {
- this.resolve_abs_file_path(query.path_query(), cx)
- }) {
- task.await.is_some()
- } else {
- false
- };
+ let abs_file_exists = project
+ .update(cx, |this, cx| {
+ this.resolve_abs_file_path(query.path_query(), cx)
+ })
+ .await
+ .is_some();
if abs_file_exists {
- let update_result = project
- .update(cx, |project, cx| {
- if let Some((worktree, relative_path)) =
- project.find_worktree(query_path, cx)
- {
- path_matches.push(ProjectPanelOrdMatch(PathMatch {
- score: 1.0,
- positions: Vec::new(),
- worktree_id: worktree.read(cx).id().to_usize(),
- path: relative_path,
- path_prefix: RelPath::empty().into(),
- is_dir: false, // File finder doesn't support directories
- distance_to_relative_ancestor: usize::MAX,
- }));
- }
- })
- .log_err();
- if update_result.is_none() {
- return abs_file_exists;
- }
+ project.update(cx, |project, cx| {
+ if let Some((worktree, relative_path)) = project.find_worktree(query_path, cx) {
+ path_matches.push(ProjectPanelOrdMatch(PathMatch {
+ score: 1.0,
+ positions: Vec::new(),
+ worktree_id: worktree.read(cx).id().to_usize(),
+ path: relative_path,
+ path_prefix: RelPath::empty().into(),
+ is_dir: false, // File finder doesn't support directories
+ distance_to_relative_ancestor: usize::MAX,
+ }));
+ }
+ });
}
picker
@@ -744,8 +744,6 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter {
.context("could not find git binary path")
.log_err()
})
- .ok()
- .flatten()
} else {
None
};
@@ -397,7 +397,7 @@ impl BranchListDelegate {
cx.spawn(async move |_, cx| {
repo.update(cx, |repo, _| {
repo.create_branch(new_branch_name, base_branch)
- })?
+ })
.await??;
Ok(())
@@ -444,11 +444,11 @@ impl BranchListDelegate {
Entry::Branch { branch, .. } => match branch.remote_name() {
Some(remote_name) => {
is_remote = true;
- repo.update(cx, |repo, _| repo.remove_remote(remote_name.to_string()))?
+ repo.update(cx, |repo, _| repo.remove_remote(remote_name.to_string()))
.await?
}
None => {
- repo.update(cx, |repo, _| repo.delete_branch(branch.name().to_string()))?
+ repo.update(cx, |repo, _| repo.delete_branch(branch.name().to_string()))
.await?
}
},
@@ -763,7 +763,7 @@ impl PickerDelegate for BranchListDelegate {
let branch = branch.clone();
cx.spawn(async move |_, cx| {
- repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string()))?
+ repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string()))
.await??;
anyhow::Ok(())
@@ -1433,7 +1433,6 @@ mod tests {
cx.spawn(async move |mut cx| {
for branch in branch_names {
repo.update(&mut cx, |repo, _| repo.create_branch(branch, None))
- .unwrap()
.await
.unwrap()
.unwrap();
@@ -1498,7 +1497,6 @@ mod tests {
repo.update(&mut cx, |repo, _| {
repo.create_remote(branch, String::from("test"))
})
- .unwrap()
.await
.unwrap()
.unwrap();
@@ -236,7 +236,7 @@ impl CommitView {
.repo_path_to_project_path(&file.path, cx)
.map(|path| path.worktree_id)
.or(first_worktree_id)
- })?
+ })
.context("project has no worktrees")?;
let short_sha = commit_sha.get(0..7).unwrap_or(&commit_sha);
let file_name = file
@@ -555,7 +555,7 @@ impl CommitView {
return Err(anyhow::anyhow!("Stash has changed, not applying"));
}
Ok(repo.stash_apply(Some(stash), cx))
- })?;
+ });
match result {
Ok(task) => task.await?,
@@ -582,7 +582,7 @@ impl CommitView {
return Err(anyhow::anyhow!("Stash has changed, pop aborted"));
}
Ok(repo.stash_pop(Some(stash), cx))
- })?;
+ });
match result {
Ok(task) => task.await?,
@@ -609,7 +609,7 @@ impl CommitView {
return Err(anyhow::anyhow!("Stash has changed, drop aborted"));
}
Ok(repo.stash_drop(Some(stash), cx))
- })?;
+ });
match result {
Ok(task) => task.await??,
@@ -673,7 +673,7 @@ impl CommitView {
workspace
.panel::<GitPanel>(cx)
.and_then(|p| p.read(cx).active_repository.clone())
- })?;
+ });
let Some(repo) = repo else {
return Ok(());
@@ -752,7 +752,7 @@ async fn build_buffer(
let line_ending = LineEnding::detect(&text);
LineEnding::normalize(&mut text);
let text = Rope::from(text);
- let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?;
+ let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx));
let language = if let Some(language) = language {
language_registry
.load_language(&language)
@@ -772,7 +772,7 @@ async fn build_buffer(
let mut buffer = Buffer::build(buffer, Some(blob), Capability::ReadWrite);
buffer.set_language_async(language, cx);
buffer
- })?;
+ });
Ok(buffer)
}
@@ -786,10 +786,10 @@ async fn build_buffer_diff(
LineEnding::normalize(old_text);
}
- let language = cx.update(|cx| buffer.read(cx).language().cloned())?;
- let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
+ let language = cx.update(|cx| buffer.read(cx).language().cloned());
+ let buffer = cx.update(|cx| buffer.read(cx).snapshot());
- let diff = cx.new(|cx| BufferDiff::new(&buffer.text, cx))?;
+ let diff = cx.new(|cx| BufferDiff::new(&buffer.text, cx));
let update = diff
.update(cx, |diff, cx| {
@@ -800,13 +800,13 @@ async fn build_buffer_diff(
language.clone(),
cx,
)
- })?
+ })
.await;
diff.update(cx, |diff, cx| {
diff.language_changed(language, Some(language_registry.clone()), cx);
diff.set_snapshot(update, &buffer.text, cx)
- })?
+ })
.await;
Ok(diff)
@@ -487,24 +487,16 @@ pub(crate) fn resolve_conflict(
else {
return;
};
- let Some(save) = project
- .update(cx, |project, cx| {
- if multibuffer.read(cx).all_diff_hunks_expanded() {
- project.save_buffer(buffer.clone(), cx)
- } else {
- Task::ready(Ok(()))
- }
- })
- .ok()
- else {
- return;
- };
+ let save = project.update(cx, |project, cx| {
+ if multibuffer.read(cx).all_diff_hunks_expanded() {
+ project.save_buffer(buffer.clone(), cx)
+ } else {
+ Task::ready(Ok(()))
+ }
+ });
if save.await.log_err().is_none() {
let open_path = maybe!({
- let path = buffer
- .read_with(cx, |buffer, cx| buffer.project_path(cx))
- .ok()
- .flatten()?;
+ let path = buffer.read_with(cx, |buffer, cx| buffer.project_path(cx))?;
workspace
.update_in(cx, |workspace, window, cx| {
workspace.open_path_preview(path, None, false, false, false, window, cx)
@@ -47,12 +47,12 @@ impl FileDiffView {
window.spawn(cx, async move |cx| {
let project = workspace.update(cx, |workspace, _| workspace.project().clone())?;
let old_buffer = project
- .update(cx, |project, cx| project.open_local_buffer(&old_path, cx))?
+ .update(cx, |project, cx| project.open_local_buffer(&old_path, cx))
.await?;
let new_buffer = project
- .update(cx, |project, cx| project.open_local_buffer(&new_path, cx))?
+ .update(cx, |project, cx| project.open_local_buffer(&new_path, cx))
.await?;
- let languages = project.update(cx, |project, _| project.languages().clone())?;
+ let languages = project.update(cx, |project, _| project.languages().clone());
let buffer_diff = build_buffer_diff(&old_buffer, &new_buffer, languages, cx).await?;
@@ -151,7 +151,7 @@ impl FileDiffView {
new_snapshot.text.clone(),
cx,
)
- })?
+ })
.await
.ok();
log::trace!("finish recalculating");
@@ -168,10 +168,10 @@ async fn build_buffer_diff(
language_registry: Arc<LanguageRegistry>,
cx: &mut AsyncApp,
) -> Result<Entity<BufferDiff>> {
- let old_buffer_snapshot = old_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
- let new_buffer_snapshot = new_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let old_buffer_snapshot = old_buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ let new_buffer_snapshot = new_buffer.read_with(cx, |buffer, _| buffer.snapshot());
- let diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot.text, cx))?;
+ let diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot.text, cx));
let update = diff
.update(cx, |diff, cx| {
@@ -182,7 +182,7 @@ async fn build_buffer_diff(
new_buffer_snapshot.language().cloned(),
cx,
)
- })?
+ })
.await;
diff.update(cx, |diff, cx| {
@@ -192,7 +192,7 @@ async fn build_buffer_diff(
cx,
);
diff.set_snapshot(update, &new_buffer_snapshot.text, cx)
- })?
+ })
.await;
Ok(diff)
@@ -1280,7 +1280,7 @@ impl GitPanel {
.ok_or_else(|| anyhow::anyhow!("Failed to open file"))?;
if let Some(active_editor) = item.downcast::<Editor>() {
if let Some(diff_task) =
- active_editor.update(cx, |editor, _cx| editor.wait_for_diff_to_load())?
+ active_editor.update(cx, |editor, _cx| editor.wait_for_diff_to_load())
{
diff_task.await;
}
@@ -1298,7 +1298,8 @@ impl GitPanel {
cx,
);
})
- })?;
+ })
+ .log_err();
}
anyhow::Ok(())
@@ -1385,7 +1386,7 @@ impl GitPanel {
let gitignore_abs_path = repo_root.join(".gitignore");
- let buffer = project
+ let buffer: Entity<Buffer> = project
.update(cx, |project, cx| {
project.open_local_buffer(gitignore_abs_path, cx)
})?
@@ -1397,7 +1398,7 @@ impl GitPanel {
if existing_content
.lines()
- .any(|line| line.trim() == file_path_str)
+ .any(|line: &str| line.trim() == file_path_str)
{
return;
}
@@ -1413,7 +1414,7 @@ impl GitPanel {
buffer.edit([(insert_position..insert_position, new_entry)], None, cx);
should_save = true;
- })?;
+ });
if should_save {
project
@@ -1935,7 +1936,7 @@ impl GitPanel {
cx.spawn({
async move |this, cx| {
let stash_task = active_repository
- .update(cx, |repo, cx| repo.stash_pop(None, cx))?
+ .update(cx, |repo, cx| repo.stash_pop(None, cx))
.await;
this.update(cx, |this, cx| {
stash_task
@@ -1958,7 +1959,7 @@ impl GitPanel {
cx.spawn({
async move |this, cx| {
let stash_task = active_repository
- .update(cx, |repo, cx| repo.stash_apply(None, cx))?
+ .update(cx, |repo, cx| repo.stash_apply(None, cx))
.await;
this.update(cx, |this, cx| {
stash_task
@@ -1981,7 +1982,7 @@ impl GitPanel {
cx.spawn({
async move |this, cx| {
let stash_task = active_repository
- .update(cx, |repo, cx| repo.stash_all(cx))?
+ .update(cx, |repo, cx| repo.stash_all(cx))
.await;
this.update(cx, |this, cx| {
stash_task
@@ -2278,7 +2279,7 @@ impl GitPanel {
stage_task.await?;
let commit_task = active_repository.update(cx, |repo, cx| {
repo.commit(message.into(), None, options, askpass, cx)
- })?;
+ });
commit_task.await?
})
};
@@ -2322,7 +2323,7 @@ impl GitPanel {
repo.update(cx, |repo, cx| {
repo.reset("HEAD^".to_string(), ResetMode::Soft, cx)
- })?
+ })
.await??;
Ok(Some(prior_head))
@@ -2362,7 +2363,7 @@ impl GitPanel {
let repo = repo.context("No active repository")?;
let pushed_to: Vec<SharedString> = repo
- .update(&mut cx, |repo, _| repo.check_for_pushed_commits())?
+ .update(&mut cx, |repo, _| repo.check_for_pushed_commits())
.await??;
if pushed_to.is_empty() {
@@ -2533,41 +2534,37 @@ impl GitPanel {
repo_work_dir: &Arc<Path>,
cx: &mut AsyncApp,
) -> Option<String> {
- let rules_path = cx
- .update(|cx| {
- for worktree in project.read(cx).worktrees(cx) {
- let worktree_abs_path = worktree.read(cx).abs_path();
- if !worktree_abs_path.starts_with(&repo_work_dir) {
- continue;
- }
+ let rules_path = cx.update(|cx| {
+ for worktree in project.read(cx).worktrees(cx) {
+ let worktree_abs_path = worktree.read(cx).abs_path();
+ if !worktree_abs_path.starts_with(&repo_work_dir) {
+ continue;
+ }
- let worktree_snapshot = worktree.read(cx).snapshot();
- for rules_name in RULES_FILE_NAMES {
- if let Ok(rel_path) = RelPath::unix(rules_name) {
- if let Some(entry) = worktree_snapshot.entry_for_path(rel_path) {
- if entry.is_file() {
- return Some(ProjectPath {
- worktree_id: worktree.read(cx).id(),
- path: entry.path.clone(),
- });
- }
+ let worktree_snapshot = worktree.read(cx).snapshot();
+ for rules_name in RULES_FILE_NAMES {
+ if let Ok(rel_path) = RelPath::unix(rules_name) {
+ if let Some(entry) = worktree_snapshot.entry_for_path(rel_path) {
+ if entry.is_file() {
+ return Some(ProjectPath {
+ worktree_id: worktree.read(cx).id(),
+ path: entry.path.clone(),
+ });
}
}
}
}
- None
- })
- .ok()??;
+ }
+ None
+ })?;
let buffer = project
.update(cx, |project, cx| project.open_buffer(rules_path, cx))
- .ok()?
.await
.ok()?;
let content = buffer
.read_with(cx, |buffer, _| buffer.text())
- .ok()?
.trim()
.to_string();
@@ -2591,12 +2588,11 @@ impl GitPanel {
}
let load = async {
- let store = cx.update(|cx| PromptStore::global(cx)).ok()?.await.ok()?;
+ let store = cx.update(|cx| PromptStore::global(cx)).await.ok()?;
store
.update(cx, |s, cx| {
s.load(PromptId::BuiltIn(BuiltInPrompt::CommitMessage), cx)
})
- .ok()?
.await
.ok()
};
@@ -2653,9 +2649,9 @@ impl GitPanel {
} else {
None
}
- })? {
+ }) {
task.await.log_err();
- };
+ }
let mut diff_text = match diff.await {
Ok(result) => match result {
@@ -2772,7 +2768,6 @@ impl GitPanel {
let repo = repo?;
let remotes = repo
.update(cx, |repo, _| repo.get_remotes(None, false))
- .ok()?
.await
.ok()?
.log_err()?;
@@ -2827,7 +2822,7 @@ impl GitPanel {
};
let fetch = repo.update(cx, |repo, cx| {
repo.fetch(fetch_options.clone(), askpass, cx)
- })?;
+ });
let remote_message = fetch.await?;
this.update(cx, |this, cx| {
@@ -2983,7 +2978,7 @@ impl GitPanel {
let pull = repo.update(cx, |repo, cx| {
repo.pull(branch_name, remote.name.clone(), rebase, askpass, cx)
- })?;
+ });
let remote_message = pull.await?;
@@ -3069,7 +3064,7 @@ impl GitPanel {
askpass_delegate,
cx,
)
- })?;
+ });
let remote_output = push.await?;
@@ -3199,7 +3194,7 @@ impl GitPanel {
Some(current_branch.name().to_string())
};
anyhow::Ok(repo.get_remotes(current_branch, is_push))
- })??
+ })?
.await??;
let current_remotes: Vec<_> = current_remotes
@@ -5630,7 +5625,7 @@ impl GitPanelMessageTooltip {
git_panel.load_commit_details(sha.to_string(), cx),
git_panel.workspace.clone(),
)
- })?;
+ });
let details = details.await?;
let provider_registry = cx
.update(|_, app| GitHostingProviderRegistry::default_global(app))
@@ -354,7 +354,7 @@ impl RenameBranchModal {
match repo
.update(cx, |repo, _| {
repo.rename_branch(current_branch, new_name.clone())
- })?
+ })
.await
{
Ok(Ok(_)) => Ok(()),
@@ -246,7 +246,7 @@ impl StashListDelegate {
};
cx.spawn(async move |_, cx| {
- repo.update(cx, |repo, cx| repo.stash_drop(Some(stash_index), cx))?
+ repo.update(cx, |repo, cx| repo.stash_drop(Some(stash_index), cx))
.await??;
Ok(())
})
@@ -281,7 +281,7 @@ impl StashListDelegate {
};
cx.spawn(async move |_, cx| {
- repo.update(cx, |repo, cx| repo.stash_pop(Some(stash_index), cx))?
+ repo.update(cx, |repo, cx| repo.stash_pop(Some(stash_index), cx))
.await?;
Ok(())
})
@@ -297,7 +297,7 @@ impl StashListDelegate {
};
cx.spawn(async move |_, cx| {
- repo.update(cx, |repo, cx| repo.stash_apply(Some(stash_index), cx))?
+ repo.update(cx, |repo, cx| repo.stash_apply(Some(stash_index), cx))
.await?;
Ok(())
})
@@ -256,10 +256,10 @@ async fn update_diff_buffer(
clipboard_buffer: &Entity<Buffer>,
cx: &mut AsyncApp,
) -> Result<()> {
- let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot());
let language = source_buffer_snapshot.language().cloned();
- let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot());
let base_text = base_buffer_snapshot.text();
let update = diff
@@ -271,12 +271,12 @@ async fn update_diff_buffer(
language,
cx,
)
- })?
+ })
.await;
diff.update(cx, |diff, cx| {
diff.set_snapshot(update, &source_buffer_snapshot.text, cx)
- })?
+ })
.await;
Ok(())
}
@@ -260,7 +260,7 @@ impl WorktreeListDelegate {
repo.update(cx, |repo, _| {
repo.create_worktree(branch.clone(), path.clone(), commit)
- })?
+ })
.await??;
let new_worktree_path = path.join(branch);
@@ -444,7 +444,7 @@ async fn open_remote_worktree(
return Ok(());
};
- let new_project = cx.update(|cx| {
+ let new_project: Entity<project::Project> = cx.update(|cx| {
project::Project::remote(
session,
app_state.client.clone(),
@@ -455,7 +455,7 @@ async fn open_remote_worktree(
true,
cx,
)
- })?;
+ });
let window_to_use = if replace_current_window {
workspace_window
@@ -463,12 +463,12 @@ async fn open_remote_worktree(
let workspace_position = cx
.update(|cx| {
workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx)
- })?
+ })
.await
.context("fetching workspace position from db")?;
let mut options =
- cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx))?;
+ cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx));
options.window_bounds = workspace_position.window_bounds;
cx.open_window(options, |window, cx| {
@@ -2235,8 +2235,6 @@ impl App {
}
impl AppContext for App {
- type Result<T> = T;
-
/// Builds an entity that is owned by the application.
///
/// The given function will be invoked with a [`Context`] and must return an object representing the entity. An
@@ -2258,7 +2256,7 @@ impl AppContext for App {
})
}
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<Reservation<T>> {
+ fn reserve_entity<T: 'static>(&mut self) -> Reservation<T> {
Reservation(self.entities.reserve())
}
@@ -2266,7 +2264,7 @@ impl AppContext for App {
&mut self,
reservation: Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
self.update(|cx| {
let slot = reservation.0;
let entity = build_entity(&mut Context::new_context(cx, slot.downgrade()));
@@ -2299,11 +2297,7 @@ impl AppContext for App {
GpuiBorrow::new(handle.clone(), self)
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
@@ -2348,7 +2342,7 @@ impl AppContext for App {
self.background_executor.spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
@@ -1,9 +1,10 @@
use crate::{
AnyView, AnyWindowHandle, App, AppCell, AppContext, BackgroundExecutor, BorrowAppContext,
- Entity, EventEmitter, Focusable, ForegroundExecutor, Global, PromptButton, PromptLevel, Render,
- Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle,
+ Entity, EventEmitter, Focusable, ForegroundExecutor, Global, GpuiBorrow, PromptButton,
+ PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window,
+ WindowHandle,
};
-use anyhow::{Context as _, anyhow};
+use anyhow::Context as _;
use derive_more::{Deref, DerefMut};
use futures::channel::oneshot;
use std::{future::Future, rc::Weak};
@@ -12,7 +13,10 @@ use super::{Context, WeakEntity};
/// An async-friendly version of [App] with a static lifetime so it can be held across `await` points in async code.
/// You're provided with an instance when calling [App::spawn], and you can also create one with [App::to_async].
-/// Internally, this holds a weak reference to an `App`, so its methods are fallible to protect against cases where the [App] is dropped.
+///
+/// Internally, this holds a weak reference to an `App`. Methods will panic if the app has been dropped,
+/// but this should not happen in practice when using foreground tasks spawned via `cx.spawn()`,
+/// as the executor checks if the app is alive before running each task.
#[derive(Clone)]
pub struct AsyncApp {
pub(crate) app: Weak<AppCell>,
@@ -20,64 +24,61 @@ pub struct AsyncApp {
pub(crate) foreground_executor: ForegroundExecutor,
}
-impl AppContext for AsyncApp {
- type Result<T> = Result<T>;
+impl AsyncApp {
+ fn app(&self) -> std::rc::Rc<AppCell> {
+ self.app
+ .upgrade()
+ .expect("app was released before async operation completed")
+ }
+}
- fn new<T: 'static>(
- &mut self,
- build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
- let app = self.app.upgrade().context("app was released")?;
+impl AppContext for AsyncApp {
+ fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T> {
+ let app = self.app();
let mut app = app.borrow_mut();
- Ok(app.new(build_entity))
+ app.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Result<Reservation<T>> {
- let app = self.app.upgrade().context("app was released")?;
+ fn reserve_entity<T: 'static>(&mut self) -> Reservation<T> {
+ let app = self.app();
let mut app = app.borrow_mut();
- Ok(app.reserve_entity())
+ app.reserve_entity()
}
fn insert_entity<T: 'static>(
&mut self,
reservation: Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Result<Entity<T>> {
- let app = self.app.upgrade().context("app was released")?;
+ ) -> Entity<T> {
+ let app = self.app();
let mut app = app.borrow_mut();
- Ok(app.insert_entity(reservation, build_entity))
+ app.insert_entity(reservation, build_entity)
}
fn update_entity<T: 'static, R>(
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> Self::Result<R> {
- let app = self.app.upgrade().context("app was released")?;
+ ) -> R {
+ let app = self.app();
let mut app = app.borrow_mut();
- Ok(app.update_entity(handle, update))
+ app.update_entity(handle, update)
}
- fn as_mut<'a, T>(&'a mut self, _handle: &Entity<T>) -> Self::Result<super::GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, _handle: &Entity<T>) -> GpuiBorrow<'a, T>
where
T: 'static,
{
- Err(anyhow!(
- "Cannot as_mut with an async context. Try calling update() first"
- ))
+ panic!("Cannot as_mut with an async context. Try calling update() first")
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- callback: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, callback: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
- let app = self.app.upgrade().context("app was released")?;
+ let app = self.app();
let lock = app.borrow();
- Ok(lock.read_entity(handle, callback))
+ lock.read_entity(handle, callback)
}
fn update_window<T, F>(&mut self, window: AnyWindowHandle, f: F) -> Result<T>
@@ -109,23 +110,22 @@ impl AppContext for AsyncApp {
self.background_executor.spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
- let app = self.app.upgrade().context("app was released")?;
+ let app = self.app();
let mut lock = app.borrow_mut();
- Ok(lock.update(|this| this.read_global(callback)))
+ lock.update(|this| this.read_global(callback))
}
}
impl AsyncApp {
/// Schedules all windows in the application to be redrawn.
- pub fn refresh(&self) -> Result<()> {
- let app = self.app.upgrade().context("app was released")?;
+ pub fn refresh(&self) {
+ let app = self.app();
let mut lock = app.borrow_mut();
lock.refresh_windows();
- Ok(())
}
/// Get an executor which can be used to spawn futures in the background.
@@ -139,10 +139,10 @@ impl AsyncApp {
}
/// Invoke the given function in the context of the app, then flush any effects produced during its invocation.
- pub fn update<R>(&self, f: impl FnOnce(&mut App) -> R) -> Result<R> {
- let app = self.app.upgrade().context("app was released")?;
+ pub fn update<R>(&self, f: impl FnOnce(&mut App) -> R) -> R {
+ let app = self.app();
let mut lock = app.borrow_mut();
- Ok(lock.update(f))
+ lock.update(f)
}
/// Arrange for the given callback to be invoked whenever the given entity emits an event of a given type.
@@ -150,16 +150,15 @@ impl AsyncApp {
pub fn subscribe<T, Event>(
&mut self,
entity: &Entity<T>,
- mut on_event: impl FnMut(Entity<T>, &Event, &mut App) + 'static,
- ) -> Result<Subscription>
+ on_event: impl FnMut(Entity<T>, &Event, &mut App) + 'static,
+ ) -> Subscription
where
T: 'static + EventEmitter<Event>,
Event: 'static,
{
- let app = self.app.upgrade().context("app was released")?;
+ let app = self.app();
let mut lock = app.borrow_mut();
- let subscription = lock.subscribe(entity, on_event);
- Ok(subscription)
+ lock.subscribe(entity, on_event)
}
/// Open a window with the given options based on the root view returned by the given function.
@@ -171,7 +170,7 @@ impl AsyncApp {
where
V: 'static + Render,
{
- let app = self.app.upgrade().context("app was released")?;
+ let app = self.app();
let mut lock = app.borrow_mut();
lock.open_window(options, build_root_view)
}
@@ -189,61 +188,50 @@ impl AsyncApp {
}
/// Determine whether global state of the specified type has been assigned.
- /// Returns an error if the `App` has been dropped.
- pub fn has_global<G: Global>(&self) -> Result<bool> {
- let app = self.app.upgrade().context("app was released")?;
+ pub fn has_global<G: Global>(&self) -> bool {
+ let app = self.app();
let app = app.borrow_mut();
- Ok(app.has_global::<G>())
+ app.has_global::<G>()
}
/// Reads the global state of the specified type, passing it to the given callback.
///
/// Panics if no global state of the specified type has been assigned.
- /// Returns an error if the `App` has been dropped.
- pub fn read_global<G: Global, R>(&self, read: impl FnOnce(&G, &App) -> R) -> Result<R> {
- let app = self.app.upgrade().context("app was released")?;
+ pub fn read_global<G: Global, R>(&self, read: impl FnOnce(&G, &App) -> R) -> R {
+ let app = self.app();
let app = app.borrow_mut();
- Ok(read(app.global(), &app))
+ read(app.global(), &app)
}
/// Reads the global state of the specified type, passing it to the given callback.
///
/// Similar to [`AsyncApp::read_global`], but returns an error instead of panicking
- /// if no state of the specified type has been assigned.
- ///
- /// Returns an error if no state of the specified type has been assigned the `App` has been dropped.
pub fn try_read_global<G: Global, R>(&self, read: impl FnOnce(&G, &App) -> R) -> Option<R> {
- let app = self.app.upgrade()?;
+ let app = self.app();
let app = app.borrow_mut();
Some(read(app.try_global()?, &app))
}
/// Reads the global state of the specified type, passing it to the given callback.
/// A default value is assigned if a global of this type has not yet been assigned.
- ///
- /// # Errors
- /// If the app has ben dropped this returns an error.
- pub fn try_read_default_global<G: Global + Default, R>(
+ pub fn read_default_global<G: Global + Default, R>(
&self,
read: impl FnOnce(&G, &App) -> R,
- ) -> Result<R> {
- let app = self.app.upgrade().context("app was released")?;
+ ) -> R {
+ let app = self.app();
let mut app = app.borrow_mut();
app.update(|cx| {
cx.default_global::<G>();
});
- Ok(read(app.try_global().context("app was released")?, &app))
+ read(app.global(), &app)
}
/// A convenience method for [`App::update_global`](BorrowAppContext::update_global)
/// for updating the global state of the specified type.
- pub fn update_global<G: Global, R>(
- &self,
- update: impl FnOnce(&mut G, &mut App) -> R,
- ) -> Result<R> {
- let app = self.app.upgrade().context("app was released")?;
+ pub fn update_global<G: Global, R>(&self, update: impl FnOnce(&mut G, &mut App) -> R) -> R {
+ let app = self.app();
let mut app = app.borrow_mut();
- Ok(app.update(|cx| cx.update_global(update)))
+ app.update(|cx| cx.update_global(update))
}
/// Run something using this entity and cx, when the returned struct is dropped
@@ -359,54 +347,41 @@ impl AsyncWindowContext {
}
impl AppContext for AsyncWindowContext {
- type Result<T> = Result<T>;
-
- fn new<T>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Result<Entity<T>>
+ fn new<T>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T>
where
T: 'static,
{
- self.app
- .update_window(self.window, |_, _, cx| cx.new(build_entity))
+ self.app.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Result<Reservation<T>> {
- self.app
- .update_window(self.window, |_, _, cx| cx.reserve_entity())
+ fn reserve_entity<T: 'static>(&mut self) -> Reservation<T> {
+ self.app.reserve_entity()
}
fn insert_entity<T: 'static>(
&mut self,
reservation: Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
- self.app.update_window(self.window, |_, _, cx| {
- cx.insert_entity(reservation, build_entity)
- })
+ ) -> Entity<T> {
+ self.app.insert_entity(reservation, build_entity)
}
fn update_entity<T: 'static, R>(
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> Result<R> {
- self.app
- .update_window(self.window, |_, _, cx| cx.update_entity(handle, update))
+ ) -> R {
+ self.app.update_entity(handle, update)
}
- fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> Self::Result<super::GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> GpuiBorrow<'a, T>
where
T: 'static,
{
- Err(anyhow!(
- "Cannot use as_mut() from an async context, call `update`"
- ))
+ panic!("Cannot use as_mut() from an async context, call `update`")
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
@@ -438,7 +413,7 @@ impl AppContext for AsyncWindowContext {
self.app.background_executor.spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
@@ -447,6 +422,8 @@ impl AppContext for AsyncWindowContext {
}
impl VisualContext for AsyncWindowContext {
+ type Result<T> = Result<T>;
+
fn window_handle(&self) -> AnyWindowHandle {
self.window
}
@@ -454,7 +431,7 @@ impl VisualContext for AsyncWindowContext {
fn new_window_entity<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut Window, &mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Result<Entity<T>> {
self.app.update_window(self.window, |_, window, cx| {
cx.new(|cx| build_entity(window, cx))
})
@@ -464,7 +441,7 @@ impl VisualContext for AsyncWindowContext {
&mut self,
view: &Entity<T>,
update: impl FnOnce(&mut T, &mut Window, &mut Context<T>) -> R,
- ) -> Self::Result<R> {
+ ) -> Result<R> {
self.app.update_window(self.window, |_, window, cx| {
view.update(cx, |entity, cx| update(entity, window, cx))
})
@@ -473,7 +450,7 @@ impl VisualContext for AsyncWindowContext {
fn replace_root_view<V>(
&mut self,
build_view: impl FnOnce(&mut Window, &mut Context<V>) -> V,
- ) -> Self::Result<Entity<V>>
+ ) -> Result<Entity<V>>
where
V: 'static + Render,
{
@@ -482,7 +459,7 @@ impl VisualContext for AsyncWindowContext {
})
}
- fn focus<V>(&mut self, view: &Entity<V>) -> Self::Result<()>
+ fn focus<V>(&mut self, view: &Entity<V>) -> Result<()>
where
V: Focusable,
{
@@ -753,8 +753,6 @@ impl<T> Context<'_, T> {
}
impl<T> AppContext for Context<'_, T> {
- type Result<U> = U;
-
#[inline]
fn new<U: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<U>) -> U) -> Entity<U> {
self.app.new(build_entity)
@@ -770,7 +768,7 @@ impl<T> AppContext for Context<'_, T> {
&mut self,
reservation: Reservation<U>,
build_entity: impl FnOnce(&mut Context<U>) -> U,
- ) -> Self::Result<Entity<U>> {
+ ) -> Entity<U> {
self.app.insert_entity(reservation, build_entity)
}
@@ -784,7 +782,7 @@ impl<T> AppContext for Context<'_, T> {
}
#[inline]
- fn as_mut<'a, E>(&'a mut self, handle: &Entity<E>) -> Self::Result<super::GpuiBorrow<'a, E>>
+ fn as_mut<'a, E>(&'a mut self, handle: &Entity<E>) -> super::GpuiBorrow<'a, E>
where
E: 'static,
{
@@ -792,11 +790,7 @@ impl<T> AppContext for Context<'_, T> {
}
#[inline]
- fn read_entity<U, R>(
- &self,
- handle: &Entity<U>,
- read: impl FnOnce(&U, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<U, R>(&self, handle: &Entity<U>, read: impl FnOnce(&U, &App) -> R) -> R
where
U: 'static,
{
@@ -832,7 +826,7 @@ impl<T> AppContext for Context<'_, T> {
}
#[inline]
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
@@ -431,11 +431,7 @@ impl<T: 'static> Entity<T> {
/// Read the entity referenced by this handle with the given function.
#[inline]
- pub fn read_with<R, C: AppContext>(
- &self,
- cx: &C,
- f: impl FnOnce(&T, &App) -> R,
- ) -> C::Result<R> {
+ pub fn read_with<R, C: AppContext>(&self, cx: &C, f: impl FnOnce(&T, &App) -> R) -> R {
cx.read_entity(self, f)
}
@@ -445,18 +441,18 @@ impl<T: 'static> Entity<T> {
&self,
cx: &mut C,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> C::Result<R> {
+ ) -> R {
cx.update_entity(self, update)
}
/// Updates the entity referenced by this handle with the given function.
#[inline]
- pub fn as_mut<'a, C: AppContext>(&self, cx: &'a mut C) -> C::Result<GpuiBorrow<'a, T>> {
+ pub fn as_mut<'a, C: AppContext>(&self, cx: &'a mut C) -> GpuiBorrow<'a, T> {
cx.as_mut(self)
}
/// Updates the entity referenced by this handle with the given function.
- pub fn write<C: AppContext>(&self, cx: &mut C, value: T) -> C::Result<()> {
+ pub fn write<C: AppContext>(&self, cx: &mut C, value: T) {
self.update(cx, |entity, cx| {
*entity = value;
cx.notify();
@@ -465,7 +461,7 @@ impl<T: 'static> Entity<T> {
/// Updates the entity referenced by this handle with the given function if
/// the referenced entity still exists, within a visual context that has a window.
- /// Returns an error if the entity has been released.
+ /// Returns an error if the window has been closed.
#[inline]
pub fn update_in<R, C: VisualContext>(
&self,
@@ -749,13 +745,9 @@ impl<T: 'static> WeakEntity<T> {
) -> Result<R>
where
C: AppContext,
- Result<C::Result<R>>: crate::Flatten<R>,
{
- crate::Flatten::flatten(
- self.upgrade()
- .context("entity released")
- .map(|this| cx.update_entity(&this, update)),
- )
+ let entity = self.upgrade().context("entity released")?;
+ Ok(cx.update_entity(&entity, update))
}
/// Updates the entity referenced by this handle with the given function if
@@ -768,14 +760,13 @@ impl<T: 'static> WeakEntity<T> {
) -> Result<R>
where
C: VisualContext,
- Result<C::Result<R>>: crate::Flatten<R>,
{
let window = cx.window_handle();
- let this = self.upgrade().context("entity released")?;
+ let entity = self.upgrade().context("entity released")?;
- crate::Flatten::flatten(window.update(cx, |_, window, cx| {
- this.update(cx, |entity, cx| update(entity, window, cx))
- }))
+ window.update(cx, |_, window, cx| {
+ entity.update(cx, |entity, cx| update(entity, window, cx))
+ })
}
/// Reads the entity referenced by this handle with the given function if
@@ -784,13 +775,9 @@ impl<T: 'static> WeakEntity<T> {
pub fn read_with<C, R>(&self, cx: &C, read: impl FnOnce(&T, &App) -> R) -> Result<R>
where
C: AppContext,
- Result<C::Result<R>>: crate::Flatten<R>,
{
- crate::Flatten::flatten(
- self.upgrade()
- .context("entity released")
- .map(|this| cx.read_entity(&this, read)),
- )
+ let entity = self.upgrade().context("entity released")?;
+ Ok(cx.read_entity(&entity, read))
}
/// Create a new weak entity that can never be upgraded.
@@ -33,17 +33,12 @@ pub struct TestAppContext {
}
impl AppContext for TestAppContext {
- type Result<T> = T;
-
- fn new<T: 'static>(
- &mut self,
- build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T> {
let mut app = self.app.borrow_mut();
app.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<crate::Reservation<T>> {
+ fn reserve_entity<T: 'static>(&mut self) -> crate::Reservation<T> {
let mut app = self.app.borrow_mut();
app.reserve_entity()
}
@@ -52,7 +47,7 @@ impl AppContext for TestAppContext {
&mut self,
reservation: crate::Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
let mut app = self.app.borrow_mut();
app.insert_entity(reservation, build_entity)
}
@@ -61,23 +56,19 @@ impl AppContext for TestAppContext {
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> Self::Result<R> {
+ ) -> R {
let mut app = self.app.borrow_mut();
app.update_entity(handle, update)
}
- fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> Self::Result<super::GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> super::GpuiBorrow<'a, T>
where
T: 'static,
{
panic!("Cannot use as_mut with a test app context. Try calling update() first")
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
@@ -112,7 +103,7 @@ impl AppContext for TestAppContext {
self.background_executor.spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
@@ -918,16 +909,11 @@ impl VisualTestContext {
}
impl AppContext for VisualTestContext {
- type Result<T> = <TestAppContext as AppContext>::Result<T>;
-
- fn new<T: 'static>(
- &mut self,
- build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T> {
self.cx.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<crate::Reservation<T>> {
+ fn reserve_entity<T: 'static>(&mut self) -> crate::Reservation<T> {
self.cx.reserve_entity()
}
@@ -935,7 +921,7 @@ impl AppContext for VisualTestContext {
&mut self,
reservation: crate::Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
self.cx.insert_entity(reservation, build_entity)
}
@@ -943,25 +929,21 @@ impl AppContext for VisualTestContext {
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> Self::Result<R>
+ ) -> R
where
T: 'static,
{
self.cx.update_entity(handle, update)
}
- fn as_mut<'a, T>(&'a mut self, handle: &Entity<T>) -> Self::Result<super::GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, handle: &Entity<T>) -> super::GpuiBorrow<'a, T>
where
T: 'static,
{
self.cx.as_mut(handle)
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
@@ -993,7 +975,7 @@ impl AppContext for VisualTestContext {
self.cx.background_spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
@@ -1002,6 +984,8 @@ impl AppContext for VisualTestContext {
}
impl VisualContext for VisualTestContext {
+ type Result<T> = T;
+
/// Get the underlying window handle underlying this context.
fn window_handle(&self) -> AnyWindowHandle {
self.window
@@ -1010,30 +994,30 @@ impl VisualContext for VisualTestContext {
fn new_window_entity<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut Window, &mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
self.window
.update(&mut self.cx, |_, window, cx| {
cx.new(|cx| build_entity(window, cx))
})
- .unwrap()
+ .expect("window was unexpectedly closed")
}
fn update_window_entity<V: 'static, R>(
&mut self,
view: &Entity<V>,
update: impl FnOnce(&mut V, &mut Window, &mut Context<V>) -> R,
- ) -> Self::Result<R> {
+ ) -> R {
self.window
.update(&mut self.cx, |_, window, cx| {
view.update(cx, |v, cx| update(v, window, cx))
})
- .unwrap()
+ .expect("window was unexpectedly closed")
}
fn replace_root_view<V>(
&mut self,
build_view: impl FnOnce(&mut Window, &mut Context<V>) -> V,
- ) -> Self::Result<Entity<V>>
+ ) -> Entity<V>
where
V: 'static + Render,
{
@@ -1041,15 +1025,15 @@ impl VisualContext for VisualTestContext {
.update(&mut self.cx, |_, window, cx| {
window.replace_root(cx, build_view)
})
- .unwrap()
+ .expect("window was unexpectedly closed")
}
- fn focus<V: crate::Focusable>(&mut self, view: &Entity<V>) -> Self::Result<()> {
+ fn focus<V: crate::Focusable>(&mut self, view: &Entity<V>) {
self.window
.update(&mut self.cx, |_, window, cx| {
view.read(cx).focus_handle(cx).focus(window, cx)
})
- .unwrap()
+ .expect("window was unexpectedly closed")
}
}
@@ -366,17 +366,12 @@ impl Default for VisualTestAppContext {
}
impl AppContext for VisualTestAppContext {
- type Result<T> = T;
-
- fn new<T: 'static>(
- &mut self,
- build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T> {
let mut app = self.app.borrow_mut();
app.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<crate::Reservation<T>> {
+ fn reserve_entity<T: 'static>(&mut self) -> crate::Reservation<T> {
let mut app = self.app.borrow_mut();
app.reserve_entity()
}
@@ -385,7 +380,7 @@ impl AppContext for VisualTestAppContext {
&mut self,
reservation: crate::Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>> {
+ ) -> Entity<T> {
let mut app = self.app.borrow_mut();
app.insert_entity(reservation, build_entity)
}
@@ -394,23 +389,19 @@ impl AppContext for VisualTestAppContext {
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> Self::Result<R> {
+ ) -> R {
let mut app = self.app.borrow_mut();
app.update_entity(handle, update)
}
- fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> Self::Result<crate::GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, _: &Entity<T>) -> crate::GpuiBorrow<'a, T>
where
T: 'static,
{
panic!("Cannot use as_mut with a visual test app context. Try calling update() first")
}
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static,
{
@@ -445,7 +436,7 @@ impl AppContext for VisualTestAppContext {
self.background_executor.spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global,
{
@@ -118,23 +118,16 @@ pub use window::*;
/// The context trait, allows the different contexts in GPUI to be used
/// interchangeably for certain operations.
pub trait AppContext {
- /// The result type for this context, used for async contexts that
- /// can't hold a direct reference to the application context.
- type Result<T>;
-
/// Create a new entity in the app context.
#[expect(
clippy::wrong_self_convention,
reason = "`App::new` is an ubiquitous function for creating entities"
)]
- fn new<T: 'static>(
- &mut self,
- build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>>;
+ fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T>;
/// Reserve a slot for a entity to be inserted later.
/// The returned [Reservation] allows you to obtain the [EntityId] for the future entity.
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<Reservation<T>>;
+ fn reserve_entity<T: 'static>(&mut self) -> Reservation<T>;
/// Insert a new entity in the app context based on a [Reservation] previously obtained from [`reserve_entity`].
///
@@ -143,28 +136,24 @@ pub trait AppContext {
&mut self,
reservation: Reservation<T>,
build_entity: impl FnOnce(&mut Context<T>) -> T,
- ) -> Self::Result<Entity<T>>;
+ ) -> Entity<T>;
/// Update a entity in the app context.
fn update_entity<T, R>(
&mut self,
handle: &Entity<T>,
update: impl FnOnce(&mut T, &mut Context<T>) -> R,
- ) -> Self::Result<R>
+ ) -> R
where
T: 'static;
/// Update a entity in the app context.
- fn as_mut<'a, T>(&'a mut self, handle: &Entity<T>) -> Self::Result<GpuiBorrow<'a, T>>
+ fn as_mut<'a, T>(&'a mut self, handle: &Entity<T>) -> GpuiBorrow<'a, T>
where
T: 'static;
/// Read a entity from the app context.
- fn read_entity<T, R>(
- &self,
- handle: &Entity<T>,
- read: impl FnOnce(&T, &App) -> R,
- ) -> Self::Result<R>
+ fn read_entity<T, R>(&self, handle: &Entity<T>, read: impl FnOnce(&T, &App) -> R) -> R
where
T: 'static;
@@ -188,7 +177,7 @@ pub trait AppContext {
R: Send + 'static;
/// Read a global from this app context
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &App) -> R) -> R
where
G: Global;
}
@@ -207,6 +196,9 @@ impl<T: 'static> Reservation<T> {
/// This trait is used for the different visual contexts in GPUI that
/// require a window to be present.
pub trait VisualContext: AppContext {
+ /// The result type for window operations.
+ type Result<T>;
+
/// Returns the handle of the window associated with this context.
fn window_handle(&self) -> AnyWindowHandle;
@@ -284,24 +276,6 @@ where
}
}
-/// A flatten equivalent for anyhow `Result`s.
-pub trait Flatten<T> {
- /// Convert this type into a simple `Result<T>`.
- fn flatten(self) -> Result<T>;
-}
-
-impl<T> Flatten<T> for Result<Result<T>> {
- fn flatten(self) -> Result<T> {
- self?
- }
-}
-
-impl<T> Flatten<T> for Result<T> {
- fn flatten(self) -> Result<T> {
- self
- }
-}
-
/// Information about the GPU GPUI is running on.
#[derive(Default, Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct GpuSpecs {
@@ -1,5 +1,4 @@
use crate::{Action, App, Platform, SharedString};
-use util::ResultExt;
/// A menu of the application, either a main menu or a submenu
pub struct Menu {
@@ -263,14 +262,18 @@ pub(crate) fn init_app_menus(platform: &dyn Platform, cx: &App) {
platform.on_will_open_app_menu(Box::new({
let cx = cx.to_async();
move || {
- cx.update(|cx| cx.clear_pending_keystrokes()).ok();
+ if let Some(app) = cx.app.upgrade() {
+ app.borrow_mut().update(|cx| cx.clear_pending_keystrokes());
+ }
}
}));
platform.on_validate_app_menu_command(Box::new({
let cx = cx.to_async();
move |action| {
- cx.update(|cx| cx.is_action_available(action))
+ cx.app
+ .upgrade()
+ .map(|app| app.borrow_mut().update(|cx| cx.is_action_available(action)))
.unwrap_or(false)
}
}));
@@ -278,7 +281,9 @@ pub(crate) fn init_app_menus(platform: &dyn Platform, cx: &App) {
platform.on_app_menu_action(Box::new({
let cx = cx.to_async();
move |action| {
- cx.update(|cx| cx.dispatch_action(action)).log_err();
+ if let Some(app) = cx.app.upgrade() {
+ app.borrow_mut().update(|cx| cx.dispatch_action(action));
+ }
}
}));
}
@@ -681,7 +681,7 @@ impl Platform for MacPlatform {
}
self.background_executor()
- .spawn(async { crate::Flatten::flatten(done_rx.await.map_err(|e| anyhow!(e))) })
+ .spawn(async { done_rx.await.map_err(|e| anyhow!(e))? })
}
fn on_open_urls(&self, callback: Box<dyn FnMut(Vec<String>)>) {
@@ -4955,11 +4955,11 @@ impl<V: 'static + Render> WindowHandle<V> {
where
C: AppContext,
{
- crate::Flatten::flatten(cx.update_window(self.any_handle, |root_view, _, _| {
+ cx.update_window(self.any_handle, |root_view, _, _| {
root_view
.downcast::<V>()
.map_err(|_| anyhow!("the type of the window's root view has changed"))
- }))
+ })?
}
/// Updates the root view of this window.
@@ -21,16 +21,14 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream {
impl #impl_generics gpui::AppContext for #type_name #type_generics
#where_clause
{
- type Result<T> = T;
-
fn new<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut gpui::Context<'_, T>) -> T,
- ) -> Self::Result<gpui::Entity<T>> {
+ ) -> gpui::Entity<T> {
self.#app_variable.new(build_entity)
}
- fn reserve_entity<T: 'static>(&mut self) -> Self::Result<gpui::Reservation<T>> {
+ fn reserve_entity<T: 'static>(&mut self) -> gpui::Reservation<T> {
self.#app_variable.reserve_entity()
}
@@ -38,7 +36,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream {
&mut self,
reservation: gpui::Reservation<T>,
build_entity: impl FnOnce(&mut gpui::Context<'_, T>) -> T,
- ) -> Self::Result<gpui::Entity<T>> {
+ ) -> gpui::Entity<T> {
self.#app_variable.insert_entity(reservation, build_entity)
}
@@ -46,7 +44,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream {
&mut self,
handle: &gpui::Entity<T>,
update: impl FnOnce(&mut T, &mut gpui::Context<'_, T>) -> R,
- ) -> Self::Result<R>
+ ) -> R
where
T: 'static,
{
@@ -56,7 +54,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream {
fn as_mut<'y, 'z, T>(
&'y mut self,
handle: &'z gpui::Entity<T>,
- ) -> Self::Result<gpui::GpuiBorrow<'y, T>>
+ ) -> gpui::GpuiBorrow<'y, T>
where
T: 'static,
{
@@ -67,7 +65,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream {
&self,
handle: &gpui::Entity<T>,
read: impl FnOnce(&T, &gpui::App) -> R,
- ) -> Self::Result<R>
+ ) -> R
where
T: 'static,
{
@@ -99,7 +97,7 @@ pub fn derive_app_context(input: TokenStream) -> TokenStream {
self.#app_variable.background_spawn(future)
}
- fn read_global<G, R>(&self, callback: impl FnOnce(&G, &gpui::App) -> R) -> Self::Result<R>
+ fn read_global<G, R>(&self, callback: impl FnOnce(&G, &gpui::App) -> R) -> R
where
G: gpui::Global,
{
@@ -28,6 +28,8 @@ pub fn derive_visual_context(input: TokenStream) -> TokenStream {
impl #impl_generics gpui::VisualContext for #type_name #type_generics
#where_clause
{
+ type Result<T> = T;
+
fn window_handle(&self) -> gpui::AnyWindowHandle {
self.#window_variable.window_handle()
}
@@ -36,33 +38,33 @@ pub fn derive_visual_context(input: TokenStream) -> TokenStream {
&mut self,
entity: &gpui::Entity<T>,
update: impl FnOnce(&mut T, &mut gpui::Window, &mut gpui::Context<T>) -> R,
- ) -> Self::Result<R> {
+ ) -> R {
gpui::AppContext::update_entity(self.#app_variable, entity, |entity, cx| update(entity, self.#window_variable, cx))
}
fn new_window_entity<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut gpui::Window, &mut gpui::Context<'_, T>) -> T,
- ) -> Self::Result<gpui::Entity<T>> {
+ ) -> gpui::Entity<T> {
gpui::AppContext::new(self.#app_variable, |cx| build_entity(self.#window_variable, cx))
}
fn replace_root_view<V>(
&mut self,
build_view: impl FnOnce(&mut gpui::Window, &mut gpui::Context<V>) -> V,
- ) -> Self::Result<gpui::Entity<V>>
+ ) -> gpui::Entity<V>
where
V: 'static + gpui::Render,
{
self.#window_variable.replace_root(self.#app_variable, build_view)
}
- fn focus<V>(&mut self, entity: &gpui::Entity<V>) -> Self::Result<()>
+ fn focus<V>(&mut self, entity: &gpui::Entity<V>)
where
V: gpui::Focusable,
{
let focus_handle = gpui::Focusable::focus_handle(entity, self.#app_variable);
- self.#window_variable.focus(&focus_handle, self.#app_variable)
+ self.#window_variable.focus(&focus_handle, self.#app_variable);
}
}
};
@@ -56,7 +56,7 @@ pub struct Tokio {}
impl Tokio {
/// Spawns the given future on Tokio's thread pool, and returns it via a GPUI task
/// Note that the Tokio task will be cancelled if the GPUI task is dropped
- pub fn spawn<C, Fut, R>(cx: &C, f: Fut) -> C::Result<Task<Result<R, JoinError>>>
+ pub fn spawn<C, Fut, R>(cx: &C, f: Fut) -> Task<Result<R, JoinError>>
where
C: AppContext,
Fut: Future<Output = R> + Send + 'static,
@@ -78,7 +78,7 @@ impl Tokio {
/// Spawns the given future on Tokio's thread pool, and returns it via a GPUI task
/// Note that the Tokio task will be cancelled if the GPUI task is dropped
- pub fn spawn_result<C, Fut, R>(cx: &C, f: Fut) -> C::Result<Task<anyhow::Result<R>>>
+ pub fn spawn_result<C, Fut, R>(cx: &C, f: Fut) -> Task<anyhow::Result<R>>
where
C: AppContext,
Fut: Future<Output = anyhow::Result<R>> + Send + 'static,
@@ -234,10 +234,10 @@ impl SerializableItem for ImageView {
let (worktree, relative_path) = project
.update(cx, |project, cx| {
project.find_or_create_worktree(image_path.clone(), false, cx)
- })?
+ })
.await
.context("Path not found")?;
- let worktree_id = worktree.update(cx, |worktree, _cx| worktree.id())?;
+ let worktree_id = worktree.update(cx, |worktree, _cx| worktree.id());
let project_path = ProjectPath {
worktree_id,
@@ -245,7 +245,7 @@ impl SerializableItem for ImageView {
};
let image_item = project
- .update(cx, |project, cx| project.open_image(project_path, cx))?
+ .update(cx, |project, cx| project.open_image(project_path, cx))
.await?;
cx.update(
@@ -86,7 +86,7 @@ impl DivInspector {
// Create Rust style buffer without adding it to the project / buffer_store, so that
// Rust Analyzer doesn't get started for it.
let rust_language_result = languages.language_for_name("Rust").await;
- let rust_style_buffer = rust_language_result.and_then(|rust_language| {
+ let rust_style_buffer = rust_language_result.map(|rust_language| {
cx.new(|cx| Buffer::local("", cx).with_language_async(rust_language, cx))
});
@@ -462,16 +462,16 @@ impl DivInspector {
cx: &mut AsyncWindowContext,
) -> Result<Entity<Buffer>> {
let worktree = project
- .update(cx, |project, cx| project.create_worktree(path, false, cx))?
+ .update(cx, |project, cx| project.create_worktree(path, false, cx))
.await?;
let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath {
worktree_id: worktree.id(),
path: RelPath::empty().into(),
- })?;
+ });
let buffer = project
- .update(cx, |project, cx| project.open_path(project_path, cx))?
+ .update(cx, |project, cx| project.open_path(project_path, cx))
.await?
.1;
@@ -17,7 +17,7 @@ actions!(
);
async fn install_script(cx: &AsyncApp) -> Result<PathBuf> {
- let cli_path = cx.update(|cx| cx.path_for_auxiliary_executable("cli"))??;
+ let cli_path = cx.update(|cx| cx.path_for_auxiliary_executable("cli"))?;
let link_path = Path::new("/usr/local/bin/zed");
let bin_dir_path = link_path.parent().unwrap();
@@ -10,6 +10,5 @@ actions!(
);
pub async fn register_zed_scheme(cx: &AsyncApp) -> anyhow::Result<()> {
- cx.update(|cx| cx.register_url_scheme(ZED_URL_SCHEME))?
- .await
+ cx.update(|cx| cx.register_url_scheme(ZED_URL_SCHEME)).await
}
@@ -79,7 +79,6 @@ fn handle_schema_request(
) -> Task<Result<String>> {
let languages = lsp_store.read_with(cx, |lsp_store, _| lsp_store.languages.clone());
cx.spawn(async move |cx| {
- let languages = languages?;
let schema = resolve_schema_request(&languages, lsp_store, uri, cx).await?;
serde_json::to_string(&schema).context("Failed to serialize schema")
})
@@ -136,7 +135,7 @@ pub async fn resolve_schema_request_inner(
local, &worktree, cx,
))
})
- })?
+ })
.context(concat!(
"Failed to create adapter delegate - ",
"either LSP store is not in local mode or no worktree is available"
@@ -190,16 +189,16 @@ pub async fn resolve_schema_request_inner(
lsp_adapter_names: &lsp_adapter_names,
},
)
- })?
+ })
}
- "keymap" => cx.update(settings::KeymapFile::generate_json_schema_for_registered_actions)?,
+ "keymap" => cx.update(settings::KeymapFile::generate_json_schema_for_registered_actions),
"action" => {
let normalized_action_name = rest.context("No Action name provided")?;
let action_name = denormalize_action_name(normalized_action_name);
let mut generator = settings::KeymapFile::action_schema_generator();
let schema = cx
// PERF: cx.action_schema_by_name(action_name, &mut generator)
- .update(|cx| cx.action_schemas(&mut generator))?
+ .update(|cx| cx.action_schemas(&mut generator))
.into_iter()
.find_map(|(name, schema)| (name == action_name).then_some(schema))
.flatten();
@@ -209,7 +208,7 @@ pub async fn resolve_schema_request_inner(
"debug_tasks" => {
let adapter_schemas = cx.read_global::<dap::DapRegistry, _>(|dap_registry, _| {
dap_registry.adapters_schema()
- })?;
+ });
task::DebugTaskFile::generate_json_schema(&adapter_schemas)
}
"package_json" => package_json_schema(),
@@ -2318,20 +2318,18 @@ impl KeybindingEditorModal {
.await;
let language = load_keybind_context_language(workspace, cx).await;
- editor_entity
- .update(cx, |editor, cx| {
- if let Some(buffer) = editor.buffer().read(cx).as_singleton() {
- buffer.update(cx, |buffer, cx| {
- buffer.set_language(Some(language), cx);
- });
- }
- editor.set_completion_provider(Some(std::rc::Rc::new(
- KeyContextCompletionProvider { contexts },
- )));
- })
- .context("Failed to load completions for keybinding context")
+ editor_entity.update(cx, |editor, cx| {
+ if let Some(buffer) = editor.buffer().read(cx).as_singleton() {
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(Some(language), cx);
+ });
+ }
+ editor.set_completion_provider(Some(std::rc::Rc::new(
+ KeyContextCompletionProvider { contexts },
+ )));
+ });
})
- .detach_and_log_err(cx);
+ .detach();
input
});
@@ -2,9 +2,9 @@ use anthropic::{
ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, CountTokensRequest, Event,
ResponseContent, ToolResultContent, ToolResultPart, Usage,
};
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use collections::{BTreeMap, HashMap};
-use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture, stream::BoxStream};
+use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Task};
use http_client::HttpClient;
use language_model::{
@@ -444,12 +444,10 @@ impl AnthropicModel {
> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = AnthropicLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped").into())).boxed();
- };
+ });
let beta_headers = self.model.beta_headers();
@@ -1020,13 +1018,9 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn({
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
- // We don't log an error, because "not signed in" is also an error.
- let _ = task.await;
- }
+ let task = state.update(cx, |state, cx| state.authenticate(cx));
+ // We don't log an error, because "not signed in" is also an error.
+ let _ = task.await;
this.update(cx, |this, cx| {
this.load_credentials_task = None;
cx.notify();
@@ -1056,7 +1050,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -1069,7 +1063,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -527,7 +527,7 @@ impl BedrockModel {
let endpoint = state.settings.as_ref().and_then(|s| s.endpoint.clone());
let region = state.get_region();
(state.auth.clone(), endpoint, region)
- })?;
+ });
let mut config_builder = aws_config::defaults(BehaviorVersion::latest())
.stalled_stream_protection(StalledStreamProtectionConfig::disabled())
@@ -597,10 +597,8 @@ impl BedrockModel {
return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
};
- match Tokio::spawn(cx, bedrock::stream_completion(runtime_client, request)) {
- Ok(res) => async { res.await.map_err(|err| anyhow!(err))? }.boxed(),
- Err(err) => futures::future::ready(Err(anyhow!(err))).boxed(),
- }
+ let task = Tokio::spawn(cx, bedrock::stream_completion(runtime_client, request));
+ async move { task.await.map_err(|err| anyhow!(err))? }.boxed()
}
}
@@ -670,11 +668,9 @@ impl LanguageModel for BedrockModel {
LanguageModelCompletionError,
>,
> {
- let Ok((region, allow_global)) = cx.read_entity(&self.state, |state, _cx| {
+ let (region, allow_global) = cx.read_entity(&self.state, |state, _cx| {
(state.get_region(), state.get_allow_global())
- }) else {
- return async move { Err(anyhow::anyhow!("App State Dropped").into()) }.boxed();
- };
+ });
    let model_id = match self.model.cross_region_inference_id(&region, allow_global) {
Ok(s) => s,
@@ -1194,10 +1190,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn({
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -1273,7 +1266,7 @@ impl ConfigurationView {
};
state.set_static_credentials(credentials, cx)
- })?
+ })
.await
})
.detach_and_log_err(cx);
@@ -1290,7 +1283,7 @@ impl ConfigurationView {
.update(cx, |editor, cx| editor.set_text("", window, cx));
let state = self.state.clone();
- cx.spawn(async move |_, cx| state.update(cx, |state, cx| state.reset_auth(cx))?.await)
+ cx.spawn(async move |_, cx| state.update(cx, |state, cx| state.reset_auth(cx)).await)
.detach_and_log_err(cx);
}
@@ -755,7 +755,7 @@ impl LanguageModel for CloudLanguageModel {
let prompt_id = request.prompt_id.clone();
let intent = request.intent;
let mode = request.mode;
- let app_version = cx.update(|cx| AppVersion::global(cx)).ok();
+ let app_version = Some(cx.update(|cx| AppVersion::global(cx)));
let thinking_allowed = request.thinking_allowed;
let provider_name = provider_name(&self.model.provider);
match self.model.provider {
@@ -3,7 +3,7 @@ use collections::{BTreeMap, HashMap};
use deepseek::DEEPSEEK_API_URL;
use futures::Stream;
-use futures::{FutureExt, StreamExt, future, future::BoxFuture, stream::BoxStream};
+use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
@@ -203,12 +203,10 @@ impl DeepSeekLanguageModel {
) -> BoxFuture<'static, Result<BoxStream<'static, Result<deepseek::StreamResponse>>>> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = DeepSeekLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let Some(api_key) = api_key else {
@@ -540,10 +538,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn({
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
let _ = task.await;
}
@@ -571,7 +566,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn(async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -584,7 +579,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn(async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -1,7 +1,7 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use collections::BTreeMap;
use credentials_provider::CredentialsProvider;
-use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture};
+use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
use google_ai::{
FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction,
ThinkingConfig, UsageMetadata,
@@ -256,12 +256,10 @@ impl GoogleLanguageModel {
> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = GoogleLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
async move {
let api_key = api_key.context("Missing Google API key")?;
@@ -771,10 +769,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -807,7 +802,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -820,7 +815,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -376,12 +376,10 @@ impl LmStudioLanguageModel {
Result<futures::stream::BoxStream<'static, Result<lmstudio::ResponseStreamEvent>>>,
> {
let http_client = self.http_client.clone();
- let Ok(api_url) = cx.update(|cx| {
+ let api_url = cx.update(|cx| {
let settings = &AllLanguageModelSettings::get_global(cx).lmstudio;
settings.api_url.clone()
- }) else {
- return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let request = lmstudio::stream_chat_completion(http_client.as_ref(), &api_url, request);
@@ -644,10 +642,7 @@ impl ConfigurationView {
let loading_models_task = Some(cx.spawn({
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
task.await.log_err();
}
this.update(cx, |this, cx| {
@@ -1,7 +1,7 @@
use anyhow::{Result, anyhow};
use collections::BTreeMap;
-use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture, stream::BoxStream};
+use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
@@ -272,12 +272,10 @@ impl MistralLanguageModel {
> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = MistralLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let Some(api_key) = api_key else {
@@ -754,10 +752,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -790,7 +785,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -803,7 +798,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -479,12 +479,10 @@ impl LanguageModel for OllamaLanguageModel {
let request = self.to_ollama_request(request);
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = OllamaLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let stream =
@@ -645,7 +643,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -658,7 +656,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -1,7 +1,7 @@
use anyhow::{Result, anyhow};
use collections::{BTreeMap, HashMap};
use futures::Stream;
-use futures::{FutureExt, StreamExt, future, future::BoxFuture};
+use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
@@ -215,12 +215,10 @@ impl OpenAiLanguageModel {
{
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = OpenAiLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let provider = PROVIDER_NAME;
@@ -249,12 +247,10 @@ impl OpenAiLanguageModel {
{
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = OpenAiLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let provider = PROVIDER_NAME;
let future = self.request_limiter.stream(async move {
@@ -1206,10 +1202,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -1241,7 +1234,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -1254,7 +1247,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -1,6 +1,6 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use convert_case::{Case, Casing};
-use futures::{FutureExt, StreamExt, future, future::BoxFuture};
+use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
@@ -214,15 +214,13 @@ impl OpenAiCompatibleLanguageModel {
> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, _cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, _cx| {
let api_url = &state.settings.api_url;
(
state.api_key_state.key(api_url),
state.settings.api_url.clone(),
)
- }) else {
- return future::ready(Err(anyhow!("App state dropped").into())).boxed();
- };
+ });
let provider = self.provider_name.clone();
let future = self.request_limiter.stream(async move {
@@ -251,15 +249,13 @@ impl OpenAiCompatibleLanguageModel {
{
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, _cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, _cx| {
let api_url = &state.settings.api_url;
(
state.api_key_state.key(api_url),
state.settings.api_url.clone(),
)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let provider = self.provider_name.clone();
let future = self.request_limiter.stream(async move {
@@ -428,10 +424,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -463,7 +456,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -476,7 +469,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -1,6 +1,6 @@
use anyhow::{Result, anyhow};
use collections::HashMap;
-use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture};
+use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task};
use http_client::HttpClient;
use language_model::{
@@ -273,12 +273,10 @@ impl OpenRouterLanguageModel {
>,
> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = OpenRouterLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped").into())).boxed();
- };
+ });
async move {
let Some(api_key) = api_key else {
@@ -752,10 +750,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
let _ = task.await;
}
@@ -787,7 +782,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -800,7 +795,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -1,6 +1,6 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use collections::BTreeMap;
-use futures::{FutureExt, StreamExt, future, future::BoxFuture};
+use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
@@ -198,12 +198,10 @@ impl VercelLanguageModel {
{
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = VercelLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped"))).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let provider = PROVIDER_NAME;
@@ -379,10 +377,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -414,7 +409,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -427,7 +422,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -1,6 +1,6 @@
-use anyhow::{Result, anyhow};
+use anyhow::Result;
use collections::BTreeMap;
-use futures::{FutureExt, StreamExt, future, future::BoxFuture};
+use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window};
use http_client::HttpClient;
use language_model::{
@@ -207,12 +207,10 @@ impl XAiLanguageModel {
> {
let http_client = self.http_client.clone();
- let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
+ let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
let api_url = XAiLanguageModelProvider::api_url(cx);
(state.api_key_state.key(&api_url), api_url)
- }) else {
- return future::ready(Err(anyhow!("App state dropped").into())).boxed();
- };
+ });
let future = self.request_limiter.stream(async move {
let provider = PROVIDER_NAME;
@@ -382,10 +380,7 @@ impl ConfigurationView {
let load_credentials_task = Some(cx.spawn_in(window, {
let state = state.clone();
async move |this, cx| {
- if let Some(task) = state
- .update(cx, |state, cx| state.authenticate(cx))
- .log_err()
- {
+ if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
// We don't log an error, because "not signed in" is also an error.
let _ = task.await;
}
@@ -417,7 +412,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
+ .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
.await
})
.detach_and_log_err(cx);
@@ -430,7 +425,7 @@ impl ConfigurationView {
let state = self.state.clone();
cx.spawn_in(window, async move |_, cx| {
state
- .update(cx, |state, cx| state.set_api_key(None, cx))?
+ .update(cx, |state, cx| state.set_api_key(None, cx))
.await
})
.detach_and_log_err(cx);
@@ -205,7 +205,8 @@ impl PickerDelegate for LanguageSelectorDelegate {
let buffer = buffer.upgrade().context("buffer was dropped")?;
project.update(cx, |project, cx| {
project.set_language_for_buffer(&buffer, language, cx);
- })
+ });
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
}
@@ -334,7 +334,7 @@ impl LanguageServerState {
cx,
);
buffer.set_capability(language::Capability::ReadOnly, cx);
- })?;
+ });
workspace.update(cx, |workspace, cx| {
window_handle.update(cx, |_, window, cx| {
@@ -569,10 +569,10 @@ impl LspLogView {
let language = language.await.ok();
buffer.update(cx, |buffer, cx| {
buffer.set_language(language, cx);
- })
+ });
}
})
- .detach_and_log_err(cx);
+ .detach();
});
self.editor = editor;
@@ -162,7 +162,7 @@ impl LspAdapter for CssLspAdapter {
let project_options = cx.update(|cx| {
language_server_settings(delegate.as_ref(), &self.name(), cx)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(override_options) = project_options {
merge_json_value_into(override_options, &mut default_config);
@@ -237,7 +237,7 @@ impl LspAdapter for EsLintLspAdapter {
cx,
)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(override_options) = override_options {
let working_directories = override_options.get("workingDirectories").and_then(|wd| {
@@ -71,7 +71,7 @@ impl LspInstaller for GoLspAdapter {
{
cx.update(|cx| {
delegate.show_notification(NOTIFICATION_MESSAGE, cx);
- })?
+ });
}
anyhow::bail!(
"Could not install the Go language server `gopls`, because `go` was not found."
@@ -58,10 +58,9 @@ impl ContextProvider for JsonTaskProvider {
let contents = file
.worktree
.update(cx, |this, cx| this.load_file(&file.path, cx))
- .ok()?
.await
.ok()?;
- let path = cx.update(|cx| file.abs_path(cx)).ok()?.as_path().into();
+ let path = cx.update(|cx| file.abs_path(cx)).as_path().into();
let task_templates = if is_package_json {
let package_json = serde_json_lenient::from_str::<
@@ -273,11 +272,11 @@ impl LspAdapter for JsonLspAdapter {
"schemas": schemas
}
})
- })?;
+ });
let project_options = cx.update(|cx| {
language_server_settings(delegate.as_ref(), &self.name(), cx)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(override_options) = project_options {
merge_json_value_into(override_options, &mut config);
@@ -326,7 +326,7 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
)
.log_err();
});
- })?;
+ });
prev_language_settings = language_settings;
}
}
@@ -249,7 +249,7 @@ impl LspAdapter for TyLspAdapter {
.update(|cx| {
language_server_settings(delegate.as_ref(), &self.name(), cx)
.and_then(|s| s.settings.clone())
- })?
+ })
.unwrap_or_else(|| json!({}));
if let Some(toolchain) = toolchain.and_then(|toolchain| {
serde_json::from_value::<PythonToolchainData>(toolchain.as_json).ok()
@@ -574,7 +574,7 @@ impl LspAdapter for PyrightLspAdapter {
_: Option<Uri>,
cx: &mut AsyncApp,
) -> Result<Value> {
- cx.update(move |cx| {
+ Ok(cx.update(move |cx| {
let mut user_settings =
language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
@@ -636,7 +636,7 @@ impl LspAdapter for PyrightLspAdapter {
}
user_settings
- })
+ }))
}
}
@@ -1703,7 +1703,7 @@ impl LspAdapter for PyLspAdapter {
_: Option<Uri>,
cx: &mut AsyncApp,
) -> Result<Value> {
- cx.update(move |cx| {
+ Ok(cx.update(move |cx| {
let mut user_settings =
language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
@@ -1761,7 +1761,7 @@ impl LspAdapter for PyLspAdapter {
)]));
user_settings
- })
+ }))
}
}
@@ -1995,7 +1995,7 @@ impl LspAdapter for BasedPyrightLspAdapter {
_: Option<Uri>,
cx: &mut AsyncApp,
) -> Result<Value> {
- cx.update(move |cx| {
+ Ok(cx.update(move |cx| {
let mut user_settings =
language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
@@ -2070,7 +2070,7 @@ impl LspAdapter for BasedPyrightLspAdapter {
}
user_settings
- })
+ }))
}
}
@@ -156,7 +156,7 @@ impl LspAdapter for TailwindLspAdapter {
language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
.unwrap_or_default()
- })?;
+ });
if tailwind_user_settings.get("emmetCompletions").is_none() {
tailwind_user_settings["emmetCompletions"] = Value::Bool(true);
@@ -163,7 +163,7 @@ impl LspAdapter for TailwindCssLspAdapter {
let project_options = cx.update(|cx| {
language_server_settings(delegate.as_ref(), &self.name(), cx)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(override_options) = project_options {
merge_json_value_into(override_options, &mut default_config);
@@ -835,7 +835,7 @@ impl LspAdapter for TypeScriptLspAdapter {
let override_options = cx.update(|cx| {
language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(options) = override_options {
return Ok(options);
}
@@ -291,7 +291,7 @@ impl LspAdapter for VtslsLspAdapter {
let override_options = cx.update(|cx| {
language_server_settings(delegate.as_ref(), &SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(override_options) = override_options {
merge_json_value_into(override_options, &mut default_workspace_configuration)
@@ -147,7 +147,7 @@ impl LspAdapter for YamlLspAdapter {
AllLanguageSettings::get(Some(location), cx)
.language(Some(location), Some(&"YAML".into()), cx)
.tab_size
- })?;
+ });
let mut options = serde_json::json!({
"[yaml]": {"editor.tabSize": tab_size},
@@ -157,7 +157,7 @@ impl LspAdapter for YamlLspAdapter {
let project_options = cx.update(|cx| {
language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx)
.and_then(|s| s.settings.clone())
- })?;
+ });
if let Some(override_options) = project_options {
merge_json_value_into(override_options, &mut options);
}
@@ -144,7 +144,6 @@ impl LivekitWindow {
)
.unwrap()
})
- .unwrap()
}
fn handle_room_event(&mut self, event: RoomEvent, window: &mut Window, cx: &mut Context<Self>) {
@@ -60,7 +60,7 @@ impl Room {
config.connector = Some(connector);
let (room, mut events) = Tokio::spawn(cx, async move {
livekit::Room::connect(&url, &token, config).await
- })?
+ })
.await??;
let (mut tx, rx) = mpsc::unbounded();
@@ -189,7 +189,7 @@ impl LocalParticipant {
let participant = self.0.clone();
Tokio::spawn(cx, async move {
participant.publish_track(track, options).await
- })?
+ })
.await?
.map(LocalTrackPublication)
.context("publishing a track")
@@ -201,7 +201,7 @@ impl LocalParticipant {
cx: &mut AsyncApp,
) -> Result<LocalTrackPublication> {
let participant = self.0.clone();
- Tokio::spawn(cx, async move { participant.unpublish_track(&sid).await })?
+ Tokio::spawn(cx, async move { participant.unpublish_track(&sid).await })
.await?
.map(LocalTrackPublication)
.context("unpublishing a track")
@@ -478,7 +478,7 @@ pub(crate) async fn capture_local_video_track(
width: metadata.resolution.width.0 as u32,
height: metadata.resolution.height.0 as u32,
})
- })?
+ })
.await?;
let capture_stream = capture_source
@@ -79,16 +79,13 @@ impl NotificationStore {
let this = this.upgrade()?;
match status {
client::Status::Connected { .. } => {
- if let Some(task) = this
- .update(cx, |this, cx| this.handle_connect(cx))
- .log_err()?
- {
+ if let Some(task) = this.update(cx, |this, cx| this.handle_connect(cx)) {
task.await.log_err()?;
}
}
- _ => this
- .update(cx, |this, cx| this.handle_disconnect(cx))
- .log_err()?,
+ _ => {
+ this.update(cx, |this, cx| this.handle_disconnect(cx));
+ }
}
}
Some(())
@@ -161,7 +158,7 @@ impl NotificationStore {
.context("Notification store was dropped while loading notifications")?;
let response = request.await?;
- this.update(cx, |this, _| this.loaded_all_notifications = response.done)?;
+ this.update(cx, |this, _| this.loaded_all_notifications = response.done);
Self::add_notifications(
this,
response.notifications,
@@ -212,8 +209,8 @@ impl NotificationStore {
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.splice_notifications([(envelope.payload.notification_id, None)], false, cx);
- Ok(())
- })?
+ });
+ Ok(())
}
async fn add_notifications(
@@ -259,10 +256,10 @@ impl NotificationStore {
}
}
- let user_store = this.read_with(cx, |this, _| this.user_store.clone())?;
+ let user_store = this.read_with(cx, |this, _| this.user_store.clone());
user_store
- .update(cx, |store, cx| store.get_users(user_ids, cx))?
+ .update(cx, |store, cx| store.get_users(user_ids, cx))
.await?;
this.update(cx, |this, cx| {
if options.clear_old {
@@ -285,8 +282,7 @@ impl NotificationStore {
options.is_new,
cx,
);
- })
- .log_err();
+ });
Ok(())
}
@@ -2084,7 +2084,7 @@ impl OutlinePanel {
let entry = worktree.read(cx).entry_for_id(entry_id)?.clone();
Some((worktree, entry))
})
- })?,
+ }),
PanelEntry::Outline(outline_entry) => {
let (buffer_id, excerpt_id) = outline_entry.ids();
outline_panel.update(cx, |outline_panel, cx| {
@@ -329,7 +329,7 @@ impl Prettier {
settings: Default::default(),
};
executor.spawn(server.initialize(params, configuration.into(), cx))
- })?
+ })
.await
.context("prettier server initialization")?;
Ok(Self::Real(RealPrettier {
@@ -475,7 +475,7 @@ impl Prettier {
ignore_path,
},
})
- })?
+ })
.context("building prettier request")?;
let response = local
@@ -483,7 +483,7 @@ impl Prettier {
.request::<Format>(params)
.await
.into_response()?;
- let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(response.text, cx))?;
+ let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(response.text, cx));
Ok(diff_task.await)
}
#[cfg(any(test, feature = "test-support"))]
@@ -520,7 +520,7 @@ impl Prettier {
}
None => panic!("Should not format buffer without a language with prettier"),
}
- })??
+ })?
.await),
}
}
@@ -760,7 +760,7 @@ impl AgentServerStore {
new_version_available_tx,
&mut cx.to_async(),
))
- })??
+ })?
.await?;
Ok(proto::AgentServerCommand {
path: command.path.to_string_lossy().into_owned(),
@@ -840,7 +840,7 @@ impl AgentServerStore {
.collect();
cx.emit(AgentServersUpdated);
Ok(())
- })?
+ })
}
async fn handle_external_extension_agents_updated(
@@ -889,7 +889,7 @@ impl AgentServerStore {
this.reregister_agents(cx);
cx.emit(AgentServersUpdated);
Ok(())
- })?
+ })
}
async fn handle_loading_status_updated(
@@ -904,7 +904,8 @@ impl AgentServerStore {
{
status_tx.send(envelope.payload.status.into()).ok();
}
- })
+ });
+ Ok(())
}
async fn handle_new_version_available(
@@ -921,7 +922,8 @@ impl AgentServerStore {
.send(Some(envelope.payload.version))
.ok();
}
- })
+ });
+ Ok(())
}
pub fn get_extension_id_for_agent(
@@ -148,7 +148,7 @@ impl RemoteBufferStore {
buffer_handle.update(cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
- })?;
+ });
Ok(())
})
@@ -274,14 +274,14 @@ impl RemoteBufferStore {
buffer
.update(cx, |buffer, _| {
buffer.wait_for_edits(transaction.edit_ids.iter().copied())
- })?
+ })
.await?;
if push_to_history {
buffer.update(cx, |buffer, _| {
buffer.push_transaction(transaction.clone(), Instant::now());
buffer.finalize_last_transaction();
- })?;
+ });
}
}
@@ -422,7 +422,8 @@ impl LocalBufferStore {
buffer.file_updated(new_file, cx);
}
buffer.did_save(version.clone(), mtime, cx);
- })
+ });
+ Ok(())
})
}
@@ -625,7 +626,7 @@ impl LocalBufferStore {
let path = path.clone();
let buffer = match load_file.await {
Ok(loaded) => {
- let reservation = cx.reserve_entity::<Buffer>()?;
+ let reservation = cx.reserve_entity::<Buffer>();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
let text_buffer = cx
.background_spawn(async move {
@@ -638,7 +639,7 @@ impl LocalBufferStore {
buffer.set_encoding(loaded.encoding);
buffer.set_has_bom(loaded.has_bom);
buffer
- })?
+ })
}
Err(error) if is_not_found_error(&error) => cx.new(|cx| {
let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
@@ -655,7 +656,7 @@ impl LocalBufferStore {
})),
Capability::ReadWrite,
)
- })?,
+ }),
Err(e) => return Err(e),
};
this.update(cx, |this, cx| {
@@ -703,7 +704,7 @@ impl LocalBufferStore {
) -> Task<Result<Entity<Buffer>>> {
cx.spawn(async move |buffer_store, cx| {
let buffer =
- cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
+ cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx));
buffer_store.update(cx, |buffer_store, cx| {
buffer_store.add_buffer(buffer.clone(), cx).log_err();
if !project_searchable {
@@ -725,7 +726,7 @@ impl LocalBufferStore {
cx.spawn(async move |_, cx| {
let mut project_transaction = ProjectTransaction::default();
for buffer in buffers {
- let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx))?.await?;
+ let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx)).await?;
buffer.update(cx, |buffer, cx| {
if let Some(transaction) = transaction {
if !push_to_history {
@@ -733,7 +734,7 @@ impl LocalBufferStore {
}
project_transaction.0.insert(cx.entity(), transaction);
}
- })?;
+ });
}
Ok(project_transaction)
@@ -1179,7 +1180,7 @@ impl BufferStore {
}
}
Ok(proto::Ack {})
- })?
+ })
}
pub fn register_shared_lsp_handle(
@@ -1348,7 +1349,7 @@ impl BufferStore {
.log_err();
}
Ok(())
- })?
+ })
}
pub async fn handle_save_buffer(
@@ -1365,32 +1366,32 @@ impl BufferStore {
.map(|(_, project_id)| *project_id)
.context("project is not shared")?,
))
- })??;
+ })?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&envelope.payload.version))
- })?
+ })
.await?;
- let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
+ let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
if let Some(new_path) = envelope.payload.new_path
&& let Some(new_path) = ProjectPath::from_proto(new_path)
{
this.update(&mut cx, |this, cx| {
this.save_buffer_as(buffer.clone(), new_path, cx)
- })?
+ })
.await?;
} else {
- this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
+ this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
.await?;
}
- buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
+ Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
project_id,
buffer_id: buffer_id.into(),
version: serialize_version(buffer.saved_version()),
mtime: buffer.saved_mtime().map(|time| time.into()),
- })
+ }))
}
pub async fn handle_close_buffer(
@@ -1415,7 +1416,8 @@ impl BufferStore {
peer_id,
buffer_id
)
- })
+ });
+ Ok(())
}
pub async fn handle_buffer_saved(
@@ -1443,7 +1445,8 @@ impl BufferStore {
})
.log_err();
}
- })
+ });
+ Ok(())
}
pub async fn handle_buffer_reloaded(
@@ -1476,7 +1479,8 @@ impl BufferStore {
})
.log_err();
}
- })
+ });
+ Ok(())
}
pub fn reload_buffers(
@@ -1507,12 +1511,12 @@ impl BufferStore {
buffers.insert(this.get_existing(buffer_id)?);
}
anyhow::Ok(this.reload_buffers(buffers, false, cx))
- })??;
+ })?;
let project_transaction = reload.await?;
let project_transaction = this.update(&mut cx, |this, cx| {
this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
- })?;
+ });
Ok(proto::ReloadBuffersResponse {
transaction: Some(project_transaction),
})
@@ -1546,9 +1550,9 @@ impl BufferStore {
return anyhow::Ok(());
};
- let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx))?;
+ let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx));
let operations = operations.await;
- let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx))?;
+ let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx));
let initial_state = proto::CreateBufferForPeer {
project_id,
@@ -166,15 +166,11 @@ impl Manager {
log::info!("client reconnected, attempting to rejoin projects");
let Some(this) = this.upgrade() else { break };
- match this.update(cx, |this, cx| this.reconnected(cx)) {
- Ok(task) => {
- if task.await.log_err().is_some() {
- return true;
- } else {
- remaining_attempts -= 1;
- }
- }
- Err(_app_dropped) => return false,
+ let task = this.update(cx, |this, cx| this.reconnected(cx));
+ if task.await.log_err().is_some() {
+ return true;
+ } else {
+ remaining_attempts -= 1;
}
} else if client_status.borrow().is_signed_out() {
return false;
@@ -215,7 +211,7 @@ impl Manager {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, disconnecting projects");
- this.update(cx, |this, cx| this.connection_lost(cx))?;
+ this.update(cx, |this, cx| this.connection_lost(cx));
}
Ok(())
@@ -136,10 +136,8 @@ impl ContextServerConfiguration {
enabled: _,
settings,
} => {
- let descriptor = cx
- .update(|cx| registry.read(cx).context_server_descriptor(&id.0))
- .ok()
- .flatten()?;
+ let descriptor =
+ cx.update(|cx| registry.read(cx).context_server_descriptor(&id.0))?;
match descriptor.command(worktree_store, cx).await {
Ok(command) => {
@@ -350,17 +348,15 @@ impl ContextServerStore {
.update(cx, |this, _| {
this.context_server_settings.get(&server.id().0).cloned()
})
- .ok()
- .flatten()
.context("Failed to get context server settings")?;
if !settings.enabled() {
- return Ok(());
+ return anyhow::Ok(());
}
let (registry, worktree_store) = this.update(cx, |this, _| {
(this.registry.clone(), this.worktree_store.clone())
- })?;
+ });
let configuration = ContextServerConfiguration::from_settings(
settings,
server.id(),
@@ -373,7 +369,8 @@ impl ContextServerStore {
this.update(cx, |this, cx| {
this.run_server(server, Arc::new(configuration), cx)
- })
+ });
+ Ok(())
})
.detach_and_log_err(cx);
}
@@ -611,9 +608,7 @@ impl ContextServerStore {
)
})?;
- for (id, _) in
- registry.read_with(cx, |registry, _| registry.context_server_descriptors())?
- {
+ for (id, _) in registry.read_with(cx, |registry, _| registry.context_server_descriptors()) {
configured_servers
.entry(id)
.or_insert(ContextServerSettings::default_extension());
@@ -38,14 +38,14 @@ fn extension_project(
worktree_store: Entity<WorktreeStore>,
cx: &mut AsyncApp,
) -> Result<Arc<ExtensionProject>> {
- worktree_store.update(cx, |worktree_store, cx| {
+ Ok(worktree_store.update(cx, |worktree_store, cx| {
Arc::new(ExtensionProject {
worktree_ids: worktree_store
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).id().to_proto())
.collect(),
})
- })
+ }))
}
impl registry::ContextServerDescriptor for ContextServerDescriptor {
@@ -223,8 +223,6 @@ impl BreakpointStore {
.update(cx, |this, cx| this.open_buffer(path, cx)),
)
})
- .ok()
- .flatten()
.context("Invalid project path")?
.await?;
@@ -263,7 +261,7 @@ impl BreakpointStore {
.collect();
cx.notify();
- })?;
+ });
Ok(())
}
@@ -278,12 +276,12 @@ impl BreakpointStore {
this.worktree_store
.read(cx)
.project_path_for_absolute_path(message.payload.path.as_ref(), cx)
- })?
+ })
.context("Could not resolve provided abs path")?;
let buffer = this
.update(&mut cx, |this, cx| {
this.buffer_store.read(cx).get_by_path(&path)
- })?
+ })
.context("Could not find buffer for a given path")?;
let breakpoint = message
.payload
@@ -309,7 +307,7 @@ impl BreakpointStore {
BreakpointEditAction::Toggle,
cx,
);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -809,7 +807,7 @@ impl BreakpointStore {
log::error!("Todo: Serialized breakpoints which do not have buffer (yet)");
continue;
};
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let mut breakpoints_for_file =
this.update(cx, |_, cx| BreakpointsInFile::new(buffer, cx))?;
@@ -323,7 +323,7 @@ impl DapStore {
if let Some(c) = binary.connection {
let host = Ipv4Addr::LOCALHOST;
let port;
- if remote.read_with(cx, |remote, _cx| remote.shares_network_interface())? {
+ if remote.read_with(cx, |remote, _cx| remote.shares_network_interface()) {
port = c.port;
port_forwarding = None;
} else {
@@ -348,7 +348,7 @@ impl DapStore {
binary.cwd.map(|path| path.display().to_string()),
port_forwarding,
)
- })??;
+ })?;
Ok(DebugAdapterBinary {
command: Some(command.program),
@@ -530,7 +530,7 @@ impl DapStore {
session
.update(cx, |session, cx| {
session.boot(binary, worktree, dap_store, cx)
- })?
+ })
.await
}
})
@@ -583,7 +583,7 @@ impl DapStore {
} else {
Task::ready(HashMap::default())
}
- })?
+ })
.await;
Ok(())
@@ -691,16 +691,14 @@ impl DapStore {
});
}
VariableLookupKind::Expression => {
- let Ok(eval_task) = session.read_with(cx, |session, _| {
+ let eval_task = session.read_with(cx, |session, _| {
session.state.request_dap(EvaluateCommand {
expression: inline_value_location.variable_name.clone(),
frame_id: Some(stack_frame_id),
source: None,
context: Some(EvaluateArgumentsContext::Variables),
})
- }) else {
- continue;
- };
+ });
if let Some(response) = eval_task.await.log_err() {
inlay_hints.push(InlayHint {
@@ -816,7 +814,7 @@ impl DapStore {
let request = this
.update(&mut cx, |this, cx| {
this.run_debug_locator(&locator, build_task, cx)
- })?
+ })
.await?;
Ok(request.to_proto())
@@ -846,8 +844,7 @@ impl DapStore {
})
.ok();
}
- })
- .ok();
+ });
}
}
})
@@ -858,7 +855,7 @@ impl DapStore {
this.worktree_store
.read(cx)
.worktree_for_id(WorktreeId::from_proto(envelope.payload.worktree_id), cx)
- })?
+ })
.context("Failed to find worktree with a given ID")?;
let binary = this
.update(&mut cx, |this, cx| {
@@ -869,7 +866,7 @@ impl DapStore {
tx,
cx,
)
- })?
+ })
.await?;
Ok(binary.to_proto())
}
@@ -890,7 +887,8 @@ impl DapStore {
.unbounded_send(envelope.payload.message)
.ok();
})
- })
+ });
+ Ok(())
}
pub fn sync_adapter_options(
@@ -209,9 +209,8 @@ impl RunningMode {
}
});
- let client = if let Some(client) = parent_session
- .and_then(|session| cx.update(|cx| session.read(cx).adapter_client()).ok())
- .flatten()
+ let client = if let Some(client) =
+ parent_session.and_then(|session| cx.update(|cx| session.read(cx).adapter_client()))
{
client
.create_child_connection(session_id, binary.clone(), message_handler, cx)
@@ -466,7 +465,7 @@ impl RunningMode {
})?;
initialized_rx.await?;
let errors_by_path = cx
- .update(|cx| this.send_source_breakpoints(false, &breakpoint_store, cx))?
+ .update(|cx| this.send_source_breakpoints(false, &breakpoint_store, cx))
.await;
dap_store.update(cx, |_, cx| {
@@ -2858,7 +2857,7 @@ impl Session {
let mut console_output = self.console_output(cx);
let task = cx.spawn(async move |this, cx| {
let forward_ports_process = if remote_client
- .read_with(cx, |client, _| client.shares_network_interface())?
+ .read_with(cx, |client, _| client.shares_network_interface())
{
request.other.insert(
"proxyUri".into(),
@@ -2890,7 +2889,7 @@ impl Session {
.spawn()
.context("spawning port forwarding process")?;
anyhow::Ok(child)
- })??;
+ })?;
Some(child)
};
@@ -696,15 +696,15 @@ impl GitStore {
cx: &mut Context<Self>,
) -> Task<Result<Entity<BufferDiff>>> {
cx.spawn(async move |this, cx| {
- let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
+ let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
let content = match oid {
None => None,
Some(oid) => Some(
- repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
+ repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
.await?,
),
};
- let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
+ let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));
buffer_diff
.update(cx, |buffer_diff, cx| {
@@ -714,14 +714,14 @@ impl GitStore {
buffer_snapshot.text,
cx,
)
- })?
+ })
.await?;
let unstaged_diff = this
.update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
.await?;
buffer_diff.update(cx, |buffer_diff, _| {
buffer_diff.set_secondary_diff(unstaged_diff);
- })?;
+ });
this.update(cx, |_, cx| {
cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
@@ -1111,7 +1111,7 @@ impl GitStore {
}
let file_path = file.worktree.read(cx).absolutize(&file.path);
return cx.spawn(async move |cx| {
- let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
+ let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
.context("no permalink available")
});
@@ -1138,7 +1138,7 @@ impl GitStore {
let sha = backend.head_sha().await.context("reading HEAD SHA")?;
let provider_registry =
- cx.update(GitHostingProviderRegistry::default_global)?;
+ cx.update(GitHostingProviderRegistry::default_global);
let (provider, remote) =
parse_git_remote_url(provider_registry, &origin_url)
@@ -1226,8 +1226,7 @@ impl GitStore {
for (repo, paths) in paths_by_git_repo {
repo.update(cx, |repo, cx| {
repo.paths_changed(paths, downstream.clone(), cx);
- })
- .ok();
+ });
}
})
.detach();
@@ -1493,7 +1492,7 @@ impl GitStore {
let diff_bases_change = repo
.update(cx, |repo, cx| {
repo.load_committed_text(buffer_id, repo_path, cx)
- })?
+ })
.await?;
diff_state.update(cx, |diff_state, cx| {
@@ -1503,7 +1502,8 @@ impl GitStore {
Some(diff_bases_change),
cx,
);
- })
+ });
+ anyhow::Ok(())
}
.await
.log_err();
@@ -1759,7 +1759,7 @@ impl GitStore {
client.send(update).log_err();
}
Ok(())
- })?
+ })
}
async fn handle_remove_repository(
@@ -1780,7 +1780,8 @@ impl GitStore {
cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
}
cx.emit(GitStoreEvent::RepositoryRemoved(id));
- })
+ });
+ Ok(())
}
async fn handle_git_init(
@@ -1790,7 +1791,7 @@ impl GitStore {
) -> Result<proto::Ack> {
let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
let name = envelope.payload.fallback_branch_name;
- cx.update(|cx| this.read(cx).git_init(path, name, cx))?
+ cx.update(|cx| this.read(cx).git_init(path, name, cx))
.await?;
Ok(proto::Ack {})
@@ -1804,7 +1805,7 @@ impl GitStore {
let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
let repo_name = envelope.payload.remote_repo;
let result = cx
- .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
+ .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
.await;
Ok(proto::GitCloneResponse {
@@ -1833,7 +1834,7 @@ impl GitStore {
let remote_output = repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.fetch(fetch_options, askpass, cx)
- })?
+ })
.await??;
Ok(proto::RemoteMessageResponse {
@@ -1882,7 +1883,7 @@ impl GitStore {
askpass,
cx,
)
- })?
+ })
.await??;
Ok(proto::RemoteMessageResponse {
stdout: remote_output.stdout,
@@ -1913,7 +1914,7 @@ impl GitStore {
let remote_message = repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
- })?
+ })
.await??;
Ok(proto::RemoteMessageResponse {
@@ -1940,7 +1941,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.stage_entries(entries, cx)
- })?
+ })
.await?;
Ok(proto::Ack {})
}
@@ -1963,7 +1964,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.unstage_entries(entries, cx)
- })?
+ })
.await?;
Ok(proto::Ack {})
@@ -1987,7 +1988,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.stash_entries(entries, cx)
- })?
+ })
.await?;
Ok(proto::Ack {})
@@ -2005,7 +2006,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.stash_pop(stash_index, cx)
- })?
+ })
.await?;
Ok(proto::Ack {})
@@ -2023,7 +2024,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.stash_apply(stash_index, cx)
- })?
+ })
.await?;
Ok(proto::Ack {})
@@ -2041,7 +2042,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.stash_drop(stash_index, cx)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2064,7 +2065,7 @@ impl GitStore {
None,
cx,
)
- })?
+ })
.await??;
Ok(proto::Ack {})
}
@@ -2080,7 +2081,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.run_hook(hook, cx)
- })?
+ })
.await??;
Ok(proto::Ack {})
}
@@ -2119,7 +2120,7 @@ impl GitStore {
askpass,
cx,
)
- })?
+ })
.await??;
Ok(proto::Ack {})
}
@@ -2138,7 +2139,7 @@ impl GitStore {
let remotes = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.get_remotes(branch_name, is_push)
- })?
+ })
.await??;
Ok(proto::GetRemotesResponse {
@@ -2162,7 +2163,7 @@ impl GitStore {
let worktrees = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.worktrees()
- })?
+ })
.await??;
Ok(proto::GitWorktreesResponse {
@@ -2187,7 +2188,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.create_worktree(name, directory, commit)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2202,7 +2203,7 @@ impl GitStore {
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
let branches = repository_handle
- .update(&mut cx, |repository_handle, _| repository_handle.branches())?
+ .update(&mut cx, |repository_handle, _| repository_handle.branches())
.await??;
Ok(proto::GitBranchesResponse {
@@ -2223,7 +2224,7 @@ impl GitStore {
let branch = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.default_branch()
- })?
+ })
.await??
.map(Into::into);
@@ -2241,7 +2242,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.create_branch(branch_name, None)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2259,7 +2260,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.change_branch(branch_name)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2278,7 +2279,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.rename_branch(branch, new_name)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2297,7 +2298,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.create_remote(remote_name, remote_url)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2315,7 +2316,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.delete_branch(branch_name)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2333,7 +2334,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.remove_remote(remote_name)
- })?
+ })
.await??;
Ok(proto::Ack {})
@@ -2350,7 +2351,7 @@ impl GitStore {
let commit = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.show(envelope.payload.commit)
- })?
+ })
.await??;
Ok(proto::GitCommitDetails {
sha: commit.sha.into(),
@@ -2372,7 +2373,7 @@ impl GitStore {
let commit_diff = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.load_commit_diff(envelope.payload.commit)
- })?
+ })
.await??;
Ok(proto::LoadCommitDiffResponse {
files: commit_diff
@@ -2401,7 +2402,7 @@ impl GitStore {
let file_history = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.file_history_paginated(path, skip, limit)
- })?
+ })
.await??;
Ok(proto::GitFileHistoryResponse {
@@ -2437,7 +2438,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.reset(envelope.payload.commit, mode, cx)
- })?
+ })
.await??;
Ok(proto::Ack {})
}
@@ -2459,7 +2460,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
- })?
+ })
.await?;
Ok(proto::Ack {})
}
@@ -2474,10 +2475,10 @@ impl GitStore {
let buffer = repository
.update(&mut cx, |repository, cx| {
repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
- })?
+ })
.await?;
- let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
+ let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
this.update(&mut cx, |this, cx| {
this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store
@@ -2488,7 +2489,7 @@ impl GitStore {
)
.detach_and_log_err(cx);
})
- })?;
+ });
Ok(proto::OpenBufferResponse {
buffer_id: buffer_id.to_proto(),
@@ -2503,7 +2504,7 @@ impl GitStore {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
- let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
+ let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
debug_panic!("no askpass found");
anyhow::bail!("no askpass found");
@@ -2535,7 +2536,7 @@ impl GitStore {
let branches = repository_handle
.update(&mut cx, |repository_handle, _| {
repository_handle.check_for_pushed_commits()
- })?
+ })
.await??;
Ok(proto::CheckForPushedCommitsResponse {
pushed_to: branches
@@ -2560,7 +2561,7 @@ impl GitStore {
let mut diff = repository_handle
.update(&mut cx, |repository_handle, cx| {
repository_handle.diff(diff_type, cx)
- })?
+ })
.await??;
const ONE_MB: usize = 1_000_000;
if diff.len() > ONE_MB {
@@ -2592,7 +2593,7 @@ impl GitStore {
.update(&mut cx, |this, cx| {
let repository = this.repositories().get(&repository_id)?;
Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
- })?
+ })
.context("missing repository")?
.await??;
@@ -2633,7 +2634,7 @@ impl GitStore {
.update(&mut cx, |this, cx| {
let repository = this.repositories().get(&repository_id)?;
Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
- })?
+ })
.context("missing repository")?
.await?;
Ok(proto::GetBlobContentResponse { content })
@@ -2649,7 +2650,7 @@ impl GitStore {
.update(&mut cx, |this, cx| {
let buffer = this.buffer_store.read(cx).get(buffer_id)?;
Some(this.open_unstaged_diff(buffer, cx))
- })?
+ })
.context("missing buffer")?
.await?;
this.update(&mut cx, |this, _| {
@@ -2658,8 +2659,8 @@ impl GitStore {
.entry(request.original_sender_id.unwrap_or(request.sender_id))
.or_default();
shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
- })?;
- let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?;
+ });
+ let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
Ok(proto::OpenUnstagedDiffResponse { staged_text })
}
@@ -2673,7 +2674,7 @@ impl GitStore {
.update(&mut cx, |this, cx| {
let buffer = this.buffer_store.read(cx).get(buffer_id)?;
Some(this.open_uncommitted_diff(buffer, cx))
- })?
+ })
.context("missing buffer")?
.await?;
this.update(&mut cx, |this, _| {
@@ -2682,8 +2683,8 @@ impl GitStore {
.entry(request.original_sender_id.unwrap_or(request.sender_id))
.or_default();
shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
- })?;
- diff.read_with(&cx, |diff, cx| {
+ });
+ Ok(diff.read_with(&cx, |diff, cx| {
use proto::open_uncommitted_diff_response::Mode;
let unstaged_diff = diff.secondary_diff();
@@ -2721,7 +2722,7 @@ impl GitStore {
staged_text,
mode: mode.into(),
}
- })
+ }))
}
async fn handle_update_diff_bases(
@@ -2739,7 +2740,8 @@ impl GitStore {
diff_state.handle_base_texts_updated(buffer, request.payload, cx);
})
}
- })
+ });
+ Ok(())
}
async fn handle_blame_buffer(
@@ -2751,16 +2753,16 @@ impl GitStore {
let version = deserialize_version(&envelope.payload.version);
let buffer = this.read_with(&cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(version.clone())
- })?
+ })
.await?;
let blame = this
.update(&mut cx, |this, cx| {
this.blame_buffer(&buffer, Some(version), cx)
- })?
+ })
.await?;
Ok(serialize_blame_buffer_response(blame))
}
@@ -2781,11 +2783,11 @@ impl GitStore {
};
let buffer = this.read_with(&cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
let permalink = this
.update(&mut cx, |this, cx| {
this.get_permalink_to_line(&buffer, selection, cx)
- })?
+ })
.await?;
Ok(proto::GetPermalinkToLineResponse {
permalink: permalink.to_string(),
@@ -2802,7 +2804,7 @@ impl GitStore {
.get(&id)
.context("missing repository handle")
.cloned()
- })?
+ })
}
pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
@@ -3106,7 +3108,7 @@ impl BufferGitState {
language.clone(),
cx,
)
- })?
+ })
.await,
);
}
@@ -3129,7 +3131,7 @@ impl BufferGitState {
language.clone(),
cx,
)
- })?
+ })
.await,
)
}
@@ -3172,7 +3174,7 @@ impl BufferGitState {
diff.language_changed(language.clone(), language_registry.clone(), cx);
}
diff.set_snapshot(new_unstaged_diff, &buffer, cx)
- })?;
+ });
Some(task.await)
} else {
None
@@ -3195,7 +3197,7 @@ impl BufferGitState {
true,
cx,
)
- })?
+ })
.await;
}
@@ -3210,7 +3212,7 @@ impl BufferGitState {
this.head_changed = false;
this.language_changed = false;
*this.recalculating_tx.borrow_mut() = false;
- })?;
+ });
}
Ok(())
@@ -3244,8 +3246,7 @@ fn make_remote_delegate(
anyhow::Ok(())
})
.detach_and_log_err(cx);
- })
- .log_err();
+ });
})
}
@@ -3674,7 +3675,7 @@ impl Repository {
})
.collect::<Vec<_>>()
})
- })??;
+ })?;
let buffer_diff_base_changes = cx
.background_spawn(async move {
@@ -3919,7 +3920,7 @@ impl Repository {
RepositoryState::Local(..) => {
this.update(&mut cx, |_, cx| {
Self::open_local_commit_buffer(languages, buffer_store, cx)
- })?
+ })
.await
}
RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
@@ -3932,18 +3933,18 @@ impl Repository {
let buffer = buffer_store
.update(&mut cx, |buffer_store, cx| {
buffer_store.wait_for_remote_buffer(buffer_id, cx)
- })?
+ })
.await?;
if let Some(language_registry) = languages {
let git_commit_language =
language_registry.language_for_name("Git Commit").await?;
buffer.update(&mut cx, |buffer, cx| {
buffer.set_language(Some(git_commit_language), cx);
- })?;
+ });
}
this.update(&mut cx, |this, _| {
this.commit_message_buffer = Some(buffer.clone());
- })?;
+ });
Ok(buffer)
}
}
@@ -3959,14 +3960,14 @@ impl Repository {
) -> Task<Result<Entity<Buffer>>> {
cx.spawn(async move |repository, cx| {
let buffer = buffer_store
- .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
+ .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))
.await?;
if let Some(language_registry) = language_registry {
let git_commit_language = language_registry.language_for_name("Git Commit").await?;
buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(git_commit_language), cx);
- })?;
+ });
}
repository.update(cx, |repository, _| {
@@ -4969,20 +4970,22 @@ impl Repository {
.read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
.ok()
.flatten();
- git_store.update(&mut cx, |git_store, cx| {
- let buffer_id = git_store
- .buffer_store
- .read(cx)
- .get_by_path(&project_path?)?
- .read(cx)
- .remote_id();
- let diff_state = git_store.diffs.get(&buffer_id)?;
- diff_state.update(cx, |diff_state, _| {
- diff_state.hunk_staging_operation_count_as_of_write =
- hunk_staging_operation_count;
- });
- Some(())
- })?;
+ git_store
+ .update(&mut cx, |git_store, cx| {
+ let buffer_id = git_store
+ .buffer_store
+ .read(cx)
+ .get_by_path(&project_path?)?
+ .read(cx)
+ .remote_id();
+ let diff_state = git_store.diffs.get(&buffer_id)?;
+ diff_state.update(cx, |diff_state, _| {
+ diff_state.hunk_staging_operation_count_as_of_write =
+ hunk_staging_operation_count;
+ });
+ Some(())
+ })
+ .context("Git store dropped")?;
}
Ok(())
},
@@ -5596,7 +5599,7 @@ impl Repository {
this.snapshot.clone(),
backend.clone(),
)
- })?
+ })
.await?;
this.update(&mut cx, |this, cx| {
this.snapshot = snapshot.clone();
@@ -5604,7 +5607,7 @@ impl Repository {
for event in events {
cx.emit(event);
}
- })?;
+ });
if let Some(updates_tx) = updates_tx {
updates_tx
.unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
@@ -5624,7 +5627,7 @@ impl Repository {
cx.spawn(async move |_, cx| {
let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
if let Some(git_hosting_provider_registry) =
- cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
+ cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
{
git_hosting_providers::register_additional_providers(
git_hosting_provider_registry,
@@ -138,7 +138,7 @@ impl ImageItem {
.abs_path(cx)
.context("absolutizing image file path")?;
anyhow::Ok((fs, image_path))
- })??;
+ })?;
let image_bytes = fs.load_bytes(&image_path).await?;
Self::compute_metadata_from_bytes(&image_bytes)
@@ -234,7 +234,7 @@ impl ProjectItem for ImageItem {
let project = project.clone();
async move |cx| {
project
- .update(cx, |project, cx| project.open_image(path, cx))?
+ .update(cx, |project, cx| project.open_image(path, cx))
.await
}
}))
@@ -628,9 +628,9 @@ impl ImageStoreImpl for Entity<LocalImageStore> {
image,
image_metadata: None,
reload_task: None,
- })?;
+ });
- let image_id = cx.read_entity(&entity, |model, _| model.id)?;
+ let image_id = cx.read_entity(&entity, |model, _| model.id);
this.update(cx, |this, cx| {
image_store.update(cx, |image_store, cx| {
@@ -649,7 +649,7 @@ impl ImageStoreImpl for Entity<LocalImageStore> {
}
anyhow::Ok(())
- })??;
+ })?;
Ok(entity)
})
@@ -662,7 +662,7 @@ impl ImageStoreImpl for Entity<LocalImageStore> {
) -> Task<Result<()>> {
cx.spawn(async move |_, cx| {
for image in images {
- if let Some(rec) = image.update(cx, |image, cx| image.reload(cx))? {
+ if let Some(rec) = image.update(cx, |image, cx| image.reload(cx)) {
rec.await?
}
}
@@ -709,7 +709,7 @@ impl ImageStoreImpl for Entity<RemoteImageStore> {
remote_store
.update(cx, |remote_store, cx| {
remote_store.wait_for_remote_image(image_id, cx)
- })?
+ })
.await
})
}
@@ -371,7 +371,7 @@ impl LspCommand for PrepareRename {
Ok(PrepareRenameResponse::Success(range))
}
None => Ok(PrepareRenameResponse::InvalidPosition),
- })?
+ })
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename {
@@ -398,11 +398,11 @@ impl LspCommand for PrepareRename {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -449,7 +449,7 @@ impl LspCommand for PrepareRename {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
if let (Some(start), Some(end)) = (
message.start.and_then(deserialize_anchor),
@@ -556,10 +556,10 @@ impl LspCommand for PerformRename {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
new_name: message.new_name,
push_to_history: false,
})
@@ -593,7 +593,7 @@ impl LspCommand for PerformRename {
lsp_store.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.deserialize_project_transaction(message, self.push_to_history, cx)
})
- })?
+ })
.await
}
@@ -671,10 +671,10 @@ impl LspCommand for GetDefinitions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -774,10 +774,10 @@ impl LspCommand for GetDeclarations {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -876,10 +876,10 @@ impl LspCommand for GetImplementations {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -975,10 +975,10 @@ impl LspCommand for GetTypeDefinitions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -1021,7 +1021,7 @@ fn language_server_for_buffer(
.language_server_for_local_buffer(buffer, server_id, cx)
.map(|(adapter, server)| (adapter.clone(), server.clone()))
})
- })?
+ })
.context("no language server found for buffer")
}
@@ -1051,7 +1051,7 @@ pub fn location_link_from_proto(
let buffer = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
- })?
+ })
.await?;
let start = origin
.start
@@ -1062,7 +1062,7 @@ pub fn location_link_from_proto(
.and_then(deserialize_anchor)
.context("missing origin end")?;
buffer
- .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
+ .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.await?;
Some(Location {
buffer,
@@ -1077,7 +1077,7 @@ pub fn location_link_from_proto(
let buffer = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
- })?
+ })
.await?;
let start = target
.start
@@ -1088,7 +1088,7 @@ pub fn location_link_from_proto(
.and_then(deserialize_anchor)
.context("missing target end")?;
buffer
- .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
+ .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.await?;
let target = Location {
buffer,
@@ -1137,7 +1137,7 @@ pub async fn location_links_from_lsp(
let target_buffer_handle = lsp_store
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(target_uri, language_server.server_id(), cx)
- })?
+ })
.await?;
cx.update(|cx| {
@@ -1169,7 +1169,7 @@ pub async fn location_links_from_lsp(
origin: origin_location,
target: target_location,
})
- })?;
+ });
}
Ok(definitions)
}
@@ -1192,10 +1192,10 @@ pub async fn location_link_from_lsp(
let target_buffer_handle = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.open_local_buffer_via_lsp(target_uri, language_server.server_id(), cx)
- })?
+ })
.await?;
- cx.update(|cx| {
+ Ok(cx.update(|cx| {
let origin_location = origin_range.map(|origin_range| {
let origin_buffer = buffer.read(cx);
let origin_start =
@@ -1224,7 +1224,7 @@ pub async fn location_link_from_lsp(
origin: origin_location,
target: target_location,
}
- })
+ }))
}
pub fn location_links_to_proto(
@@ -1341,7 +1341,7 @@ impl LspCommand for GetReferences {
language_server.server_id(),
cx,
)
- })?
+ })
.await?;
target_buffer_handle
@@ -1356,7 +1356,7 @@ impl LspCommand for GetReferences {
range: target_buffer.anchor_after(target_start)
..target_buffer.anchor_before(target_end),
});
- })?;
+ });
}
}
@@ -1387,10 +1387,10 @@ impl LspCommand for GetReferences {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -1434,7 +1434,7 @@ impl LspCommand for GetReferences {
let target_buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(buffer_id, cx)
- })?
+ })
.await?;
let start = location
.start
@@ -1445,7 +1445,7 @@ impl LspCommand for GetReferences {
.and_then(deserialize_anchor)
.context("missing target end")?;
target_buffer
- .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.await?;
locations.push(Location {
buffer: target_buffer,
@@ -1502,7 +1502,7 @@ impl LspCommand for GetDocumentHighlights {
_: LanguageServerId,
cx: AsyncApp,
) -> Result<Vec<DocumentHighlight>> {
- buffer.read_with(&cx, |buffer, _| {
+ Ok(buffer.read_with(&cx, |buffer, _| {
let mut lsp_highlights = lsp_highlights.unwrap_or_default();
lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end)));
lsp_highlights
@@ -1520,7 +1520,7 @@ impl LspCommand for GetDocumentHighlights {
}
})
.collect()
- })
+ }))
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDocumentHighlights {
@@ -1547,10 +1547,10 @@ impl LspCommand for GetDocumentHighlights {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -1595,7 +1595,7 @@ impl LspCommand for GetDocumentHighlights {
.and_then(deserialize_anchor)
.context("missing target end")?;
buffer
- .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.await?;
let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) {
Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT,
@@ -1711,7 +1711,7 @@ impl LspCommand for GetDocumentSymbols {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self)
}
@@ -1856,14 +1856,14 @@ impl LspCommand for GetSignatureHelp {
let Some(message) = message else {
return Ok(None);
};
- cx.update(|cx| {
+ Ok(cx.update(|cx| {
SignatureHelp::new(
message,
Some(lsp_store.read(cx).languages.clone()),
Some(id),
cx,
)
- })
+ }))
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest {
@@ -1885,10 +1885,10 @@ impl LspCommand for GetSignatureHelp {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&payload.version))
- })?
+ })
.await
.with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?;
- let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
+ let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
Ok(Self {
position: payload
.position
@@ -1918,7 +1918,7 @@ impl LspCommand for GetSignatureHelp {
_: Entity<Buffer>,
cx: AsyncApp,
) -> Result<Self::Response> {
- cx.update(|cx| {
+ Ok(cx.update(|cx| {
response
.signature_help
.map(proto_to_lsp_signature)
@@ -1930,7 +1930,7 @@ impl LspCommand for GetSignatureHelp {
cx,
)
})
- })
+ }))
}
fn buffer_id_from_proto(message: &Self::ProtoRequest) -> Result<BufferId> {
@@ -1991,7 +1991,7 @@ impl LspCommand for GetHover {
buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
}),
)
- })?;
+ });
fn hover_blocks_from_marked_string(marked_string: lsp::MarkedString) -> Option<HoverBlock> {
let block = match marked_string {
@@ -2064,10 +2064,10 @@ impl LspCommand for GetHover {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -2141,7 +2141,7 @@ impl LspCommand for GetHover {
return Ok(None);
}
- let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned())?;
+ let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
let range = if let (Some(start), Some(end)) = (message.start, message.end) {
language::proto::deserialize_anchor(start)
.and_then(|start| language::proto::deserialize_anchor(end).map(|end| start..end))
@@ -2152,7 +2152,7 @@ impl LspCommand for GetHover {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_anchors([range.start, range.end])
- })?
+ })
.await?;
}
@@ -2238,7 +2238,7 @@ impl LspCommand for GetCompletions {
let language_server_adapter = lsp_store
.read_with(&cx, |lsp_store, _| {
lsp_store.language_server_adapter_for_id(server_id)
- })?
+ })
.with_context(|| format!("no language server with id {server_id}"))?;
let lsp_defaults = response_list
@@ -2355,7 +2355,7 @@ impl LspCommand for GetCompletions {
completion_edits.push(edit);
true
});
- })?;
+ });
// If completions were filtered out due to errors that may be transient, mark the result
// incomplete so that it is re-queried.
@@ -2420,7 +2420,7 @@ impl LspCommand for GetCompletions {
) -> Result<Self> {
let version = deserialize_version(&message.version);
buffer
- .update(&mut cx, |buffer, _| buffer.wait_for_version(version))?
+ .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
.await?;
let position = message
.position
@@ -2430,7 +2430,7 @@ impl LspCommand for GetCompletions {
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
})
})
- .context("invalid position")??;
+ .context("invalid position")?;
Ok(Self {
position,
context: CompletionContext {
@@ -2471,7 +2471,7 @@ impl LspCommand for GetCompletions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
let completions = message
@@ -2644,7 +2644,7 @@ impl LspCommand for GetCodeActions {
.with_context(|| {
format!("Missing the language server that just returned a response {server_id}")
})
- })??;
+ })?;
let server_capabilities = language_server.capabilities();
let available_commands = server_capabilities
@@ -2719,7 +2719,7 @@ impl LspCommand for GetCodeActions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self {
@@ -2754,7 +2754,7 @@ impl LspCommand for GetCodeActions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
message
.actions
@@ -2889,17 +2889,17 @@ impl LspCommand for OnTypeFormatting {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
let options = buffer.update(&mut cx, |buffer, cx| {
lsp_formatting_options(
language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx).as_ref(),
)
- })?;
+ });
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
trigger: message.trigger.clone(),
options,
push_to_history: false,
@@ -2959,7 +2959,7 @@ impl InlayHints {
} else {
buffer.anchor_after(position)
}
- })?;
+ });
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
.await
.context("lsp to project inlay hint conversion")?;
@@ -3421,7 +3421,7 @@ impl LspCommand for InlayHints {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self { range: start..end })
@@ -3453,7 +3453,7 @@ impl LspCommand for InlayHints {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
let mut hints = Vec::new();
@@ -3510,7 +3510,7 @@ impl LspCommand for GetCodeLens {
server_id: LanguageServerId,
cx: AsyncApp,
) -> anyhow::Result<Vec<CodeAction>> {
- let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
+ let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
let language_server = cx.update(|cx| {
lsp_store
.read(cx)
@@ -3518,7 +3518,7 @@ impl LspCommand for GetCodeLens {
.with_context(|| {
format!("Missing the language server that just returned a response {server_id}")
})
- })??;
+ })?;
let server_capabilities = language_server.capabilities();
let available_commands = server_capabilities
.execute_command_provider
@@ -3566,7 +3566,7 @@ impl LspCommand for GetCodeLens {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
Ok(Self)
}
@@ -3597,7 +3597,7 @@ impl LspCommand for GetCodeLens {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
message
.lens_actions
@@ -3663,7 +3663,7 @@ impl LspCommand for LinkedEditingRange {
if let Some(lsp::LinkedEditingRanges { mut ranges, .. }) = message {
ranges.sort_by_key(|range| range.start);
- buffer.read_with(&cx, |buffer, _| {
+ Ok(buffer.read_with(&cx, |buffer, _| {
ranges
.into_iter()
.map(|range| {
@@ -3673,7 +3673,7 @@ impl LspCommand for LinkedEditingRange {
buffer.anchor_before(start)..buffer.anchor_after(end)
})
.collect()
- })
+ }))
} else {
Ok(vec![])
}
@@ -3698,11 +3698,11 @@ impl LspCommand for LinkedEditingRange {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
let position = deserialize_anchor(position).context("invalid position")?;
buffer
- .update(&mut cx, |buffer, _| buffer.wait_for_anchors([position]))?
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([position]))
.await?;
Ok(Self { position })
}
@@ -3736,7 +3736,7 @@ impl LspCommand for LinkedEditingRange {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
- })?
+ })
.await?;
let items: Vec<Range<Anchor>> = message
.items
@@ -3751,7 +3751,7 @@ impl LspCommand for LinkedEditingRange {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_anchors([range.start, range.end])
- })?
+ })
.await?;
}
Ok(items)
@@ -4105,7 +4105,7 @@ impl LspCommand for GetDocumentDiagnostics {
})
.transpose()?
.with_context(|| format!("missing url on buffer {}", buffer.remote_id()))
- })??;
+ })?;
let mut pulled_diagnostics = HashMap::default();
match message {
@@ -508,7 +508,7 @@ impl LocalLspStore {
language_server.default_initialize_params(pull_diagnostics, cx);
params.initialization_options = initialization_options;
adapter.adapter.prepare_initialize_params(params, cx)
- })??;
+ })?;
Self::setup_lsp_messages(
lsp_store.clone(),
@@ -527,16 +527,14 @@ impl LocalLspStore {
Arc::new(did_change_configuration_params.clone()),
cx,
)
- })?
+ })
.await
.inspect_err(|_| {
if let Some(lsp_store) = lsp_store.upgrade() {
- lsp_store
- .update(cx, |lsp_store, cx| {
- lsp_store.cleanup_lsp_data(server_id);
- cx.emit(LspStoreEvent::LanguageServerRemoved(server_id))
- })
- .ok();
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.cleanup_lsp_data(server_id);
+ cx.emit(LspStoreEvent::LanguageServerRemoved(server_id))
+ });
}
})?;
@@ -776,8 +774,7 @@ impl LocalLspStore {
cx,
)
.log_err();
- })
- .ok();
+ });
}
}
})
@@ -1150,8 +1147,7 @@ impl LocalLspStore {
disk_based_diagnostics_progress_token.clone(),
cx,
);
- })
- .ok();
+ });
}
}
})
@@ -1168,8 +1164,7 @@ impl LocalLspStore {
LanguageServerLogType::Log(params.typ),
params.message,
));
- })
- .ok();
+ });
}
}
})
@@ -1189,8 +1184,7 @@ impl LocalLspStore {
},
params.message,
));
- })
- .ok();
+ });
}
}
})
@@ -1440,7 +1434,7 @@ impl LocalLspStore {
let transaction_id = buffer.push_empty_transaction(cx.background_executor().now());
buffer.finalize_last_transaction();
anyhow::Ok(transaction_id)
- })??;
+ })?;
let result = Self::format_buffer_locally(
lsp_store.clone(),
@@ -1471,7 +1465,7 @@ impl LocalLspStore {
project_transaction
.0
.insert(cx.entity(), formatting_transaction);
- })?;
+ });
result?;
}
@@ -1521,7 +1515,7 @@ impl LocalLspStore {
buffer.merge_transactions(transaction_id, formatting_transaction_id);
}
Ok(())
- })?
+ })
}
// handle whitespace formatting
@@ -1529,7 +1523,7 @@ impl LocalLspStore {
zlog::trace!(logger => "removing trailing whitespace");
let diff = buffer
.handle
- .read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx))?
+ .read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx))
.await;
extend_formatting_transaction(buffer, formatting_transaction_id, cx, |buffer, cx| {
buffer.apply_diff(diff, cx);
@@ -2054,22 +2048,20 @@ impl LocalLspStore {
transaction_id_project_transaction,
formatting_transaction_id,
);
- })?;
+ });
}
if !project_transaction_command.0.is_empty() {
let mut extra_buffers = String::new();
for buffer in project_transaction_command.0.keys() {
- buffer
- .read_with(cx, |b, cx| {
- if let Some(path) = b.project_path(cx) {
- if !extra_buffers.is_empty() {
- extra_buffers.push_str(", ");
- }
- extra_buffers.push_str(path.path.as_unix_str());
+ buffer.read_with(cx, |b, cx| {
+ if let Some(path) = b.project_path(cx) {
+ if !extra_buffers.is_empty() {
+ extra_buffers.push_str(", ");
}
- })
- .ok();
+ extra_buffers.push_str(path.path.as_unix_str());
+ }
+ });
}
zlog::warn!(
logger =>
@@ -2192,7 +2184,7 @@ impl LocalLspStore {
} else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
let _timer = zlog::time!(logger => "format-range");
let buffer_start = lsp::Position::new(0, 0);
- let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
+ let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));
language_server
.request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
text_document: text_document.clone(),
@@ -2236,7 +2228,7 @@ impl LocalLspStore {
worktree_path.pop();
}
Some(worktree_path)
- })?;
+ });
let mut child = util::command::new_smol_command(command);
@@ -2267,7 +2259,7 @@ impl LocalLspStore {
let stdin = child.stdin.as_mut().context("failed to acquire stdin")?;
let text = buffer
.handle
- .read_with(cx, |buffer, _| buffer.as_rope().clone())?;
+ .read_with(cx, |buffer, _| buffer.as_rope().clone());
for chunk in text.chunks() {
stdin.write_all(chunk.as_bytes()).await?;
}
@@ -2286,7 +2278,7 @@ impl LocalLspStore {
Ok(Some(
buffer
.handle
- .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
+ .update(cx, |buffer, cx| buffer.diff(stdout, cx))
.await,
))
}
@@ -2965,7 +2957,7 @@ impl LocalLspStore {
None,
cx,
)
- })?
+ })
.await?;
let transaction = buffer_to_edit.update(cx, |buffer, cx| {
@@ -2984,7 +2976,7 @@ impl LocalLspStore {
} else {
None
}
- })?;
+ });
Ok(transaction)
}
@@ -3077,7 +3069,7 @@ impl LocalLspStore {
language_server: Arc<LanguageServer>,
cx: &mut AsyncApp,
) -> Result<ProjectTransaction> {
- let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone())?;
+ let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone());
let mut operations = Vec::new();
if let Some(document_changes) = edit.document_changes {
@@ -3182,7 +3174,7 @@ impl LocalLspStore {
language_server.server_id(),
cx,
)
- })?
+ })
.await?;
let edits = this
@@ -3268,7 +3260,7 @@ impl LocalLspStore {
op.text_document.version,
cx,
)
- })?
+ })
.await?;
let transaction = buffer_to_edit.update(cx, |buffer, cx| {
@@ -3286,7 +3278,7 @@ impl LocalLspStore {
buffer.forget_transaction(transaction_id)
}
})
- })?;
+ });
if let Some(transaction) = transaction {
project_transaction.0.insert(buffer_to_edit, transaction);
}
@@ -3305,7 +3297,7 @@ impl LocalLspStore {
) -> Result<lsp::ApplyWorkspaceEditResponse> {
let this = this.upgrade().context("project project closed")?;
let language_server = this
- .read_with(cx, |this, _| this.language_server_for_id(server_id))?
+ .read_with(cx, |this, _| this.language_server_for_id(server_id))
.context("language server not found")?;
let transaction = Self::deserialize_workspace_edit(
this.clone(),
@@ -3325,7 +3317,7 @@ impl LocalLspStore {
.last_workspace_edits_by_language_server
.insert(server_id, transaction);
}
- })?;
+ });
Ok(lsp::ApplyWorkspaceEditResponse {
applied: true,
failed_change: None,
@@ -4456,8 +4448,7 @@ impl LspStore {
}
}
});
- })
- .ok();
+ });
}
this.update(cx, |this, cx| {
@@ -4503,8 +4494,7 @@ impl LspStore {
for buffer in buffers_with_unknown_injections {
buffer.update(cx, |buffer, cx| buffer.reparse(cx, false));
}
- })
- .ok();
+ });
}
}
})
@@ -5067,7 +5057,7 @@ impl LspStore {
buffer_store
.update(cx, |buffer_store, cx| {
buffer_store.deserialize_project_transaction(response, push_to_history, cx)
- })?
+ })
.await
})
} else if self.mode.is_local() {
@@ -5172,7 +5162,7 @@ impl LspStore {
.map(|buffer| {
buffer.read_with(cx, |buffer, _| buffer.remote_id().into())
})
- .collect::<Result<_>>()?,
+ .collect(),
})
.await
.and_then(|result| result.transaction.context("missing transaction"));
@@ -5188,7 +5178,7 @@ impl LspStore {
push_to_history,
cx,
)
- })?
+ })
.await
})
} else {
@@ -5521,7 +5511,7 @@ impl LspStore {
buffer
.update(cx, |buffer, _| {
buffer.wait_for_edits(Some(position.timestamp))
- })?
+ })
.await?;
this.update(cx, |this, cx| {
let position = position.to_point_utf16(buffer.read(cx));
@@ -5567,7 +5557,7 @@ impl LspStore {
cx.spawn(async move |this, cx| {
if let Some(waiter) =
- buffer.update(cx, |buffer, _| buffer.wait_for_autoindent_applied())?
+ buffer.update(cx, |buffer, _| buffer.wait_for_autoindent_applied())
{
waiter.await?;
}
@@ -5585,7 +5575,7 @@ impl LspStore {
cx,
)
})
- })??
+ })?
.await
})
}
@@ -6715,13 +6705,13 @@ impl LspStore {
buffer_handle
.update(cx, |buffer, _| {
buffer.wait_for_edits(transaction.edit_ids.iter().copied())
- })?
+ })
.await?;
if push_to_history {
buffer_handle.update(cx, |buffer, _| {
buffer.push_transaction(transaction.clone(), Instant::now());
buffer.finalize_last_transaction();
- })?;
+ });
}
Ok(Some(transaction))
} else {
@@ -6820,7 +6810,7 @@ impl LspStore {
None
};
Ok(transaction)
- })?
+ })
} else {
Ok(None)
}
@@ -7200,7 +7190,7 @@ impl LspStore {
}))
.await;
- let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let mut has_errors = false;
let inlay_hints = inlay_hints
.into_iter()
@@ -7462,13 +7452,9 @@ impl LspStore {
.map_err(Arc::new);
let fetched_colors = match fetched_colors {
Ok(fetched_colors) => {
- if Some(true)
- == buffer
- .update(cx, |buffer, _| {
- buffer.version() != buffer_version_queried_for
- })
- .ok()
- {
+ if buffer.update(cx, |buffer, _| {
+ buffer.version() != buffer_version_queried_for
+ }) {
return Ok(DocumentColors::default());
}
fetched_colors
@@ -7888,8 +7874,8 @@ impl LspStore {
range: range_from_lsp(symbol_location.range),
})
})
- .collect()
- })?;
+ .collect::<Vec<_>>()
+ });
populate_labels_for_symbols(
core_symbols,
@@ -8735,8 +8721,8 @@ impl LspStore {
})
})?
.await?;
- let worktree_root = worktree.read_with(cx, |worktree, _| worktree.abs_path())?;
- let source_ws = if worktree.read_with(cx, |worktree, _| worktree.is_local())? {
+ let worktree_root = worktree.read_with(cx, |worktree, _| worktree.abs_path());
+ let source_ws = if worktree.read_with(cx, |worktree, _| worktree.is_local()) {
lsp_store
.update(cx, |lsp_store, cx| {
if let Some(local) = lsp_store.as_local_mut() {
@@ -8766,7 +8752,7 @@ impl LspStore {
(worktree, relative_path, source_ws)
};
let project_path = ProjectPath {
- worktree_id: worktree.read_with(cx, |worktree, _| worktree.id())?,
+ worktree_id: worktree.read_with(cx, |worktree, _| worktree.id()),
path: relative_path,
};
let buffer = lsp_store
@@ -8793,7 +8779,7 @@ impl LspStore {
if is_read_only {
buffer.set_capability(Capability::ReadOnly, cx);
}
- })?;
+ });
}
Ok(buffer)
})
@@ -8877,7 +8863,7 @@ impl LspStore {
let buffer_id = GetCompletions::buffer_id_from_proto(&envelope.payload)?;
let buffer_handle = this.update(&mut cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
let request = GetCompletions::from_proto(
envelope.payload,
this.clone(),
@@ -8894,7 +8880,7 @@ impl LspStore {
let response = this
.update(&mut cx, |this, cx| {
this.request_lsp(buffer_handle.clone(), server_to_query, request, cx)
- })?
+ })
.await?;
this.update(&mut cx, |this, cx| {
Ok(GetCompletions::response_to_proto(
@@ -8904,7 +8890,7 @@ impl LspStore {
&buffer_handle.read(cx).version(),
cx,
))
- })?
+ })
}
async fn handle_lsp_command<T: LspCommand>(
@@ -8920,7 +8906,7 @@ impl LspStore {
let buffer_id = T::buffer_id_from_proto(&envelope.payload)?;
let buffer_handle = this.update(&mut cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
let request = T::from_proto(
envelope.payload,
this.clone(),
@@ -8936,7 +8922,7 @@ impl LspStore {
request,
cx,
)
- })?
+ })
.await?;
this.update(&mut cx, |this, cx| {
Ok(T::response_to_proto(
@@ -8946,7 +8932,7 @@ impl LspStore {
&buffer_handle.read(cx).version(),
cx,
))
- })?
+ })
}
async fn handle_lsp_query(
@@ -9104,11 +9090,11 @@ impl LspStore {
let version = deserialize_version(get_document_diagnostics.buffer_version());
let buffer = lsp_store.update(&mut cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(version.clone())
- })?
+ })
.await?;
lsp_store.update(&mut cx, |lsp_store, cx| {
let lsp_data = lsp_store.latest_lsp_data(&buffer, cx);
@@ -9127,12 +9113,10 @@ impl LspStore {
existing_queries.insert(
lsp_request_id,
cx.spawn(async move |lsp_store, cx| {
- let diagnostics_pull = lsp_store
- .update(cx, |lsp_store, cx| {
- lsp_store.pull_diagnostics_for_buffer(buffer, cx)
- })
- .ok();
- if let Some(diagnostics_pull) = diagnostics_pull {
+ let diagnostics_pull = lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.pull_diagnostics_for_buffer(buffer, cx)
+ });
+ if let Ok(diagnostics_pull) = diagnostics_pull {
match diagnostics_pull.await {
Ok(()) => {}
Err(e) => log::error!("Failed to pull diagnostics: {e:#}"),
@@ -9140,7 +9124,7 @@ impl LspStore {
}
}),
);
- })?;
+ });
}
Request::InlayHints(inlay_hints) => {
let query_start = inlay_hints
@@ -9188,7 +9172,7 @@ impl LspStore {
if let Some((upstream_client, _)) = lsp_store.upstream_client() {
upstream_client.handle_lsp_response(envelope.clone());
}
- })?;
+ });
Ok(())
}
@@ -9204,7 +9188,7 @@ impl LspStore {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?;
anyhow::Ok(this.apply_code_action(buffer, action, false, cx))
- })??;
+ })?;
let project_transaction = apply_code_action.await?;
let project_transaction = this.update(&mut cx, |this, cx| {
@@ -9215,7 +9199,7 @@ impl LspStore {
cx,
)
})
- })?;
+ });
Ok(proto::ApplyCodeActionResponse {
transaction: Some(project_transaction),
})
@@ -9268,7 +9252,7 @@ impl LspStore {
});
Ok(())
- })??;
+ })?;
Ok(proto::Ack {})
}
@@ -9298,13 +9282,13 @@ impl LspStore {
new_worktree,
entry.clone(),
))
- })?
+ })
.context("worktree not found")?;
let (old_abs_path, old_worktree_id) = old_worktree.read_with(&cx, |worktree, _| {
(worktree.absolutize(&old_entry.path), worktree.id())
- })?;
+ });
let new_abs_path =
- new_worktree.read_with(&cx, |worktree, _| worktree.absolutize(&new_path))?;
+ new_worktree.read_with(&cx, |worktree, _| worktree.absolutize(&new_path));
let _transaction = Self::will_rename_entry(
this.downgrade(),
@@ -9328,8 +9312,7 @@ impl LspStore {
&new_abs_path,
old_entry.is_dir(),
);
- })
- .ok();
+ });
response
}
@@ -9421,7 +9404,7 @@ impl LspStore {
cx.emit(LspStoreEvent::DiagnosticsUpdated { server_id, paths });
}
Ok(())
- })?
+ })
}
async fn handle_start_language_server(
@@ -9464,7 +9447,7 @@ impl LspStore {
server.worktree_id.map(WorktreeId::from_proto),
));
cx.notify();
- })?;
+ });
Ok(())
}
@@ -9543,7 +9526,7 @@ impl LspStore {
}
Ok(())
- })?
+ })
}
async fn handle_language_server_log(
@@ -9566,7 +9549,8 @@ impl LspStore {
log_type,
message,
));
- })
+ });
+ Ok(())
}
async fn handle_lsp_ext_cancel_flycheck(
@@ -9581,7 +9565,7 @@ impl LspStore {
} else {
None
}
- })?;
+ });
if let Some(task) = task {
task.context("handling lsp ext cancel flycheck")?;
}
@@ -9623,7 +9607,7 @@ impl LspStore {
)?;
}
anyhow::Ok(())
- })??;
+ })?;
Ok(proto::Ack {})
}
@@ -9634,15 +9618,13 @@ impl LspStore {
cx: AsyncApp,
) -> Result<proto::Ack> {
let server_id = LanguageServerId(envelope.payload.language_server_id as usize);
- lsp_store
- .read_with(&cx, |lsp_store, _| {
- if let Some(server) = lsp_store.language_server_for_id(server_id) {
- Some(server.notify::<lsp_store::lsp_ext_command::LspExtClearFlycheck>(()))
- } else {
- None
- }
- })
- .context("handling lsp ext clear flycheck")?;
+ lsp_store.read_with(&cx, |lsp_store, _| {
+ if let Some(server) = lsp_store.language_server_for_id(server_id) {
+ Some(server.notify::<lsp_store::lsp_ext_command::LspExtClearFlycheck>(()))
+ } else {
+ None
+ }
+ });
Ok(proto::Ack {})
}
@@ -10161,7 +10143,7 @@ impl LspStore {
anyhow::Ok(lsp_completion)
}
}))
- })??
+ })?
.await?;
let mut documentation_is_markdown = false;
@@ -10188,7 +10170,7 @@ impl LspStore {
let buffer_snapshot = this.update(&mut cx, |this, cx| {
let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?;
anyhow::Ok(buffer.read(cx).snapshot())
- })??;
+ })?;
if let Some(text_edit) = completion.text_edit.as_ref() {
let edit = parse_completion_text_edit(text_edit, &buffer_snapshot);
@@ -10238,7 +10220,7 @@ impl LspStore {
envelope.payload.trigger.clone(),
cx,
))
- })??;
+ })?;
let transaction = on_type_formatting
.await?
@@ -10257,7 +10239,7 @@ impl LspStore {
server_id: LanguageServerId::from_proto(envelope.payload.server_id),
request_id: envelope.payload.request_id.map(|id| id as usize),
});
- })?;
+ });
Ok(proto::Ack {})
}
@@ -10269,7 +10251,7 @@ impl LspStore {
let server_id = LanguageServerId::from_proto(envelope.payload.server_id);
lsp_store.update(&mut cx, |lsp_store, _| {
lsp_store.pull_workspace_diagnostics(server_id);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -10281,7 +10263,7 @@ impl LspStore {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = lsp_store.update(&mut cx, |lsp_store, cx| {
lsp_store.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
let color = envelope
.payload
@@ -10316,7 +10298,7 @@ impl LspStore {
LanguageServerId(envelope.payload.server_id as usize),
cx,
)
- })?
+ })
.await
.context("resolving color presentation")?;
@@ -10351,7 +10333,7 @@ impl LspStore {
let buffer = lsp_store.update(&mut cx, |lsp_store, cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
lsp_store.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
let response_hint = lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.resolve_inlay_hint(
@@ -10360,7 +10342,7 @@ impl LspStore {
LanguageServerId(envelope.payload.language_server_id as usize),
cx,
)
- })?
+ })
.await
.context("inlay hints fetch")?;
Ok(proto::ResolveInlayHintResponse {
@@ -10375,7 +10357,7 @@ impl LspStore {
) -> Result<proto::Ack> {
this.update(&mut cx, |_, cx| {
cx.emit(LspStoreEvent::RefreshCodeLens);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -10397,7 +10379,7 @@ impl LspStore {
anyhow::ensure!(&new_signature == signature, "invalid symbol signature");
}
Ok(())
- })??;
+ })?;
let buffer = this
.update(&mut cx, |this, cx| {
this.open_buffer_for_symbol(
@@ -10413,7 +10395,7 @@ impl LspStore {
},
cx,
)
- })?
+ })
.await?;
this.update(&mut cx, |this, cx| {
@@ -10433,7 +10415,7 @@ impl LspStore {
let buffer_id = buffer.read(cx).remote_id().to_proto();
Ok(proto::OpenBufferForSymbolResponse { buffer_id })
}
- })?
+ })
}
fn symbol_signature(&self, abs_path: &Path) -> [u8; 32] {
@@ -10451,7 +10433,7 @@ impl LspStore {
let symbols = this
.update(&mut cx, |this, cx| {
this.symbols(&envelope.payload.query, cx)
- })?
+ })
.await?;
Ok(proto::GetProjectSymbolsResponse {
@@ -10488,7 +10470,7 @@ impl LspStore {
.collect(),
cx,
);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -10533,7 +10515,7 @@ impl LspStore {
)
.detach_and_log_err(cx);
}
- })?;
+ });
Ok(proto::Ack {})
}
@@ -10565,7 +10547,7 @@ impl LspStore {
}
}
anyhow::Ok(())
- })??;
+ })?;
Ok(proto::Ack {})
}
@@ -10597,7 +10579,7 @@ impl LspStore {
envelope.payload.completion.context("invalid completion")?,
)?;
anyhow::Ok((buffer, completion))
- })??;
+ })?;
let apply_additional_edits = this.update(&mut cx, |this, cx| {
this.apply_additional_edits_for_completion(
@@ -10618,7 +10600,7 @@ impl LspStore {
false,
cx,
)
- })?;
+ });
Ok(proto::ApplyCompletionAdditionalEditsResponse {
transaction: apply_additional_edits
@@ -10741,8 +10723,8 @@ impl LspStore {
trigger: trigger as i32,
buffer_ids: buffers
.iter()
- .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().into()))
- .collect::<Result<_>>()?,
+ .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto()))
+ .collect(),
})
.await
.and_then(|result| result.transaction.context("missing transaction"));
@@ -10763,7 +10745,7 @@ impl LspStore {
push_to_history,
cx,
)
- })?
+ })
.await
})
} else {
@@ -10786,7 +10768,7 @@ impl LspStore {
}
let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
anyhow::Ok(this.format(buffers, LspFormatTarget::Buffers, false, trigger, cx))
- })??;
+ })?;
let project_transaction = format.await?;
let project_transaction = this.update(&mut cx, |this, cx| {
@@ -10797,7 +10779,7 @@ impl LspStore {
cx,
)
})
- })?;
+ });
Ok(proto::FormatBuffersResponse {
transaction: Some(project_transaction),
})
@@ -10831,7 +10813,7 @@ impl LspStore {
),
};
anyhow::Ok(this.apply_code_action_kind(buffers, kind, false, cx))
- })??;
+ })?;
let project_transaction = format.await?;
let project_transaction = this.update(&mut cx, |this, cx| {
@@ -10842,7 +10824,7 @@ impl LspStore {
cx,
)
})
- })?;
+ });
Ok(proto::ApplyCodeActionKindResponse {
transaction: Some(project_transaction),
})
@@ -11060,18 +11042,16 @@ impl LspStore {
};
cx.spawn(async move |lsp_store, cx| {
stop_task.await;
- lsp_store
- .update(cx, |lsp_store, cx| {
- for buffer in buffers {
- lsp_store.register_buffer_with_language_servers(
- &buffer,
- only_restart_servers.clone(),
- true,
- cx,
- );
- }
- })
- .ok()
+ lsp_store.update(cx, |lsp_store, cx| {
+ for buffer in buffers {
+ lsp_store.register_buffer_with_language_servers(
+ &buffer,
+ only_restart_servers.clone(),
+ true,
+ cx,
+ );
+ }
+ })
})
.detach();
}
@@ -12976,9 +12956,9 @@ impl LspStore {
let version = deserialize_version(proto_request.buffer_version());
let buffer = lsp_store.update(cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
buffer
- .update(cx, |buffer, _| buffer.wait_for_version(version))?
+ .update(cx, |buffer, _| buffer.wait_for_version(version))
.await?;
lsp_store.update(cx, |lsp_store, cx| {
let buffer_snapshot = buffer.read(cx).snapshot();
@@ -13010,7 +12990,7 @@ impl LspStore {
}
}
anyhow::Ok(())
- })??;
+ })?;
Ok(())
}
@@ -13034,11 +13014,11 @@ impl LspStore {
let version = deserialize_version(proto_request.buffer_version());
let buffer = lsp_store.update(cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
- })??;
+ })?;
buffer
- .update(cx, |buffer, _| buffer.wait_for_version(version.clone()))?
+ .update(cx, |buffer, _| buffer.wait_for_version(version.clone()))
.await?;
- let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version())?;
+ let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version());
let request =
T::from_proto(proto_request, lsp_store.clone(), buffer.clone(), cx.clone()).await?;
let key = LspKey {
@@ -13114,7 +13094,7 @@ impl LspStore {
.ok();
}),
);
- })?;
+ });
Ok(())
}
@@ -229,7 +229,7 @@ impl LogStore {
if let Some(log_store) = log_store.upgrade() {
log_store.update(cx, |log_store, cx| {
log_store.on_io(server_id, io_kind, &message, cx);
- })?;
+ });
}
}
anyhow::Ok(())
@@ -122,7 +122,7 @@ impl LspCommand for ExpandMacro {
.and_then(deserialize_anchor)
.context("invalid position")?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -256,7 +256,7 @@ impl LspCommand for OpenDocs {
.and_then(deserialize_anchor)
.context("invalid position")?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -469,7 +469,7 @@ impl LspCommand for GoToParentModule {
.and_then(deserialize_anchor)
.context("bad request with bad position")?;
Ok(Self {
- position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer))?,
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
})
}
@@ -124,7 +124,7 @@ pub fn cancel_flycheck(
Ok(())
}
})
- .context("lsp ext cancel flycheck")??;
+ .context("lsp ext cancel flycheck")?;
};
anyhow::Ok(())
})
@@ -157,8 +157,7 @@ pub fn run_flycheck(
if let Some((client, project_id)) = upstream_client {
let buffer_id = buffer
- .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto()))
- .transpose()?;
+ .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto()));
let request = proto::LspExtRunFlycheck {
project_id,
buffer_id,
@@ -182,7 +181,7 @@ pub fn run_flycheck(
Ok(())
}
})
- .context("lsp ext run flycheck")??;
+ .context("lsp ext run flycheck")?;
};
anyhow::Ok(())
})
@@ -231,7 +230,7 @@ pub fn clear_flycheck(
Ok(())
}
})
- .context("lsp ext clear flycheck")??;
+ .context("lsp ext clear flycheck")?;
};
anyhow::Ok(())
})
@@ -242,34 +241,32 @@ fn find_rust_analyzer_server(
buffer: Option<&Entity<Buffer>>,
cx: &mut AsyncApp,
) -> Option<LanguageServerId> {
- project
- .read_with(cx, |project, cx| {
- buffer
- .and_then(|buffer| {
- project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx)
- })
- // If no rust-analyzer found for the current buffer (e.g. `settings.json`), fall back to the project lookup
- // and use project's rust-analyzer if it's the only one.
- .or_else(|| {
- let rust_analyzer_servers = project
- .lsp_store()
- .read(cx)
- .language_server_statuses
- .iter()
- .filter_map(|(server_id, server_status)| {
- if server_status.name == RUST_ANALYZER_NAME {
- Some(*server_id)
- } else {
- None
- }
- })
- .collect::<Vec<_>>();
- if rust_analyzer_servers.len() == 1 {
- rust_analyzer_servers.first().copied()
- } else {
- None
- }
- })
- })
- .ok()?
+ project.read_with(cx, |project, cx| {
+ buffer
+ .and_then(|buffer| {
+ project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx)
+ })
+ // If no rust-analyzer found for the current buffer (e.g. `settings.json`), fall back to the project lookup
+ // and use project's rust-analyzer if it's the only one.
+ .or_else(|| {
+ let rust_analyzer_servers = project
+ .lsp_store()
+ .read(cx)
+ .language_server_statuses
+ .iter()
+ .filter_map(|(server_id, server_status)| {
+ if server_status.name == RUST_ANALYZER_NAME {
+ Some(*server_id)
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+ if rust_analyzer_servers.len() == 1 {
+ rust_analyzer_servers.first().copied()
+ } else {
+ None
+ }
+ })
+ })
}
@@ -134,7 +134,7 @@ impl PrettierStore {
{
Ok(ControlFlow::Break(())) => None,
Ok(ControlFlow::Continue(None)) => {
- let default_instance = lsp_store
+ let default_task = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store
.prettiers_per_worktree
@@ -147,8 +147,9 @@ impl PrettierStore {
cx,
)
})
- .ok()?;
- Some((None, default_instance?.log_err().await?))
+ .ok()??;
+ let default_instance = default_task.await.ok()?;
+ Some((None, default_instance))
}
Ok(ControlFlow::Continue(Some(prettier_dir))) => {
lsp_store
@@ -162,21 +163,22 @@ impl PrettierStore {
.ok()?;
if let Some(prettier_task) = lsp_store
.update(cx, |lsp_store, cx| {
- lsp_store.prettier_instances.get_mut(&prettier_dir).map(
- |existing_instance| {
+ lsp_store
+ .prettier_instances
+ .get_mut(&prettier_dir)
+ .and_then(|existing_instance| {
existing_instance.prettier_task(
&node,
Some(&prettier_dir),
Some(worktree_id),
cx,
)
- },
- )
+ })
})
.ok()?
{
log::debug!("Found already started prettier in {prettier_dir:?}");
- return Some((Some(prettier_dir), prettier_task?.await.log_err()?));
+ return Some((Some(prettier_dir), prettier_task.await.log_err()?));
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
@@ -735,12 +737,9 @@ pub(super) async fn format_with_prettier(
match prettier_task.await {
Ok(prettier) => {
- let buffer_path = buffer
- .update(cx, |buffer, cx| {
- File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
- })
- .ok()
- .flatten();
+ let buffer_path = buffer.update(cx, |buffer, cx| {
+ File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
+ });
let format_result = prettier
.format(buffer, buffer_path, ignore_dir, cx)
@@ -1587,16 +1587,16 @@ impl Project {
response.payload.project_id,
path_style,
)
- })?;
+ });
let buffer_store = cx.new(|cx| {
BufferStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx)
- })?;
+ });
let image_store = cx.new(|cx| {
ImageStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx)
- })?;
+ });
let environment =
- cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, true, cx))?;
+ cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, true, cx));
let breakpoint_store = cx.new(|_| {
BreakpointStore::remote(
remote_id,
@@ -1604,7 +1604,7 @@ impl Project {
buffer_store.clone(),
worktree_store.clone(),
)
- })?;
+ });
let dap_store = cx.new(|cx| {
DapStore::new_collab(
remote_id,
@@ -1614,7 +1614,7 @@ impl Project {
fs.clone(),
cx,
)
- })?;
+ });
let lsp_store = cx.new(|cx| {
LspStore::new_remote(
@@ -1625,7 +1625,7 @@ impl Project {
remote_id,
cx,
)
- })?;
+ });
let task_store = cx.new(|cx| {
if run_tasks {
@@ -1640,7 +1640,7 @@ impl Project {
} else {
TaskStore::Noop
}
- })?;
+ });
let settings_observer = cx.new(|cx| {
SettingsObserver::new_remote(
@@ -1650,7 +1650,7 @@ impl Project {
None,
cx,
)
- })?;
+ });
let git_store = cx.new(|cx| {
GitStore::remote(
@@ -1661,9 +1661,9 @@ impl Project {
remote_id,
cx,
)
- })?;
+ });
- let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx))?;
+ let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx));
let replica_id = ReplicaId::new(response.payload.replica_id as u16);
let project = cx.new(|cx| {
@@ -1750,19 +1750,17 @@ impl Project {
project.add_worktree(&worktree, cx);
}
project
- })?;
+ });
let weak_project = project.downgrade();
- lsp_store
- .update(&mut cx, |lsp_store, cx| {
- lsp_store.set_language_server_statuses_from_proto(
- weak_project,
- response.payload.language_servers,
- response.payload.language_server_capabilities,
- cx,
- );
- })
- .ok();
+ lsp_store.update(&mut cx, |lsp_store, cx| {
+ lsp_store.set_language_server_statuses_from_proto(
+ weak_project,
+ response.payload.language_servers,
+ response.payload.language_server_capabilities,
+ cx,
+ );
+ });
let subscriptions = subscriptions
.into_iter()
@@ -1799,14 +1797,14 @@ impl Project {
.map(|peer| peer.user_id)
.collect();
user_store
- .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
+ .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
.await?;
project.update(&mut cx, |this, cx| {
this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
this.client_subscriptions.extend(subscriptions);
anyhow::Ok(())
- })??;
+ })?;
Ok(project)
}
@@ -1860,34 +1858,28 @@ impl Project {
let languages = LanguageRegistry::test(cx.background_executor().clone());
let clock = Arc::new(FakeSystemClock::new());
let http_client = http_client::FakeHttpClient::with_404_response();
- let client = cx
- .update(|cx| client::Client::new(clock, http_client.clone(), cx))
- .unwrap();
- let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)).unwrap();
- let project = cx
- .update(|cx| {
- Project::local(
- client,
- node_runtime::NodeRuntime::unavailable(),
- user_store,
- Arc::new(languages),
- fs,
- None,
- false,
- cx,
- )
- })
- .unwrap();
+ let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
+ let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
+ let project = cx.update(|cx| {
+ Project::local(
+ client,
+ node_runtime::NodeRuntime::unavailable(),
+ user_store,
+ Arc::new(languages),
+ fs,
+ None,
+ false,
+ cx,
+ )
+ });
for path in root_paths {
- let (tree, _) = project
+ let (tree, _): (Entity<Worktree>, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree(path, true, cx)
})
- .unwrap()
.await
.unwrap();
tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
- .unwrap()
.await;
}
project
@@ -2314,7 +2306,7 @@ impl Project {
let lsp_store = self.lsp_store().downgrade();
cx.spawn(async move |project, cx| {
let (old_abs_path, new_abs_path) = {
- let root_path = worktree.read_with(cx, |this, _| this.abs_path())?;
+ let root_path = worktree.read_with(cx, |this, _| this.abs_path());
let new_abs_path = if is_root_entry {
root_path
.parent()
@@ -2338,7 +2330,7 @@ impl Project {
let entry = worktree_store
.update(cx, |worktree_store, cx| {
worktree_store.rename_entry(entry_id, new_path.clone(), cx)
- })?
+ })
.await?;
project
@@ -2755,7 +2747,7 @@ impl Project {
let buffer = task.await?;
let project_entry_id = buffer.read_with(cx, |buffer, _cx| {
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id())
- })?;
+ });
Ok((project_entry_id, buffer))
})
@@ -2955,7 +2947,7 @@ impl Project {
// Check if metadata already exists (e.g., for remote images)
let needs_metadata =
- cx.read_entity(&image_item, |item, _| item.image_metadata.is_none())?;
+ cx.read_entity(&image_item, |item, _| item.image_metadata.is_none());
if needs_metadata {
let project = weak_project.upgrade().context("Project dropped")?;
@@ -2964,7 +2956,7 @@ impl Project {
image_item.update(cx, |image_item, cx| {
image_item.image_metadata = Some(metadata);
cx.emit(ImageItemEvent::MetadataUpdated);
- })?;
+ });
}
Ok(image_item)
@@ -4372,20 +4364,18 @@ impl Project {
path: &RelPath,
cx: &mut AsyncApp,
) -> Option<ResolvedPath> {
- worktree
- .read_with(cx, |worktree, _| {
- worktree.entry_for_path(path).map(|entry| {
- let project_path = ProjectPath {
- worktree_id: worktree.id(),
- path: entry.path.clone(),
- };
- ResolvedPath::ProjectPath {
- project_path,
- is_dir: entry.is_dir(),
- }
- })
+ worktree.read_with(cx, |worktree, _| {
+ worktree.entry_for_path(path).map(|entry| {
+ let project_path = ProjectPath {
+ worktree_id: worktree.id(),
+ path: entry.path.clone(),
+ };
+ ResolvedPath::ProjectPath {
+ project_path,
+ is_dir: entry.is_dir(),
+ }
})
- .ok()?
+ })
}
pub fn list_directory(
@@ -4653,7 +4643,7 @@ impl Project {
this.disconnected_from_host(cx);
}
Ok(())
- })?
+ })
}
async fn handle_add_collaborator(
@@ -4676,7 +4666,7 @@ impl Project {
cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
this.collaborators
.insert(collaborator.peer_id, collaborator);
- })?;
+ });
Ok(())
}
@@ -4720,7 +4710,7 @@ impl Project {
new_peer_id,
});
Ok(())
- })?
+ })
}
async fn handle_remove_collaborator(
@@ -4747,7 +4737,7 @@ impl Project {
cx.emit(Event::CollaboratorLeft(peer_id));
Ok(())
- })?
+ })
}
async fn handle_update_project(
@@ -4769,7 +4759,7 @@ impl Project {
this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
}
Ok(())
- })?
+ })
}
async fn handle_toast(
@@ -4783,7 +4773,7 @@ impl Project {
message: envelope.payload.message,
});
Ok(())
- })?
+ })
}
async fn handle_language_server_prompt_request(
@@ -4811,7 +4801,7 @@ impl Project {
}));
anyhow::Ok(())
- })??;
+ })?;
// We drop `this` to avoid holding a reference in this future for too
// long.
@@ -4843,7 +4833,7 @@ impl Project {
notification_id: envelope.payload.notification_id.into(),
});
Ok(())
- })?
+ })
}
// Collab sends UpdateWorktree protos as messages
@@ -4861,7 +4851,7 @@ impl Project {
});
}
Ok(())
- })?
+ })
}
async fn handle_update_buffer_from_remote_server(
@@ -4877,7 +4867,7 @@ impl Project {
.detach_and_log_err(cx);
}
this.buffer_store.clone()
- })?;
+ });
BufferStore::handle_update_buffer(buffer_store, envelope, cx).await
}
@@ -4886,12 +4876,12 @@ impl Project {
envelope: TypedEnvelope<proto::TrustWorktrees>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- if this.read_with(&cx, |project, _| project.is_via_collab())? {
+ if this.read_with(&cx, |project, _| project.is_via_collab()) {
return Ok(proto::Ack {});
}
let trusted_worktrees = cx
- .update(|cx| TrustedWorktrees::try_get_global(cx))?
+ .update(|cx| TrustedWorktrees::try_get_global(cx))
.context("missing trusted worktrees")?;
trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| {
trusted_worktrees.trust(
@@ -4904,7 +4894,7 @@ impl Project {
.collect(),
cx,
);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -4913,12 +4903,12 @@ impl Project {
envelope: TypedEnvelope<proto::RestrictWorktrees>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- if this.read_with(&cx, |project, _| project.is_via_collab())? {
+ if this.read_with(&cx, |project, _| project.is_via_collab()) {
return Ok(proto::Ack {});
}
let trusted_worktrees = cx
- .update(|cx| TrustedWorktrees::try_get_global(cx))?
+ .update(|cx| TrustedWorktrees::try_get_global(cx))
.context("missing trusted worktrees")?;
trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| {
let worktree_store = this.read(cx).worktree_store().downgrade();
@@ -4930,7 +4920,7 @@ impl Project {
.map(PathTrust::Worktree)
.collect::<HashSet<_>>();
trusted_worktrees.restrict(worktree_store, restricted_paths, cx);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -4947,7 +4937,7 @@ impl Project {
.detach_and_log_err(cx);
}
this.buffer_store.clone()
- })?;
+ });
BufferStore::handle_update_buffer(buffer_store, envelope, cx).await
}
@@ -4996,7 +4986,7 @@ impl Project {
cx,
)
})
- })?
+ })
}
async fn handle_toggle_lsp_logs(
@@ -5018,7 +5008,7 @@ impl Project {
enabled: envelope.payload.enabled,
toggled_log_kind,
})
- })?;
+ });
Ok(())
}
@@ -5032,7 +5022,7 @@ impl Project {
this.buffer_store.update(cx, |this, cx| {
this.handle_synchronize_buffers(envelope, cx, client)
})
- })??;
+ })?;
Ok(response)
}
@@ -5044,12 +5034,12 @@ impl Project {
) -> Result<proto::FindSearchCandidatesResponse> {
let peer_id = envelope.original_sender_id()?;
let message = envelope.payload;
- let path_style = this.read_with(&cx, |this, cx| this.path_style(cx))?;
+ let path_style = this.read_with(&cx, |this, cx| this.path_style(cx));
let query =
SearchQuery::from_proto(message.query.context("missing query field")?, path_style)?;
let results = this.update(&mut cx, |this, cx| {
this.search_impl(query, cx).matching_buffers(cx)
- })?;
+ });
let mut response = proto::FindSearchCandidatesResponse {
buffer_ids: Vec::new(),
@@ -5059,7 +5049,7 @@ impl Project {
this.update(&mut cx, |this, cx| {
let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
response.buffer_ids.push(buffer_id.to_proto());
- })?;
+ });
}
Ok(response)
@@ -5073,7 +5063,7 @@ impl Project {
let peer_id = envelope.original_sender_id()?;
let buffer_id = BufferId::new(envelope.payload.id)?;
let buffer = this
- .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))?
+ .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))
.await?;
Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
}
@@ -5089,7 +5079,7 @@ impl Project {
let open_buffer = this
.update(&mut cx, |this, cx| {
this.open_buffer(ProjectPath { worktree_id, path }, cx)
- })?
+ })
.await?;
Project::respond_to_open_buffer_request(this, open_buffer, peer_id, &mut cx)
}
@@ -5100,7 +5090,7 @@ impl Project {
mut cx: AsyncApp,
) -> Result<proto::OpenBufferResponse> {
let buffer = this
- .update(&mut cx, |this, cx| this.create_buffer(true, cx))?
+ .update(&mut cx, |this, cx| this.create_buffer(true, cx))
.await?;
let peer_id = envelope.original_sender_id()?;
@@ -5123,7 +5113,7 @@ impl Project {
Ok(proto::OpenBufferResponse {
buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
})
- })?
+ })
}
fn create_buffer_for_peer(
@@ -5149,9 +5139,7 @@ impl Project {
this.image_store.update(cx, |image_store, cx| {
image_store.handle_create_image_for_peer(envelope, cx)
})
- })?
- .log_err();
- Ok(())
+ })
}
fn synchronize_remote_buffers(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
@@ -5480,7 +5468,7 @@ impl Project {
};
cx.spawn(async move |cx| {
let file = worktree
- .update(cx, |worktree, cx| worktree.load_file(&rel_path, cx))?
+ .update(cx, |worktree, cx| worktree.load_file(&rel_path, cx))
.await
.context("Failed to load settings file")?;
@@ -5488,7 +5476,7 @@ impl Project {
let new_text = cx.read_global::<SettingsStore, _>(|store, cx| {
store.new_text_for_update(file.text, move |settings| update(settings, cx))
- })?;
+ });
worktree
.update(cx, |worktree, cx| {
let line_ending = text::LineEnding::detect(&new_text);
@@ -5500,7 +5488,7 @@ impl Project {
has_bom,
cx,
)
- })?
+ })
.await
.context("Failed to write settings file")?;
@@ -189,10 +189,7 @@ impl Search {
let query = Arc::new(query);
let (candidate_searcher, tasks) = match self.kind {
SearchKind::OpenBuffersOnly => {
- let Ok(open_buffers) = cx.update(|cx| self.all_loaded_buffers(&query, cx))
- else {
- return;
- };
+ let open_buffers = cx.update(|cx| self.all_loaded_buffers(&query, cx));
let fill_requests = cx
.background_spawn(async move {
for buffer in open_buffers {
@@ -259,16 +256,14 @@ impl Search {
});
let weak_buffer_store = self.buffer_store.downgrade();
let buffer_store = self.buffer_store;
- let Ok(guard) = cx.update(|cx| {
+ let guard = cx.update(|cx| {
Project::retain_remotely_created_models_impl(
&models,
&buffer_store,
&self.worktree_store,
cx,
)
- }) else {
- return;
- };
+ });
let issue_remote_buffers_request = cx
.spawn(async move |cx| {
@@ -387,7 +382,7 @@ impl Search {
let (mut snapshot, worktree_settings) = worktree
.read_with(cx, |this, _| {
Some((this.snapshot(), this.as_local()?.settings()))
- })?
+ })
.context("The worktree is not local")?;
if query.include_ignored() {
// Pre-fetch all of the ignored directories as they're going to be searched.
@@ -409,11 +404,11 @@ impl Search {
.map(|path| local.add_path_prefix_to_scan(path).into_future())
.collect::<Vec<_>>();
Some(barrier)
- })?;
+ });
if let Some(barriers) = barrier {
futures::future::join_all(barriers).await;
}
- snapshot = worktree.read_with(cx, |this, _| this.snapshot())?;
+ snapshot = worktree.read_with(cx, |this, _| this.snapshot());
}
let tx = tx.clone();
let results = results.clone();
@@ -483,7 +478,7 @@ impl Search {
.into_iter()
.map(|path| this.open_buffer(path, cx))
.collect::<FuturesOrdered<_>>()
- })?;
+ });
while let Some(buffer) = buffers.next().await {
if let Some(buffer) = buffer.log_err() {
@@ -508,7 +503,7 @@ impl Search {
) {
_ = maybe!(async move {
while let Ok(buffer) = rx.recv().await {
- let snapshot = buffer.read_with(&mut cx, |this, _| this.snapshot())?;
+ let snapshot = buffer.read_with(&mut cx, |this, _| this.snapshot());
let (tx, rx) = oneshot::channel();
find_all_matches_tx.send((buffer, snapshot, tx)).await?;
results.send(rx).await?;
@@ -863,7 +863,7 @@ impl SettingsObserver {
)],
cx,
);
- })?;
+ });
Ok(())
}
@@ -878,7 +878,7 @@ impl SettingsObserver {
.result()
.context("setting new user settings")?;
anyhow::Ok(())
- })??;
+ })?;
Ok(())
}
@@ -1191,7 +1191,7 @@ impl SettingsObserver {
return;
};
if let Some(user_tasks_content) = user_tasks_content {
- let Ok(()) = task_store.update(cx, |task_store, cx| {
+ task_store.update(cx, |task_store, cx| {
task_store
.update_user_tasks(
TaskSettingsLocation::Global(&file_path),
@@ -1199,20 +1199,16 @@ impl SettingsObserver {
cx,
)
.log_err();
- }) else {
- return;
- };
+ });
}
while let Some(user_tasks_content) = user_tasks_file_rx.next().await {
- let Ok(result) = task_store.update(cx, |task_store, cx| {
+ let result = task_store.update(cx, |task_store, cx| {
task_store.update_user_tasks(
TaskSettingsLocation::Global(&file_path),
Some(&user_tasks_content),
cx,
)
- }) else {
- break;
- };
+ });
weak_entry
.update(cx, |_, cx| match result {
@@ -1246,7 +1242,7 @@ impl SettingsObserver {
return;
};
if let Some(user_tasks_content) = user_tasks_content {
- let Ok(()) = task_store.update(cx, |task_store, cx| {
+ task_store.update(cx, |task_store, cx| {
task_store
.update_user_debug_scenarios(
TaskSettingsLocation::Global(&file_path),
@@ -1254,20 +1250,16 @@ impl SettingsObserver {
cx,
)
.log_err();
- }) else {
- return;
- };
+ });
}
while let Some(user_tasks_content) = user_tasks_file_rx.next().await {
- let Ok(result) = task_store.update(cx, |task_store, cx| {
+ let result = task_store.update(cx, |task_store, cx| {
task_store.update_user_debug_scenarios(
TaskSettingsLocation::Global(&file_path),
Some(&user_tasks_content),
cx,
)
- }) else {
- break;
- };
+ });
weak_entry
.update(cx, |_, cx| match result {
@@ -892,17 +892,13 @@ mod test_inventory {
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
let id_base = task_source_kind.to_id_base();
- inventory
- .update(&mut cx, |inventory, _| {
- inventory.task_scheduled(
- task_source_kind.clone(),
- task.resolve_task(&id_base, &TaskContext::default())
- .unwrap_or_else(|| {
- panic!("Failed to resolve task with name {task_name}")
- }),
- )
- })
- .unwrap();
+ inventory.update(&mut cx, |inventory, _| {
+ inventory.task_scheduled(
+ task_source_kind.clone(),
+ task.resolve_task(&id_base, &TaskContext::default())
+ .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
+ )
+ });
})
}
@@ -925,17 +921,13 @@ mod test_inventory {
.find(|(_, task)| task.label == task_name)
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
let id_base = task_source_kind.to_id_base();
- inventory
- .update(&mut cx, |inventory, _| {
- inventory.task_scheduled(
- task_source_kind.clone(),
- task.resolve_task(&id_base, &TaskContext::default())
- .unwrap_or_else(|| {
- panic!("Failed to resolve task with name {task_name}")
- }),
- );
- })
- .unwrap();
+ inventory.update(&mut cx, |inventory, _| {
+ inventory.task_scheduled(
+ task_source_kind.clone(),
+ task.resolve_task(&id_base, &TaskContext::default())
+ .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
+ );
+ });
})
}
@@ -84,7 +84,7 @@ impl TaskStore {
anyhow::bail!("empty task store cannot handle task context requests")
}
})
- })??;
+ })?;
let buffer_store = buffer_store
.upgrade()
.context("no buffer store when handling task context request")?;
@@ -115,7 +115,7 @@ impl TaskStore {
.with_context(|| format!("no local buffer with id {buffer_id}")),
)
}
- })?
+ })
.await?;
let location = Location {
@@ -143,7 +143,7 @@ impl TaskStore {
variables
};
store.task_context_for_location(captured_variables, location, cx)
- })?;
+ });
let task_context = context_task.await.unwrap_or_default();
Ok(proto::TaskContext {
project_env: task_context.project_env.into_iter().collect(),
@@ -319,7 +319,6 @@ fn local_task_context_for_location(
.update(cx, |environment, cx| {
environment.buffer_environment(&location.buffer, &worktree_store, cx)
})
- .ok()?
.await;
let mut task_variables = cx
@@ -335,7 +334,6 @@ fn local_task_context_for_location(
cx,
)
})
- .ok()?
.await
.log_err()?;
// Remove all custom entries starting with _, as they're not intended for use by the end user.
@@ -376,15 +374,12 @@ fn remote_task_context_for_location(
cx,
)
})
- .ok()?
.await
.log_err()
.unwrap_or_default();
remote_context.extend(captured_variables);
- let buffer_id = cx
- .update(|cx| location.buffer.read(cx).remote_id().to_proto())
- .ok()?;
+ let buffer_id = cx.update(|cx| location.buffer.read(cx).remote_id().to_proto());
let context_task = upstream_client.request(proto::TaskContextForLocation {
project_id,
location: Some(proto::Location {
@@ -472,7 +467,7 @@ fn combine_task_variables(
toolchain_store.clone(),
cx,
)
- })?
+ })
.await
.context("building basic default context")?;
captured_variables.extend(baseline);
@@ -491,7 +486,7 @@ fn combine_task_variables(
toolchain_store,
cx,
)
- })?
+ })
.await
.context("building provider context")?,
);
@@ -59,12 +59,7 @@ impl TelemetryWorktreeSnapshot {
(path, snapshot)
});
- let Ok((worktree_path, _snapshot)) = worktree_info else {
- return TelemetryWorktreeSnapshot {
- worktree_path: String::new(),
- git_state: None,
- };
- };
+ let (worktree_path, _snapshot) = worktree_info;
let git_state = git_store
.update(cx, |git_store, cx| {
@@ -78,8 +73,6 @@ impl TelemetryWorktreeSnapshot {
})
.cloned()
})
- .ok()
- .flatten()
.map(|repo| {
repo.update(cx, |repo, _| {
let current_branch =
@@ -111,10 +104,7 @@ impl TelemetryWorktreeSnapshot {
});
let git_state = match git_state {
- Some(git_state) => match git_state.ok() {
- Some(git_state) => git_state.await.ok(),
- None => None,
- },
+ Some(receiver) => receiver.await.ok(),
None => None,
};
@@ -129,9 +129,9 @@ impl Project {
.await
.ok();
let lister = language?.toolchain_lister()?;
- return cx
- .update(|cx| lister.activation_script(&toolchain, shell_kind, cx))
- .ok();
+ return Some(
+ cx.update(|cx| lister.activation_script(&toolchain, shell_kind, cx)),
+ );
}
None
})
@@ -345,9 +345,9 @@ impl Project {
.await
.ok();
let lister = language?.toolchain_lister()?;
- return cx
- .update(|cx| lister.activation_script(&toolchain, shell_kind, cx))
- .ok();
+ return Some(
+ cx.update(|cx| lister.activation_script(&toolchain, shell_kind, cx)),
+ );
}
None
})
@@ -270,7 +270,7 @@ impl ToolchainStore {
RelPath::empty().into()
};
Ok(this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx))
- })??
+ })?
.await;
Ok(proto::Ack {})
}
@@ -292,7 +292,7 @@ impl ToolchainStore {
language_name,
cx,
)
- })?
+ })
.await;
Ok(proto::ActiveToolchainResponse {
@@ -322,7 +322,7 @@ impl ToolchainStore {
language_name,
cx,
))
- })??
+ })?
.await;
let has_values = toolchains.is_some();
let groups = if let Some(Toolchains { toolchains, .. }) = &toolchains {
@@ -380,7 +380,7 @@ impl ToolchainStore {
let language_name = LanguageName::from_proto(envelope.payload.language_name);
let path = PathBuf::from(envelope.payload.abs_path);
this.resolve_toolchain(path, language_name, cx)
- })?
+ })
.await;
let response = match toolchain {
Ok(toolchain) => {
@@ -539,9 +539,7 @@ impl LocalToolchainStore {
path: Arc::from(RelPath::empty()),
worktree_id,
});
- let abs_path = worktree
- .update(cx, |this, _| this.absolutize(&relative_path.path))
- .ok()?;
+ let abs_path = worktree.update(cx, |this, _| this.absolutize(&relative_path.path));
let project_env = environment
.update(cx, |environment, cx| {
@@ -551,7 +549,6 @@ impl LocalToolchainStore {
cx,
)
})
- .ok()?
.await;
cx.background_spawn(async move {
@@ -613,7 +610,7 @@ impl LocalToolchainStore {
path.as_path().into(),
cx,
)
- })?
+ })
.await;
cx.background_spawn(async move {
toolchain_lister
@@ -268,7 +268,7 @@ impl WorktreeStore {
None,
cx,
)
- })?
+ })
.await
})
}
@@ -293,7 +293,7 @@ impl WorktreeStore {
response.worktree_scan_id as usize,
cx,
)
- })?
+ })
.await
.map(Some),
None => Ok(None),
@@ -419,7 +419,7 @@ impl WorktreeStore {
cx,
)
}
- })?
+ })
.await?
.map(CreatedEntry::Included)
.unwrap_or_else(|| CreatedEntry::Excluded {
@@ -448,13 +448,13 @@ impl WorktreeStore {
response.worktree_scan_id as usize,
cx,
)
- })?
+ })
.await
.map(CreatedEntry::Included),
None => {
let abs_path = new_worktree.read_with(cx, |worktree, _| {
worktree.absolutize(&new_project_path.path)
- })?;
+ });
Ok(CreatedEntry::Excluded { abs_path })
}
}
@@ -507,15 +507,13 @@ impl WorktreeStore {
.ok()
.flatten()
{
- trusted_worktrees
- .update(cx, |trusted_worktrees, cx| {
- trusted_worktrees.can_trust(
- &worktree_store,
- worktree.read(cx).id(),
- cx,
- );
- })
- .ok();
+ trusted_worktrees.update(cx, |trusted_worktrees, cx| {
+ trusted_worktrees.can_trust(
+ &worktree_store,
+ worktree.read(cx).id(),
+ cx,
+ );
+ });
}
}
Ok(worktree)
@@ -559,7 +557,7 @@ impl WorktreeStore {
if let Some(existing_worktree) = this.read_with(cx, |this, cx| {
this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx)
- })? {
+ }) {
return Ok(existing_worktree);
}
@@ -583,11 +581,11 @@ impl WorktreeStore {
path_style,
cx,
)
- })?;
+ });
this.update(cx, |this, cx| {
this.add(&worktree, cx);
- })?;
+ });
Ok(worktree)
})
}
@@ -620,8 +618,7 @@ impl WorktreeStore {
if visible {
cx.update(|cx| {
cx.add_recent_document(abs_path.as_path());
- })
- .log_err();
+ });
}
Ok(worktree)
@@ -948,7 +945,7 @@ impl WorktreeStore {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
this.worktree_for_id(worktree_id, cx)
.context("worktree not found")
- })??;
+ })?;
Worktree::handle_create_entry(worktree, envelope.payload, cx).await
}
@@ -982,7 +979,7 @@ impl WorktreeStore {
scan_id,
this.copy_entry(entry_id, new_project_path.into(), cx),
))
- })??;
+ })?;
let entry = entry.await?;
Ok(proto::ProjectEntryResponse {
entry: entry.as_ref().map(|entry| entry.into()),
@@ -1008,7 +1005,7 @@ impl WorktreeStore {
}
this.worktree_for_entry(entry_id, cx)
.context("worktree not found")
- })??;
+ })?;
Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
}
@@ -1043,7 +1040,7 @@ impl WorktreeStore {
scan_id,
this.rename_entry(entry_id, (new_worktree_id, rel_path).into(), cx),
))
- })??;
+ })?;
Ok(proto::ProjectEntryResponse {
entry: match &task.await? {
CreatedEntry::Included(entry) => Some(entry.into()),
@@ -1060,7 +1057,7 @@ impl WorktreeStore {
) -> Result<proto::ExpandProjectEntryResponse> {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
- .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
+ .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))
.context("invalid request")?;
Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
}
@@ -1072,7 +1069,7 @@ impl WorktreeStore {
) -> Result<proto::ExpandAllForProjectEntryResponse> {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
- .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
+ .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))
.context("invalid request")?;
Worktree::handle_expand_all_for_entry(worktree, envelope.payload, cx).await
}
@@ -85,7 +85,7 @@ fn main() -> Result<(), anyhow::Error> {
.into_iter()
.map(|worktree| this.find_or_create_worktree(worktree, true, cx))
.collect::<Vec<_>>()
- })?;
+ });
let worktrees = futures::future::join_all(worktrees)
.await
@@ -94,7 +94,7 @@ fn main() -> Result<(), anyhow::Error> {
for (worktree, _) in &worktrees {
worktree
- .update(cx, |this, _| this.as_local().unwrap().scan_complete())?
+ .update(cx, |this, _| this.as_local().unwrap().scan_complete())
.await;
}
println!("Worktrees loaded");
@@ -102,9 +102,7 @@ fn main() -> Result<(), anyhow::Error> {
println!("Starting a project search");
let timer = std::time::Instant::now();
let mut first_match = None;
- let matches = project
- .update(cx, |this, cx| this.search(query, cx))
- .unwrap();
+ let matches = project.update(cx, |this, cx| this.search(query, cx));
let mut matched_files = 0;
let mut matched_chunks = 0;
while let Ok(match_result) = matches.rx.recv().await {
@@ -125,7 +123,7 @@ fn main() -> Result<(), anyhow::Error> {
"Finished project search after {elapsed:?}. Matched {matched_files} files and {matched_chunks} excerpts"
);
drop(project);
- cx.update(|cx| cx.quit())?;
+ cx.update(|cx| cx.quit());
anyhow::Ok(())
})
@@ -3879,9 +3879,9 @@ impl ProjectPanel {
let task = worktree.update(cx, |worktree, cx| {
worktree.copy_external_entries(target_directory, paths, fs, cx)
- })?;
+ });
- let opened_entries = task
+ let opened_entries: Vec<_> = task
.await
.with_context(|| "failed to copy external paths")?;
this.update(cx, |this, cx| {
@@ -8320,7 +8320,7 @@ impl project::ProjectItem for TestProjectItem {
cx: &mut App,
) -> Option<Task<anyhow::Result<Entity<Self>>>> {
let path = path.clone();
- Some(cx.spawn(async move |cx| cx.new(|_| Self { path })))
+ Some(cx.spawn(async move |cx| Ok(cx.new(|_| Self { path }))))
}
fn entry_id(&self, _: &App) -> Option<ProjectEntryId> {
@@ -37,7 +37,7 @@ pub fn init(cx: &mut App) {
.spawn(async move |cx| {
prompt_store_task
.await
- .and_then(|prompt_store| cx.new(|_cx| prompt_store))
+ .map(|prompt_store| cx.new(|_cx| prompt_store))
.map_err(Arc::new)
})
.shared();
@@ -611,7 +611,7 @@ pub fn connect(
cx.spawn(async move |cx| {
let connection = remote::connect(connection_options, delegate.clone(), cx).await?;
- cx.update(|cx| remote::RemoteClient::new(unique_identifier, connection, rx, delegate, cx))?
+ cx.update(|cx| remote::RemoteClient::new(unique_identifier, connection, rx, delegate, cx))
.await
})
}
@@ -631,12 +631,12 @@ pub async fn open_remote_project(
.update(|cx| {
// todo: These paths are wrong they may have column and line information
workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx)
- })?
+ })
.await
.context("fetching remote workspace position from db")?;
let mut options =
- cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx))?;
+ cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx));
options.window_bounds = workspace_position.window_bounds;
cx.open_window(options, |window, cx| {
@@ -755,7 +755,7 @@ pub async fn open_remote_project(
paths.clone(),
cx,
)
- })?
+ })
.await;
window
@@ -273,11 +273,9 @@ impl ProjectPicker {
.read_with(cx, |workspace, _| workspace.app_state().clone())
.ok()?;
- let remote_connection = project
- .read_with(cx, |project, cx| {
- project.remote_client()?.read(cx).connection()
- })
- .ok()??;
+ let remote_connection = project.read_with(cx, |project, cx| {
+ project.remote_client()?.read(cx).connection()
+ })?;
let (paths, paths_with_positions) =
determine_paths_with_positions(&remote_connection, paths).await;
@@ -1020,7 +1018,7 @@ impl RemoteServerProjects {
})?;
let home_dir = project
- .read_with(cx, |project, cx| project.resolve_abs_path("~", cx))?
+ .read_with(cx, |project, cx| project.resolve_abs_path("~", cx))
.await
.and_then(|path| path.into_abs_path())
.map(|path| RemotePathBuf::new(path, path_style))
@@ -2113,17 +2111,13 @@ impl RemoteServerProjects {
cx.spawn(async move |cx| {
if confirmation.await.ok() == Some(0) {
- remote_servers
- .update(cx, |this, cx| {
- this.delete_wsl_distro(index, cx);
- })
- .ok();
- remote_servers
- .update(cx, |this, cx| {
- this.mode = Mode::default_mode(&this.ssh_config_servers, cx);
- cx.notify();
- })
- .ok();
+ remote_servers.update(cx, |this, cx| {
+ this.delete_wsl_distro(index, cx);
+ });
+ remote_servers.update(cx, |this, cx| {
+ this.mode = Mode::default_mode(&this.ssh_config_servers, cx);
+ cx.notify();
+ });
}
anyhow::Ok(())
})
@@ -2269,17 +2263,13 @@ impl RemoteServerProjects {
cx.spawn(async move |cx| {
if confirmation.await.ok() == Some(0) {
- remote_servers
- .update(cx, |this, cx| {
- this.delete_ssh_server(index, cx);
- })
- .ok();
- remote_servers
- .update(cx, |this, cx| {
- this.mode = Mode::default_mode(&this.ssh_config_servers, cx);
- cx.notify();
- })
- .ok();
+ remote_servers.update(cx, |this, cx| {
+ this.delete_ssh_server(index, cx);
+ });
+ remote_servers.update(cx, |this, cx| {
+ this.mode = Mode::default_mode(&this.ssh_config_servers, cx);
+ cx.notify();
+ });
}
anyhow::Ok(())
})
@@ -361,7 +361,7 @@ pub async fn connect(
cx.update_default_global(|pool: &mut ConnectionPool, cx| {
pool.connect(connection_options.clone(), delegate.clone(), cx)
})
- })?
+ })
.await
.map_err(|e| e.cloned())
}
@@ -389,7 +389,7 @@ impl RemoteClient {
"client",
remote_connection.has_wsl_interop(),
)
- })?;
+ });
let path_style = remote_connection.path_style();
let this = cx.new(|_| Self {
@@ -398,7 +398,7 @@ impl RemoteClient {
connection_options: remote_connection.connection_options(),
path_style,
state: Some(State::Connecting),
- })?;
+ });
let io_task = remote_connection.start_proxy(
unique_identifier,
@@ -461,7 +461,7 @@ impl RemoteClient {
multiplex_task,
heartbeat_task,
});
- })?;
+ });
Ok(Some(this))
});
@@ -621,7 +621,7 @@ impl RemoteClient {
let remote_connection = cx
.update_global(|pool: &mut ConnectionPool, cx| {
pool.connect(connection_options, delegate.clone(), cx)
- })?
+ })
.await
.map_err(|error| error.cloned())?;
@@ -966,7 +966,6 @@ impl RemoteClient {
panic!("missing test connection")
}
})
- .unwrap()
.await
.unwrap();
@@ -1128,7 +1127,7 @@ impl ConnectionPool {
Err(Arc::new(error))
}
}
- })?
+ })
}
})
.shared();
@@ -66,7 +66,7 @@ impl DockerExecConnection {
AppVersion::global(cx),
AppCommitSha::try_global(cx),
)
- })?;
+ });
let remote_platform = this.check_remote_platform().await?;
this.path_style = match remote_platform.os {
@@ -200,7 +200,7 @@ impl DockerExecConnection {
)
}
_ => Ok(Some(AppVersion::global(cx))),
- })??;
+ })?;
let tmp_path_gz = paths::remote_server_dir_relative().join(
RelPath::unix(&format!(
@@ -592,7 +592,7 @@ impl SshRemoteConnection {
};
let (release_channel, version) =
- cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx)))?;
+ cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx)));
this.remote_binary_path = Some(
this.ensure_server_binary(&delegate, release_channel, version, cx)
.await?,
@@ -668,7 +668,7 @@ impl SshRemoteConnection {
)
}
_ => Ok(Some(AppVersion::global(cx))),
- })??;
+ })?;
let tmp_path_gz = remote_server_dir_relative().join(
RelPath::unix(&format!(
@@ -65,7 +65,7 @@ impl WslRemoteConnection {
connection_options.user
);
let (release_channel, version) =
- cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx)))?;
+ cx.update(|cx| (ReleaseChannel::global(cx), AppVersion::global(cx)));
let mut this = Self {
connection_options,
@@ -210,7 +210,7 @@ impl WslRemoteConnection {
let wanted_version = match release_channel {
ReleaseChannel::Nightly | ReleaseChannel::Dev => None,
- _ => Some(cx.update(|cx| AppVersion::global(cx))?),
+ _ => Some(cx.update(|cx| AppVersion::global(cx))),
};
let src_path = delegate
@@ -443,7 +443,7 @@ impl HeadlessProject {
mut cx: AsyncApp,
) -> Result<proto::AddWorktreeResponse> {
use client::ErrorCodeExt;
- let fs = this.read_with(&cx, |this, _| this.fs.clone())?;
+ let fs = this.read_with(&cx, |this, _| this.fs.clone());
let path = PathBuf::from(shellexpand::tilde(&message.payload.path).to_string());
let canonicalized = match fs.canonicalize(&path).await {
@@ -480,7 +480,7 @@ impl HeadlessProject {
true,
&mut cx,
)
- })?
+ })
.await?;
let response = this.read_with(&cx, |_, cx| {
@@ -489,7 +489,7 @@ impl HeadlessProject {
worktree_id: worktree.id().to_proto(),
canonicalized_path: canonicalized.to_string_lossy().into_owned(),
}
- })?;
+ });
// We spawn this asynchronously, so that we can send the response back
// *before* `worktree_store.add()` can send out UpdateProject requests
@@ -508,8 +508,7 @@ impl HeadlessProject {
this.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.add(&worktree, cx);
});
- })
- .log_err();
+ });
})
.detach();
@@ -526,7 +525,7 @@ impl HeadlessProject {
this.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.remove_worktree(worktree_id, cx);
});
- })?;
+ });
Ok(proto::Ack {})
}
@@ -542,16 +541,16 @@ impl HeadlessProject {
let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_buffer(ProjectPath { worktree_id, path }, cx)
});
- anyhow::Ok((buffer_store, buffer))
- })??;
+ (buffer_store, buffer)
+ });
let buffer = buffer.await?;
- let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
+ let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id());
buffer_store.update(&mut cx, |buffer_store, cx| {
buffer_store
.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
.detach_and_log_err(cx);
- })?;
+ });
Ok(proto::OpenBufferResponse {
buffer_id: buffer_id.to_proto(),
@@ -571,21 +570,21 @@ impl HeadlessProject {
let (worktree_store, session) = this.read_with(&cx, |this, _| {
(this.worktree_store.clone(), this.session.clone())
- })?;
+ });
let worktree = worktree_store
- .read_with(&cx, |store, cx| store.worktree_for_id(worktree_id, cx))?
+ .read_with(&cx, |store, cx| store.worktree_for_id(worktree_id, cx))
.context("worktree not found")?;
let load_task = worktree.update(&mut cx, |worktree, cx| {
worktree.load_binary_file(path.as_ref(), cx)
- })?;
+ });
let loaded_file = load_task.await?;
let content = loaded_file.content;
let file = loaded_file.file;
- let proto_file = worktree.read_with(&cx, |_worktree, cx| file.to_proto(cx))?;
+ let proto_file = worktree.read_with(&cx, |_worktree, cx| file.to_proto(cx));
let image_id =
ImageId::from(NonZeroU64::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).unwrap());
@@ -629,9 +628,9 @@ impl HeadlessProject {
mut cx: AsyncApp,
) -> Result<proto::Ack> {
let trusted_worktrees = cx
- .update(|cx| TrustedWorktrees::try_get_global(cx))?
+ .update(|cx| TrustedWorktrees::try_get_global(cx))
.context("missing trusted worktrees")?;
- let worktree_store = this.read_with(&cx, |project, _| project.worktree_store.clone())?;
+ let worktree_store = this.read_with(&cx, |project, _| project.worktree_store.clone());
trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| {
trusted_worktrees.trust(
&worktree_store,
@@ -643,7 +642,7 @@ impl HeadlessProject {
.collect(),
cx,
);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -653,10 +652,9 @@ impl HeadlessProject {
mut cx: AsyncApp,
) -> Result<proto::Ack> {
let trusted_worktrees = cx
- .update(|cx| TrustedWorktrees::try_get_global(cx))?
+ .update(|cx| TrustedWorktrees::try_get_global(cx))
.context("missing trusted worktrees")?;
- let worktree_store =
- this.read_with(&cx, |project, _| project.worktree_store.downgrade())?;
+ let worktree_store = this.read_with(&cx, |project, _| project.worktree_store.downgrade());
trusted_worktrees.update(&mut cx, |trusted_worktrees, cx| {
let restricted_paths = envelope
.payload
@@ -666,7 +664,7 @@ impl HeadlessProject {
.map(PathTrust::Worktree)
.collect::<HashSet<_>>();
trusted_worktrees.restrict(worktree_store, restricted_paths, cx);
- })?;
+ });
Ok(proto::Ack {})
}
@@ -680,16 +678,16 @@ impl HeadlessProject {
let buffer = this
.buffer_store
.update(cx, |buffer_store, cx| buffer_store.create_buffer(true, cx));
- anyhow::Ok((buffer_store, buffer))
- })??;
+ (buffer_store, buffer)
+ });
let buffer = buffer.await?;
- let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
+ let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id());
buffer_store.update(&mut cx, |buffer_store, cx| {
buffer_store
.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
.detach_and_log_err(cx);
- })?;
+ });
Ok(proto::OpenBufferResponse {
buffer_id: buffer_id.to_proto(),
@@ -719,7 +717,7 @@ impl HeadlessProject {
log_store.toggle_lsp_logs(server_id, envelope.payload.enabled, toggled_log_kind);
});
anyhow::Ok(())
- })??;
+ })?;
Ok(())
}
@@ -735,7 +733,7 @@ impl HeadlessProject {
this.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.find_or_create_worktree(settings_path, false, cx)
})
- })?
+ })
.await?;
let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
@@ -750,7 +748,7 @@ impl HeadlessProject {
});
(buffer, this.buffer_store.clone())
- })?;
+ });
let buffer = buffer.await?;
@@ -770,7 +768,7 @@ impl HeadlessProject {
});
buffer_id
- })?;
+ });
Ok(proto::OpenBufferResponse {
buffer_id: buffer_id.to_proto(),
@@ -797,21 +795,21 @@ impl HeadlessProject {
)
.into_handle(query, cx)
.matching_buffers(cx)
- })?;
+ });
let mut response = proto::FindSearchCandidatesResponse {
buffer_ids: Vec::new(),
};
- let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;
+ let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone());
while let Ok(buffer) = results.rx.recv().await {
- let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id())?;
+ let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id());
response.buffer_ids.push(buffer_id.to_proto());
buffer_store
.update(&mut cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
- })?
+ })
.await?;
}
@@ -823,7 +821,7 @@ impl HeadlessProject {
envelope: TypedEnvelope<proto::ListRemoteDirectory>,
cx: AsyncApp,
) -> Result<proto::ListRemoteDirectoryResponse> {
- let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
+ let fs = cx.read_entity(&this, |this, _| this.fs.clone());
let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string());
let check_info = envelope
.payload
@@ -855,7 +853,7 @@ impl HeadlessProject {
envelope: TypedEnvelope<proto::GetPathMetadata>,
cx: AsyncApp,
) -> Result<proto::GetPathMetadataResponse> {
- let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
+ let fs = cx.read_entity(&this, |this, _| this.fs.clone());
let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string());
let metadata = fs.metadata(&expanded).await?;
@@ -942,7 +940,7 @@ impl HeadlessProject {
this.environment.update(cx, |environment, cx| {
environment.local_directory_environment(&shell, directory.into(), cx)
})
- })?
+ })
.await
.context("failed to get directory environment")?
.into_iter()
@@ -242,7 +242,7 @@ fn start_server(
// when calling quit, but it should be.
cx.shutdown();
cx.quit();
- })?;
+ });
break;
}
_ = app_quit_rx.next().fuse() => {
@@ -939,7 +939,7 @@ pub fn handle_settings_file_changes(
});
cx.spawn(async move |cx| {
while let Some(server_settings_content) = server_settings_file.next().await {
- let result = cx.update_global(|store: &mut SettingsStore, cx| {
+ cx.update_global(|store: &mut SettingsStore, cx| {
let result = store.set_server_settings(&server_settings_content, cx);
if let Err(err) = &result {
log::error!("Failed to load server settings: {err}");
@@ -947,9 +947,6 @@ pub fn handle_settings_file_changes(
settings_changed(result.err(), cx);
cx.refresh_windows();
});
- if result.is_err() {
- break; // App dropped
- }
}
})
.detach();
@@ -294,15 +294,13 @@ impl NativeRunningKernel {
if let Err(err) = result {
log::error!("kernel: handling failed for {name}: {err:?}");
- session
- .update(cx, |session, cx| {
- session.kernel_errored(
- format!("handling failed for {name}: {err}"),
- cx,
- );
- cx.notify();
- })
- .ok();
+ session.update(cx, |session, cx| {
+ session.kernel_errored(
+ format!("handling failed for {name}: {err}"),
+ cx,
+ );
+ cx.notify();
+ });
}
}
}
@@ -328,13 +326,11 @@ impl NativeRunningKernel {
log::error!("{}", error_message);
- session
- .update(cx, |session, cx| {
- session.kernel_errored(error_message, cx);
+ session.update(cx, |session, cx| {
+ session.kernel_errored(error_message, cx);
- cx.notify();
- })
- .ok();
+ cx.notify();
+ });
});
anyhow::Ok(Box::new(Self {
@@ -561,7 +561,7 @@ impl project::ProjectItem for NotebookItem {
if path.path.extension().unwrap_or_default() == "ipynb" {
Some(cx.spawn(async move |cx| {
let abs_path = project
- .read_with(cx, |project, cx| project.absolute_path(&path, cx))?
+ .read_with(cx, |project, cx| project.absolute_path(&path, cx))
.with_context(|| format!("finding the absolute path of {path:?}"))?;
// todo: watch for changes to the file
@@ -586,16 +586,16 @@ impl project::ProjectItem for NotebookItem {
let id = project
.update(cx, |project, cx| {
project.entry_for_path(&path, cx).map(|entry| entry.id)
- })?
+ })
.context("Entry not found")?;
- cx.new(|_| NotebookItem {
+ Ok(cx.new(|_| NotebookItem {
path: abs_path,
project_path: path,
languages,
notebook,
id,
- })
+ }))
}))
} else {
None
@@ -82,29 +82,26 @@ pub fn open_rules_library(
let store = PromptStore::global(cx);
cx.spawn(async move |cx| {
// We query windows in spawn so that all windows have been returned to GPUI
- let existing_window = cx
- .update(|cx| {
- let existing_window = cx
- .windows()
- .into_iter()
- .find_map(|window| window.downcast::<RulesLibrary>());
- if let Some(existing_window) = existing_window {
- existing_window
- .update(cx, |rules_library, window, cx| {
- if let Some(prompt_to_select) = prompt_to_select {
- rules_library.load_rule(prompt_to_select, true, window, cx);
- }
- window.activate_window()
- })
- .ok();
+ let existing_window = cx.update(|cx| {
+ let existing_window = cx
+ .windows()
+ .into_iter()
+ .find_map(|window| window.downcast::<RulesLibrary>());
+ if let Some(existing_window) = existing_window {
+ existing_window
+ .update(cx, |rules_library, window, cx| {
+ if let Some(prompt_to_select) = prompt_to_select {
+ rules_library.load_rule(prompt_to_select, true, window, cx);
+ }
+ window.activate_window()
+ })
+ .ok();
- Some(existing_window)
- } else {
- None
- }
- })
- .ok()
- .flatten();
+ Some(existing_window)
+ } else {
+ None
+ }
+ });
if let Some(existing_window) = existing_window {
return Ok(existing_window);
@@ -151,7 +148,7 @@ pub fn open_rules_library(
})
},
)
- })?
+ })
})
}
@@ -67,7 +67,7 @@ impl AppSession {
let _serialization_task = cx.spawn(async move |_, cx| {
let mut current_window_stack = Vec::new();
loop {
- if let Some(windows) = cx.update(|cx| window_stack(cx)).ok().flatten()
+ if let Some(windows) = cx.update(|cx| window_stack(cx))
&& windows != current_window_stack
{
store_window_stack(&windows).await;
@@ -508,9 +508,9 @@ impl SettingsStore {
update: impl 'static + Send + FnOnce(&mut SettingsContent, &App),
) {
_ = self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| {
- cx.read_global(|store: &SettingsStore, cx| {
+ Ok(cx.read_global(|store: &SettingsStore, cx| {
store.new_text_for_update(old_text, |content| update(content, cx))
- })
+ }))
});
}
@@ -520,9 +520,9 @@ impl SettingsStore {
vscode_settings: VsCodeSettings,
) -> oneshot::Receiver<Result<()>> {
self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| {
- cx.read_global(|store: &SettingsStore, _cx| {
+ Ok(cx.read_global(|store: &SettingsStore, _cx| {
store.get_vscode_edits(old_text, &vscode_settings)
- })
+ }))
})
}
@@ -154,8 +154,7 @@ pub fn init(cx: &mut App) {
fn quit(_: &Quit, cx: &mut App) {
cx.spawn(async move |cx| {
- cx.update(|cx| cx.quit())?;
- anyhow::Ok(())
+ cx.update(|cx| cx.quit());
})
- .detach_and_log_err(cx);
+ .detach();
}
@@ -2751,24 +2751,17 @@ mod tests {
})
.detach();
cx.background_spawn(async move {
- #[cfg(target_os = "windows")]
- {
- let exit_status = completion_rx.recv().await.ok().flatten();
- if let Some(exit_status) = exit_status {
- assert!(
- !exit_status.success(),
- "Wrong shell command should result in a failure"
- );
- assert_eq!(exit_status.code(), Some(1));
- }
- }
- #[cfg(not(target_os = "windows"))]
- {
- let exit_status = completion_rx.recv().await.unwrap().unwrap();
+ // The channel may be closed if the terminal is dropped before sending
+ // the completion signal, which can happen with certain task scheduling orders.
+ let exit_status = completion_rx.recv().await.ok().flatten();
+ if let Some(exit_status) = exit_status {
assert!(
!exit_status.success(),
"Wrong shell command should result in a failure"
);
+ #[cfg(target_os = "windows")]
+ assert_eq!(exit_status.code(), Some(1));
+ #[cfg(not(target_os = "windows"))]
assert_eq!(exit_status.code(), None);
}
})
@@ -136,7 +136,7 @@ pub(crate) fn deserialize_terminal_panel(
terminal_panel.center = PaneGroup::with_root(center_group);
terminal_panel.active_pane =
active_pane.unwrap_or_else(|| terminal_panel.center.first_pane());
- })?;
+ });
}
}
}
@@ -251,30 +251,27 @@ async fn deserialize_pane_group(
.update(cx, |workspace, cx| default_working_directory(workspace, cx))
.ok()
.flatten();
- let Some(terminal) = project
+ let terminal = project
.update(cx, |project, cx| {
project.create_terminal_shell(working_directory, cx)
})
- .log_err()
- else {
+ .await
+ .log_err();
+ let Some(terminal) = terminal else {
return;
};
-
- let terminal = terminal.await.log_err();
pane.update_in(cx, |pane, window, cx| {
- if let Some(terminal) = terminal {
- let terminal_view = Box::new(cx.new(|cx| {
- TerminalView::new(
- terminal,
- workspace.clone(),
- Some(workspace_id),
- project.downgrade(),
- window,
- cx,
- )
- }));
- pane.add_item(terminal_view, true, false, None, window, cx);
- }
+ let terminal_view = Box::new(cx.new(|cx| {
+ TerminalView::new(
+ terminal,
+ workspace.clone(),
+ Some(workspace_id),
+ project.downgrade(),
+ window,
+ cx,
+ )
+ }));
+ pane.add_item(terminal_view, true, false, None, window, cx);
})
.ok();
}
@@ -281,11 +281,9 @@ impl TerminalPanel {
};
if let Some(workspace) = workspace.upgrade() {
- workspace
- .update(&mut cx, |workspace, _| {
- workspace.set_terminal_provider(TerminalProvider(terminal_panel.clone()))
- })
- .ok();
+ workspace.update(&mut cx, |workspace, _| {
+ workspace.set_terminal_provider(TerminalProvider(terminal_panel.clone()))
+ });
}
// Since panels/docks are loaded outside from the workspace, we cleanup here, instead of through the workspace.
@@ -487,7 +485,6 @@ impl TerminalPanel {
),
None => project.create_terminal_shell(working_directory, cx),
})
- .ok()?
.await
.log_err()?;
@@ -781,7 +778,7 @@ impl TerminalPanel {
})?;
let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?;
let terminal = project
- .update(cx, |project, cx| project.create_terminal_task(task, cx))?
+ .update(cx, |project, cx| project.create_terminal_task(task, cx))
.await?;
let result = workspace.update_in(cx, |workspace, window, cx| {
let terminal_view = Box::new(cx.new(|cx| {
@@ -840,7 +837,7 @@ impl TerminalPanel {
})?;
let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?;
let terminal = project
- .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))?
+ .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))
.await;
match terminal {
@@ -914,15 +911,13 @@ impl TerminalPanel {
.timer(Duration::from_millis(50))
.await;
let terminal_panel = terminal_panel.upgrade()?;
- let items = terminal_panel
- .update(cx, |terminal_panel, cx| {
- SerializedItems::WithSplits(serialize_pane_group(
- &terminal_panel.center,
- &terminal_panel.active_pane,
- cx,
- ))
- })
- .ok()?;
+ let items = terminal_panel.update(cx, |terminal_panel, cx| {
+ SerializedItems::WithSplits(serialize_pane_group(
+ &terminal_panel.center,
+ &terminal_panel.active_pane,
+ cx,
+ ))
+ });
cx.background_spawn(
async move {
KEY_VALUE_STORE
@@ -964,7 +959,7 @@ impl TerminalPanel {
let new_terminal = project
.update(cx, |project, cx| {
project.create_terminal_task(spawn_task, cx)
- })?
+ })
.await?;
terminal_to_replace.update_in(cx, |terminal_to_replace, window, cx| {
terminal_to_replace.set_terminal(new_terminal.clone(), window, cx);
@@ -1299,14 +1294,12 @@ async fn wait_for_terminals_tasks(
terminals_for_task: Vec<(usize, Entity<Pane>, Entity<TerminalView>)>,
cx: &mut AsyncApp,
) {
- let pending_tasks = terminals_for_task.iter().filter_map(|(_, _, terminal)| {
- terminal
- .update(cx, |terminal_view, cx| {
- terminal_view
- .terminal()
- .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))
- })
- .ok()
+ let pending_tasks = terminals_for_task.iter().map(|(_, _, terminal)| {
+ terminal.update(cx, |terminal_view, cx| {
+ terminal_view
+ .terminal()
+ .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))
+ })
});
join_all(pending_tasks).await;
}
@@ -362,8 +362,8 @@ fn possible_open_target(
cx.spawn(async move |cx| {
background_fs_checks_task.await.or_else(|| {
for (worktree, worktree_paths_to_check) in worktree_paths_to_check {
- let found_entry = worktree
- .update(cx, |worktree, _| -> Option<OpenTarget> {
+ if let Some(found_entry) =
+ worktree.update(cx, |worktree, _| -> Option<OpenTarget> {
let traversal =
worktree.traverse_from_path(true, true, false, RelPath::empty());
for entry in traversal {
@@ -387,8 +387,7 @@ fn possible_open_target(
}
None
})
- .ok()?;
- if let Some(found_entry) = found_entry {
+ {
return Some(found_entry);
}
}
@@ -1421,7 +1421,7 @@ impl SerializableItem for TerminalView {
.flatten();
let terminal = project
- .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))?
+ .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))
.await?;
cx.update(|window, cx| {
cx.new(|cx| {
@@ -71,9 +71,7 @@ impl FontFamilyCache {
return;
}
- let Ok(text_system) = cx.update(|cx| App::text_system(cx).clone()) else {
- return;
- };
+ let text_system = cx.update(|cx| App::text_system(cx).clone());
let state = self.state.clone();
@@ -198,13 +198,10 @@ impl ActiveToolchain {
.or_else(|| toolchains.toolchains.first())
.cloned();
if let Some(toolchain) = &default_choice {
- let worktree_root_path = project
- .read_with(cx, |this, cx| {
- this.worktree_for_id(worktree_id, cx)
- .map(|worktree| worktree.read(cx).abs_path())
- })
- .ok()
- .flatten()?;
+ let worktree_root_path = project.read_with(cx, |this, cx| {
+ this.worktree_for_id(worktree_id, cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ })?;
workspace::WORKSPACE_DB
.set_toolchain(
workspace_id,
@@ -225,7 +222,6 @@ impl ActiveToolchain {
cx,
)
})
- .ok()?
.await;
}
@@ -207,7 +207,7 @@ impl AddToolchainState {
let toolchain = project
.update(cx, |this, cx| {
this.resolve_toolchain(path.clone(), language_name, cx)
- })?
+ })
.await;
let Ok(toolchain) = toolchain else {
// Go back to the path input state
@@ -240,7 +240,7 @@ impl AddToolchainState {
};
let resolved_toolchain_path = project.read_with(cx, |this, cx| {
this.find_project_path(&toolchain.path.as_ref(), cx)
- })?;
+ });
// Suggest a default scope based on the applicability.
let scope = if let Some(project_path) = resolved_toolchain_path {
@@ -250,8 +250,6 @@ impl AddToolchainState {
this.worktree_for_id(root_path.worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path())
})
- .ok()
- .flatten()
.context("Could not find a worktree with a given worktree ID")?;
ToolchainScope::Subproject(worktree_root_path, root_path.path)
} else {
@@ -610,7 +608,7 @@ impl ToolchainSelector {
language_name.clone(),
cx,
)
- })?
+ })
.await;
workspace
.update_in(cx, |this, window, cx| {
@@ -788,7 +786,6 @@ impl ToolchainSelectorDelegate {
.read_with(cx, |this, _| {
Project::toolchain_metadata(this.languages().clone(), language_name.clone())
})
- .ok()?
.await?;
let relative_path = this
.update(cx, |this, cx| {
@@ -817,7 +814,6 @@ impl ToolchainSelectorDelegate {
cx,
)
})
- .ok()?
.await?;
let pretty_path = {
if relative_path.is_empty() {
@@ -1982,7 +1982,7 @@ pub fn command_interceptor(
let (range, query) = VimCommand::parse_range(&string[1..]);
let action =
match cx.update(|cx| commands(cx).get(cmd_idx)?.parse(&query, &range, cx)) {
- Ok(Some(action)) => action,
+ Some(action) => action,
_ => continue,
};
results.push(CommandInterceptItem {
@@ -852,7 +852,8 @@ impl<T: Item> ItemHandle for Entity<T> {
close_item_task.await?;
pane.update(cx, |pane, _cx| {
pane.nav_history_mut().remove_item(item_id);
- })
+ });
+ anyhow::Ok(())
}
})
.detach_and_log_err(cx);
@@ -248,12 +248,12 @@ impl LanguageServerPrompt {
this.request.take().map(|request| request.respond(ix))
});
- potential_future? // App Closed
+ potential_future
.context("Response already sent")?
.await
.context("Stream already closed")?;
- this.update(cx, |_, cx| cx.emit(DismissEvent))?;
+ this.update(cx, |_, cx| cx.emit(DismissEvent));
anyhow::Ok(())
})
@@ -2099,7 +2099,7 @@ impl Pane {
const DELETED_MESSAGE: &str = "This file has been deleted on disk since you started editing it. Do you want to recreate it?";
- let path_style = project.read_with(cx, |project, cx| project.path_style(cx))?;
+ let path_style = project.read_with(cx, |project, cx| project.path_style(cx));
if save_intent == SaveIntent::Skip {
return Ok(true);
};
@@ -2314,7 +2314,7 @@ impl Pane {
.flatten();
let save_task = if let Some(project_path) = project_path {
let (worktree, path) = project_path.await?;
- let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?;
+ let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
let new_path = ProjectPath { worktree_id, path };
pane.update_in(cx, |pane, window, cx| {
@@ -185,7 +185,7 @@ impl ToastLayer {
cx.background_executor().timer(duration).await;
if let Some(this) = this.upgrade() {
- this.update(cx, |this, cx| this.hide_toast(cx)).ok();
+ this.update(cx, |this, cx| this.hide_toast(cx));
}
});
@@ -566,8 +566,7 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
Ok(Some(paths)) => {
cx.update(|cx| {
open_paths(&paths, app_state, OpenOptions::default(), cx).detach_and_log_err(cx)
- })
- .ok();
+ });
}
Ok(None) => {}
Err(err) => {
@@ -583,8 +582,7 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
})
.ok();
}
- })
- .ok();
+ });
}
},
)
@@ -685,7 +683,7 @@ impl ProjectItemRegistry {
Ok(project_item) => {
let project_item = project_item;
let project_entry_id: Option<ProjectEntryId> =
- project_item.read_with(cx, project::ProjectItem::entry_id)?;
+ project_item.read_with(cx, project::ProjectItem::entry_id);
let build_workspace_item = Box::new(
|pane: &mut Pane, window: &mut Window, cx: &mut Context<Pane>| {
Box::new(cx.new(|cx| {
@@ -1653,11 +1651,9 @@ impl Workspace {
if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) {
paths_to_open = paths.ordered_paths().cloned().collect();
if !paths.is_lexicographically_ordered() {
- project_handle
- .update(cx, |project, cx| {
- project.set_worktrees_reordered(true, cx);
- })
- .log_err();
+ project_handle.update(cx, |project, cx| {
+ project.set_worktrees_reordered(true, cx);
+ });
}
}
@@ -1669,7 +1665,7 @@ impl Workspace {
if let Some((_, project_entry)) = cx
.update(|cx| {
Workspace::project_path_for_path(project_handle.clone(), &path, true, cx)
- })?
+ })
.await
.log_err()
{
@@ -1698,8 +1694,7 @@ impl Workspace {
None
}
})
- })?
- else {
+ }) else {
// We did not find a worktree with a given path, but that's whatever.
continue;
};
@@ -1710,7 +1705,7 @@ impl Workspace {
project_handle
.update(cx, |this, cx| {
this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx)
- })?
+ })
.await;
}
if let Some(workspace) = serialized_workspace.as_ref() {
@@ -1720,7 +1715,7 @@ impl Workspace {
this.add_toolchain(toolchain.clone(), scope.clone(), cx);
}
}
- })?;
+ });
}
let window = if let Some(window) = requesting_window {
@@ -1770,7 +1765,7 @@ impl Workspace {
};
// Use the serialized workspace to construct the new window
- let mut options = cx.update(|cx| (app_state.build_window_options)(display, cx))?;
+ let mut options = cx.update(|cx| (app_state.build_window_options)(display, cx));
options.window_bounds = window_bounds;
let centered_layout = serialized_workspace
.as_ref()
@@ -2501,7 +2496,7 @@ impl Workspace {
if let Some(active_call) = active_call
&& workspace_count == 1
- && active_call.read_with(cx, |call, _| call.room().is_some())?
+ && active_call.read_with(cx, |call, _| call.room().is_some())
{
if close_intent == CloseIntent::CloseWindow {
let answer = cx.update(|window, cx| {
@@ -2518,7 +2513,7 @@ impl Workspace {
return anyhow::Ok(false);
} else {
active_call
- .update(cx, |call, cx| call.hang_up(cx))?
+ .update(cx, |call, cx| call.hang_up(cx))
.await
.log_err();
}
@@ -2764,7 +2759,7 @@ impl Workspace {
},
cx,
)
- })?
+ })
.await?;
Ok(())
})
@@ -2969,7 +2964,7 @@ impl Workspace {
});
cx.spawn(async move |cx| {
let (worktree, path) = entry.await?;
- let worktree_id = worktree.read_with(cx, |t, _| t.id())?;
+ let worktree_id = worktree.read_with(cx, |t, _| t.id());
Ok((
worktree,
ProjectPath {
@@ -5034,7 +5029,7 @@ impl Workspace {
.get(&leader_id.into())
.context("stopped following")?;
anyhow::Ok(state.pane().clone())
- })??;
+ })?;
let existing_item = pane.update_in(cx, |pane, window, cx| {
let client = this.read(cx).client().clone();
pane.items().find_map(|item| {
@@ -5103,7 +5098,8 @@ impl Workspace {
);
Some(())
- })?;
+ })
+ .context("no follower state")?;
Ok(())
}
@@ -5861,7 +5857,7 @@ impl Workspace {
breakpoint_store
.with_serialized_breakpoints(serialized_workspace.breakpoints, cx)
})
- })?
+ })
.await;
// Clean up all the items that have _not_ been loaded. Our ItemIds aren't stable. That means
@@ -6074,7 +6070,7 @@ impl Workspace {
let clear_task = persistence::DB.clear_trusted_worktrees();
cx.spawn(async move |_, cx| {
if clear_task.await.log_err().is_some() {
- cx.update(|cx| reload(cx)).ok();
+ cx.update(|cx| reload(cx));
}
})
.detach();
@@ -7500,7 +7496,7 @@ impl WorkspaceStore {
});
Ok(response)
- })?
+ })
}
async fn handle_update_followers(
@@ -7524,7 +7520,7 @@ impl WorkspaceStore {
.is_ok()
});
Ok(())
- })?
+ })
}
pub fn workspaces(&self) -> &HashSet<WindowHandle<Workspace>> {
@@ -7650,7 +7646,7 @@ async fn join_channel_internal(
None
};
(should_prompt, open_room)
- })?;
+ });
if let Some(room) = open_room {
let task = room.update(cx, |room, cx| {
@@ -7659,7 +7655,7 @@ async fn join_channel_internal(
}
None
- })?;
+ });
if let Some(task) = task {
task.await?;
}
@@ -7688,7 +7684,7 @@ async fn join_channel_internal(
}
}
- let client = cx.update(|cx| active_call.read(cx).client())?;
+ let client = cx.update(|cx| active_call.read(cx).client());
let mut client_status = client.status();
@@ -7719,14 +7715,14 @@ async fn join_channel_internal(
let room = active_call
.update(cx, |active_call, cx| {
active_call.join_channel(channel_id, cx)
- })?
+ })
.await?;
let Some(room) = room else {
return anyhow::Ok(true);
};
- room.update(cx, |room, _| room.room_update_completed())?
+ room.update(cx, |room, _| room.room_update_completed())
.await;
let task = room.update(cx, |room, cx| {
@@ -7768,7 +7764,7 @@ async fn join_channel_internal(
}
None
- })?;
+ });
if let Some(task) = task {
task.await?;
return anyhow::Ok(true);
@@ -7807,14 +7803,13 @@ pub fn join_channel(
None,
cx,
)
- })?
+ })
.await?;
if result.is_ok() {
cx.update(|cx| {
cx.dispatch_action(&OpenChannelNotes);
- })
- .log_err();
+ });
}
active_window = Some(window_handle);
@@ -7872,7 +7867,7 @@ pub async fn get_any_active_workspace(
// find an existing workspace to focus and show call controls
let active_window = activate_any_workspace_window(&mut cx);
if active_window.is_none() {
- cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx))?
+ cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx))
.await?;
}
activate_any_workspace_window(&mut cx).context("could not open zed")
@@ -7897,8 +7892,6 @@ fn activate_any_workspace_window(cx: &mut AsyncApp) -> Option<WindowHandle<Works
}
None
})
- .ok()
- .flatten()
}
pub fn local_workspace_windows(cx: &App) -> Vec<WindowHandle<Workspace>> {
@@ -7972,7 +7965,7 @@ pub fn open_paths(
}
}
}
- })?;
+ });
if open_options.open_new_workspace.is_none()
&& (existing.is_none() || open_options.prefer_focused_window)
@@ -8002,7 +7995,7 @@ pub fn open_paths(
break;
}
}
- })?;
+ });
}
}
@@ -8042,7 +8035,7 @@ pub fn open_paths(
None,
cx,
)
- })?
+ })
.await
};
@@ -8159,7 +8152,7 @@ pub fn open_remote_project_with_new_connection(
delegate,
cx,
)
- })?
+ })
.await?
{
Some(result) => result,
@@ -8177,7 +8170,7 @@ pub fn open_remote_project_with_new_connection(
true,
cx,
)
- })?;
+ });
open_remote_project_inner(
project,
@@ -8244,7 +8237,7 @@ async fn open_remote_project_inner(
};
this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx)
- })?
+ })
.await;
}
let mut project_paths_to_open = vec![];
@@ -8252,7 +8245,7 @@ async fn open_remote_project_inner(
for path in paths {
let result = cx
- .update(|cx| Workspace::project_path_for_path(project.clone(), &path, true, cx))?
+ .update(|cx| Workspace::project_path_for_path(project.clone(), &path, true, cx))
.await;
match result {
Ok((_, project_path)) => {
@@ -8369,9 +8362,9 @@ pub fn join_in_room_project(
let workspace = if let Some(existing_workspace) = existing_workspace {
existing_workspace
} else {
- let active_call = cx.update(|cx| ActiveCall::global(cx))?;
+ let active_call = cx.update(|cx| ActiveCall::global(cx));
let room = active_call
- .read_with(cx, |call, _| call.room().cloned())?
+ .read_with(cx, |call, _| call.room().cloned())
.context("not in a call")?;
let project = room
.update(cx, |room, cx| {
@@ -8381,7 +8374,7 @@ pub fn join_in_room_project(
app_state.fs.clone(),
cx,
)
- })?
+ })
.await?;
let window_bounds_override = window_bounds_env_override();
@@ -8393,7 +8386,7 @@ pub fn join_in_room_project(
Workspace::new(Default::default(), project, app_state.clone(), window, cx)
})
})
- })??
+ })?
};
workspace.update(cx, |workspace, window, cx| {
@@ -8459,7 +8452,7 @@ pub fn reload(cx: &mut App) {
if let Some(prompt) = prompt {
let answer = prompt.await?;
if answer != 0 {
- return Ok(());
+ return anyhow::Ok(());
}
}
@@ -8469,10 +8462,11 @@ pub fn reload(cx: &mut App) {
workspace.prepare_to_close(CloseIntent::Quit, window, cx)
}) && !should_close.await?
{
- return Ok(());
+ return anyhow::Ok(());
}
}
- cx.update(|cx| cx.restart())
+ cx.update(|cx| cx.restart());
+ anyhow::Ok(())
})
.detach_and_log_err(cx);
}
@@ -11600,7 +11594,7 @@ mod tests {
cx: &mut App,
) -> Option<Task<anyhow::Result<Entity<Self>>>> {
if path.path.extension().unwrap() == "png" {
- Some(cx.spawn(async move |cx| cx.new(|_| TestPngItem {})))
+ Some(cx.spawn(async move |cx| Ok(cx.new(|_| TestPngItem {}))))
} else {
None
}
@@ -11675,7 +11669,7 @@ mod tests {
cx: &mut App,
) -> Option<Task<anyhow::Result<Entity<Self>>>> {
if path.path.extension().unwrap() == "ipynb" {
- Some(cx.spawn(async move |cx| cx.new(|_| TestIpynbItem {})))
+ Some(cx.spawn(async move |cx| Ok(cx.new(|_| TestIpynbItem {}))))
} else {
None
}
@@ -399,7 +399,7 @@ impl Worktree {
None
};
- cx.new(move |cx: &mut Context<Worktree>| {
+ Ok(cx.new(move |cx: &mut Context<Worktree>| {
let mut snapshot = LocalSnapshot {
ignores_by_parent_abs_path: Default::default(),
global_gitignore: Default::default(),
@@ -478,7 +478,7 @@ impl Worktree {
};
worktree.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
Worktree::Local(worktree)
- })
+ }))
}
pub fn remote(
@@ -931,7 +931,7 @@ impl Worktree {
cx,
),
))
- })??;
+ })?;
Ok(proto::ProjectEntryResponse {
entry: match &entry.await? {
CreatedEntry::Included(entry) => Some(entry.into()),
@@ -955,8 +955,9 @@ impl Worktree {
cx,
),
)
- })?;
- task.context("invalid entry")?.await?;
+ });
+ task.ok_or_else(|| anyhow::anyhow!("invalid entry"))?
+ .await?;
Ok(proto::ProjectEntryResponse {
entry: None,
worktree_scan_id: scan_id as u64,
@@ -970,9 +971,10 @@ impl Worktree {
) -> Result<proto::ExpandProjectEntryResponse> {
let task = this.update(&mut cx, |this, cx| {
this.expand_entry(ProjectEntryId::from_proto(request.entry_id), cx)
- })?;
- task.context("no such entry")?.await?;
- let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
+ });
+ task.ok_or_else(|| anyhow::anyhow!("no such entry"))?
+ .await?;
+ let scan_id = this.read_with(&cx, |this, _| this.scan_id());
Ok(proto::ExpandProjectEntryResponse {
worktree_scan_id: scan_id as u64,
})
@@ -985,9 +987,10 @@ impl Worktree {
) -> Result<proto::ExpandAllForProjectEntryResponse> {
let task = this.update(&mut cx, |this, cx| {
this.expand_all_for_entry(ProjectEntryId::from_proto(request.entry_id), cx)
- })?;
- task.context("no such entry")?.await?;
- let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
+ });
+ task.ok_or_else(|| anyhow::anyhow!("no such entry"))?
+ .await?;
+ let scan_id = this.read_with(&cx, |this, _| this.scan_id());
Ok(proto::ExpandAllForProjectEntryResponse {
worktree_scan_id: scan_id as u64,
})
@@ -1137,8 +1140,7 @@ impl LocalWorktree {
this.update_abs_path_and_refresh(new_path, cx);
}
}
- })
- .ok();
+ });
}
});
self._background_scanner_tasks = vec![background_scanner, scan_state_updater];
@@ -1705,7 +1707,7 @@ impl LocalWorktree {
.refresh_entries_for_paths(paths_to_refresh.clone()),
)
},
- )??;
+ )?;
cx.background_spawn(async move {
refresh.next().await;
@@ -1715,12 +1717,12 @@ impl LocalWorktree {
.log_err();
let this = this.upgrade().with_context(|| "Dropped worktree")?;
- cx.read_entity(&this, |this, _| {
+ Ok(cx.read_entity(&this, |this, _| {
paths_to_refresh
.iter()
.filter_map(|path| Some(this.entry_for_path(path)?.id))
.collect()
- })
+ }))
})
}
@@ -31,15 +31,13 @@ fn main() {
)
.await
.expect("Worktree initialization to succeed");
- let did_finish_scan = worktree
- .update(cx, |this, _| this.as_local().unwrap().scan_complete())
- .unwrap();
+ let did_finish_scan =
+ worktree.update(cx, |this, _| this.as_local().unwrap().scan_complete());
let start = std::time::Instant::now();
did_finish_scan.await;
let elapsed = start.elapsed();
- let (files, directories) = worktree
- .read_with(cx, |this, _| (this.file_count(), this.dir_count()))
- .unwrap();
+ let (files, directories) =
+ worktree.read_with(cx, |this, _| (this.file_count(), this.dir_count()));
println!(
"{:?} for {directories} directories and {files} files",
elapsed
@@ -128,7 +128,7 @@ fn files_not_created_on_launch(errors: HashMap<io::ErrorKind, Vec<&Path>>) {
}
fn fail_to_open_window_async(e: anyhow::Error, cx: &mut AsyncApp) {
- cx.update(|cx| fail_to_open_window(e, cx)).log_err();
+ cx.update(|cx| fail_to_open_window(e, cx));
}
fn fail_to_open_window(e: anyhow::Error, _cx: &mut App) {
@@ -797,8 +797,7 @@ fn main() {
if let Some(request) = OpenRequest::parse(urls, cx).log_err() {
handle_open_request(request, app_state.clone(), cx);
}
- })
- .ok();
+ });
}
})
.detach();
@@ -873,7 +872,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
history_store
.update(&mut cx.clone(), |store, cx| {
store.save_thread(session_id.clone(), db_thread, cx)
- })?
+ })
.await?;
let thread_metadata = agent::DbThreadMetadata {
@@ -1125,7 +1124,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
None,
cx,
)
- })?
+ })
.await?;
}
@@ -1223,9 +1222,8 @@ async fn installation_id() -> Result<IdType> {
async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp) -> Result<()> {
if let Some(locations) = restorable_workspace_locations(cx, &app_state).await {
- let use_system_window_tabs = cx
- .update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs)
- .unwrap_or(false);
+ let use_system_window_tabs =
+ cx.update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs);
let mut results: Vec<Result<(), Error>> = Vec::new();
let mut tasks = Vec::new();
@@ -1241,7 +1239,7 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
workspace::OpenOptions::default(),
cx,
)
- })?;
+ });
open_task.await.map(|_| ())
});
@@ -1259,7 +1257,7 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
cx.update(|cx| {
RemoteSettings::get_global(cx)
.fill_connection_options_from_settings(options)
- })?;
+ });
}
let task = cx.spawn(async move |cx| {
recent_projects::open_remote_project(
@@ -1300,24 +1298,20 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
};
// Try to find an active workspace to show the toast
- let toast_shown = cx
- .update(|cx| {
- if let Some(window) = cx.active_window()
- && let Some(workspace) = window.downcast::<Workspace>()
- {
- workspace
- .update(cx, |workspace, _, cx| {
- workspace.show_toast(
- Toast::new(NotificationId::unique::<()>(), message),
- cx,
- )
- })
- .ok();
- return true;
- }
- false
- })
- .unwrap_or(false);
+ let toast_shown = cx.update(|cx| {
+ if let Some(window) = cx.active_window()
+ && let Some(workspace) = window.downcast::<Workspace>()
+ {
+ workspace
+ .update(cx, |workspace, _, cx| {
+ workspace
+ .show_toast(Toast::new(NotificationId::unique::<()>(), message), cx)
+ })
+ .ok();
+ return true;
+ }
+ false
+ });
// If we couldn't show a toast (no windows opened successfully),
// we've already logged the errors above, so the user can check logs
@@ -1328,7 +1322,7 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
}
}
} else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
- cx.update(|cx| show_onboarding_view(app_state, cx))?.await?;
+ cx.update(|cx| show_onboarding_view(app_state, cx)).await?;
} else {
cx.update(|cx| {
workspace::open_new(
@@ -1345,7 +1339,7 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
}
},
)
- })?
+ })
.await?;
}
@@ -1356,21 +1350,17 @@ pub(crate) async fn restorable_workspace_locations(
cx: &mut AsyncApp,
app_state: &Arc<AppState>,
) -> Option<Vec<(SerializedWorkspaceLocation, PathList)>> {
- let mut restore_behavior = cx
- .update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup)
- .ok()?;
+ let mut restore_behavior = cx.update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup);
let session_handle = app_state.session.clone();
- let (last_session_id, last_session_window_stack) = cx
- .update(|cx| {
- let session = session_handle.read(cx);
+ let (last_session_id, last_session_window_stack) = cx.update(|cx| {
+ let session = session_handle.read(cx);
- (
- session.last_session_id().map(|id| id.to_string()),
- session.last_session_window_stack(),
- )
- })
- .ok()?;
+ (
+ session.last_session_id().map(|id| id.to_string()),
+ session.last_session_window_stack(),
+ )
+ });
if last_session_id.is_none()
&& matches!(
@@ -1582,27 +1572,26 @@ fn load_user_themes_in_background(fs: Arc<dyn fs::Fs>, cx: &mut App) {
cx.spawn({
let fs = fs.clone();
async move |cx| {
- if let Some(theme_registry) = cx.update(|cx| ThemeRegistry::global(cx)).log_err() {
- let themes_dir = paths::themes_dir().as_ref();
- match fs
- .metadata(themes_dir)
- .await
- .ok()
- .flatten()
- .map(|m| m.is_dir)
- {
- Some(is_dir) => {
- anyhow::ensure!(is_dir, "Themes dir path {themes_dir:?} is not a directory")
- }
- None => {
- fs.create_dir(themes_dir).await.with_context(|| {
- format!("Failed to create themes dir at path {themes_dir:?}")
- })?;
- }
+ let theme_registry = cx.update(|cx| ThemeRegistry::global(cx));
+ let themes_dir = paths::themes_dir().as_ref();
+ match fs
+ .metadata(themes_dir)
+ .await
+ .ok()
+ .flatten()
+ .map(|m| m.is_dir)
+ {
+ Some(is_dir) => {
+ anyhow::ensure!(is_dir, "Themes dir path {themes_dir:?} is not a directory")
+ }
+ None => {
+ fs.create_dir(themes_dir).await.with_context(|| {
+ format!("Failed to create themes dir at path {themes_dir:?}")
+ })?;
}
- theme_registry.load_user_themes(themes_dir, fs).await?;
- cx.update(GlobalTheme::reload_theme)?;
}
+ theme_registry.load_user_themes(themes_dir, fs).await?;
+ cx.update(GlobalTheme::reload_theme);
anyhow::Ok(())
}
})
@@ -1619,15 +1608,16 @@ fn watch_themes(fs: Arc<dyn fs::Fs>, cx: &mut App) {
while let Some(paths) = events.next().await {
for event in paths {
- if fs.metadata(&event.path).await.ok().flatten().is_some()
- && let Some(theme_registry) =
- cx.update(|cx| ThemeRegistry::global(cx)).log_err()
- && let Some(()) = theme_registry
+ if fs.metadata(&event.path).await.ok().flatten().is_some() {
+ let theme_registry = cx.update(|cx| ThemeRegistry::global(cx));
+ if theme_registry
.load_user_theme(&event.path, fs.clone())
.await
.log_err()
- {
- cx.update(GlobalTheme::reload_theme).log_err();
+ .is_some()
+ {
+ cx.update(GlobalTheme::reload_theme);
+ }
}
}
}
@@ -92,12 +92,13 @@ impl TestWorkspace {
false,
cx,
)
- })?;
+ });
- let add_worktree_task = project.update(cx, |project, cx| {
- project.find_or_create_worktree(project_path, true, cx)
- })?;
- add_worktree_task.await?;
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(project_path, true, cx)
+ })
+ .await?;
let bounds = Bounds {
origin: point(px(0.0), px(0.0)),
@@ -118,7 +119,7 @@ impl TestWorkspace {
})
},
)
- })??;
+ })?;
cx.background_executor()
.timer(std::time::Duration::from_millis(100))
@@ -257,7 +258,7 @@ fn main() {
Ok(ws) => ws,
Err(e) => {
log::error!("Failed to create workspace: {}", e);
- cx.update(|cx| cx.quit()).ok();
+ cx.update(|cx| cx.quit());
std::process::exit(1);
}
};
@@ -265,19 +266,19 @@ fn main() {
// Set up project panel
if let Err(e) = setup_project_panel(&workspace, &mut cx).await {
log::error!("Failed to setup project panel: {}", e);
- cx.update(|cx| cx.quit()).ok();
+ cx.update(|cx| cx.quit());
std::process::exit(1);
}
// Open main.rs in the editor
if let Err(e) = open_file(&workspace, "src/main.rs", &mut cx).await {
log::error!("Failed to open file: {}", e);
- cx.update(|cx| cx.quit()).ok();
+ cx.update(|cx| cx.quit());
std::process::exit(1);
}
// Request a window refresh to ensure all pending effects are processed
- cx.refresh().ok();
+ cx.refresh();
cx.background_executor()
.timer(std::time::Duration::from_millis(500))
.await;
@@ -306,11 +307,10 @@ fn main() {
ws.close_panel::<ProjectPanel>(window, cx);
})
.ok();
- })
- .ok();
+ });
// Refresh and wait for panel to close
- cx.refresh().ok();
+ cx.refresh();
cx.background_executor()
.timer(std::time::Duration::from_millis(100))
.await;
@@ -341,11 +341,11 @@ fn main() {
}
if any_failed {
- cx.update(|cx| cx.quit()).ok();
+ cx.update(|cx| cx.quit());
std::process::exit(1);
}
- cx.update(|cx| cx.quit()).ok();
+ cx.update(|cx| cx.quit());
})
.detach();
});
@@ -371,7 +371,7 @@ async fn run_visual_test(
update_baseline: bool,
) -> Result<TestResult> {
// Capture the screenshot using direct texture capture (no ScreenCaptureKit needed)
- let screenshot = cx.update(|cx| capture_screenshot(window, cx))??;
+ let screenshot = cx.update(|cx| capture_screenshot(window, cx))?;
// Get paths
let baseline_path = get_baseline_path(test_name);
@@ -731,27 +731,28 @@ async fn run_agent_thread_view_test(
false,
cx,
)
- })?;
+ });
// Add the test directory as a worktree
- let add_worktree_task = project.update(cx, |project, cx| {
- project.find_or_create_worktree(&project_path, true, cx)
- })?;
- let (worktree, _) = add_worktree_task.await?;
+ let (worktree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(&project_path, true, cx)
+ })
+ .await?;
// Wait for worktree to scan and find the image file
- let worktree_name = worktree.read_with(cx, |wt, _| wt.root_name_str().to_string())?;
+ let worktree_name = worktree.read_with(cx, |wt, _| wt.root_name_str().to_string());
cx.background_executor()
.timer(std::time::Duration::from_millis(100))
.await;
// Create the necessary entities for the ReadFileTool
- let action_log = cx.new(|_| action_log::ActionLog::new(project.clone()))?;
- let context_server_registry = cx
- .new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx))?;
+ let action_log = cx.new(|_| action_log::ActionLog::new(project.clone()));
+ let context_server_registry =
+ cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let fake_model = Arc::new(language_model::fake_provider::FakeLanguageModel::default());
- let project_context = cx.new(|_| prompt_store::ProjectContext::default())?;
+ let project_context = cx.new(|_| prompt_store::ProjectContext::default());
// Create the agent Thread
let thread = cx.new(|cx| {
@@ -763,7 +764,7 @@ async fn run_agent_thread_view_test(
Some(fake_model),
cx,
)
- })?;
+ });
// Create the ReadFileTool
let tool = Arc::new(agent::ReadFileTool::new(
@@ -782,10 +783,9 @@ async fn run_agent_thread_view_test(
start_line: None,
end_line: None,
};
- let run_task = cx.update(|cx| tool.clone().run(input, event_stream, cx))?;
-
// The tool runs async - wait for it
- run_task.await?;
+ cx.update(|cx| tool.clone().run(input, event_stream, cx))
+ .await?;
// Collect the events from the tool execution
let mut tool_content: Vec<acp::ToolCallContent> = Vec::new();
@@ -845,7 +845,7 @@ async fn run_agent_thread_view_test(
cx.new(|cx| Workspace::new(None, project.clone(), app_state.clone(), window, cx))
},
)
- })??;
+ })?;
cx.background_executor()
.timer(std::time::Duration::from_millis(100))
@@ -873,7 +873,7 @@ async fn run_agent_thread_view_test(
// Inject the stub server and open the stub thread
workspace_window.update(cx, |_workspace, window, cx| {
- panel.update(cx, |panel, cx| {
+ panel.update(cx, |panel: &mut AgentPanel, cx| {
panel.open_external_thread_with_server(stub_agent.clone(), window, cx);
});
})?;
@@ -884,16 +884,20 @@ async fn run_agent_thread_view_test(
// Get the thread view and send a message
let thread_view = panel
- .read_with(cx, |panel, _| panel.active_thread_view_for_tests().cloned())?
+ .read_with(cx, |panel, _| panel.active_thread_view_for_tests().cloned())
.ok_or_else(|| anyhow::anyhow!("No active thread view"))?;
let thread = thread_view
- .update(cx, |view, _cx| view.thread().cloned())?
+ .update(cx, |view: &mut agent_ui::acp::AcpThreadView, _cx| {
+ view.thread().cloned()
+ })
.ok_or_else(|| anyhow::anyhow!("Thread not available"))?;
// Send the message to trigger the image response
thread
- .update(cx, |thread, cx| thread.send_raw("Show me the Zed logo", cx))?
+ .update(cx, |thread: &mut acp_thread::AcpThread, cx| {
+ thread.send_raw("Show me the Zed logo", cx)
+ })
.await?;
cx.background_executor()
@@ -902,7 +906,7 @@ async fn run_agent_thread_view_test(
// Get the tool call ID for expanding later
let tool_call_id = thread
- .update(cx, |thread, _cx| {
+ .update(cx, |thread: &mut acp_thread::AcpThread, _cx| {
thread.entries().iter().find_map(|entry| {
if let acp_thread::AgentThreadEntry::ToolCall(tool_call) = entry {
Some(tool_call.id.clone())
@@ -910,7 +914,7 @@ async fn run_agent_thread_view_test(
None
}
})
- })?
+ })
.ok_or_else(|| anyhow::anyhow!("Expected a ToolCall entry in thread for visual test"))?;
// Refresh window for collapsed state
@@ -935,9 +939,9 @@ async fn run_agent_thread_view_test(
.await?;
// Now expand the tool call so its content (the image) is visible
- thread_view.update(cx, |view, cx| {
+ thread_view.update(cx, |view: &mut agent_ui::acp::AcpThreadView, cx| {
view.expand_tool_call(tool_call_id, cx);
- })?;
+ });
cx.background_executor()
.timer(std::time::Duration::from_millis(100))
@@ -34,8 +34,8 @@ use git_ui::project_diff::ProjectDiffToolbar;
use gpui::{
Action, App, AppContext as _, AsyncWindowContext, Context, DismissEvent, Element, Entity,
Focusable, KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString,
- Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowKind, WindowOptions, actions,
- image_cache, point, px, retain_all,
+ Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowHandle, WindowKind,
+ WindowOptions, actions, image_cache, point, px, retain_all,
};
use image_viewer::ImageInfo;
use language::Capability;
@@ -159,11 +159,7 @@ pub fn init(cx: &mut App) {
cx.on_action(|_: &RestoreBanner, cx| title_bar::restore_banner(cx));
let flag = cx.wait_for_flag::<PanicFeatureFlag>();
cx.spawn(async |cx| {
- if cx
- .update(|cx| ReleaseChannel::global(cx) == ReleaseChannel::Dev)
- .unwrap_or_default()
- || flag.await
- {
+ if cx.update(|cx| ReleaseChannel::global(cx) == ReleaseChannel::Dev) || flag.await {
cx.update(|cx| {
cx.on_action(|_: &TestPanic, _| panic!("Ran the TestPanic action"))
.on_action(|_: &TestCrash, _| {
@@ -174,8 +170,7 @@ pub fn init(cx: &mut App) {
puts(0xabad1d3a as *const i8);
}
});
- })
- .ok();
+ });
};
})
.detach();
@@ -559,8 +554,7 @@ fn initialize_file_watcher(window: &mut Window, cx: &mut Context<Workspace>) {
cx.update(|cx| {
cx.open_url("https://zed.dev/docs/linux#could-not-start-inotify");
cx.quit();
- })
- .ok();
+ });
}
})
.detach()
@@ -590,8 +584,7 @@ fn initialize_file_watcher(window: &mut Window, cx: &mut Context<Workspace>) {
cx.update(|cx| {
cx.open_url("https://zed.dev/docs/windows");
cx.quit()
- })
- .ok();
+ });
}
})
.detach()
@@ -641,8 +634,7 @@ fn show_software_emulation_warning_if_needed(
cx.update(|cx| {
cx.open_url(open_url);
cx.quit();
- })
- .ok();
+ });
}
})
.detach()
@@ -1282,8 +1274,7 @@ fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context<Workspace>) {
let content = format!("{}\n{}", message, detail.as_deref().unwrap_or(""));
cx.update(|cx| {
cx.write_to_clipboard(gpui::ClipboardItem::new_string(content));
- })
- .ok();
+ });
}
})
.detach();
@@ -1307,19 +1298,18 @@ fn quit(_: &Quit, cx: &mut App) {
let should_confirm = WorkspaceSettings::get_global(cx).confirm_quit;
cx.spawn(async move |cx| {
- let mut workspace_windows = cx.update(|cx| {
+ let mut workspace_windows: Vec<WindowHandle<Workspace>> = cx.update(|cx| {
cx.windows()
.into_iter()
.filter_map(|window| window.downcast::<Workspace>())
.collect::<Vec<_>>()
- })?;
+ });
// If multiple windows have unsaved changes, and need a save prompt,
// prompt in the active window before switching to a different window.
cx.update(|cx| {
workspace_windows.sort_by_key(|window| window.is_active(cx) == Some(false));
- })
- .log_err();
+ });
if should_confirm && let Some(workspace) = workspace_windows.first() {
let answer = workspace
@@ -1351,12 +1341,13 @@ fn quit(_: &Quit, cx: &mut App) {
workspace.prepare_to_close(CloseIntent::Quit, window, cx)
})
.log_err()
- && !should_close.await?
{
- return Ok(());
+ if !should_close.await? {
+ return Ok(());
+ }
}
}
- cx.update(|cx| cx.quit())?;
+ cx.update(|cx| cx.quit());
anyhow::Ok(())
})
.detach_and_log_err(cx);
@@ -1565,7 +1556,7 @@ pub fn handle_settings_file_changes(
Either::Right(content) => (content, true),
};
- let result = cx.update_global(|store: &mut SettingsStore, cx| {
+ cx.update_global(|store: &mut SettingsStore, cx| {
let result = if is_user {
store.set_user_settings(&content, cx)
} else {
@@ -1584,10 +1575,6 @@ pub fn handle_settings_file_changes(
}
cx.refresh_windows();
});
-
- if result.is_err() {
- break; // App dropped
- }
}
})
.detach();
@@ -1699,8 +1686,7 @@ pub fn handle_keymap_file_changes(
show_keymap_file_json_error(notification_id.clone(), &error, cx)
}
}
- })
- .ok();
+ });
}
})
.detach();
@@ -1791,8 +1777,7 @@ fn show_markdown_app_notification<F>(
.primary_on_click_arc(primary_button_on_click)
})
})
- })
- .ok();
+ });
})
.detach();
}
@@ -1929,9 +1914,9 @@ fn open_local_file(
let file_exists = {
let full_path = worktree.read_with(cx, |tree, _| {
tree.abs_path().join(settings_relative_path.as_std_path())
- })?;
+ });
- let fs = project.read_with(cx, |project, _| project.fs().clone())?;
+ let fs = project.read_with(cx, |project, _| project.fs().clone());
fs.metadata(&full_path)
.await
@@ -1942,23 +1927,23 @@ fn open_local_file(
if !file_exists {
if let Some(dir_path) = settings_relative_path.parent()
- && worktree.read_with(cx, |tree, _| tree.entry_for_path(dir_path).is_none())?
+ && worktree.read_with(cx, |tree, _| tree.entry_for_path(dir_path).is_none())
{
project
.update(cx, |project, cx| {
project.create_entry((tree_id, dir_path), true, cx)
- })?
+ })
.await
.context("worktree was removed")?;
}
if worktree.read_with(cx, |tree, _| {
tree.entry_for_path(settings_relative_path).is_none()
- })? {
+ }) {
project
.update(cx, |project, cx| {
project.create_entry((tree_id, settings_relative_path), false, cx)
- })?
+ })
.await
.context("worktree was removed")?;
}
@@ -341,7 +341,7 @@ pub async fn open_paths_with_positions(
.collect::<Vec<_>>();
let (workspace, mut items) = cx
- .update(|cx| workspace::open_paths(&paths, app_state, open_options, cx))?
+ .update(|cx| workspace::open_paths(&paths, app_state, open_options, cx))
.await?;
for diff_pair in diff_paths {
@@ -349,9 +349,10 @@ pub async fn open_paths_with_positions(
let new_path = Path::new(&diff_pair[1]).canonicalize()?;
if let Ok(diff_view) = workspace.update(cx, |workspace, window, cx| {
FileDiffView::open(old_path, new_path, workspace, window, cx)
- }) && let Some(diff_view) = diff_view.await.log_err()
- {
- items.push(Some(Ok(Box::new(diff_view))))
+ }) {
+ if let Some(diff_view) = diff_view.await.log_err() {
+ items.push(Some(Ok(Box::new(diff_view))))
+ }
}
}
@@ -421,8 +422,7 @@ pub async fn handle_cli_connection(
responses.send(CliResponse::Exit { status: 1 }).log_err();
}
};
- })
- .log_err();
+ });
return;
}
@@ -476,8 +476,7 @@ async fn open_workspaces(
if grouped_locations.is_empty() {
// If we have no paths to open, show the welcome screen if this is the first launch
if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
- cx.update(|cx| show_onboarding_view(app_state, cx).detach())
- .log_err();
+ cx.update(|cx| show_onboarding_view(app_state, cx).detach());
}
// If not the first launch, show an empty window with empty editor
else {
@@ -490,8 +489,7 @@ async fn open_workspaces(
Editor::new_file(workspace, &Default::default(), window, cx)
})
.detach();
- })
- .log_err();
+ });
}
} else {
// If there are paths to open, open a workspace for each grouping of paths
@@ -529,7 +527,7 @@ async fn open_workspaces(
cx.update(|cx| {
RemoteSettings::get_global(cx)
.fill_connection_options_from_settings(options)
- })?;
+ });
}
cx.spawn(async move |cx| {
open_remote_project(
@@ -571,9 +569,7 @@ async fn open_local_workspace(
let (open_new_workspace, replace_window) = if reuse {
(
Some(true),
- cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next())
- .ok()
- .flatten(),
+ cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next()),
)
} else {
(open_new_workspace, None)
@@ -637,14 +633,14 @@ async fn open_local_workspace(
if wait {
let (release_tx, release_rx) = oneshot::channel();
item_release_futures.push(release_rx);
- subscriptions.push(cx.update(|cx| {
+ subscriptions.push(Ok(cx.update(|cx| {
item.on_release(
cx,
Box::new(move |_| {
release_tx.send(()).ok();
}),
)
- }));
+ })));
}
}
Some(Err(err)) => {