Detailed changes
@@ -824,6 +824,7 @@ module_inception = { level = "deny" }
question_mark = { level = "deny" }
redundant_closure = { level = "deny" }
declare_interior_mutable_const = { level = "deny" }
+needless_borrow = { level = "warn" }
# Individual rules that have violations in the codebase:
type_complexity = "allow"
# We often return trait objects from `new` functions.
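For context on the hunks that follow: `clippy::needless_borrow` flags a `&` on an expression that the compiler would dereference again anyway (for example passing `&x` where `x` is already a reference). A minimal illustrative sketch of the pattern, not taken from this diff:

```rust
fn label(name: &str) -> String {
    format!("[{name}]")
}

fn main() {
    let name: &str = "needless_borrow";
    // `name` is already a `&str`; the extra borrow below is redundant,
    // so clippy suggests `label(name)` instead.
    let redundant = label(&name);
    // The fix applied throughout this diff: drop the extra `&`.
    let fixed = label(name);
    assert_eq!(redundant, fixed);
}
```

Every hunk below applies this same mechanical fix: the `-` line borrows a value that is already a reference, and the `+` line passes it directly.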
@@ -485,7 +485,7 @@ impl ContentBlock {
}
fn resource_link_md(uri: &str) -> String {
- if let Some(uri) = MentionUri::parse(&uri).log_err() {
+ if let Some(uri) = MentionUri::parse(uri).log_err() {
uri.as_link().to_string()
} else {
uri.to_string()
@@ -1416,7 +1416,7 @@ impl AcpThread {
fn user_message(&self, id: &UserMessageId) -> Option<&UserMessage> {
self.entries.iter().find_map(|entry| {
if let AgentThreadEntry::UserMessage(message) = entry {
- if message.id.as_ref() == Some(&id) {
+ if message.id.as_ref() == Some(id) {
Some(message)
} else {
None
@@ -1430,7 +1430,7 @@ impl AcpThread {
fn user_message_mut(&mut self, id: &UserMessageId) -> Option<(usize, &mut UserMessage)> {
self.entries.iter_mut().enumerate().find_map(|(ix, entry)| {
if let AgentThreadEntry::UserMessage(message) = entry {
- if message.id.as_ref() == Some(&id) {
+ if message.id.as_ref() == Some(id) {
Some((ix, message))
} else {
None
@@ -2356,7 +2356,7 @@ mod tests {
fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) {
let sessions = self.sessions.lock();
- let thread = sessions.get(&session_id).unwrap().clone();
+ let thread = sessions.get(session_id).unwrap().clone();
cx.spawn(async move |cx| {
thread
@@ -71,8 +71,8 @@ impl Diff {
let hunk_ranges = {
let buffer = new_buffer.read(cx);
let diff = buffer_diff.read(cx);
- diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, cx)
- .map(|diff_hunk| diff_hunk.buffer_range.to_point(&buffer))
+ diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
+ .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
.collect::<Vec<_>>()
};
@@ -306,13 +306,13 @@ impl PendingDiff {
let buffer = self.buffer.read(cx);
let diff = self.diff.read(cx);
let mut ranges = diff
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, cx)
- .map(|diff_hunk| diff_hunk.buffer_range.to_point(&buffer))
+ .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
+ .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
.collect::<Vec<_>>();
ranges.extend(
self.revealed_ranges
.iter()
- .map(|range| range.to_point(&buffer)),
+ .map(|range| range.to_point(buffer)),
);
ranges.sort_unstable_by_key(|range| (range.start, Reverse(range.end)));
@@ -146,7 +146,7 @@ impl MentionUri {
FileIcons::get_folder_icon(false, cx)
.unwrap_or_else(|| IconName::Folder.path().into())
} else {
- FileIcons::get_icon(&abs_path, cx)
+ FileIcons::get_icon(abs_path, cx)
.unwrap_or_else(|| IconName::File.path().into())
}
}
@@ -290,7 +290,7 @@ impl ActionLog {
}
_ = git_diff_updates_rx.changed().fuse() => {
if let Some(git_diff) = git_diff.as_ref() {
- Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
+ Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
}
}
}
@@ -498,7 +498,7 @@ impl ActionLog {
new: new_range,
},
&new_diff_base,
- &buffer_snapshot.as_rope(),
+ buffer_snapshot.as_rope(),
));
}
unreviewed_edits
@@ -964,7 +964,7 @@ impl TrackedBuffer {
fn has_edits(&self, cx: &App) -> bool {
self.diff
.read(cx)
- .hunks(&self.buffer.read(cx), cx)
+ .hunks(self.buffer.read(cx), cx)
.next()
.is_some()
}
@@ -2268,7 +2268,7 @@ mod tests {
log::info!("quiescing...");
cx.run_until_parked();
action_log.update(cx, |log, cx| {
- let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
+ let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
let mut old_text = tracked_buffer.diff_base.clone();
let new_text = buffer.read(cx).as_rope();
for edit in tracked_buffer.unreviewed_edits.edits() {
@@ -702,7 +702,7 @@ impl ActivityIndicator {
on_click: Some(Arc::new(|this, window, cx| {
this.dismiss_error_message(&DismissErrorMessage, window, cx)
})),
- tooltip_message: Some(Self::version_tooltip_message(&version)),
+ tooltip_message: Some(Self::version_tooltip_message(version)),
}),
AutoUpdateStatus::Installing { version } => Some(Content {
icon: Some(
@@ -714,13 +714,13 @@ impl ActivityIndicator {
on_click: Some(Arc::new(|this, window, cx| {
this.dismiss_error_message(&DismissErrorMessage, window, cx)
})),
- tooltip_message: Some(Self::version_tooltip_message(&version)),
+ tooltip_message: Some(Self::version_tooltip_message(version)),
}),
AutoUpdateStatus::Updated { version } => Some(Content {
icon: None,
message: "Click to restart and update Zed".to_string(),
on_click: Some(Arc::new(move |_, _, cx| workspace::reload(cx))),
- tooltip_message: Some(Self::version_tooltip_message(&version)),
+ tooltip_message: Some(Self::version_tooltip_message(version)),
}),
AutoUpdateStatus::Errored => Some(Content {
icon: Some(
@@ -1692,7 +1692,7 @@ impl Thread {
self.last_received_chunk_at = Some(Instant::now());
let task = cx.spawn(async move |thread, cx| {
- let stream_completion_future = model.stream_completion(request, &cx);
+ let stream_completion_future = model.stream_completion(request, cx);
let initial_token_usage =
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage);
let stream_completion = async {
@@ -1824,7 +1824,7 @@ impl Thread {
let streamed_input = if tool_use.is_input_complete {
None
} else {
- Some((&tool_use.input).clone())
+ Some(tool_use.input.clone())
};
let ui_text = thread.tool_use.request_tool_use(
@@ -2051,7 +2051,7 @@ impl Thread {
retry_scheduled = thread
.handle_retryable_error_with_delay(
- &completion_error,
+ completion_error,
Some(retry_strategy),
model.clone(),
intent,
@@ -2130,7 +2130,7 @@ impl Thread {
self.pending_summary = cx.spawn(async move |this, cx| {
let result = async {
- let mut messages = model.model.stream_completion(request, &cx).await?;
+ let mut messages = model.model.stream_completion(request, cx).await?;
let mut new_summary = String::new();
while let Some(event) = messages.next().await {
@@ -2456,7 +2456,7 @@ impl Thread {
// which result to prefer (the old task could complete after the new one, resulting in a
// stale summary).
self.detailed_summary_task = cx.spawn(async move |thread, cx| {
- let stream = model.stream_completion_text(request, &cx);
+ let stream = model.stream_completion_text(request, cx);
let Some(mut messages) = stream.await.log_err() else {
thread
.update(cx, |thread, _cx| {
@@ -4043,7 +4043,7 @@ fn main() {{
});
let fake_model = model.as_fake();
- simulate_successful_response(&fake_model, cx);
+ simulate_successful_response(fake_model, cx);
// Should start generating summary when there are >= 2 messages
thread.read_with(cx, |thread, _| {
@@ -4138,7 +4138,7 @@ fn main() {{
});
let fake_model = model.as_fake();
- simulate_successful_response(&fake_model, cx);
+ simulate_successful_response(fake_model, cx);
thread.read_with(cx, |thread, _| {
// State is still Error, not Generating
@@ -5420,7 +5420,7 @@ fn main() {{
});
let fake_model = model.as_fake();
- simulate_successful_response(&fake_model, cx);
+ simulate_successful_response(fake_model, cx);
thread.read_with(cx, |thread, _| {
assert!(matches!(thread.summary(), ThreadSummary::Generating));
@@ -91,7 +91,7 @@ impl LanguageModels {
for provider in &providers {
for model in provider.recommended_models(cx) {
recommended_models.insert(model.id());
- recommended.push(Self::map_language_model_to_info(&model, &provider));
+ recommended.push(Self::map_language_model_to_info(&model, provider));
}
}
if !recommended.is_empty() {
@@ -62,7 +62,7 @@ fn contains(
handlebars::RenderError::new("contains: missing or invalid query parameter")
})?;
- if list.contains(&query) {
+ if list.contains(query) {
out.write("true")?;
}
@@ -173,7 +173,7 @@ impl UserMessage {
&mut symbol_context,
"\n{}",
MarkdownCodeBlock {
- tag: &codeblock_tag(&abs_path, None),
+ tag: &codeblock_tag(abs_path, None),
text: &content.to_string(),
}
)
@@ -189,8 +189,8 @@ impl UserMessage {
&mut rules_context,
"\n{}",
MarkdownCodeBlock {
- tag: &codeblock_tag(&path, Some(line_range)),
- text: &content
+ tag: &codeblock_tag(path, Some(line_range)),
+ text: content
}
)
.ok();
@@ -207,7 +207,7 @@ impl UserMessage {
"\n{}",
MarkdownCodeBlock {
tag: "",
- text: &content
+ text: content
}
)
.ok();
@@ -1048,7 +1048,7 @@ impl Thread {
tools,
tool_choice: None,
stop: Vec::new(),
- temperature: AgentSettings::temperature_for_model(&model, cx),
+ temperature: AgentSettings::temperature_for_model(model, cx),
thinking_allowed: true,
};
@@ -103,7 +103,7 @@ impl ContextServerRegistry {
self.reload_tools_for_server(server_id.clone(), cx);
}
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
- self.registered_servers.remove(&server_id);
+ self.registered_servers.remove(server_id);
cx.notify();
}
}
@@ -471,7 +471,7 @@ fn resolve_path(
let parent_entry = parent_project_path
.as_ref()
- .and_then(|path| project.entry_for_path(&path, cx))
+ .and_then(|path| project.entry_for_path(path, cx))
.context("Can't create file: parent directory doesn't exist")?;
anyhow::ensure!(
@@ -80,7 +80,7 @@ impl AgentTool for TerminalTool {
let first_line = lines.next().unwrap_or_default();
let remaining_line_count = lines.count();
match remaining_line_count {
- 0 => MarkdownInlineCode(&first_line).to_string().into(),
+ 0 => MarkdownInlineCode(first_line).to_string().into(),
1 => MarkdownInlineCode(&format!(
"{} - {} more line",
first_line, remaining_line_count
@@ -19,14 +19,14 @@ pub async fn connect(
root_dir: &Path,
cx: &mut AsyncApp,
) -> Result<Rc<dyn AgentConnection>> {
- let conn = v1::AcpConnection::stdio(server_name, command.clone(), &root_dir, cx).await;
+ let conn = v1::AcpConnection::stdio(server_name, command.clone(), root_dir, cx).await;
match conn {
Ok(conn) => Ok(Rc::new(conn) as _),
Err(err) if err.is::<UnsupportedVersion>() => {
// Consider re-using initialize response and subprocess when adding another version here
let conn: Rc<dyn AgentConnection> =
- Rc::new(v0::AcpConnection::stdio(server_name, command, &root_dir, cx).await?);
+ Rc::new(v0::AcpConnection::stdio(server_name, command, root_dir, cx).await?);
Ok(conn)
}
Err(err) => Err(err),
@@ -291,7 +291,7 @@ impl AgentConnection for ClaudeAgentConnection {
fn cancel(&self, session_id: &acp::SessionId, _cx: &mut App) {
let sessions = self.sessions.borrow();
- let Some(session) = sessions.get(&session_id) else {
+ let Some(session) = sessions.get(session_id) else {
log::warn!("Attempted to cancel nonexistent session {}", session_id);
return;
};
@@ -552,11 +552,11 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx:
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabel::default();
- label.push_str(&file_name, None);
+ label.push_str(file_name, None);
label.push_str(" ", None);
if let Some(directory) = directory {
- label.push_str(&directory, comment_id);
+ label.push_str(directory, comment_id);
}
label.filter_range = 0..label.text().len();
@@ -1191,7 +1191,7 @@ impl MentionSet {
})
}
MentionUri::Fetch { url } => {
- let Some(content) = self.fetch_results.get(&url).cloned() else {
+ let Some(content) = self.fetch_results.get(url).cloned() else {
return Task::ready(Err(anyhow!("missing fetch result")));
};
let uri = uri.clone();
@@ -330,7 +330,7 @@ async fn fuzzy_search(
.collect::<Vec<_>>();
let mut matches = match_strings(
&candidates,
- &query,
+ query,
false,
true,
100,
@@ -696,7 +696,7 @@ impl AcpThreadView {
};
diff.update(cx, |diff, cx| {
- diff.move_to_path(PathKey::for_buffer(&buffer, cx), window, cx)
+ diff.move_to_path(PathKey::for_buffer(buffer, cx), window, cx)
})
}
@@ -722,13 +722,13 @@ impl AcpThreadView {
let len = thread.read(cx).entries().len();
let index = len - 1;
self.entry_view_state.update(cx, |view_state, cx| {
- view_state.sync_entry(index, &thread, window, cx)
+ view_state.sync_entry(index, thread, window, cx)
});
self.list_state.splice(index..index, 1);
}
AcpThreadEvent::EntryUpdated(index) => {
self.entry_view_state.update(cx, |view_state, cx| {
- view_state.sync_entry(*index, &thread, window, cx)
+ view_state.sync_entry(*index, thread, window, cx)
});
self.list_state.splice(*index..index + 1, 1);
}
@@ -1427,7 +1427,7 @@ impl AcpThreadView {
Empty.into_any_element()
}
}
- ToolCallContent::Diff(diff) => self.render_diff_editor(entry_ix, &diff, cx),
+ ToolCallContent::Diff(diff) => self.render_diff_editor(entry_ix, diff, cx),
ToolCallContent::Terminal(terminal) => {
self.render_terminal_tool_call(entry_ix, terminal, tool_call, window, cx)
}
@@ -1583,7 +1583,7 @@ impl AcpThreadView {
.border_color(self.tool_card_border_color(cx))
.child(
if let Some(entry) = self.entry_view_state.read(cx).entry(entry_ix)
- && let Some(editor) = entry.editor_for_diff(&diff)
+ && let Some(editor) = entry.editor_for_diff(diff)
{
editor.clone().into_any_element()
} else {
@@ -1783,7 +1783,7 @@ impl AcpThreadView {
.entry_view_state
.read(cx)
.entry(entry_ix)
- .and_then(|entry| entry.terminal(&terminal));
+ .and_then(|entry| entry.terminal(terminal));
let show_output = self.terminal_expanded && terminal_view.is_some();
v_flex()
@@ -2420,7 +2420,7 @@ impl AcpThreadView {
.buffer_font(cx)
});
- let file_icon = FileIcons::get_icon(&path, cx)
+ let file_icon = FileIcons::get_icon(path, cx)
.map(Icon::from_path)
.map(|icon| icon.color(Color::Muted).size(IconSize::Small))
.unwrap_or_else(|| {
@@ -3453,7 +3453,7 @@ impl Render for AcpThreadView {
configuration_view,
..
} => self.render_auth_required_state(
- &connection,
+ connection,
description.as_ref(),
configuration_view.as_ref(),
window,
@@ -1044,12 +1044,12 @@ impl ActiveThread {
);
}
ThreadEvent::StreamedAssistantText(message_id, text) => {
- if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(&message_id) {
+ if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(message_id) {
rendered_message.append_text(text, cx);
}
}
ThreadEvent::StreamedAssistantThinking(message_id, text) => {
- if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(&message_id) {
+ if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(message_id) {
rendered_message.append_thinking(text, cx);
}
}
@@ -2473,7 +2473,7 @@ impl ActiveThread {
message_id,
index,
content.clone(),
- &scroll_handle,
+ scroll_handle,
Some(index) == pending_thinking_segment_index,
window,
cx,
@@ -207,7 +207,7 @@ impl AgentDiffPane {
),
match &thread {
AgentDiffThread::Native(thread) => {
- Some(cx.subscribe(&thread, |this, _thread, event, cx| {
+ Some(cx.subscribe(thread, |this, _thread, event, cx| {
this.handle_thread_event(event, cx)
}))
}
@@ -398,7 +398,7 @@ fn keep_edits_in_selection(
.disjoint_anchor_ranges()
.collect::<Vec<_>>();
- keep_edits_in_ranges(editor, buffer_snapshot, &thread, ranges, window, cx)
+ keep_edits_in_ranges(editor, buffer_snapshot, thread, ranges, window, cx)
}
fn reject_edits_in_selection(
@@ -412,7 +412,7 @@ fn reject_edits_in_selection(
.selections
.disjoint_anchor_ranges()
.collect::<Vec<_>>();
- reject_edits_in_ranges(editor, buffer_snapshot, &thread, ranges, window, cx)
+ reject_edits_in_ranges(editor, buffer_snapshot, thread, ranges, window, cx)
}
fn keep_edits_in_ranges(
@@ -1001,7 +1001,7 @@ impl AgentDiffToolbar {
return;
};
- *state = agent_diff.read(cx).editor_state(&editor);
+ *state = agent_diff.read(cx).editor_state(editor);
self.update_location(cx);
cx.notify();
}
@@ -1343,13 +1343,13 @@ impl AgentDiff {
});
let thread_subscription = match &thread {
- AgentDiffThread::Native(thread) => cx.subscribe_in(&thread, window, {
+ AgentDiffThread::Native(thread) => cx.subscribe_in(thread, window, {
let workspace = workspace.clone();
move |this, _thread, event, window, cx| {
this.handle_native_thread_event(&workspace, event, window, cx)
}
}),
- AgentDiffThread::AcpThread(thread) => cx.subscribe_in(&thread, window, {
+ AgentDiffThread::AcpThread(thread) => cx.subscribe_in(thread, window, {
let workspace = workspace.clone();
move |this, thread, event, window, cx| {
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
@@ -1357,11 +1357,11 @@ impl AgentDiff {
}),
};
- if let Some(workspace_thread) = self.workspace_threads.get_mut(&workspace) {
+ if let Some(workspace_thread) = self.workspace_threads.get_mut(workspace) {
// replace thread and action log subscription, but keep editors
workspace_thread.thread = thread.downgrade();
workspace_thread._thread_subscriptions = (action_log_subscription, thread_subscription);
- self.update_reviewing_editors(&workspace, window, cx);
+ self.update_reviewing_editors(workspace, window, cx);
return;
}
@@ -1677,7 +1677,7 @@ impl AgentDiff {
editor.register_addon(EditorAgentDiffAddon);
});
} else {
- unaffected.remove(&weak_editor);
+ unaffected.remove(weak_editor);
}
if new_state == EditorState::Reviewing && previous_state != Some(new_state) {
@@ -1730,7 +1730,7 @@ impl AgentDiff {
fn editor_state(&self, editor: &WeakEntity<Editor>) -> EditorState {
self.reviewing_editors
- .get(&editor)
+ .get(editor)
.cloned()
.unwrap_or(EditorState::Idle)
}
@@ -2923,7 +2923,7 @@ impl AgentPanel {
.style(ButtonStyle::Tinted(ui::TintColor::Warning))
.label_size(LabelSize::Small)
.key_binding(
- KeyBinding::for_action_in(&OpenSettings, &focus_handle, window, cx)
+ KeyBinding::for_action_in(&OpenSettings, focus_handle, window, cx)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_event, window, cx| {
@@ -3329,7 +3329,7 @@ impl AgentPanel {
.paths()
.into_iter()
.map(|path| {
- Workspace::project_path_for_path(this.project.clone(), &path, false, cx)
+ Workspace::project_path_for_path(this.project.clone(), path, false, cx)
})
.collect::<Vec<_>>();
cx.spawn_in(window, async move |this, cx| {
@@ -3599,7 +3599,7 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist {
let text_thread_store = None;
let context_store = cx.new(|_| ContextStore::new(project.clone(), None));
assistant.assist(
- &prompt_editor,
+ prompt_editor,
self.workspace.clone(),
context_store,
project,
@@ -388,7 +388,7 @@ impl CodegenAlternative {
} else {
let request = self.build_request(&model, user_prompt, cx)?;
cx.spawn(async move |_, cx| {
- Ok(model.stream_completion_text(request.await, &cx).await?)
+ Ok(model.stream_completion_text(request.await, cx).await?)
})
.boxed_local()
};
@@ -447,7 +447,7 @@ impl CodegenAlternative {
}
});
- let temperature = AgentSettings::temperature_for_model(&model, cx);
+ let temperature = AgentSettings::temperature_for_model(model, cx);
Ok(cx.spawn(async move |_cx| {
let mut request_message = LanguageModelRequestMessage {
@@ -1028,7 +1028,7 @@ where
chunk.push('\n');
}
- chunk.push_str(&line);
+ chunk.push_str(line);
}
consumed += line.len();
@@ -728,11 +728,11 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx:
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabel::default();
- label.push_str(&file_name, None);
+ label.push_str(file_name, None);
label.push_str(" ", None);
if let Some(directory) = directory {
- label.push_str(&directory, comment_id);
+ label.push_str(directory, comment_id);
}
label.filter_range = 0..label.text().len();
@@ -315,7 +315,7 @@ pub fn render_file_context_entry(
context_store: WeakEntity<ContextStore>,
cx: &App,
) -> Stateful<Div> {
- let (file_name, directory) = extract_file_name_and_directory(&path, path_prefix);
+ let (file_name, directory) = extract_file_name_and_directory(path, path_prefix);
let added = context_store.upgrade().and_then(|context_store| {
let project_path = ProjectPath {
@@ -334,7 +334,7 @@ pub fn render_file_context_entry(
let file_icon = if is_directory {
FileIcons::get_folder_icon(false, cx)
} else {
- FileIcons::get_icon(&path, cx)
+ FileIcons::get_icon(path, cx)
}
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
@@ -289,7 +289,7 @@ pub(crate) fn search_symbols(
.iter()
.enumerate()
.map(|(id, symbol)| {
- StringMatchCandidate::new(id, &symbol.label.filter_text())
+ StringMatchCandidate::new(id, symbol.label.filter_text())
})
.partition(|candidate| {
project
@@ -167,7 +167,7 @@ impl PickerDelegate for ThreadContextPickerDelegate {
return;
};
let open_thread_task =
- thread_store.update(cx, |this, cx| this.open_thread(&id, window, cx));
+ thread_store.update(cx, |this, cx| this.open_thread(id, window, cx));
cx.spawn(async move |this, cx| {
let thread = open_thread_task.await?;
@@ -236,7 +236,7 @@ pub fn render_thread_context_entry(
let is_added = match entry {
ThreadContextEntry::Thread { id, .. } => context_store
.upgrade()
- .map_or(false, |ctx_store| ctx_store.read(cx).includes_thread(&id)),
+ .map_or(false, |ctx_store| ctx_store.read(cx).includes_thread(id)),
ThreadContextEntry::Context { path, .. } => {
context_store.upgrade().map_or(false, |ctx_store| {
ctx_store.read(cx).includes_text_thread(path)
@@ -338,7 +338,7 @@ pub(crate) fn search_threads(
let candidates = threads
.iter()
.enumerate()
- .map(|(id, (_, thread))| StringMatchCandidate::new(id, &thread.title()))
+ .map(|(id, (_, thread))| StringMatchCandidate::new(id, thread.title()))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
@@ -145,7 +145,7 @@ impl ContextStrip {
}
let file_name = active_buffer.file()?.file_name(cx);
- let icon_path = FileIcons::get_icon(&Path::new(&file_name), cx);
+ let icon_path = FileIcons::get_icon(Path::new(&file_name), cx);
Some(SuggestedContext::File {
name: file_name.to_string_lossy().into_owned().into(),
buffer: active_buffer_entity.downgrade(),
@@ -377,7 +377,7 @@ impl ContextStrip {
fn add_suggested_context(&mut self, suggested: &SuggestedContext, cx: &mut Context<Self>) {
self.context_store.update(cx, |context_store, cx| {
- context_store.add_suggested_context(&suggested, cx)
+ context_store.add_suggested_context(suggested, cx)
});
cx.notify();
}
@@ -526,9 +526,9 @@ impl InlineAssistant {
if assist_to_focus.is_none() {
let focus_assist = if newest_selection.reversed {
- range.start.to_point(&snapshot) == newest_selection.start
+ range.start.to_point(snapshot) == newest_selection.start
} else {
- range.end.to_point(&snapshot) == newest_selection.end
+ range.end.to_point(snapshot) == newest_selection.end
};
if focus_assist {
assist_to_focus = Some(assist_id);
@@ -550,7 +550,7 @@ impl InlineAssistant {
let editor_assists = self
.assists_by_editor
.entry(editor.downgrade())
- .or_insert_with(|| EditorInlineAssists::new(&editor, window, cx));
+ .or_insert_with(|| EditorInlineAssists::new(editor, window, cx));
let mut assist_group = InlineAssistGroup::new();
for (assist_id, range, prompt_editor, prompt_block_id, end_block_id) in assists {
let codegen = prompt_editor.read(cx).codegen().clone();
@@ -649,7 +649,7 @@ impl InlineAssistant {
let editor_assists = self
.assists_by_editor
.entry(editor.downgrade())
- .or_insert_with(|| EditorInlineAssists::new(&editor, window, cx));
+ .or_insert_with(|| EditorInlineAssists::new(editor, window, cx));
let mut assist_group = InlineAssistGroup::new();
self.assists.insert(
@@ -75,7 +75,7 @@ impl<T: 'static> Render for PromptEditor<T> {
let codegen = codegen.read(cx);
if codegen.alternative_count(cx) > 1 {
- buttons.push(self.render_cycle_controls(&codegen, cx));
+ buttons.push(self.render_cycle_controls(codegen, cx));
}
let editor_margins = editor_margins.lock();
@@ -296,7 +296,7 @@ impl ModelMatcher {
pub fn fuzzy_search(&self, query: &str) -> Vec<ModelInfo> {
let mut matches = self.bg_executor.block(match_strings(
&self.candidates,
- &query,
+ query,
false,
true,
100,
@@ -1166,7 +1166,7 @@ impl MessageEditor {
.buffer_font(cx)
});
- let file_icon = FileIcons::get_icon(&path, cx)
+ let file_icon = FileIcons::get_icon(path, cx)
.map(Icon::from_path)
.map(|icon| icon.color(Color::Muted).size(IconSize::Small))
.unwrap_or_else(|| {
@@ -1559,9 +1559,8 @@ impl ContextCreasesAddon {
cx: &mut Context<Editor>,
) {
self.creases.entry(key).or_default().extend(creases);
- self._subscription = Some(cx.subscribe(
- &context_store,
- |editor, _, event, cx| match event {
+ self._subscription = Some(
+ cx.subscribe(context_store, |editor, _, event, cx| match event {
ContextStoreEvent::ContextRemoved(key) => {
let Some(this) = editor.addon_mut::<Self>() else {
return;
@@ -1581,8 +1580,8 @@ impl ContextCreasesAddon {
editor.edit(ranges.into_iter().zip(replacement_texts), cx);
cx.notify();
}
- },
- ))
+ }),
+ )
}
pub fn into_inner(self) -> HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>> {
@@ -214,7 +214,7 @@ impl PickerDelegate for SlashCommandDelegate {
let mut label = format!("{}", info.name);
if let Some(args) = info.args.as_ref().filter(|_| selected)
{
- label.push_str(&args);
+ label.push_str(args);
}
Label::new(label)
.single_line()
@@ -48,7 +48,7 @@ impl TerminalCodegen {
let prompt = prompt_task.await;
let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id();
- let response = model.stream_completion_text(prompt, &cx).await;
+ let response = model.stream_completion_text(prompt, cx).await;
let generate = async {
let message_id = response
.as_ref()
@@ -353,7 +353,7 @@ impl AddedContext {
name,
parent,
tooltip: Some(full_path_string),
- icon_path: FileIcons::get_icon(&full_path, cx),
+ icon_path: FileIcons::get_icon(full_path, cx),
status: ContextStatus::Ready,
render_hover: None,
handle: AgentContextHandle::File(handle),
@@ -615,7 +615,7 @@ impl AddedContext {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
- let icon_path = FileIcons::get_icon(&full_path, cx);
+ let icon_path = FileIcons::get_icon(full_path, cx);
(name, parent, icon_path)
} else {
("Image".into(), None, None)
@@ -706,7 +706,7 @@ impl ContextFileExcerpt {
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());
- let icon_path = FileIcons::get_icon(&full_path, cx);
+ let icon_path = FileIcons::get_icon(full_path, cx);
ContextFileExcerpt {
file_name_and_range: file_name_and_range.into(),
@@ -592,7 +592,7 @@ impl MessageMetadata {
pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
let result = match &self.cache {
Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
- &cached_at,
+ cached_at,
Range {
start: buffer.anchor_at(range.start, Bias::Right),
end: buffer.anchor_at(range.end, Bias::Left),
@@ -1413,7 +1413,7 @@ impl AssistantContext {
}
let request = {
- let mut req = self.to_completion_request(Some(&model), cx);
+ let mut req = self.to_completion_request(Some(model), cx);
// Skip the last message because it's likely to change and
// therefore would be a waste to cache.
req.messages.pop();
@@ -1428,7 +1428,7 @@ impl AssistantContext {
let model = Arc::clone(model);
self.pending_cache_warming_task = cx.spawn(async move |this, cx| {
async move {
- match model.stream_completion(request, &cx).await {
+ match model.stream_completion(request, cx).await {
Ok(mut stream) => {
stream.next().await;
log::info!("Cache warming completed successfully");
@@ -1661,12 +1661,12 @@ impl AssistantContext {
) -> Range<usize> {
let buffer = self.buffer.read(cx);
let start_ix = match all_annotations
- .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
+ .binary_search_by(|probe| probe.range().end.cmp(&range.start, buffer))
{
Ok(ix) | Err(ix) => ix,
};
let end_ix = match all_annotations
- .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
+ .binary_search_by(|probe| probe.range().start.cmp(&range.end, buffer))
{
Ok(ix) => ix + 1,
Err(ix) => ix,
@@ -2045,7 +2045,7 @@ impl AssistantContext {
let task = cx.spawn({
async move |this, cx| {
- let stream = model.stream_completion(request, &cx);
+ let stream = model.stream_completion(request, cx);
let assistant_message_id = assistant_message.id;
let mut response_latency = None;
let stream_completion = async {
@@ -2708,7 +2708,7 @@ impl AssistantContext {
self.summary_task = cx.spawn(async move |this, cx| {
let result = async {
- let stream = model.model.stream_completion_text(request, &cx);
+ let stream = model.model.stream_completion_text(request, cx);
let mut messages = stream.await?;
let mut replaced = !replace_old;
@@ -2927,7 +2927,7 @@ impl AssistantContext {
if let Some(old_path) = old_path.as_ref() {
if new_path.as_path() != old_path.as_ref() {
fs.rename(
- &old_path,
+ old_path,
&new_path,
RenameOptions {
overwrite: true,
@@ -1300,7 +1300,7 @@ fn test_summarize_error(
context.assist(cx);
});
- simulate_successful_response(&model, cx);
+ simulate_successful_response(model, cx);
context.read_with(cx, |context, _| {
assert!(!context.summary().content().unwrap().done);
@@ -44,7 +44,7 @@ impl SlashCommand for ContextServerSlashCommand {
parts.push(arg.name.as_str());
}
}
- create_label_for_command(&parts[0], &parts[1..], cx)
+ create_label_for_command(parts[0], &parts[1..], cx)
}
fn description(&self) -> String {
@@ -249,7 +249,7 @@ fn collect_diagnostics(
let worktree = worktree.read(cx);
let worktree_root_path = Path::new(worktree.root_name());
let relative_path = path.strip_prefix(worktree_root_path).ok()?;
- worktree.absolutize(&relative_path).ok()
+ worktree.absolutize(relative_path).ok()
})
})
.is_some()
@@ -365,7 +365,7 @@ pub fn collect_buffer_diagnostics(
) {
for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix];
- collect_diagnostic(output, entry, &snapshot, include_warnings)
+ collect_diagnostic(output, entry, snapshot, include_warnings)
}
}
@@ -396,7 +396,7 @@ fn collect_diagnostic(
let start_row = range.start.row.saturating_sub(EXCERPT_EXPANSION_SIZE);
let end_row = (range.end.row + EXCERPT_EXPANSION_SIZE).min(snapshot.max_point().row) + 1;
let excerpt_range =
- Point::new(start_row, 0).to_offset(&snapshot)..Point::new(end_row, 0).to_offset(&snapshot);
+ Point::new(start_row, 0).to_offset(snapshot)..Point::new(end_row, 0).to_offset(snapshot);
output.text.push_str("```");
if let Some(language_name) = snapshot.language().map(|l| l.code_fence_block_name()) {
@@ -536,7 +536,7 @@ fn resolve_path(
let parent_entry = parent_project_path
.as_ref()
- .and_then(|path| project.entry_for_path(&path, cx))
+ .and_then(|path| project.entry_for_path(path, cx))
.context("Can't create file: parent directory doesn't exist")?;
anyhow::ensure!(
@@ -723,13 +723,13 @@ impl EditFileToolCard {
let buffer = buffer.read(cx);
let diff = diff.read(cx);
let mut ranges = diff
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, cx)
- .map(|diff_hunk| diff_hunk.buffer_range.to_point(&buffer))
+ .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
+ .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
.collect::<Vec<_>>();
ranges.extend(
self.revealed_ranges
.iter()
- .map(|range| range.to_point(&buffer)),
+ .map(|range| range.to_point(buffer)),
);
ranges.sort_unstable_by_key(|range| (range.start, Reverse(range.end)));
@@ -894,7 +894,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not find files outside the project worktree"
@@ -920,7 +920,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.iter().any(|p| p.contains("allowed_file.rs")),
"grep_tool should be able to search files inside worktrees"
@@ -946,7 +946,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search files in .secretdir (file_scan_exclusions)"
@@ -971,7 +971,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .mymetadata files (file_scan_exclusions)"
@@ -997,7 +997,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .mysecrets (private_files)"
@@ -1022,7 +1022,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .privatekey files (private_files)"
@@ -1047,7 +1047,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not search .mysensitive files (private_files)"
@@ -1073,7 +1073,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.iter().any(|p| p.contains("normal_file.rs")),
"Should be able to search normal files"
@@ -1100,7 +1100,7 @@ mod tests {
})
.await;
let results = result.unwrap();
- let paths = extract_paths_from_results(&results.content.as_str().unwrap());
+ let paths = extract_paths_from_results(results.content.as_str().unwrap());
assert!(
paths.is_empty(),
"grep_tool should not allow escaping project boundaries with relative paths"
@@ -1206,7 +1206,7 @@ mod tests {
.unwrap();
let content = result.content.as_str().unwrap();
- let paths = extract_paths_from_results(&content);
+ let paths = extract_paths_from_results(content);
// Should find matches in non-private files
assert!(
@@ -1271,7 +1271,7 @@ mod tests {
.unwrap();
let content = result.content.as_str().unwrap();
- let paths = extract_paths_from_results(&content);
+ let paths = extract_paths_from_results(content);
// Should only find matches in worktree1 *.rs files (excluding private ones)
assert!(
@@ -81,7 +81,7 @@ fn fit_patch_to_size(patch: &str, max_size: usize) -> String {
// Compression level 1: remove context lines in diff bodies, but
// leave the counts and positions of inserted/deleted lines
let mut current_size = patch.len();
- let mut file_patches = split_patch(&patch);
+ let mut file_patches = split_patch(patch);
file_patches.sort_by_key(|patch| patch.len());
let compressed_patches = file_patches
.iter()
@@ -105,7 +105,7 @@ impl Tool for TerminalTool {
let first_line = lines.next().unwrap_or_default();
let remaining_line_count = lines.count();
match remaining_line_count {
- 0 => MarkdownInlineCode(&first_line).to_string(),
+ 0 => MarkdownInlineCode(first_line).to_string(),
1 => MarkdownInlineCode(&format!(
"{} - {} more line",
first_line, remaining_line_count
@@ -231,7 +231,7 @@ fn apply_dirty_filename_style(
let highlight = vec![(filename_position..text.len(), highlight_style)];
Some(
StyledText::new(text)
- .with_default_highlights(&text_style, highlight)
+ .with_default_highlights(text_style, highlight)
.into_any(),
)
}
@@ -928,7 +928,7 @@ impl BufferDiff {
let new_index_text = self.inner.stage_or_unstage_hunks_impl(
&self.secondary_diff.as_ref()?.read(cx).inner,
stage,
- &hunks,
+ hunks,
buffer,
file_exists,
);
@@ -952,12 +952,12 @@ impl BufferDiff {
cx: &App,
) -> Option<Range<Anchor>> {
let start = self
- .hunks_intersecting_range(range.clone(), &buffer, cx)
+ .hunks_intersecting_range(range.clone(), buffer, cx)
.next()?
.buffer_range
.start;
let end = self
- .hunks_intersecting_range_rev(range.clone(), &buffer)
+ .hunks_intersecting_range_rev(range.clone(), buffer)
.next()?
.buffer_range
.end;
@@ -1031,18 +1031,18 @@ impl BufferDiff {
&& state.base_text.syntax_update_count()
== new_state.base_text.syntax_update_count() =>
{
- (false, new_state.compare(&state, buffer))
+ (false, new_state.compare(state, buffer))
}
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
};
if let Some(secondary_changed_range) = secondary_diff_change {
if let Some(secondary_hunk_range) =
- self.range_to_hunk_range(secondary_changed_range, &buffer, cx)
+ self.range_to_hunk_range(secondary_changed_range, buffer, cx)
{
if let Some(range) = &mut changed_range {
- range.start = secondary_hunk_range.start.min(&range.start, &buffer);
- range.end = secondary_hunk_range.end.max(&range.end, &buffer);
+ range.start = secondary_hunk_range.start.min(&range.start, buffer);
+ range.end = secondary_hunk_range.end.max(&range.end, buffer);
} else {
changed_range = Some(secondary_hunk_range);
}
@@ -1057,8 +1057,8 @@ impl BufferDiff {
if let Some((first, last)) = state.pending_hunks.first().zip(state.pending_hunks.last())
{
if let Some(range) = &mut changed_range {
- range.start = range.start.min(&first.buffer_range.start, &buffer);
- range.end = range.end.max(&last.buffer_range.end, &buffer);
+ range.start = range.start.min(&first.buffer_range.start, buffer);
+ range.end = range.end.max(&last.buffer_range.end, buffer);
} else {
changed_range = Some(first.buffer_range.start..last.buffer_range.end);
}
@@ -1797,7 +1797,7 @@ mod tests {
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff
- .hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
+ .hunks_intersecting_range(hunk_range.clone(), &buffer, cx)
.collect::<Vec<_>>();
for hunk in &hunks {
assert_ne!(
@@ -1812,7 +1812,7 @@ mod tests {
.to_string();
let hunks = diff
- .hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
+ .hunks_intersecting_range(hunk_range.clone(), &buffer, cx)
.collect::<Vec<_>>();
for hunk in &hunks {
assert_eq!(
@@ -1870,7 +1870,7 @@ mod tests {
.to_string();
assert_eq!(new_index_text, buffer_text);
- let hunk = diff.hunks(&buffer, &cx).next().unwrap();
+ let hunk = diff.hunks(&buffer, cx).next().unwrap();
assert_eq!(
hunk.secondary_status,
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
@@ -1882,7 +1882,7 @@ mod tests {
.to_string();
assert_eq!(index_text, head_text);
- let hunk = diff.hunks(&buffer, &cx).next().unwrap();
+ let hunk = diff.hunks(&buffer, cx).next().unwrap();
// optimistically unstaged (fine, could also be HasSecondaryHunk)
assert_eq!(
hunk.secondary_status,
@@ -518,11 +518,11 @@ mod linux {
) -> Result<(), std::io::Error> {
for _ in 0..100 {
thread::sleep(Duration::from_millis(10));
- if sock.connect_addr(&sock_addr).is_ok() {
+ if sock.connect_addr(sock_addr).is_ok() {
return Ok(());
}
}
- sock.connect_addr(&sock_addr)
+ sock.connect_addr(sock_addr)
}
}
}
@@ -162,7 +162,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &SignIn, cx| {
if let Some(client) = client.upgrade() {
- cx.spawn(async move |cx| client.sign_in_with_optional_connect(true, &cx).await)
+ cx.spawn(async move |cx| client.sign_in_with_optional_connect(true, cx).await)
.detach_and_log_err(cx);
}
}
@@ -173,7 +173,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
move |_: &SignOut, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(async move |cx| {
- client.sign_out(&cx).await;
+ client.sign_out(cx).await;
})
.detach();
}
@@ -185,7 +185,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
move |_: &Reconnect, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(async move |cx| {
- client.reconnect(&cx);
+ client.reconnect(cx);
})
.detach();
}
@@ -677,7 +677,7 @@ impl Client {
let mut delay = INITIAL_RECONNECTION_DELAY;
loop {
- match client.connect(true, &cx).await {
+ match client.connect(true, cx).await {
ConnectionResult::Timeout => {
log::error!("client connect attempt timed out")
}
@@ -701,7 +701,7 @@ impl Client {
Status::ReconnectionError {
next_reconnection: Instant::now() + delay,
},
- &cx,
+ cx,
);
let jitter =
Duration::from_millis(rng.gen_range(0..delay.as_millis() as u64));
@@ -1151,7 +1151,7 @@ impl Client {
let this = self.clone();
async move |cx| {
while let Some(message) = incoming.next().await {
- this.handle_message(message, &cx);
+ this.handle_message(message, cx);
// Don't starve the main thread when receiving lots of messages at once.
smol::future::yield_now().await;
}
@@ -1169,12 +1169,12 @@ impl Client {
peer_id,
})
{
- this.set_status(Status::SignedOut, &cx);
+ this.set_status(Status::SignedOut, cx);
}
}
Err(err) => {
log::error!("connection error: {:?}", err);
- this.set_status(Status::ConnectionLost, &cx);
+ this.set_status(Status::ConnectionLost, cx);
}
}
})
@@ -943,21 +943,21 @@ impl Database {
let current_merge_conflicts = db_repository_entry
.current_merge_conflicts
.as_ref()
- .map(|conflicts| serde_json::from_str(&conflicts))
+ .map(|conflicts| serde_json::from_str(conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository_entry
.branch_summary
.as_ref()
- .map(|branch_summary| serde_json::from_str(&branch_summary))
+ .map(|branch_summary| serde_json::from_str(branch_summary))
.transpose()?
.unwrap_or_default();
let head_commit_details = db_repository_entry
.head_commit_details
.as_ref()
- .map(|head_commit_details| serde_json::from_str(&head_commit_details))
+ .map(|head_commit_details| serde_json::from_str(head_commit_details))
.transpose()?
.unwrap_or_default();
@@ -746,21 +746,21 @@ impl Database {
let current_merge_conflicts = db_repository
.current_merge_conflicts
.as_ref()
- .map(|conflicts| serde_json::from_str(&conflicts))
+ .map(|conflicts| serde_json::from_str(conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository
.branch_summary
.as_ref()
- .map(|branch_summary| serde_json::from_str(&branch_summary))
+ .map(|branch_summary| serde_json::from_str(branch_summary))
.transpose()?
.unwrap_or_default();
let head_commit_details = db_repository
.head_commit_details
.as_ref()
- .map(|head_commit_details| serde_json::from_str(&head_commit_details))
+ .map(|head_commit_details| serde_json::from_str(head_commit_details))
.transpose()?
.unwrap_or_default();
@@ -245,7 +245,7 @@ impl MessageEditor {
if !candidates.is_empty() {
return cx.spawn(async move |_, cx| {
let completion_response = Self::completions_for_candidates(
- &cx,
+ cx,
query.as_str(),
&candidates,
start_anchor..end_anchor,
@@ -263,7 +263,7 @@ impl MessageEditor {
if !candidates.is_empty() {
return cx.spawn(async move |_, cx| {
let completion_response = Self::completions_for_candidates(
- &cx,
+ cx,
query.as_str(),
candidates,
start_anchor..end_anchor,
@@ -2317,7 +2317,7 @@ impl CollabPanel {
let client = this.client.clone();
cx.spawn_in(window, async move |_, cx| {
client
- .connect(true, &cx)
+ .connect(true, cx)
.await
.into_response()
.notify_async_err(cx);
@@ -643,7 +643,7 @@ impl Render for NotificationPanel {
let client = client.clone();
window
.spawn(cx, async move |cx| {
- match client.connect(true, &cx).await {
+ match client.connect(true, cx).await {
util::ConnectionResult::Timeout => {
log::error!("Connection timeout");
}
@@ -315,12 +315,12 @@ impl McpServer {
Self::send_err(
request_id,
format!("Tool not found: {}", params.name),
- &outgoing_tx,
+ outgoing_tx,
);
}
}
Err(err) => {
- Self::send_err(request_id, err.to_string(), &outgoing_tx);
+ Self::send_err(request_id, err.to_string(), outgoing_tx);
}
}
}
@@ -691,7 +691,7 @@ impl CallToolResponse {
let mut text = String::new();
for chunk in &self.content {
if let ToolResponseContent::Text { text: chunk } = chunk {
- text.push_str(&chunk)
+ text.push_str(chunk)
};
}
text
@@ -484,7 +484,7 @@ impl CopilotChat {
};
if this.oauth_token.is_some() {
- cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
+ cx.spawn(async move |this, cx| Self::update_models(&this, cx).await)
.detach_and_log_err(cx);
}
@@ -863,7 +863,7 @@ mod tests {
"object": "list"
}"#;
- let schema: ModelSchema = serde_json::from_str(&json).unwrap();
+ let schema: ModelSchema = serde_json::from_str(json).unwrap();
assert_eq!(schema.data.len(), 2);
assert_eq!(schema.data[0].id, "gpt-4");
@@ -285,7 +285,7 @@ pub async fn download_adapter_from_github(
}
if !adapter_path.exists() {
- fs.create_dir(&adapter_path.as_path())
+ fs.create_dir(adapter_path.as_path())
.await
.context("Failed creating adapter path")?;
}
@@ -36,7 +36,7 @@ impl GoDebugAdapter {
delegate: &Arc<dyn DapDelegate>,
) -> Result<AdapterVersion> {
let release = latest_github_release(
- &"zed-industries/delve-shim-dap",
+ "zed-industries/delve-shim-dap",
true,
false,
delegate.http_client(),
@@ -514,7 +514,7 @@ impl DebugAdapter for JsDebugAdapter {
}
}
- self.get_installed_binary(delegate, &config, user_installed_path, user_args, cx)
+ self.get_installed_binary(delegate, config, user_installed_path, user_args, cx)
.await
}
@@ -717,7 +717,7 @@ impl DebugAdapter for PythonDebugAdapter {
local_path.display()
);
return self
- .get_installed_binary(delegate, &config, Some(local_path.clone()), user_args, None)
+ .get_installed_binary(delegate, config, Some(local_path.clone()), user_args, None)
.await;
}
@@ -754,7 +754,7 @@ impl DebugAdapter for PythonDebugAdapter {
return self
.get_installed_binary(
delegate,
- &config,
+ config,
None,
user_args,
Some(toolchain.path.to_string()),
@@ -762,7 +762,7 @@ impl DebugAdapter for PythonDebugAdapter {
.await;
}
- self.get_installed_binary(delegate, &config, None, user_args, None)
+ self.get_installed_binary(delegate, config, None, user_args, None)
.await
}
@@ -238,7 +238,7 @@ mod tests {
.unwrap();
let _bad_db = open_db::<BadDB>(
tempdir.path(),
- &release_channel::ReleaseChannel::Dev.dev_name(),
+ release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
}
@@ -279,7 +279,7 @@ mod tests {
{
let corrupt_db = open_db::<CorruptedDB>(
tempdir.path(),
- &release_channel::ReleaseChannel::Dev.dev_name(),
+ release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
assert!(corrupt_db.persistent());
@@ -287,7 +287,7 @@ mod tests {
let good_db = open_db::<GoodDB>(
tempdir.path(),
- &release_channel::ReleaseChannel::Dev.dev_name(),
+ release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
assert!(
@@ -334,7 +334,7 @@ mod tests {
// Setup the bad database
let corrupt_db = open_db::<CorruptedDB>(
tempdir.path(),
- &release_channel::ReleaseChannel::Dev.dev_name(),
+ release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
assert!(corrupt_db.persistent());
@@ -347,7 +347,7 @@ mod tests {
let guard = thread::spawn(move || {
let good_db = smol::block_on(open_db::<GoodDB>(
tmp_path.as_path(),
- &release_channel::ReleaseChannel::Dev.dev_name(),
+ release_channel::ReleaseChannel::Dev.dev_name(),
));
assert!(
good_db.select_row::<usize>("SELECT * FROM test2").unwrap()()
@@ -485,7 +485,7 @@ impl LogStore {
&mut self,
id: &LogStoreEntryIdentifier<'_>,
) -> Option<&Vec<SharedString>> {
- self.get_debug_adapter_state(&id)
+ self.get_debug_adapter_state(id)
.map(|state| &state.rpc_messages.initialization_sequence)
}
}
@@ -536,11 +536,11 @@ impl Render for DapLogToolbarItemView {
})
.unwrap_or_else(|| "No adapter selected".into()),
))
- .menu(move |mut window, cx| {
+ .menu(move |window, cx| {
let log_view = log_view.clone();
let menu_rows = menu_rows.clone();
let project = project.clone();
- ContextMenu::build(&mut window, cx, move |mut menu, window, _cx| {
+ ContextMenu::build(window, cx, move |mut menu, window, _cx| {
for row in menu_rows.into_iter() {
menu = menu.custom_row(move |_window, _cx| {
div()
@@ -1131,7 +1131,7 @@ impl LogStore {
project: &WeakEntity<Project>,
session_id: SessionId,
) -> Vec<SharedString> {
- self.projects.get(&project).map_or(vec![], |state| {
+ self.projects.get(project).map_or(vec![], |state| {
state
.debug_sessions
.get(&session_id)
@@ -693,7 +693,7 @@ impl DebugPanel {
)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _window, cx| {
this.pause_thread(cx);
},
@@ -719,7 +719,7 @@ impl DebugPanel {
)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _window, cx| this.continue_thread(cx),
))
.disabled(thread_status != ThreadStatus::Stopped)
@@ -742,7 +742,7 @@ impl DebugPanel {
IconButton::new("debug-step-over", IconName::ArrowRight)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _window, cx| {
this.step_over(cx);
},
@@ -768,7 +768,7 @@ impl DebugPanel {
)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _window, cx| {
this.step_in(cx);
},
@@ -791,7 +791,7 @@ impl DebugPanel {
IconButton::new("debug-step-out", IconName::ArrowUpRight)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _window, cx| {
this.step_out(cx);
},
@@ -815,7 +815,7 @@ impl DebugPanel {
IconButton::new("debug-restart", IconName::RotateCcw)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, window, cx| {
this.rerun_session(window, cx);
},
@@ -837,7 +837,7 @@ impl DebugPanel {
IconButton::new("debug-stop", IconName::Power)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _window, cx| {
if this.session().read(cx).is_building() {
this.session().update(cx, |session, cx| {
@@ -892,7 +892,7 @@ impl DebugPanel {
)
.icon_size(IconSize::Small)
.on_click(window.listener_for(
- &running_state,
+ running_state,
|this, _, _, cx| {
this.detach_client(cx);
},
@@ -1160,7 +1160,7 @@ impl DebugPanel {
workspace
.project()
.read(cx)
- .project_path_for_absolute_path(&path, cx)
+ .project_path_for_absolute_path(path, cx)
.context(
"Couldn't get project path for .zed/debug.json in active worktree",
)
@@ -413,7 +413,7 @@ impl NewProcessModal {
let Some(adapter) = self.debugger.as_ref() else {
return;
};
- let scenario = self.debug_scenario(&adapter, cx);
+ let scenario = self.debug_scenario(adapter, cx);
cx.spawn_in(window, async move |this, cx| {
let scenario = scenario.await.context("no scenario to save")?;
let worktree_id = task_contexts
@@ -659,12 +659,7 @@ impl Render for NewProcessModal {
this.mode = NewProcessMode::Attach;
if let Some(debugger) = this.debugger.as_ref() {
- Self::update_attach_picker(
- &this.attach_mode,
- &debugger,
- window,
- cx,
- );
+ Self::update_attach_picker(&this.attach_mode, debugger, window, cx);
}
this.mode_focus_handle(cx).focus(window);
cx.notify();
@@ -1083,7 +1078,7 @@ impl DebugDelegate {
.into_iter()
.map(|(scenario, context)| {
let (kind, scenario) =
- Self::get_scenario_kind(&languages, &dap_registry, scenario);
+ Self::get_scenario_kind(&languages, dap_registry, scenario);
(kind, scenario, Some(context))
})
.chain(
@@ -1100,7 +1095,7 @@ impl DebugDelegate {
.filter(|(_, scenario)| valid_adapters.contains(&scenario.adapter))
.map(|(kind, scenario)| {
let (language, scenario) =
- Self::get_scenario_kind(&languages, &dap_registry, scenario);
+ Self::get_scenario_kind(&languages, dap_registry, scenario);
(language.or(Some(kind)), scenario, None)
}),
)
@@ -341,7 +341,7 @@ impl SerializedPaneLayout {
pub(crate) fn in_order(&self) -> Vec<SerializedPaneLayout> {
let mut panes = vec![];
- Self::inner_in_order(&self, &mut panes);
+ Self::inner_in_order(self, &mut panes);
panes
}
@@ -102,7 +102,7 @@ impl Render for RunningState {
.find(|pane| pane.read(cx).is_zoomed());
let active = self.panes.panes().into_iter().next();
- let pane = if let Some(ref zoomed_pane) = zoomed_pane {
+ let pane = if let Some(zoomed_pane) = zoomed_pane {
zoomed_pane.update(cx, |pane, cx| pane.render(window, cx).into_any_element())
} else if let Some(active) = active {
self.panes
@@ -627,7 +627,7 @@ impl RunningState {
if s.starts_with("\"$ZED_") && s.ends_with('"') {
*s = s[1..s.len() - 1].to_string();
}
- if let Some(substituted) = substitute_variables_in_str(&s, context) {
+ if let Some(substituted) = substitute_variables_in_str(s, context) {
*s = substituted;
}
}
@@ -657,7 +657,7 @@ impl RunningState {
}
resolve_path(s);
- if let Some(substituted) = substitute_variables_in_str(&s, context) {
+ if let Some(substituted) = substitute_variables_in_str(s, context) {
*s = substituted;
}
}
@@ -954,7 +954,7 @@ impl RunningState {
inventory.read(cx).task_template_by_label(
buffer,
worktree_id,
- &label,
+ label,
cx,
)
})
@@ -1310,7 +1310,7 @@ impl RunningState {
let mut pane_item_status = IndexMap::from_iter(
DebuggerPaneItem::all()
.iter()
- .filter(|kind| kind.is_supported(&caps))
+ .filter(|kind| kind.is_supported(caps))
.map(|kind| (*kind, false)),
);
self.panes.panes().iter().for_each(|pane| {
@@ -1371,7 +1371,7 @@ impl RunningState {
this.serialize_layout(window, cx);
match event {
Event::Remove { .. } => {
- let _did_find_pane = this.panes.remove(&source_pane).is_ok();
+ let _did_find_pane = this.panes.remove(source_pane).is_ok();
debug_assert!(_did_find_pane);
cx.notify();
}
@@ -494,7 +494,7 @@ impl BreakpointList {
fn toggle_data_breakpoint(&mut self, id: &str, cx: &mut Context<Self>) {
if let Some(session) = &self.session {
session.update(cx, |this, cx| {
- this.toggle_data_breakpoint(&id, cx);
+ this.toggle_data_breakpoint(id, cx);
});
}
}
@@ -502,7 +502,7 @@ impl BreakpointList {
fn toggle_exception_breakpoint(&mut self, id: &str, cx: &mut Context<Self>) {
if let Some(session) = &self.session {
session.update(cx, |this, cx| {
- this.toggle_exception_breakpoint(&id, cx);
+ this.toggle_exception_breakpoint(id, cx);
});
cx.notify();
const EXCEPTION_SERIALIZATION_INTERVAL: Duration = Duration::from_secs(1);
@@ -697,7 +697,7 @@ impl ConsoleQueryBarCompletionProvider {
new_bytes: &[u8],
snapshot: &TextBufferSnapshot,
) -> Range<Anchor> {
- let buffer_offset = buffer_position.to_offset(&snapshot);
+ let buffer_offset = buffer_position.to_offset(snapshot);
let buffer_bytes = &buffer_text.as_bytes()[0..buffer_offset];
let mut prefix_len = 0;
@@ -977,7 +977,7 @@ mod tests {
&cx.buffer_text(),
snapshot.anchor_before(buffer_position),
replacement.as_bytes(),
- &snapshot,
+ snapshot,
);
cx.update_editor(|editor, _, cx| {
@@ -262,7 +262,7 @@ impl MemoryView {
cx: &mut Context<Self>,
) {
use parse_int::parse;
- let Ok(as_address) = parse::<u64>(&memory_reference) else {
+ let Ok(as_address) = parse::<u64>(memory_reference) else {
return;
};
let access_size = evaluate_name
@@ -931,7 +931,7 @@ impl Render for MemoryView {
v_flex()
.size_full()
.on_drag_move(cx.listener(|this, evt, _, _| {
- this.handle_memory_drag(&evt);
+ this.handle_memory_drag(evt);
}))
.child(self.render_memory(cx).size_full())
.children(self.open_context_menu.as_ref().map(|(menu, position, _)| {
@@ -1289,7 +1289,7 @@ impl VariableList {
}),
)
.child(self.render_variable_value(
- &entry,
+ entry,
&variable_color,
watcher.value.to_string(),
cx,
@@ -1494,7 +1494,7 @@ impl VariableList {
}),
)
.child(self.render_variable_value(
- &variable,
+ variable,
&variable_color,
dap.value.clone(),
cx,
@@ -139,7 +139,7 @@ async fn test_show_attach_modal_and_select_process(
workspace
.update(cx, |_, window, cx| {
let names =
- attach_modal.update(cx, |modal, cx| attach_modal::_process_names(&modal, cx));
+ attach_modal.update(cx, |modal, cx| attach_modal::_process_names(modal, cx));
// Initially all processes are visible.
assert_eq!(3, names.len());
attach_modal.update(cx, |this, cx| {
@@ -154,7 +154,7 @@ async fn test_show_attach_modal_and_select_process(
workspace
.update(cx, |_, _, cx| {
let names =
- attach_modal.update(cx, |modal, cx| attach_modal::_process_names(&modal, cx));
+ attach_modal.update(cx, |modal, cx| attach_modal::_process_names(modal, cx));
// Initially all processes are visible.
assert_eq!(2, names.len());
})
@@ -107,7 +107,7 @@ async fn test_debug_session_substitutes_variables_and_relativizes_paths(
let expected_other_field = if input_path.contains("$ZED_WORKTREE_ROOT") {
input_path
- .replace("$ZED_WORKTREE_ROOT", &path!("/test/worktree/path"))
+ .replace("$ZED_WORKTREE_ROOT", path!("/test/worktree/path"))
.to_owned()
} else {
input_path.to_string()
@@ -46,7 +46,7 @@ impl DiagnosticRenderer {
markdown.push_str(" (");
}
if let Some(source) = diagnostic.source.as_ref() {
- markdown.push_str(&Markdown::escape(&source));
+ markdown.push_str(&Markdown::escape(source));
}
if diagnostic.source.is_some() && diagnostic.code.is_some() {
markdown.push(' ');
@@ -306,7 +306,7 @@ impl DiagnosticBlock {
cx: &mut Context<Editor>,
) {
let snapshot = &editor.buffer().read(cx).snapshot(cx);
- let range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot);
+ let range = range.start.to_offset(snapshot)..range.end.to_offset(snapshot);
editor.unfold_ranges(&[range.start..range.end], true, false, cx);
editor.change_selections(Default::default(), window, cx, |s| {
@@ -528,7 +528,7 @@ impl ProjectDiagnosticsEditor {
lsp::DiagnosticSeverity::ERROR
};
- cx.spawn_in(window, async move |this, mut cx| {
+ cx.spawn_in(window, async move |this, cx| {
let diagnostics = buffer_snapshot
.diagnostics_in_range::<_, text::Anchor>(
Point::zero()..buffer_snapshot.max_point(),
@@ -595,7 +595,7 @@ impl ProjectDiagnosticsEditor {
b.initial_range.clone(),
DEFAULT_MULTIBUFFER_CONTEXT,
buffer_snapshot.clone(),
- &mut cx,
+ cx,
)
.await;
let i = excerpt_ranges
@@ -129,7 +129,7 @@ fn handle_frontmatter(book: &mut Book, errors: &mut HashSet<PreprocessorError>)
let Some((name, value)) = line.split_once(':') else {
errors.insert(PreprocessorError::InvalidFrontmatterLine(format!(
"{}: {}",
- chapter_breadcrumbs(&chapter),
+ chapter_breadcrumbs(chapter),
line
)));
continue;
@@ -402,11 +402,11 @@ fn handle_postprocessing() -> Result<()> {
path: &'a std::path::PathBuf,
root: &'a std::path::PathBuf,
) -> &'a std::path::Path {
- &path.strip_prefix(&root).unwrap_or(&path)
+ path.strip_prefix(&root).unwrap_or(path)
}
fn extract_title_from_page(contents: &str, pretty_path: &std::path::Path) -> String {
let title_tag_contents = &title_regex()
- .captures(&contents)
+ .captures(contents)
.with_context(|| format!("Failed to find title in {:?}", pretty_path))
.expect("Page has <title> element")[1];
let title = title_tag_contents
@@ -104,6 +104,6 @@ pub fn apply_related_actions(editor: &Entity<Editor>, window: &mut Window, cx: &
.filter_map(|buffer| buffer.read(cx).language())
.any(|language| is_c_language(language))
{
- register_action(&editor, window, switch_source_header);
+ register_action(editor, window, switch_source_header);
}
}
@@ -317,7 +317,7 @@ async fn filter_and_sort_matches(
let candidates: Arc<[StringMatchCandidate]> = completions
.iter()
.enumerate()
- .map(|(id, completion)| StringMatchCandidate::new(id, &completion.label.filter_text()))
+ .map(|(id, completion)| StringMatchCandidate::new(id, completion.label.filter_text()))
.collect();
let cancel_flag = Arc::new(AtomicBool::new(false));
let background_executor = cx.executor();
@@ -331,5 +331,5 @@ async fn filter_and_sort_matches(
background_executor,
)
.await;
- CompletionsMenu::sort_string_matches(matches, Some(query), snippet_sort_order, &completions)
+ CompletionsMenu::sort_string_matches(matches, Some(query), snippet_sort_order, completions)
}
@@ -321,7 +321,7 @@ impl CompletionsMenu {
let match_candidates = choices
.iter()
.enumerate()
- .map(|(id, completion)| StringMatchCandidate::new(id, &completion))
+ .map(|(id, completion)| StringMatchCandidate::new(id, completion))
.collect();
let entries = choices
.iter()
@@ -77,7 +77,7 @@ fn create_highlight_endpoints(
let ranges = &text_highlights.1;
let start_ix = match ranges.binary_search_by(|probe| {
- let cmp = probe.end.cmp(&start, &buffer);
+ let cmp = probe.end.cmp(&start, buffer);
if cmp.is_gt() {
cmp::Ordering::Greater
} else {
@@ -88,18 +88,18 @@ fn create_highlight_endpoints(
};
for range in &ranges[start_ix..] {
- if range.start.cmp(&end, &buffer).is_ge() {
+ if range.start.cmp(&end, buffer).is_ge() {
break;
}
highlight_endpoints.push(HighlightEndpoint {
- offset: range.start.to_offset(&buffer),
+ offset: range.start.to_offset(buffer),
is_start: true,
tag,
style,
});
highlight_endpoints.push(HighlightEndpoint {
- offset: range.end.to_offset(&buffer),
+ offset: range.end.to_offset(buffer),
is_start: false,
tag,
style,
@@ -36,8 +36,8 @@ pub fn is_invisible(c: char) -> bool {
} else if c >= '\u{7f}' {
c <= '\u{9f}'
|| (c.is_whitespace() && c != IDEOGRAPHIC_SPACE)
- || contains(c, &FORMAT)
- || contains(c, &OTHER)
+ || contains(c, FORMAT)
+ || contains(c, OTHER)
} else {
false
}
@@ -50,7 +50,7 @@ pub fn replacement(c: char) -> Option<&'static str> {
Some(C0_SYMBOLS[c as usize])
} else if c == '\x7f' {
Some(DEL)
- } else if contains(c, &PRESERVE) {
+ } else if contains(c, PRESERVE) {
None
} else {
Some("\u{2007}") // fixed width space
@@ -1461,7 +1461,7 @@ mod tests {
}
let mut prev_ix = 0;
- for boundary in line_wrapper.wrap_line(&[LineFragment::text(&line)], wrap_width) {
+ for boundary in line_wrapper.wrap_line(&[LineFragment::text(line)], wrap_width) {
wrapped_text.push_str(&line[prev_ix..boundary.ix]);
wrapped_text.push('\n');
wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize));
@@ -2379,7 +2379,7 @@ impl Editor {
pending_selection
.selection
.range()
- .includes(&range, &snapshot)
+ .includes(range, &snapshot)
})
{
return true;
@@ -3342,9 +3342,9 @@ impl Editor {
let old_cursor_position = &state.old_cursor_position;
- self.selections_did_change(true, &old_cursor_position, state.effects, window, cx);
+ self.selections_did_change(true, old_cursor_position, state.effects, window, cx);
- if self.should_open_signature_help_automatically(&old_cursor_position, cx) {
+ if self.should_open_signature_help_automatically(old_cursor_position, cx) {
self.show_signature_help(&ShowSignatureHelp, window, cx);
}
}
@@ -3764,9 +3764,9 @@ impl Editor {
ColumnarSelectionState::FromMouse {
selection_tail,
display_point,
- } => display_point.unwrap_or_else(|| selection_tail.to_display_point(&display_map)),
+ } => display_point.unwrap_or_else(|| selection_tail.to_display_point(display_map)),
ColumnarSelectionState::FromSelection { selection_tail } => {
- selection_tail.to_display_point(&display_map)
+ selection_tail.to_display_point(display_map)
}
};
@@ -6082,7 +6082,7 @@ impl Editor {
if let Some(tasks) = &tasks {
if let Some(project) = project {
task_context_task =
- Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx);
+ Self::build_tasks_context(&project, &buffer, buffer_row, tasks, cx);
}
}
@@ -6864,7 +6864,7 @@ impl Editor {
for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges {
match_ranges.extend(
regex
- .search(&buffer_snapshot, Some(search_range.clone()))
+ .search(buffer_snapshot, Some(search_range.clone()))
.await
.into_iter()
.filter_map(|match_range| {
@@ -7206,7 +7206,7 @@ impl Editor {
return Some(false);
}
let provider = self.edit_prediction_provider()?;
- if !provider.is_enabled(&buffer, buffer_position, cx) {
+ if !provider.is_enabled(buffer, buffer_position, cx) {
return Some(false);
}
let buffer = buffer.read(cx);
@@ -7966,7 +7966,7 @@ impl Editor {
let multi_buffer_anchor =
Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), breakpoint.position);
let position = multi_buffer_anchor
- .to_point(&multi_buffer_snapshot)
+ .to_point(multi_buffer_snapshot)
.to_display_point(&snapshot);
breakpoint_display_points.insert(
@@ -8859,7 +8859,7 @@ impl Editor {
}
let highlighted_edits = if let Some(edit_preview) = edit_preview.as_ref() {
- crate::edit_prediction_edit_text(&snapshot, edits, edit_preview, false, cx)
+ crate::edit_prediction_edit_text(snapshot, edits, edit_preview, false, cx)
} else {
// Fallback for providers without edit_preview
crate::edit_prediction_fallback_text(edits, cx)
@@ -9222,7 +9222,7 @@ impl Editor {
.child(div().px_1p5().child(match &prediction.completion {
EditPrediction::Move { target, snapshot } => {
use text::ToPoint as _;
- if target.text_anchor.to_point(&snapshot).row > cursor_point.row
+ if target.text_anchor.to_point(snapshot).row > cursor_point.row
{
Icon::new(IconName::ZedPredictDown)
} else {
@@ -9424,7 +9424,7 @@ impl Editor {
.gap_2()
.flex_1()
.child(
- if target.text_anchor.to_point(&snapshot).row > cursor_point.row {
+ if target.text_anchor.to_point(snapshot).row > cursor_point.row {
Icon::new(IconName::ZedPredictDown)
} else {
Icon::new(IconName::ZedPredictUp)
@@ -9440,14 +9440,14 @@ impl Editor {
snapshot,
display_mode: _,
} => {
- let first_edit_row = edits.first()?.0.start.text_anchor.to_point(&snapshot).row;
+ let first_edit_row = edits.first()?.0.start.text_anchor.to_point(snapshot).row;
let (highlighted_edits, has_more_lines) =
if let Some(edit_preview) = edit_preview.as_ref() {
- crate::edit_prediction_edit_text(&snapshot, &edits, edit_preview, true, cx)
+ crate::edit_prediction_edit_text(snapshot, edits, edit_preview, true, cx)
.first_line_preview()
} else {
- crate::edit_prediction_fallback_text(&edits, cx).first_line_preview()
+ crate::edit_prediction_fallback_text(edits, cx).first_line_preview()
};
let styled_text = gpui::StyledText::new(highlighted_edits.text)
@@ -9770,7 +9770,7 @@ impl Editor {
if let Some(choices) = &snippet.choices[snippet.active_index] {
if let Some(selection) = current_ranges.first() {
- self.show_snippet_choices(&choices, selection.clone(), cx);
+ self.show_snippet_choices(choices, selection.clone(), cx);
}
}
@@ -12284,7 +12284,7 @@ impl Editor {
let trigger_in_words =
this.show_edit_predictions_in_menu() || !had_active_edit_prediction;
- this.trigger_completion_on_input(&text, trigger_in_words, window, cx);
+ this.trigger_completion_on_input(text, trigger_in_words, window, cx);
});
}
@@ -17896,7 +17896,7 @@ impl Editor {
ranges: &[Range<Anchor>],
snapshot: &MultiBufferSnapshot,
) -> bool {
- let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot);
+ let mut hunks = self.diff_hunks_in_ranges(ranges, snapshot);
hunks.any(|hunk| hunk.status().has_secondary_hunk())
}
@@ -19042,8 +19042,8 @@ impl Editor {
buffer_ranges.last()
}?;
- let selection = text::ToPoint::to_point(&range.start, &buffer).row
- ..text::ToPoint::to_point(&range.end, &buffer).row;
+ let selection = text::ToPoint::to_point(&range.start, buffer).row
+ ..text::ToPoint::to_point(&range.end, buffer).row;
Some((
multi_buffer.buffer(buffer.remote_id()).unwrap().clone(),
selection,
@@ -20055,8 +20055,7 @@ impl Editor {
self.registered_buffers
.entry(edited_buffer.read(cx).remote_id())
.or_insert_with(|| {
- project
- .register_buffer_with_language_servers(&edited_buffer, cx)
+ project.register_buffer_with_language_servers(edited_buffer, cx)
});
});
}
@@ -21079,7 +21078,7 @@ impl Editor {
};
if let Some((workspace, path)) = workspace.as_ref().zip(path) {
let Some(task) = cx
- .update_window_entity(&workspace, |workspace, window, cx| {
+ .update_window_entity(workspace, |workspace, window, cx| {
workspace
.open_path_preview(path, None, false, false, false, window, cx)
})
@@ -21303,14 +21302,14 @@ fn process_completion_for_edit(
debug_assert!(
insert_range
.start
- .cmp(&cursor_position, &buffer_snapshot)
+ .cmp(cursor_position, &buffer_snapshot)
.is_le(),
"insert_range should start before or at cursor position"
);
debug_assert!(
replace_range
.start
- .cmp(&cursor_position, &buffer_snapshot)
+ .cmp(cursor_position, &buffer_snapshot)
.is_le(),
"replace_range should start before or at cursor position"
);
@@ -21344,7 +21343,7 @@ fn process_completion_for_edit(
LspInsertMode::ReplaceSuffix => {
if replace_range
.end
- .cmp(&cursor_position, &buffer_snapshot)
+ .cmp(cursor_position, &buffer_snapshot)
.is_gt()
{
let range_after_cursor = *cursor_position..replace_range.end;
@@ -21380,7 +21379,7 @@ fn process_completion_for_edit(
if range_to_replace
.end
- .cmp(&cursor_position, &buffer_snapshot)
+ .cmp(cursor_position, &buffer_snapshot)
.is_lt()
{
range_to_replace.end = *cursor_position;
@@ -21388,7 +21387,7 @@ fn process_completion_for_edit(
CompletionEdit {
new_text,
- replace_range: range_to_replace.to_offset(&buffer),
+ replace_range: range_to_replace.to_offset(buffer),
snippet,
}
}
@@ -22137,7 +22136,7 @@ fn snippet_completions(
snippet
.prefix
.iter()
- .map(move |prefix| StringMatchCandidate::new(ix, &prefix))
+ .map(move |prefix| StringMatchCandidate::new(ix, prefix))
})
.collect::<Vec<StringMatchCandidate>>();
@@ -22366,10 +22365,10 @@ impl SemanticsProvider for Entity<Project> {
cx: &mut App,
) -> Option<Task<Result<Vec<LocationLink>>>> {
Some(self.update(cx, |project, cx| match kind {
- GotoDefinitionKind::Symbol => project.definitions(&buffer, position, cx),
- GotoDefinitionKind::Declaration => project.declarations(&buffer, position, cx),
- GotoDefinitionKind::Type => project.type_definitions(&buffer, position, cx),
- GotoDefinitionKind::Implementation => project.implementations(&buffer, position, cx),
+ GotoDefinitionKind::Symbol => project.definitions(buffer, position, cx),
+ GotoDefinitionKind::Declaration => project.declarations(buffer, position, cx),
+ GotoDefinitionKind::Type => project.type_definitions(buffer, position, cx),
+ GotoDefinitionKind::Implementation => project.implementations(buffer, position, cx),
}))
}
@@ -23778,7 +23777,7 @@ fn all_edits_insertions_or_deletions(
let mut all_deletions = true;
for (range, new_text) in edits.iter() {
- let range_is_empty = range.to_offset(&snapshot).is_empty();
+ let range_is_empty = range.to_offset(snapshot).is_empty();
let text_is_empty = new_text.is_empty();
if range_is_empty != text_is_empty {
@@ -8393,7 +8393,7 @@ async fn test_autoindent_disabled_with_nested_language(cx: &mut TestAppContext)
buffer.set_language(Some(language), cx);
});
- cx.set_state(&r#"struct A {ˇ}"#);
+ cx.set_state(r#"struct A {ˇ}"#);
cx.update_editor(|editor, window, cx| {
editor.newline(&Default::default(), window, cx);
@@ -8405,7 +8405,7 @@ async fn test_autoindent_disabled_with_nested_language(cx: &mut TestAppContext)
}"
));
- cx.set_state(&r#"select_biased!(ˇ)"#);
+ cx.set_state(r#"select_biased!(ˇ)"#);
cx.update_editor(|editor, window, cx| {
editor.newline(&Default::default(), window, cx);
@@ -12319,7 +12319,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
let counter = Arc::new(AtomicUsize::new(0));
handle_completion_request_with_insert_and_replace(
&mut cx,
- &buffer_marked_text,
+ buffer_marked_text,
vec![(completion_text, completion_text)],
counter.clone(),
)
@@ -12333,7 +12333,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
.confirm_completion_replace(&ConfirmCompletionReplace, window, cx)
.unwrap()
});
- cx.assert_editor_state(&expected_with_replace_mode);
+ cx.assert_editor_state(expected_with_replace_mode);
handle_resolve_completion_request(&mut cx, None).await;
apply_additional_edits.await.unwrap();
@@ -12353,7 +12353,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
});
handle_completion_request_with_insert_and_replace(
&mut cx,
- &buffer_marked_text,
+ buffer_marked_text,
vec![(completion_text, completion_text)],
counter.clone(),
)
@@ -12367,7 +12367,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
.confirm_completion_insert(&ConfirmCompletionInsert, window, cx)
.unwrap()
});
- cx.assert_editor_state(&expected_with_insert_mode);
+ cx.assert_editor_state(expected_with_insert_mode);
handle_resolve_completion_request(&mut cx, None).await;
apply_additional_edits.await.unwrap();
}
@@ -13141,7 +13141,7 @@ async fn test_word_completion(cx: &mut TestAppContext) {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
assert_eq!(
- completion_menu_entries(&menu),
+ completion_menu_entries(menu),
&["first", "last"],
"When LSP server is fast to reply, no fallback word completions are used"
);
@@ -13164,7 +13164,7 @@ async fn test_word_completion(cx: &mut TestAppContext) {
cx.update_editor(|editor, _, _| {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
- assert_eq!(completion_menu_entries(&menu), &["one", "three", "two"],
+ assert_eq!(completion_menu_entries(menu), &["one", "three", "two"],
"When LSP server is slow, document words can be shown instead, if configured accordingly");
} else {
panic!("expected completion menu to be open");
@@ -13225,7 +13225,7 @@ async fn test_word_completions_do_not_duplicate_lsp_ones(cx: &mut TestAppContext
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
assert_eq!(
- completion_menu_entries(&menu),
+ completion_menu_entries(menu),
&["first", "last", "second"],
"Word completions that has the same edit as the any of the LSP ones, should not be proposed"
);
@@ -13281,7 +13281,7 @@ async fn test_word_completions_continue_on_typing(cx: &mut TestAppContext) {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
assert_eq!(
- completion_menu_entries(&menu),
+ completion_menu_entries(menu),
&["first", "last", "second"],
"`ShowWordCompletions` action should show word completions"
);
@@ -13298,7 +13298,7 @@ async fn test_word_completions_continue_on_typing(cx: &mut TestAppContext) {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
assert_eq!(
- completion_menu_entries(&menu),
+ completion_menu_entries(menu),
&["last"],
"After showing word completions, further editing should filter them and not query the LSP"
);
@@ -13337,7 +13337,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
assert_eq!(
- completion_menu_entries(&menu),
+ completion_menu_entries(menu),
&["let"],
"With no digits in the completion query, no digits should be in the word completions"
);
@@ -13362,7 +13362,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
cx.update_editor(|editor, _, _| {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
- assert_eq!(completion_menu_entries(&menu), &["33", "35f32"], "The digit is in the completion query, \
+ assert_eq!(completion_menu_entries(menu), &["33", "35f32"], "The digit is in the completion query, \
return matching words with digits (`33`, `35f32`) but exclude query duplicates (`3`)");
} else {
panic!("expected completion menu to be open");
@@ -13599,7 +13599,7 @@ async fn test_completion_page_up_down_keys(cx: &mut TestAppContext) {
cx.update_editor(|editor, _, _| {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
- assert_eq!(completion_menu_entries(&menu), &["first", "last"]);
+ assert_eq!(completion_menu_entries(menu), &["first", "last"]);
} else {
panic!("expected completion menu to be open");
}
@@ -16702,7 +16702,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut TestA
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
assert_eq!(
- completion_menu_entries(&menu),
+ completion_menu_entries(menu),
&["bg-blue", "bg-red", "bg-yellow"]
);
} else {
@@ -16715,7 +16715,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut TestA
cx.update_editor(|editor, _, _| {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
- assert_eq!(completion_menu_entries(&menu), &["bg-blue", "bg-yellow"]);
+ assert_eq!(completion_menu_entries(menu), &["bg-blue", "bg-yellow"]);
} else {
panic!("expected completion menu to be open");
}
@@ -16729,7 +16729,7 @@ async fn test_completions_in_languages_with_extra_word_characters(cx: &mut TestA
cx.update_editor(|editor, _, _| {
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref()
{
- assert_eq!(completion_menu_entries(&menu), &["bg-yellow"]);
+ assert_eq!(completion_menu_entries(menu), &["bg-yellow"]);
} else {
panic!("expected completion menu to be open");
}
@@ -17298,7 +17298,7 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) {
(buffer_2.clone(), base_text_2),
(buffer_3.clone(), base_text_3),
] {
- let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(diff_base, &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_diff(diff, cx));
@@ -17919,7 +17919,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) {
(buffer_2.clone(), file_2_old),
(buffer_3.clone(), file_3_old),
] {
- let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(diff_base, &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_diff(diff, cx));
@@ -21024,7 +21024,7 @@ async fn assert_highlighted_edits(
cx.update(|_window, cx| {
let highlighted_edits = edit_prediction_edit_text(
- &snapshot.as_singleton().unwrap().2,
+ snapshot.as_singleton().unwrap().2,
&edits,
&edit_preview,
include_deletions,
@@ -21091,7 +21091,7 @@ fn add_log_breakpoint_at_cursor(
.buffer_snapshot
.anchor_before(Point::new(cursor_position.row, 0));
- (breakpoint_position, Breakpoint::new_log(&log_message))
+ (breakpoint_position, Breakpoint::new_log(log_message))
});
editor.edit_breakpoint_at_anchor(
@@ -1162,7 +1162,7 @@ impl EditorElement {
.map_or(false, |state| state.keyboard_grace);
if mouse_over_inline_blame || mouse_over_popover {
- editor.show_blame_popover(&blame_entry, event.position, false, cx);
+ editor.show_blame_popover(blame_entry, event.position, false, cx);
} else if !keyboard_grace {
editor.hide_blame_popover(cx);
}
@@ -2818,7 +2818,7 @@ impl EditorElement {
}
let row =
- MultiBufferRow(DisplayPoint::new(display_row, 0).to_point(&snapshot).row);
+ MultiBufferRow(DisplayPoint::new(display_row, 0).to_point(snapshot).row);
if snapshot.is_line_folded(row) {
return None;
}
@@ -3312,7 +3312,7 @@ impl EditorElement {
let chunks = snapshot.highlighted_chunks(rows.clone(), true, style);
LineWithInvisibles::from_chunks(
chunks,
- &style,
+ style,
MAX_LINE_LEN,
rows.len(),
&snapshot.mode,
@@ -3393,7 +3393,7 @@ impl EditorElement {
let line_ix = align_to.row().0.checked_sub(rows.start.0);
x_position =
if let Some(layout) = line_ix.and_then(|ix| line_layouts.get(ix as usize)) {
- x_and_width(&layout)
+ x_and_width(layout)
} else {
x_and_width(&layout_line(
align_to.row(),
@@ -5549,9 +5549,9 @@ impl EditorElement {
// In singleton buffers, we select corresponding lines on the line number click, so use | -like cursor.
// In multi buffers, we open file at the line number clicked, so use a pointing hand cursor.
if is_singleton {
- window.set_cursor_style(CursorStyle::IBeam, &hitbox);
+ window.set_cursor_style(CursorStyle::IBeam, hitbox);
} else {
- window.set_cursor_style(CursorStyle::PointingHand, &hitbox);
+ window.set_cursor_style(CursorStyle::PointingHand, hitbox);
}
}
}
@@ -5570,7 +5570,7 @@ impl EditorElement {
&layout.position_map.snapshot,
line_height,
layout.gutter_hitbox.bounds,
- &hunk,
+ hunk,
);
Some((
hunk_bounds,
@@ -6092,10 +6092,10 @@ impl EditorElement {
if axis == ScrollbarAxis::Vertical {
let fast_markers =
- self.collect_fast_scrollbar_markers(layout, &scrollbar_layout, cx);
+ self.collect_fast_scrollbar_markers(layout, scrollbar_layout, cx);
// Refresh slow scrollbar markers in the background. Below, we
// paint whatever markers have already been computed.
- self.refresh_slow_scrollbar_markers(layout, &scrollbar_layout, window, cx);
+ self.refresh_slow_scrollbar_markers(layout, scrollbar_layout, window, cx);
let markers = self.editor.read(cx).scrollbar_marker_state.markers.clone();
for marker in markers.iter().chain(&fast_markers) {
@@ -6129,7 +6129,7 @@ impl EditorElement {
if any_scrollbar_dragged {
window.set_window_cursor_style(CursorStyle::Arrow);
} else {
- window.set_cursor_style(CursorStyle::Arrow, &hitbox);
+ window.set_cursor_style(CursorStyle::Arrow, hitbox);
}
}
})
@@ -9782,7 +9782,7 @@ pub fn layout_line(
let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), true, style);
LineWithInvisibles::from_chunks(
chunks,
- &style,
+ style,
MAX_LINE_LEN,
1,
&snapshot.mode,
@@ -794,7 +794,7 @@ pub(crate) async fn find_file(
) -> Option<ResolvedPath> {
project
.update(cx, |project, cx| {
- project.resolve_path_in_buffer(&candidate_file_path, buffer, cx)
+ project.resolve_path_in_buffer(candidate_file_path, buffer, cx)
})
.ok()?
.await
@@ -524,8 +524,8 @@ fn serialize_selection(
) -> proto::Selection {
proto::Selection {
id: selection.id as u64,
- start: Some(serialize_anchor(&selection.start, &buffer)),
- end: Some(serialize_anchor(&selection.end, &buffer)),
+ start: Some(serialize_anchor(&selection.start, buffer)),
+ end: Some(serialize_anchor(&selection.end, buffer)),
reversed: selection.reversed,
}
}
@@ -1010,7 +1010,7 @@ impl Item for Editor {
self.workspace = Some((workspace.weak_handle(), workspace.database_id()));
if let Some(workspace) = &workspace.weak_handle().upgrade() {
cx.subscribe(
- &workspace,
+ workspace,
|editor, _, event: &workspace::Event, _cx| match event {
workspace::Event::ModalOpened => {
editor.mouse_context_menu.take();
@@ -1296,7 +1296,7 @@ impl SerializableItem for Editor {
project
.read(cx)
.worktree_for_id(worktree_id, cx)
- .and_then(|worktree| worktree.read(cx).absolutize(&file.path()).ok())
+ .and_then(|worktree| worktree.read(cx).absolutize(file.path()).ok())
.or_else(|| {
let full_path = file.full_path(cx);
let project_path = project.read(cx).find_project_path(&full_path, cx)?;
@@ -1385,14 +1385,14 @@ impl ProjectItem for Editor {
})
{
editor.fold_ranges(
- clip_ranges(&restoration_data.folds, &snapshot),
+ clip_ranges(&restoration_data.folds, snapshot),
false,
window,
cx,
);
if !restoration_data.selections.is_empty() {
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_ranges(clip_ranges(&restoration_data.selections, &snapshot));
+ s.select_ranges(clip_ranges(&restoration_data.selections, snapshot));
});
}
let (top_row, offset) = restoration_data.scroll_position;
@@ -37,7 +37,7 @@ pub(crate) fn should_auto_close(
let text = buffer
.text_for_range(edited_range.clone())
.collect::<String>();
- let edited_range = edited_range.to_offset(&buffer);
+ let edited_range = edited_range.to_offset(buffer);
if !text.ends_with(">") {
continue;
}
@@ -207,7 +207,7 @@ impl Editor {
.entry(buffer_snapshot.remote_id())
.or_insert_with(Vec::new);
let excerpt_point_range =
- excerpt_range.context.to_point_utf16(&buffer_snapshot);
+ excerpt_range.context.to_point_utf16(buffer_snapshot);
excerpt_data.push((
excerpt_id,
buffer_snapshot.clone(),
@@ -76,7 +76,7 @@ async fn lsp_task_context(
let project_env = project
.update(cx, |project, cx| {
- project.buffer_environment(&buffer, &worktree_store, cx)
+ project.buffer_environment(buffer, &worktree_store, cx)
})
.ok()?
.await;
@@ -102,11 +102,11 @@ impl MouseContextMenu {
let display_snapshot = &editor
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
- let selection_init_range = selection_init.display_range(&display_snapshot);
+ let selection_init_range = selection_init.display_range(display_snapshot);
let selection_now_range = editor
.selections
.newest_anchor()
- .display_range(&display_snapshot);
+ .display_range(display_snapshot);
if selection_now_range == selection_init_range {
return;
}
@@ -439,17 +439,17 @@ pub fn start_of_excerpt(
};
match direction {
Direction::Prev => {
- let mut start = excerpt.start_anchor().to_display_point(&map);
+ let mut start = excerpt.start_anchor().to_display_point(map);
if start >= display_point && start.row() > DisplayRow(0) {
let Some(excerpt) = map.buffer_snapshot.excerpt_before(excerpt.id()) else {
return display_point;
};
- start = excerpt.start_anchor().to_display_point(&map);
+ start = excerpt.start_anchor().to_display_point(map);
}
start
}
Direction::Next => {
- let mut end = excerpt.end_anchor().to_display_point(&map);
+ let mut end = excerpt.end_anchor().to_display_point(map);
*end.row_mut() += 1;
map.clip_point(end, Bias::Right)
}
@@ -467,7 +467,7 @@ pub fn end_of_excerpt(
};
match direction {
Direction::Prev => {
- let mut start = excerpt.start_anchor().to_display_point(&map);
+ let mut start = excerpt.start_anchor().to_display_point(map);
if start.row() > DisplayRow(0) {
*start.row_mut() -= 1;
}
@@ -476,7 +476,7 @@ pub fn end_of_excerpt(
start
}
Direction::Next => {
- let mut end = excerpt.end_anchor().to_display_point(&map);
+ let mut end = excerpt.end_anchor().to_display_point(map);
*end.column_mut() = 0;
if end <= display_point {
*end.row_mut() += 1;
@@ -485,7 +485,7 @@ pub fn end_of_excerpt(
else {
return display_point;
};
- end = excerpt.end_anchor().to_display_point(&map);
+ end = excerpt.end_anchor().to_display_point(map);
*end.column_mut() = 0;
}
end
@@ -478,7 +478,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
}
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
- if let Some(buffer) = self.to_base(&buffer, &[], cx) {
+ if let Some(buffer) = self.to_base(buffer, &[], cx) {
self.0.supports_inlay_hints(&buffer, cx)
} else {
false
@@ -491,7 +491,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
position: text::Anchor,
cx: &mut App,
) -> Option<Task<anyhow::Result<Vec<project::DocumentHighlight>>>> {
- let buffer = self.to_base(&buffer, &[position], cx)?;
+ let buffer = self.to_base(buffer, &[position], cx)?;
self.0.document_highlights(&buffer, position, cx)
}
@@ -502,7 +502,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
kind: crate::GotoDefinitionKind,
cx: &mut App,
) -> Option<Task<anyhow::Result<Vec<project::LocationLink>>>> {
- let buffer = self.to_base(&buffer, &[position], cx)?;
+ let buffer = self.to_base(buffer, &[position], cx)?;
self.0.definitions(&buffer, position, kind, cx)
}
@@ -35,12 +35,12 @@ pub fn apply_related_actions(editor: &Entity<Editor>, window: &mut Window, cx: &
.filter_map(|buffer| buffer.read(cx).language())
.any(|language| is_rust_language(language))
{
- register_action(&editor, window, go_to_parent_module);
- register_action(&editor, window, expand_macro_recursively);
- register_action(&editor, window, open_docs);
- register_action(&editor, window, cancel_flycheck_action);
- register_action(&editor, window, run_flycheck_action);
- register_action(&editor, window, clear_flycheck_action);
+ register_action(editor, window, go_to_parent_module);
+ register_action(editor, window, expand_macro_recursively);
+ register_action(editor, window, open_docs);
+ register_action(editor, window, cancel_flycheck_action);
+ register_action(editor, window, run_flycheck_action);
+ register_action(editor, window, clear_flycheck_action);
}
}
@@ -196,7 +196,7 @@ impl Editor {
.highlight_text(&text, 0..signature.label.len())
.into_iter()
.flat_map(|(range, highlight_id)| {
- Some((range, highlight_id.style(&cx.theme().syntax())?))
+ Some((range, highlight_id.style(cx.theme().syntax())?))
});
signature.highlights =
combine_highlights(signature.highlights.clone(), highlights)
@@ -189,7 +189,7 @@ pub fn editor_content_with_blocks(editor: &Entity<Editor>, cx: &mut VisualTestCo
continue;
}
};
- let content = block_content_for_tests(&editor, custom_block.id, cx)
+ let content = block_content_for_tests(editor, custom_block.id, cx)
.expect("block content not found");
// 2: "related info 1 for diagnostic 0"
if let Some(height) = custom_block.height {
@@ -520,7 +520,7 @@ async fn judge_example(
enable_telemetry: bool,
cx: &AsyncApp,
) -> JudgeOutput {
- let judge_output = example.judge(model.clone(), &run_output, cx).await;
+ let judge_output = example.judge(model.clone(), run_output, cx).await;
if enable_telemetry {
telemetry::event!(
@@ -64,7 +64,7 @@ impl ExampleMetadata {
self.url
.split('/')
.next_back()
- .unwrap_or(&"")
+ .unwrap_or("")
.trim_end_matches(".git")
.into()
}
@@ -255,7 +255,7 @@ impl ExampleContext {
thread.update(cx, |thread, _cx| {
if let Some(tool_use) = pending_tool_use {
let mut tool_metrics = tool_metrics.lock().unwrap();
- if let Some(tool_result) = thread.tool_result(&tool_use_id) {
+ if let Some(tool_result) = thread.tool_result(tool_use_id) {
let message = if tool_result.is_error {
format!("✖︎ {}", tool_use.name)
} else {
@@ -459,8 +459,8 @@ impl ExampleInstance {
let mut output_file =
File::create(self.run_directory.join("judge.md")).expect("failed to create judge.md");
- let diff_task = self.judge_diff(model.clone(), &run_output, cx);
- let thread_task = self.judge_thread(model.clone(), &run_output, cx);
+ let diff_task = self.judge_diff(model.clone(), run_output, cx);
+ let thread_task = self.judge_thread(model.clone(), run_output, cx);
let (diff_result, thread_result) = futures::join!(diff_task, thread_task);
@@ -661,7 +661,7 @@ pub fn wait_for_lang_server(
.update(cx, |buffer, cx| {
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
- .language_servers_for_local_buffer(&buffer, cx)
+ .language_servers_for_local_buffer(buffer, cx)
.next()
.is_some()
})
@@ -693,7 +693,7 @@ pub fn wait_for_lang_server(
_ => {}
}
}),
- cx.subscribe(&project, {
+ cx.subscribe(project, {
let buffer = buffer.clone();
move |project, event, cx| match event {
project::Event::LanguageServerAdded(_, _, _) => {
@@ -838,7 +838,7 @@ fn messages_to_markdown<'a>(message_iter: impl IntoIterator<Item = &'a Message>)
for segment in &message.segments {
match segment {
MessageSegment::Text(text) => {
- messages.push_str(&text);
+ messages.push_str(text);
messages.push_str("\n\n");
}
MessageSegment::Thinking { text, signature } => {
@@ -846,7 +846,7 @@ fn messages_to_markdown<'a>(message_iter: impl IntoIterator<Item = &'a Message>)
if let Some(sig) = signature {
messages.push_str(&format!("Signature: {}\n\n", sig));
}
- messages.push_str(&text);
+ messages.push_str(text);
messages.push_str("\n");
}
MessageSegment::RedactedThinking(items) => {
@@ -878,7 +878,7 @@ pub async fn send_language_model_request(
request: LanguageModelRequest,
cx: &AsyncApp,
) -> anyhow::Result<String> {
- match model.stream_completion_text(request, &cx).await {
+ match model.stream_completion_text(request, cx).await {
Ok(mut stream) => {
let mut full_response = String::new();
while let Some(chunk_result) = stream.stream.next().await {
@@ -452,7 +452,7 @@ impl ExtensionBuilder {
let mut output = Vec::new();
let mut stack = Vec::new();
- for payload in Parser::new(0).parse_all(&input) {
+ for payload in Parser::new(0).parse_all(input) {
let payload = payload?;
// Track nesting depth, so that we don't mess with inner producer sections:
@@ -1341,7 +1341,7 @@ impl ExtensionStore {
&extension_path,
&extension.manifest,
wasm_host.clone(),
- &cx,
+ cx,
)
.await
.with_context(|| format!("Loading extension from {extension_path:?}"));
@@ -1776,7 +1776,7 @@ impl ExtensionStore {
})?;
for client in clients {
- Self::sync_extensions_over_ssh(&this, client, cx)
+ Self::sync_extensions_over_ssh(this, client, cx)
.await
.log_err();
}
@@ -175,7 +175,7 @@ impl HeadlessExtensionStore {
}
let wasm_extension: Arc<dyn Extension> =
- Arc::new(WasmExtension::load(&extension_dir, &manifest, wasm_host.clone(), &cx).await?);
+ Arc::new(WasmExtension::load(&extension_dir, &manifest, wasm_host.clone(), cx).await?);
for (language_server_id, language_server_config) in &manifest.language_servers {
for language in language_server_config.languages() {
@@ -210,7 +210,7 @@ impl FileFinder {
return;
};
if self.picker.read(cx).delegate.has_changed_selected_index {
- if !event.modified() || !init_modifiers.is_subset_of(&event) {
+ if !event.modified() || !init_modifiers.is_subset_of(event) {
self.init_modifiers = None;
window.dispatch_action(menu::Confirm.boxed_clone(), cx);
}
@@ -497,7 +497,7 @@ impl Match {
fn panel_match(&self) -> Option<&ProjectPanelOrdMatch> {
match self {
Match::History { panel_match, .. } => panel_match.as_ref(),
- Match::Search(panel_match) => Some(&panel_match),
+ Match::Search(panel_match) => Some(panel_match),
Match::CreateNew(_) => None,
}
}
@@ -537,7 +537,7 @@ impl Matches {
self.matches.binary_search_by(|m| {
// `reverse()` since if cmp_matches(a, b) == Ordering::Greater, then a is better than b.
// And we want the better entries go first.
- Self::cmp_matches(self.separate_history, currently_opened, &m, &entry).reverse()
+ Self::cmp_matches(self.separate_history, currently_opened, m, entry).reverse()
})
}
}
@@ -1082,7 +1082,7 @@ impl FileFinderDelegate {
if let Some(user_home_path) = std::env::var("HOME").ok() {
let user_home_path = user_home_path.trim();
if !user_home_path.is_empty() {
- if (&full_path).starts_with(user_home_path) {
+ if full_path.starts_with(user_home_path) {
full_path.replace_range(0..user_home_path.len(), "~");
full_path_positions.retain_mut(|pos| {
if *pos >= user_home_path.len() {
@@ -1402,7 +1402,7 @@ impl PickerDelegate for FileFinderDelegate {
cx.notify();
Task::ready(())
} else {
- let path_position = PathWithPosition::parse_str(&raw_query);
+ let path_position = PathWithPosition::parse_str(raw_query);
#[cfg(windows)]
let raw_query = raw_query.trim().to_owned().replace("/", "\\");
@@ -1614,7 +1614,7 @@ async fn test_select_current_open_file_when_no_history(cx: &mut gpui::TestAppCon
let picker = open_file_picker(&workspace, cx);
picker.update(cx, |finder, _| {
- assert_match_selection(&finder, 0, "1_qw");
+ assert_match_selection(finder, 0, "1_qw");
});
}
@@ -2623,7 +2623,7 @@ async fn open_queried_buffer(
workspace: &Entity<Workspace>,
cx: &mut gpui::VisualTestContext,
) -> Vec<FoundPath> {
- let picker = open_file_picker(&workspace, cx);
+ let picker = open_file_picker(workspace, cx);
cx.simulate_input(input);
let history_items = picker.update(cx, |finder, _| {
@@ -637,7 +637,7 @@ impl PickerDelegate for OpenPathDelegate {
FileIcons::get_folder_icon(false, cx)?
} else {
let path = path::Path::new(&candidate.path.string);
- FileIcons::get_icon(&path, cx)?
+ FileIcons::get_icon(path, cx)?
};
Some(Icon::from_path(icon).color(Color::Muted))
});
@@ -776,7 +776,7 @@ impl Fs for RealFs {
}
// Check if path is a symlink and follow the target parent
- if let Some(mut target) = self.read_link(&path).await.ok() {
+ if let Some(mut target) = self.read_link(path).await.ok() {
// Check if symlink target is relative path, if so make it absolute
if target.is_relative() {
if let Some(parent) = path.parent() {
@@ -1677,7 +1677,7 @@ impl FakeFs {
/// by mutating the head, index, and unmerged state.
pub fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, FileStatus)]) {
let workdir_path = dot_git.parent().unwrap();
- let workdir_contents = self.files_with_contents(&workdir_path);
+ let workdir_contents = self.files_with_contents(workdir_path);
self.with_git_state(dot_git, true, |state| {
state.index_contents.clear();
state.head_contents.clear();
@@ -2244,7 +2244,7 @@ impl Fs for FakeFs {
async fn open_handle(&self, path: &Path) -> Result<Arc<dyn FileHandle>> {
self.simulate_random_delay().await;
let mut state = self.state.lock();
- let inode = match state.entry(&path)? {
+ let inode = match state.entry(path)? {
FakeFsEntry::File { inode, .. } => *inode,
FakeFsEntry::Dir { inode, .. } => *inode,
_ => unreachable!(),
@@ -858,7 +858,7 @@ impl GitRepository for RealGitRepository {
let output = new_smol_command(&git_binary_path)
.current_dir(&working_directory)
.envs(env.iter())
- .args(["update-index", "--add", "--cacheinfo", "100644", &sha])
+ .args(["update-index", "--add", "--cacheinfo", "100644", sha])
.arg(path.to_unix_style())
.output()
.await?;
@@ -959,7 +959,7 @@ impl GitRepository for RealGitRepository {
Ok(working_directory) => working_directory,
Err(e) => return Task::ready(Err(e)),
};
- let args = git_status_args(&path_prefixes);
+ let args = git_status_args(path_prefixes);
log::debug!("Checking for git status in {path_prefixes:?}");
self.executor.spawn(async move {
let output = new_std_command(&git_binary_path)
@@ -1056,7 +1056,7 @@ impl GitRepository for RealGitRepository {
let (_, branch_name) = name.split_once("/").context("Unexpected branch format")?;
let revision = revision.get();
let branch_commit = revision.peel_to_commit()?;
- let mut branch = repo.branch(&branch_name, &branch_commit, false)?;
+ let mut branch = repo.branch(branch_name, &branch_commit, false)?;
branch.set_upstream(Some(&name))?;
branch
} else {
@@ -2349,7 +2349,7 @@ mod tests {
#[allow(clippy::octal_escapes)]
let input = "*\0060964da10574cd9bf06463a53bf6e0769c5c45e\0\0refs/heads/zed-patches\0refs/remotes/origin/zed-patches\0\01733187470\0generated protobuf\n";
assert_eq!(
- parse_branch_input(&input).unwrap(),
+ parse_branch_input(input).unwrap(),
vec![Branch {
is_head: true,
ref_name: "refs/heads/zed-patches".into(),
@@ -468,7 +468,7 @@ impl FromStr for GitStatus {
Some((path, status))
})
.collect::<Vec<_>>();
- entries.sort_unstable_by(|(a, _), (b, _)| a.cmp(&b));
+ entries.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
// When a file exists in HEAD, is deleted in the index, and exists again in the working copy,
// git produces two lines for it, one reading `D ` (deleted in index, unmodified in working copy)
// and the other reading `??` (untracked). Merge these two into the equivalent of `DA`.
@@ -55,7 +55,7 @@ pub fn get_host_from_git_remote_url(remote_url: &str) -> Result<String> {
}
}
- Url::parse(&remote_url)
+ Url::parse(remote_url)
.ok()
.and_then(|remote_url| remote_url.host_str().map(|host| host.to_string()))
})
@@ -292,7 +292,7 @@ mod tests {
assert_eq!(
Chromium
- .extract_pull_request(&remote, &message)
+ .extract_pull_request(&remote, message)
.unwrap()
.url
.as_str(),
@@ -474,7 +474,7 @@ mod tests {
assert_eq!(
github
- .extract_pull_request(&remote, &message)
+ .extract_pull_request(&remote, message)
.unwrap()
.url
.as_str(),
@@ -488,6 +488,6 @@ mod tests {
See the original PR, this is a fix.
"#
};
- assert_eq!(github.extract_pull_request(&remote, &message), None);
+ assert_eq!(github.extract_pull_request(&remote, message), None);
}
}
@@ -160,7 +160,7 @@ impl CommitView {
});
}
- cx.spawn(async move |this, mut cx| {
+ cx.spawn(async move |this, cx| {
for file in commit_diff.files {
let is_deleted = file.new_text.is_none();
let new_text = file.new_text.unwrap_or_default();
@@ -179,9 +179,9 @@ impl CommitView {
worktree_id,
}) as Arc<dyn language::File>;
- let buffer = build_buffer(new_text, file, &language_registry, &mut cx).await?;
+ let buffer = build_buffer(new_text, file, &language_registry, cx).await?;
let buffer_diff =
- build_buffer_diff(old_text, &buffer, &language_registry, &mut cx).await?;
+ build_buffer_diff(old_text, &buffer, &language_registry, cx).await?;
this.update(cx, |this, cx| {
this.multibuffer.update(cx, |multibuffer, cx| {
@@ -156,7 +156,7 @@ fn buffers_removed(editor: &mut Editor, removed_buffer_ids: &[BufferId], cx: &mu
.unwrap()
.buffers
.retain(|buffer_id, buffer| {
- if removed_buffer_ids.contains(&buffer_id) {
+ if removed_buffer_ids.contains(buffer_id) {
removed_block_ids.extend(buffer.block_ids.iter().map(|(_, block_id)| *block_id));
false
} else {
@@ -222,12 +222,12 @@ fn conflicts_updated(
let precedes_start = range
.context
.start
- .cmp(&conflict_range.start, &buffer_snapshot)
+ .cmp(&conflict_range.start, buffer_snapshot)
.is_le();
let follows_end = range
.context
.end
- .cmp(&conflict_range.start, &buffer_snapshot)
+ .cmp(&conflict_range.start, buffer_snapshot)
.is_ge();
precedes_start && follows_end
}) else {
@@ -268,12 +268,12 @@ fn conflicts_updated(
let precedes_start = range
.context
.start
- .cmp(&conflict.range.start, &buffer_snapshot)
+ .cmp(&conflict.range.start, buffer_snapshot)
.is_le();
let follows_end = range
.context
.end
- .cmp(&conflict.range.start, &buffer_snapshot)
+ .cmp(&conflict.range.start, buffer_snapshot)
.is_ge();
precedes_start && follows_end
}) else {
@@ -398,7 +398,7 @@ mod tests {
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
- let (workspace, mut cx) =
+ let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
let diff_view = workspace
@@ -417,7 +417,7 @@ mod tests {
// Verify initial diff
assert_state_with_diff(
&diff_view.read_with(cx, |diff_view, _| diff_view.editor.clone()),
- &mut cx,
+ cx,
&unindent(
"
- old line 1
@@ -452,7 +452,7 @@ mod tests {
cx.executor().advance_clock(RECALCULATE_DIFF_DEBOUNCE);
assert_state_with_diff(
&diff_view.read_with(cx, |diff_view, _| diff_view.editor.clone()),
- &mut cx,
+ cx,
&unindent(
"
- old line 1
@@ -487,7 +487,7 @@ mod tests {
cx.executor().advance_clock(RECALCULATE_DIFF_DEBOUNCE);
assert_state_with_diff(
&diff_view.read_with(cx, |diff_view, _| diff_view.editor.clone()),
- &mut cx,
+ cx,
&unindent(
"
ˇnew line 1
@@ -103,7 +103,7 @@ fn prompt<T>(
where
T: IntoEnumIterator + VariantNames + 'static,
{
- let rx = window.prompt(PromptLevel::Info, msg, detail, &T::VARIANTS, cx);
+ let rx = window.prompt(PromptLevel::Info, msg, detail, T::VARIANTS, cx);
cx.spawn(async move |_| Ok(T::iter().nth(rx.await?).unwrap()))
}
@@ -652,14 +652,14 @@ impl GitPanel {
if GitPanelSettings::get_global(cx).sort_by_path {
return self
.entries
- .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path))
+ .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path))
.ok();
}
if self.conflicted_count > 0 {
let conflicted_start = 1;
if let Ok(ix) = self.entries[conflicted_start..conflicted_start + self.conflicted_count]
- .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path))
+ .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path))
{
return Some(conflicted_start + ix);
}
@@ -671,7 +671,7 @@ impl GitPanel {
0
} + 1;
if let Ok(ix) = self.entries[tracked_start..tracked_start + self.tracked_count]
- .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path))
+ .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path))
{
return Some(tracked_start + ix);
}
@@ -687,7 +687,7 @@ impl GitPanel {
0
} + 1;
if let Ok(ix) = self.entries[untracked_start..untracked_start + self.new_count]
- .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(&path))
+ .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path))
{
return Some(untracked_start + ix);
}
@@ -1341,7 +1341,7 @@ impl GitPanel {
.iter()
.filter_map(|entry| entry.status_entry())
.filter(|status_entry| {
- section.contains(&status_entry, repository)
+ section.contains(status_entry, repository)
&& status_entry.staging.as_bool() != Some(goal_staged_state)
})
.map(|status_entry| status_entry.clone())
@@ -1952,7 +1952,7 @@ impl GitPanel {
thinking_allowed: false,
};
- let stream = model.stream_completion_text(request, &cx);
+ let stream = model.stream_completion_text(request, cx);
match stream.await {
Ok(mut messages) => {
if !text_empty {
@@ -4620,7 +4620,7 @@ impl editor::Addon for GitPanelAddon {
git_panel
.read(cx)
- .render_buffer_header_controls(&git_panel, &file, window, cx)
+ .render_buffer_header_controls(&git_panel, file, window, cx)
}
}
@@ -152,7 +152,7 @@ impl PickerDelegate for PickerPromptDelegate {
.all_options
.iter()
.enumerate()
- .map(|(ix, option)| StringMatchCandidate::new(ix, &option))
+ .map(|(ix, option)| StringMatchCandidate::new(ix, option))
.collect::<Vec<StringMatchCandidate>>()
});
let Some(candidates) = candidates.log_err() else {
@@ -1173,7 +1173,7 @@ impl RenderOnce for ProjectDiffEmptyState {
.child(Label::new("No Changes").color(Color::Muted))
} else {
this.when_some(self.current_branch.as_ref(), |this, branch| {
- this.child(has_branch_container(&branch))
+ this.child(has_branch_container(branch))
})
}
}),
@@ -1332,14 +1332,14 @@ fn merge_anchor_ranges<'a>(
loop {
if let Some(left_range) = left
.peek()
- .filter(|range| range.start.cmp(&next_range.end, &snapshot).is_le())
+ .filter(|range| range.start.cmp(&next_range.end, snapshot).is_le())
.cloned()
{
left.next();
next_range.end = left_range.end;
} else if let Some(right_range) = right
.peek()
- .filter(|range| range.start.cmp(&next_range.end, &snapshot).is_le())
+ .filter(|range| range.start.cmp(&next_range.end, snapshot).is_le())
.cloned()
{
right.next();
@@ -686,7 +686,7 @@ mod tests {
let project = Project::test(fs, [project_root.as_ref()], cx).await;
- let (workspace, mut cx) =
+ let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
let buffer = project
@@ -725,7 +725,7 @@ mod tests {
assert_state_with_diff(
&diff_view.read_with(cx, |diff_view, _| diff_view.diff_editor.clone()),
- &mut cx,
+ cx,
expected_diff,
);
@@ -374,7 +374,7 @@ mod windows {
shader_path,
"vs_4_1",
);
- generate_rust_binding(&const_name, &output_file, &rust_binding_path);
+ generate_rust_binding(&const_name, &output_file, rust_binding_path);
// Compile fragment shader
let output_file = format!("{}/{}_ps.h", out_dir, module);
@@ -387,7 +387,7 @@ mod windows {
shader_path,
"ps_4_1",
);
- generate_rust_binding(&const_name, &output_file, &rust_binding_path);
+ generate_rust_binding(&const_name, &output_file, rust_binding_path);
}
fn compile_shader_impl(
@@ -137,14 +137,14 @@ impl TextInput {
fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context<Self>) {
if !self.selected_range.is_empty() {
cx.write_to_clipboard(ClipboardItem::new_string(
- (&self.content[self.selected_range.clone()]).to_string(),
+ self.content[self.selected_range.clone()].to_string(),
));
}
}
fn cut(&mut self, _: &Cut, window: &mut Window, cx: &mut Context<Self>) {
if !self.selected_range.is_empty() {
cx.write_to_clipboard(ClipboardItem::new_string(
- (&self.content[self.selected_range.clone()]).to_string(),
+ self.content[self.selected_range.clone()].to_string(),
));
self.replace_text_in_range(None, "", window, cx)
}
@@ -1310,7 +1310,7 @@ impl App {
T: 'static,
{
let window_handle = window.handle;
- self.observe_release(&handle, move |entity, cx| {
+ self.observe_release(handle, move |entity, cx| {
let _ = window_handle.update(cx, |_, window, cx| on_release(entity, window, cx));
})
}
@@ -1917,7 +1917,7 @@ impl AppContext for App {
G: Global,
{
let mut g = self.global::<G>();
- callback(&g, self)
+ callback(g, self)
}
}
@@ -661,7 +661,7 @@ pub struct WeakEntity<T> {
impl<T> std::fmt::Debug for WeakEntity<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct(&type_name::<Self>())
+ f.debug_struct(type_name::<Self>())
.field("entity_id", &self.any_entity.entity_id)
.field("entity_type", &type_name::<T>())
.finish()
@@ -2785,7 +2785,7 @@ fn handle_tooltip_check_visible_and_update(
match action {
Action::None => {}
- Action::Hide => clear_active_tooltip(&active_tooltip, window),
+ Action::Hide => clear_active_tooltip(active_tooltip, window),
Action::ScheduleHide(tooltip) => {
let delayed_hide_task = window.spawn(cx, {
let active_tooltip = active_tooltip.clone();
@@ -164,7 +164,7 @@ mod conditional {
if let Some(render_inspector) = cx
.inspector_element_registry
.renderers_by_type_id
- .remove(&type_id)
+ .remove(type_id)
{
let mut element = (render_inspector)(
active_element.id.clone(),
@@ -408,7 +408,7 @@ impl DispatchTree {
keymap
.bindings_for_action(action)
.filter(|binding| {
- Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
+ Self::binding_matches_predicate_and_not_shadowed(&keymap, binding, context_stack)
})
.cloned()
.collect()
@@ -426,7 +426,7 @@ impl DispatchTree {
.bindings_for_action(action)
.rev()
.find(|binding| {
- Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
+ Self::binding_matches_predicate_and_not_shadowed(&keymap, binding, context_stack)
})
.cloned()
}
@@ -148,7 +148,7 @@ impl Keymap {
let mut pending_bindings = SmallVec::<[(BindingIndex, &KeyBinding); 1]>::new();
for (ix, binding) in self.bindings().enumerate().rev() {
- let Some(depth) = self.binding_enabled(binding, &context_stack) else {
+ let Some(depth) = self.binding_enabled(binding, context_stack) else {
continue;
};
let Some(pending) = binding.match_keystrokes(input) else {
@@ -278,7 +278,7 @@ impl PathBuilder {
options: &StrokeOptions,
) -> Result<Path<Pixels>, Error> {
let path = if let Some(dash_array) = dash_array {
- let measurements = lyon::algorithms::measure::PathMeasurements::from_path(&path, 0.01);
+ let measurements = lyon::algorithms::measure::PathMeasurements::from_path(path, 0.01);
let mut sampler = measurements
.create_sampler(path, lyon::algorithms::measure::SampleType::Normalized);
let mut builder = lyon::path::Path::builder();
@@ -1508,7 +1508,7 @@ impl ClipboardItem {
for entry in self.entries.iter() {
if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry {
- answer.push_str(&text);
+ answer.push_str(text);
any_entries = true;
}
}
@@ -642,7 +642,7 @@ pub(super) fn get_xkb_compose_state(cx: &xkb::Context) -> Option<xkb::compose::S
let mut state: Option<xkb::compose::State> = None;
for locale in locales {
if let Ok(table) =
- xkb::compose::Table::new_from_locale(&cx, &locale, xkb::compose::COMPILE_NO_FLAGS)
+ xkb::compose::Table::new_from_locale(cx, &locale, xkb::compose::COMPILE_NO_FLAGS)
{
state = Some(xkb::compose::State::new(
&table,
@@ -1145,7 +1145,7 @@ impl Dispatch<wl_seat::WlSeat, ()> for WaylandClientStatePtr {
.globals
.text_input_manager
.as_ref()
- .map(|text_input_manager| text_input_manager.get_text_input(&seat, qh, ()));
+ .map(|text_input_manager| text_input_manager.get_text_input(seat, qh, ()));
if let Some(wl_keyboard) = &state.wl_keyboard {
wl_keyboard.release();
@@ -1294,7 +1294,7 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
match key_state {
wl_keyboard::KeyState::Pressed if !keysym.is_modifier_key() => {
let mut keystroke =
- Keystroke::from_xkb(&keymap_state, state.modifiers, keycode);
+ Keystroke::from_xkb(keymap_state, state.modifiers, keycode);
if let Some(mut compose) = state.compose_state.take() {
compose.feed(keysym);
match compose.status() {
@@ -1538,12 +1538,9 @@ impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
cursor_shape_device.set_shape(serial, style.to_shape());
} else {
let scale = window.primary_output_scale();
- state.cursor.set_icon(
- &wl_pointer,
- serial,
- style.to_icon_names(),
- scale,
- );
+ state
+ .cursor
+ .set_icon(wl_pointer, serial, style.to_icon_names(), scale);
}
}
drop(state);
@@ -1580,7 +1577,7 @@ impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
if state
.keyboard_focused_window
.as_ref()
- .map_or(false, |keyboard_window| window.ptr_eq(&keyboard_window))
+ .map_or(false, |keyboard_window| window.ptr_eq(keyboard_window))
{
state.enter_token = None;
}
@@ -144,7 +144,7 @@ impl Cursor {
hot_y as i32 / scale,
);
- self.surface.attach(Some(&buffer), 0, 0);
+ self.surface.attach(Some(buffer), 0, 0);
self.surface.damage(0, 0, width as i32, height as i32);
self.surface.commit();
}
@@ -1212,7 +1212,7 @@ impl X11Client {
state = self.0.borrow_mut();
if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) {
- let scroll_delta = get_scroll_delta_and_update_state(&mut pointer, &event);
+ let scroll_delta = get_scroll_delta_and_update_state(pointer, &event);
drop(state);
if let Some(scroll_delta) = scroll_delta {
window.handle_input(PlatformInput::ScrollWheel(make_scroll_wheel_event(
@@ -1271,7 +1271,7 @@ impl X11Client {
Event::XinputDeviceChanged(event) => {
let mut state = self.0.borrow_mut();
if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) {
- reset_pointer_device_scroll_positions(&mut pointer);
+ reset_pointer_device_scroll_positions(pointer);
}
}
_ => {}
@@ -2038,7 +2038,7 @@ fn xdnd_get_supported_atom(
{
if let Some(atoms) = reply.value32() {
for atom in atoms {
- if xdnd_is_atom_supported(atom, &supported_atoms) {
+ if xdnd_is_atom_supported(atom, supported_atoms) {
return atom;
}
}
@@ -73,8 +73,8 @@ pub(crate) fn get_valuator_axis_index(
// valuator present in this event's axisvalues. Axisvalues is ordered from
// lowest valuator number to highest, so counting bits before the 1 bit for
// this valuator yields the index in axisvalues.
- if bit_is_set_in_vec(&valuator_mask, valuator_number) {
- Some(popcount_upto_bit_index(&valuator_mask, valuator_number) as usize)
+ if bit_is_set_in_vec(valuator_mask, valuator_number) {
+ Some(popcount_upto_bit_index(valuator_mask, valuator_number) as usize)
} else {
None
}
@@ -397,7 +397,7 @@ impl X11WindowState {
.display_id
.map_or(x_main_screen_index, |did| did.0 as usize);
- let visual_set = find_visuals(&xcb, x_screen_index);
+ let visual_set = find_visuals(xcb, x_screen_index);
let visual = match visual_set.transparent {
Some(visual) => visual,
@@ -604,7 +604,7 @@ impl X11WindowState {
),
)?;
- xcb_flush(&xcb);
+ xcb_flush(xcb);
let renderer = {
let raw_window = RawWindow {
@@ -664,7 +664,7 @@ impl X11WindowState {
|| "X11 DestroyWindow failed while cleaning it up after setup failure.",
xcb.destroy_window(x_window),
)?;
- xcb_flush(&xcb);
+ xcb_flush(xcb);
}
setup_result
@@ -445,14 +445,14 @@ impl MetalRenderer {
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
),
PrimitiveBatch::Quads(quads) => self.draw_quads(
quads,
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
),
PrimitiveBatch::Paths(paths) => {
command_encoder.end_encoding();
@@ -480,7 +480,7 @@ impl MetalRenderer {
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
)
} else {
false
@@ -491,7 +491,7 @@ impl MetalRenderer {
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
),
PrimitiveBatch::MonochromeSprites {
texture_id,
@@ -502,7 +502,7 @@ impl MetalRenderer {
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
),
PrimitiveBatch::PolychromeSprites {
texture_id,
@@ -513,14 +513,14 @@ impl MetalRenderer {
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
),
PrimitiveBatch::Surfaces(surfaces) => self.draw_surfaces(
surfaces,
instance_buffer,
&mut instance_offset,
viewport_size,
- &command_encoder,
+ command_encoder,
),
};
if !ok {
@@ -763,7 +763,7 @@ impl MetalRenderer {
viewport_size: Size<DevicePixels>,
command_encoder: &metal::RenderCommandEncoderRef,
) -> bool {
- let Some(ref first_path) = paths.first() else {
+ let Some(first_path) = paths.first() else {
return true;
};
@@ -371,7 +371,7 @@ impl MacPlatform {
item = NSMenuItem::alloc(nil)
.initWithTitle_action_keyEquivalent_(
- ns_string(&name),
+ ns_string(name),
selector,
ns_string(key_to_native(&keystroke.key).as_ref()),
)
@@ -383,7 +383,7 @@ impl MacPlatform {
} else {
item = NSMenuItem::alloc(nil)
.initWithTitle_action_keyEquivalent_(
- ns_string(&name),
+ ns_string(name),
selector,
ns_string(""),
)
@@ -392,7 +392,7 @@ impl MacPlatform {
} else {
item = NSMenuItem::alloc(nil)
.initWithTitle_action_keyEquivalent_(
- ns_string(&name),
+ ns_string(name),
selector,
ns_string(""),
)
@@ -412,7 +412,7 @@ impl MacPlatform {
submenu.addItem_(Self::create_menu_item(item, delegate, actions, keymap));
}
item.setSubmenu_(submenu);
- item.setTitle_(ns_string(&name));
+ item.setTitle_(ns_string(name));
item
}
MenuItem::SystemMenu(OsMenu { name, menu_type }) => {
@@ -420,7 +420,7 @@ impl MacPlatform {
let submenu = NSMenu::new(nil).autorelease();
submenu.setDelegate_(delegate);
item.setSubmenu_(submenu);
- item.setTitle_(ns_string(&name));
+ item.setTitle_(ns_string(name));
match menu_type {
SystemMenuType::Services => {
@@ -1480,9 +1480,9 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent:
if key_down_event.is_held {
if let Some(key_char) = key_down_event.keystroke.key_char.as_ref() {
- let handled = with_input_handler(&this, |input_handler| {
+ let handled = with_input_handler(this, |input_handler| {
if !input_handler.apple_press_and_hold_enabled() {
- input_handler.replace_text_in_range(None, &key_char);
+ input_handler.replace_text_in_range(None, key_char);
return YES;
}
NO
@@ -1949,7 +1949,7 @@ extern "C" fn insert_text(this: &Object, _: Sel, text: id, replacement_range: NS
let text = text.to_str();
let replacement_range = replacement_range.to_range();
with_input_handler(this, |input_handler| {
- input_handler.replace_text_in_range(replacement_range, &text)
+ input_handler.replace_text_in_range(replacement_range, text)
});
}
}
@@ -1973,7 +1973,7 @@ extern "C" fn set_marked_text(
let replacement_range = replacement_range.to_range();
let text = text.to_str();
with_input_handler(this, |input_handler| {
- input_handler.replace_and_mark_text_in_range(replacement_range, &text, selected_range)
+ input_handler.replace_and_mark_text_in_range(replacement_range, text, selected_range)
});
}
}
@@ -850,7 +850,7 @@ impl DirectWriteState {
}
let bitmap_data = if params.is_emoji {
        -   if let Ok(color) = self.rasterize_color(&params, glyph_bounds) {
+ if let Ok(color) = self.rasterize_color(params, glyph_bounds) {
color
} else {
let monochrome = self.rasterize_monochrome(params, glyph_bounds)?;
@@ -1784,7 +1784,7 @@ fn apply_font_features(
}
unsafe {
- direct_write_features.AddFontFeature(make_direct_write_feature(&tag, *value))?;
+ direct_write_features.AddFontFeature(make_direct_write_feature(tag, *value))?;
}
}
unsafe {
@@ -758,7 +758,7 @@ impl DirectXRenderPipelines {
impl DirectComposition {
pub fn new(dxgi_device: &IDXGIDevice, hwnd: HWND) -> Result<Self> {
- let comp_device = get_comp_device(&dxgi_device)?;
+ let comp_device = get_comp_device(dxgi_device)?;
let comp_target = unsafe { comp_device.CreateTargetForHwnd(hwnd, true) }?;
let comp_visual = unsafe { comp_device.CreateVisual() }?;
@@ -1144,7 +1144,7 @@ fn create_resources(
[D3D11_VIEWPORT; 1],
)> {
let (render_target, render_target_view) =
- create_render_target_and_its_view(&swap_chain, &devices.device)?;
+ create_render_target_and_its_view(swap_chain, &devices.device)?;
let (path_intermediate_texture, path_intermediate_srv) =
create_path_intermediate_texture(&devices.device, width, height)?;
let (path_intermediate_msaa_texture, path_intermediate_msaa_view) =
@@ -90,7 +90,7 @@ mod tests {
];
for handle in focus_handles.iter() {
- tab.insert(&handle);
+ tab.insert(handle);
}
assert_eq!(
tab.handles
@@ -73,7 +73,7 @@ impl Parse for Args {
(Meta::NameValue(meta), "seed") => {
seeds = vec![parse_usize_from_expr(&meta.value)? as u64]
}
- (Meta::List(list), "seeds") => seeds = parse_u64_array(&list)?,
+ (Meta::List(list), "seeds") => seeds = parse_u64_array(list)?,
(Meta::Path(_), _) => {
return Err(syn::Error::new(meta.span(), "invalid path argument"));
}
@@ -105,7 +105,7 @@ pub fn install_cli(window: &mut Window, cx: &mut Context<Workspace>) {
cx,
)
})?;
- register_zed_scheme(&cx).await.log_err();
+ register_zed_scheme(cx).await.log_err();
Ok(())
})
.detach_and_prompt_err("Error installing zed cli", window, cx, |_, _, _| None);
@@ -16,7 +16,7 @@ pub struct JujutsuStore {
impl JujutsuStore {
pub fn init_global(cx: &mut App) {
- let Some(repository) = RealJujutsuRepository::new(&Path::new(".")).ok() else {
+ let Some(repository) = RealJujutsuRepository::new(Path::new(".")).ok() else {
return;
};
@@ -716,7 +716,7 @@ impl EditPreview {
&self.applied_edits_snapshot,
&self.syntax_snapshot,
None,
- &syntax_theme,
+ syntax_theme,
);
}
@@ -727,7 +727,7 @@ impl EditPreview {
                &current_snapshot.text,
                &current_snapshot.syntax,
Some(deletion_highlight_style),
- &syntax_theme,
+ syntax_theme,
);
}
@@ -737,7 +737,7 @@ impl EditPreview {
&self.applied_edits_snapshot,
&self.syntax_snapshot,
Some(insertion_highlight_style),
- &syntax_theme,
+ syntax_theme,
);
}
@@ -749,7 +749,7 @@ impl EditPreview {
&self.applied_edits_snapshot,
&self.syntax_snapshot,
None,
- &syntax_theme,
+ syntax_theme,
);
highlighted_text.build()
@@ -1830,7 +1830,7 @@ impl Language {
impl LanguageScope {
pub fn path_suffixes(&self) -> &[String] {
- &self.language.path_suffixes()
+ self.language.path_suffixes()
}
pub fn language_name(&self) -> LanguageName {
@@ -1102,7 +1102,7 @@ impl LanguageRegistry {
use gpui::AppContext as _;
let mut state = self.state.write();
- let fake_entry = state.fake_server_entries.get_mut(&name)?;
+ let fake_entry = state.fake_server_entries.get_mut(name)?;
let (server, mut fake_server) = lsp::FakeLanguageServer::new(
server_id,
binary,
@@ -187,8 +187,8 @@ impl LanguageSettings {
let rest = available_language_servers
.iter()
.filter(|&available_language_server| {
- !disabled_language_servers.contains(&available_language_server)
- && !enabled_language_servers.contains(&available_language_server)
+ !disabled_language_servers.contains(available_language_server)
+ && !enabled_language_servers.contains(available_language_server)
})
.cloned()
.collect::<Vec<_>>();
@@ -1297,7 +1297,7 @@ fn parse_text(
) -> anyhow::Result<Tree> {
with_parser(|parser| {
let mut chunks = text.chunks_in_range(start_byte..text.len());
- parser.set_included_ranges(&ranges)?;
+ parser.set_included_ranges(ranges)?;
parser.set_language(&grammar.ts_language)?;
parser
.parse_with_options(
@@ -154,19 +154,19 @@ fn diff_internal(
input,
|old_tokens: Range<u32>, new_tokens: Range<u32>| {
old_offset += token_len(
- &input,
+ input,
&input.before[old_token_ix as usize..old_tokens.start as usize],
);
new_offset += token_len(
- &input,
+ input,
&input.after[new_token_ix as usize..new_tokens.start as usize],
);
let old_len = token_len(
- &input,
+ input,
&input.before[old_tokens.start as usize..old_tokens.end as usize],
);
let new_len = token_len(
- &input,
+ input,
&input.after[new_tokens.start as usize..new_tokens.end as usize],
);
let old_byte_range = old_offset..old_offset + old_len;
@@ -61,6 +61,6 @@ impl ExtensionLanguageProxy for LanguageServerRegistryProxy {
grammars_to_remove: &[Arc<str>],
) {
self.language_registry
- .remove_languages(&languages_to_remove, &grammars_to_remove);
+ .remove_languages(languages_to_remove, grammars_to_remove);
}
}
@@ -220,7 +220,7 @@ impl<'de> Deserialize<'de> for LanguageModelToolResultContent {
// Accept wrapped text format: { "type": "text", "text": "..." }
if let (Some(type_value), Some(text_value)) =
- (get_field(&obj, "type"), get_field(&obj, "text"))
+ (get_field(obj, "type"), get_field(obj, "text"))
{
if let Some(type_str) = type_value.as_str() {
if type_str.to_lowercase() == "text" {
@@ -255,7 +255,7 @@ impl<'de> Deserialize<'de> for LanguageModelToolResultContent {
}
// Try as direct Image (object with "source" and "size" fields)
- if let Some(image) = LanguageModelImage::from_json(&obj) {
+ if let Some(image) = LanguageModelImage::from_json(obj) {
return Ok(Self::Image(image));
}
}
@@ -272,7 +272,7 @@ impl<'de> Deserialize<'de> for LanguageModelToolResultContent {
impl LanguageModelToolResultContent {
pub fn to_str(&self) -> Option<&str> {
match self {
- Self::Text(text) => Some(&text),
+ Self::Text(text) => Some(text),
Self::Image(_) => None,
}
}
@@ -114,7 +114,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.ok();
this.update(cx, |this, cx| {
@@ -133,7 +133,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await
.ok();
@@ -212,7 +212,7 @@ impl AnthropicLanguageModelProvider {
} else {
cx.spawn(async move |cx| {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
@@ -150,7 +150,7 @@ impl State {
let credentials_provider = <dyn CredentialsProvider>::global(cx);
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(AMAZON_AWS_URL, &cx)
+ .delete_credentials(AMAZON_AWS_URL, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -174,7 +174,7 @@ impl State {
AMAZON_AWS_URL,
"Bearer",
&serde_json::to_vec(&credentials)?,
- &cx,
+ cx,
)
.await?;
this.update(cx, |this, cx| {
@@ -206,7 +206,7 @@ impl State {
(credentials, true)
} else {
let (_, credentials) = credentials_provider
- .read_credentials(AMAZON_AWS_URL, &cx)
+ .read_credentials(AMAZON_AWS_URL, cx)
.await?
.ok_or_else(|| AuthenticateError::CredentialsNotFound)?;
(
@@ -465,7 +465,7 @@ impl BedrockModel {
Result<BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>>,
> {
let Ok(runtime_client) = self
- .get_or_init_client(&cx)
+ .get_or_init_client(cx)
.cloned()
.context("Bedrock client not initialized")
else {
@@ -193,7 +193,7 @@ impl State {
fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(async move |state, cx| {
- client.sign_in_with_optional_connect(true, &cx).await?;
+ client.sign_in_with_optional_connect(true, cx).await?;
state.update(cx, |_, cx| cx.notify())
})
}
@@ -77,7 +77,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -96,7 +96,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await?;
this.update(cx, |this, cx| {
this.api_key = Some(api_key);
@@ -120,7 +120,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -110,7 +110,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -129,7 +129,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await?;
this.update(cx, |this, cx| {
this.api_key = Some(api_key);
@@ -156,7 +156,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -76,7 +76,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -95,7 +95,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await?;
this.update(cx, |this, cx| {
this.api_key = Some(api_key);
@@ -119,7 +119,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -75,7 +75,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -94,7 +94,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -119,7 +119,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -87,7 +87,7 @@ impl State {
let api_url = self.settings.api_url.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -103,7 +103,7 @@ impl State {
let api_url = self.settings.api_url.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -126,7 +126,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -112,7 +112,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -131,7 +131,7 @@ impl State {
.clone();
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -157,7 +157,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -71,7 +71,7 @@ impl State {
};
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -92,7 +92,7 @@ impl State {
};
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -119,7 +119,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -71,7 +71,7 @@ impl State {
};
cx.spawn(async move |this, cx| {
credentials_provider
- .delete_credentials(&api_url, &cx)
+ .delete_credentials(&api_url, cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -92,7 +92,7 @@ impl State {
};
cx.spawn(async move |this, cx| {
credentials_provider
- .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
.await
.log_err();
this.update(cx, |this, cx| {
@@ -119,7 +119,7 @@ impl State {
(api_key, true)
} else {
let (_, api_key) = credentials_provider
- .read_credentials(&api_url, &cx)
+ .read_credentials(&api_url, cx)
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
@@ -661,7 +661,7 @@ impl LogStore {
IoKind::StdOut => true,
IoKind::StdIn => false,
IoKind::StdErr => {
- self.add_language_server_log(language_server_id, MessageType::LOG, &message, cx);
+ self.add_language_server_log(language_server_id, MessageType::LOG, message, cx);
return Some(());
}
};
@@ -106,7 +106,7 @@ impl LspAdapter for CssLspAdapter {
.should_install_npm_package(
Self::PACKAGE_NAME,
&server_path,
- &container_dir,
+ container_dir,
VersionStrategy::Latest(version),
)
.await;
@@ -96,7 +96,7 @@ async fn stream_response_archive(
AssetKind::TarGz => extract_tar_gz(destination_path, url, response).await?,
AssetKind::Gz => extract_gz(destination_path, url, response).await?,
AssetKind::Zip => {
- util::archive::extract_zip(&destination_path, response).await?;
+ util::archive::extract_zip(destination_path, response).await?;
}
};
Ok(())
@@ -113,11 +113,11 @@ async fn stream_file_archive(
AssetKind::Gz => extract_gz(destination_path, url, file_archive).await?,
#[cfg(not(windows))]
AssetKind::Zip => {
- util::archive::extract_seekable_zip(&destination_path, file_archive).await?;
+ util::archive::extract_seekable_zip(destination_path, file_archive).await?;
}
#[cfg(windows)]
AssetKind::Zip => {
- util::archive::extract_zip(&destination_path, file_archive).await?;
+ util::archive::extract_zip(destination_path, file_archive).await?;
}
};
Ok(())
@@ -343,7 +343,7 @@ impl LspAdapter for JsonLspAdapter {
.should_install_npm_package(
Self::PACKAGE_NAME,
&server_path,
- &container_dir,
+ container_dir,
VersionStrategy::Latest(version),
)
.await;
@@ -204,7 +204,7 @@ impl LspAdapter for PythonLspAdapter {
.should_install_npm_package(
Self::SERVER_NAME.as_ref(),
&server_path,
- &container_dir,
+ container_dir,
VersionStrategy::Latest(version),
)
.await;
@@ -581,7 +581,7 @@ impl ContextProvider for RustContextProvider {
if let (Some(path), Some(stem)) = (&local_abs_path, task_variables.get(&VariableName::Stem))
{
- let fragment = test_fragment(&variables, &path, stem);
+ let fragment = test_fragment(&variables, path, stem);
variables.insert(RUST_TEST_FRAGMENT_TASK_VARIABLE, fragment);
};
if let Some(test_name) =
@@ -607,7 +607,7 @@ impl ContextProvider for RustContextProvider {
}
if let Some(path) = local_abs_path.as_ref()
&& let Some((target, manifest_path)) =
- target_info_from_abs_path(&path, project_env.as_ref()).await
+ target_info_from_abs_path(path, project_env.as_ref()).await
{
if let Some(target) = target {
variables.extend(TaskVariables::from_iter([
@@ -1570,7 +1570,7 @@ mod tests {
let found = test_fragment(
&TaskVariables::from_iter(variables.into_iter().map(|(k, v)| (k, v.to_owned()))),
path,
- &path.file_stem().unwrap().to_str().unwrap(),
+ path.file_stem().unwrap().to_str().unwrap(),
);
assert_eq!(expected, found);
}
@@ -111,7 +111,7 @@ impl LspAdapter for TailwindLspAdapter {
.should_install_npm_package(
Self::PACKAGE_NAME,
&server_path,
- &container_dir,
+ container_dir,
VersionStrategy::Latest(version),
)
.await;
@@ -587,7 +587,7 @@ impl LspAdapter for TypeScriptLspAdapter {
.should_install_npm_package(
Self::PACKAGE_NAME,
&server_path,
- &container_dir,
+ container_dir,
VersionStrategy::Latest(version.typescript_version.as_str()),
)
.await;
@@ -105,7 +105,7 @@ impl LspAdapter for YamlLspAdapter {
.should_install_npm_package(
Self::PACKAGE_NAME,
&server_path,
- &container_dir,
+ container_dir,
VersionStrategy::Latest(version),
)
.await;
@@ -421,7 +421,7 @@ impl TestServer {
track_sid: &TrackSid,
muted: bool,
) -> Result<()> {
- let claims = livekit_api::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_api::token::validate(token, &self.secret_key)?;
let room_name = claims.video.room.unwrap();
let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
let mut server_rooms = self.rooms.lock();
@@ -475,7 +475,7 @@ impl TestServer {
}
pub(crate) fn is_track_muted(&self, token: &str, track_sid: &TrackSid) -> Option<bool> {
- let claims = livekit_api::token::validate(&token, &self.secret_key).ok()?;
+ let claims = livekit_api::token::validate(token, &self.secret_key).ok()?;
let room_name = claims.video.room.unwrap();
let mut server_rooms = self.rooms.lock();
@@ -875,7 +875,7 @@ impl Element for MarkdownElement {
(CodeBlockRenderer::Custom { render, .. }, _) => {
let parent_container = render(
kind,
- &parsed_markdown,
+ parsed_markdown,
range.clone(),
metadata.clone(),
window,
@@ -247,7 +247,7 @@ pub fn parse_markdown(
events.push(event_for(
text,
range.source_range.start..range.source_range.start + prefix_len,
- &head,
+ head,
));
range.parsed = CowStr::Boxed(tail.into());
range.merged_range.start += prefix_len;
@@ -459,13 +459,13 @@ fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -
let mut max_lengths: Vec<usize> = vec![0; parsed.header.children.len()];
for (index, cell) in parsed.header.children.iter().enumerate() {
- let length = paragraph_len(&cell);
+ let length = paragraph_len(cell);
max_lengths[index] = length;
}
for row in &parsed.body {
for (index, cell) in row.children.iter().enumerate() {
- let length = paragraph_len(&cell);
+ let length = paragraph_len(cell);
if length > max_lengths[index] {
max_lengths[index] = length;
@@ -37,7 +37,7 @@ fn migrate(text: &str, patterns: MigrationPatterns, query: &Query) -> Result<Opt
let mut edits = vec![];
while let Some(mat) = matches.next() {
if let Some((_, callback)) = patterns.get(mat.pattern_index) {
- edits.extend(callback(&text, &mat, query));
+ edits.extend(callback(text, mat, query));
}
}
@@ -170,7 +170,7 @@ pub fn migrate_settings(text: &str) -> Result<Option<String>> {
pub fn migrate_edit_prediction_provider_settings(text: &str) -> Result<Option<String>> {
migrate(
- &text,
+ text,
&[(
SETTINGS_NESTED_KEY_VALUE_PATTERN,
migrations::m_2025_01_29::replace_edit_prediction_provider_setting,
@@ -293,12 +293,12 @@ mod tests {
use super::*;
fn assert_migrate_keymap(input: &str, output: Option<&str>) {
- let migrated = migrate_keymap(&input).unwrap();
+ let migrated = migrate_keymap(input).unwrap();
pretty_assertions::assert_eq!(migrated.as_deref(), output);
}
fn assert_migrate_settings(input: &str, output: Option<&str>) {
- let migrated = migrate_settings(&input).unwrap();
+ let migrated = migrate_settings(input).unwrap();
pretty_assertions::assert_eq!(migrated.as_deref(), output);
}
@@ -145,7 +145,7 @@ impl Anchor {
.map(|diff| diff.base_text())
{
if a.buffer_id == Some(base_text.remote_id()) {
- return a.bias_right(&base_text);
+ return a.bias_right(base_text);
}
}
a
@@ -212,7 +212,7 @@ impl AnchorRangeExt for Range<Anchor> {
}
fn includes(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> bool {
- self.start.cmp(&other.start, &buffer).is_le() && other.end.cmp(&self.end, &buffer).is_le()
+ self.start.cmp(&other.start, buffer).is_le() && other.end.cmp(&self.end, buffer).is_le()
}
fn overlaps(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> bool {
@@ -1686,7 +1686,7 @@ impl MultiBuffer {
cx: &mut Context<Self>,
) -> (Vec<Range<Anchor>>, bool) {
let (excerpt_ids, added_a_new_excerpt) =
- self.update_path_excerpts(path, buffer, &buffer_snapshot, new, cx);
+ self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx);
let mut result = Vec::new();
let mut ranges = ranges.into_iter();
@@ -1784,7 +1784,7 @@ impl MultiBuffer {
}
Some((
*existing_id,
- excerpt.range.context.to_point(&buffer_snapshot),
+ excerpt.range.context.to_point(buffer_snapshot),
))
} else {
None
@@ -3056,7 +3056,7 @@ impl MultiBuffer {
snapshot.has_conflict = has_conflict;
for (id, diff) in self.diffs.iter() {
- if snapshot.diffs.get(&id).is_none() {
+ if snapshot.diffs.get(id).is_none() {
snapshot.diffs.insert(*id, diff.diff.read(cx).snapshot(cx));
}
}
@@ -3177,7 +3177,7 @@ impl MultiBuffer {
&mut new_diff_transforms,
&mut end_of_current_insert,
&mut old_expanded_hunks,
- &snapshot,
+ snapshot,
change_kind,
);
@@ -3223,7 +3223,7 @@ impl MultiBuffer {
old_expanded_hunks.clear();
self.push_buffer_content_transform(
- &snapshot,
+ snapshot,
&mut new_diff_transforms,
excerpt_offset,
end_of_current_insert,
@@ -3916,8 +3916,8 @@ impl MultiBufferSnapshot {
&self,
range: Range<T>,
) -> Vec<(&BufferSnapshot, Range<usize>, ExcerptId)> {
- let start = range.start.to_offset(&self);
- let end = range.end.to_offset(&self);
+ let start = range.start.to_offset(self);
+ let end = range.end.to_offset(self);
let mut cursor = self.cursor::<usize>();
cursor.seek(&start);
@@ -3955,8 +3955,8 @@ impl MultiBufferSnapshot {
&self,
range: Range<T>,
) -> impl Iterator<Item = (&BufferSnapshot, Range<usize>, ExcerptId, Option<Anchor>)> + '_ {
- let start = range.start.to_offset(&self);
- let end = range.end.to_offset(&self);
+ let start = range.start.to_offset(self);
+ let end = range.end.to_offset(self);
let mut cursor = self.cursor::<usize>();
cursor.seek(&start);
@@ -4186,7 +4186,7 @@ impl MultiBufferSnapshot {
}
let start =
Anchor::in_buffer(excerpt.id, excerpt.buffer_id, hunk.buffer_range.start)
- .to_point(&self);
+ .to_point(self);
return Some(MultiBufferRow(start.row));
}
}
@@ -4204,7 +4204,7 @@ impl MultiBufferSnapshot {
continue;
};
let start = Anchor::in_buffer(excerpt.id, excerpt.buffer_id, hunk.buffer_range.start)
- .to_point(&self);
+ .to_point(self);
return Some(MultiBufferRow(start.row));
}
}
@@ -4455,7 +4455,7 @@ impl MultiBufferSnapshot {
let mut buffer_position = region.buffer_range.start;
buffer_position.add_assign(&overshoot);
let clipped_buffer_position =
            -   clip_buffer_position(&region.buffer, buffer_position, bias);
+ clip_buffer_position(region.buffer, buffer_position, bias);
let mut position = region.range.start;
position.add_assign(&(clipped_buffer_position - region.buffer_range.start));
position
@@ -4485,7 +4485,7 @@ impl MultiBufferSnapshot {
let buffer_start_value = region.buffer_range.start.value.unwrap();
let mut buffer_key = buffer_start_key;
buffer_key.add_assign(&(key - start_key));
            -   let buffer_value = convert_buffer_dimension(&region.buffer, buffer_key);
+ let buffer_value = convert_buffer_dimension(region.buffer, buffer_key);
let mut result = start_value;
result.add_assign(&(buffer_value - buffer_start_value));
result
@@ -4633,7 +4633,7 @@ impl MultiBufferSnapshot {
.as_str()
== **delimiter
{
- indent.push_str(&delimiter);
+ indent.push_str(delimiter);
break;
}
}
@@ -4897,8 +4897,8 @@ impl MultiBufferSnapshot {
if let Some(base_text) =
self.diffs.get(buffer_id).map(|diff| diff.base_text())
{
- if base_text.can_resolve(&diff_base_anchor) {
- let base_text_offset = diff_base_anchor.to_offset(&base_text);
+ if base_text.can_resolve(diff_base_anchor) {
+ let base_text_offset = diff_base_anchor.to_offset(base_text);
if base_text_offset >= base_text_byte_range.start
&& base_text_offset <= base_text_byte_range.end
{
@@ -6418,7 +6418,7 @@ impl MultiBufferSnapshot {
for (ix, entry) in excerpt_ids.iter().enumerate() {
if ix == 0 {
- if entry.id.cmp(&ExcerptId::min(), &self).is_le() {
+ if entry.id.cmp(&ExcerptId::min(), self).is_le() {
panic!("invalid first excerpt id {:?}", entry.id);
}
} else if entry.id <= excerpt_ids[ix - 1].id {
@@ -6648,7 +6648,7 @@ where
hunk_info,
..
} => {
- let diff = self.diffs.get(&buffer_id)?;
+ let diff = self.diffs.get(buffer_id)?;
let buffer = diff.base_text();
let mut rope_cursor = buffer.as_rope().cursor(0);
let buffer_start = rope_cursor.summary::<D>(base_text_byte_range.start);
@@ -7767,7 +7767,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
}
chunks
} else {
- let base_buffer = &self.diffs.get(&buffer_id)?.base_text();
+ let base_buffer = &self.diffs.get(buffer_id)?.base_text();
base_buffer.chunks(base_text_start..base_text_end, self.language_aware)
};
@@ -473,7 +473,7 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n";
let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let diff = cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
@@ -2265,14 +2265,14 @@ impl ReferenceMultibuffer {
}
if !excerpt.expanded_diff_hunks.iter().any(|expanded_anchor| {
- expanded_anchor.to_offset(&buffer).max(buffer_range.start)
+ expanded_anchor.to_offset(buffer).max(buffer_range.start)
== hunk_range.start.max(buffer_range.start)
}) {
log::trace!("skipping a hunk that's not marked as expanded");
continue;
}
- if !hunk.buffer_range.start.is_valid(&buffer) {
+ if !hunk.buffer_range.start.is_valid(buffer) {
log::trace!("skipping hunk with deleted start: {:?}", hunk.range);
continue;
}
@@ -2449,7 +2449,7 @@ impl ReferenceMultibuffer {
return false;
}
while let Some(hunk) = hunks.peek() {
- match hunk.buffer_range.start.cmp(&hunk_anchor, &buffer) {
+ match hunk.buffer_range.start.cmp(hunk_anchor, &buffer) {
cmp::Ordering::Less => {
hunks.next();
}
@@ -2519,8 +2519,8 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) {
let mut seen_ranges = Vec::default();
for (_, buf, range) in snapshot.excerpts() {
- let start = range.context.start.to_point(&buf);
- let end = range.context.end.to_point(&buf);
+ let start = range.context.start.to_point(buf);
+ let end = range.context.end.to_point(buf);
seen_ranges.push(start..end);
if let Some(last_end) = last_end.take() {
@@ -2739,9 +2739,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let id = buffer_handle.read(cx).remote_id();
if multibuffer.diff_for(id).is_none() {
let base_text = base_texts.get(&id).unwrap();
- let diff = cx.new(|cx| {
- BufferDiff::new_with_base_text(base_text, &buffer_handle, cx)
- });
+ let diff = cx
+ .new(|cx| BufferDiff::new_with_base_text(base_text, buffer_handle, cx));
reference.add_diff(diff.clone(), cx);
multibuffer.add_diff(diff, cx)
}
@@ -3604,7 +3603,7 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) {
offsets[ix - 1],
);
assert!(
- prev_anchor.cmp(&anchor, snapshot).is_lt(),
+ prev_anchor.cmp(anchor, snapshot).is_lt(),
"anchor({}, {bias:?}).cmp(&anchor({}, {bias:?}).is_lt()",
offsets[ix - 1],
offsets[ix],
@@ -126,17 +126,17 @@ impl<T> Default for TypedRow<T> {
impl<T> PartialOrd for TypedOffset<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- Some(self.cmp(&other))
+ Some(self.cmp(other))
}
}
impl<T> PartialOrd for TypedPoint<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- Some(self.cmp(&other))
+ Some(self.cmp(other))
}
}
impl<T> PartialOrd for TypedRow<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- Some(self.cmp(&other))
+ Some(self.cmp(other))
}
}
@@ -494,7 +494,7 @@ impl Onboarding {
window
.spawn(cx, async move |cx| {
client
- .sign_in_with_optional_connect(true, &cx)
+ .sign_in_with_optional_connect(true, cx)
.await
.notify_async_err(cx);
})
@@ -104,7 +104,7 @@ impl<const COLS: usize> Section<COLS> {
self.entries
.iter()
.enumerate()
- .map(|(index, entry)| entry.render(index_offset + index, &focus, window, cx)),
+ .map(|(index, entry)| entry.render(index_offset + index, focus, window, cx)),
)
}
}
@@ -5498,7 +5498,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5514,7 +5514,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5532,7 +5532,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5569,7 +5569,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5583,7 +5583,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5602,7 +5602,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5630,7 +5630,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5718,7 +5718,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
None,
cx,
@@ -5741,7 +5741,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
None,
cx,
@@ -5767,7 +5767,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
None,
cx,
@@ -5873,7 +5873,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5896,7 +5896,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5933,7 +5933,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -5970,7 +5970,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6073,7 +6073,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6099,7 +6099,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6123,7 +6123,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6144,7 +6144,7 @@ mod tests {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6232,7 +6232,7 @@ struct OutlineEntryExcerpt {
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6259,7 +6259,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6286,7 +6286,7 @@ outline: struct OutlineEntryExcerpt <==== selected
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6313,7 +6313,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6340,7 +6340,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6367,7 +6367,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6394,7 +6394,7 @@ outline: struct OutlineEntryExcerpt <==== selected
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6421,7 +6421,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6448,7 +6448,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6475,7 +6475,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6502,7 +6502,7 @@ outline: struct OutlineEntryExcerpt <==== selected
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6608,7 +6608,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6645,7 +6645,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6673,7 +6673,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6705,7 +6705,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6736,7 +6736,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -6864,7 +6864,7 @@ outline: struct OutlineEntryExcerpt
.render_data
.get_or_init(|| SearchData::new(
&search_entry.match_range,
- &multi_buffer_snapshot
+ multi_buffer_snapshot
))
.context_text
)
@@ -7255,7 +7255,7 @@ outline: struct OutlineEntryExcerpt
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7314,7 +7314,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7338,7 +7338,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7403,7 +7403,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7544,7 +7544,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7582,7 +7582,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7616,7 +7616,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -7648,7 +7648,7 @@ outline: fn main()"
assert_eq!(
display_entries(
&project,
- &snapshot(&outline_panel, cx),
+ &snapshot(outline_panel, cx),
&outline_panel.cached_entries,
outline_panel.selected_entry(),
cx,
@@ -368,7 +368,7 @@ impl ContextServerStore {
}
pub fn restart_server(&mut self, id: &ContextServerId, cx: &mut Context<Self>) -> Result<()> {
- if let Some(state) = self.servers.get(&id) {
+ if let Some(state) = self.servers.get(id) {
let configuration = state.configuration();
self.stop_server(&state.server().id(), cx)?;
@@ -397,7 +397,7 @@ impl ContextServerStore {
let server = server.clone();
let configuration = configuration.clone();
async move |this, cx| {
- match server.clone().start(&cx).await {
+ match server.clone().start(cx).await {
Ok(_) => {
log::info!("Started {} context server", id);
debug_assert!(server.client().is_some());
@@ -588,7 +588,7 @@ impl ContextServerStore {
for server_id in this.servers.keys() {
// All servers that are not in desired_servers should be removed from the store.
// This can happen if the user removed a server from the context server settings.
- if !configured_servers.contains_key(&server_id) {
+ if !configured_servers.contains_key(server_id) {
if disabled_servers.contains_key(&server_id.0) {
servers_to_stop.insert(server_id.clone());
} else {
@@ -317,8 +317,8 @@ impl BreakpointStore {
.iter()
.filter_map(|breakpoint| {
breakpoint.bp.bp.to_proto(
- &path,
- &breakpoint.position(),
+ path,
+ breakpoint.position(),
&breakpoint.session_state,
)
})
@@ -753,7 +753,7 @@ impl BreakpointStore {
.iter()
.map(|breakpoint| {
let position = snapshot
- .summary_for_anchor::<PointUtf16>(&breakpoint.position())
+ .summary_for_anchor::<PointUtf16>(breakpoint.position())
.row;
let breakpoint = &breakpoint.bp;
SourceBreakpoint {
@@ -215,7 +215,7 @@ impl DapStore {
dap_settings.and_then(|s| s.binary.as_ref().map(PathBuf::from));
let user_args = dap_settings.map(|s| s.args.clone());
- let delegate = self.delegate(&worktree, console, cx);
+ let delegate = self.delegate(worktree, console, cx);
let cwd: Arc<Path> = worktree.read(cx).abs_path().as_ref().into();
cx.spawn(async move |this, cx| {
@@ -902,7 +902,7 @@ impl dap::adapters::DapDelegate for DapAdapterDelegate {
}
fn worktree_root_path(&self) -> &Path {
- &self.worktree.abs_path()
+ self.worktree.abs_path()
}
fn http_client(&self) -> Arc<dyn HttpClient> {
self.http_client.clone()
@@ -187,12 +187,12 @@ impl DapLocator for CargoLocator {
.cloned();
}
let executable = {
- if let Some(ref name) = test_name.as_ref().and_then(|name| {
+ if let Some(name) = test_name.as_ref().and_then(|name| {
name.strip_prefix('$')
.map(|name| build_config.env.get(name))
.unwrap_or(Some(name))
}) {
- find_best_executable(&executables, &name).await
+ find_best_executable(&executables, name).await
} else {
None
}
@@ -1630,7 +1630,7 @@ impl Session {
+ 'static,
cx: &mut Context<Self>,
) -> Task<Option<T::Response>> {
- if !T::is_supported(&capabilities) {
+ if !T::is_supported(capabilities) {
log::warn!(
"Attempted to send a DAP request that isn't supported: {:?}",
request
@@ -1688,7 +1688,7 @@ impl Session {
self.requests
.entry((&*key.0 as &dyn Any).type_id())
.and_modify(|request_map| {
- request_map.remove(&key);
+ request_map.remove(key);
});
}
@@ -198,7 +198,7 @@ async fn load_directory_shell_environment(
);
};
- load_shell_environment(&dir, load_direnv).await
+ load_shell_environment(dir, load_direnv).await
}
Err(err) => (
None,
@@ -561,7 +561,7 @@ impl GitStore {
pub fn active_repository(&self) -> Option<Entity<Repository>> {
self.active_repo_id
.as_ref()
- .map(|id| self.repositories[&id].clone())
+ .map(|id| self.repositories[id].clone())
}
pub fn open_unstaged_diff(
@@ -1277,7 +1277,7 @@ impl GitStore {
) {
match event {
BufferStoreEvent::BufferAdded(buffer) => {
- cx.subscribe(&buffer, |this, buffer, event, cx| {
+ cx.subscribe(buffer, |this, buffer, event, cx| {
if let BufferEvent::LanguageChanged = event {
let buffer_id = buffer.read(cx).remote_id();
if let Some(diff_state) = this.diffs.get(&buffer_id) {
@@ -1295,7 +1295,7 @@ impl GitStore {
}
}
BufferStoreEvent::BufferDropped(buffer_id) => {
- self.diffs.remove(&buffer_id);
+ self.diffs.remove(buffer_id);
for diffs in self.shared_diffs.values_mut() {
diffs.remove(buffer_id);
}
@@ -1384,8 +1384,8 @@ impl GitStore {
repository.update(cx, |repository, cx| {
let repo_abs_path = &repository.work_directory_abs_path;
if changed_repos.iter().any(|update| {
- update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
- || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
+ update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
+ || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
}) {
repository.reload_buffer_diff_bases(cx);
}
@@ -1536,7 +1536,7 @@ impl GitStore {
});
if is_new {
this._subscriptions
- .push(cx.subscribe(&repo, Self::on_repository_event))
+ .push(cx.subscribe(repo, Self::on_repository_event))
}
repo.update(cx, {
@@ -2353,7 +2353,7 @@ impl GitStore {
// All paths prefixed by a given repo will constitute a continuous range.
while let Some(path) = entries.get(ix)
&& let Some(repo_path) =
- RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path)
+ RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
{
paths.push((repo_path, ix));
ix += 1;
@@ -2875,14 +2875,14 @@ impl RepositorySnapshot {
}
pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
- self.merge.conflicted_paths.contains(&repo_path)
+ self.merge.conflicted_paths.contains(repo_path)
}
pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
let had_conflict_on_last_merge_head_change =
- self.merge.conflicted_paths.contains(&repo_path);
+ self.merge.conflicted_paths.contains(repo_path);
let has_conflict_currently = self
- .status_for_path(&repo_path)
+ .status_for_path(repo_path)
.map_or(false, |entry| entry.status.is_conflicted());
had_conflict_on_last_merge_head_change || has_conflict_currently
}
@@ -211,7 +211,7 @@ impl Deref for GitEntryRef<'_> {
type Target = Entry;
fn deref(&self) -> &Self::Target {
- &self.entry
+ self.entry
}
}
@@ -224,7 +224,7 @@ impl ProjectItem for ImageItem {
path: &ProjectPath,
cx: &mut App,
) -> Option<Task<anyhow::Result<Entity<Self>>>> {
- if is_image_file(&project, &path, cx) {
+ if is_image_file(project, path, cx) {
Some(cx.spawn({
let path = path.clone();
let project = project.clone();
@@ -1165,7 +1165,7 @@ pub async fn location_link_from_lsp(
server_id: LanguageServerId,
cx: &mut AsyncApp,
) -> Result<LocationLink> {
- let (_, language_server) = language_server_for_buffer(&lsp_store, &buffer, server_id, cx)?;
+ let (_, language_server) = language_server_for_buffer(lsp_store, buffer, server_id, cx)?;
let (origin_range, target_uri, target_range) = (
link.origin_selection_range,
@@ -442,14 +442,14 @@ impl LocalLspStore {
match result {
Ok(server) => {
lsp_store
- .update(cx, |lsp_store, mut cx| {
+ .update(cx, |lsp_store, cx| {
lsp_store.insert_newly_running_language_server(
adapter,
server.clone(),
server_id,
key,
pending_workspace_folders,
- &mut cx,
+ cx,
);
})
.ok();
@@ -1927,7 +1927,7 @@ impl LocalLspStore {
if let Some(lsp_edits) = lsp_edits {
this.update(cx, |this, cx| {
this.as_local_mut().unwrap().edits_from_lsp(
- &buffer_handle,
+ buffer_handle,
lsp_edits,
language_server.server_id(),
None,
@@ -3115,7 +3115,7 @@ impl LocalLspStore {
let mut servers_to_remove = BTreeSet::default();
let mut servers_to_preserve = HashSet::default();
- for (seed, ref state) in &self.language_server_ids {
+ for (seed, state) in &self.language_server_ids {
if seed.worktree_id == id_to_remove {
servers_to_remove.insert(state.id);
} else {
@@ -3169,7 +3169,7 @@ impl LocalLspStore {
for watcher in watchers {
if let Some((worktree, literal_prefix, pattern)) =
- self.worktree_and_path_for_file_watcher(&worktrees, &watcher, cx)
+ self.worktree_and_path_for_file_watcher(&worktrees, watcher, cx)
{
worktree.update(cx, |worktree, _| {
if let Some((tree, glob)) =
@@ -4131,7 +4131,7 @@ impl LspStore {
local.registered_buffers.remove(&buffer_id);
local.buffers_opened_in_servers.remove(&buffer_id);
if let Some(file) = File::from_dyn(buffer.read(cx).file()).cloned() {
- local.unregister_old_buffer_from_language_servers(&buffer, &file, cx);
+ local.unregister_old_buffer_from_language_servers(buffer, &file, cx);
}
}
})
@@ -4453,7 +4453,7 @@ impl LspStore {
.contains(&server_status.name)
.then_some(server_id)
})
- .filter_map(|server_id| self.lsp_server_capabilities.get(&server_id))
+ .filter_map(|server_id| self.lsp_server_capabilities.get(server_id))
.any(check)
}
@@ -5419,7 +5419,7 @@ impl LspStore {
) -> Task<Result<Vec<LocationLink>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetTypeDefinitions { position };
- if !self.is_capable_for_proto_request(&buffer, &request, cx) {
+ if !self.is_capable_for_proto_request(buffer, &request, cx) {
return Task::ready(Ok(Vec::new()));
}
let request_task = upstream_client.request(proto::MultiLspQuery {
@@ -5573,7 +5573,7 @@ impl LspStore {
) -> Task<Result<Vec<Location>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetReferences { position };
- if !self.is_capable_for_proto_request(&buffer, &request, cx) {
+ if !self.is_capable_for_proto_request(buffer, &request, cx) {
return Task::ready(Ok(Vec::new()));
}
let request_task = upstream_client.request(proto::MultiLspQuery {
@@ -5755,7 +5755,7 @@ impl LspStore {
let lsp_data = self.lsp_code_lens.entry(buffer_id).or_default();
if let Some((updating_for, running_update)) = &lsp_data.update {
- if !version_queried_for.changed_since(&updating_for) {
+ if !version_queried_for.changed_since(updating_for) {
return running_update.clone();
}
}
@@ -6786,7 +6786,7 @@ impl LspStore {
let lsp_data = self.lsp_document_colors.entry(buffer_id).or_default();
if let Some((updating_for, running_update)) = &lsp_data.colors_update {
- if !version_queried_for.changed_since(&updating_for) {
+ if !version_queried_for.changed_since(updating_for) {
return Some(running_update.clone());
}
}
@@ -10057,7 +10057,7 @@ impl LspStore {
) -> Shared<Task<Option<HashMap<String, String>>>> {
if let Some(environment) = &self.as_local().map(|local| local.environment.clone()) {
environment.update(cx, |env, cx| {
- env.get_buffer_environment(&buffer, &self.worktree_store, cx)
+ env.get_buffer_environment(buffer, &self.worktree_store, cx)
})
} else {
Task::ready(None).shared()
@@ -11175,7 +11175,7 @@ impl LspStore {
let Some(local) = self.as_local() else { return };
local.prettier_store.update(cx, |prettier_store, cx| {
- prettier_store.update_prettier_settings(&worktree_handle, changes, cx)
+ prettier_store.update_prettier_settings(worktree_handle, changes, cx)
});
let worktree_id = worktree_handle.read(cx).id();
@@ -199,7 +199,7 @@ impl ManifestTree {
) {
match evt {
WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) => {
- self.root_points.remove(&worktree_id);
+ self.root_points.remove(worktree_id);
}
_ => {}
}
@@ -192,7 +192,7 @@ impl LanguageServerTree {
)
});
languages.insert(language_name.clone());
- Arc::downgrade(&node).into()
+ Arc::downgrade(node).into()
})
}
@@ -245,7 +245,7 @@ impl LanguageServerTree {
if !settings.enable_language_server {
return Default::default();
}
- let available_lsp_adapters = self.languages.lsp_adapters(&language_name);
+ let available_lsp_adapters = self.languages.lsp_adapters(language_name);
let available_language_servers = available_lsp_adapters
.iter()
.map(|lsp_adapter| lsp_adapter.name.clone())
@@ -287,7 +287,7 @@ impl LanguageServerTree {
// (e.g., native vs extension) still end up in the right order at the end, rather than
// it being based on which language server happened to be loaded in first.
self.languages.reorder_language_servers(
- &language_name,
+ language_name,
adapters_with_settings
.values()
.map(|(_, adapter)| adapter.clone())
@@ -314,7 +314,7 @@ impl LanguageServerTree {
pub(crate) fn remove_nodes(&mut self, ids: &BTreeSet<LanguageServerId>) {
for (_, servers) in &mut self.instances {
for (_, nodes) in &mut servers.roots {
- nodes.retain(|_, (node, _)| node.id.get().map_or(true, |id| !ids.contains(&id)));
+ nodes.retain(|_, (node, _)| node.id.get().map_or(true, |id| !ids.contains(id)));
}
}
}
@@ -1848,7 +1848,7 @@ impl Project {
cx: &'a mut App,
) -> Shared<Task<Option<HashMap<String, String>>>> {
self.environment.update(cx, |environment, cx| {
- environment.get_buffer_environment(&buffer, &worktree_store, cx)
+ environment.get_buffer_environment(buffer, worktree_store, cx)
})
}
@@ -2592,7 +2592,7 @@ impl Project {
cx: &mut App,
) -> OpenLspBufferHandle {
self.lsp_store.update(cx, |lsp_store, cx| {
- lsp_store.register_buffer_with_language_servers(&buffer, HashSet::default(), false, cx)
+ lsp_store.register_buffer_with_language_servers(buffer, HashSet::default(), false, cx)
})
}
@@ -4167,15 +4167,14 @@ impl Project {
})
.collect();
- cx.spawn(async move |_, mut cx| {
+ cx.spawn(async move |_, cx| {
if let Some(buffer_worktree_id) = buffer_worktree_id {
if let Some((worktree, _)) = worktrees_with_ids
.iter()
.find(|(_, id)| *id == buffer_worktree_id)
{
for candidate in candidates.iter() {
- if let Some(path) =
- Self::resolve_path_in_worktree(&worktree, candidate, &mut cx)
+ if let Some(path) = Self::resolve_path_in_worktree(worktree, candidate, cx)
{
return Some(path);
}
@@ -4187,9 +4186,7 @@ impl Project {
continue;
}
for candidate in candidates.iter() {
- if let Some(path) =
- Self::resolve_path_in_worktree(&worktree, candidate, &mut cx)
- {
+ if let Some(path) = Self::resolve_path_in_worktree(&worktree, candidate, cx) {
return Some(path);
}
}
@@ -5329,7 +5326,7 @@ impl ResolvedPath {
pub fn project_path(&self) -> Option<&ProjectPath> {
match self {
- Self::ProjectPath { project_path, .. } => Some(&project_path),
+ Self::ProjectPath { project_path, .. } => Some(project_path),
_ => None,
}
}
@@ -5399,7 +5396,7 @@ impl Completion {
_ => None,
})
.unwrap_or(DEFAULT_KIND_KEY);
- (kind_key, &self.label.filter_text())
+ (kind_key, self.label.filter_text())
}
/// Whether this completion is a snippet.
@@ -1105,7 +1105,7 @@ impl SettingsObserver {
cx: &mut Context<Self>,
) -> Task<()> {
let mut user_tasks_file_rx =
- watch_config_file(&cx.background_executor(), fs, file_path.clone());
+ watch_config_file(cx.background_executor(), fs, file_path.clone());
let user_tasks_content = cx.background_executor().block(user_tasks_file_rx.next());
let weak_entry = cx.weak_entity();
cx.spawn(async move |settings_observer, cx| {
@@ -1160,7 +1160,7 @@ impl SettingsObserver {
cx: &mut Context<Self>,
) -> Task<()> {
let mut user_tasks_file_rx =
- watch_config_file(&cx.background_executor(), fs, file_path.clone());
+ watch_config_file(cx.background_executor(), fs, file_path.clone());
let user_tasks_content = cx.background_executor().block(user_tasks_file_rx.next());
let weak_entry = cx.weak_entity();
cx.spawn(async move |settings_observer, cx| {
@@ -333,7 +333,7 @@ impl Inventory {
for locator in locators.values() {
if let Some(scenario) = locator
- .create_scenario(&task.original_task(), &task.display_label(), &adapter)
+ .create_scenario(task.original_task(), task.display_label(), &adapter)
.await
{
scenarios.push((kind, scenario));
@@ -503,7 +503,7 @@ impl ProjectPanel {
if let Some((worktree, expanded_dir_ids)) = project
.read(cx)
.worktree_for_id(*worktree_id, cx)
- .zip(this.expanded_dir_ids.get_mut(&worktree_id))
+ .zip(this.expanded_dir_ids.get_mut(worktree_id))
{
let worktree = worktree.read(cx);
@@ -3043,7 +3043,7 @@ impl ProjectPanel {
if hide_root && Some(entry.entry) == worktree.read(cx).root_entry() {
if new_entry_parent_id == Some(entry.id) {
visible_worktree_entries.push(Self::create_new_git_entry(
- &entry.entry,
+ entry.entry,
entry.git_summary,
new_entry_kind,
));
@@ -3106,7 +3106,7 @@ impl ProjectPanel {
};
if precedes_new_entry && (!hide_gitignore || !entry.is_ignored) {
visible_worktree_entries.push(Self::create_new_git_entry(
- &entry.entry,
+ entry.entry,
entry.git_summary,
new_entry_kind,
));
@@ -3503,7 +3503,7 @@ impl ProjectPanel {
let base_index = ix + entry_range.start;
for (i, entry) in visible.entries[entry_range].iter().enumerate() {
let global_index = base_index + i;
- callback(&entry, global_index, entries, window, cx);
+ callback(entry, global_index, entries, window, cx);
}
ix = end_ix;
}
@@ -4669,7 +4669,7 @@ impl ProjectPanel {
};
let (depth, difference) =
- ProjectPanel::calculate_depth_and_difference(&entry, entries_paths);
+ ProjectPanel::calculate_depth_and_difference(entry, entries_paths);
let filename = match difference {
diff if diff > 1 => entry
@@ -191,7 +191,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
.iter()
.enumerate()
.map(|(id, symbol)| {
- StringMatchCandidate::new(id, &symbol.label.filter_text())
+ StringMatchCandidate::new(id, symbol.label.filter_text())
})
.partition(|candidate| {
project
@@ -1490,7 +1490,7 @@ impl RemoteServerProjects {
.track_focus(&self.focus_handle(cx))
.id("ssh-server-list")
.overflow_y_scroll()
- .track_scroll(&scroll_handle)
+ .track_scroll(scroll_handle)
.size_full()
.child(connect_button)
.child(
@@ -730,7 +730,7 @@ impl SshRemoteClient {
cx,
);
- let multiplex_task = Self::monitor(this.downgrade(), io_task, &cx);
+ let multiplex_task = Self::monitor(this.downgrade(), io_task, cx);
if let Err(error) = client.ping(HEARTBEAT_TIMEOUT).await {
log::error!("failed to establish connection: {}", error);
@@ -918,8 +918,8 @@ impl SshRemoteClient {
}
};
- let multiplex_task = Self::monitor(this.clone(), io_task, &cx);
- client.reconnect(incoming_rx, outgoing_tx, &cx);
+ let multiplex_task = Self::monitor(this.clone(), io_task, cx);
+ client.reconnect(incoming_rx, outgoing_tx, cx);
if let Err(error) = client.resync(HEARTBEAT_TIMEOUT).await {
failed!(error, attempts, ssh_connection, delegate);
@@ -1005,8 +1005,8 @@ impl SshRemoteClient {
if missed_heartbeats != 0 {
missed_heartbeats = 0;
- let _ = this.update(cx, |this, mut cx| {
- this.handle_heartbeat_result(missed_heartbeats, &mut cx)
+ let _ = this.update(cx, |this, cx| {
+ this.handle_heartbeat_result(missed_heartbeats, cx)
})?;
}
}
@@ -1036,8 +1036,8 @@ impl SshRemoteClient {
continue;
}
- let result = this.update(cx, |this, mut cx| {
- this.handle_heartbeat_result(missed_heartbeats, &mut cx)
+ let result = this.update(cx, |this, cx| {
+ this.handle_heartbeat_result(missed_heartbeats, cx)
})?;
if result.is_break() {
return Ok(());
@@ -1214,7 +1214,7 @@ impl SshRemoteClient {
.await
.unwrap();
- connection.simulate_disconnect(&cx);
+ connection.simulate_disconnect(cx);
})
}
@@ -1523,7 +1523,7 @@ impl RemoteConnection for SshRemoteConnection {
incoming_tx,
outgoing_rx,
connection_activity_tx,
- &cx,
+ cx,
)
}
@@ -1908,8 +1908,8 @@ impl SshRemoteConnection {
"-H",
"Content-Type: application/json",
"-d",
- &body,
- &url,
+ body,
+ url,
"-o",
&tmp_path_gz.to_string(),
],
@@ -1930,8 +1930,8 @@ impl SshRemoteConnection {
"--method=GET",
"--header=Content-Type: application/json",
"--body-data",
- &body,
- &url,
+ body,
+ url,
"-O",
&tmp_path_gz.to_string(),
],
@@ -1982,7 +1982,7 @@ impl SshRemoteConnection {
tmp_path_gz,
size / 1024
);
- self.upload_file(&src_path, &tmp_path_gz)
+ self.upload_file(src_path, tmp_path_gz)
.await
.context("failed to upload server binary")?;
log::info!("uploaded remote development server in {:?}", t0.elapsed());
@@ -2654,7 +2654,7 @@ mod fake {
let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
self.server_channel
- .reconnect(incoming_rx, outgoing_tx, &self.server_cx.get(&cx));
+ .reconnect(incoming_rx, outgoing_tx, &self.server_cx.get(cx));
}
fn start_proxy(
@@ -348,7 +348,7 @@ impl HeadlessProject {
.iter()
.map(|action| action.title.to_string())
.collect(),
- level: Some(prompt_to_proto(&prompt)),
+ level: Some(prompt_to_proto(prompt)),
lsp_name: prompt.lsp_name.clone(),
message: prompt.message.clone(),
});
@@ -388,7 +388,7 @@ impl HeadlessProject {
let parent = fs.canonicalize(parent).await.map_err(|_| {
anyhow!(
proto::ErrorCode::DevServerProjectPathDoesNotExist
- .with_tag("path", &path.to_string_lossy().as_ref())
+ .with_tag("path", path.to_string_lossy().as_ref())
)
})?;
parent.join(path.file_name().unwrap())
@@ -155,7 +155,7 @@ fn init_panic_hook(session_id: String) {
log::error!(
"panic occurred: {}\nBacktrace:\n{}",
&payload,
- (&backtrace).join("\n")
+ backtrace.join("\n")
);
let panic_data = telemetry_events::Panic {
@@ -796,11 +796,8 @@ fn initialize_settings(
fs: Arc<dyn Fs>,
cx: &mut App,
) -> watch::Receiver<Option<NodeBinaryOptions>> {
- let user_settings_file_rx = watch_config_file(
- &cx.background_executor(),
- fs,
- paths::settings_file().clone(),
- );
+ let user_settings_file_rx =
+ watch_config_file(cx.background_executor(), fs, paths::settings_file().clone());
handle_settings_file_changes(user_settings_file_rx, cx, {
let session = session.clone();
@@ -575,7 +575,7 @@ impl project::ProjectItem for NotebookItem {
.with_context(|| format!("finding the absolute path of {path:?}"))?;
// todo: watch for changes to the file
- let file_content = fs.load(&abs_path.as_path()).await?;
+ let file_content = fs.load(abs_path.as_path()).await?;
let notebook = nbformat::parse_notebook(&file_content);
let notebook = match notebook {
@@ -49,7 +49,7 @@ impl Chunk {
self.chars_utf16 |= slice.chars_utf16 << base_ix;
self.newlines |= slice.newlines << base_ix;
self.tabs |= slice.tabs << base_ix;
- self.text.push_str(&slice.text);
+ self.text.push_str(slice.text);
}
#[inline(always)]
@@ -623,7 +623,7 @@ mod tests {
let text = &text[..ix];
log::info!("Chunk: {:?}", text);
- let chunk = Chunk::new(&text);
+ let chunk = Chunk::new(text);
verify_chunk(chunk.as_slice(), text);
for _ in 0..10 {
@@ -142,7 +142,7 @@ impl SearchOption {
SearchSource::Buffer => {
let focus_handle = focus_handle.clone();
button.on_click(move |_: &ClickEvent, window, cx| {
- if !focus_handle.is_focused(&window) {
+ if !focus_handle.is_focused(window) {
window.focus(&focus_handle);
}
window.dispatch_action(action.boxed_clone(), cx);
@@ -26,7 +26,7 @@ pub(super) fn render_action_button(
.on_click({
let focus_handle = focus_handle.clone();
move |_, window, cx| {
- if !focus_handle.is_focused(&window) {
+ if !focus_handle.is_focused(window) {
window.focus(&focus_handle);
}
window.dispatch_action(action.boxed_clone(), cx)
@@ -324,7 +324,7 @@ impl SummaryIndex {
) -> Vec<(Arc<Path>, Option<MTime>)> {
let entry_db_key = db_key_for_path(&entry.path);
- match digest_db.get(&txn, &entry_db_key) {
+ match digest_db.get(txn, &entry_db_key) {
Ok(opt_saved_digest) => {
// The file path is the same, but the mtime is different. (Or there was no mtime.)
// It needs updating, so add it to the backlog! Then, if the backlog is full, drain it and summarize its contents.
@@ -575,7 +575,7 @@ impl SummaryIndex {
let code_len = code.len();
cx.spawn(async move |cx| {
- let stream = model.stream_completion(request, &cx);
+ let stream = model.stream_completion(request, cx);
cx.background_spawn(async move {
let answer: String = stream
.await?
@@ -358,11 +358,11 @@ impl KeymapFile {
let action_input = items[1].clone();
let action_input_string = action_input.to_string();
(
- cx.build_action(&name, Some(action_input)),
+ cx.build_action(name, Some(action_input)),
Some(action_input_string),
)
}
- Value::String(name) => (cx.build_action(&name, None), None),
+ Value::String(name) => (cx.build_action(name, None), None),
Value::Null => (Ok(NoAction.boxed_clone()), None),
_ => {
return Err(format!(
@@ -839,7 +839,7 @@ impl KeymapFile {
if &action.0 != target_action_value {
continue;
}
- return Some((index, &keystrokes_str));
+ return Some((index, keystrokes_str));
}
}
None
@@ -270,7 +270,7 @@ impl ConflictState {
for origin in indices.iter() {
conflicts[origin.index] =
- origin.get_conflict_with(if origin == fst { &snd } else { &fst })
+ origin.get_conflict_with(if origin == fst { snd } else { fst })
}
has_user_conflicts |= fst.override_source == KeybindSource::User
@@ -673,8 +673,8 @@ impl KeymapEditor {
action_name,
action_arguments,
&actions_with_schemas,
- &action_documentation,
- &humanized_action_names,
+ action_documentation,
+ humanized_action_names,
);
let index = processed_bindings.len();
@@ -696,8 +696,8 @@ impl KeymapEditor {
action_name,
None,
&actions_with_schemas,
- &action_documentation,
- &humanized_action_names,
+ action_documentation,
+ humanized_action_names,
);
let string_match_candidate =
StringMatchCandidate::new(index, &action_information.humanized_name);
@@ -2187,7 +2187,7 @@ impl KeybindingEditorModal {
})
.transpose()?;
- cx.build_action(&self.editing_keybind.action().name, value)
+ cx.build_action(self.editing_keybind.action().name, value)
.context("Failed to validate action arguments")?;
Ok(action_arguments)
}
@@ -2862,11 +2862,8 @@ impl CompletionProvider for KeyContextCompletionProvider {
break;
}
}
- let start_anchor = buffer.anchor_before(
- buffer_position
- .to_offset(&buffer)
- .saturating_sub(count_back),
- );
+ let start_anchor =
+ buffer.anchor_before(buffer_position.to_offset(buffer).saturating_sub(count_back));
let replace_range = start_anchor..buffer_position;
gpui::Task::ready(Ok(vec![project::CompletionResponse {
completions: self
@@ -2983,14 +2980,14 @@ async fn save_keybinding_update(
let target = settings::KeybindUpdateTarget {
context: existing_context,
keystrokes: existing_keystrokes,
- action_name: &existing.action().name,
+ action_name: existing.action().name,
action_arguments: existing_args,
};
let source = settings::KeybindUpdateTarget {
context: action_mapping.context.as_ref().map(|a| &***a),
keystrokes: &action_mapping.keystrokes,
- action_name: &existing.action().name,
+ action_name: existing.action().name,
action_arguments: new_args,
};
@@ -3044,7 +3041,7 @@ async fn remove_keybinding(
target: settings::KeybindUpdateTarget {
context: existing.context().and_then(KeybindContextString::local_str),
keystrokes,
- action_name: &existing.action().name,
+ action_name: existing.action().name,
action_arguments: existing
.action()
.arguments
@@ -343,7 +343,7 @@ impl TableInteractionState {
.on_any_mouse_down(|_, _, cx| {
cx.stop_propagation();
})
- .on_scroll_wheel(Self::listener(&this, |_, _, _, cx| {
+ .on_scroll_wheel(Self::listener(this, |_, _, _, cx| {
cx.notify();
}))
.children(Scrollbar::vertical(
@@ -303,10 +303,10 @@ impl LineDiff {
self.flush_insert(old_text);
self.buffered_insert.push_str(suffix);
} else {
- self.buffered_insert.push_str(&text);
+ self.buffered_insert.push_str(text);
}
} else {
- self.buffered_insert.push_str(&text);
+ self.buffered_insert.push_str(text);
if !text.ends_with('\n') {
self.flush_insert(old_text);
}
@@ -523,7 +523,7 @@ mod tests {
apply_line_operations(old_text, &new_text, &expected_line_ops)
);
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(line_ops, expected_line_ops);
}
@@ -534,7 +534,7 @@ mod tests {
CharOperation::Keep { bytes: 5 },
CharOperation::Delete { bytes: 4 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -559,7 +559,7 @@ mod tests {
text: "\ncccc".into(),
},
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -582,7 +582,7 @@ mod tests {
CharOperation::Delete { bytes: 5 },
CharOperation::Keep { bytes: 4 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -609,7 +609,7 @@ mod tests {
},
CharOperation::Keep { bytes: 5 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -638,7 +638,7 @@ mod tests {
text: "\nEEEE".into(),
},
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -664,7 +664,7 @@ mod tests {
CharOperation::Insert { text: "A".into() },
CharOperation::Keep { bytes: 10 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -689,7 +689,7 @@ mod tests {
CharOperation::Keep { bytes: 4 },
];
let new_text = apply_char_operations(old_text, &char_ops);
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -710,7 +710,7 @@ mod tests {
CharOperation::Insert { text: "\n".into() },
CharOperation::Keep { bytes: 9 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -733,7 +733,7 @@ mod tests {
CharOperation::Delete { bytes: 1 },
CharOperation::Keep { bytes: 4 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -759,7 +759,7 @@ mod tests {
},
CharOperation::Keep { bytes: 4 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -783,7 +783,7 @@ mod tests {
CharOperation::Delete { bytes: 2 },
CharOperation::Keep { bytes: 4 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -814,7 +814,7 @@ mod tests {
},
CharOperation::Keep { bytes: 6 },
];
- let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
+ let line_ops = char_ops_to_line_ops(old_text, &char_ops);
assert_eq!(
line_ops,
vec![
@@ -131,7 +131,7 @@ mod tests {
}
"#;
let parsed: VsCodeDebugTaskFile =
- serde_json_lenient::from_str(&raw).expect("deserializing launch.json");
+ serde_json_lenient::from_str(raw).expect("deserializing launch.json");
let zed = DebugTaskFile::try_from(parsed).expect("converting to Zed debug templates");
pretty_assertions::assert_eq!(
zed,
@@ -890,15 +890,15 @@ impl Terminal {
if self.vi_mode_enabled {
match *scroll {
AlacScroll::Delta(delta) => {
- term.vi_mode_cursor = term.vi_mode_cursor.scroll(&term, delta);
+ term.vi_mode_cursor = term.vi_mode_cursor.scroll(term, delta);
}
AlacScroll::PageUp => {
let lines = term.screen_lines() as i32;
- term.vi_mode_cursor = term.vi_mode_cursor.scroll(&term, lines);
+ term.vi_mode_cursor = term.vi_mode_cursor.scroll(term, lines);
}
AlacScroll::PageDown => {
let lines = -(term.screen_lines() as i32);
- term.vi_mode_cursor = term.vi_mode_cursor.scroll(&term, lines);
+ term.vi_mode_cursor = term.vi_mode_cursor.scroll(term, lines);
}
AlacScroll::Top => {
let point = AlacPoint::new(term.topmost_line(), Column(0));
@@ -346,7 +346,7 @@ impl TerminalPanel {
pane::Event::RemovedItem { .. } => self.serialize(cx),
pane::Event::Remove { focus_on_pane } => {
let pane_count_before_removal = self.center.panes().len();
- let _removal_result = self.center.remove(&pane);
+ let _removal_result = self.center.remove(pane);
if pane_count_before_removal == 1 {
self.center.first_pane().update(cx, |pane, cx| {
pane.set_zoomed(false, cx);
@@ -1181,10 +1181,10 @@ impl Render for TerminalPanel {
registrar.size_full().child(self.center.render(
workspace.zoomed_item(),
&workspace::PaneRenderContext {
- follower_states: &&HashMap::default(),
+ follower_states: &HashMap::default(),
active_call: workspace.active_call(),
active_pane: &self.active_pane,
- app_state: &workspace.app_state(),
+ app_state: workspace.app_state(),
project: workspace.project(),
workspace: &workspace.weak_handle(),
},
@@ -1604,15 +1604,15 @@ impl Item for TerminalView {
TaskStatus::Running => (
IconName::PlayFilled,
Color::Disabled,
- TerminalView::rerun_button(&terminal_task),
+ TerminalView::rerun_button(terminal_task),
),
TaskStatus::Unknown => (
IconName::Warning,
Color::Warning,
- TerminalView::rerun_button(&terminal_task),
+ TerminalView::rerun_button(terminal_task),
),
TaskStatus::Completed { success } => {
- let rerun_button = TerminalView::rerun_button(&terminal_task);
+ let rerun_button = TerminalView::rerun_button(terminal_task);
if *success {
(IconName::Check, Color::Success, rerun_button)
@@ -478,7 +478,7 @@ impl TitleBar {
repo.branch
.as_ref()
.map(|branch| branch.name())
- .map(|name| util::truncate_and_trailoff(&name, MAX_BRANCH_NAME_LENGTH))
+ .map(|name| util::truncate_and_trailoff(name, MAX_BRANCH_NAME_LENGTH))
.or_else(|| {
repo.head_commit.as_ref().map(|commit| {
commit
@@ -617,7 +617,7 @@ impl TitleBar {
window
.spawn(cx, async move |cx| {
client
- .sign_in_with_optional_connect(true, &cx)
+ .sign_in_with_optional_connect(true, cx)
.await
.notify_async_err(cx);
})
@@ -216,7 +216,7 @@ mod uniform_list {
};
let visible_entries = &compute_indents_fn(visible_range.clone(), window, cx);
let indent_guides = compute_indent_guides(
- &visible_entries,
+ visible_entries,
visible_range.start,
includes_trailing_indent,
);
@@ -241,7 +241,7 @@ mod sticky_items {
window: &mut Window,
cx: &mut App,
) -> AnyElement {
- let indent_guides = compute_indent_guides(&indents, 0, false);
+ let indent_guides = compute_indent_guides(indents, 0, false);
self.render_from_layout(indent_guides, bounds, item_height, window, cx)
}
}
@@ -163,7 +163,7 @@ pub fn render_keystroke(
let size = size.into();
if use_text {
- let element = Key::new(keystroke_text(&keystroke, platform_style, vim_mode), color)
+ let element = Key::new(keystroke_text(keystroke, platform_style, vim_mode), color)
.size(size)
.into_any_element();
vec![element]
@@ -176,7 +176,7 @@ pub fn render_keystroke(
size,
true,
));
- elements.push(render_key(&keystroke, color, platform_style, size));
+ elements.push(render_key(keystroke, color, platform_style, size));
elements
}
}
@@ -95,7 +95,7 @@ impl VimOption {
}
}
- Self::possibilities(&prefix)
+ Self::possibilities(prefix)
.map(|possible| {
let mut options = prefix_of_options.clone();
options.push(possible);
@@ -2280,8 +2280,8 @@ fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -
}
let mut last_position = None;
for (excerpt, buffer, range) in map.buffer_snapshot.excerpts() {
- let excerpt_range = language::ToOffset::to_offset(&range.context.start, &buffer)
- ..language::ToOffset::to_offset(&range.context.end, &buffer);
+ let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer)
+ ..language::ToOffset::to_offset(&range.context.end, buffer);
if offset >= excerpt_range.start && offset <= excerpt_range.end {
let text_anchor = buffer.anchor_after(offset);
let anchor = Anchor::in_buffer(excerpt, buffer.remote_id(), text_anchor);
@@ -2882,7 +2882,7 @@ fn method_motion(
} else {
possibilities.min().unwrap_or(offset)
};
- let new_point = map.clip_point(dest.to_display_point(&map), Bias::Left);
+ let new_point = map.clip_point(dest.to_display_point(map), Bias::Left);
if new_point == display_point {
break;
}
@@ -2936,7 +2936,7 @@ fn comment_motion(
} else {
possibilities.min().unwrap_or(offset)
};
- let new_point = map.clip_point(dest.to_display_point(&map), Bias::Left);
+ let new_point = map.clip_point(dest.to_display_point(map), Bias::Left);
if new_point == display_point {
break;
}
@@ -3003,7 +3003,7 @@ fn section_motion(
possibilities.min().unwrap_or(map.buffer_snapshot.len())
};
- let new_point = map.clip_point(offset.to_display_point(&map), Bias::Left);
+ let new_point = map.clip_point(offset.to_display_point(map), Bias::Left);
if new_point == display_point {
break;
}
@@ -155,7 +155,7 @@ fn increment_decimal_string(num: &str, delta: i64) -> String {
}
fn increment_hex_string(num: &str, delta: i64) -> String {
- let result = if let Ok(val) = u64::from_str_radix(&num, 16) {
+ let result = if let Ok(val) = u64::from_str_radix(num, 16) {
val.wrapping_add_signed(delta)
} else {
u64::MAX
@@ -181,7 +181,7 @@ fn should_use_lowercase(num: &str) -> bool {
}
fn increment_binary_string(num: &str, delta: i64) -> String {
- let result = if let Ok(val) = u64::from_str_radix(&num, 2) {
+ let result = if let Ok(val) = u64::from_str_radix(num, 2) {
val.wrapping_add_signed(delta)
} else {
u64::MAX
@@ -549,7 +549,7 @@ mod test {
cx.set_neovim_option("nowrap").await;
let content = "ˇ01234567890123456789";
- cx.set_shared_state(&content).await;
+ cx.set_shared_state(content).await;
cx.simulate_shared_keystrokes("z shift-l").await;
cx.shared_state().await.assert_eq("012345ˇ67890123456789");
@@ -560,7 +560,7 @@ mod test {
cx.shared_state().await.assert_eq("012345ˇ67890123456789");
let content = "ˇ01234567890123456789";
- cx.set_shared_state(&content).await;
+ cx.set_shared_state(content).await;
cx.simulate_shared_keystrokes("z l").await;
cx.shared_state().await.assert_eq("0ˇ1234567890123456789");
@@ -540,7 +540,7 @@ impl MarksState {
cx: &mut Context<Self>,
) {
let buffer = multibuffer.read(cx).as_singleton();
- let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(&b, cx));
+ let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx));
let Some(abs_path) = abs_path else {
self.multibuffer_marks
@@ -606,7 +606,7 @@ impl MarksState {
match target? {
MarkLocation::Buffer(entity_id) => {
- let anchors = self.multibuffer_marks.get(&entity_id)?;
+ let anchors = self.multibuffer_marks.get(entity_id)?;
return Some(Mark::Buffer(*entity_id, anchors.get(name)?.clone()));
}
MarkLocation::Path(path) => {
@@ -636,7 +636,7 @@ impl MarksState {
match target {
MarkLocation::Buffer(entity_id) => {
self.multibuffer_marks
- .get_mut(&entity_id)
+ .get_mut(entity_id)
.map(|m| m.remove(&mark_name.clone()));
return;
}
@@ -1042,7 +1042,7 @@ impl Operator {
} => format!("^K{}", make_visible(&first_char.to_string())),
Operator::Literal {
prefix: Some(prefix),
- } => format!("^V{}", make_visible(&prefix)),
+ } => format!("^V{}", make_visible(prefix)),
Operator::AutoIndent => "=".to_string(),
Operator::ShellCommand => "=".to_string(),
_ => self.id().to_string(),
@@ -67,7 +67,7 @@ impl NeovimConnection {
// Ensure we don't create neovim connections in parallel
let _lock = NEOVIM_LOCK.lock();
let (nvim, join_handle, child) = new_child_cmd(
- &mut Command::new("nvim")
+ Command::new("nvim")
.arg("--embed")
.arg("--clean")
// disable swap (otherwise after about 1000 test runs you run out of swap file names)
@@ -161,7 +161,7 @@ impl NeovimConnection {
#[cfg(feature = "neovim")]
pub async fn set_state(&mut self, marked_text: &str) {
- let (text, selections) = parse_state(&marked_text);
+ let (text, selections) = parse_state(marked_text);
let nvim_buffer = self
.nvim
@@ -265,7 +265,7 @@ pub fn init(cx: &mut App) {
workspace.register_action(|workspace, _: &MaximizePane, window, cx| {
let pane = workspace.active_pane();
- let Some(size) = workspace.bounding_box_for_pane(&pane) else {
+ let Some(size) = workspace.bounding_box_for_pane(pane) else {
return;
};
@@ -1599,7 +1599,7 @@ impl Vim {
second_char,
smartcase: VimSettings::get_global(cx).use_smartcase_find,
};
- Vim::globals(cx).last_find = Some((&sneak).clone());
+ Vim::globals(cx).last_find = Some(sneak.clone());
self.motion(sneak, window, cx)
}
} else {
@@ -1616,7 +1616,7 @@ impl Vim {
second_char,
smartcase: VimSettings::get_global(cx).use_smartcase_find,
};
- Vim::globals(cx).last_find = Some((&sneak).clone());
+ Vim::globals(cx).last_find = Some(sneak.clone());
self.motion(sneak, window, cx)
}
} else {
@@ -414,7 +414,7 @@ impl Vim {
);
}
- let original_point = selection.tail().to_point(&map);
+ let original_point = selection.tail().to_point(map);
if let Some(range) = object.range(map, mut_selection, around, count) {
if !range.is_empty() {
@@ -1038,7 +1038,7 @@ where
{
fn detach_and_notify_err(self, window: &mut Window, cx: &mut App) {
window
- .spawn(cx, async move |mut cx| self.await.notify_async_err(&mut cx))
+ .spawn(cx, async move |cx| self.await.notify_async_err(cx))
.detach();
}
}
@@ -1627,8 +1627,7 @@ impl Pane {
items_to_close
.iter()
.filter(|item| {
- item.is_dirty(cx)
- && !Self::skip_save_on_close(item.as_ref(), &workspace, cx)
+ item.is_dirty(cx) && !Self::skip_save_on_close(item.as_ref(), workspace, cx)
})
.map(|item| item.boxed_clone())
.collect::<Vec<_>>()
@@ -1657,7 +1656,7 @@ impl Pane {
let mut should_save = true;
if save_intent == SaveIntent::Close {
workspace.update(cx, |workspace, cx| {
- if Self::skip_save_on_close(item_to_close.as_ref(), &workspace, cx) {
+ if Self::skip_save_on_close(item_to_close.as_ref(), workspace, cx) {
should_save = false;
}
})?;
@@ -647,7 +647,7 @@ impl ProjectItemRegistry {
.build_project_item_for_path_fns
.iter()
.rev()
- .find_map(|open_project_item| open_project_item(&project, &path, window, cx))
+ .find_map(|open_project_item| open_project_item(project, path, window, cx))
else {
return Task::ready(Err(anyhow!("cannot open file {:?}", path.path)));
};
@@ -2431,7 +2431,7 @@ impl Workspace {
);
window.prompt(
PromptLevel::Warning,
- &"Do you want to save all changes in the following files?",
+ "Do you want to save all changes in the following files?",
Some(&detail),
&["Save all", "Discard all", "Cancel"],
cx,
@@ -2767,9 +2767,9 @@ impl Workspace {
let item = pane.read(cx).active_item();
let pane = pane.downgrade();
- window.spawn(cx, async move |mut cx| {
+ window.spawn(cx, async move |cx| {
if let Some(item) = item {
- Pane::save_item(project, &pane, item.as_ref(), save_intent, &mut cx)
+ Pane::save_item(project, &pane, item.as_ref(), save_intent, cx)
.await
.map(|_| ())
} else {
@@ -3889,14 +3889,14 @@ impl Workspace {
pane.track_alternate_file_items();
});
if *local {
- self.unfollow_in_pane(&pane, window, cx);
+ self.unfollow_in_pane(pane, window, cx);
}
serialize_workspace = *focus_changed || pane != self.active_pane();
if pane == self.active_pane() {
self.active_item_path_changed(window, cx);
self.update_active_view_for_followers(window, cx);
} else if *local {
- self.set_active_pane(&pane, window, cx);
+ self.set_active_pane(pane, window, cx);
}
}
pane::Event::UserSavedItem { item, save_intent } => {
@@ -7182,9 +7182,9 @@ pub fn open_paths(
.collect::<Vec<_>>();
cx.update(|cx| {
- for window in local_workspace_windows(&cx) {
- if let Ok(workspace) = window.read(&cx) {
- let m = workspace.project.read(&cx).visibility_for_paths(
+ for window in local_workspace_windows(cx) {
+ if let Ok(workspace) = window.read(cx) {
+ let m = workspace.project.read(cx).visibility_for_paths(
&abs_paths,
&all_metadatas,
open_options.open_new_workspace == None,
@@ -7341,7 +7341,7 @@ pub fn open_ssh_project_with_new_connection(
) -> Task<Result<()>> {
cx.spawn(async move |cx| {
let (serialized_ssh_project, workspace_id, serialized_workspace) =
- serialize_ssh_project(connection_options.clone(), paths.clone(), &cx).await?;
+ serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?;
let session = match cx
.update(|cx| {
@@ -7395,7 +7395,7 @@ pub fn open_ssh_project_with_existing_connection(
) -> Task<Result<()>> {
cx.spawn(async move |cx| {
let (serialized_ssh_project, workspace_id, serialized_workspace) =
- serialize_ssh_project(connection_options.clone(), paths.clone(), &cx).await?;
+ serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?;
open_ssh_project_inner(
project,
@@ -3199,7 +3199,7 @@ impl BackgroundScannerState {
}
async fn is_git_dir(path: &Path, fs: &dyn Fs) -> bool {
- if path.file_name() == Some(&*DOT_GIT) {
+ if path.file_name() == Some(*DOT_GIT) {
return true;
}
@@ -3575,7 +3575,7 @@ impl<'a>
cursor_location: &Dimensions<TraversalProgress<'a>, GitSummary>,
_: &(),
) -> Ordering {
- self.cmp_path(&cursor_location.0.max_path)
+ self.cmp_path(cursor_location.0.max_path)
}
}
@@ -5364,13 +5364,13 @@ impl PathTarget<'_> {
impl<'a, S: Summary> SeekTarget<'a, PathSummary<S>, PathProgress<'a>> for PathTarget<'_> {
fn cmp(&self, cursor_location: &PathProgress<'a>, _: &S::Context) -> Ordering {
- self.cmp_path(&cursor_location.max_path)
+ self.cmp_path(cursor_location.max_path)
}
}
impl<'a, S: Summary> SeekTarget<'a, PathSummary<S>, TraversalProgress<'a>> for PathTarget<'_> {
fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &S::Context) -> Ordering {
- self.cmp_path(&cursor_location.max_path)
+ self.cmp_path(cursor_location.max_path)
}
}
@@ -5396,7 +5396,7 @@ impl<'a> TraversalTarget<'a> {
fn cmp_progress(&self, progress: &TraversalProgress) -> Ordering {
match self {
- TraversalTarget::Path(path) => path.cmp_path(&progress.max_path),
+ TraversalTarget::Path(path) => path.cmp_path(progress.max_path),
TraversalTarget::Count {
count,
include_files,
@@ -5551,7 +5551,7 @@ fn discover_git_paths(dot_git_abs_path: &Arc<Path>, fs: &dyn Fs) -> (Arc<Path>,
let mut repository_dir_abs_path = dot_git_abs_path.clone();
let mut common_dir_abs_path = dot_git_abs_path.clone();
- if let Some(path) = smol::block_on(fs.load(&dot_git_abs_path))
+ if let Some(path) = smol::block_on(fs.load(dot_git_abs_path))
.ok()
.as_ref()
.and_then(|contents| parse_gitfile(contents).log_err())
@@ -371,9 +371,9 @@ pub fn main() {
{
cx.spawn({
let app_state = app_state.clone();
- async move |mut cx| {
- if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await {
- fail_to_open_window_async(e, &mut cx)
+ async move |cx| {
+ if let Err(e) = restore_or_create_workspace(app_state, cx).await {
+ fail_to_open_window_async(e, cx)
}
}
})
@@ -690,7 +690,7 @@ pub fn main() {
cx.spawn({
let client = app_state.client.clone();
- async move |cx| authenticate(client, &cx).await
+ async move |cx| authenticate(client, cx).await
})
.detach_and_log_err(cx);
@@ -722,9 +722,9 @@ pub fn main() {
None => {
cx.spawn({
let app_state = app_state.clone();
- async move |mut cx| {
- if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await {
- fail_to_open_window_async(e, &mut cx)
+ async move |cx| {
+ if let Err(e) = restore_or_create_workspace(app_state, cx).await {
+ fail_to_open_window_async(e, cx)
}
}
})
@@ -795,14 +795,14 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
}
if let Some(connection_options) = request.ssh_connection {
- cx.spawn(async move |mut cx| {
+ cx.spawn(async move |cx| {
let paths: Vec<PathBuf> = request.open_paths.into_iter().map(PathBuf::from).collect();
open_ssh_project(
connection_options,
paths,
app_state,
workspace::OpenOptions::default(),
- &mut cx,
+ cx,
)
.await
})
@@ -813,7 +813,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
let mut task = None;
if !request.open_paths.is_empty() || !request.diff_paths.is_empty() {
let app_state = app_state.clone();
- task = Some(cx.spawn(async move |mut cx| {
+ task = Some(cx.spawn(async move |cx| {
let paths_with_position =
derive_paths_with_position(app_state.fs.as_ref(), request.open_paths).await;
let (_window, results) = open_paths_with_positions(
@@ -821,7 +821,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
&request.diff_paths,
app_state,
workspace::OpenOptions::default(),
- &mut cx,
+ cx,
)
.await?;
for result in results.into_iter().flatten() {
@@ -834,7 +834,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
}
if !request.open_channel_notes.is_empty() || request.join_channel.is_some() {
- cx.spawn(async move |mut cx| {
+ cx.spawn(async move |cx| {
let result = maybe!(async {
if let Some(task) = task {
task.await?;
@@ -842,7 +842,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
let client = app_state.client.clone();
// we continue even if authentication fails as join_channel/ open channel notes will
// show a visible error message.
- authenticate(client, &cx).await.log_err();
+ authenticate(client, cx).await.log_err();
if let Some(channel_id) = request.join_channel {
cx.update(|cx| {
@@ -878,14 +878,14 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
})
.await;
if let Err(err) = result {
- fail_to_open_window_async(err, &mut cx);
+ fail_to_open_window_async(err, cx);
}
})
.detach()
} else if let Some(task) = task {
- cx.spawn(async move |mut cx| {
+ cx.spawn(async move |cx| {
if let Err(err) = task.await {
- fail_to_open_window_async(err, &mut cx);
+ fail_to_open_window_async(err, cx);
}
})
.detach();
@@ -536,7 +536,7 @@ async fn upload_previous_panics(
});
if let Some(panic) = panic
- && upload_panic(&http, &panic_report_url, panic, &mut most_recent_panic).await?
+ && upload_panic(&http, panic_report_url, panic, &mut most_recent_panic).await?
{
// We've done what we can, delete the file
fs::remove_file(child_path)
@@ -566,7 +566,7 @@ pub async fn upload_previous_minidumps(http: Arc<HttpClientWithUrl>) -> anyhow::
if let Ok(metadata) = serde_json::from_slice(&smol::fs::read(&json_path).await?) {
if upload_minidump(
http.clone(),
- &minidump_endpoint,
+ minidump_endpoint,
smol::fs::read(&child_path)
.await
.context("Failed to read minidump")?,
@@ -327,7 +327,7 @@ pub fn initialize_workspace(
cx.subscribe_in(&workspace_handle, window, {
move |workspace, _, event, window, cx| match event {
workspace::Event::PaneAdded(pane) => {
- initialize_pane(workspace, &pane, window, cx);
+ initialize_pane(workspace, pane, window, cx);
}
workspace::Event::OpenBundledFile {
text,
@@ -796,7 +796,7 @@ fn register_actions(
.register_action(install_cli)
.register_action(|_, _: &install_cli::RegisterZedScheme, window, cx| {
cx.spawn_in(window, async move |workspace, cx| {
- install_cli::register_zed_scheme(&cx).await?;
+ install_cli::register_zed_scheme(cx).await?;
workspace.update_in(cx, |workspace, _, cx| {
struct RegisterZedScheme;
@@ -650,7 +650,7 @@ impl ComponentPreview {
_window: &mut Window,
_cx: &mut Context<Self>,
) -> impl IntoElement {
- let component = self.component_map.get(&component_id);
+ let component = self.component_map.get(component_id);
if let Some(component) = component {
v_flex()
@@ -147,7 +147,7 @@ fn assign_edit_prediction_providers(
assign_edit_prediction_provider(
editor,
provider,
- &client,
+ client,
user_store.clone(),
window,
cx,
@@ -248,7 +248,7 @@ fn assign_edit_prediction_provider(
if let Some(buffer) = &singleton_buffer {
if buffer.read(cx).file().is_some() {
zeta.update(cx, |zeta, cx| {
- zeta.register_buffer(&buffer, cx);
+ zeta.register_buffer(buffer, cx);
});
}
}
@@ -432,13 +432,13 @@ async fn open_workspaces(
.connection_options_for(ssh.host, ssh.port, ssh.user)
});
if let Ok(connection_options) = connection_options {
- cx.spawn(async move |mut cx| {
+ cx.spawn(async move |cx| {
open_ssh_project(
connection_options,
ssh.paths.into_iter().map(PathBuf::from).collect(),
app_state,
OpenOptions::default(),
- &mut cx,
+ cx,
)
.await
.log_err();
@@ -182,7 +182,7 @@ impl Render for QuickActionBar {
let code_action_element = if is_deployed {
editor.update(cx, |editor, cx| {
if let Some(style) = editor.style() {
- editor.render_context_menu(&style, MAX_CODE_ACTION_MENU_LINES, window, cx)
+ editor.render_context_menu(style, MAX_CODE_ACTION_MENU_LINES, window, cx)
} else {
None
}
@@ -198,7 +198,7 @@ mod tests {
#[test]
fn test_mit_positive_detection() {
- assert!(is_license_eligible_for_data_collection(&MIT_LICENSE));
+ assert!(is_license_eligible_for_data_collection(MIT_LICENSE));
}
#[test]
@@ -505,7 +505,7 @@ impl Zeta {
input_events,
input_excerpt,
buffer_snapshotted_at,
- &cx,
+ cx,
)
.await;
@@ -981,7 +981,7 @@ and then another
old_text,
new_text,
editable_range.start,
- &snapshot,
+ snapshot,
))
}
@@ -991,7 +991,7 @@ and then another
offset: usize,
snapshot: &BufferSnapshot,
) -> Vec<(Range<Anchor>, String)> {
- text_diff(&old_text, &new_text)
+ text_diff(&old_text, new_text)
.into_iter()
.map(|(mut old_range, new_text)| {
old_range.start += offset;
@@ -1182,7 +1182,7 @@ pub fn gather_context(
.filter_map(|(language_server_id, diagnostic_group)| {
let language_server =
local_lsp_store.running_language_server_for_id(language_server_id)?;
- let diagnostic_group = diagnostic_group.resolve::<usize>(&snapshot);
+ let diagnostic_group = diagnostic_group.resolve::<usize>(snapshot);
let language_server_name = language_server.name().to_string();
let serialized = serde_json::to_value(diagnostic_group).unwrap();
Some((language_server_name, serialized))
@@ -1313,10 +1313,10 @@ impl CurrentEditPrediction {
return true;
}
- let Some(old_edits) = old_completion.completion.interpolate(&snapshot) else {
+ let Some(old_edits) = old_completion.completion.interpolate(snapshot) else {
return true;
};
- let Some(new_edits) = self.completion.interpolate(&snapshot) else {
+ let Some(new_edits) = self.completion.interpolate(snapshot) else {
return false;
};
@@ -1664,7 +1664,7 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider {
if let Some(old_completion) = this.current_completion.as_ref() {
let snapshot = buffer.read(cx).snapshot();
- if new_completion.should_replace_completion(&old_completion, &snapshot) {
+ if new_completion.should_replace_completion(old_completion, &snapshot) {
this.zeta.update(cx, |zeta, cx| {
zeta.completion_shown(&new_completion.completion, cx);
});
@@ -131,7 +131,7 @@ async fn get_context(
let (project, _lsp_open_handle, buffer) = if use_language_server {
let (project, lsp_open_handle, buffer) =
- open_buffer_with_language_server(&worktree_path, &cursor.path, &app_state, cx).await?;
+ open_buffer_with_language_server(&worktree_path, &cursor.path, app_state, cx).await?;
(Some(project), Some(lsp_open_handle), buffer)
} else {
let abs_path = worktree_path.join(&cursor.path);
@@ -260,7 +260,7 @@ pub fn wait_for_lang_server(
.update(cx, |buffer, cx| {
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
- .language_servers_for_local_buffer(&buffer, cx)
+ .language_servers_for_local_buffer(buffer, cx)
.next()
.is_some()
})
@@ -291,7 +291,7 @@ pub fn wait_for_lang_server(
_ => {}
}
}),
- cx.subscribe(&project, {
+ cx.subscribe(project, {
let buffer = buffer.clone();
move |project, event, cx| match event {
project::Event::LanguageServerAdded(_, _, _) => {
@@ -82,7 +82,7 @@ pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Le
// if no scopes are enabled, return false because it's not <= LEVEL_ENABLED_MAX_STATIC
return is_enabled_by_default;
}
- let enabled_status = map.is_enabled(&scope, module_path, level);
+ let enabled_status = map.is_enabled(scope, module_path, level);
return match enabled_status {
EnabledStatus::NotConfigured => is_enabled_by_default,
EnabledStatus::Enabled => true,