From 95b0dab876ccbcdcaddf7818cf9c3e273f2e5cb8 Mon Sep 17 00:00:00 2001
From: Max Brunsfeld
Date: Tue, 29 Aug 2023 09:30:36 -0700
Subject: [PATCH] Debounce code action and document highlight requests (#2905)

Lately, I've been finding rust-analyzer unusably slow when editing large files (like `editor_tests.rs` or `integration_tests.rs`). When I profile the rust-analyzer process, I see that it sometimes saturates up to 10 cores processing a queue of code action requests.

Additionally, when collaborating on large files like these, we sometimes see long delays in propagating buffer operations. I'm still not sure why this is happening, but whenever I look at the server logs in Datadog, I see remote `CodeActions` and `DocumentHighlights` messages that take upwards of 30 seconds to process. What may be happening is that many such requests resolve at once, and the responses take up too much of the host's bandwidth.

I think both of these problems are caused by us sending far too many code action and document highlight requests to rust-analyzer. This PR adds a simple debounce between changing selections and making these requests. From my local testing, this debounce makes rust-analyzer *much* more responsive when moving the cursor around a large file like `editor_tests.rs`.
---
 crates/collab/src/tests/integration_tests.rs | 90 ++++++++++----------
 crates/editor/src/editor.rs                  | 35 +++++---
 crates/rpc/src/peer.rs                       | 54 ++++++------
 crates/zed/src/zed.rs                        |  2 +
 4 files changed, 98 insertions(+), 83 deletions(-)
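For reference, the debounce pattern the editor change implements boils down to: on every selection change, cancel the previous in-flight task and start a new one that waits out the debounce interval before issuing the LSP request. The sketch below is a simplified, self-contained illustration of that idea; it uses tokio and a made-up `fetch_code_actions` function purely for illustration, whereas the real change uses gpui's `cx.spawn` and `cx.background().timer(...)`, as shown in the `crates/editor/src/editor.rs` hunks.

```rust
use std::time::Duration;
use tokio::{task::JoinHandle, time::sleep};

const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250);

/// Illustrative stand-in for the expensive LSP round trip.
async fn fetch_code_actions(cursor: usize) {
    println!("requesting code actions at offset {cursor}");
}

struct Editor {
    code_actions_task: Option<JoinHandle<()>>,
}

impl Editor {
    /// Called on every selection change. Aborting the previous task means only
    /// the last cursor position within the debounce window actually reaches
    /// the language server.
    fn refresh_code_actions(&mut self, cursor: usize) {
        if let Some(task) = self.code_actions_task.take() {
            task.abort();
        }
        self.code_actions_task = Some(tokio::spawn(async move {
            // Wait out the debounce interval first; if another selection change
            // arrives in the meantime, this task is aborted before the request
            // is ever sent.
            sleep(CODE_ACTIONS_DEBOUNCE_TIMEOUT).await;
            fetch_code_actions(cursor).await;
        }));
    }
}

#[tokio::main]
async fn main() {
    let mut editor = Editor {
        code_actions_task: None,
    };
    // Rapid cursor movement: only the final position triggers a request.
    for cursor in 0..100 {
        editor.refresh_code_actions(cursor);
    }
    sleep(CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2).await;
}
```

In Zed's own implementation the cancellation is implicit: replacing `self.code_actions_task` drops the previous gpui `Task`, which cancels it, so no explicit abort is needed.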
diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs
index 9bee8d434cd9ccb6d0fa252e2badc49be99a54d4..0ba3fa06a078dcb8942317d9064b776e1d3f1347 100644
--- a/crates/collab/src/tests/integration_tests.rs
+++ b/crates/collab/src/tests/integration_tests.rs
@@ -5320,7 +5320,7 @@ async fn test_collaborating_with_code_actions(
         .unwrap();
 
     let mut fake_language_server = fake_language_servers.next().await.unwrap();
-    fake_language_server
+    let mut requests = fake_language_server
         .handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document.uri,
@@ -5329,9 +5329,9 @@ async fn test_collaborating_with_code_actions(
             assert_eq!(params.range.start, lsp::Position::new(0, 0));
             assert_eq!(params.range.end, lsp::Position::new(0, 0));
             Ok(None)
-        })
-        .next()
-        .await;
+        });
+    deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2);
+    requests.next().await;
 
     // Move cursor to a location that contains code actions.
     editor_b.update(cx_b, |editor, cx| {
@@ -5341,7 +5341,7 @@ async fn test_collaborating_with_code_actions(
         cx.focus(&editor_b);
     });
 
-    fake_language_server
+    let mut requests = fake_language_server
         .handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document.uri,
@@ -5393,9 +5393,9 @@ async fn test_collaborating_with_code_actions(
                     ..Default::default()
                 },
             )]))
-        })
-        .next()
-        .await;
+        });
+    deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2);
+    requests.next().await;
 
     // Toggle code actions and wait for them to display.
     editor_b.update(cx_b, |editor, cx| {
@@ -7863,6 +7863,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     client_a.language_registry().add(Arc::clone(&language));
     client_b.language_registry().add(language);
 
+    // Client A opens a project.
     client_a
         .fs()
         .insert_tree(
@@ -7883,6 +7884,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .await
         .unwrap();
 
+    // Client B joins the project.
     let project_b = client_b.build_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
@@ -7892,6 +7894,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
     cx_a.foreground().start_waiting();
 
+    // The host opens a rust file.
     let _buffer_a = project_a
         .update(cx_a, |project, cx| {
             project.open_local_buffer("/a/main.rs", cx)
@@ -7899,7 +7902,6 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .await
         .unwrap();
     let fake_language_server = fake_language_servers.next().await.unwrap();
-    let next_call_id = Arc::new(AtomicU32::new(0));
    let editor_a = workspace_a
         .update(cx_a, |workspace, cx| {
             workspace.open_path((worktree_id, "main.rs"), None, true, cx)
@@ -7908,6 +7910,9 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .unwrap()
         .downcast::<Editor>()
         .unwrap();
+
+    // Set up the language server to return an additional inlay hint on each request.
+    let next_call_id = Arc::new(AtomicU32::new(0));
     fake_language_server
         .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
             let task_next_call_id = Arc::clone(&next_call_id);
@@ -7916,33 +7921,28 @@ async fn test_mutual_editor_inlay_hint_cache_update(
                     params.text_document.uri,
                     lsp::Url::from_file_path("/a/main.rs").unwrap(),
                 );
-                let mut current_call_id = Arc::clone(&task_next_call_id).fetch_add(1, SeqCst);
-                let mut new_hints = Vec::with_capacity(current_call_id as usize);
-                loop {
-                    new_hints.push(lsp::InlayHint {
-                        position: lsp::Position::new(0, current_call_id),
-                        label: lsp::InlayHintLabel::String(current_call_id.to_string()),
-                        kind: None,
-                        text_edits: None,
-                        tooltip: None,
-                        padding_left: None,
-                        padding_right: None,
-                        data: None,
-                    });
-                    if current_call_id == 0 {
-                        break;
-                    }
-                    current_call_id -= 1;
-                }
-                Ok(Some(new_hints))
+                let call_count = task_next_call_id.fetch_add(1, SeqCst);
+                Ok(Some(
+                    (0..=call_count)
+                        .map(|ix| lsp::InlayHint {
+                            position: lsp::Position::new(0, ix),
+                            label: lsp::InlayHintLabel::String(ix.to_string()),
+                            kind: None,
+                            text_edits: None,
+                            tooltip: None,
+                            padding_left: None,
+                            padding_right: None,
+                            data: None,
+                        })
+                        .collect(),
+                ))
             }
         })
         .next()
         .await
         .unwrap();
 
-    cx_a.foreground().finish_waiting();
-    cx_a.foreground().run_until_parked();
+    deterministic.run_until_parked();
 
     let mut edits_made = 1;
     editor_a.update(cx_a, |editor, _| {
@@ -7968,7 +7968,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         .downcast::<Editor>()
         .unwrap();
 
-    cx_b.foreground().run_until_parked();
+    deterministic.run_until_parked();
     editor_b.update(cx_b, |editor, _| {
         assert_eq!(
             vec!["0".to_string(), "1".to_string()],
@@ -7989,25 +7989,25 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         cx.focus(&editor_b);
         edits_made += 1;
     });
-    cx_a.foreground().run_until_parked();
-    cx_b.foreground().run_until_parked();
+
+    deterministic.run_until_parked();
     editor_a.update(cx_a, |editor, _| {
         assert_eq!(
-            vec!["0".to_string(), "1".to_string(), "2".to_string()],
+            vec![
+                "0".to_string(),
+                "1".to_string(),
+                "2".to_string(),
+                "3".to_string()
+            ],
             extract_hint_labels(editor),
-            "Host should get hints from the 1st edit and 1st LSP query"
+            "Host should get hints from the 1st edit and 2nd LSP query"
         );
         let inlay_cache = editor.inlay_hint_cache();
         assert_eq!(inlay_cache.version(), edits_made);
     });
     editor_b.update(cx_b, |editor, _| {
         assert_eq!(
-            vec![
-                "0".to_string(),
- "1".to_string(), - "2".to_string(), - "3".to_string() - ], + vec!["0".to_string(), "1".to_string(), "2".to_string(),], extract_hint_labels(editor), "Guest should get hints the 1st edit and 2nd LSP query" ); @@ -8021,8 +8021,8 @@ async fn test_mutual_editor_inlay_hint_cache_update( cx.focus(&editor_a); edits_made += 1; }); - cx_a.foreground().run_until_parked(); - cx_b.foreground().run_until_parked(); + + deterministic.run_until_parked(); editor_a.update(cx_a, |editor, _| { assert_eq!( vec![ @@ -8061,8 +8061,8 @@ async fn test_mutual_editor_inlay_hint_cache_update( .await .expect("inlay refresh request failed"); edits_made += 1; - cx_a.foreground().run_until_parked(); - cx_b.foreground().run_until_parked(); + + deterministic.run_until_parked(); editor_a.update(cx_a, |editor, _| { assert_eq!( vec![ diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 67279b1ba62551d0b41b7dc97a3d1e56eb841691..096230a08a175e04ab5f73bf14f8a069f2d2f27e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -108,6 +108,8 @@ const MAX_LINE_LEN: usize = 1024; const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10; const MAX_SELECTION_HISTORY_LEN: usize = 1024; const COPILOT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); +pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250); +pub const DOCUMENT_HIGHLIGHTS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); pub const FORMAT_TIMEOUT: Duration = Duration::from_secs(2); @@ -3248,7 +3250,7 @@ impl Editor { } fn refresh_code_actions(&mut self, cx: &mut ViewContext) -> Option<()> { - let project = self.project.as_ref()?; + let project = self.project.clone()?; let buffer = self.buffer.read(cx); let newest_selection = self.selections.newest_anchor().clone(); let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?; @@ -3257,11 +3259,15 @@ impl Editor { return None; } - let actions = project.update(cx, |project, cx| { - project.code_actions(&start_buffer, start..end, cx) - }); self.code_actions_task = Some(cx.spawn(|this, mut cx| async move { - let actions = actions.await; + cx.background().timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT).await; + + let actions = project + .update(&mut cx, |project, cx| { + project.code_actions(&start_buffer, start..end, cx) + }) + .await; + this.update(&mut cx, |this, cx| { this.available_code_actions = actions.log_err().and_then(|actions| { if actions.is_empty() { @@ -3282,7 +3288,7 @@ impl Editor { return None; } - let project = self.project.as_ref()?; + let project = self.project.clone()?; let buffer = self.buffer.read(cx); let newest_selection = self.selections.newest_anchor().clone(); let cursor_position = newest_selection.head(); @@ -3293,12 +3299,19 @@ impl Editor { return None; } - let highlights = project.update(cx, |project, cx| { - project.document_highlights(&cursor_buffer, cursor_buffer_position, cx) - }); - self.document_highlights_task = Some(cx.spawn(|this, mut cx| async move { - if let Some(highlights) = highlights.await.log_err() { + cx.background() + .timer(DOCUMENT_HIGHLIGHTS_DEBOUNCE_TIMEOUT) + .await; + + let highlights = project + .update(&mut cx, |project, cx| { + project.document_highlights(&cursor_buffer, cursor_buffer_position, cx) + }) + .await + .log_err(); + + if let Some(highlights) = highlights { this.update(&mut cx, |this, cx| { if this.pending_rename.is_some() { return; diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 
72ddfa567b5822540632bce4bf70d26fe192281c..91b914f169a86b974232b5e8f3e988d36cb64d07 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -171,12 +171,12 @@ impl Peer { let this = self.clone(); let response_channels = connection_state.response_channels.clone(); let handle_io = async move { - tracing::debug!(%connection_id, "handle io future: start"); + tracing::trace!(%connection_id, "handle io future: start"); let _end_connection = util::defer(|| { response_channels.lock().take(); this.connections.write().remove(&connection_id); - tracing::debug!(%connection_id, "handle io future: end"); + tracing::trace!(%connection_id, "handle io future: end"); }); // Send messages on this frequency so the connection isn't closed. @@ -188,68 +188,68 @@ impl Peer { futures::pin_mut!(receive_timeout); loop { - tracing::debug!(%connection_id, "outer loop iteration start"); + tracing::trace!(%connection_id, "outer loop iteration start"); let read_message = reader.read().fuse(); futures::pin_mut!(read_message); loop { - tracing::debug!(%connection_id, "inner loop iteration start"); + tracing::trace!(%connection_id, "inner loop iteration start"); futures::select_biased! { outgoing = outgoing_rx.next().fuse() => match outgoing { Some(outgoing) => { - tracing::debug!(%connection_id, "outgoing rpc message: writing"); + tracing::trace!(%connection_id, "outgoing rpc message: writing"); futures::select_biased! { result = writer.write(outgoing).fuse() => { - tracing::debug!(%connection_id, "outgoing rpc message: done writing"); + tracing::trace!(%connection_id, "outgoing rpc message: done writing"); result.context("failed to write RPC message")?; - tracing::debug!(%connection_id, "keepalive interval: resetting after sending message"); + tracing::trace!(%connection_id, "keepalive interval: resetting after sending message"); keepalive_timer.set(create_timer(KEEPALIVE_INTERVAL).fuse()); } _ = create_timer(WRITE_TIMEOUT).fuse() => { - tracing::debug!(%connection_id, "outgoing rpc message: writing timed out"); + tracing::trace!(%connection_id, "outgoing rpc message: writing timed out"); Err(anyhow!("timed out writing message"))?; } } } None => { - tracing::debug!(%connection_id, "outgoing rpc message: channel closed"); + tracing::trace!(%connection_id, "outgoing rpc message: channel closed"); return Ok(()) }, }, _ = keepalive_timer => { - tracing::debug!(%connection_id, "keepalive interval: pinging"); + tracing::trace!(%connection_id, "keepalive interval: pinging"); futures::select_biased! 
{ result = writer.write(proto::Message::Ping).fuse() => { - tracing::debug!(%connection_id, "keepalive interval: done pinging"); + tracing::trace!(%connection_id, "keepalive interval: done pinging"); result.context("failed to send keepalive")?; - tracing::debug!(%connection_id, "keepalive interval: resetting after pinging"); + tracing::trace!(%connection_id, "keepalive interval: resetting after pinging"); keepalive_timer.set(create_timer(KEEPALIVE_INTERVAL).fuse()); } _ = create_timer(WRITE_TIMEOUT).fuse() => { - tracing::debug!(%connection_id, "keepalive interval: pinging timed out"); + tracing::trace!(%connection_id, "keepalive interval: pinging timed out"); Err(anyhow!("timed out sending keepalive"))?; } } } incoming = read_message => { let incoming = incoming.context("error reading rpc message from socket")?; - tracing::debug!(%connection_id, "incoming rpc message: received"); - tracing::debug!(%connection_id, "receive timeout: resetting"); + tracing::trace!(%connection_id, "incoming rpc message: received"); + tracing::trace!(%connection_id, "receive timeout: resetting"); receive_timeout.set(create_timer(RECEIVE_TIMEOUT).fuse()); if let proto::Message::Envelope(incoming) = incoming { - tracing::debug!(%connection_id, "incoming rpc message: processing"); + tracing::trace!(%connection_id, "incoming rpc message: processing"); futures::select_biased! { result = incoming_tx.send(incoming).fuse() => match result { Ok(_) => { - tracing::debug!(%connection_id, "incoming rpc message: processed"); + tracing::trace!(%connection_id, "incoming rpc message: processed"); } Err(_) => { - tracing::debug!(%connection_id, "incoming rpc message: channel closed"); + tracing::trace!(%connection_id, "incoming rpc message: channel closed"); return Ok(()) } }, _ = create_timer(WRITE_TIMEOUT).fuse() => { - tracing::debug!(%connection_id, "incoming rpc message: processing timed out"); + tracing::trace!(%connection_id, "incoming rpc message: processing timed out"); Err(anyhow!("timed out processing incoming message"))? } } @@ -257,7 +257,7 @@ impl Peer { break; }, _ = receive_timeout => { - tracing::debug!(%connection_id, "receive timeout: delay between messages too long"); + tracing::trace!(%connection_id, "receive timeout: delay between messages too long"); Err(anyhow!("delay between messages too long"))? 
                        }
                    }
@@ -274,13 +274,13 @@ impl Peer {
                let response_channels = response_channels.clone();
                async move {
                    let message_id = incoming.id;
-                    tracing::debug!(?incoming, "incoming message future: start");
+                    tracing::trace!(?incoming, "incoming message future: start");
                    let _end = util::defer(move || {
-                        tracing::debug!(%connection_id, message_id, "incoming message future: end");
+                        tracing::trace!(%connection_id, message_id, "incoming message future: end");
                    });
 
                    if let Some(responding_to) = incoming.responding_to {
-                        tracing::debug!(
+                        tracing::trace!(
                            %connection_id,
                            message_id,
                            responding_to,
@@ -290,7 +290,7 @@ impl Peer {
                        if let Some(tx) = channel {
                            let requester_resumed = oneshot::channel();
                            if let Err(error) = tx.send((incoming, requester_resumed.0)) {
-                                tracing::debug!(
+                                tracing::trace!(
                                    %connection_id,
                                    message_id,
                                    responding_to = responding_to,
@@ -299,14 +299,14 @@ impl Peer {
                                    ?error,
                                );
                            }
 
-                            tracing::debug!(
+                            tracing::trace!(
                                %connection_id,
                                message_id,
                                responding_to,
                                "incoming response: waiting to resume requester"
                            );
                            let _ = requester_resumed.1.await;
-                            tracing::debug!(
+                            tracing::trace!(
                                %connection_id,
                                message_id,
                                responding_to,
@@ -323,7 +323,7 @@ impl Peer {
 
                        None
                    } else {
-                        tracing::debug!(%connection_id, message_id, "incoming message: received");
+                        tracing::trace!(%connection_id, message_id, "incoming message: received");
                        proto::build_typed_envelope(connection_id, incoming).or_else(|| {
                            tracing::error!(
                                %connection_id,
diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs
index de05c259c81d7c63ff856ab93d4162a34f992511..9ea406fc3e91c00ab6b7d3d8ebdfed918099a535 100644
--- a/crates/zed/src/zed.rs
+++ b/crates/zed/src/zed.rs
@@ -1706,6 +1706,8 @@ mod tests {
             .remove_file(Path::new("/root/a/file2"), Default::default())
             .await
             .unwrap();
+        cx.foreground().run_until_parked();
+
         workspace
             .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
             .await
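
The collab test changes above work by advancing a fake clock past the new debounce intervals (the `deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2)` calls) before expecting the fake language server to see a request. A rough standalone analogue of that testing pattern is sketched below, using tokio's paused test clock rather than gpui's deterministic executor, with an illustrative counter standing in for the fake language server; only `CODE_ACTIONS_DEBOUNCE_TIMEOUT` is taken from the editor change, everything else is made up for the sketch.

```rust
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};
use std::time::Duration;

const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250);

// With `start_paused = true` (requires tokio's `test-util` feature), the clock
// only moves when the test advances it, so the debounce timer is entirely under
// the test's control, much like gpui's deterministic executor in the collab tests.
#[tokio::test(start_paused = true)]
async fn request_is_sent_only_after_the_debounce_interval() {
    let requests = Arc::new(AtomicUsize::new(0));

    let task = {
        let requests = requests.clone();
        tokio::spawn(async move {
            // The debounced work: wait out the interval, then "send" the request.
            tokio::time::sleep(CODE_ACTIONS_DEBOUNCE_TIMEOUT).await;
            requests.fetch_add(1, Ordering::SeqCst);
        })
    };

    // Half the interval has elapsed: nothing should have been sent yet.
    tokio::time::advance(CODE_ACTIONS_DEBOUNCE_TIMEOUT / 2).await;
    assert_eq!(requests.load(Ordering::SeqCst), 0);

    // Push the clock past the debounce interval and let the task finish.
    tokio::time::advance(CODE_ACTIONS_DEBOUNCE_TIMEOUT).await;
    task.await.unwrap();
    assert_eq!(requests.load(Ordering::SeqCst), 1);
}
```

The essential point is the same as in the integration tests: with a controlled clock, the test decides exactly when the debounce interval elapses, so the assertions about when the language server is queried stay deterministic.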