Cargo.lock
@@ -1785,6 +1785,7 @@ dependencies = [
"project",
"theme",
"unindent",
+ "util",
"workspace",
]
Authored by Nathan Sobo and Max Brunsfeld (commit title lost in extraction)
Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
Cargo.lock | 1
crates/find/Cargo.toml | 1
crates/find/src/project_find.rs | 3
crates/project/src/project.rs | 84 ++++++
crates/project/src/search.rs | 431 ++++++++++++++++++----------------
crates/server/src/rpc.rs | 129 ++++++++++
6 files changed, 434 insertions(+), 215 deletions(-)
@@ -1785,6 +1785,7 @@ dependencies = [
"project",
"theme",
"unindent",
+ "util",
"workspace",
]
@@ -13,6 +13,7 @@ gpui = { path = "../gpui" }
language = { path = "../language" }
project = { path = "../project" }
theme = { path = "../theme" }
+util = { path = "../util" }
workspace = { path = "../workspace" }
anyhow = "1.0"
postage = { version = "0.4.1", features = ["futures-traits"] }
@@ -12,6 +12,7 @@ use std::{
ops::Range,
path::PathBuf,
};
+use util::ResultExt as _;
use workspace::{Item, ItemHandle, ItemNavHistory, ItemView, Settings, Workspace};
action!(Deploy);
@@ -81,7 +82,7 @@ impl ProjectFind {
.update(cx, |project, cx| project.search(query.clone(), cx));
self.highlighted_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
- let matches = search.await;
+ let matches = search.await.log_err()?;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.highlighted_ranges.clear();
@@ -15,6 +15,7 @@ use gpui::{
UpgradeModelHandle, WeakModelHandle,
};
use language::{
+ proto::{deserialize_anchor, serialize_anchor},
range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
ToLspPosition, ToOffset, ToPointUtf16, Transaction,
@@ -226,6 +227,7 @@ impl Project {
client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
+ client.add_entity_request_handler(Self::handle_search_project);
client.add_entity_request_handler(Self::handle_get_project_symbols);
client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
client.add_entity_request_handler(Self::handle_open_buffer);
@@ -2049,7 +2051,7 @@ impl Project {
&self,
query: SearchQuery,
cx: &mut ModelContext<Self>,
- ) -> Task<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>> {
+ ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
if self.is_local() {
let snapshots = self
.strong_worktrees(cx)
@@ -2215,10 +2217,38 @@ impl Project {
}
})
.await;
- matched_buffers.into_iter().flatten().collect()
+ Ok(matched_buffers.into_iter().flatten().collect())
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let request = self.client.request(query.to_proto(project_id));
+ let request_handle = self.start_buffer_request(cx);
+ cx.spawn(|this, mut cx| async move {
+ let response = request.await?;
+ let mut result = HashMap::default();
+ for location in response.locations {
+ let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
+ let target_buffer = this
+ .update(&mut cx, |this, cx| {
+ this.deserialize_buffer(buffer, request_handle.clone(), cx)
+ })
+ .await?;
+ let start = location
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target start"))?;
+ let end = location
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target end"))?;
+ result
+ .entry(target_buffer)
+ .or_insert(Vec::new())
+ .push(start..end)
+ }
+ Ok(result)
})
} else {
- todo!()
+ Task::ready(Ok(Default::default()))
}
}
@@ -3012,6 +3042,36 @@ impl Project {
})
}
+ async fn handle_search_project(
+ this: ModelHandle<Self>,
+ envelope: TypedEnvelope<proto::SearchProject>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::SearchProjectResponse> {
+ let peer_id = envelope.original_sender_id()?;
+ let query = SearchQuery::from_proto(envelope.payload)?;
+ let result = this
+ .update(&mut cx, |this, cx| this.search(query, cx))
+ .await?;
+
+ this.update(&mut cx, |this, cx| {
+ let mut locations = Vec::new();
+ for (buffer, ranges) in result {
+ for range in ranges {
+ let start = serialize_anchor(&range.start);
+ let end = serialize_anchor(&range.end);
+ let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
+ locations.push(proto::Location {
+ buffer: Some(buffer),
+ start: Some(start),
+ end: Some(end),
+ });
+ }
+ }
+ Ok(proto::SearchProjectResponse { locations })
+ })
+ }
+
async fn handle_open_buffer_for_symbol(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
@@ -4915,7 +4975,9 @@ mod tests {
.await;
assert_eq!(
- search(&project, SearchQuery::text("TWO", false, true), &mut cx).await,
+ search(&project, SearchQuery::text("TWO", false, true), &mut cx)
+ .await
+ .unwrap(),
HashMap::from_iter([
("two.rs".to_string(), vec![6..9]),
("three.rs".to_string(), vec![37..40])
@@ -4933,7 +4995,9 @@ mod tests {
});
assert_eq!(
- search(&project, SearchQuery::text("TWO", false, true), &mut cx).await,
+ search(&project, SearchQuery::text("TWO", false, true), &mut cx)
+ .await
+ .unwrap(),
HashMap::from_iter([
("two.rs".to_string(), vec![6..9]),
("three.rs".to_string(), vec![37..40]),
@@ -4945,10 +5009,12 @@ mod tests {
project: &ModelHandle<Project>,
query: SearchQuery,
cx: &mut gpui::TestAppContext,
- ) -> HashMap<String, Vec<Range<usize>>> {
- project
+ ) -> Result<HashMap<String, Vec<Range<usize>>>> {
+ let results = project
.update(cx, |project, cx| project.search(query, cx))
- .await
+ .await?;
+
+ Ok(results
.into_iter()
.map(|(buffer, ranges)| {
buffer.read_with(cx, |buffer, _| {
@@ -4960,7 +5026,7 @@ mod tests {
(path, ranges)
})
})
- .collect()
+ .collect())
}
}
}
@@ -1,204 +1,227 @@
-use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
-use anyhow::Result;
-use language::{char_kind, Rope};
-use regex::{Regex, RegexBuilder};
-use smol::future::yield_now;
-use std::{
- io::{BufRead, BufReader, Read},
- ops::Range,
- sync::Arc,
-};
-
-#[derive(Clone)]
-pub enum SearchQuery {
- Text {
- search: Arc<AhoCorasick<usize>>,
- query: Arc<str>,
- whole_word: bool,
- case_sensitive: bool,
- },
- Regex {
- regex: Regex,
- query: Arc<str>,
- multiline: bool,
- whole_word: bool,
- case_sensitive: bool,
- },
-}
-
-impl SearchQuery {
- pub fn text(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Self {
- let query = query.to_string();
- let search = AhoCorasickBuilder::new()
- .auto_configure(&[&query])
- .ascii_case_insensitive(!case_sensitive)
- .build(&[&query]);
- Self::Text {
- search: Arc::new(search),
- query: Arc::from(query),
- whole_word,
- case_sensitive,
- }
- }
-
- pub fn regex(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Result<Self> {
- let mut query = query.to_string();
- let initial_query = Arc::from(query.as_str());
- if whole_word {
- let mut word_query = String::new();
- word_query.push_str("\\b");
- word_query.push_str(&query);
- word_query.push_str("\\b");
- query = word_query
- }
-
- let multiline = query.contains("\n") || query.contains("\\n");
- let regex = RegexBuilder::new(&query)
- .case_insensitive(!case_sensitive)
- .multi_line(multiline)
- .build()?;
- Ok(Self::Regex {
- regex,
- query: initial_query,
- multiline,
- whole_word,
- case_sensitive,
- })
- }
-
- pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
- if self.as_str().is_empty() {
- return Ok(false);
- }
-
- match self {
- Self::Text { search, .. } => {
- let mat = search.stream_find_iter(stream).next();
- match mat {
- Some(Ok(_)) => Ok(true),
- Some(Err(err)) => Err(err.into()),
- None => Ok(false),
- }
- }
- Self::Regex {
- regex, multiline, ..
- } => {
- let mut reader = BufReader::new(stream);
- if *multiline {
- let mut text = String::new();
- if let Err(err) = reader.read_to_string(&mut text) {
- Err(err.into())
- } else {
- Ok(regex.find(&text).is_some())
- }
- } else {
- for line in reader.lines() {
- let line = line?;
- if regex.find(&line).is_some() {
- return Ok(true);
- }
- }
- Ok(false)
- }
- }
- }
- }
-
- pub async fn search(&self, rope: &Rope) -> Vec<Range<usize>> {
- const YIELD_INTERVAL: usize = 20000;
-
- if self.as_str().is_empty() {
- return Default::default();
- }
-
- let mut matches = Vec::new();
- match self {
- Self::Text {
- search, whole_word, ..
- } => {
- for (ix, mat) in search
- .stream_find_iter(rope.bytes_in_range(0..rope.len()))
- .enumerate()
- {
- if (ix + 1) % YIELD_INTERVAL == 0 {
- yield_now().await;
- }
-
- let mat = mat.unwrap();
- if *whole_word {
- let prev_kind = rope.reversed_chars_at(mat.start()).next().map(char_kind);
- let start_kind = char_kind(rope.chars_at(mat.start()).next().unwrap());
- let end_kind = char_kind(rope.reversed_chars_at(mat.end()).next().unwrap());
- let next_kind = rope.chars_at(mat.end()).next().map(char_kind);
- if Some(start_kind) == prev_kind || Some(end_kind) == next_kind {
- continue;
- }
- }
- matches.push(mat.start()..mat.end())
- }
- }
- Self::Regex {
- regex, multiline, ..
- } => {
- if *multiline {
- let text = rope.to_string();
- for (ix, mat) in regex.find_iter(&text).enumerate() {
- if (ix + 1) % YIELD_INTERVAL == 0 {
- yield_now().await;
- }
-
- matches.push(mat.start()..mat.end());
- }
- } else {
- let mut line = String::new();
- let mut line_offset = 0;
- for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() {
- if (chunk_ix + 1) % YIELD_INTERVAL == 0 {
- yield_now().await;
- }
-
- for (newline_ix, text) in chunk.split('\n').enumerate() {
- if newline_ix > 0 {
- for mat in regex.find_iter(&line) {
- let start = line_offset + mat.start();
- let end = line_offset + mat.end();
- matches.push(start..end);
- }
-
- line_offset += line.len() + 1;
- line.clear();
- }
- line.push_str(text);
- }
- }
- }
- }
- }
- matches
- }
-
- pub fn as_str(&self) -> &str {
- match self {
- Self::Text { query, .. } => query.as_ref(),
- Self::Regex { query, .. } => query.as_ref(),
- }
- }
-
- pub fn whole_word(&self) -> bool {
- match self {
- Self::Text { whole_word, .. } => *whole_word,
- Self::Regex { whole_word, .. } => *whole_word,
- }
- }
-
- pub fn case_sensitive(&self) -> bool {
- match self {
- Self::Text { case_sensitive, .. } => *case_sensitive,
- Self::Regex { case_sensitive, .. } => *case_sensitive,
- }
- }
-
- pub fn is_regex(&self) -> bool {
- matches!(self, Self::Regex { .. })
- }
-}
+use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
+use anyhow::Result;
+use client::proto;
+use language::{char_kind, Rope};
+use regex::{Regex, RegexBuilder};
+use smol::future::yield_now;
+use std::{
+ io::{BufRead, BufReader, Read},
+ ops::Range,
+ sync::Arc,
+};
+
+#[derive(Clone)]
+pub enum SearchQuery {
+ Text {
+ search: Arc<AhoCorasick<usize>>,
+ query: Arc<str>,
+ whole_word: bool,
+ case_sensitive: bool,
+ },
+ Regex {
+ regex: Regex,
+ query: Arc<str>,
+ multiline: bool,
+ whole_word: bool,
+ case_sensitive: bool,
+ },
+}
+
+impl SearchQuery {
+ pub fn text(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Self {
+ let query = query.to_string();
+ let search = AhoCorasickBuilder::new()
+ .auto_configure(&[&query])
+ .ascii_case_insensitive(!case_sensitive)
+ .build(&[&query]);
+ Self::Text {
+ search: Arc::new(search),
+ query: Arc::from(query),
+ whole_word,
+ case_sensitive,
+ }
+ }
+
+ pub fn regex(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Result<Self> {
+ let mut query = query.to_string();
+ let initial_query = Arc::from(query.as_str());
+ if whole_word {
+ let mut word_query = String::new();
+ word_query.push_str("\\b");
+ word_query.push_str(&query);
+ word_query.push_str("\\b");
+ query = word_query
+ }
+
+ let multiline = query.contains("\n") || query.contains("\\n");
+ let regex = RegexBuilder::new(&query)
+ .case_insensitive(!case_sensitive)
+ .multi_line(multiline)
+ .build()?;
+ Ok(Self::Regex {
+ regex,
+ query: initial_query,
+ multiline,
+ whole_word,
+ case_sensitive,
+ })
+ }
+
+ pub fn from_proto(message: proto::SearchProject) -> Result<Self> {
+ if message.regex {
+ Self::regex(message.query, message.whole_word, message.case_sensitive)
+ } else {
+ Ok(Self::text(
+ message.query,
+ message.whole_word,
+ message.case_sensitive,
+ ))
+ }
+ }
+
+ pub fn to_proto(&self, project_id: u64) -> proto::SearchProject {
+ proto::SearchProject {
+ project_id,
+ query: self.as_str().to_string(),
+ regex: self.is_regex(),
+ whole_word: self.whole_word(),
+ case_sensitive: self.case_sensitive(),
+ }
+ }
+
+ pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
+ if self.as_str().is_empty() {
+ return Ok(false);
+ }
+
+ match self {
+ Self::Text { search, .. } => {
+ let mat = search.stream_find_iter(stream).next();
+ match mat {
+ Some(Ok(_)) => Ok(true),
+ Some(Err(err)) => Err(err.into()),
+ None => Ok(false),
+ }
+ }
+ Self::Regex {
+ regex, multiline, ..
+ } => {
+ let mut reader = BufReader::new(stream);
+ if *multiline {
+ let mut text = String::new();
+ if let Err(err) = reader.read_to_string(&mut text) {
+ Err(err.into())
+ } else {
+ Ok(regex.find(&text).is_some())
+ }
+ } else {
+ for line in reader.lines() {
+ let line = line?;
+ if regex.find(&line).is_some() {
+ return Ok(true);
+ }
+ }
+ Ok(false)
+ }
+ }
+ }
+ }
+
+ pub async fn search(&self, rope: &Rope) -> Vec<Range<usize>> {
+ const YIELD_INTERVAL: usize = 20000;
+
+ if self.as_str().is_empty() {
+ return Default::default();
+ }
+
+ let mut matches = Vec::new();
+ match self {
+ Self::Text {
+ search, whole_word, ..
+ } => {
+ for (ix, mat) in search
+ .stream_find_iter(rope.bytes_in_range(0..rope.len()))
+ .enumerate()
+ {
+ if (ix + 1) % YIELD_INTERVAL == 0 {
+ yield_now().await;
+ }
+
+ let mat = mat.unwrap();
+ if *whole_word {
+ let prev_kind = rope.reversed_chars_at(mat.start()).next().map(char_kind);
+ let start_kind = char_kind(rope.chars_at(mat.start()).next().unwrap());
+ let end_kind = char_kind(rope.reversed_chars_at(mat.end()).next().unwrap());
+ let next_kind = rope.chars_at(mat.end()).next().map(char_kind);
+ if Some(start_kind) == prev_kind || Some(end_kind) == next_kind {
+ continue;
+ }
+ }
+ matches.push(mat.start()..mat.end())
+ }
+ }
+ Self::Regex {
+ regex, multiline, ..
+ } => {
+ if *multiline {
+ let text = rope.to_string();
+ for (ix, mat) in regex.find_iter(&text).enumerate() {
+ if (ix + 1) % YIELD_INTERVAL == 0 {
+ yield_now().await;
+ }
+
+ matches.push(mat.start()..mat.end());
+ }
+ } else {
+ let mut line = String::new();
+ let mut line_offset = 0;
+ for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() {
+ if (chunk_ix + 1) % YIELD_INTERVAL == 0 {
+ yield_now().await;
+ }
+
+ for (newline_ix, text) in chunk.split('\n').enumerate() {
+ if newline_ix > 0 {
+ for mat in regex.find_iter(&line) {
+ let start = line_offset + mat.start();
+ let end = line_offset + mat.end();
+ matches.push(start..end);
+ }
+
+ line_offset += line.len() + 1;
+ line.clear();
+ }
+ line.push_str(text);
+ }
+ }
+ }
+ }
+ }
+ matches
+ }
+
+ pub fn as_str(&self) -> &str {
+ match self {
+ Self::Text { query, .. } => query.as_ref(),
+ Self::Regex { query, .. } => query.as_ref(),
+ }
+ }
+
+ pub fn whole_word(&self) -> bool {
+ match self {
+ Self::Text { whole_word, .. } => *whole_word,
+ Self::Regex { whole_word, .. } => *whole_word,
+ }
+ }
+
+ pub fn case_sensitive(&self) -> bool {
+ match self {
+ Self::Text { case_sensitive, .. } => *case_sensitive,
+ Self::Regex { case_sensitive, .. } => *case_sensitive,
+ }
+ }
+
+ pub fn is_regex(&self) -> bool {
+ matches!(self, Self::Regex { .. })
+ }
+}
@@ -79,6 +79,7 @@ impl Server {
.add_message_handler(Server::disk_based_diagnostics_updated)
.add_request_handler(Server::get_definition)
.add_request_handler(Server::get_references)
+ .add_request_handler(Server::search_project)
.add_request_handler(Server::get_document_highlights)
.add_request_handler(Server::get_project_symbols)
.add_request_handler(Server::open_buffer_for_symbol)
@@ -570,6 +571,20 @@ impl Server {
.await?)
}
+ async fn search_project(
+ self: Arc<Server>,
+ request: TypedEnvelope<proto::SearchProject>,
+ ) -> tide::Result<proto::SearchProjectResponse> {
+ let host_connection_id = self
+ .state()
+ .read_project(request.payload.project_id, request.sender_id)?
+ .host_connection_id;
+ Ok(self
+ .peer
+ .forward_request(request.sender_id, host_connection_id, request.payload)
+ .await?)
+ }
+
async fn get_document_highlights(
self: Arc<Server>,
request: TypedEnvelope<proto::GetDocumentHighlights>,
@@ -1186,7 +1201,7 @@ mod tests {
LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
},
lsp,
- project::{DiagnosticSummary, Project, ProjectPath},
+ project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath},
workspace::{Settings, Workspace, WorkspaceParams},
};
@@ -2843,6 +2858,118 @@ mod tests {
});
}
+ #[gpui::test(iterations = 10)]
+ async fn test_project_search(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+ cx_a.foreground().forbid_parking();
+ let lang_registry = Arc::new(LanguageRegistry::new());
+ let fs = FakeFs::new(cx_a.background());
+ fs.insert_tree(
+ "/root-1",
+ json!({
+ ".zed.toml": r#"collaborators = ["user_b"]"#,
+ "a": "hello world",
+ "b": "goodnight moon",
+ "c": "a world of goo",
+ "d": "world champion of clown world",
+ }),
+ )
+ .await;
+ fs.insert_tree(
+ "/root-2",
+ json!({
+ "e": "disney world is fun",
+ }),
+ )
+ .await;
+
+ // Connect to a server as 2 clients.
+ let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
+ let client_a = server.create_client(&mut cx_a, "user_a").await;
+ let client_b = server.create_client(&mut cx_b, "user_b").await;
+
+ // Share a project as client A
+ let project_a = cx_a.update(|cx| {
+ Project::local(
+ client_a.clone(),
+ client_a.user_store.clone(),
+ lang_registry.clone(),
+ fs.clone(),
+ cx,
+ )
+ });
+ let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
+
+ let (worktree_1, _) = project_a
+ .update(&mut cx_a, |p, cx| {
+ p.find_or_create_local_worktree("/root-1", false, cx)
+ })
+ .await
+ .unwrap();
+ worktree_1
+ .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ let (worktree_2, _) = project_a
+ .update(&mut cx_a, |p, cx| {
+ p.find_or_create_local_worktree("/root-2", false, cx)
+ })
+ .await
+ .unwrap();
+ worktree_2
+ .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+
+ eprintln!("sharing");
+
+ project_a
+ .update(&mut cx_a, |p, cx| p.share(cx))
+ .await
+ .unwrap();
+
+ // Join the worktree as client B.
+ let project_b = Project::remote(
+ project_id,
+ client_b.clone(),
+ client_b.user_store.clone(),
+ lang_registry.clone(),
+ fs.clone(),
+ &mut cx_b.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let results = project_b
+ .update(&mut cx_b, |project, cx| {
+ project.search(SearchQuery::text("world", false, false), cx)
+ })
+ .await
+ .unwrap();
+
+ let mut ranges_by_path = results
+ .into_iter()
+ .map(|(buffer, ranges)| {
+ buffer.read_with(&cx_b, |buffer, cx| {
+ let path = buffer.file().unwrap().full_path(cx);
+ let offset_ranges = ranges
+ .into_iter()
+ .map(|range| range.to_offset(buffer))
+ .collect::<Vec<_>>();
+ (path, offset_ranges)
+ })
+ })
+ .collect::<Vec<_>>();
+ ranges_by_path.sort_by_key(|(path, _)| path.clone());
+
+ assert_eq!(
+ ranges_by_path,
+ &[
+ (PathBuf::from("root-1/a"), vec![6..11]),
+ (PathBuf::from("root-1/c"), vec![2..7]),
+ (PathBuf::from("root-1/d"), vec![0..5, 24..29]),
+ (PathBuf::from("root-2/e"), vec![7..12]),
+ ]
+ );
+ }
+
#[gpui::test(iterations = 10)]
async fn test_document_highlights(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
cx_a.foreground().forbid_parking();