1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
19 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
20 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
21};
22use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
23use lsp_command::*;
24use postage::{broadcast, prelude::Stream, sink::Sink, watch};
25use rand::prelude::*;
26use search::SearchQuery;
27use sha2::{Digest, Sha256};
28use smol::block_on;
29use std::{
30 cell::RefCell,
31 cmp,
32 convert::TryInto,
33 hash::Hash,
34 mem,
35 ops::Range,
36 path::{Component, Path, PathBuf},
37 rc::Rc,
38 sync::{atomic::AtomicBool, Arc},
39 time::Instant,
40};
41use util::{post_inc, ResultExt, TryFutureExt as _};
42
43pub use fs::*;
44pub use worktree::*;
45
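/// Central model coordinating a collection of worktrees, the buffers open
/// within them, per-worktree language servers, and (when shared or joined
/// over RPC) the collaborators editing the project.
///
/// A project is either local (backed by the file system and shareable) or
/// remote (a replica of another peer's project, driven by server messages);
/// see `ProjectClientState` below.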
46pub struct Project {
47 worktrees: Vec<WorktreeHandle>,
48 active_entry: Option<ProjectEntry>,
49 languages: Arc<LanguageRegistry>,
50 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
51 started_language_servers:
52 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
53 client: Arc<client::Client>,
54 user_store: ModelHandle<UserStore>,
55 fs: Arc<dyn Fs>,
56 client_state: ProjectClientState,
57 collaborators: HashMap<PeerId, Collaborator>,
58 subscriptions: Vec<client::Subscription>,
59 language_servers_with_diagnostics_running: isize,
60 opened_buffer: broadcast::Sender<()>,
61 loading_buffers: HashMap<
62 ProjectPath,
63 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
64 >,
65 buffers_state: Rc<RefCell<ProjectBuffers>>,
66 shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
67 nonce: u128,
68}
69
70#[derive(Default)]
71struct ProjectBuffers {
72 buffer_request_count: usize,
73 preserved_buffers: Vec<ModelHandle<Buffer>>,
74 open_buffers: HashMap<u64, OpenBuffer>,
75}
76
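/// State of a buffer tracked by the project, keyed by its remote id.
///
/// `Loaded` holds a weak handle so dropping all strong handles releases the
/// buffer; `Loading` queues operations that arrive over RPC before the buffer
/// has finished loading, to be applied in `register_buffer`.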
77enum OpenBuffer {
78 Loaded(WeakModelHandle<Buffer>),
79 Loading(Vec<Operation>),
80}
81
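/// A worktree owned by the project, held either strongly (kept alive for as
/// long as the project holds it) or weakly (dropped once no other strong
/// handles remain; `worktrees()` filters out handles that fail to upgrade).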
82enum WorktreeHandle {
83 Strong(ModelHandle<Worktree>),
84 Weak(WeakModelHandle<Worktree>),
85}
86
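/// Whether this project is the local, authoritative copy or a remote replica.
///
/// `Local` tracks the sharing flag and a watch channel carrying the
/// server-assigned project id (`None` until the project is registered);
/// `Remote` records the joined project's id, this peer's replica id, and
/// whether the host has stopped sharing.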
87enum ProjectClientState {
88 Local {
89 is_shared: bool,
90 remote_id_tx: watch::Sender<Option<u64>>,
91 remote_id_rx: watch::Receiver<Option<u64>>,
92 _maintain_remote_id_task: Task<Option<()>>,
93 },
94 Remote {
95 sharing_has_stopped: bool,
96 remote_id: u64,
97 replica_id: ReplicaId,
98 },
99}
100
101#[derive(Clone, Debug)]
102pub struct Collaborator {
103 pub user: Arc<User>,
104 pub peer_id: PeerId,
105 pub replica_id: ReplicaId,
106}
107
108#[derive(Clone, Debug, PartialEq)]
109pub enum Event {
110 ActiveEntryChanged(Option<ProjectEntry>),
111 WorktreeRemoved(WorktreeId),
112 DiskBasedDiagnosticsStarted,
113 DiskBasedDiagnosticsUpdated,
114 DiskBasedDiagnosticsFinished,
115 DiagnosticsUpdated(ProjectPath),
116}
117
118#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
119pub struct ProjectPath {
120 pub worktree_id: WorktreeId,
121 pub path: Arc<Path>,
122}
123
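/// Counts of primary diagnostic entries by severity; serialized per file path
/// via `to_proto` when sent to collaborators.
///
/// A minimal usage sketch (assumes `entries` is a slice of `DiagnosticEntry`
/// values; only entries marked `is_primary` are counted):
///
/// ```ignore
/// let summary = DiagnosticSummary::new(&entries);
/// println!("{} errors, {} warnings", summary.error_count, summary.warning_count);
/// ```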
124#[derive(Clone, Debug, Default, PartialEq)]
125pub struct DiagnosticSummary {
126 pub error_count: usize,
127 pub warning_count: usize,
128 pub info_count: usize,
129 pub hint_count: usize,
130}
131
132#[derive(Debug)]
133pub struct Location {
134 pub buffer: ModelHandle<Buffer>,
135 pub range: Range<language::Anchor>,
136}
137
138#[derive(Debug)]
139pub struct DocumentHighlight {
140 pub range: Range<language::Anchor>,
141 pub kind: DocumentHighlightKind,
142}
143
144#[derive(Clone, Debug)]
145pub struct Symbol {
146 pub source_worktree_id: WorktreeId,
147 pub worktree_id: WorktreeId,
148 pub language_name: String,
149 pub path: PathBuf,
150 pub label: CodeLabel,
151 pub name: String,
152 pub kind: lsp::SymbolKind,
153 pub range: Range<PointUtf16>,
154 pub signature: [u8; 32],
155}
156
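/// Guard returned by `start_buffer_request`, sharing the project's buffer
/// state. Presumably it bumps `buffer_request_count` and lets buffers be kept
/// in `preserved_buffers` for the duration of an RPC request that will return
/// buffer contents; its constructor and `Drop` impl live outside this
/// excerpt, so the exact bookkeeping is assumed here.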
157pub struct BufferRequestHandle(Rc<RefCell<ProjectBuffers>>);
158
159#[derive(Default)]
160pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
161
162impl DiagnosticSummary {
163 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
164 let mut this = Self {
165 error_count: 0,
166 warning_count: 0,
167 info_count: 0,
168 hint_count: 0,
169 };
170
171 for entry in diagnostics {
172 if entry.diagnostic.is_primary {
173 match entry.diagnostic.severity {
174 DiagnosticSeverity::ERROR => this.error_count += 1,
175 DiagnosticSeverity::WARNING => this.warning_count += 1,
176 DiagnosticSeverity::INFORMATION => this.info_count += 1,
177 DiagnosticSeverity::HINT => this.hint_count += 1,
178 _ => {}
179 }
180 }
181 }
182
183 this
184 }
185
186 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
187 proto::DiagnosticSummary {
188 path: path.to_string_lossy().to_string(),
189 error_count: self.error_count as u32,
190 warning_count: self.warning_count as u32,
191 info_count: self.info_count as u32,
192 hint_count: self.hint_count as u32,
193 }
194 }
195}
196
197#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
198pub struct ProjectEntry {
199 pub worktree_id: WorktreeId,
200 pub entry_id: usize,
201}
202
203impl Project {
204 pub fn init(client: &Arc<Client>) {
205 client.add_entity_message_handler(Self::handle_add_collaborator);
206 client.add_entity_message_handler(Self::handle_buffer_reloaded);
207 client.add_entity_message_handler(Self::handle_buffer_saved);
208 client.add_entity_message_handler(Self::handle_close_buffer);
209 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
210 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
211 client.add_entity_message_handler(Self::handle_remove_collaborator);
212 client.add_entity_message_handler(Self::handle_register_worktree);
213 client.add_entity_message_handler(Self::handle_unregister_worktree);
214 client.add_entity_message_handler(Self::handle_unshare_project);
215 client.add_entity_message_handler(Self::handle_update_buffer_file);
216 client.add_entity_message_handler(Self::handle_update_buffer);
217 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
218 client.add_entity_message_handler(Self::handle_update_worktree);
219 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
220 client.add_entity_request_handler(Self::handle_apply_code_action);
221 client.add_entity_request_handler(Self::handle_format_buffers);
222 client.add_entity_request_handler(Self::handle_get_code_actions);
223 client.add_entity_request_handler(Self::handle_get_completions);
224 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
225 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
226 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
227 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
228 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
229 client.add_entity_request_handler(Self::handle_get_project_symbols);
230 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
231 client.add_entity_request_handler(Self::handle_open_buffer);
232 client.add_entity_request_handler(Self::handle_save_buffer);
233 }
234
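    /// Creates a local project backed by the given file system.
    ///
    /// Spawns a background task that watches the client's connection status:
    /// whenever the client is connected it registers the project with the
    /// server, re-registers each local worktree under the returned project
    /// id, and publishes that id through the `remote_id` watch channel.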
235 pub fn local(
236 client: Arc<Client>,
237 user_store: ModelHandle<UserStore>,
238 languages: Arc<LanguageRegistry>,
239 fs: Arc<dyn Fs>,
240 cx: &mut MutableAppContext,
241 ) -> ModelHandle<Self> {
242 cx.add_model(|cx: &mut ModelContext<Self>| {
243 let (remote_id_tx, remote_id_rx) = watch::channel();
244 let _maintain_remote_id_task = cx.spawn_weak({
245 let rpc = client.clone();
246 move |this, mut cx| {
247 async move {
248 let mut status = rpc.status();
249 while let Some(status) = status.recv().await {
250 if let Some(this) = this.upgrade(&cx) {
251 let remote_id = if let client::Status::Connected { .. } = status {
252 let response = rpc.request(proto::RegisterProject {}).await?;
253 Some(response.project_id)
254 } else {
255 None
256 };
257
258 if let Some(project_id) = remote_id {
259 let mut registrations = Vec::new();
260 this.update(&mut cx, |this, cx| {
261 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
262 registrations.push(worktree.update(
263 cx,
264 |worktree, cx| {
265 let worktree = worktree.as_local_mut().unwrap();
266 worktree.register(project_id, cx)
267 },
268 ));
269 }
270 });
271 for registration in registrations {
272 registration.await?;
273 }
274 }
275 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
276 }
277 }
278 Ok(())
279 }
280 .log_err()
281 }
282 });
283
284 Self {
285 worktrees: Default::default(),
286 collaborators: Default::default(),
287 buffers_state: Default::default(),
288 loading_buffers: Default::default(),
289 shared_buffers: Default::default(),
290 client_state: ProjectClientState::Local {
291 is_shared: false,
292 remote_id_tx,
293 remote_id_rx,
294 _maintain_remote_id_task,
295 },
296 opened_buffer: broadcast::channel(1).0,
297 subscriptions: Vec::new(),
298 active_entry: None,
299 languages,
300 client,
301 user_store,
302 fs,
303 language_servers_with_diagnostics_running: 0,
304 language_servers: Default::default(),
305 started_language_servers: Default::default(),
306 nonce: StdRng::from_entropy().gen(),
307 }
308 })
309 }
310
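    /// Joins a project that another peer has shared.
    ///
    /// Authenticates and connects the client, sends `JoinProject`, builds a
    /// remote worktree for each worktree in the response, loads the
    /// collaborators' user records, and returns the populated model.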
311 pub async fn remote(
312 remote_id: u64,
313 client: Arc<Client>,
314 user_store: ModelHandle<UserStore>,
315 languages: Arc<LanguageRegistry>,
316 fs: Arc<dyn Fs>,
317 cx: &mut AsyncAppContext,
318 ) -> Result<ModelHandle<Self>> {
319 client.authenticate_and_connect(&cx).await?;
320
321 let response = client
322 .request(proto::JoinProject {
323 project_id: remote_id,
324 })
325 .await?;
326
327 let replica_id = response.replica_id as ReplicaId;
328
329 let mut worktrees = Vec::new();
330 for worktree in response.worktrees {
331 let (worktree, load_task) = cx
332 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
333 worktrees.push(worktree);
334 load_task.detach();
335 }
336
337 let this = cx.add_model(|cx| {
338 let mut this = Self {
339 worktrees: Vec::new(),
340 loading_buffers: Default::default(),
341 opened_buffer: broadcast::channel(1).0,
342 shared_buffers: Default::default(),
343 active_entry: None,
344 collaborators: Default::default(),
345 languages,
346 user_store: user_store.clone(),
347 fs,
348 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
349 client,
350 client_state: ProjectClientState::Remote {
351 sharing_has_stopped: false,
352 remote_id,
353 replica_id,
354 },
355 language_servers_with_diagnostics_running: 0,
356 language_servers: Default::default(),
357 started_language_servers: Default::default(),
358 buffers_state: Default::default(),
359 nonce: StdRng::from_entropy().gen(),
360 };
361 for worktree in worktrees {
362 this.add_worktree(&worktree, cx);
363 }
364 this
365 });
366
367 let user_ids = response
368 .collaborators
369 .iter()
370 .map(|peer| peer.user_id)
371 .collect();
372 user_store
373 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
374 .await?;
375 let mut collaborators = HashMap::default();
376 for message in response.collaborators {
377 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
378 collaborators.insert(collaborator.peer_id, collaborator);
379 }
380
381 this.update(cx, |this, _| {
382 this.collaborators = collaborators;
383 });
384
385 Ok(this)
386 }
387
388 #[cfg(any(test, feature = "test-support"))]
389 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
390 let languages = Arc::new(LanguageRegistry::new());
391 let http_client = client::test::FakeHttpClient::with_404_response();
392 let client = client::Client::new(http_client.clone());
393 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
394 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
395 }
396
397 #[cfg(any(test, feature = "test-support"))]
398 pub fn shared_buffer(&self, peer_id: PeerId, remote_id: u64) -> Option<ModelHandle<Buffer>> {
399 self.shared_buffers
400 .get(&peer_id)
401 .and_then(|buffers| buffers.get(&remote_id))
402 .cloned()
403 }
404
405 #[cfg(any(test, feature = "test-support"))]
406 pub fn has_buffered_operations(&self) -> bool {
407 self.buffers_state
408 .borrow()
409 .open_buffers
410 .values()
411 .any(|buffer| matches!(buffer, OpenBuffer::Loading(_)))
412 }
413
414 #[cfg(any(test, feature = "test-support"))]
415 pub fn languages(&self) -> &Arc<LanguageRegistry> {
416 &self.languages
417 }
418
419 pub fn fs(&self) -> &Arc<dyn Fs> {
420 &self.fs
421 }
422
423 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
424 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
425 *remote_id_tx.borrow_mut() = remote_id;
426 }
427
428 self.subscriptions.clear();
429 if let Some(remote_id) = remote_id {
430 self.subscriptions
431 .push(self.client.add_model_for_remote_entity(remote_id, cx));
432 }
433 }
434
435 pub fn remote_id(&self) -> Option<u64> {
436 match &self.client_state {
437 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
438 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
439 }
440 }
441
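    /// Resolves to the project's remote id: immediately for remote projects,
    /// and for local projects once registration publishes an id to the
    /// `remote_id` watch channel.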
442 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
443 let mut id = None;
444 let mut watch = None;
445 match &self.client_state {
446 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
447 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
448 }
449
450 async move {
451 if let Some(id) = id {
452 return id;
453 }
454 let mut watch = watch.unwrap();
455 loop {
456 let id = *watch.borrow();
457 if let Some(id) = id {
458 return id;
459 }
460 watch.recv().await;
461 }
462 }
463 }
464
465 pub fn replica_id(&self) -> ReplicaId {
466 match &self.client_state {
467 ProjectClientState::Local { .. } => 0,
468 ProjectClientState::Remote { replica_id, .. } => *replica_id,
469 }
470 }
471
472 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
473 &self.collaborators
474 }
475
476 pub fn worktrees<'a>(
477 &'a self,
478 cx: &'a AppContext,
479 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
480 self.worktrees
481 .iter()
482 .filter_map(move |worktree| worktree.upgrade(cx))
483 }
484
485 pub fn strong_worktrees<'a>(
486 &'a self,
487 cx: &'a AppContext,
488 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
489 self.worktrees.iter().filter_map(|worktree| {
490 worktree.upgrade(cx).and_then(|worktree| {
491 if worktree.read(cx).is_weak() {
492 None
493 } else {
494 Some(worktree)
495 }
496 })
497 })
498 }
499
500 pub fn worktree_for_id(
501 &self,
502 id: WorktreeId,
503 cx: &AppContext,
504 ) -> Option<ModelHandle<Worktree>> {
505 self.worktrees(cx)
506 .find(|worktree| worktree.read(cx).id() == id)
507 }
508
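    /// Shares this local project with collaborators: marks it shared, sends
    /// `ShareProject` for the already-registered project id, then shares each
    /// local worktree. Fails on remote projects or if no id has been assigned.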
509 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
510 let rpc = self.client.clone();
511 cx.spawn(|this, mut cx| async move {
512 let project_id = this.update(&mut cx, |this, _| {
513 if let ProjectClientState::Local {
514 is_shared,
515 remote_id_rx,
516 ..
517 } = &mut this.client_state
518 {
519 *is_shared = true;
520 remote_id_rx
521 .borrow()
522 .ok_or_else(|| anyhow!("no project id"))
523 } else {
524 Err(anyhow!("can't share a remote project"))
525 }
526 })?;
527
528 rpc.request(proto::ShareProject { project_id }).await?;
529 let mut tasks = Vec::new();
530 this.update(&mut cx, |this, cx| {
531 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
532 worktree.update(cx, |worktree, cx| {
533 let worktree = worktree.as_local_mut().unwrap();
534 tasks.push(worktree.share(project_id, cx));
535 });
536 }
537 });
538 for task in tasks {
539 task.await?;
540 }
541 this.update(&mut cx, |_, cx| cx.notify());
542 Ok(())
543 })
544 }
545
546 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
547 let rpc = self.client.clone();
548 cx.spawn(|this, mut cx| async move {
549 let project_id = this.update(&mut cx, |this, _| {
550 if let ProjectClientState::Local {
551 is_shared,
552 remote_id_rx,
553 ..
554 } = &mut this.client_state
555 {
556 *is_shared = false;
557 remote_id_rx
558 .borrow()
559 .ok_or_else(|| anyhow!("no project id"))
560 } else {
561 Err(anyhow!("can't share a remote project"))
562 }
563 })?;
564
565 rpc.send(proto::UnshareProject { project_id })?;
566 this.update(&mut cx, |this, cx| {
567 this.collaborators.clear();
568 this.shared_buffers.clear();
569 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
570 worktree.update(cx, |worktree, _| {
571 worktree.as_local_mut().unwrap().unshare();
572 });
573 }
574 cx.notify()
575 });
576 Ok(())
577 })
578 }
579
580 pub fn is_read_only(&self) -> bool {
581 match &self.client_state {
582 ProjectClientState::Local { .. } => false,
583 ProjectClientState::Remote {
584 sharing_has_stopped,
585 ..
586 } => *sharing_has_stopped,
587 }
588 }
589
590 pub fn is_local(&self) -> bool {
591 match &self.client_state {
592 ProjectClientState::Local { .. } => true,
593 ProjectClientState::Remote { .. } => false,
594 }
595 }
596
597 pub fn is_remote(&self) -> bool {
598 !self.is_local()
599 }
600
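    /// Opens the buffer at the given project path.
    ///
    /// If a buffer for the path is already open it is returned immediately.
    /// If the path is already being loaded, this waits on the shared entry in
    /// `loading_buffers`, so concurrent callers receive the same buffer.
    /// Otherwise the buffer is loaded from disk (local worktrees) or fetched
    /// over RPC (remote worktrees).
    ///
    /// A usage sketch, assuming an existing `worktree_id` and gpui context
    /// `cx` (the path is illustrative only):
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/lib.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```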
601 pub fn open_buffer(
602 &mut self,
603 path: impl Into<ProjectPath>,
604 cx: &mut ModelContext<Self>,
605 ) -> Task<Result<ModelHandle<Buffer>>> {
606 let project_path = path.into();
607 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
608 worktree
609 } else {
610 return Task::ready(Err(anyhow!("no such worktree")));
611 };
612
613 // If there is already a buffer for the given path, then return it.
614 let existing_buffer = self.get_open_buffer(&project_path, cx);
615 if let Some(existing_buffer) = existing_buffer {
616 return Task::ready(Ok(existing_buffer));
617 }
618
619 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
620 // If the given path is already being loaded, then wait for that existing
621 // task to complete and return the same buffer.
622 hash_map::Entry::Occupied(e) => e.get().clone(),
623
624 // Otherwise, record the fact that this path is now being loaded.
625 hash_map::Entry::Vacant(entry) => {
626 let (mut tx, rx) = postage::watch::channel();
627 entry.insert(rx.clone());
628
629 let load_buffer = if worktree.read(cx).is_local() {
630 self.open_local_buffer(&project_path.path, &worktree, cx)
631 } else {
632 self.open_remote_buffer(&project_path.path, &worktree, cx)
633 };
634
635 cx.spawn(move |this, mut cx| async move {
636 let load_result = load_buffer.await;
637 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
638 // Record the fact that the buffer is no longer loading.
639 this.loading_buffers.remove(&project_path);
640 let buffer = load_result.map_err(Arc::new)?;
641 Ok(buffer)
642 }));
643 })
644 .detach();
645 rx
646 }
647 };
648
649 cx.foreground().spawn(async move {
650 loop {
651 if let Some(result) = loading_watch.borrow().as_ref() {
652 match result {
653 Ok(buffer) => return Ok(buffer.clone()),
654 Err(error) => return Err(anyhow!("{}", error)),
655 }
656 }
657 loading_watch.recv().await;
658 }
659 })
660 }
661
662 fn open_local_buffer(
663 &mut self,
664 path: &Arc<Path>,
665 worktree: &ModelHandle<Worktree>,
666 cx: &mut ModelContext<Self>,
667 ) -> Task<Result<ModelHandle<Buffer>>> {
668 let load_buffer = worktree.update(cx, |worktree, cx| {
669 let worktree = worktree.as_local_mut().unwrap();
670 worktree.load_buffer(path, cx)
671 });
672 let worktree = worktree.downgrade();
673 cx.spawn(|this, mut cx| async move {
674 let buffer = load_buffer.await?;
675 let worktree = worktree
676 .upgrade(&cx)
677 .ok_or_else(|| anyhow!("worktree was removed"))?;
678 this.update(&mut cx, |this, cx| {
679 this.register_buffer(&buffer, Some(&worktree), cx)
680 })?;
681 Ok(buffer)
682 })
683 }
684
685 fn open_remote_buffer(
686 &mut self,
687 path: &Arc<Path>,
688 worktree: &ModelHandle<Worktree>,
689 cx: &mut ModelContext<Self>,
690 ) -> Task<Result<ModelHandle<Buffer>>> {
691 let rpc = self.client.clone();
692 let project_id = self.remote_id().unwrap();
693 let remote_worktree_id = worktree.read(cx).id();
694 let path = path.clone();
695 let path_string = path.to_string_lossy().to_string();
696 let request_handle = self.start_buffer_request(cx);
697 cx.spawn(|this, mut cx| async move {
698 let response = rpc
699 .request(proto::OpenBuffer {
700 project_id,
701 worktree_id: remote_worktree_id.to_proto(),
702 path: path_string,
703 })
704 .await?;
705 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
706
707 this.update(&mut cx, |this, cx| {
708 this.deserialize_buffer(buffer, request_handle, cx)
709 })
710 .await
711 })
712 }
713
714 fn open_local_buffer_via_lsp(
715 &mut self,
716 abs_path: lsp::Url,
717 lang_name: String,
718 lang_server: Arc<LanguageServer>,
719 cx: &mut ModelContext<Self>,
720 ) -> Task<Result<ModelHandle<Buffer>>> {
721 cx.spawn(|this, mut cx| async move {
722 let abs_path = abs_path
723 .to_file_path()
724 .map_err(|_| anyhow!("can't convert URI to path"))?;
725 let (worktree, relative_path) = if let Some(result) =
726 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
727 {
728 result
729 } else {
730 let worktree = this
731 .update(&mut cx, |this, cx| {
732 this.create_local_worktree(&abs_path, true, cx)
733 })
734 .await?;
735 this.update(&mut cx, |this, cx| {
736 this.language_servers
737 .insert((worktree.read(cx).id(), lang_name), lang_server);
738 });
739 (worktree, PathBuf::new())
740 };
741
742 let project_path = ProjectPath {
743 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
744 path: relative_path.into(),
745 };
746 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
747 .await
748 })
749 }
750
751 fn start_buffer_request(&self, cx: &AppContext) -> BufferRequestHandle {
752 BufferRequestHandle::new(self.buffers_state.clone(), cx)
753 }
754
755 pub fn save_buffer_as(
756 &self,
757 buffer: ModelHandle<Buffer>,
758 abs_path: PathBuf,
759 cx: &mut ModelContext<Project>,
760 ) -> Task<Result<()>> {
761 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
762 cx.spawn(|this, mut cx| async move {
763 let (worktree, path) = worktree_task.await?;
764 worktree
765 .update(&mut cx, |worktree, cx| {
766 worktree
767 .as_local_mut()
768 .unwrap()
769 .save_buffer_as(buffer.clone(), path, cx)
770 })
771 .await?;
772 this.update(&mut cx, |this, cx| {
773 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
774 });
775 Ok(())
776 })
777 }
778
779 #[cfg(any(test, feature = "test-support"))]
780 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
781 let path = path.into();
782 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
783 self.buffers_state
784 .borrow()
785 .open_buffers
786 .iter()
787 .any(|(_, buffer)| {
788 if let Some(buffer) = buffer.upgrade(cx) {
789 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
790 if file.worktree == worktree && file.path() == &path.path {
791 return true;
792 }
793 }
794 }
795 false
796 })
797 } else {
798 false
799 }
800 }
801
802 pub fn get_open_buffer(
803 &mut self,
804 path: &ProjectPath,
805 cx: &mut ModelContext<Self>,
806 ) -> Option<ModelHandle<Buffer>> {
807 let mut result = None;
808 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
809 self.buffers_state
810 .borrow_mut()
811 .open_buffers
812 .retain(|_, buffer| {
813 if let Some(buffer) = buffer.upgrade(cx) {
814 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
815 if file.worktree == worktree && file.path() == &path.path {
816 result = Some(buffer);
817 }
818 }
819 true
820 } else {
821 false
822 }
823 });
824 result
825 }
826
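    /// Records a newly opened buffer in `open_buffers`, keyed by its remote
    /// id. Operations queued while the buffer was loading are applied now;
    /// registering a second live buffer under the same id is an error. The
    /// buffer's language is then assigned.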
827 fn register_buffer(
828 &mut self,
829 buffer: &ModelHandle<Buffer>,
830 worktree: Option<&ModelHandle<Worktree>>,
831 cx: &mut ModelContext<Self>,
832 ) -> Result<()> {
833 let remote_id = buffer.read(cx).remote_id();
834 match self
835 .buffers_state
836 .borrow_mut()
837 .open_buffers
838 .insert(remote_id, OpenBuffer::Loaded(buffer.downgrade()))
839 {
840 None => {}
841 Some(OpenBuffer::Loading(operations)) => {
842 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
843 }
844 Some(OpenBuffer::Loaded(existing_handle)) => {
845 if existing_handle.upgrade(cx).is_some() {
846 Err(anyhow!(
847 "already registered buffer with remote id {}",
848 remote_id
849 ))?
850 }
851 }
852 }
853 self.assign_language_to_buffer(&buffer, worktree, cx);
854 Ok(())
855 }
856
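    /// Picks a language for the buffer based on its full path. For buffers in
    /// local worktrees this also starts (or reuses) the corresponding
    /// language server, attaches it to the buffer once ready, and applies any
    /// diagnostics the worktree already holds for the path.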
857 fn assign_language_to_buffer(
858 &mut self,
859 buffer: &ModelHandle<Buffer>,
860 worktree: Option<&ModelHandle<Worktree>>,
861 cx: &mut ModelContext<Self>,
862 ) -> Option<()> {
863 let (path, full_path) = {
864 let file = buffer.read(cx).file()?;
865 (file.path().clone(), file.full_path(cx))
866 };
867
868 // If the buffer has a language, set it and start/assign the language server
869 if let Some(language) = self.languages.select_language(&full_path) {
870 buffer.update(cx, |buffer, cx| {
871 buffer.set_language(Some(language.clone()), cx);
872 });
873
874 // For local worktrees, start a language server if needed.
875 // Also assign the language server and any previously stored diagnostics to the buffer.
876 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
877 let worktree_id = local_worktree.id();
878 let worktree_abs_path = local_worktree.abs_path().clone();
879 let buffer = buffer.downgrade();
880 let language_server =
881 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
882
883 cx.spawn_weak(|_, mut cx| async move {
884 if let Some(language_server) = language_server.await {
885 if let Some(buffer) = buffer.upgrade(&cx) {
886 buffer.update(&mut cx, |buffer, cx| {
887 buffer.set_language_server(Some(language_server), cx);
888 });
889 }
890 }
891 })
892 .detach();
893 }
894 }
895
896 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
897 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
898 buffer.update(cx, |buffer, cx| {
899 buffer.update_diagnostics(diagnostics, None, cx).log_err();
900 });
901 }
902 }
903
904 None
905 }
906
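    /// Starts at most one language server per (worktree, language) pair,
    /// memoizing the startup task in `started_language_servers` as a shared
    /// future. Once the server is running, its `PublishDiagnostics` and
    /// `Progress` notifications are funneled through a channel and translated
    /// into disk-based-diagnostics start/update/finish handling, which is
    /// also broadcast to collaborators when the project has a remote id.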
907 fn start_language_server(
908 &mut self,
909 worktree_id: WorktreeId,
910 worktree_path: Arc<Path>,
911 language: Arc<Language>,
912 cx: &mut ModelContext<Self>,
913 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
914 enum LspEvent {
915 DiagnosticsStart,
916 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
917 DiagnosticsFinish,
918 }
919
920 let key = (worktree_id, language.name().to_string());
921 self.started_language_servers
922 .entry(key.clone())
923 .or_insert_with(|| {
924 let language_server = self.languages.start_language_server(
925 &language,
926 worktree_path,
927 self.client.http_client(),
928 cx,
929 );
930 let rpc = self.client.clone();
931 cx.spawn_weak(|this, mut cx| async move {
932 let language_server = language_server?.await.log_err()?;
933 if let Some(this) = this.upgrade(&cx) {
934 this.update(&mut cx, |this, _| {
935 this.language_servers.insert(key, language_server.clone());
936 });
937 }
938
939 let disk_based_sources = language
940 .disk_based_diagnostic_sources()
941 .cloned()
942 .unwrap_or_default();
943 let disk_based_diagnostics_progress_token =
944 language.disk_based_diagnostics_progress_token().cloned();
945 let has_disk_based_diagnostic_progress_token =
946 disk_based_diagnostics_progress_token.is_some();
947 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
948
949 // Listen for `PublishDiagnostics` notifications.
950 language_server
951 .on_notification::<lsp::notification::PublishDiagnostics, _>({
952 let diagnostics_tx = diagnostics_tx.clone();
953 move |params| {
954 if !has_disk_based_diagnostic_progress_token {
955 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
956 }
957 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
958 .ok();
959 if !has_disk_based_diagnostic_progress_token {
960 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
961 }
962 }
963 })
964 .detach();
965
966 // Listen for `Progress` notifications. Send an event when the language server
967 // transitions between running jobs and not running any jobs.
968 let mut running_jobs_for_this_server: i32 = 0;
969 language_server
970 .on_notification::<lsp::notification::Progress, _>(move |params| {
971 let token = match params.token {
972 lsp::NumberOrString::Number(_) => None,
973 lsp::NumberOrString::String(token) => Some(token),
974 };
975
976 if token == disk_based_diagnostics_progress_token {
977 match params.value {
978 lsp::ProgressParamsValue::WorkDone(progress) => {
979 match progress {
980 lsp::WorkDoneProgress::Begin(_) => {
981 running_jobs_for_this_server += 1;
982 if running_jobs_for_this_server == 1 {
983 block_on(
984 diagnostics_tx
985 .send(LspEvent::DiagnosticsStart),
986 )
987 .ok();
988 }
989 }
990 lsp::WorkDoneProgress::End(_) => {
991 running_jobs_for_this_server -= 1;
992 if running_jobs_for_this_server == 0 {
993 block_on(
994 diagnostics_tx
995 .send(LspEvent::DiagnosticsFinish),
996 )
997 .ok();
998 }
999 }
1000 _ => {}
1001 }
1002 }
1003 }
1004 }
1005 })
1006 .detach();
1007
1008 // Process all the LSP events.
1009 cx.spawn(|mut cx| async move {
1010 while let Ok(message) = diagnostics_rx.recv().await {
1011 let this = this.upgrade(&cx)?;
1012 match message {
1013 LspEvent::DiagnosticsStart => {
1014 this.update(&mut cx, |this, cx| {
1015 this.disk_based_diagnostics_started(cx);
1016 if let Some(project_id) = this.remote_id() {
1017 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1018 project_id,
1019 })
1020 .log_err();
1021 }
1022 });
1023 }
1024 LspEvent::DiagnosticsUpdate(mut params) => {
1025 language.process_diagnostics(&mut params);
1026 this.update(&mut cx, |this, cx| {
1027 this.update_diagnostics(params, &disk_based_sources, cx)
1028 .log_err();
1029 });
1030 }
1031 LspEvent::DiagnosticsFinish => {
1032 this.update(&mut cx, |this, cx| {
1033 this.disk_based_diagnostics_finished(cx);
1034 if let Some(project_id) = this.remote_id() {
1035 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1036 project_id,
1037 })
1038 .log_err();
1039 }
1040 });
1041 }
1042 }
1043 }
1044 Some(())
1045 })
1046 .detach();
1047
1048 Some(language_server)
1049 })
1050 .shared()
1051 })
1052 .clone()
1053 }
1054
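    /// Translates an LSP `PublishDiagnostics` payload into grouped
    /// `DiagnosticEntry` values. Each primary diagnostic gets a fresh group
    /// id and its same-file related information is added to that group as
    /// non-primary entries; diagnostics that merely point back at a known
    /// primary (matched by source, code, and range) are used only to refine
    /// the severity of the matching non-primary entry. Results are stored on
    /// the worktree and any matching open buffer via
    /// `update_diagnostic_entries`.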
1055 pub fn update_diagnostics(
1056 &mut self,
1057 params: lsp::PublishDiagnosticsParams,
1058 disk_based_sources: &HashSet<String>,
1059 cx: &mut ModelContext<Self>,
1060 ) -> Result<()> {
1061 let abs_path = params
1062 .uri
1063 .to_file_path()
1064 .map_err(|_| anyhow!("URI is not a file"))?;
1065 let mut next_group_id = 0;
1066 let mut diagnostics = Vec::default();
1067 let mut primary_diagnostic_group_ids = HashMap::default();
1068 let mut sources_by_group_id = HashMap::default();
1069 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1071 let source = diagnostic.source.as_ref();
1072 let code = diagnostic.code.as_ref().map(|code| match code {
1073 lsp::NumberOrString::Number(code) => code.to_string(),
1074 lsp::NumberOrString::String(code) => code.clone(),
1075 });
1076 let range = range_from_lsp(diagnostic.range);
1077 let is_supporting = diagnostic
1078 .related_information
1079 .as_ref()
1080 .map_or(false, |infos| {
1081 infos.iter().any(|info| {
1082 primary_diagnostic_group_ids.contains_key(&(
1083 source,
1084 code.clone(),
1085 range_from_lsp(info.location.range),
1086 ))
1087 })
1088 });
1089
1090 if is_supporting {
1091 if let Some(severity) = diagnostic.severity {
1092 supporting_diagnostic_severities
1093 .insert((source, code.clone(), range), severity);
1094 }
1095 } else {
1096 let group_id = post_inc(&mut next_group_id);
1097 let is_disk_based =
1098 source.map_or(false, |source| disk_based_sources.contains(source));
1099
1100 sources_by_group_id.insert(group_id, source);
1101 primary_diagnostic_group_ids
1102 .insert((source, code.clone(), range.clone()), group_id);
1103
1104 diagnostics.push(DiagnosticEntry {
1105 range,
1106 diagnostic: Diagnostic {
1107 code: code.clone(),
1108 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1109 message: diagnostic.message.clone(),
1110 group_id,
1111 is_primary: true,
1112 is_valid: true,
1113 is_disk_based,
1114 },
1115 });
1116 if let Some(infos) = &diagnostic.related_information {
1117 for info in infos {
1118 if info.location.uri == params.uri && !info.message.is_empty() {
1119 let range = range_from_lsp(info.location.range);
1120 diagnostics.push(DiagnosticEntry {
1121 range,
1122 diagnostic: Diagnostic {
1123 code: code.clone(),
1124 severity: DiagnosticSeverity::INFORMATION,
1125 message: info.message.clone(),
1126 group_id,
1127 is_primary: false,
1128 is_valid: true,
1129 is_disk_based,
1130 },
1131 });
1132 }
1133 }
1134 }
1135 }
1136 }
1137
1138 for entry in &mut diagnostics {
1139 let diagnostic = &mut entry.diagnostic;
1140 if !diagnostic.is_primary {
1141 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1142 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1143 source,
1144 diagnostic.code.clone(),
1145 entry.range.clone(),
1146 )) {
1147 diagnostic.severity = severity;
1148 }
1149 }
1150 }
1151
1152 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1153 Ok(())
1154 }
1155
1156 pub fn update_diagnostic_entries(
1157 &mut self,
1158 abs_path: PathBuf,
1159 version: Option<i32>,
1160 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1161 cx: &mut ModelContext<Project>,
1162 ) -> Result<(), anyhow::Error> {
1163 let (worktree, relative_path) = self
1164 .find_local_worktree(&abs_path, cx)
1165 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1166 let project_path = ProjectPath {
1167 worktree_id: worktree.read(cx).id(),
1168 path: relative_path.into(),
1169 };
1170
1171 for buffer in self.buffers_state.borrow().open_buffers.values() {
1172 if let Some(buffer) = buffer.upgrade(cx) {
1173 if buffer
1174 .read(cx)
1175 .file()
1176 .map_or(false, |file| *file.path() == project_path.path)
1177 {
1178 buffer.update(cx, |buffer, cx| {
1179 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1180 })?;
1181 break;
1182 }
1183 }
1184 }
1185 worktree.update(cx, |worktree, cx| {
1186 worktree
1187 .as_local_mut()
1188 .ok_or_else(|| anyhow!("not a local worktree"))?
1189 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1190 })?;
1191 cx.emit(Event::DiagnosticsUpdated(project_path));
1192 Ok(())
1193 }
1194
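    /// Formats a set of buffers, returning one `ProjectTransaction` covering
    /// every edited buffer. Buffers in local worktrees are formatted via
    /// their language server's `textDocument/formatting` request; buffers
    /// belonging to a remote project are formatted by the host through a
    /// `FormatBuffers` RPC and the returned transaction is deserialized
    /// locally. Transactions are dropped from undo history unless
    /// `push_to_history` is set.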
1195 pub fn format(
1196 &self,
1197 buffers: HashSet<ModelHandle<Buffer>>,
1198 push_to_history: bool,
1199 cx: &mut ModelContext<Project>,
1200 ) -> Task<Result<ProjectTransaction>> {
1201 let mut local_buffers = Vec::new();
1202 let mut remote_buffers = None;
1203 for buffer_handle in buffers {
1204 let buffer = buffer_handle.read(cx);
1205 let worktree;
1206 if let Some(file) = File::from_dyn(buffer.file()) {
1207 worktree = file.worktree.clone();
1208 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1209 let lang_server;
1210 if let Some(lang) = buffer.language() {
1211 if let Some(server) = self
1212 .language_servers
1213 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1214 {
1215 lang_server = server.clone();
1216 } else {
1217 return Task::ready(Ok(Default::default()));
1218 };
1219 } else {
1220 return Task::ready(Ok(Default::default()));
1221 }
1222
1223 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1224 } else {
1225 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1226 }
1227 } else {
1228 return Task::ready(Ok(Default::default()));
1229 }
1230 }
1231
1232 let remote_buffers = self.remote_id().zip(remote_buffers);
1233 let client = self.client.clone();
1234 let request_handle = self.start_buffer_request(cx);
1235
1236 cx.spawn(|this, mut cx| async move {
1237 let mut project_transaction = ProjectTransaction::default();
1238
1239 if let Some((project_id, remote_buffers)) = remote_buffers {
1240 let response = client
1241 .request(proto::FormatBuffers {
1242 project_id,
1243 buffer_ids: remote_buffers
1244 .iter()
1245 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1246 .collect(),
1247 })
1248 .await?
1249 .transaction
1250 .ok_or_else(|| anyhow!("missing transaction"))?;
1251 project_transaction = this
1252 .update(&mut cx, |this, cx| {
1253 this.deserialize_project_transaction(
1254 response,
1255 push_to_history,
1256 request_handle,
1257 cx,
1258 )
1259 })
1260 .await?;
1261 }
1262
1263 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1264 let lsp_edits = lang_server
1265 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1266 text_document: lsp::TextDocumentIdentifier::new(
1267 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1268 ),
1269 options: Default::default(),
1270 work_done_progress_params: Default::default(),
1271 })
1272 .await?;
1273
1274 if let Some(lsp_edits) = lsp_edits {
1275 let edits = buffer
1276 .update(&mut cx, |buffer, cx| {
1277 buffer.edits_from_lsp(lsp_edits, None, cx)
1278 })
1279 .await?;
1280 buffer.update(&mut cx, |buffer, cx| {
1281 buffer.finalize_last_transaction();
1282 buffer.start_transaction();
1283 for (range, text) in edits {
1284 buffer.edit([range], text, cx);
1285 }
1286 if buffer.end_transaction(cx).is_some() {
1287 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1288 if !push_to_history {
1289 buffer.forget_transaction(transaction.id);
1290 }
1291 project_transaction.0.insert(cx.handle(), transaction);
1292 }
1293 });
1294 }
1295 }
1296
1297 Ok(project_transaction)
1298 })
1299 }
1300
1301 pub fn definition<T: ToPointUtf16>(
1302 &self,
1303 buffer: &ModelHandle<Buffer>,
1304 position: T,
1305 cx: &mut ModelContext<Self>,
1306 ) -> Task<Result<Vec<Location>>> {
1307 let position = position.to_point_utf16(buffer.read(cx));
1308 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1309 }
1310
1311 pub fn references<T: ToPointUtf16>(
1312 &self,
1313 buffer: &ModelHandle<Buffer>,
1314 position: T,
1315 cx: &mut ModelContext<Self>,
1316 ) -> Task<Result<Vec<Location>>> {
1317 let position = position.to_point_utf16(buffer.read(cx));
1318 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1319 }
1320
1321 pub fn document_highlights<T: ToPointUtf16>(
1322 &self,
1323 buffer: &ModelHandle<Buffer>,
1324 position: T,
1325 cx: &mut ModelContext<Self>,
1326 ) -> Task<Result<Vec<DocumentHighlight>>> {
1327 let position = position.to_point_utf16(buffer.read(cx));
1328 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1329 }
1330
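    /// Searches project-wide symbols. Locally this issues one
    /// `workspace/symbol` request per running language server and maps each
    /// result back to a worktree-relative path (or a path relative to the
    /// server's worktree root when the file lies outside any worktree);
    /// remotely it forwards a `GetProjectSymbols` request to the host.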
1331 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1332 if self.is_local() {
1333 let mut language_servers = HashMap::default();
1334 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1335 if let Some((worktree, language)) = self
1336 .worktree_for_id(*worktree_id, cx)
1337 .and_then(|worktree| worktree.read(cx).as_local())
1338 .zip(self.languages.get_language(language_name))
1339 {
1340 language_servers
1341 .entry(Arc::as_ptr(language_server))
1342 .or_insert((
1343 language_server.clone(),
1344 *worktree_id,
1345 worktree.abs_path().clone(),
1346 language.clone(),
1347 ));
1348 }
1349 }
1350
1351 let mut requests = Vec::new();
1352 for (language_server, _, _, _) in language_servers.values() {
1353 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1354 lsp::WorkspaceSymbolParams {
1355 query: query.to_string(),
1356 ..Default::default()
1357 },
1358 ));
1359 }
1360
1361 cx.spawn_weak(|this, cx| async move {
1362 let responses = futures::future::try_join_all(requests).await?;
1363
1364 let mut symbols = Vec::new();
1365 if let Some(this) = this.upgrade(&cx) {
1366 this.read_with(&cx, |this, cx| {
1367 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1368 language_servers.into_values().zip(responses)
1369 {
1370 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1371 |lsp_symbol| {
1372 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1373 let mut worktree_id = source_worktree_id;
1374 let path;
1375 if let Some((worktree, rel_path)) =
1376 this.find_local_worktree(&abs_path, cx)
1377 {
1378 worktree_id = worktree.read(cx).id();
1379 path = rel_path;
1380 } else {
1381 path = relativize_path(&worktree_abs_path, &abs_path);
1382 }
1383
1384 let label = language
1385 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1386 .unwrap_or_else(|| {
1387 CodeLabel::plain(lsp_symbol.name.clone(), None)
1388 });
1389 let signature = this.symbol_signature(worktree_id, &path);
1390
1391 Some(Symbol {
1392 source_worktree_id,
1393 worktree_id,
1394 language_name: language.name().to_string(),
1395 name: lsp_symbol.name,
1396 kind: lsp_symbol.kind,
1397 label,
1398 path,
1399 range: range_from_lsp(lsp_symbol.location.range),
1400 signature,
1401 })
1402 },
1403 ));
1404 }
1405 })
1406 }
1407
1408 Ok(symbols)
1409 })
1410 } else if let Some(project_id) = self.remote_id() {
1411 let request = self.client.request(proto::GetProjectSymbols {
1412 project_id,
1413 query: query.to_string(),
1414 });
1415 cx.spawn_weak(|this, cx| async move {
1416 let response = request.await?;
1417 let mut symbols = Vec::new();
1418 if let Some(this) = this.upgrade(&cx) {
1419 this.read_with(&cx, |this, _| {
1420 symbols.extend(
1421 response
1422 .symbols
1423 .into_iter()
1424 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1425 );
1426 })
1427 }
1428 Ok(symbols)
1429 })
1430 } else {
1431 Task::ready(Ok(Default::default()))
1432 }
1433 }
1434
1435 pub fn open_buffer_for_symbol(
1436 &mut self,
1437 symbol: &Symbol,
1438 cx: &mut ModelContext<Self>,
1439 ) -> Task<Result<ModelHandle<Buffer>>> {
1440 if self.is_local() {
1441 let language_server = if let Some(server) = self
1442 .language_servers
1443 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1444 {
1445 server.clone()
1446 } else {
1447 return Task::ready(Err(anyhow!(
1448 "language server for worktree and language not found"
1449 )));
1450 };
1451
1452 let worktree_abs_path = if let Some(worktree_abs_path) = self
1453 .worktree_for_id(symbol.worktree_id, cx)
1454 .and_then(|worktree| worktree.read(cx).as_local())
1455 .map(|local_worktree| local_worktree.abs_path())
1456 {
1457 worktree_abs_path
1458 } else {
1459 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1460 };
1461 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1462 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1463 uri
1464 } else {
1465 return Task::ready(Err(anyhow!("invalid symbol path")));
1466 };
1467
1468 self.open_local_buffer_via_lsp(
1469 symbol_uri,
1470 symbol.language_name.clone(),
1471 language_server,
1472 cx,
1473 )
1474 } else if let Some(project_id) = self.remote_id() {
1475 let request_handle = self.start_buffer_request(cx);
1476 let request = self.client.request(proto::OpenBufferForSymbol {
1477 project_id,
1478 symbol: Some(serialize_symbol(symbol)),
1479 });
1480 cx.spawn(|this, mut cx| async move {
1481 let response = request.await?;
1482 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1483 this.update(&mut cx, |this, cx| {
1484 this.deserialize_buffer(buffer, request_handle, cx)
1485 })
1486 .await
1487 })
1488 } else {
1489 Task::ready(Err(anyhow!("project does not have a remote id")))
1490 }
1491 }
1492
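    /// Requests completions at the given position. For local buffers this
    /// sends `textDocument/completion` to the buffer's language server and
    /// converts edits whose ranges survive clipping into `Completion` values;
    /// for remote buffers it asks the host and waits for the buffer to catch
    /// up to the host's reported version before deserializing the results.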
1493 pub fn completions<T: ToPointUtf16>(
1494 &self,
1495 source_buffer_handle: &ModelHandle<Buffer>,
1496 position: T,
1497 cx: &mut ModelContext<Self>,
1498 ) -> Task<Result<Vec<Completion>>> {
1499 let source_buffer_handle = source_buffer_handle.clone();
1500 let source_buffer = source_buffer_handle.read(cx);
1501 let buffer_id = source_buffer.remote_id();
1502 let language = source_buffer.language().cloned();
1503 let worktree;
1504 let buffer_abs_path;
1505 if let Some(file) = File::from_dyn(source_buffer.file()) {
1506 worktree = file.worktree.clone();
1507 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1508 } else {
1509 return Task::ready(Ok(Default::default()));
1510 };
1511
1512 let position = position.to_point_utf16(source_buffer);
1513 let anchor = source_buffer.anchor_after(position);
1514
1515 if worktree.read(cx).as_local().is_some() {
1516 let buffer_abs_path = buffer_abs_path.unwrap();
1517 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1518 server
1519 } else {
1520 return Task::ready(Ok(Default::default()));
1521 };
1522
1523 cx.spawn(|_, cx| async move {
1524 let completions = lang_server
1525 .request::<lsp::request::Completion>(lsp::CompletionParams {
1526 text_document_position: lsp::TextDocumentPositionParams::new(
1527 lsp::TextDocumentIdentifier::new(
1528 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1529 ),
1530 position.to_lsp_position(),
1531 ),
1532 context: Default::default(),
1533 work_done_progress_params: Default::default(),
1534 partial_result_params: Default::default(),
1535 })
1536 .await
1537 .context("lsp completion request failed")?;
1538
1539 let completions = if let Some(completions) = completions {
1540 match completions {
1541 lsp::CompletionResponse::Array(completions) => completions,
1542 lsp::CompletionResponse::List(list) => list.items,
1543 }
1544 } else {
1545 Default::default()
1546 };
1547
1548 source_buffer_handle.read_with(&cx, |this, _| {
1549 Ok(completions
1550 .into_iter()
1551 .filter_map(|lsp_completion| {
1552 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1553 lsp::CompletionTextEdit::Edit(edit) => {
1554 (range_from_lsp(edit.range), edit.new_text.clone())
1555 }
1556 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1557 log::info!("unsupported insert/replace completion");
1558 return None;
1559 }
1560 };
1561
1562 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1563 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1564 if clipped_start == old_range.start && clipped_end == old_range.end {
1565 Some(Completion {
1566 old_range: this.anchor_before(old_range.start)
1567 ..this.anchor_after(old_range.end),
1568 new_text,
1569 label: language
1570 .as_ref()
1571 .and_then(|l| l.label_for_completion(&lsp_completion))
1572 .unwrap_or_else(|| {
1573 CodeLabel::plain(
1574 lsp_completion.label.clone(),
1575 lsp_completion.filter_text.as_deref(),
1576 )
1577 }),
1578 lsp_completion,
1579 })
1580 } else {
1581 None
1582 }
1583 })
1584 .collect())
1585 })
1586 })
1587 } else if let Some(project_id) = self.remote_id() {
1588 let rpc = self.client.clone();
1589 let message = proto::GetCompletions {
1590 project_id,
1591 buffer_id,
1592 position: Some(language::proto::serialize_anchor(&anchor)),
1593 version: (&source_buffer.version()).into(),
1594 };
1595 cx.spawn_weak(|_, mut cx| async move {
1596 let response = rpc.request(message).await?;
1597
1598 source_buffer_handle
1599 .update(&mut cx, |buffer, _| {
1600 buffer.wait_for_version(response.version.into())
1601 })
1602 .await;
1603
1604 response
1605 .completions
1606 .into_iter()
1607 .map(|completion| {
1608 language::proto::deserialize_completion(completion, language.as_ref())
1609 })
1610 .collect()
1611 })
1612 } else {
1613 Task::ready(Ok(Default::default()))
1614 }
1615 }
1616
1617 pub fn apply_additional_edits_for_completion(
1618 &self,
1619 buffer_handle: ModelHandle<Buffer>,
1620 completion: Completion,
1621 push_to_history: bool,
1622 cx: &mut ModelContext<Self>,
1623 ) -> Task<Result<Option<Transaction>>> {
1624 let buffer = buffer_handle.read(cx);
1625 let buffer_id = buffer.remote_id();
1626
1627 if self.is_local() {
1628 let lang_server = if let Some(language_server) = buffer.language_server() {
1629 language_server.clone()
1630 } else {
1631 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1632 };
1633
1634 cx.spawn(|_, mut cx| async move {
1635 let resolved_completion = lang_server
1636 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1637 .await?;
1638 if let Some(edits) = resolved_completion.additional_text_edits {
1639 let edits = buffer_handle
1640 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1641 .await?;
1642 buffer_handle.update(&mut cx, |buffer, cx| {
1643 buffer.finalize_last_transaction();
1644 buffer.start_transaction();
1645 for (range, text) in edits {
1646 buffer.edit([range], text, cx);
1647 }
1648 let transaction = if buffer.end_transaction(cx).is_some() {
1649 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1650 if !push_to_history {
1651 buffer.forget_transaction(transaction.id);
1652 }
1653 Some(transaction)
1654 } else {
1655 None
1656 };
1657 Ok(transaction)
1658 })
1659 } else {
1660 Ok(None)
1661 }
1662 })
1663 } else if let Some(project_id) = self.remote_id() {
1664 let client = self.client.clone();
1665 cx.spawn(|_, mut cx| async move {
1666 let response = client
1667 .request(proto::ApplyCompletionAdditionalEdits {
1668 project_id,
1669 buffer_id,
1670 completion: Some(language::proto::serialize_completion(&completion)),
1671 })
1672 .await?;
1673
1674 if let Some(transaction) = response.transaction {
1675 let transaction = language::proto::deserialize_transaction(transaction)?;
1676 buffer_handle
1677 .update(&mut cx, |buffer, _| {
1678 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1679 })
1680 .await;
1681 if push_to_history {
1682 buffer_handle.update(&mut cx, |buffer, _| {
1683 buffer.push_transaction(transaction.clone(), Instant::now());
1684 });
1685 }
1686 Ok(Some(transaction))
1687 } else {
1688 Ok(None)
1689 }
1690 })
1691 } else {
1692 Task::ready(Err(anyhow!("project does not have a remote id")))
1693 }
1694 }
1695
1696 pub fn code_actions<T: ToOffset>(
1697 &self,
1698 buffer_handle: &ModelHandle<Buffer>,
1699 range: Range<T>,
1700 cx: &mut ModelContext<Self>,
1701 ) -> Task<Result<Vec<CodeAction>>> {
1702 let buffer_handle = buffer_handle.clone();
1703 let buffer = buffer_handle.read(cx);
1704 let buffer_id = buffer.remote_id();
1705 let worktree;
1706 let buffer_abs_path;
1707 if let Some(file) = File::from_dyn(buffer.file()) {
1708 worktree = file.worktree.clone();
1709 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1710 } else {
1711 return Task::ready(Ok(Default::default()));
1712 };
1713 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1714
1715 if worktree.read(cx).as_local().is_some() {
1716 let buffer_abs_path = buffer_abs_path.unwrap();
1717 let lang_name;
1718 let lang_server;
1719 if let Some(lang) = buffer.language() {
1720 lang_name = lang.name().to_string();
1721 if let Some(server) = self
1722 .language_servers
1723 .get(&(worktree.read(cx).id(), lang_name.clone()))
1724 {
1725 lang_server = server.clone();
1726 } else {
1727 return Task::ready(Ok(Default::default()));
1728 };
1729 } else {
1730 return Task::ready(Ok(Default::default()));
1731 }
1732
1733 let lsp_range = lsp::Range::new(
1734 range.start.to_point_utf16(buffer).to_lsp_position(),
1735 range.end.to_point_utf16(buffer).to_lsp_position(),
1736 );
1737 cx.foreground().spawn(async move {
1738 Ok(lang_server
1739 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1740 text_document: lsp::TextDocumentIdentifier::new(
1741 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1742 ),
1743 range: lsp_range,
1744 work_done_progress_params: Default::default(),
1745 partial_result_params: Default::default(),
1746 context: lsp::CodeActionContext {
1747 diagnostics: Default::default(),
1748 only: Some(vec![
1749 lsp::CodeActionKind::QUICKFIX,
1750 lsp::CodeActionKind::REFACTOR,
1751 lsp::CodeActionKind::REFACTOR_EXTRACT,
1752 ]),
1753 },
1754 })
1755 .await?
1756 .unwrap_or_default()
1757 .into_iter()
1758 .filter_map(|entry| {
1759 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1760 Some(CodeAction {
1761 range: range.clone(),
1762 lsp_action,
1763 })
1764 } else {
1765 None
1766 }
1767 })
1768 .collect())
1769 })
1770 } else if let Some(project_id) = self.remote_id() {
1771 let rpc = self.client.clone();
1772 cx.spawn_weak(|_, mut cx| async move {
1773 let response = rpc
1774 .request(proto::GetCodeActions {
1775 project_id,
1776 buffer_id,
1777 start: Some(language::proto::serialize_anchor(&range.start)),
1778 end: Some(language::proto::serialize_anchor(&range.end)),
1779 })
1780 .await?;
1781
1782 buffer_handle
1783 .update(&mut cx, |buffer, _| {
1784 buffer.wait_for_version(response.version.into())
1785 })
1786 .await;
1787
1788 response
1789 .actions
1790 .into_iter()
1791 .map(language::proto::deserialize_code_action)
1792 .collect()
1793 })
1794 } else {
1795 Task::ready(Ok(Default::default()))
1796 }
1797 }
1798
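    /// Applies a code action. Locally the action is first resolved: if the
    /// LSP action carries `data`, its range is refreshed and
    /// `codeAction/resolve` is sent; otherwise the actions for the range are
    /// re-requested and matched by title. Any resulting workspace edit is
    /// applied via `deserialize_workspace_edit`. For remote projects the
    /// action is forwarded to the host and the returned project transaction
    /// is deserialized.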
1799 pub fn apply_code_action(
1800 &self,
1801 buffer_handle: ModelHandle<Buffer>,
1802 mut action: CodeAction,
1803 push_to_history: bool,
1804 cx: &mut ModelContext<Self>,
1805 ) -> Task<Result<ProjectTransaction>> {
1806 if self.is_local() {
1807 let buffer = buffer_handle.read(cx);
1808 let lang_name = if let Some(lang) = buffer.language() {
1809 lang.name().to_string()
1810 } else {
1811 return Task::ready(Ok(Default::default()));
1812 };
1813 let lang_server = if let Some(language_server) = buffer.language_server() {
1814 language_server.clone()
1815 } else {
1816 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1817 };
1818 let range = action.range.to_point_utf16(buffer);
1819
1820 cx.spawn(|this, mut cx| async move {
1821 if let Some(lsp_range) = action
1822 .lsp_action
1823 .data
1824 .as_mut()
1825 .and_then(|d| d.get_mut("codeActionParams"))
1826 .and_then(|d| d.get_mut("range"))
1827 {
1828 *lsp_range = serde_json::to_value(&lsp::Range::new(
1829 range.start.to_lsp_position(),
1830 range.end.to_lsp_position(),
1831 ))
1832 .unwrap();
1833 action.lsp_action = lang_server
1834 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1835 .await?;
1836 } else {
1837 let actions = this
1838 .update(&mut cx, |this, cx| {
1839 this.code_actions(&buffer_handle, action.range, cx)
1840 })
1841 .await?;
1842 action.lsp_action = actions
1843 .into_iter()
1844 .find(|a| a.lsp_action.title == action.lsp_action.title)
1845 .ok_or_else(|| anyhow!("code action is outdated"))?
1846 .lsp_action;
1847 }
1848
1849 if let Some(edit) = action.lsp_action.edit {
1850 Self::deserialize_workspace_edit(
1851 this,
1852 edit,
1853 push_to_history,
1854 lang_name,
1855 lang_server,
1856 &mut cx,
1857 )
1858 .await
1859 } else {
1860 Ok(ProjectTransaction::default())
1861 }
1862 })
1863 } else if let Some(project_id) = self.remote_id() {
1864 let client = self.client.clone();
1865 let request_handle = self.start_buffer_request(cx);
1866 let request = proto::ApplyCodeAction {
1867 project_id,
1868 buffer_id: buffer_handle.read(cx).remote_id(),
1869 action: Some(language::proto::serialize_code_action(&action)),
1870 };
1871 cx.spawn(|this, mut cx| async move {
1872 let response = client
1873 .request(request)
1874 .await?
1875 .transaction
1876 .ok_or_else(|| anyhow!("missing transaction"))?;
1877 this.update(&mut cx, |this, cx| {
1878 this.deserialize_project_transaction(
1879 response,
1880 push_to_history,
1881 request_handle,
1882 cx,
1883 )
1884 })
1885 .await
1886 })
1887 } else {
1888 Task::ready(Err(anyhow!("project does not have a remote id")))
1889 }
1890 }
1891
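    /// Applies an `lsp::WorkspaceEdit` on behalf of a language server:
    /// resource operations (create, rename, delete) go through the `Fs`
    /// trait, while text edits open the target buffer via the language
    /// server's URI and are applied inside a transaction. Per-buffer
    /// transactions are collected into the returned `ProjectTransaction`,
    /// and are kept out of undo history unless `push_to_history` is set.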
1892 async fn deserialize_workspace_edit(
1893 this: ModelHandle<Self>,
1894 edit: lsp::WorkspaceEdit,
1895 push_to_history: bool,
1896 language_name: String,
1897 language_server: Arc<LanguageServer>,
1898 cx: &mut AsyncAppContext,
1899 ) -> Result<ProjectTransaction> {
1900 let fs = this.read_with(cx, |this, _| this.fs.clone());
1901 let mut operations = Vec::new();
1902 if let Some(document_changes) = edit.document_changes {
1903 match document_changes {
1904 lsp::DocumentChanges::Edits(edits) => {
1905 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1906 }
1907 lsp::DocumentChanges::Operations(ops) => operations = ops,
1908 }
1909 } else if let Some(changes) = edit.changes {
1910 operations.extend(changes.into_iter().map(|(uri, edits)| {
1911 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1912 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1913 uri,
1914 version: None,
1915 },
1916 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1917 })
1918 }));
1919 }
1920
1921 let mut project_transaction = ProjectTransaction::default();
1922 for operation in operations {
1923 match operation {
1924 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1925 let abs_path = op
1926 .uri
1927 .to_file_path()
1928 .map_err(|_| anyhow!("can't convert URI to path"))?;
1929
1930 if let Some(parent_path) = abs_path.parent() {
1931 fs.create_dir(parent_path).await?;
1932 }
1933 if abs_path.ends_with("/") {
1934 fs.create_dir(&abs_path).await?;
1935 } else {
1936 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1937 .await?;
1938 }
1939 }
1940 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1941 let source_abs_path = op
1942 .old_uri
1943 .to_file_path()
1944 .map_err(|_| anyhow!("can't convert URI to path"))?;
1945 let target_abs_path = op
1946 .new_uri
1947 .to_file_path()
1948 .map_err(|_| anyhow!("can't convert URI to path"))?;
1949 fs.rename(
1950 &source_abs_path,
1951 &target_abs_path,
1952 op.options.map(Into::into).unwrap_or_default(),
1953 )
1954 .await?;
1955 }
1956 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1957 let abs_path = op
1958 .uri
1959 .to_file_path()
1960 .map_err(|_| anyhow!("can't convert URI to path"))?;
1961 let options = op.options.map(Into::into).unwrap_or_default();
1962 if abs_path.ends_with("/") {
1963 fs.remove_dir(&abs_path, options).await?;
1964 } else {
1965 fs.remove_file(&abs_path, options).await?;
1966 }
1967 }
1968 lsp::DocumentChangeOperation::Edit(op) => {
1969 let buffer_to_edit = this
1970 .update(cx, |this, cx| {
1971 this.open_local_buffer_via_lsp(
1972 op.text_document.uri,
1973 language_name.clone(),
1974 language_server.clone(),
1975 cx,
1976 )
1977 })
1978 .await?;
1979
1980 let edits = buffer_to_edit
1981 .update(cx, |buffer, cx| {
1982 let edits = op.edits.into_iter().map(|edit| match edit {
1983 lsp::OneOf::Left(edit) => edit,
1984 lsp::OneOf::Right(edit) => edit.text_edit,
1985 });
1986 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1987 })
1988 .await?;
1989
1990 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1991 buffer.finalize_last_transaction();
1992 buffer.start_transaction();
1993 for (range, text) in edits {
1994 buffer.edit([range], text, cx);
1995 }
1996 let transaction = if buffer.end_transaction(cx).is_some() {
1997 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1998 if !push_to_history {
1999 buffer.forget_transaction(transaction.id);
2000 }
2001 Some(transaction)
2002 } else {
2003 None
2004 };
2005
2006 transaction
2007 });
2008 if let Some(transaction) = transaction {
2009 project_transaction.0.insert(buffer_to_edit, transaction);
2010 }
2011 }
2012 }
2013 }
2014
2015 Ok(project_transaction)
2016 }
2017
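    // Rename support: `prepare_rename` asks the language server for the range that would be
    // renamed at `position`, and `perform_rename` (below) applies the rename itself. Both are
    // expressed as `LspCommand`s and routed through `request_lsp`.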
2018 pub fn prepare_rename<T: ToPointUtf16>(
2019 &self,
2020 buffer: ModelHandle<Buffer>,
2021 position: T,
2022 cx: &mut ModelContext<Self>,
2023 ) -> Task<Result<Option<Range<Anchor>>>> {
2024 let position = position.to_point_utf16(buffer.read(cx));
2025 self.request_lsp(buffer, PrepareRename { position }, cx)
2026 }
2027
2028 pub fn perform_rename<T: ToPointUtf16>(
2029 &self,
2030 buffer: ModelHandle<Buffer>,
2031 position: T,
2032 new_name: String,
2033 push_to_history: bool,
2034 cx: &mut ModelContext<Self>,
2035 ) -> Task<Result<ProjectTransaction>> {
2036 let position = position.to_point_utf16(buffer.read(cx));
2037 self.request_lsp(
2038 buffer,
2039 PerformRename {
2040 position,
2041 new_name,
2042 push_to_history,
2043 },
2044 cx,
2045 )
2046 }
2047
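    // Project-wide text search. For local projects, worktree snapshots are split across
    // background workers that scan files on disk for candidate paths; candidate files are
    // opened as buffers (already-open buffers are searched as well), and per-worker match
    // maps are merged into the final result. Searching remote projects is not implemented
    // here (see the `todo!()` below).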
2048 pub fn search(
2049 &self,
2050 query: SearchQuery,
2051 cx: &mut ModelContext<Self>,
2052 ) -> Task<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>> {
2053 if self.is_local() {
2054 let snapshots = self
2055 .strong_worktrees(cx)
2056 .filter_map(|tree| {
2057 let tree = tree.read(cx).as_local()?;
2058 Some(tree.snapshot())
2059 })
2060 .collect::<Vec<_>>();
2061
2062 let background = cx.background().clone();
2063 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2064 let workers = background.num_cpus().min(path_count);
2065 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2066 cx.background()
2067 .spawn({
2068 let fs = self.fs.clone();
2069 let background = cx.background().clone();
2070 let query = query.clone();
2071 async move {
2072 let fs = &fs;
2073 let query = &query;
2074 let matching_paths_tx = &matching_paths_tx;
2075 let paths_per_worker = (path_count + workers - 1) / workers;
2076 let snapshots = &snapshots;
2077 background
2078 .scoped(|scope| {
2079 for worker_ix in 0..workers {
2080 let worker_start_ix = worker_ix * paths_per_worker;
2081 let worker_end_ix = worker_start_ix + paths_per_worker;
2082 scope.spawn(async move {
2083 let mut snapshot_start_ix = 0;
2084 let mut abs_path = PathBuf::new();
2085 for snapshot in snapshots {
2086 let snapshot_end_ix =
2087 snapshot_start_ix + snapshot.visible_file_count();
2088 if worker_end_ix <= snapshot_start_ix {
2089 break;
2090 } else if worker_start_ix > snapshot_end_ix {
2091 snapshot_start_ix = snapshot_end_ix;
2092 continue;
2093 } else {
2094 let start_in_snapshot = worker_start_ix
2095 .saturating_sub(snapshot_start_ix);
2096 let end_in_snapshot =
2097 cmp::min(worker_end_ix, snapshot_end_ix)
2098 - snapshot_start_ix;
2099
2100 for entry in snapshot
2101 .files(false, start_in_snapshot)
2102 .take(end_in_snapshot - start_in_snapshot)
2103 {
2104 if matching_paths_tx.is_closed() {
2105 break;
2106 }
2107
2108 abs_path.clear();
2109 abs_path.push(&snapshot.abs_path());
2110 abs_path.push(&entry.path);
2111 let matches = if let Some(file) =
2112 fs.open_sync(&abs_path).await.log_err()
2113 {
2114 query.detect(file).unwrap_or(false)
2115 } else {
2116 false
2117 };
2118
2119 if matches {
2120 let project_path =
2121 (snapshot.id(), entry.path.clone());
2122 if matching_paths_tx
2123 .send(project_path)
2124 .await
2125 .is_err()
2126 {
2127 break;
2128 }
2129 }
2130 }
2131
2132 snapshot_start_ix = snapshot_end_ix;
2133 }
2134 }
2135 });
2136 }
2137 })
2138 .await;
2139 }
2140 })
2141 .detach();
2142
2143 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2144 let open_buffers = self
2145 .buffers_state
2146 .borrow()
2147 .open_buffers
2148 .values()
2149 .filter_map(|b| b.upgrade(cx))
2150 .collect::<HashSet<_>>();
2151 cx.spawn(|this, cx| async move {
2152 for buffer in &open_buffers {
2153 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2154 buffers_tx.send((buffer.clone(), snapshot)).await?;
2155 }
2156
2157 let open_buffers = Rc::new(RefCell::new(open_buffers));
2158 while let Some(project_path) = matching_paths_rx.next().await {
2159 if buffers_tx.is_closed() {
2160 break;
2161 }
2162
2163 let this = this.clone();
2164 let open_buffers = open_buffers.clone();
2165 let buffers_tx = buffers_tx.clone();
2166 cx.spawn(|mut cx| async move {
2167 if let Some(buffer) = this
2168 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2169 .await
2170 .log_err()
2171 {
2172 if open_buffers.borrow_mut().insert(buffer.clone()) {
2173 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2174 buffers_tx.send((buffer, snapshot)).await?;
2175 }
2176 }
2177
2178 Ok::<_, anyhow::Error>(())
2179 })
2180 .detach();
2181 }
2182
2183 Ok::<_, anyhow::Error>(())
2184 })
2185 .detach_and_log_err(cx);
2186
2187 let background = cx.background().clone();
2188 cx.background().spawn(async move {
2189 let query = &query;
2190 let mut matched_buffers = Vec::new();
2191 for _ in 0..workers {
2192 matched_buffers.push(HashMap::default());
2193 }
2194 background
2195 .scoped(|scope| {
2196 for worker_matched_buffers in matched_buffers.iter_mut() {
2197 let mut buffers_rx = buffers_rx.clone();
2198 scope.spawn(async move {
2199 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2200 let buffer_matches = query
2201 .search(snapshot.as_rope())
2202 .await
2203 .iter()
2204 .map(|range| {
2205 snapshot.anchor_before(range.start)
2206 ..snapshot.anchor_after(range.end)
2207 })
2208 .collect::<Vec<_>>();
2209 if !buffer_matches.is_empty() {
2210 worker_matched_buffers
2211 .insert(buffer.clone(), buffer_matches);
2212 }
2213 }
2214 });
2215 }
2216 })
2217 .await;
2218 matched_buffers.into_iter().flatten().collect()
2219 })
2220 } else {
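            // Searching a remote project is not implemented yet.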
2221 todo!()
2222 }
2223 }
2224
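    // Routes a typed `LspCommand` either directly to the buffer's local language server or,
    // for remote projects, over RPC to the host, converting between LSP/proto types and the
    // command's response type. Requests that cannot be routed resolve to the response type's
    // default value.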
2225 fn request_lsp<R: LspCommand>(
2226 &self,
2227 buffer_handle: ModelHandle<Buffer>,
2228 request: R,
2229 cx: &mut ModelContext<Self>,
2230 ) -> Task<Result<R::Response>>
2231 where
2232 <R::LspRequest as lsp::request::Request>::Result: Send,
2233 {
2234 let buffer = buffer_handle.read(cx);
2235 if self.is_local() {
2236 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2237 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2238 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2239 return cx.spawn(|this, cx| async move {
2240 let response = language_server
2241 .request::<R::LspRequest>(lsp_params)
2242 .await
2243 .context("lsp request failed")?;
2244 request
2245 .response_from_lsp(response, this, buffer_handle, cx)
2246 .await
2247 });
2248 }
2249 } else if let Some(project_id) = self.remote_id() {
2250 let rpc = self.client.clone();
2251 let request_handle = self.start_buffer_request(cx);
2252 let message = request.to_proto(project_id, buffer);
2253 return cx.spawn(|this, cx| async move {
2254 let response = rpc.request(message).await?;
2255 request
2256 .response_from_proto(response, this, buffer_handle, request_handle, cx)
2257 .await
2258 });
2259 }
2260 Task::ready(Ok(Default::default()))
2261 }
2262
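    // Returns the worktree containing `abs_path` together with the path relative to its root,
    // creating a new local worktree rooted at `abs_path` if no existing worktree contains it.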
2263 pub fn find_or_create_local_worktree(
2264 &self,
2265 abs_path: impl AsRef<Path>,
2266 weak: bool,
2267 cx: &mut ModelContext<Self>,
2268 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2269 let abs_path = abs_path.as_ref();
2270 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2271 Task::ready(Ok((tree.clone(), relative_path.into())))
2272 } else {
2273 let worktree = self.create_local_worktree(abs_path, weak, cx);
2274 cx.foreground()
2275 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2276 }
2277 }
2278
2279 pub fn find_local_worktree(
2280 &self,
2281 abs_path: &Path,
2282 cx: &AppContext,
2283 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2284 for tree in self.worktrees(cx) {
2285 if let Some(relative_path) = tree
2286 .read(cx)
2287 .as_local()
2288 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2289 {
2290 return Some((tree.clone(), relative_path.into()));
2291 }
2292 }
2293 None
2294 }
2295
2296 pub fn is_shared(&self) -> bool {
2297 match &self.client_state {
2298 ProjectClientState::Local { is_shared, .. } => *is_shared,
2299 ProjectClientState::Remote { .. } => false,
2300 }
2301 }
2302
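    // Builds a local worktree, adds it to the project, and, if the project already has a
    // remote id, registers it with the server (and shares it when the project is shared).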
2303 fn create_local_worktree(
2304 &self,
2305 abs_path: impl AsRef<Path>,
2306 weak: bool,
2307 cx: &mut ModelContext<Self>,
2308 ) -> Task<Result<ModelHandle<Worktree>>> {
2309 let fs = self.fs.clone();
2310 let client = self.client.clone();
2311 let path = Arc::from(abs_path.as_ref());
2312 cx.spawn(|project, mut cx| async move {
2313 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
2314
2315 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2316 project.add_worktree(&worktree, cx);
2317 (project.remote_id(), project.is_shared())
2318 });
2319
2320 if let Some(project_id) = remote_project_id {
2321 worktree
2322 .update(&mut cx, |worktree, cx| {
2323 worktree.as_local_mut().unwrap().register(project_id, cx)
2324 })
2325 .await?;
2326 if is_shared {
2327 worktree
2328 .update(&mut cx, |worktree, cx| {
2329 worktree.as_local_mut().unwrap().share(project_id, cx)
2330 })
2331 .await?;
2332 }
2333 }
2334
2335 Ok(worktree)
2336 })
2337 }
2338
2339 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2340 self.worktrees.retain(|worktree| {
2341 worktree
2342 .upgrade(cx)
2343 .map_or(false, |w| w.read(cx).id() != id)
2344 });
2345 cx.notify();
2346 }
2347
2348 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2349 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2350 if worktree.read(cx).is_local() {
2351 cx.subscribe(&worktree, |this, worktree, _, cx| {
2352 this.update_local_worktree_buffers(worktree, cx);
2353 })
2354 .detach();
2355 }
2356
2357 let push_weak_handle = {
2358 let worktree = worktree.read(cx);
2359 worktree.is_local() && worktree.is_weak()
2360 };
2361 if push_weak_handle {
2362 cx.observe_release(&worktree, |this, cx| {
2363 this.worktrees
2364 .retain(|worktree| worktree.upgrade(cx).is_some());
2365 cx.notify();
2366 })
2367 .detach();
2368 self.worktrees
2369 .push(WorktreeHandle::Weak(worktree.downgrade()));
2370 } else {
2371 self.worktrees
2372 .push(WorktreeHandle::Strong(worktree.clone()));
2373 }
2374 cx.notify();
2375 }
2376
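    // When a local worktree's snapshot changes, re-resolve the `File` of every open buffer in
    // that worktree (first by entry id, then by path, falling back to a file with no entry if
    // it was deleted), notify remote peers with `proto::UpdateBufferFile`, and drop entries
    // for buffers that no longer exist.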
2377 fn update_local_worktree_buffers(
2378 &mut self,
2379 worktree_handle: ModelHandle<Worktree>,
2380 cx: &mut ModelContext<Self>,
2381 ) {
2382 let snapshot = worktree_handle.read(cx).snapshot();
2383 let mut buffers_to_delete = Vec::new();
2384 for (buffer_id, buffer) in &self.buffers_state.borrow().open_buffers {
2385 if let Some(buffer) = buffer.upgrade(cx) {
2386 buffer.update(cx, |buffer, cx| {
2387 if let Some(old_file) = File::from_dyn(buffer.file()) {
2388 if old_file.worktree != worktree_handle {
2389 return;
2390 }
2391
2392 let new_file = if let Some(entry) = old_file
2393 .entry_id
2394 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2395 {
2396 File {
2397 is_local: true,
2398 entry_id: Some(entry.id),
2399 mtime: entry.mtime,
2400 path: entry.path.clone(),
2401 worktree: worktree_handle.clone(),
2402 }
2403 } else if let Some(entry) =
2404 snapshot.entry_for_path(old_file.path().as_ref())
2405 {
2406 File {
2407 is_local: true,
2408 entry_id: Some(entry.id),
2409 mtime: entry.mtime,
2410 path: entry.path.clone(),
2411 worktree: worktree_handle.clone(),
2412 }
2413 } else {
2414 File {
2415 is_local: true,
2416 entry_id: None,
2417 path: old_file.path().clone(),
2418 mtime: old_file.mtime(),
2419 worktree: worktree_handle.clone(),
2420 }
2421 };
2422
2423 if let Some(project_id) = self.remote_id() {
2424 self.client
2425 .send(proto::UpdateBufferFile {
2426 project_id,
2427 buffer_id: *buffer_id as u64,
2428 file: Some(new_file.to_proto()),
2429 })
2430 .log_err();
2431 }
2432 buffer.file_updated(Box::new(new_file), cx).detach();
2433 }
2434 });
2435 } else {
2436 buffers_to_delete.push(*buffer_id);
2437 }
2438 }
2439
2440 for buffer_id in buffers_to_delete {
2441 self.buffers_state
2442 .borrow_mut()
2443 .open_buffers
2444 .remove(&buffer_id);
2445 }
2446 }
2447
2448 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2449 let new_active_entry = entry.and_then(|project_path| {
2450 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2451 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2452 Some(ProjectEntry {
2453 worktree_id: project_path.worktree_id,
2454 entry_id: entry.id,
2455 })
2456 });
2457 if new_active_entry != self.active_entry {
2458 self.active_entry = new_active_entry;
2459 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2460 }
2461 }
2462
2463 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2464 self.language_servers_with_diagnostics_running > 0
2465 }
2466
2467 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2468 let mut summary = DiagnosticSummary::default();
2469 for (_, path_summary) in self.diagnostic_summaries(cx) {
2470 summary.error_count += path_summary.error_count;
2471 summary.warning_count += path_summary.warning_count;
2472 summary.info_count += path_summary.info_count;
2473 summary.hint_count += path_summary.hint_count;
2474 }
2475 summary
2476 }
2477
2478 pub fn diagnostic_summaries<'a>(
2479 &'a self,
2480 cx: &'a AppContext,
2481 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2482 self.worktrees(cx).flat_map(move |worktree| {
2483 let worktree = worktree.read(cx);
2484 let worktree_id = worktree.id();
2485 worktree
2486 .diagnostic_summaries()
2487 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2488 })
2489 }
2490
2491 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2492 self.language_servers_with_diagnostics_running += 1;
2493 if self.language_servers_with_diagnostics_running == 1 {
2494 cx.emit(Event::DiskBasedDiagnosticsStarted);
2495 }
2496 }
2497
2498 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2499 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2500 self.language_servers_with_diagnostics_running -= 1;
2501 if self.language_servers_with_diagnostics_running == 0 {
2502 cx.emit(Event::DiskBasedDiagnosticsFinished);
2503 }
2504 }
2505
2506 pub fn active_entry(&self) -> Option<ProjectEntry> {
2507 self.active_entry
2508 }
2509
2510 // RPC message handlers
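    //
    // The handlers below are invoked when the corresponding proto message arrives from the
    // rpc client: hosts serve requests about the buffers they share, while guests apply
    // updates pushed by the host.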
2511
2512 async fn handle_unshare_project(
2513 this: ModelHandle<Self>,
2514 _: TypedEnvelope<proto::UnshareProject>,
2515 _: Arc<Client>,
2516 mut cx: AsyncAppContext,
2517 ) -> Result<()> {
2518 this.update(&mut cx, |this, cx| {
2519 if let ProjectClientState::Remote {
2520 sharing_has_stopped,
2521 ..
2522 } = &mut this.client_state
2523 {
2524 *sharing_has_stopped = true;
2525 this.collaborators.clear();
2526 cx.notify();
2527 } else {
2528 unreachable!()
2529 }
2530 });
2531
2532 Ok(())
2533 }
2534
2535 async fn handle_add_collaborator(
2536 this: ModelHandle<Self>,
2537 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2538 _: Arc<Client>,
2539 mut cx: AsyncAppContext,
2540 ) -> Result<()> {
2541 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2542 let collaborator = envelope
2543 .payload
2544 .collaborator
2545 .take()
2546 .ok_or_else(|| anyhow!("empty collaborator"))?;
2547
2548 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2549 this.update(&mut cx, |this, cx| {
2550 this.collaborators
2551 .insert(collaborator.peer_id, collaborator);
2552 cx.notify();
2553 });
2554
2555 Ok(())
2556 }
2557
2558 async fn handle_remove_collaborator(
2559 this: ModelHandle<Self>,
2560 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2561 _: Arc<Client>,
2562 mut cx: AsyncAppContext,
2563 ) -> Result<()> {
2564 this.update(&mut cx, |this, cx| {
2565 let peer_id = PeerId(envelope.payload.peer_id);
2566 let replica_id = this
2567 .collaborators
2568 .remove(&peer_id)
2569 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2570 .replica_id;
2571 this.shared_buffers.remove(&peer_id);
2572 for (_, buffer) in &this.buffers_state.borrow().open_buffers {
2573 if let Some(buffer) = buffer.upgrade(cx) {
2574 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2575 }
2576 }
2577 cx.notify();
2578 Ok(())
2579 })
2580 }
2581
2582 async fn handle_register_worktree(
2583 this: ModelHandle<Self>,
2584 envelope: TypedEnvelope<proto::RegisterWorktree>,
2585 client: Arc<Client>,
2586 mut cx: AsyncAppContext,
2587 ) -> Result<()> {
2588 this.update(&mut cx, |this, cx| {
2589            let remote_id = this.remote_id().ok_or_else(|| anyhow!("project does not have a remote id"))?;
2590 let replica_id = this.replica_id();
2591 let worktree = proto::Worktree {
2592 id: envelope.payload.worktree_id,
2593 root_name: envelope.payload.root_name,
2594 entries: Default::default(),
2595 diagnostic_summaries: Default::default(),
2596 weak: envelope.payload.weak,
2597 };
2598 let (worktree, load_task) =
2599 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2600 this.add_worktree(&worktree, cx);
2601 load_task.detach();
2602 Ok(())
2603 })
2604 }
2605
2606 async fn handle_unregister_worktree(
2607 this: ModelHandle<Self>,
2608 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2609 _: Arc<Client>,
2610 mut cx: AsyncAppContext,
2611 ) -> Result<()> {
2612 this.update(&mut cx, |this, cx| {
2613 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2614 this.remove_worktree(worktree_id, cx);
2615 Ok(())
2616 })
2617 }
2618
2619 async fn handle_update_worktree(
2620 this: ModelHandle<Self>,
2621 envelope: TypedEnvelope<proto::UpdateWorktree>,
2622 _: Arc<Client>,
2623 mut cx: AsyncAppContext,
2624 ) -> Result<()> {
2625 this.update(&mut cx, |this, cx| {
2626 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2627 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2628 worktree.update(cx, |worktree, _| {
2629 let worktree = worktree.as_remote_mut().unwrap();
2630 worktree.update_from_remote(envelope)
2631 })?;
2632 }
2633 Ok(())
2634 })
2635 }
2636
2637 async fn handle_update_diagnostic_summary(
2638 this: ModelHandle<Self>,
2639 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2640 _: Arc<Client>,
2641 mut cx: AsyncAppContext,
2642 ) -> Result<()> {
2643 this.update(&mut cx, |this, cx| {
2644 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2645 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2646 if let Some(summary) = envelope.payload.summary {
2647 let project_path = ProjectPath {
2648 worktree_id,
2649 path: Path::new(&summary.path).into(),
2650 };
2651 worktree.update(cx, |worktree, _| {
2652 worktree
2653 .as_remote_mut()
2654 .unwrap()
2655 .update_diagnostic_summary(project_path.path.clone(), &summary);
2656 });
2657 cx.emit(Event::DiagnosticsUpdated(project_path));
2658 }
2659 }
2660 Ok(())
2661 })
2662 }
2663
2664 async fn handle_disk_based_diagnostics_updating(
2665 this: ModelHandle<Self>,
2666 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2667 _: Arc<Client>,
2668 mut cx: AsyncAppContext,
2669 ) -> Result<()> {
2670 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2671 Ok(())
2672 }
2673
2674 async fn handle_disk_based_diagnostics_updated(
2675 this: ModelHandle<Self>,
2676 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2677 _: Arc<Client>,
2678 mut cx: AsyncAppContext,
2679 ) -> Result<()> {
2680 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2681 Ok(())
2682 }
2683
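    // Applies buffer operations received from a peer. If the target buffer is not currently
    // open but this is a remote project with a buffer-producing request in flight, the
    // operations are parked in an `OpenBuffer::Loading` entry and applied once the buffer
    // arrives; otherwise operations for unknown buffers are ignored.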
2684 async fn handle_update_buffer(
2685 this: ModelHandle<Self>,
2686 envelope: TypedEnvelope<proto::UpdateBuffer>,
2687 _: Arc<Client>,
2688 mut cx: AsyncAppContext,
2689 ) -> Result<()> {
2690 this.update(&mut cx, |this, cx| {
2691 let payload = envelope.payload.clone();
2692 let buffer_id = payload.buffer_id;
2693 let ops = payload
2694 .operations
2695 .into_iter()
2696 .map(|op| language::proto::deserialize_operation(op))
2697 .collect::<Result<Vec<_>, _>>()?;
2698 let is_remote = this.is_remote();
2699 let mut buffers_state = this.buffers_state.borrow_mut();
2700 let buffer_request_count = buffers_state.buffer_request_count;
2701 match buffers_state.open_buffers.entry(buffer_id) {
2702 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2703 OpenBuffer::Loaded(buffer) => {
2704 if let Some(buffer) = buffer.upgrade(cx) {
2705 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2706 } else if is_remote && buffer_request_count > 0 {
2707 e.insert(OpenBuffer::Loading(ops));
2708 }
2709 }
2710 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2711 },
2712 hash_map::Entry::Vacant(e) => {
2713 if is_remote && buffer_request_count > 0 {
2714 e.insert(OpenBuffer::Loading(ops));
2715 }
2716 }
2717 }
2718 Ok(())
2719 })
2720 }
2721
2722 async fn handle_update_buffer_file(
2723 this: ModelHandle<Self>,
2724 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2725 _: Arc<Client>,
2726 mut cx: AsyncAppContext,
2727 ) -> Result<()> {
2728 this.update(&mut cx, |this, cx| {
2729 let payload = envelope.payload.clone();
2730 let buffer_id = payload.buffer_id;
2731 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2732 let worktree = this
2733 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2734 .ok_or_else(|| anyhow!("no such worktree"))?;
2735 let file = File::from_proto(file, worktree.clone(), cx)?;
2736 let buffer = this
2737 .buffers_state
2738 .borrow_mut()
2739 .open_buffers
2740 .get_mut(&buffer_id)
2741 .and_then(|b| b.upgrade(cx))
2742 .ok_or_else(|| anyhow!("no such buffer"))?;
2743 buffer.update(cx, |buffer, cx| {
2744 buffer.file_updated(Box::new(file), cx).detach();
2745 });
2746 Ok(())
2747 })
2748 }
2749
2750 async fn handle_save_buffer(
2751 this: ModelHandle<Self>,
2752 envelope: TypedEnvelope<proto::SaveBuffer>,
2753 _: Arc<Client>,
2754 mut cx: AsyncAppContext,
2755 ) -> Result<proto::BufferSaved> {
2756 let buffer_id = envelope.payload.buffer_id;
2757 let sender_id = envelope.original_sender_id()?;
2758 let requested_version = envelope.payload.version.try_into()?;
2759
2760 let (project_id, buffer) = this.update(&mut cx, |this, _| {
2761 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2762 let buffer = this
2763 .shared_buffers
2764 .get(&sender_id)
2765 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2766 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2767 Ok::<_, anyhow::Error>((project_id, buffer))
2768 })?;
2769
2770 if !buffer
2771 .read_with(&cx, |buffer, _| buffer.version())
2772 .observed_all(&requested_version)
2773 {
2774 Err(anyhow!("save request depends on unreceived edits"))?;
2775 }
2776
2777 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2778 Ok(proto::BufferSaved {
2779 project_id,
2780 buffer_id,
2781 version: (&saved_version).into(),
2782 mtime: Some(mtime.into()),
2783 })
2784 }
2785
2786 async fn handle_format_buffers(
2787 this: ModelHandle<Self>,
2788 envelope: TypedEnvelope<proto::FormatBuffers>,
2789 _: Arc<Client>,
2790 mut cx: AsyncAppContext,
2791 ) -> Result<proto::FormatBuffersResponse> {
2792 let sender_id = envelope.original_sender_id()?;
2793 let format = this.update(&mut cx, |this, cx| {
2794 let shared_buffers = this
2795 .shared_buffers
2796 .get(&sender_id)
2797 .ok_or_else(|| anyhow!("peer has no buffers"))?;
2798 let mut buffers = HashSet::default();
2799 for buffer_id in &envelope.payload.buffer_ids {
2800 buffers.insert(
2801 shared_buffers
2802 .get(buffer_id)
2803 .cloned()
2804 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2805 );
2806 }
2807 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2808 })?;
2809
2810 let project_transaction = format.await?;
2811 let project_transaction = this.update(&mut cx, |this, cx| {
2812 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2813 });
2814 Ok(proto::FormatBuffersResponse {
2815 transaction: Some(project_transaction),
2816 })
2817 }
2818
2819 async fn handle_get_completions(
2820 this: ModelHandle<Self>,
2821 envelope: TypedEnvelope<proto::GetCompletions>,
2822 _: Arc<Client>,
2823 mut cx: AsyncAppContext,
2824 ) -> Result<proto::GetCompletionsResponse> {
2825 let sender_id = envelope.original_sender_id()?;
2826 let position = envelope
2827 .payload
2828 .position
2829 .and_then(language::proto::deserialize_anchor)
2830 .ok_or_else(|| anyhow!("invalid position"))?;
2831 let version = clock::Global::from(envelope.payload.version);
2832 let buffer = this.read_with(&cx, |this, _| {
2833 this.shared_buffers
2834 .get(&sender_id)
2835 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2836 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2837 })?;
2838 if !buffer
2839 .read_with(&cx, |buffer, _| buffer.version())
2840 .observed_all(&version)
2841 {
2842 Err(anyhow!("completion request depends on unreceived edits"))?;
2843 }
2844 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2845 let completions = this
2846 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2847 .await?;
2848
2849 Ok(proto::GetCompletionsResponse {
2850 completions: completions
2851 .iter()
2852 .map(language::proto::serialize_completion)
2853 .collect(),
2854 version: (&version).into(),
2855 })
2856 }
2857
2858 async fn handle_apply_additional_edits_for_completion(
2859 this: ModelHandle<Self>,
2860 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2861 _: Arc<Client>,
2862 mut cx: AsyncAppContext,
2863 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2864 let sender_id = envelope.original_sender_id()?;
2865 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2866 let buffer = this
2867 .shared_buffers
2868 .get(&sender_id)
2869 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2870 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2871 let language = buffer.read(cx).language();
2872 let completion = language::proto::deserialize_completion(
2873 envelope
2874 .payload
2875 .completion
2876 .ok_or_else(|| anyhow!("invalid completion"))?,
2877 language,
2878 )?;
2879 Ok::<_, anyhow::Error>(
2880 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2881 )
2882 })?;
2883
2884 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2885 transaction: apply_additional_edits
2886 .await?
2887 .as_ref()
2888 .map(language::proto::serialize_transaction),
2889 })
2890 }
2891
2892 async fn handle_get_code_actions(
2893 this: ModelHandle<Self>,
2894 envelope: TypedEnvelope<proto::GetCodeActions>,
2895 _: Arc<Client>,
2896 mut cx: AsyncAppContext,
2897 ) -> Result<proto::GetCodeActionsResponse> {
2898 let sender_id = envelope.original_sender_id()?;
2899 let start = envelope
2900 .payload
2901 .start
2902 .and_then(language::proto::deserialize_anchor)
2903 .ok_or_else(|| anyhow!("invalid start"))?;
2904 let end = envelope
2905 .payload
2906 .end
2907 .and_then(language::proto::deserialize_anchor)
2908 .ok_or_else(|| anyhow!("invalid end"))?;
2909 let buffer = this.update(&mut cx, |this, _| {
2910 this.shared_buffers
2911 .get(&sender_id)
2912 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2913 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2914 })?;
2915 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2916 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2917 Err(anyhow!("code action request references unreceived edits"))?;
2918 }
2919 let code_actions = this.update(&mut cx, |this, cx| {
2920 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2921 })?;
2922
2923 Ok(proto::GetCodeActionsResponse {
2924 actions: code_actions
2925 .await?
2926 .iter()
2927 .map(language::proto::serialize_code_action)
2928 .collect(),
2929 version: (&version).into(),
2930 })
2931 }
2932
2933 async fn handle_apply_code_action(
2934 this: ModelHandle<Self>,
2935 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2936 _: Arc<Client>,
2937 mut cx: AsyncAppContext,
2938 ) -> Result<proto::ApplyCodeActionResponse> {
2939 let sender_id = envelope.original_sender_id()?;
2940 let action = language::proto::deserialize_code_action(
2941 envelope
2942 .payload
2943 .action
2944 .ok_or_else(|| anyhow!("invalid action"))?,
2945 )?;
2946 let apply_code_action = this.update(&mut cx, |this, cx| {
2947 let buffer = this
2948 .shared_buffers
2949 .get(&sender_id)
2950 .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
2951 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2952 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2953 })?;
2954
2955 let project_transaction = apply_code_action.await?;
2956 let project_transaction = this.update(&mut cx, |this, cx| {
2957 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2958 });
2959 Ok(proto::ApplyCodeActionResponse {
2960 transaction: Some(project_transaction),
2961 })
2962 }
2963
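    // Generic host-side handler for LSP-backed requests from guests: deserializes the typed
    // request, runs it through `request_lsp` against the shared buffer, and serializes the
    // response along with the buffer version observed when the request started.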
2964 async fn handle_lsp_command<T: LspCommand>(
2965 this: ModelHandle<Self>,
2966 envelope: TypedEnvelope<T::ProtoRequest>,
2967 _: Arc<Client>,
2968 mut cx: AsyncAppContext,
2969 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2970 where
2971 <T::LspRequest as lsp::request::Request>::Result: Send,
2972 {
2973 let sender_id = envelope.original_sender_id()?;
2974 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2975 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2976 let buffer_handle = this
2977 .shared_buffers
2978 .get(&sender_id)
2979 .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
2980 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2981 let buffer = buffer_handle.read(cx);
2982 let buffer_version = buffer.version();
2983 let request = T::from_proto(envelope.payload, this, buffer)?;
2984 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2985 })?;
2986 let response = request.await?;
2987 this.update(&mut cx, |this, cx| {
2988 Ok(T::response_to_proto(
2989 response,
2990 this,
2991 sender_id,
2992 &buffer_version,
2993 cx,
2994 ))
2995 })
2996 }
2997
2998 async fn handle_get_project_symbols(
2999 this: ModelHandle<Self>,
3000 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3001 _: Arc<Client>,
3002 mut cx: AsyncAppContext,
3003 ) -> Result<proto::GetProjectSymbolsResponse> {
3004 let symbols = this
3005 .update(&mut cx, |this, cx| {
3006 this.symbols(&envelope.payload.query, cx)
3007 })
3008 .await?;
3009
3010 Ok(proto::GetProjectSymbolsResponse {
3011 symbols: symbols.iter().map(serialize_symbol).collect(),
3012 })
3013 }
3014
3015 async fn handle_open_buffer_for_symbol(
3016 this: ModelHandle<Self>,
3017 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3018 _: Arc<Client>,
3019 mut cx: AsyncAppContext,
3020 ) -> Result<proto::OpenBufferForSymbolResponse> {
3021 let peer_id = envelope.original_sender_id()?;
3022 let symbol = envelope
3023 .payload
3024 .symbol
3025 .ok_or_else(|| anyhow!("invalid symbol"))?;
3026 let symbol = this.read_with(&cx, |this, _| {
3027 let symbol = this.deserialize_symbol(symbol)?;
3028 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3029 if signature == symbol.signature {
3030 Ok(symbol)
3031 } else {
3032 Err(anyhow!("invalid symbol signature"))
3033 }
3034 })?;
3035 let buffer = this
3036 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3037 .await?;
3038
3039 Ok(proto::OpenBufferForSymbolResponse {
3040 buffer: Some(this.update(&mut cx, |this, cx| {
3041 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3042 })),
3043 })
3044 }
3045
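    // A SHA-256 hash over the worktree id, the symbol's path, and this project's random
    // nonce. Symbols handed to guests carry this signature so that
    // `handle_open_buffer_for_symbol` can verify a request refers to a symbol this project
    // actually produced.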
3046 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3047 let mut hasher = Sha256::new();
3048 hasher.update(worktree_id.to_proto().to_be_bytes());
3049 hasher.update(path.to_string_lossy().as_bytes());
3050 hasher.update(self.nonce.to_be_bytes());
3051 hasher.finalize().as_slice().try_into().unwrap()
3052 }
3053
3054 async fn handle_open_buffer(
3055 this: ModelHandle<Self>,
3056 envelope: TypedEnvelope<proto::OpenBuffer>,
3057 _: Arc<Client>,
3058 mut cx: AsyncAppContext,
3059 ) -> Result<proto::OpenBufferResponse> {
3060 let peer_id = envelope.original_sender_id()?;
3061 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3062 let open_buffer = this.update(&mut cx, |this, cx| {
3063 this.open_buffer(
3064 ProjectPath {
3065 worktree_id,
3066 path: PathBuf::from(envelope.payload.path).into(),
3067 },
3068 cx,
3069 )
3070 });
3071
3072 let buffer = open_buffer.await?;
3073 this.update(&mut cx, |this, cx| {
3074 Ok(proto::OpenBufferResponse {
3075 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3076 })
3077 })
3078 }
3079
3080 fn serialize_project_transaction_for_peer(
3081 &mut self,
3082 project_transaction: ProjectTransaction,
3083 peer_id: PeerId,
3084 cx: &AppContext,
3085 ) -> proto::ProjectTransaction {
3086 let mut serialized_transaction = proto::ProjectTransaction {
3087 buffers: Default::default(),
3088 transactions: Default::default(),
3089 };
3090 for (buffer, transaction) in project_transaction.0 {
3091 serialized_transaction
3092 .buffers
3093 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3094 serialized_transaction
3095 .transactions
3096 .push(language::proto::serialize_transaction(&transaction));
3097 }
3098 serialized_transaction
3099 }
3100
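    // Rebuilds a `ProjectTransaction` received from the host: each buffer is materialized via
    // `deserialize_buffer`, the transaction's edits are awaited on that buffer, and the
    // transaction is optionally pushed onto the buffer's undo history.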
3101 fn deserialize_project_transaction(
3102 &mut self,
3103 message: proto::ProjectTransaction,
3104 push_to_history: bool,
3105 request_handle: BufferRequestHandle,
3106 cx: &mut ModelContext<Self>,
3107 ) -> Task<Result<ProjectTransaction>> {
3108 cx.spawn(|this, mut cx| async move {
3109 let mut project_transaction = ProjectTransaction::default();
3110 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3111 let buffer = this
3112 .update(&mut cx, |this, cx| {
3113 this.deserialize_buffer(buffer, request_handle.clone(), cx)
3114 })
3115 .await?;
3116 let transaction = language::proto::deserialize_transaction(transaction)?;
3117 project_transaction.0.insert(buffer, transaction);
3118 }
3119
3120 for (buffer, transaction) in &project_transaction.0 {
3121 buffer
3122 .update(&mut cx, |buffer, _| {
3123 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3124 })
3125 .await;
3126
3127 if push_to_history {
3128 buffer.update(&mut cx, |buffer, _| {
3129 buffer.push_transaction(transaction.clone(), Instant::now());
3130 });
3131 }
3132 }
3133
3134 Ok(project_transaction)
3135 })
3136 }
3137
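    // The first time a buffer is shared with a given peer its full state is sent and the
    // buffer is remembered in `shared_buffers`; subsequent messages refer to it by id only.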
3138 fn serialize_buffer_for_peer(
3139 &mut self,
3140 buffer: &ModelHandle<Buffer>,
3141 peer_id: PeerId,
3142 cx: &AppContext,
3143 ) -> proto::Buffer {
3144 let buffer_id = buffer.read(cx).remote_id();
3145 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3146 match shared_buffers.entry(buffer_id) {
3147 hash_map::Entry::Occupied(_) => proto::Buffer {
3148 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3149 },
3150 hash_map::Entry::Vacant(entry) => {
3151 entry.insert(buffer.clone());
3152 proto::Buffer {
3153 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3154 }
3155 }
3156 }
3157 }
3158
3159 fn deserialize_buffer(
3160 &mut self,
3161 buffer: proto::Buffer,
3162 request_handle: BufferRequestHandle,
3163 cx: &mut ModelContext<Self>,
3164 ) -> Task<Result<ModelHandle<Buffer>>> {
3165 let replica_id = self.replica_id();
3166
3167 let mut opened_buffer_tx = self.opened_buffer.clone();
3168 let mut opened_buffer_rx = self.opened_buffer.subscribe();
3169 cx.spawn(|this, mut cx| async move {
3170 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3171 proto::buffer::Variant::Id(id) => {
3172 let buffer = loop {
3173 let buffer = this.read_with(&cx, |this, cx| {
3174 this.buffers_state
3175 .borrow()
3176 .open_buffers
3177 .get(&id)
3178 .and_then(|buffer| buffer.upgrade(cx))
3179 });
3180 if let Some(buffer) = buffer {
3181 break buffer;
3182 }
3183 opened_buffer_rx
3184 .recv()
3185 .await
3186 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3187 };
3188 Ok(buffer)
3189 }
3190 proto::buffer::Variant::State(mut buffer) => {
3191 let mut buffer_worktree = None;
3192 let mut buffer_file = None;
3193 if let Some(file) = buffer.file.take() {
3194 this.read_with(&cx, |this, cx| {
3195 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3196 let worktree =
3197 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3198 anyhow!("no worktree found for id {}", file.worktree_id)
3199 })?;
3200 buffer_file =
3201 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3202 as Box<dyn language::File>);
3203 buffer_worktree = Some(worktree);
3204 Ok::<_, anyhow::Error>(())
3205 })?;
3206 }
3207
3208 let buffer = cx.add_model(|cx| {
3209 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3210 });
3211
3212 request_handle.preserve_buffer(buffer.clone());
3213 this.update(&mut cx, |this, cx| {
3214 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3215 })?;
3216
3217 let _ = opened_buffer_tx.send(()).await;
3218 Ok(buffer)
3219 }
3220 }
3221 })
3222 }
3223
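    // Reconstructs a `Symbol` from its proto form, resolving its language (for labelling) and
    // validating the signature length; `kind` is transmitted as the raw `lsp::SymbolKind`
    // value, mirroring `serialize_symbol` below.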
3224 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3225 let language = self
3226 .languages
3227 .get_language(&serialized_symbol.language_name);
3228 let start = serialized_symbol
3229 .start
3230 .ok_or_else(|| anyhow!("invalid start"))?;
3231 let end = serialized_symbol
3232 .end
3233 .ok_or_else(|| anyhow!("invalid end"))?;
3234 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3235 Ok(Symbol {
3236 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3237 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3238 language_name: serialized_symbol.language_name.clone(),
3239 label: language
3240 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3241 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3242 name: serialized_symbol.name,
3243 path: PathBuf::from(serialized_symbol.path),
3244 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3245 kind,
3246 signature: serialized_symbol
3247 .signature
3248 .try_into()
3249 .map_err(|_| anyhow!("invalid signature"))?,
3250 })
3251 }
3252
3253 async fn handle_close_buffer(
3254 this: ModelHandle<Self>,
3255 envelope: TypedEnvelope<proto::CloseBuffer>,
3256 _: Arc<Client>,
3257 mut cx: AsyncAppContext,
3258 ) -> Result<()> {
3259 this.update(&mut cx, |this, cx| {
3260 if let Some(shared_buffers) =
3261 this.shared_buffers.get_mut(&envelope.original_sender_id()?)
3262 {
3263 shared_buffers.remove(&envelope.payload.buffer_id);
3264 cx.notify();
3265 }
3266 Ok(())
3267 })
3268 }
3269
3270 async fn handle_buffer_saved(
3271 this: ModelHandle<Self>,
3272 envelope: TypedEnvelope<proto::BufferSaved>,
3273 _: Arc<Client>,
3274 mut cx: AsyncAppContext,
3275 ) -> Result<()> {
3276 let version = envelope.payload.version.try_into()?;
3277 let mtime = envelope
3278 .payload
3279 .mtime
3280 .ok_or_else(|| anyhow!("missing mtime"))?
3281 .into();
3282
3283 this.update(&mut cx, |this, cx| {
3284 let buffer = this
3285 .buffers_state
3286 .borrow()
3287 .open_buffers
3288 .get(&envelope.payload.buffer_id)
3289 .and_then(|buffer| buffer.upgrade(cx));
3290 if let Some(buffer) = buffer {
3291 buffer.update(cx, |buffer, cx| {
3292 buffer.did_save(version, mtime, None, cx);
3293 });
3294 }
3295 Ok(())
3296 })
3297 }
3298
3299 async fn handle_buffer_reloaded(
3300 this: ModelHandle<Self>,
3301 envelope: TypedEnvelope<proto::BufferReloaded>,
3302 _: Arc<Client>,
3303 mut cx: AsyncAppContext,
3304 ) -> Result<()> {
3305 let payload = envelope.payload.clone();
3306 let version = payload.version.try_into()?;
3307 let mtime = payload
3308 .mtime
3309 .ok_or_else(|| anyhow!("missing mtime"))?
3310 .into();
3311 this.update(&mut cx, |this, cx| {
3312 let buffer = this
3313 .buffers_state
3314 .borrow()
3315 .open_buffers
3316 .get(&payload.buffer_id)
3317 .and_then(|buffer| buffer.upgrade(cx));
3318 if let Some(buffer) = buffer {
3319 buffer.update(cx, |buffer, cx| {
3320 buffer.did_reload(version, mtime, cx);
3321 });
3322 }
3323 Ok(())
3324 })
3325 }
3326
3327 pub fn match_paths<'a>(
3328 &self,
3329 query: &'a str,
3330 include_ignored: bool,
3331 smart_case: bool,
3332 max_results: usize,
3333 cancel_flag: &'a AtomicBool,
3334 cx: &AppContext,
3335 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3336 let worktrees = self
3337 .worktrees(cx)
3338 .filter(|worktree| !worktree.read(cx).is_weak())
3339 .collect::<Vec<_>>();
3340 let include_root_name = worktrees.len() > 1;
3341 let candidate_sets = worktrees
3342 .into_iter()
3343 .map(|worktree| CandidateSet {
3344 snapshot: worktree.read(cx).snapshot(),
3345 include_ignored,
3346 include_root_name,
3347 })
3348 .collect::<Vec<_>>();
3349
3350 let background = cx.background().clone();
3351 async move {
3352 fuzzy::match_paths(
3353 candidate_sets.as_slice(),
3354 query,
3355 smart_case,
3356 max_results,
3357 cancel_flag,
3358 background,
3359 )
3360 .await
3361 }
3362 }
3363}
3364
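// `BufferRequestHandle` is a guard over the project's shared `ProjectBuffers` state: while at
// least one handle is alive (i.e. a buffer-producing RPC request is in flight), currently open
// buffers are kept strongly referenced and operations for not-yet-open buffers may be queued
// as `OpenBuffer::Loading`. Dropping the last handle releases the preserved buffers and
// discards any still-loading entries.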
3365impl BufferRequestHandle {
3366 fn new(state: Rc<RefCell<ProjectBuffers>>, cx: &AppContext) -> Self {
3367 {
3368 let state = &mut *state.borrow_mut();
3369 state.buffer_request_count += 1;
3370 if state.buffer_request_count == 1 {
3371 state.preserved_buffers.extend(
3372 state
3373 .open_buffers
3374 .values()
3375 .filter_map(|buffer| buffer.upgrade(cx)),
3376 )
3377 }
3378 }
3379 Self(state)
3380 }
3381
3382 fn preserve_buffer(&self, buffer: ModelHandle<Buffer>) {
3383 self.0.borrow_mut().preserved_buffers.push(buffer);
3384 }
3385}
3386
3387impl Clone for BufferRequestHandle {
3388 fn clone(&self) -> Self {
3389 self.0.borrow_mut().buffer_request_count += 1;
3390 Self(self.0.clone())
3391 }
3392}
3393
3394impl Drop for BufferRequestHandle {
3395 fn drop(&mut self) {
3396 let mut state = self.0.borrow_mut();
3397 state.buffer_request_count -= 1;
3398 if state.buffer_request_count == 0 {
3399 state.preserved_buffers.clear();
3400 state
3401 .open_buffers
3402 .retain(|_, buffer| matches!(buffer, OpenBuffer::Loaded(_)))
3403 }
3404 }
3405}
3406
3407impl WorktreeHandle {
3408 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3409 match self {
3410 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3411 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3412 }
3413 }
3414}
3415
3416impl OpenBuffer {
3417 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3418 match self {
3419 OpenBuffer::Loaded(handle) => handle.upgrade(cx),
3420 OpenBuffer::Loading(_) => None,
3421 }
3422 }
3423}
3424
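// Adapts a worktree snapshot to fuzzy matching: `CandidateSet` implements
// `PathMatchCandidateSet` over the snapshot's (optionally ignored) files so that
// `Project::match_paths` can hand all worktrees to `fuzzy::match_paths` at once.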
3425struct CandidateSet {
3426 snapshot: Snapshot,
3427 include_ignored: bool,
3428 include_root_name: bool,
3429}
3430
3431impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3432 type Candidates = CandidateSetIter<'a>;
3433
3434 fn id(&self) -> usize {
3435 self.snapshot.id().to_usize()
3436 }
3437
3438 fn len(&self) -> usize {
3439 if self.include_ignored {
3440 self.snapshot.file_count()
3441 } else {
3442 self.snapshot.visible_file_count()
3443 }
3444 }
3445
3446 fn prefix(&self) -> Arc<str> {
3447 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3448 self.snapshot.root_name().into()
3449 } else if self.include_root_name {
3450 format!("{}/", self.snapshot.root_name()).into()
3451 } else {
3452 "".into()
3453 }
3454 }
3455
3456 fn candidates(&'a self, start: usize) -> Self::Candidates {
3457 CandidateSetIter {
3458 traversal: self.snapshot.files(self.include_ignored, start),
3459 }
3460 }
3461}
3462
3463struct CandidateSetIter<'a> {
3464 traversal: Traversal<'a>,
3465}
3466
3467impl<'a> Iterator for CandidateSetIter<'a> {
3468 type Item = PathMatchCandidate<'a>;
3469
3470 fn next(&mut self) -> Option<Self::Item> {
3471 self.traversal.next().map(|entry| {
3472 if let EntryKind::File(char_bag) = entry.kind {
3473 PathMatchCandidate {
3474 path: &entry.path,
3475 char_bag,
3476 }
3477 } else {
3478 unreachable!()
3479 }
3480 })
3481 }
3482}
3483
3484impl Entity for Project {
3485 type Event = Event;
3486
3487 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3488 match &self.client_state {
3489 ProjectClientState::Local { remote_id_rx, .. } => {
3490 if let Some(project_id) = *remote_id_rx.borrow() {
3491 self.client
3492 .send(proto::UnregisterProject { project_id })
3493 .log_err();
3494 }
3495 }
3496 ProjectClientState::Remote { remote_id, .. } => {
3497 self.client
3498 .send(proto::LeaveProject {
3499 project_id: *remote_id,
3500 })
3501 .log_err();
3502 }
3503 }
3504 }
3505
3506 fn app_will_quit(
3507 &mut self,
3508 _: &mut MutableAppContext,
3509 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3510 let shutdown_futures = self
3511 .language_servers
3512 .drain()
3513 .filter_map(|(_, server)| server.shutdown())
3514 .collect::<Vec<_>>();
3515 Some(
3516 async move {
3517 futures::future::join_all(shutdown_futures).await;
3518 }
3519 .boxed(),
3520 )
3521 }
3522}
3523
3524impl Collaborator {
3525 fn from_proto(
3526 message: proto::Collaborator,
3527 user_store: &ModelHandle<UserStore>,
3528 cx: &mut AsyncAppContext,
3529 ) -> impl Future<Output = Result<Self>> {
3530 let user = user_store.update(cx, |user_store, cx| {
3531 user_store.fetch_user(message.user_id, cx)
3532 });
3533
3534 async move {
3535 Ok(Self {
3536 peer_id: PeerId(message.peer_id),
3537 user: user.await?,
3538 replica_id: message.replica_id as ReplicaId,
3539 })
3540 }
3541 }
3542}
3543
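// Lets a `(WorktreeId, path)` pair convert into a `ProjectPath`, which is how the tests below
// can call e.g. `open_buffer((worktree_id, "a.rs"), cx)`.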
3544impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3545 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3546 Self {
3547 worktree_id,
3548 path: path.as_ref().into(),
3549 }
3550 }
3551}
3552
3553impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3554 fn from(options: lsp::CreateFileOptions) -> Self {
3555 Self {
3556 overwrite: options.overwrite.unwrap_or(false),
3557 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3558 }
3559 }
3560}
3561
3562impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3563 fn from(options: lsp::RenameFileOptions) -> Self {
3564 Self {
3565 overwrite: options.overwrite.unwrap_or(false),
3566 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3567 }
3568 }
3569}
3570
3571impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3572 fn from(options: lsp::DeleteFileOptions) -> Self {
3573 Self {
3574 recursive: options.recursive.unwrap_or(false),
3575 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3576 }
3577 }
3578}
3579
3580fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3581 proto::Symbol {
3582 source_worktree_id: symbol.source_worktree_id.to_proto(),
3583 worktree_id: symbol.worktree_id.to_proto(),
3584 language_name: symbol.language_name.clone(),
3585 name: symbol.name.clone(),
3586 kind: unsafe { mem::transmute(symbol.kind) },
3587 path: symbol.path.to_string_lossy().to_string(),
3588 start: Some(proto::Point {
3589 row: symbol.range.start.row,
3590 column: symbol.range.start.column,
3591 }),
3592 end: Some(proto::Point {
3593 row: symbol.range.end.row,
3594 column: symbol.range.end.column,
3595 }),
3596 signature: symbol.signature.to_vec(),
3597 }
3598}
3599
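// Computes `path` relative to `base` by walking their components in parallel, e.g.
// relativize_path("/a/b", "/a/c/d") yields "../c/d".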
3600fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3601 let mut path_components = path.components();
3602 let mut base_components = base.components();
3603 let mut components: Vec<Component> = Vec::new();
3604 loop {
3605 match (path_components.next(), base_components.next()) {
3606 (None, None) => break,
3607 (Some(a), None) => {
3608 components.push(a);
3609 components.extend(path_components.by_ref());
3610 break;
3611 }
3612 (None, _) => components.push(Component::ParentDir),
3613 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3614 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3615 (Some(a), Some(_)) => {
3616 components.push(Component::ParentDir);
3617 for _ in base_components {
3618 components.push(Component::ParentDir);
3619 }
3620 components.push(a);
3621 components.extend(path_components.by_ref());
3622 break;
3623 }
3624 }
3625 }
3626 components.iter().map(|c| c.as_os_str()).collect()
3627}
3628
3629#[cfg(test)]
3630mod tests {
3631 use super::{Event, *};
3632 use fs::RealFs;
3633 use futures::StreamExt;
3634 use gpui::test::subscribe;
3635 use language::{
3636 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3637 };
3638 use lsp::Url;
3639 use serde_json::json;
3640 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3641 use unindent::Unindent as _;
3642 use util::test::temp_tree;
3643 use worktree::WorktreeHandle as _;
3644
3645 #[gpui::test]
3646 async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
3647 let dir = temp_tree(json!({
3648 "root": {
3649 "apple": "",
3650 "banana": {
3651 "carrot": {
3652 "date": "",
3653 "endive": "",
3654 }
3655 },
3656 "fennel": {
3657 "grape": "",
3658 }
3659 }
3660 }));
3661
3662 let root_link_path = dir.path().join("root_link");
3663 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3664 unix::fs::symlink(
3665 &dir.path().join("root/fennel"),
3666 &dir.path().join("root/finnochio"),
3667 )
3668 .unwrap();
3669
3670 let project = Project::test(Arc::new(RealFs), &mut cx);
3671
3672 let (tree, _) = project
3673 .update(&mut cx, |project, cx| {
3674 project.find_or_create_local_worktree(&root_link_path, false, cx)
3675 })
3676 .await
3677 .unwrap();
3678
3679 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3680 .await;
3681 cx.read(|cx| {
3682 let tree = tree.read(cx);
3683 assert_eq!(tree.file_count(), 5);
3684 assert_eq!(
3685 tree.inode_for_path("fennel/grape"),
3686 tree.inode_for_path("finnochio/grape")
3687 );
3688 });
3689
3690 let cancel_flag = Default::default();
3691 let results = project
3692 .read_with(&cx, |project, cx| {
3693 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3694 })
3695 .await;
3696 assert_eq!(
3697 results
3698 .into_iter()
3699 .map(|result| result.path)
3700 .collect::<Vec<Arc<Path>>>(),
3701 vec![
3702 PathBuf::from("banana/carrot/date").into(),
3703 PathBuf::from("banana/carrot/endive").into(),
3704 ]
3705 );
3706 }
3707
3708 #[gpui::test]
3709 async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
3710 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3711 let progress_token = language_server_config
3712 .disk_based_diagnostics_progress_token
3713 .clone()
3714 .unwrap();
3715
3716 let language = Arc::new(Language::new(
3717 LanguageConfig {
3718 name: "Rust".into(),
3719 path_suffixes: vec!["rs".to_string()],
3720 language_server: Some(language_server_config),
3721 ..Default::default()
3722 },
3723 Some(tree_sitter_rust::language()),
3724 ));
3725
3726 let fs = FakeFs::new(cx.background());
3727 fs.insert_tree(
3728 "/dir",
3729 json!({
3730 "a.rs": "fn a() { A }",
3731 "b.rs": "const y: i32 = 1",
3732 }),
3733 )
3734 .await;
3735
3736 let project = Project::test(fs, &mut cx);
3737 project.update(&mut cx, |project, _| {
3738 Arc::get_mut(&mut project.languages).unwrap().add(language);
3739 });
3740
3741 let (tree, _) = project
3742 .update(&mut cx, |project, cx| {
3743 project.find_or_create_local_worktree("/dir", false, cx)
3744 })
3745 .await
3746 .unwrap();
3747 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3748
3749 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3750 .await;
3751
3752 // Cause worktree to start the fake language server
3753 let _buffer = project
3754 .update(&mut cx, |project, cx| {
3755 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3756 })
3757 .await
3758 .unwrap();
3759
3760 let mut events = subscribe(&project, &mut cx);
3761
3762 let mut fake_server = fake_servers.next().await.unwrap();
3763 fake_server.start_progress(&progress_token).await;
3764 assert_eq!(
3765 events.next().await.unwrap(),
3766 Event::DiskBasedDiagnosticsStarted
3767 );
3768
3769 fake_server.start_progress(&progress_token).await;
3770 fake_server.end_progress(&progress_token).await;
3771 fake_server.start_progress(&progress_token).await;
3772
3773 fake_server
3774 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3775 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3776 version: None,
3777 diagnostics: vec![lsp::Diagnostic {
3778 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3779 severity: Some(lsp::DiagnosticSeverity::ERROR),
3780 message: "undefined variable 'A'".to_string(),
3781 ..Default::default()
3782 }],
3783 })
3784 .await;
3785 assert_eq!(
3786 events.next().await.unwrap(),
3787 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3788 );
3789
3790 fake_server.end_progress(&progress_token).await;
3791 fake_server.end_progress(&progress_token).await;
3792 assert_eq!(
3793 events.next().await.unwrap(),
3794 Event::DiskBasedDiagnosticsUpdated
3795 );
3796 assert_eq!(
3797 events.next().await.unwrap(),
3798 Event::DiskBasedDiagnosticsFinished
3799 );
3800
3801 let buffer = project
3802 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3803 .await
3804 .unwrap();
3805
3806 buffer.read_with(&cx, |buffer, _| {
3807 let snapshot = buffer.snapshot();
3808 let diagnostics = snapshot
3809 .diagnostics_in_range::<_, Point>(0..buffer.len())
3810 .collect::<Vec<_>>();
3811 assert_eq!(
3812 diagnostics,
3813 &[DiagnosticEntry {
3814 range: Point::new(0, 9)..Point::new(0, 10),
3815 diagnostic: Diagnostic {
3816 severity: lsp::DiagnosticSeverity::ERROR,
3817 message: "undefined variable 'A'".to_string(),
3818 group_id: 0,
3819 is_primary: true,
3820 ..Default::default()
3821 }
3822 }]
3823 )
3824 });
3825 }
3826
3827 #[gpui::test]
3828 async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
3829 let dir = temp_tree(json!({
3830 "root": {
3831 "dir1": {},
3832 "dir2": {
3833 "dir3": {}
3834 }
3835 }
3836 }));
3837
3838 let project = Project::test(Arc::new(RealFs), &mut cx);
3839 let (tree, _) = project
3840 .update(&mut cx, |project, cx| {
3841 project.find_or_create_local_worktree(&dir.path(), false, cx)
3842 })
3843 .await
3844 .unwrap();
3845
3846 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3847 .await;
3848
3849 let cancel_flag = Default::default();
3850 let results = project
3851 .read_with(&cx, |project, cx| {
3852 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3853 })
3854 .await;
3855
3856 assert!(results.is_empty());
3857 }
3858
3859 #[gpui::test]
3860 async fn test_definition(mut cx: gpui::TestAppContext) {
3861 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3862 let language = Arc::new(Language::new(
3863 LanguageConfig {
3864 name: "Rust".into(),
3865 path_suffixes: vec!["rs".to_string()],
3866 language_server: Some(language_server_config),
3867 ..Default::default()
3868 },
3869 Some(tree_sitter_rust::language()),
3870 ));
3871
3872 let fs = FakeFs::new(cx.background());
3873 fs.insert_tree(
3874 "/dir",
3875 json!({
3876 "a.rs": "const fn a() { A }",
3877 "b.rs": "const y: i32 = crate::a()",
3878 }),
3879 )
3880 .await;
3881
3882 let project = Project::test(fs, &mut cx);
3883 project.update(&mut cx, |project, _| {
3884 Arc::get_mut(&mut project.languages).unwrap().add(language);
3885 });
3886
3887 let (tree, _) = project
3888 .update(&mut cx, |project, cx| {
3889 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3890 })
3891 .await
3892 .unwrap();
3893 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
3894 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3895 .await;
3896
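        // The single-file worktree's only file is addressed by an empty relative path.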
3897 let buffer = project
3898 .update(&mut cx, |project, cx| {
3899 project.open_buffer(
3900 ProjectPath {
3901 worktree_id,
3902 path: Path::new("").into(),
3903 },
3904 cx,
3905 )
3906 })
3907 .await
3908 .unwrap();
3909
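        // Respond to the definition request with a location in a different file,
        // which is not yet part of any worktree.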
3910 let mut fake_server = fake_servers.next().await.unwrap();
3911 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3912 let params = params.text_document_position_params;
3913 assert_eq!(
3914 params.text_document.uri.to_file_path().unwrap(),
3915 Path::new("/dir/b.rs"),
3916 );
3917 assert_eq!(params.position, lsp::Position::new(0, 22));
3918
3919 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3920 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3921 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3922 )))
3923 });
3924
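        // Resolving the definition should open the target file in a new, weak worktree.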
3925 let mut definitions = project
3926 .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx))
3927 .await
3928 .unwrap();
3929
3930 assert_eq!(definitions.len(), 1);
3931 let definition = definitions.pop().unwrap();
3932 cx.update(|cx| {
3933 let target_buffer = definition.buffer.read(cx);
3934 assert_eq!(
3935 target_buffer
3936 .file()
3937 .unwrap()
3938 .as_local()
3939 .unwrap()
3940 .abs_path(cx),
3941 Path::new("/dir/a.rs"),
3942 );
3943 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
3944 assert_eq!(
3945 list_worktrees(&project, cx),
3946 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3947 );
3948
3949 drop(definition);
3950 });
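        // Dropping the last handle to the target buffer releases the weak worktree.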
3951 cx.read(|cx| {
3952 assert_eq!(
3953 list_worktrees(&project, cx),
3954 [("/dir/b.rs".as_ref(), false)]
3955 );
3956 });
3957
3958 fn list_worktrees<'a>(
3959 project: &'a ModelHandle<Project>,
3960 cx: &'a AppContext,
3961 ) -> Vec<(&'a Path, bool)> {
3962 project
3963 .read(cx)
3964 .worktrees(cx)
3965 .map(|worktree| {
3966 let worktree = worktree.read(cx);
3967 (
3968 worktree.as_local().unwrap().abs_path().as_ref(),
3969 worktree.is_weak(),
3970 )
3971 })
3972 .collect::<Vec<_>>()
3973 }
3974 }
3975
3976 #[gpui::test]
3977 async fn test_save_file(mut cx: gpui::TestAppContext) {
3978 let fs = FakeFs::new(cx.background());
3979 fs.insert_tree(
3980 "/dir",
3981 json!({
3982 "file1": "the old contents",
3983 }),
3984 )
3985 .await;
3986
3987 let project = Project::test(fs.clone(), &mut cx);
3988 let worktree_id = project
3989 .update(&mut cx, |p, cx| {
3990 p.find_or_create_local_worktree("/dir", false, cx)
3991 })
3992 .await
3993 .unwrap()
3994 .0
3995 .read_with(&cx, |tree, _| tree.id());
3996
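        // Open the buffer, prepend a large amount of text, and save it back to disk.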
3997 let buffer = project
3998 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3999 .await
4000 .unwrap();
4001 buffer
4002 .update(&mut cx, |buffer, cx| {
4003 assert_eq!(buffer.text(), "the old contents");
4004 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4005 buffer.save(cx)
4006 })
4007 .await
4008 .unwrap();
4009
4010 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4011 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
4012 }
4013
4014 #[gpui::test]
4015 async fn test_save_in_single_file_worktree(mut cx: gpui::TestAppContext) {
4016 let fs = FakeFs::new(cx.background());
4017 fs.insert_tree(
4018 "/dir",
4019 json!({
4020 "file1": "the old contents",
4021 }),
4022 )
4023 .await;
4024
4025 let project = Project::test(fs.clone(), &mut cx);
4026 let worktree_id = project
4027 .update(&mut cx, |p, cx| {
4028 p.find_or_create_local_worktree("/dir/file1", false, cx)
4029 })
4030 .await
4031 .unwrap()
4032 .0
4033 .read_with(&cx, |tree, _| tree.id());
4034
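        // In a single-file worktree, the file is opened via an empty relative path.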
4035 let buffer = project
4036 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4037 .await
4038 .unwrap();
4039 buffer
4040 .update(&mut cx, |buffer, cx| {
4041 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4042 buffer.save(cx)
4043 })
4044 .await
4045 .unwrap();
4046
4047 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4048 assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text()));
4049 }
4050
4051 #[gpui::test(retries = 5)]
4052 async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) {
4053 let dir = temp_tree(json!({
4054 "a": {
4055 "file1": "",
4056 "file2": "",
4057 "file3": "",
4058 },
4059 "b": {
4060 "c": {
4061 "file4": "",
4062 "file5": "",
4063 }
4064 }
4065 }));
4066
4067 let project = Project::test(Arc::new(RealFs), &mut cx);
4068 let rpc = project.read_with(&cx, |p, _| p.client.clone());
4069
4070 let (tree, _) = project
4071 .update(&mut cx, |p, cx| {
4072 p.find_or_create_local_worktree(dir.path(), false, cx)
4073 })
4074 .await
4075 .unwrap();
4076 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4077
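        // Helpers for opening buffers and for looking up worktree entry ids by path.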
4078 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4079 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4080 async move { buffer.await.unwrap() }
4081 };
4082 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4083 tree.read_with(cx, |tree, _| {
4084 tree.entry_for_path(path)
4085                     .unwrap_or_else(|| panic!("no entry for path {}", path))
4086 .id
4087 })
4088 };
4089
4090 let buffer2 = buffer_for_path("a/file2", &mut cx).await;
4091 let buffer3 = buffer_for_path("a/file3", &mut cx).await;
4092 let buffer4 = buffer_for_path("b/c/file4", &mut cx).await;
4093 let buffer5 = buffer_for_path("b/c/file5", &mut cx).await;
4094
4095 let file2_id = id_for_path("a/file2", &cx);
4096 let file3_id = id_for_path("a/file3", &cx);
4097 let file4_id = id_for_path("b/c/file4", &cx);
4098
4099 // Wait for the initial scan.
4100 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4101 .await;
4102
4103 // Create a remote copy of this worktree.
4104 let initial_snapshot = tree.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
4105 let (remote, load_task) = cx.update(|cx| {
4106 Worktree::remote(
4107 1,
4108 1,
4109 initial_snapshot.to_proto(&Default::default(), Default::default()),
4110 rpc.clone(),
4111 cx,
4112 )
4113 });
4114 load_task.await;
4115
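        // None of the buffers have been edited, so none should be dirty.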
4116 cx.read(|cx| {
4117 assert!(!buffer2.read(cx).is_dirty());
4118 assert!(!buffer3.read(cx).is_dirty());
4119 assert!(!buffer4.read(cx).is_dirty());
4120 assert!(!buffer5.read(cx).is_dirty());
4121 });
4122
4123 // Rename and delete files and directories.
4124 tree.flush_fs_events(&cx).await;
4125 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4126 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4127 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4128 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4129 tree.flush_fs_events(&cx).await;
4130
4131 let expected_paths = vec![
4132 "a",
4133 "a/file1",
4134 "a/file2.new",
4135 "b",
4136 "d",
4137 "d/file3",
4138 "d/file4",
4139 ];
4140
4141 cx.read(|app| {
4142 assert_eq!(
4143 tree.read(app)
4144 .paths()
4145 .map(|p| p.to_str().unwrap())
4146 .collect::<Vec<_>>(),
4147 expected_paths
4148 );
4149
4150 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4151 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4152 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4153
4154 assert_eq!(
4155 buffer2.read(app).file().unwrap().path().as_ref(),
4156 Path::new("a/file2.new")
4157 );
4158 assert_eq!(
4159 buffer3.read(app).file().unwrap().path().as_ref(),
4160 Path::new("d/file3")
4161 );
4162 assert_eq!(
4163 buffer4.read(app).file().unwrap().path().as_ref(),
4164 Path::new("d/file4")
4165 );
4166 assert_eq!(
4167 buffer5.read(app).file().unwrap().path().as_ref(),
4168 Path::new("b/c/file5")
4169 );
4170
4171 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4172 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4173 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4174 assert!(buffer5.read(app).file().unwrap().is_deleted());
4175 });
4176
4177 // Update the remote worktree. Check that it becomes consistent with the
4178 // local worktree.
4179 remote.update(&mut cx, |remote, cx| {
4180 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4181 &initial_snapshot,
4182 1,
4183 1,
4184 true,
4185 );
4186 remote
4187 .as_remote_mut()
4188 .unwrap()
4189 .snapshot
4190 .apply_remote_update(update_message)
4191 .unwrap();
4192
4193 assert_eq!(
4194 remote
4195 .paths()
4196 .map(|p| p.to_str().unwrap())
4197 .collect::<Vec<_>>(),
4198 expected_paths
4199 );
4200 });
4201 }
4202
4203 #[gpui::test]
4204 async fn test_buffer_deduping(mut cx: gpui::TestAppContext) {
4205 let fs = FakeFs::new(cx.background());
4206 fs.insert_tree(
4207 "/the-dir",
4208 json!({
4209 "a.txt": "a-contents",
4210 "b.txt": "b-contents",
4211 }),
4212 )
4213 .await;
4214
4215 let project = Project::test(fs.clone(), &mut cx);
4216 let worktree_id = project
4217 .update(&mut cx, |p, cx| {
4218 p.find_or_create_local_worktree("/the-dir", false, cx)
4219 })
4220 .await
4221 .unwrap()
4222 .0
4223 .read_with(&cx, |tree, _| tree.id());
4224
4225 // Spawn multiple tasks to open paths, repeating some paths.
4226 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(&mut cx, |p, cx| {
4227 (
4228 p.open_buffer((worktree_id, "a.txt"), cx),
4229 p.open_buffer((worktree_id, "b.txt"), cx),
4230 p.open_buffer((worktree_id, "a.txt"), cx),
4231 )
4232 });
4233
4234 let buffer_a_1 = buffer_a_1.await.unwrap();
4235 let buffer_a_2 = buffer_a_2.await.unwrap();
4236 let buffer_b = buffer_b.await.unwrap();
4237 assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
4238 assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
4239
4240 // There is only one buffer per path.
4241 let buffer_a_id = buffer_a_1.id();
4242 assert_eq!(buffer_a_2.id(), buffer_a_id);
4243
4244 // Open the same path again while it is still open.
4245 drop(buffer_a_1);
4246 let buffer_a_3 = project
4247 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4248 .await
4249 .unwrap();
4250
4251 // There's still only one buffer per path.
4252 assert_eq!(buffer_a_3.id(), buffer_a_id);
4253 }
4254
4255 #[gpui::test]
4256 async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) {
4257 use std::fs;
4258
4259 let dir = temp_tree(json!({
4260 "file1": "abc",
4261 "file2": "def",
4262 "file3": "ghi",
4263 }));
4264
4265 let project = Project::test(Arc::new(RealFs), &mut cx);
4266 let (worktree, _) = project
4267 .update(&mut cx, |p, cx| {
4268 p.find_or_create_local_worktree(dir.path(), false, cx)
4269 })
4270 .await
4271 .unwrap();
4272 let worktree_id = worktree.read_with(&cx, |worktree, _| worktree.id());
4273
4274 worktree.flush_fs_events(&cx).await;
4275 worktree
4276 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
4277 .await;
4278
4279 let buffer1 = project
4280 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4281 .await
4282 .unwrap();
4283 let events = Rc::new(RefCell::new(Vec::new()));
4284
4285 // initially, the buffer isn't dirty.
4286 buffer1.update(&mut cx, |buffer, cx| {
4287 cx.subscribe(&buffer1, {
4288 let events = events.clone();
4289 move |_, _, event, _| events.borrow_mut().push(event.clone())
4290 })
4291 .detach();
4292
4293 assert!(!buffer.is_dirty());
4294 assert!(events.borrow().is_empty());
4295
4296 buffer.edit(vec![1..2], "", cx);
4297 });
4298
4299 // after the first edit, the buffer is dirty, and emits a dirtied event.
4300 buffer1.update(&mut cx, |buffer, cx| {
4301             assert_eq!(buffer.text(), "ac");
4302 assert!(buffer.is_dirty());
4303 assert_eq!(
4304 *events.borrow(),
4305 &[language::Event::Edited, language::Event::Dirtied]
4306 );
4307 events.borrow_mut().clear();
4308 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4309 });
4310
4311 // after saving, the buffer is not dirty, and emits a saved event.
4312 buffer1.update(&mut cx, |buffer, cx| {
4313 assert!(!buffer.is_dirty());
4314 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4315 events.borrow_mut().clear();
4316
4317 buffer.edit(vec![1..1], "B", cx);
4318 buffer.edit(vec![2..2], "D", cx);
4319 });
4320
4321 // after editing again, the buffer is dirty, and emits another dirty event.
4322 buffer1.update(&mut cx, |buffer, cx| {
4323             assert_eq!(buffer.text(), "aBDc");
4324 assert!(buffer.is_dirty());
4325 assert_eq!(
4326 *events.borrow(),
4327 &[
4328 language::Event::Edited,
4329 language::Event::Dirtied,
4330 language::Event::Edited,
4331 ],
4332 );
4333 events.borrow_mut().clear();
4334
4335 // TODO - currently, after restoring the buffer to its
4336             // previously-saved state, the buffer is still considered dirty.
4337 buffer.edit([1..3], "", cx);
4338             assert_eq!(buffer.text(), "ac");
4339 assert!(buffer.is_dirty());
4340 });
4341
4342 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4343
4344 // When a file is deleted, the buffer is considered dirty.
4345 let events = Rc::new(RefCell::new(Vec::new()));
4346 let buffer2 = project
4347 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4348 .await
4349 .unwrap();
4350 buffer2.update(&mut cx, |_, cx| {
4351 cx.subscribe(&buffer2, {
4352 let events = events.clone();
4353 move |_, _, event, _| events.borrow_mut().push(event.clone())
4354 })
4355 .detach();
4356 });
4357
4358 fs::remove_file(dir.path().join("file2")).unwrap();
4359 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4360 assert_eq!(
4361 *events.borrow(),
4362 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4363 );
4364
4365         // Deleting a file whose buffer is already dirty doesn't emit another Dirtied event.
4366 let events = Rc::new(RefCell::new(Vec::new()));
4367 let buffer3 = project
4368 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4369 .await
4370 .unwrap();
4371 buffer3.update(&mut cx, |_, cx| {
4372 cx.subscribe(&buffer3, {
4373 let events = events.clone();
4374 move |_, _, event, _| events.borrow_mut().push(event.clone())
4375 })
4376 .detach();
4377 });
4378
4379 worktree.flush_fs_events(&cx).await;
4380 buffer3.update(&mut cx, |buffer, cx| {
4381 buffer.edit(Some(0..0), "x", cx);
4382 });
4383 events.borrow_mut().clear();
4384 fs::remove_file(dir.path().join("file3")).unwrap();
4385 buffer3
4386 .condition(&cx, |_, _| !events.borrow().is_empty())
4387 .await;
4388 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4389 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4390 }
4391
4392 #[gpui::test]
4393 async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) {
4394 use std::fs;
4395
4396 let initial_contents = "aaa\nbbbbb\nc\n";
4397 let dir = temp_tree(json!({ "the-file": initial_contents }));
4398
4399 let project = Project::test(Arc::new(RealFs), &mut cx);
4400 let (worktree, _) = project
4401 .update(&mut cx, |p, cx| {
4402 p.find_or_create_local_worktree(dir.path(), false, cx)
4403 })
4404 .await
4405 .unwrap();
4406 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
4407
4408 worktree
4409 .read_with(&cx, |t, _| t.as_local().unwrap().scan_complete())
4410 .await;
4411
4412 let abs_path = dir.path().join("the-file");
4413 let buffer = project
4414 .update(&mut cx, |p, cx| {
4415 p.open_buffer((worktree_id, "the-file"), cx)
4416 })
4417 .await
4418 .unwrap();
4419
4420 // TODO
4421 // Add a cursor on each row.
4422 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4423 // assert!(!buffer.is_dirty());
4424 // buffer.add_selection_set(
4425 // &(0..3)
4426 // .map(|row| Selection {
4427 // id: row as usize,
4428 // start: Point::new(row, 1),
4429 // end: Point::new(row, 1),
4430 // reversed: false,
4431 // goal: SelectionGoal::None,
4432 // })
4433 // .collect::<Vec<_>>(),
4434 // cx,
4435 // )
4436 // });
4437
4438 // Change the file on disk, adding two new lines of text, and removing
4439 // one line.
4440 buffer.read_with(&cx, |buffer, _| {
4441 assert!(!buffer.is_dirty());
4442 assert!(!buffer.has_conflict());
4443 });
4444 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4445 fs::write(&abs_path, new_contents).unwrap();
4446
4447 // Because the buffer was not modified, it is reloaded from disk. Its
4448 // contents are edited according to the diff between the old and new
4449 // file contents.
4450 buffer
4451 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4452 .await;
4453
4454 buffer.update(&mut cx, |buffer, _| {
4455 assert_eq!(buffer.text(), new_contents);
4456 assert!(!buffer.is_dirty());
4457 assert!(!buffer.has_conflict());
4458
4459 // TODO
4460 // let cursor_positions = buffer
4461 // .selection_set(selection_set_id)
4462 // .unwrap()
4463 // .selections::<Point>(&*buffer)
4464 // .map(|selection| {
4465 // assert_eq!(selection.start, selection.end);
4466 // selection.start
4467 // })
4468 // .collect::<Vec<_>>();
4469 // assert_eq!(
4470 // cursor_positions,
4471 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4472 // );
4473 });
4474
4475 // Modify the buffer
4476 buffer.update(&mut cx, |buffer, cx| {
4477 buffer.edit(vec![0..0], " ", cx);
4478 assert!(buffer.is_dirty());
4479 assert!(!buffer.has_conflict());
4480 });
4481
4482 // Change the file on disk again, adding blank lines to the beginning.
4483 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4484
4485 // Because the buffer is modified, it doesn't reload from disk, but is
4486 // marked as having a conflict.
4487 buffer
4488 .condition(&cx, |buffer, _| buffer.has_conflict())
4489 .await;
4490 }
4491
4492 #[gpui::test]
4493 async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
4494 let fs = FakeFs::new(cx.background());
4495 fs.insert_tree(
4496 "/the-dir",
4497 json!({
4498 "a.rs": "
4499 fn foo(mut v: Vec<usize>) {
4500 for x in &v {
4501 v.push(1);
4502 }
4503 }
4504 "
4505 .unindent(),
4506 }),
4507 )
4508 .await;
4509
4510 let project = Project::test(fs.clone(), &mut cx);
4511 let (worktree, _) = project
4512 .update(&mut cx, |p, cx| {
4513 p.find_or_create_local_worktree("/the-dir", false, cx)
4514 })
4515 .await
4516 .unwrap();
4517 let worktree_id = worktree.read_with(&cx, |tree, _| tree.id());
4518
4519 let buffer = project
4520 .update(&mut cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4521 .await
4522 .unwrap();
4523
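        // Publish diagnostics in which hint-severity entries and their primary
        // diagnostics refer to one another through related information.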
4524 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4525 let message = lsp::PublishDiagnosticsParams {
4526 uri: buffer_uri.clone(),
4527 diagnostics: vec![
4528 lsp::Diagnostic {
4529 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4530 severity: Some(DiagnosticSeverity::WARNING),
4531 message: "error 1".to_string(),
4532 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4533 location: lsp::Location {
4534 uri: buffer_uri.clone(),
4535 range: lsp::Range::new(
4536 lsp::Position::new(1, 8),
4537 lsp::Position::new(1, 9),
4538 ),
4539 },
4540 message: "error 1 hint 1".to_string(),
4541 }]),
4542 ..Default::default()
4543 },
4544 lsp::Diagnostic {
4545 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4546 severity: Some(DiagnosticSeverity::HINT),
4547 message: "error 1 hint 1".to_string(),
4548 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4549 location: lsp::Location {
4550 uri: buffer_uri.clone(),
4551 range: lsp::Range::new(
4552 lsp::Position::new(1, 8),
4553 lsp::Position::new(1, 9),
4554 ),
4555 },
4556 message: "original diagnostic".to_string(),
4557 }]),
4558 ..Default::default()
4559 },
4560 lsp::Diagnostic {
4561 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4562 severity: Some(DiagnosticSeverity::ERROR),
4563 message: "error 2".to_string(),
4564 related_information: Some(vec![
4565 lsp::DiagnosticRelatedInformation {
4566 location: lsp::Location {
4567 uri: buffer_uri.clone(),
4568 range: lsp::Range::new(
4569 lsp::Position::new(1, 13),
4570 lsp::Position::new(1, 15),
4571 ),
4572 },
4573 message: "error 2 hint 1".to_string(),
4574 },
4575 lsp::DiagnosticRelatedInformation {
4576 location: lsp::Location {
4577 uri: buffer_uri.clone(),
4578 range: lsp::Range::new(
4579 lsp::Position::new(1, 13),
4580 lsp::Position::new(1, 15),
4581 ),
4582 },
4583 message: "error 2 hint 2".to_string(),
4584 },
4585 ]),
4586 ..Default::default()
4587 },
4588 lsp::Diagnostic {
4589 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4590 severity: Some(DiagnosticSeverity::HINT),
4591 message: "error 2 hint 1".to_string(),
4592 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4593 location: lsp::Location {
4594 uri: buffer_uri.clone(),
4595 range: lsp::Range::new(
4596 lsp::Position::new(2, 8),
4597 lsp::Position::new(2, 17),
4598 ),
4599 },
4600 message: "original diagnostic".to_string(),
4601 }]),
4602 ..Default::default()
4603 },
4604 lsp::Diagnostic {
4605 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4606 severity: Some(DiagnosticSeverity::HINT),
4607 message: "error 2 hint 2".to_string(),
4608 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4609 location: lsp::Location {
4610 uri: buffer_uri.clone(),
4611 range: lsp::Range::new(
4612 lsp::Position::new(2, 8),
4613 lsp::Position::new(2, 17),
4614 ),
4615 },
4616 message: "original diagnostic".to_string(),
4617 }]),
4618 ..Default::default()
4619 },
4620 ],
4621 version: None,
4622 };
4623
4624 project
4625 .update(&mut cx, |p, cx| {
4626 p.update_diagnostics(message, &Default::default(), cx)
4627 })
4628 .unwrap();
4629 let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4630
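        // The diagnostics are grouped by their related information: "error 1" and its
        // hint form group 0, while "error 2" and its hints form group 1.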
4631 assert_eq!(
4632 buffer
4633 .diagnostics_in_range::<_, Point>(0..buffer.len())
4634 .collect::<Vec<_>>(),
4635 &[
4636 DiagnosticEntry {
4637 range: Point::new(1, 8)..Point::new(1, 9),
4638 diagnostic: Diagnostic {
4639 severity: DiagnosticSeverity::WARNING,
4640 message: "error 1".to_string(),
4641 group_id: 0,
4642 is_primary: true,
4643 ..Default::default()
4644 }
4645 },
4646 DiagnosticEntry {
4647 range: Point::new(1, 8)..Point::new(1, 9),
4648 diagnostic: Diagnostic {
4649 severity: DiagnosticSeverity::HINT,
4650 message: "error 1 hint 1".to_string(),
4651 group_id: 0,
4652 is_primary: false,
4653 ..Default::default()
4654 }
4655 },
4656 DiagnosticEntry {
4657 range: Point::new(1, 13)..Point::new(1, 15),
4658 diagnostic: Diagnostic {
4659 severity: DiagnosticSeverity::HINT,
4660 message: "error 2 hint 1".to_string(),
4661 group_id: 1,
4662 is_primary: false,
4663 ..Default::default()
4664 }
4665 },
4666 DiagnosticEntry {
4667 range: Point::new(1, 13)..Point::new(1, 15),
4668 diagnostic: Diagnostic {
4669 severity: DiagnosticSeverity::HINT,
4670 message: "error 2 hint 2".to_string(),
4671 group_id: 1,
4672 is_primary: false,
4673 ..Default::default()
4674 }
4675 },
4676 DiagnosticEntry {
4677 range: Point::new(2, 8)..Point::new(2, 17),
4678 diagnostic: Diagnostic {
4679 severity: DiagnosticSeverity::ERROR,
4680 message: "error 2".to_string(),
4681 group_id: 1,
4682 is_primary: true,
4683 ..Default::default()
4684 }
4685 }
4686 ]
4687 );
4688
4689 assert_eq!(
4690 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4691 &[
4692 DiagnosticEntry {
4693 range: Point::new(1, 8)..Point::new(1, 9),
4694 diagnostic: Diagnostic {
4695 severity: DiagnosticSeverity::WARNING,
4696 message: "error 1".to_string(),
4697 group_id: 0,
4698 is_primary: true,
4699 ..Default::default()
4700 }
4701 },
4702 DiagnosticEntry {
4703 range: Point::new(1, 8)..Point::new(1, 9),
4704 diagnostic: Diagnostic {
4705 severity: DiagnosticSeverity::HINT,
4706 message: "error 1 hint 1".to_string(),
4707 group_id: 0,
4708 is_primary: false,
4709 ..Default::default()
4710 }
4711 },
4712 ]
4713 );
4714 assert_eq!(
4715 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4716 &[
4717 DiagnosticEntry {
4718 range: Point::new(1, 13)..Point::new(1, 15),
4719 diagnostic: Diagnostic {
4720 severity: DiagnosticSeverity::HINT,
4721 message: "error 2 hint 1".to_string(),
4722 group_id: 1,
4723 is_primary: false,
4724 ..Default::default()
4725 }
4726 },
4727 DiagnosticEntry {
4728 range: Point::new(1, 13)..Point::new(1, 15),
4729 diagnostic: Diagnostic {
4730 severity: DiagnosticSeverity::HINT,
4731 message: "error 2 hint 2".to_string(),
4732 group_id: 1,
4733 is_primary: false,
4734 ..Default::default()
4735 }
4736 },
4737 DiagnosticEntry {
4738 range: Point::new(2, 8)..Point::new(2, 17),
4739 diagnostic: Diagnostic {
4740 severity: DiagnosticSeverity::ERROR,
4741 message: "error 2".to_string(),
4742 group_id: 1,
4743 is_primary: true,
4744 ..Default::default()
4745 }
4746 }
4747 ]
4748 );
4749 }
4750
4751 #[gpui::test]
4752 async fn test_rename(mut cx: gpui::TestAppContext) {
4753 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4754 let language = Arc::new(Language::new(
4755 LanguageConfig {
4756 name: "Rust".into(),
4757 path_suffixes: vec!["rs".to_string()],
4758 language_server: Some(language_server_config),
4759 ..Default::default()
4760 },
4761 Some(tree_sitter_rust::language()),
4762 ));
4763
4764 let fs = FakeFs::new(cx.background());
4765 fs.insert_tree(
4766 "/dir",
4767 json!({
4768 "one.rs": "const ONE: usize = 1;",
4769 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4770 }),
4771 )
4772 .await;
4773
4774 let project = Project::test(fs.clone(), &mut cx);
4775 project.update(&mut cx, |project, _| {
4776 Arc::get_mut(&mut project.languages).unwrap().add(language);
4777 });
4778
4779 let (tree, _) = project
4780 .update(&mut cx, |project, cx| {
4781 project.find_or_create_local_worktree("/dir", false, cx)
4782 })
4783 .await
4784 .unwrap();
4785 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4786 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4787 .await;
4788
4789 let buffer = project
4790 .update(&mut cx, |project, cx| {
4791 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4792 })
4793 .await
4794 .unwrap();
4795
4796 let mut fake_server = fake_servers.next().await.unwrap();
4797
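        // Prepare to rename the `ONE` constant and verify the range reported by the
        // fake language server.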
4798 let response = project.update(&mut cx, |project, cx| {
4799 project.prepare_rename(buffer.clone(), 7, cx)
4800 });
4801 fake_server
4802 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4803 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4804 assert_eq!(params.position, lsp::Position::new(0, 7));
4805 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4806 lsp::Position::new(0, 6),
4807 lsp::Position::new(0, 9),
4808 )))
4809 })
4810 .next()
4811 .await
4812 .unwrap();
4813 let range = response.await.unwrap().unwrap();
4814 let range = buffer.read_with(&cx, |buffer, _| range.to_offset(buffer));
4815 assert_eq!(range, 6..9);
4816
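        // Perform the rename. The fake server responds with edits to both files, and
        // the resulting transaction should contain both affected buffers.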
4817 let response = project.update(&mut cx, |project, cx| {
4818 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4819 });
4820 fake_server
4821 .handle_request::<lsp::request::Rename, _>(|params, _| {
4822 assert_eq!(
4823 params.text_document_position.text_document.uri.as_str(),
4824 "file:///dir/one.rs"
4825 );
4826 assert_eq!(
4827 params.text_document_position.position,
4828 lsp::Position::new(0, 7)
4829 );
4830 assert_eq!(params.new_name, "THREE");
4831 Some(lsp::WorkspaceEdit {
4832 changes: Some(
4833 [
4834 (
4835 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4836 vec![lsp::TextEdit::new(
4837 lsp::Range::new(
4838 lsp::Position::new(0, 6),
4839 lsp::Position::new(0, 9),
4840 ),
4841 "THREE".to_string(),
4842 )],
4843 ),
4844 (
4845 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4846 vec![
4847 lsp::TextEdit::new(
4848 lsp::Range::new(
4849 lsp::Position::new(0, 24),
4850 lsp::Position::new(0, 27),
4851 ),
4852 "THREE".to_string(),
4853 ),
4854 lsp::TextEdit::new(
4855 lsp::Range::new(
4856 lsp::Position::new(0, 35),
4857 lsp::Position::new(0, 38),
4858 ),
4859 "THREE".to_string(),
4860 ),
4861 ],
4862 ),
4863 ]
4864 .into_iter()
4865 .collect(),
4866 ),
4867 ..Default::default()
4868 })
4869 })
4870 .next()
4871 .await
4872 .unwrap();
4873 let mut transaction = response.await.unwrap().0;
4874 assert_eq!(transaction.len(), 2);
4875 assert_eq!(
4876 transaction
4877 .remove_entry(&buffer)
4878 .unwrap()
4879 .0
4880 .read_with(&cx, |buffer, _| buffer.text()),
4881 "const THREE: usize = 1;"
4882 );
4883 assert_eq!(
4884 transaction
4885 .into_keys()
4886 .next()
4887 .unwrap()
4888 .read_with(&cx, |buffer, _| buffer.text()),
4889 "const TWO: usize = one::THREE + one::THREE;"
4890 );
4891 }
4892
4893 #[gpui::test]
4894 async fn test_search(mut cx: gpui::TestAppContext) {
4895 let fs = FakeFs::new(cx.background());
4896 fs.insert_tree(
4897 "/dir",
4898 json!({
4899 "one.rs": "const ONE: usize = 1;",
4900 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4901 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4902 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4903 }),
4904 )
4905 .await;
4906 let project = Project::test(fs.clone(), &mut cx);
4907 let (tree, _) = project
4908 .update(&mut cx, |project, cx| {
4909 project.find_or_create_local_worktree("/dir", false, cx)
4910 })
4911 .await
4912 .unwrap();
4913 let worktree_id = tree.read_with(&cx, |tree, _| tree.id());
4914 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4915 .await;
4916
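        // Files that aren't open in a buffer are searched directly on disk.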
4917 assert_eq!(
4918 search(&project, SearchQuery::text("TWO", false, true), &mut cx).await,
4919 HashMap::from_iter([
4920 ("two.rs".to_string(), vec![6..9]),
4921 ("three.rs".to_string(), vec![37..40])
4922 ])
4923 );
4924
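        // Edit an open buffer so that its contents differ from what's on disk. The
        // search should reflect the buffer's unsaved contents.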
4925 let buffer_4 = project
4926 .update(&mut cx, |project, cx| {
4927 project.open_buffer((worktree_id, "four.rs"), cx)
4928 })
4929 .await
4930 .unwrap();
4931 buffer_4.update(&mut cx, |buffer, cx| {
4932 buffer.edit([20..28, 31..43], "two::TWO", cx);
4933 });
4934
4935 assert_eq!(
4936 search(&project, SearchQuery::text("TWO", false, true), &mut cx).await,
4937 HashMap::from_iter([
4938 ("two.rs".to_string(), vec![6..9]),
4939 ("three.rs".to_string(), vec![37..40]),
4940 ("four.rs".to_string(), vec![25..28, 36..39])
4941 ])
4942 );
4943
4944 async fn search(
4945 project: &ModelHandle<Project>,
4946 query: SearchQuery,
4947 cx: &mut gpui::TestAppContext,
4948 ) -> HashMap<String, Vec<Range<usize>>> {
4949 project
4950 .update(cx, |project, cx| project.search(query, cx))
4951 .await
4952 .into_iter()
4953 .map(|(buffer, ranges)| {
4954 buffer.read_with(cx, |buffer, _| {
4955 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4956 let ranges = ranges
4957 .into_iter()
4958 .map(|range| range.to_offset(buffer))
4959 .collect::<Vec<_>>();
4960 (path, ranges)
4961 })
4962 })
4963 .collect()
4964 }
4965 }
4966}