1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 opened_buffers: HashMap<u64, OpenBuffer>,
68 nonce: u128,
69}
70
71enum OpenBuffer {
72 Strong(ModelHandle<Buffer>),
73 Weak(WeakModelHandle<Buffer>),
74 Loading(Vec<Operation>),
75}
76
77enum WorktreeHandle {
78 Strong(ModelHandle<Worktree>),
79 Weak(WeakModelHandle<Worktree>),
80}
81
82enum ProjectClientState {
83 Local {
84 is_shared: bool,
85 remote_id_tx: watch::Sender<Option<u64>>,
86 remote_id_rx: watch::Receiver<Option<u64>>,
87 _maintain_remote_id_task: Task<Option<()>>,
88 },
89 Remote {
90 sharing_has_stopped: bool,
91 remote_id: u64,
92 replica_id: ReplicaId,
93 },
94}
95
96#[derive(Clone, Debug)]
97pub struct Collaborator {
98 pub user: Arc<User>,
99 pub peer_id: PeerId,
100 pub replica_id: ReplicaId,
101}
102
103#[derive(Clone, Debug, PartialEq)]
104pub enum Event {
105 ActiveEntryChanged(Option<ProjectEntry>),
106 WorktreeRemoved(WorktreeId),
107 DiskBasedDiagnosticsStarted,
108 DiskBasedDiagnosticsUpdated,
109 DiskBasedDiagnosticsFinished,
110 DiagnosticsUpdated(ProjectPath),
111}
112
113#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
114pub struct ProjectPath {
115 pub worktree_id: WorktreeId,
116 pub path: Arc<Path>,
117}
118
119#[derive(Clone, Debug, Default, PartialEq)]
120pub struct DiagnosticSummary {
121 pub error_count: usize,
122 pub warning_count: usize,
123 pub info_count: usize,
124 pub hint_count: usize,
125}
126
127#[derive(Debug)]
128pub struct Location {
129 pub buffer: ModelHandle<Buffer>,
130 pub range: Range<language::Anchor>,
131}
132
133#[derive(Debug)]
134pub struct DocumentHighlight {
135 pub range: Range<language::Anchor>,
136 pub kind: DocumentHighlightKind,
137}
138
139#[derive(Clone, Debug)]
140pub struct Symbol {
141 pub source_worktree_id: WorktreeId,
142 pub worktree_id: WorktreeId,
143 pub language_name: String,
144 pub path: PathBuf,
145 pub label: CodeLabel,
146 pub name: String,
147 pub kind: lsp::SymbolKind,
148 pub range: Range<PointUtf16>,
149 pub signature: [u8; 32],
150}
151
152#[derive(Default)]
153pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
154
155impl DiagnosticSummary {
156 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
157 let mut this = Self {
158 error_count: 0,
159 warning_count: 0,
160 info_count: 0,
161 hint_count: 0,
162 };
163
164 for entry in diagnostics {
165 if entry.diagnostic.is_primary {
166 match entry.diagnostic.severity {
167 DiagnosticSeverity::ERROR => this.error_count += 1,
168 DiagnosticSeverity::WARNING => this.warning_count += 1,
169 DiagnosticSeverity::INFORMATION => this.info_count += 1,
170 DiagnosticSeverity::HINT => this.hint_count += 1,
171 _ => {}
172 }
173 }
174 }
175
176 this
177 }
178
179 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
180 proto::DiagnosticSummary {
181 path: path.to_string_lossy().to_string(),
182 error_count: self.error_count as u32,
183 warning_count: self.warning_count as u32,
184 info_count: self.info_count as u32,
185 hint_count: self.hint_count as u32,
186 }
187 }
188}
189
190#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
191pub struct ProjectEntry {
192 pub worktree_id: WorktreeId,
193 pub entry_id: usize,
194}
195
196impl Project {
197 pub fn init(client: &Arc<Client>) {
198 client.add_entity_message_handler(Self::handle_add_collaborator);
199 client.add_entity_message_handler(Self::handle_buffer_reloaded);
200 client.add_entity_message_handler(Self::handle_buffer_saved);
201 client.add_entity_message_handler(Self::handle_close_buffer);
202 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
203 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
204 client.add_entity_message_handler(Self::handle_remove_collaborator);
205 client.add_entity_message_handler(Self::handle_register_worktree);
206 client.add_entity_message_handler(Self::handle_unregister_worktree);
207 client.add_entity_message_handler(Self::handle_unshare_project);
208 client.add_entity_message_handler(Self::handle_update_buffer_file);
209 client.add_entity_message_handler(Self::handle_update_buffer);
210 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
211 client.add_entity_message_handler(Self::handle_update_worktree);
212 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
213 client.add_entity_request_handler(Self::handle_apply_code_action);
214 client.add_entity_request_handler(Self::handle_format_buffers);
215 client.add_entity_request_handler(Self::handle_get_code_actions);
216 client.add_entity_request_handler(Self::handle_get_completions);
217 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
218 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
222 client.add_entity_request_handler(Self::handle_search_project);
223 client.add_entity_request_handler(Self::handle_get_project_symbols);
224 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
225 client.add_entity_request_handler(Self::handle_open_buffer);
226 client.add_entity_request_handler(Self::handle_save_buffer);
227 }
228
229 pub fn local(
230 client: Arc<Client>,
231 user_store: ModelHandle<UserStore>,
232 languages: Arc<LanguageRegistry>,
233 fs: Arc<dyn Fs>,
234 cx: &mut MutableAppContext,
235 ) -> ModelHandle<Self> {
236 cx.add_model(|cx: &mut ModelContext<Self>| {
237 let (remote_id_tx, remote_id_rx) = watch::channel();
238 let _maintain_remote_id_task = cx.spawn_weak({
239 let rpc = client.clone();
240 move |this, mut cx| {
241 async move {
242 let mut status = rpc.status();
243 while let Some(status) = status.next().await {
244 if let Some(this) = this.upgrade(&cx) {
245 let remote_id = if let client::Status::Connected { .. } = status {
246 let response = rpc.request(proto::RegisterProject {}).await?;
247 Some(response.project_id)
248 } else {
249 None
250 };
251
252 if let Some(project_id) = remote_id {
253 let mut registrations = Vec::new();
254 this.update(&mut cx, |this, cx| {
255 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
256 registrations.push(worktree.update(
257 cx,
258 |worktree, cx| {
259 let worktree = worktree.as_local_mut().unwrap();
260 worktree.register(project_id, cx)
261 },
262 ));
263 }
264 });
265 for registration in registrations {
266 registration.await?;
267 }
268 }
269 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
270 }
271 }
272 Ok(())
273 }
274 .log_err()
275 }
276 });
277
278 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
279 Self {
280 worktrees: Default::default(),
281 collaborators: Default::default(),
282 opened_buffers: Default::default(),
283 shared_buffers: Default::default(),
284 loading_buffers: Default::default(),
285 client_state: ProjectClientState::Local {
286 is_shared: false,
287 remote_id_tx,
288 remote_id_rx,
289 _maintain_remote_id_task,
290 },
291 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
292 subscriptions: Vec::new(),
293 active_entry: None,
294 languages,
295 client,
296 user_store,
297 fs,
298 language_servers_with_diagnostics_running: 0,
299 language_servers: Default::default(),
300 started_language_servers: Default::default(),
301 nonce: StdRng::from_entropy().gen(),
302 }
303 })
304 }
305
306 pub async fn remote(
307 remote_id: u64,
308 client: Arc<Client>,
309 user_store: ModelHandle<UserStore>,
310 languages: Arc<LanguageRegistry>,
311 fs: Arc<dyn Fs>,
312 cx: &mut AsyncAppContext,
313 ) -> Result<ModelHandle<Self>> {
314 client.authenticate_and_connect(&cx).await?;
315
316 let response = client
317 .request(proto::JoinProject {
318 project_id: remote_id,
319 })
320 .await?;
321
322 let replica_id = response.replica_id as ReplicaId;
323
324 let mut worktrees = Vec::new();
325 for worktree in response.worktrees {
326 let (worktree, load_task) = cx
327 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
328 worktrees.push(worktree);
329 load_task.detach();
330 }
331
332 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
333 let this = cx.add_model(|cx| {
334 let mut this = Self {
335 worktrees: Vec::new(),
336 loading_buffers: Default::default(),
337 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
338 shared_buffers: Default::default(),
339 active_entry: None,
340 collaborators: Default::default(),
341 languages,
342 user_store: user_store.clone(),
343 fs,
344 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
345 client,
346 client_state: ProjectClientState::Remote {
347 sharing_has_stopped: false,
348 remote_id,
349 replica_id,
350 },
351 language_servers_with_diagnostics_running: 0,
352 language_servers: Default::default(),
353 started_language_servers: Default::default(),
354 opened_buffers: Default::default(),
355 nonce: StdRng::from_entropy().gen(),
356 };
357 for worktree in worktrees {
358 this.add_worktree(&worktree, cx);
359 }
360 this
361 });
362
363 let user_ids = response
364 .collaborators
365 .iter()
366 .map(|peer| peer.user_id)
367 .collect();
368 user_store
369 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
370 .await?;
371 let mut collaborators = HashMap::default();
372 for message in response.collaborators {
373 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
374 collaborators.insert(collaborator.peer_id, collaborator);
375 }
376
377 this.update(cx, |this, _| {
378 this.collaborators = collaborators;
379 });
380
381 Ok(this)
382 }
383
384 #[cfg(any(test, feature = "test-support"))]
385 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
386 let languages = Arc::new(LanguageRegistry::new());
387 let http_client = client::test::FakeHttpClient::with_404_response();
388 let client = client::Client::new(http_client.clone());
389 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
390 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
391 }
392
393 #[cfg(any(test, feature = "test-support"))]
394 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
395 self.opened_buffers
396 .get(&remote_id)
397 .and_then(|buffer| buffer.upgrade(cx))
398 }
399
400 #[cfg(any(test, feature = "test-support"))]
401 pub fn has_deferred_operations(&self, cx: &AppContext) -> bool {
402 self.opened_buffers.values().any(|buffer| match buffer {
403 OpenBuffer::Strong(buffer) => buffer.read(cx).deferred_ops_len() > 0,
404 OpenBuffer::Weak(buffer) => buffer
405 .upgrade(cx)
406 .map_or(false, |buffer| buffer.read(cx).deferred_ops_len() > 0),
407 OpenBuffer::Loading(_) => false,
408 })
409 }
410
411 #[cfg(any(test, feature = "test-support"))]
412 pub fn languages(&self) -> &Arc<LanguageRegistry> {
413 &self.languages
414 }
415
416 pub fn fs(&self) -> &Arc<dyn Fs> {
417 &self.fs
418 }
419
420 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
421 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
422 *remote_id_tx.borrow_mut() = remote_id;
423 }
424
425 self.subscriptions.clear();
426 if let Some(remote_id) = remote_id {
427 self.subscriptions
428 .push(self.client.add_model_for_remote_entity(remote_id, cx));
429 }
430 }
431
432 pub fn remote_id(&self) -> Option<u64> {
433 match &self.client_state {
434 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
435 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
436 }
437 }
438
439 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
440 let mut id = None;
441 let mut watch = None;
442 match &self.client_state {
443 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
444 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
445 }
446
447 async move {
448 if let Some(id) = id {
449 return id;
450 }
451 let mut watch = watch.unwrap();
452 loop {
453 let id = *watch.borrow();
454 if let Some(id) = id {
455 return id;
456 }
457 watch.next().await;
458 }
459 }
460 }
461
462 pub fn replica_id(&self) -> ReplicaId {
463 match &self.client_state {
464 ProjectClientState::Local { .. } => 0,
465 ProjectClientState::Remote { replica_id, .. } => *replica_id,
466 }
467 }
468
469 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
470 &self.collaborators
471 }
472
473 pub fn worktrees<'a>(
474 &'a self,
475 cx: &'a AppContext,
476 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
477 self.worktrees
478 .iter()
479 .filter_map(move |worktree| worktree.upgrade(cx))
480 }
481
482 pub fn visible_worktrees<'a>(
483 &'a self,
484 cx: &'a AppContext,
485 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
486 self.worktrees.iter().filter_map(|worktree| {
487 worktree.upgrade(cx).and_then(|worktree| {
488 if worktree.read(cx).is_visible() {
489 Some(worktree)
490 } else {
491 None
492 }
493 })
494 })
495 }
496
497 pub fn worktree_for_id(
498 &self,
499 id: WorktreeId,
500 cx: &AppContext,
501 ) -> Option<ModelHandle<Worktree>> {
502 self.worktrees(cx)
503 .find(|worktree| worktree.read(cx).id() == id)
504 }
505
506 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
507 let rpc = self.client.clone();
508 cx.spawn(|this, mut cx| async move {
509 let project_id = this.update(&mut cx, |this, cx| {
510 if let ProjectClientState::Local {
511 is_shared,
512 remote_id_rx,
513 ..
514 } = &mut this.client_state
515 {
516 *is_shared = true;
517
518 for open_buffer in this.opened_buffers.values_mut() {
519 match open_buffer {
520 OpenBuffer::Strong(_) => {}
521 OpenBuffer::Weak(buffer) => {
522 if let Some(buffer) = buffer.upgrade(cx) {
523 *open_buffer = OpenBuffer::Strong(buffer);
524 }
525 }
526 OpenBuffer::Loading(_) => unreachable!(),
527 }
528 }
529
530 for worktree_handle in this.worktrees.iter_mut() {
531 match worktree_handle {
532 WorktreeHandle::Strong(_) => {}
533 WorktreeHandle::Weak(worktree) => {
534 if let Some(worktree) = worktree.upgrade(cx) {
535 *worktree_handle = WorktreeHandle::Strong(worktree);
536 }
537 }
538 }
539 }
540
541 remote_id_rx
542 .borrow()
543 .ok_or_else(|| anyhow!("no project id"))
544 } else {
545 Err(anyhow!("can't share a remote project"))
546 }
547 })?;
548
549 rpc.request(proto::ShareProject { project_id }).await?;
550
551 let mut tasks = Vec::new();
552 this.update(&mut cx, |this, cx| {
553 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
554 worktree.update(cx, |worktree, cx| {
555 let worktree = worktree.as_local_mut().unwrap();
556 tasks.push(worktree.share(project_id, cx));
557 });
558 }
559 });
560 for task in tasks {
561 task.await?;
562 }
563 this.update(&mut cx, |_, cx| cx.notify());
564 Ok(())
565 })
566 }
567
568 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
569 let rpc = self.client.clone();
570 cx.spawn(|this, mut cx| async move {
571 let project_id = this.update(&mut cx, |this, cx| {
572 if let ProjectClientState::Local {
573 is_shared,
574 remote_id_rx,
575 ..
576 } = &mut this.client_state
577 {
578 *is_shared = false;
579
580 for open_buffer in this.opened_buffers.values_mut() {
581 match open_buffer {
582 OpenBuffer::Strong(buffer) => {
583 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
584 }
585 _ => {}
586 }
587 }
588
589 for worktree_handle in this.worktrees.iter_mut() {
590 match worktree_handle {
591 WorktreeHandle::Strong(worktree) => {
592 if !worktree.read(cx).is_visible() {
593 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
594 }
595 }
596 _ => {}
597 }
598 }
599
600 remote_id_rx
601 .borrow()
602 .ok_or_else(|| anyhow!("no project id"))
603 } else {
604 Err(anyhow!("can't share a remote project"))
605 }
606 })?;
607
608 rpc.send(proto::UnshareProject { project_id })?;
609 this.update(&mut cx, |this, cx| {
610 this.collaborators.clear();
611 this.shared_buffers.clear();
612 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
613 worktree.update(cx, |worktree, _| {
614 worktree.as_local_mut().unwrap().unshare();
615 });
616 }
617 cx.notify()
618 });
619 Ok(())
620 })
621 }
622
623 pub fn is_read_only(&self) -> bool {
624 match &self.client_state {
625 ProjectClientState::Local { .. } => false,
626 ProjectClientState::Remote {
627 sharing_has_stopped,
628 ..
629 } => *sharing_has_stopped,
630 }
631 }
632
633 pub fn is_local(&self) -> bool {
634 match &self.client_state {
635 ProjectClientState::Local { .. } => true,
636 ProjectClientState::Remote { .. } => false,
637 }
638 }
639
640 pub fn is_remote(&self) -> bool {
641 !self.is_local()
642 }
643
644 pub fn open_buffer(
645 &mut self,
646 path: impl Into<ProjectPath>,
647 cx: &mut ModelContext<Self>,
648 ) -> Task<Result<ModelHandle<Buffer>>> {
649 let project_path = path.into();
650 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
651 worktree
652 } else {
653 return Task::ready(Err(anyhow!("no such worktree")));
654 };
655
656 // If there is already a buffer for the given path, then return it.
657 let existing_buffer = self.get_open_buffer(&project_path, cx);
658 if let Some(existing_buffer) = existing_buffer {
659 return Task::ready(Ok(existing_buffer));
660 }
661
662 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
663 // If the given path is already being loaded, then wait for that existing
664 // task to complete and return the same buffer.
665 hash_map::Entry::Occupied(e) => e.get().clone(),
666
667 // Otherwise, record the fact that this path is now being loaded.
668 hash_map::Entry::Vacant(entry) => {
669 let (mut tx, rx) = postage::watch::channel();
670 entry.insert(rx.clone());
671
672 let load_buffer = if worktree.read(cx).is_local() {
673 self.open_local_buffer(&project_path.path, &worktree, cx)
674 } else {
675 self.open_remote_buffer(&project_path.path, &worktree, cx)
676 };
677
678 cx.spawn(move |this, mut cx| async move {
679 let load_result = load_buffer.await;
680 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
681 // Record the fact that the buffer is no longer loading.
682 this.loading_buffers.remove(&project_path);
683 let buffer = load_result.map_err(Arc::new)?;
684 Ok(buffer)
685 }));
686 })
687 .detach();
688 rx
689 }
690 };
691
692 cx.foreground().spawn(async move {
693 loop {
694 if let Some(result) = loading_watch.borrow().as_ref() {
695 match result {
696 Ok(buffer) => return Ok(buffer.clone()),
697 Err(error) => return Err(anyhow!("{}", error)),
698 }
699 }
700 loading_watch.next().await;
701 }
702 })
703 }
704
705 fn open_local_buffer(
706 &mut self,
707 path: &Arc<Path>,
708 worktree: &ModelHandle<Worktree>,
709 cx: &mut ModelContext<Self>,
710 ) -> Task<Result<ModelHandle<Buffer>>> {
711 let load_buffer = worktree.update(cx, |worktree, cx| {
712 let worktree = worktree.as_local_mut().unwrap();
713 worktree.load_buffer(path, cx)
714 });
715 let worktree = worktree.downgrade();
716 cx.spawn(|this, mut cx| async move {
717 let buffer = load_buffer.await?;
718 let worktree = worktree
719 .upgrade(&cx)
720 .ok_or_else(|| anyhow!("worktree was removed"))?;
721 this.update(&mut cx, |this, cx| {
722 this.register_buffer(&buffer, Some(&worktree), cx)
723 })?;
724 Ok(buffer)
725 })
726 }
727
728 fn open_remote_buffer(
729 &mut self,
730 path: &Arc<Path>,
731 worktree: &ModelHandle<Worktree>,
732 cx: &mut ModelContext<Self>,
733 ) -> Task<Result<ModelHandle<Buffer>>> {
734 let rpc = self.client.clone();
735 let project_id = self.remote_id().unwrap();
736 let remote_worktree_id = worktree.read(cx).id();
737 let path = path.clone();
738 let path_string = path.to_string_lossy().to_string();
739 cx.spawn(|this, mut cx| async move {
740 let response = rpc
741 .request(proto::OpenBuffer {
742 project_id,
743 worktree_id: remote_worktree_id.to_proto(),
744 path: path_string,
745 })
746 .await?;
747 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
748 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
749 .await
750 })
751 }
752
753 fn open_local_buffer_via_lsp(
754 &mut self,
755 abs_path: lsp::Url,
756 lang_name: String,
757 lang_server: Arc<LanguageServer>,
758 cx: &mut ModelContext<Self>,
759 ) -> Task<Result<ModelHandle<Buffer>>> {
760 cx.spawn(|this, mut cx| async move {
761 let abs_path = abs_path
762 .to_file_path()
763 .map_err(|_| anyhow!("can't convert URI to path"))?;
764 let (worktree, relative_path) = if let Some(result) =
765 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
766 {
767 result
768 } else {
769 let worktree = this
770 .update(&mut cx, |this, cx| {
771 this.create_local_worktree(&abs_path, false, cx)
772 })
773 .await?;
774 this.update(&mut cx, |this, cx| {
775 this.language_servers
776 .insert((worktree.read(cx).id(), lang_name), lang_server);
777 });
778 (worktree, PathBuf::new())
779 };
780
781 let project_path = ProjectPath {
782 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
783 path: relative_path.into(),
784 };
785 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
786 .await
787 })
788 }
789
790 pub fn save_buffer_as(
791 &self,
792 buffer: ModelHandle<Buffer>,
793 abs_path: PathBuf,
794 cx: &mut ModelContext<Project>,
795 ) -> Task<Result<()>> {
796 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
797 cx.spawn(|this, mut cx| async move {
798 let (worktree, path) = worktree_task.await?;
799 worktree
800 .update(&mut cx, |worktree, cx| {
801 worktree
802 .as_local_mut()
803 .unwrap()
804 .save_buffer_as(buffer.clone(), path, cx)
805 })
806 .await?;
807 this.update(&mut cx, |this, cx| {
808 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
809 });
810 Ok(())
811 })
812 }
813
814 #[cfg(any(test, feature = "test-support"))]
815 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
816 let path = path.into();
817 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
818 self.opened_buffers.iter().any(|(_, buffer)| {
819 if let Some(buffer) = buffer.upgrade(cx) {
820 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
821 if file.worktree == worktree && file.path() == &path.path {
822 return true;
823 }
824 }
825 }
826 false
827 })
828 } else {
829 false
830 }
831 }
832
833 pub fn get_open_buffer(
834 &mut self,
835 path: &ProjectPath,
836 cx: &mut ModelContext<Self>,
837 ) -> Option<ModelHandle<Buffer>> {
838 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
839 self.opened_buffers.values().find_map(|buffer| {
840 let buffer = buffer.upgrade(cx)?;
841 let file = File::from_dyn(buffer.read(cx).file())?;
842 if file.worktree == worktree && file.path() == &path.path {
843 Some(buffer)
844 } else {
845 None
846 }
847 })
848 }
849
850 fn register_buffer(
851 &mut self,
852 buffer: &ModelHandle<Buffer>,
853 worktree: Option<&ModelHandle<Worktree>>,
854 cx: &mut ModelContext<Self>,
855 ) -> Result<()> {
856 let remote_id = buffer.read(cx).remote_id();
857 let open_buffer = if self.is_remote() || self.is_shared() {
858 OpenBuffer::Strong(buffer.clone())
859 } else {
860 OpenBuffer::Weak(buffer.downgrade())
861 };
862
863 match self.opened_buffers.insert(remote_id, open_buffer) {
864 None => {}
865 Some(OpenBuffer::Loading(operations)) => {
866 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
867 }
868 Some(OpenBuffer::Weak(existing_handle)) => {
869 if existing_handle.upgrade(cx).is_some() {
870 Err(anyhow!(
871 "already registered buffer with remote id {}",
872 remote_id
873 ))?
874 }
875 }
876 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
877 "already registered buffer with remote id {}",
878 remote_id
879 ))?,
880 }
881 self.assign_language_to_buffer(&buffer, worktree, cx);
882 Ok(())
883 }
884
885 fn assign_language_to_buffer(
886 &mut self,
887 buffer: &ModelHandle<Buffer>,
888 worktree: Option<&ModelHandle<Worktree>>,
889 cx: &mut ModelContext<Self>,
890 ) -> Option<()> {
891 let (path, full_path) = {
892 let file = buffer.read(cx).file()?;
893 (file.path().clone(), file.full_path(cx))
894 };
895
896 // If the buffer has a language, set it and start/assign the language server
897 if let Some(language) = self.languages.select_language(&full_path) {
898 buffer.update(cx, |buffer, cx| {
899 buffer.set_language(Some(language.clone()), cx);
900 });
901
902 // For local worktrees, start a language server if needed.
903 // Also assign the language server and any previously stored diagnostics to the buffer.
904 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
905 let worktree_id = local_worktree.id();
906 let worktree_abs_path = local_worktree.abs_path().clone();
907 let buffer = buffer.downgrade();
908 let language_server =
909 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
910
911 cx.spawn_weak(|_, mut cx| async move {
912 if let Some(language_server) = language_server.await {
913 if let Some(buffer) = buffer.upgrade(&cx) {
914 buffer.update(&mut cx, |buffer, cx| {
915 buffer.set_language_server(Some(language_server), cx);
916 });
917 }
918 }
919 })
920 .detach();
921 }
922 }
923
924 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
925 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
926 buffer.update(cx, |buffer, cx| {
927 buffer.update_diagnostics(diagnostics, None, cx).log_err();
928 });
929 }
930 }
931
932 None
933 }
934
935 fn start_language_server(
936 &mut self,
937 worktree_id: WorktreeId,
938 worktree_path: Arc<Path>,
939 language: Arc<Language>,
940 cx: &mut ModelContext<Self>,
941 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
942 enum LspEvent {
943 DiagnosticsStart,
944 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
945 DiagnosticsFinish,
946 }
947
948 let key = (worktree_id, language.name().to_string());
949 self.started_language_servers
950 .entry(key.clone())
951 .or_insert_with(|| {
952 let language_server = self.languages.start_language_server(
953 &language,
954 worktree_path,
955 self.client.http_client(),
956 cx,
957 );
958 let rpc = self.client.clone();
959 cx.spawn_weak(|this, mut cx| async move {
960 let language_server = language_server?.await.log_err()?;
961 if let Some(this) = this.upgrade(&cx) {
962 this.update(&mut cx, |this, _| {
963 this.language_servers.insert(key, language_server.clone());
964 });
965 }
966
967 let disk_based_sources = language
968 .disk_based_diagnostic_sources()
969 .cloned()
970 .unwrap_or_default();
971 let disk_based_diagnostics_progress_token =
972 language.disk_based_diagnostics_progress_token().cloned();
973 let has_disk_based_diagnostic_progress_token =
974 disk_based_diagnostics_progress_token.is_some();
975 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
976
977 // Listen for `PublishDiagnostics` notifications.
978 language_server
979 .on_notification::<lsp::notification::PublishDiagnostics, _>({
980 let diagnostics_tx = diagnostics_tx.clone();
981 move |params| {
982 if !has_disk_based_diagnostic_progress_token {
983 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
984 }
985 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
986 .ok();
987 if !has_disk_based_diagnostic_progress_token {
988 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
989 }
990 }
991 })
992 .detach();
993
994 // Listen for `Progress` notifications. Send an event when the language server
995 // transitions between running jobs and not running any jobs.
996 let mut running_jobs_for_this_server: i32 = 0;
997 language_server
998 .on_notification::<lsp::notification::Progress, _>(move |params| {
999 let token = match params.token {
1000 lsp::NumberOrString::Number(_) => None,
1001 lsp::NumberOrString::String(token) => Some(token),
1002 };
1003
1004 if token == disk_based_diagnostics_progress_token {
1005 match params.value {
1006 lsp::ProgressParamsValue::WorkDone(progress) => {
1007 match progress {
1008 lsp::WorkDoneProgress::Begin(_) => {
1009 running_jobs_for_this_server += 1;
1010 if running_jobs_for_this_server == 1 {
1011 block_on(
1012 diagnostics_tx
1013 .send(LspEvent::DiagnosticsStart),
1014 )
1015 .ok();
1016 }
1017 }
1018 lsp::WorkDoneProgress::End(_) => {
1019 running_jobs_for_this_server -= 1;
1020 if running_jobs_for_this_server == 0 {
1021 block_on(
1022 diagnostics_tx
1023 .send(LspEvent::DiagnosticsFinish),
1024 )
1025 .ok();
1026 }
1027 }
1028 _ => {}
1029 }
1030 }
1031 }
1032 }
1033 })
1034 .detach();
1035
1036 // Process all the LSP events.
1037 cx.spawn(|mut cx| async move {
1038 while let Ok(message) = diagnostics_rx.recv().await {
1039 let this = this.upgrade(&cx)?;
1040 match message {
1041 LspEvent::DiagnosticsStart => {
1042 this.update(&mut cx, |this, cx| {
1043 this.disk_based_diagnostics_started(cx);
1044 if let Some(project_id) = this.remote_id() {
1045 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1046 project_id,
1047 })
1048 .log_err();
1049 }
1050 });
1051 }
1052 LspEvent::DiagnosticsUpdate(mut params) => {
1053 language.process_diagnostics(&mut params);
1054 this.update(&mut cx, |this, cx| {
1055 this.update_diagnostics(params, &disk_based_sources, cx)
1056 .log_err();
1057 });
1058 }
1059 LspEvent::DiagnosticsFinish => {
1060 this.update(&mut cx, |this, cx| {
1061 this.disk_based_diagnostics_finished(cx);
1062 if let Some(project_id) = this.remote_id() {
1063 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1064 project_id,
1065 })
1066 .log_err();
1067 }
1068 });
1069 }
1070 }
1071 }
1072 Some(())
1073 })
1074 .detach();
1075
1076 Some(language_server)
1077 })
1078 .shared()
1079 })
1080 .clone()
1081 }
1082
1083 pub fn update_diagnostics(
1084 &mut self,
1085 params: lsp::PublishDiagnosticsParams,
1086 disk_based_sources: &HashSet<String>,
1087 cx: &mut ModelContext<Self>,
1088 ) -> Result<()> {
1089 let abs_path = params
1090 .uri
1091 .to_file_path()
1092 .map_err(|_| anyhow!("URI is not a file"))?;
1093 let mut next_group_id = 0;
1094 let mut diagnostics = Vec::default();
1095 let mut primary_diagnostic_group_ids = HashMap::default();
1096 let mut sources_by_group_id = HashMap::default();
1097 let mut supporting_diagnostic_severities = HashMap::default();
1098 for diagnostic in ¶ms.diagnostics {
1099 let source = diagnostic.source.as_ref();
1100 let code = diagnostic.code.as_ref().map(|code| match code {
1101 lsp::NumberOrString::Number(code) => code.to_string(),
1102 lsp::NumberOrString::String(code) => code.clone(),
1103 });
1104 let range = range_from_lsp(diagnostic.range);
1105 let is_supporting = diagnostic
1106 .related_information
1107 .as_ref()
1108 .map_or(false, |infos| {
1109 infos.iter().any(|info| {
1110 primary_diagnostic_group_ids.contains_key(&(
1111 source,
1112 code.clone(),
1113 range_from_lsp(info.location.range),
1114 ))
1115 })
1116 });
1117
1118 if is_supporting {
1119 if let Some(severity) = diagnostic.severity {
1120 supporting_diagnostic_severities
1121 .insert((source, code.clone(), range), severity);
1122 }
1123 } else {
1124 let group_id = post_inc(&mut next_group_id);
1125 let is_disk_based =
1126 source.map_or(false, |source| disk_based_sources.contains(source));
1127
1128 sources_by_group_id.insert(group_id, source);
1129 primary_diagnostic_group_ids
1130 .insert((source, code.clone(), range.clone()), group_id);
1131
1132 diagnostics.push(DiagnosticEntry {
1133 range,
1134 diagnostic: Diagnostic {
1135 code: code.clone(),
1136 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1137 message: diagnostic.message.clone(),
1138 group_id,
1139 is_primary: true,
1140 is_valid: true,
1141 is_disk_based,
1142 },
1143 });
1144 if let Some(infos) = &diagnostic.related_information {
1145 for info in infos {
1146 if info.location.uri == params.uri && !info.message.is_empty() {
1147 let range = range_from_lsp(info.location.range);
1148 diagnostics.push(DiagnosticEntry {
1149 range,
1150 diagnostic: Diagnostic {
1151 code: code.clone(),
1152 severity: DiagnosticSeverity::INFORMATION,
1153 message: info.message.clone(),
1154 group_id,
1155 is_primary: false,
1156 is_valid: true,
1157 is_disk_based,
1158 },
1159 });
1160 }
1161 }
1162 }
1163 }
1164 }
1165
1166 for entry in &mut diagnostics {
1167 let diagnostic = &mut entry.diagnostic;
1168 if !diagnostic.is_primary {
1169 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1170 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1171 source,
1172 diagnostic.code.clone(),
1173 entry.range.clone(),
1174 )) {
1175 diagnostic.severity = severity;
1176 }
1177 }
1178 }
1179
1180 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1181 Ok(())
1182 }
1183
1184 pub fn update_diagnostic_entries(
1185 &mut self,
1186 abs_path: PathBuf,
1187 version: Option<i32>,
1188 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1189 cx: &mut ModelContext<Project>,
1190 ) -> Result<(), anyhow::Error> {
1191 let (worktree, relative_path) = self
1192 .find_local_worktree(&abs_path, cx)
1193 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1194 let project_path = ProjectPath {
1195 worktree_id: worktree.read(cx).id(),
1196 path: relative_path.into(),
1197 };
1198
1199 for buffer in self.opened_buffers.values() {
1200 if let Some(buffer) = buffer.upgrade(cx) {
1201 if buffer
1202 .read(cx)
1203 .file()
1204 .map_or(false, |file| *file.path() == project_path.path)
1205 {
1206 buffer.update(cx, |buffer, cx| {
1207 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1208 })?;
1209 break;
1210 }
1211 }
1212 }
1213 worktree.update(cx, |worktree, cx| {
1214 worktree
1215 .as_local_mut()
1216 .ok_or_else(|| anyhow!("not a local worktree"))?
1217 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1218 })?;
1219 cx.emit(Event::DiagnosticsUpdated(project_path));
1220 Ok(())
1221 }
1222
1223 pub fn format(
1224 &self,
1225 buffers: HashSet<ModelHandle<Buffer>>,
1226 push_to_history: bool,
1227 cx: &mut ModelContext<Project>,
1228 ) -> Task<Result<ProjectTransaction>> {
1229 let mut local_buffers = Vec::new();
1230 let mut remote_buffers = None;
1231 for buffer_handle in buffers {
1232 let buffer = buffer_handle.read(cx);
1233 let worktree;
1234 if let Some(file) = File::from_dyn(buffer.file()) {
1235 worktree = file.worktree.clone();
1236 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1237 let lang_server;
1238 if let Some(lang) = buffer.language() {
1239 if let Some(server) = self
1240 .language_servers
1241 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1242 {
1243 lang_server = server.clone();
1244 } else {
1245 return Task::ready(Ok(Default::default()));
1246 };
1247 } else {
1248 return Task::ready(Ok(Default::default()));
1249 }
1250
1251 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1252 } else {
1253 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1254 }
1255 } else {
1256 return Task::ready(Ok(Default::default()));
1257 }
1258 }
1259
1260 let remote_buffers = self.remote_id().zip(remote_buffers);
1261 let client = self.client.clone();
1262
1263 cx.spawn(|this, mut cx| async move {
1264 let mut project_transaction = ProjectTransaction::default();
1265
1266 if let Some((project_id, remote_buffers)) = remote_buffers {
1267 let response = client
1268 .request(proto::FormatBuffers {
1269 project_id,
1270 buffer_ids: remote_buffers
1271 .iter()
1272 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1273 .collect(),
1274 })
1275 .await?
1276 .transaction
1277 .ok_or_else(|| anyhow!("missing transaction"))?;
1278 project_transaction = this
1279 .update(&mut cx, |this, cx| {
1280 this.deserialize_project_transaction(response, push_to_history, cx)
1281 })
1282 .await?;
1283 }
1284
1285 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1286 let lsp_edits = lang_server
1287 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1288 text_document: lsp::TextDocumentIdentifier::new(
1289 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1290 ),
1291 options: Default::default(),
1292 work_done_progress_params: Default::default(),
1293 })
1294 .await?;
1295
1296 if let Some(lsp_edits) = lsp_edits {
1297 let edits = buffer
1298 .update(&mut cx, |buffer, cx| {
1299 buffer.edits_from_lsp(lsp_edits, None, cx)
1300 })
1301 .await?;
1302 buffer.update(&mut cx, |buffer, cx| {
1303 buffer.finalize_last_transaction();
1304 buffer.start_transaction();
1305 for (range, text) in edits {
1306 buffer.edit([range], text, cx);
1307 }
1308 if buffer.end_transaction(cx).is_some() {
1309 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1310 if !push_to_history {
1311 buffer.forget_transaction(transaction.id);
1312 }
1313 project_transaction.0.insert(cx.handle(), transaction);
1314 }
1315 });
1316 }
1317 }
1318
1319 Ok(project_transaction)
1320 })
1321 }
1322
1323 pub fn definition<T: ToPointUtf16>(
1324 &self,
1325 buffer: &ModelHandle<Buffer>,
1326 position: T,
1327 cx: &mut ModelContext<Self>,
1328 ) -> Task<Result<Vec<Location>>> {
1329 let position = position.to_point_utf16(buffer.read(cx));
1330 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1331 }
1332
1333 pub fn references<T: ToPointUtf16>(
1334 &self,
1335 buffer: &ModelHandle<Buffer>,
1336 position: T,
1337 cx: &mut ModelContext<Self>,
1338 ) -> Task<Result<Vec<Location>>> {
1339 let position = position.to_point_utf16(buffer.read(cx));
1340 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1341 }
1342
1343 pub fn document_highlights<T: ToPointUtf16>(
1344 &self,
1345 buffer: &ModelHandle<Buffer>,
1346 position: T,
1347 cx: &mut ModelContext<Self>,
1348 ) -> Task<Result<Vec<DocumentHighlight>>> {
1349 let position = position.to_point_utf16(buffer.read(cx));
1350 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1351 }
1352
1353 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1354 if self.is_local() {
1355 let mut language_servers = HashMap::default();
1356 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1357 if let Some((worktree, language)) = self
1358 .worktree_for_id(*worktree_id, cx)
1359 .and_then(|worktree| worktree.read(cx).as_local())
1360 .zip(self.languages.get_language(language_name))
1361 {
1362 language_servers
1363 .entry(Arc::as_ptr(language_server))
1364 .or_insert((
1365 language_server.clone(),
1366 *worktree_id,
1367 worktree.abs_path().clone(),
1368 language.clone(),
1369 ));
1370 }
1371 }
1372
1373 let mut requests = Vec::new();
1374 for (language_server, _, _, _) in language_servers.values() {
1375 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1376 lsp::WorkspaceSymbolParams {
1377 query: query.to_string(),
1378 ..Default::default()
1379 },
1380 ));
1381 }
1382
1383 cx.spawn_weak(|this, cx| async move {
1384 let responses = futures::future::try_join_all(requests).await?;
1385
1386 let mut symbols = Vec::new();
1387 if let Some(this) = this.upgrade(&cx) {
1388 this.read_with(&cx, |this, cx| {
1389 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1390 language_servers.into_values().zip(responses)
1391 {
1392 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1393 |lsp_symbol| {
1394 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1395 let mut worktree_id = source_worktree_id;
1396 let path;
1397 if let Some((worktree, rel_path)) =
1398 this.find_local_worktree(&abs_path, cx)
1399 {
1400 worktree_id = worktree.read(cx).id();
1401 path = rel_path;
1402 } else {
1403 path = relativize_path(&worktree_abs_path, &abs_path);
1404 }
1405
1406 let label = language
1407 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1408 .unwrap_or_else(|| {
1409 CodeLabel::plain(lsp_symbol.name.clone(), None)
1410 });
1411 let signature = this.symbol_signature(worktree_id, &path);
1412
1413 Some(Symbol {
1414 source_worktree_id,
1415 worktree_id,
1416 language_name: language.name().to_string(),
1417 name: lsp_symbol.name,
1418 kind: lsp_symbol.kind,
1419 label,
1420 path,
1421 range: range_from_lsp(lsp_symbol.location.range),
1422 signature,
1423 })
1424 },
1425 ));
1426 }
1427 })
1428 }
1429
1430 Ok(symbols)
1431 })
1432 } else if let Some(project_id) = self.remote_id() {
1433 let request = self.client.request(proto::GetProjectSymbols {
1434 project_id,
1435 query: query.to_string(),
1436 });
1437 cx.spawn_weak(|this, cx| async move {
1438 let response = request.await?;
1439 let mut symbols = Vec::new();
1440 if let Some(this) = this.upgrade(&cx) {
1441 this.read_with(&cx, |this, _| {
1442 symbols.extend(
1443 response
1444 .symbols
1445 .into_iter()
1446 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1447 );
1448 })
1449 }
1450 Ok(symbols)
1451 })
1452 } else {
1453 Task::ready(Ok(Default::default()))
1454 }
1455 }
1456
1457 pub fn open_buffer_for_symbol(
1458 &mut self,
1459 symbol: &Symbol,
1460 cx: &mut ModelContext<Self>,
1461 ) -> Task<Result<ModelHandle<Buffer>>> {
1462 if self.is_local() {
1463 let language_server = if let Some(server) = self
1464 .language_servers
1465 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1466 {
1467 server.clone()
1468 } else {
1469 return Task::ready(Err(anyhow!(
1470 "language server for worktree and language not found"
1471 )));
1472 };
1473
1474 let worktree_abs_path = if let Some(worktree_abs_path) = self
1475 .worktree_for_id(symbol.worktree_id, cx)
1476 .and_then(|worktree| worktree.read(cx).as_local())
1477 .map(|local_worktree| local_worktree.abs_path())
1478 {
1479 worktree_abs_path
1480 } else {
1481 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1482 };
1483 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1484 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1485 uri
1486 } else {
1487 return Task::ready(Err(anyhow!("invalid symbol path")));
1488 };
1489
1490 self.open_local_buffer_via_lsp(
1491 symbol_uri,
1492 symbol.language_name.clone(),
1493 language_server,
1494 cx,
1495 )
1496 } else if let Some(project_id) = self.remote_id() {
1497 let request = self.client.request(proto::OpenBufferForSymbol {
1498 project_id,
1499 symbol: Some(serialize_symbol(symbol)),
1500 });
1501 cx.spawn(|this, mut cx| async move {
1502 let response = request.await?;
1503 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1504 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1505 .await
1506 })
1507 } else {
1508 Task::ready(Err(anyhow!("project does not have a remote id")))
1509 }
1510 }
1511
1512 pub fn completions<T: ToPointUtf16>(
1513 &self,
1514 source_buffer_handle: &ModelHandle<Buffer>,
1515 position: T,
1516 cx: &mut ModelContext<Self>,
1517 ) -> Task<Result<Vec<Completion>>> {
1518 let source_buffer_handle = source_buffer_handle.clone();
1519 let source_buffer = source_buffer_handle.read(cx);
1520 let buffer_id = source_buffer.remote_id();
1521 let language = source_buffer.language().cloned();
1522 let worktree;
1523 let buffer_abs_path;
1524 if let Some(file) = File::from_dyn(source_buffer.file()) {
1525 worktree = file.worktree.clone();
1526 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1527 } else {
1528 return Task::ready(Ok(Default::default()));
1529 };
1530
1531 let position = position.to_point_utf16(source_buffer);
1532 let anchor = source_buffer.anchor_after(position);
1533
1534 if worktree.read(cx).as_local().is_some() {
1535 let buffer_abs_path = buffer_abs_path.unwrap();
1536 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1537 server
1538 } else {
1539 return Task::ready(Ok(Default::default()));
1540 };
1541
1542 cx.spawn(|_, cx| async move {
1543 let completions = lang_server
1544 .request::<lsp::request::Completion>(lsp::CompletionParams {
1545 text_document_position: lsp::TextDocumentPositionParams::new(
1546 lsp::TextDocumentIdentifier::new(
1547 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1548 ),
1549 position.to_lsp_position(),
1550 ),
1551 context: Default::default(),
1552 work_done_progress_params: Default::default(),
1553 partial_result_params: Default::default(),
1554 })
1555 .await
1556 .context("lsp completion request failed")?;
1557
1558 let completions = if let Some(completions) = completions {
1559 match completions {
1560 lsp::CompletionResponse::Array(completions) => completions,
1561 lsp::CompletionResponse::List(list) => list.items,
1562 }
1563 } else {
1564 Default::default()
1565 };
1566
1567 source_buffer_handle.read_with(&cx, |this, _| {
1568 Ok(completions
1569 .into_iter()
1570 .filter_map(|lsp_completion| {
1571 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1572 lsp::CompletionTextEdit::Edit(edit) => {
1573 (range_from_lsp(edit.range), edit.new_text.clone())
1574 }
1575 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1576 log::info!("unsupported insert/replace completion");
1577 return None;
1578 }
1579 };
1580
1581 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1582 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1583 if clipped_start == old_range.start && clipped_end == old_range.end {
1584 Some(Completion {
1585 old_range: this.anchor_before(old_range.start)
1586 ..this.anchor_after(old_range.end),
1587 new_text,
1588 label: language
1589 .as_ref()
1590 .and_then(|l| l.label_for_completion(&lsp_completion))
1591 .unwrap_or_else(|| {
1592 CodeLabel::plain(
1593 lsp_completion.label.clone(),
1594 lsp_completion.filter_text.as_deref(),
1595 )
1596 }),
1597 lsp_completion,
1598 })
1599 } else {
1600 None
1601 }
1602 })
1603 .collect())
1604 })
1605 })
1606 } else if let Some(project_id) = self.remote_id() {
1607 let rpc = self.client.clone();
1608 let message = proto::GetCompletions {
1609 project_id,
1610 buffer_id,
1611 position: Some(language::proto::serialize_anchor(&anchor)),
1612 version: (&source_buffer.version()).into(),
1613 };
1614 cx.spawn_weak(|_, mut cx| async move {
1615 let response = rpc.request(message).await?;
1616
1617 source_buffer_handle
1618 .update(&mut cx, |buffer, _| {
1619 buffer.wait_for_version(response.version.into())
1620 })
1621 .await;
1622
1623 response
1624 .completions
1625 .into_iter()
1626 .map(|completion| {
1627 language::proto::deserialize_completion(completion, language.as_ref())
1628 })
1629 .collect()
1630 })
1631 } else {
1632 Task::ready(Ok(Default::default()))
1633 }
1634 }
1635
1636 pub fn apply_additional_edits_for_completion(
1637 &self,
1638 buffer_handle: ModelHandle<Buffer>,
1639 completion: Completion,
1640 push_to_history: bool,
1641 cx: &mut ModelContext<Self>,
1642 ) -> Task<Result<Option<Transaction>>> {
1643 let buffer = buffer_handle.read(cx);
1644 let buffer_id = buffer.remote_id();
1645
1646 if self.is_local() {
1647 let lang_server = if let Some(language_server) = buffer.language_server() {
1648 language_server.clone()
1649 } else {
1650 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1651 };
1652
1653 cx.spawn(|_, mut cx| async move {
1654 let resolved_completion = lang_server
1655 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1656 .await?;
1657 if let Some(edits) = resolved_completion.additional_text_edits {
1658 let edits = buffer_handle
1659 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1660 .await?;
1661 buffer_handle.update(&mut cx, |buffer, cx| {
1662 buffer.finalize_last_transaction();
1663 buffer.start_transaction();
1664 for (range, text) in edits {
1665 buffer.edit([range], text, cx);
1666 }
1667 let transaction = if buffer.end_transaction(cx).is_some() {
1668 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1669 if !push_to_history {
1670 buffer.forget_transaction(transaction.id);
1671 }
1672 Some(transaction)
1673 } else {
1674 None
1675 };
1676 Ok(transaction)
1677 })
1678 } else {
1679 Ok(None)
1680 }
1681 })
1682 } else if let Some(project_id) = self.remote_id() {
1683 let client = self.client.clone();
1684 cx.spawn(|_, mut cx| async move {
1685 let response = client
1686 .request(proto::ApplyCompletionAdditionalEdits {
1687 project_id,
1688 buffer_id,
1689 completion: Some(language::proto::serialize_completion(&completion)),
1690 })
1691 .await?;
1692
1693 if let Some(transaction) = response.transaction {
1694 let transaction = language::proto::deserialize_transaction(transaction)?;
1695 buffer_handle
1696 .update(&mut cx, |buffer, _| {
1697 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1698 })
1699 .await;
1700 if push_to_history {
1701 buffer_handle.update(&mut cx, |buffer, _| {
1702 buffer.push_transaction(transaction.clone(), Instant::now());
1703 });
1704 }
1705 Ok(Some(transaction))
1706 } else {
1707 Ok(None)
1708 }
1709 })
1710 } else {
1711 Task::ready(Err(anyhow!("project does not have a remote id")))
1712 }
1713 }
1714
1715 pub fn code_actions<T: ToOffset>(
1716 &self,
1717 buffer_handle: &ModelHandle<Buffer>,
1718 range: Range<T>,
1719 cx: &mut ModelContext<Self>,
1720 ) -> Task<Result<Vec<CodeAction>>> {
1721 let buffer_handle = buffer_handle.clone();
1722 let buffer = buffer_handle.read(cx);
1723 let buffer_id = buffer.remote_id();
1724 let worktree;
1725 let buffer_abs_path;
1726 if let Some(file) = File::from_dyn(buffer.file()) {
1727 worktree = file.worktree.clone();
1728 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1729 } else {
1730 return Task::ready(Ok(Default::default()));
1731 };
1732 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1733
1734 if worktree.read(cx).as_local().is_some() {
1735 let buffer_abs_path = buffer_abs_path.unwrap();
1736 let lang_name;
1737 let lang_server;
1738 if let Some(lang) = buffer.language() {
1739 lang_name = lang.name().to_string();
1740 if let Some(server) = self
1741 .language_servers
1742 .get(&(worktree.read(cx).id(), lang_name.clone()))
1743 {
1744 lang_server = server.clone();
1745 } else {
1746 return Task::ready(Ok(Default::default()));
1747 };
1748 } else {
1749 return Task::ready(Ok(Default::default()));
1750 }
1751
1752 let lsp_range = lsp::Range::new(
1753 range.start.to_point_utf16(buffer).to_lsp_position(),
1754 range.end.to_point_utf16(buffer).to_lsp_position(),
1755 );
1756 cx.foreground().spawn(async move {
1757 Ok(lang_server
1758 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1759 text_document: lsp::TextDocumentIdentifier::new(
1760 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1761 ),
1762 range: lsp_range,
1763 work_done_progress_params: Default::default(),
1764 partial_result_params: Default::default(),
1765 context: lsp::CodeActionContext {
1766 diagnostics: Default::default(),
1767 only: Some(vec![
1768 lsp::CodeActionKind::QUICKFIX,
1769 lsp::CodeActionKind::REFACTOR,
1770 lsp::CodeActionKind::REFACTOR_EXTRACT,
1771 ]),
1772 },
1773 })
1774 .await?
1775 .unwrap_or_default()
1776 .into_iter()
1777 .filter_map(|entry| {
1778 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1779 Some(CodeAction {
1780 range: range.clone(),
1781 lsp_action,
1782 })
1783 } else {
1784 None
1785 }
1786 })
1787 .collect())
1788 })
1789 } else if let Some(project_id) = self.remote_id() {
1790 let rpc = self.client.clone();
1791 cx.spawn_weak(|_, mut cx| async move {
1792 let response = rpc
1793 .request(proto::GetCodeActions {
1794 project_id,
1795 buffer_id,
1796 start: Some(language::proto::serialize_anchor(&range.start)),
1797 end: Some(language::proto::serialize_anchor(&range.end)),
1798 })
1799 .await?;
1800
1801 buffer_handle
1802 .update(&mut cx, |buffer, _| {
1803 buffer.wait_for_version(response.version.into())
1804 })
1805 .await;
1806
1807 response
1808 .actions
1809 .into_iter()
1810 .map(language::proto::deserialize_code_action)
1811 .collect()
1812 })
1813 } else {
1814 Task::ready(Ok(Default::default()))
1815 }
1816 }
1817
1818 pub fn apply_code_action(
1819 &self,
1820 buffer_handle: ModelHandle<Buffer>,
1821 mut action: CodeAction,
1822 push_to_history: bool,
1823 cx: &mut ModelContext<Self>,
1824 ) -> Task<Result<ProjectTransaction>> {
1825 if self.is_local() {
1826 let buffer = buffer_handle.read(cx);
1827 let lang_name = if let Some(lang) = buffer.language() {
1828 lang.name().to_string()
1829 } else {
1830 return Task::ready(Ok(Default::default()));
1831 };
1832 let lang_server = if let Some(language_server) = buffer.language_server() {
1833 language_server.clone()
1834 } else {
1835 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1836 };
1837 let range = action.range.to_point_utf16(buffer);
1838
1839 cx.spawn(|this, mut cx| async move {
1840 if let Some(lsp_range) = action
1841 .lsp_action
1842 .data
1843 .as_mut()
1844 .and_then(|d| d.get_mut("codeActionParams"))
1845 .and_then(|d| d.get_mut("range"))
1846 {
1847 *lsp_range = serde_json::to_value(&lsp::Range::new(
1848 range.start.to_lsp_position(),
1849 range.end.to_lsp_position(),
1850 ))
1851 .unwrap();
1852 action.lsp_action = lang_server
1853 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1854 .await?;
1855 } else {
1856 let actions = this
1857 .update(&mut cx, |this, cx| {
1858 this.code_actions(&buffer_handle, action.range, cx)
1859 })
1860 .await?;
1861 action.lsp_action = actions
1862 .into_iter()
1863 .find(|a| a.lsp_action.title == action.lsp_action.title)
1864 .ok_or_else(|| anyhow!("code action is outdated"))?
1865 .lsp_action;
1866 }
1867
1868 if let Some(edit) = action.lsp_action.edit {
1869 Self::deserialize_workspace_edit(
1870 this,
1871 edit,
1872 push_to_history,
1873 lang_name,
1874 lang_server,
1875 &mut cx,
1876 )
1877 .await
1878 } else {
1879 Ok(ProjectTransaction::default())
1880 }
1881 })
1882 } else if let Some(project_id) = self.remote_id() {
1883 let client = self.client.clone();
1884 let request = proto::ApplyCodeAction {
1885 project_id,
1886 buffer_id: buffer_handle.read(cx).remote_id(),
1887 action: Some(language::proto::serialize_code_action(&action)),
1888 };
1889 cx.spawn(|this, mut cx| async move {
1890 let response = client
1891 .request(request)
1892 .await?
1893 .transaction
1894 .ok_or_else(|| anyhow!("missing transaction"))?;
1895 this.update(&mut cx, |this, cx| {
1896 this.deserialize_project_transaction(response, push_to_history, cx)
1897 })
1898 .await
1899 })
1900 } else {
1901 Task::ready(Err(anyhow!("project does not have a remote id")))
1902 }
1903 }
1904
1905 async fn deserialize_workspace_edit(
1906 this: ModelHandle<Self>,
1907 edit: lsp::WorkspaceEdit,
1908 push_to_history: bool,
1909 language_name: String,
1910 language_server: Arc<LanguageServer>,
1911 cx: &mut AsyncAppContext,
1912 ) -> Result<ProjectTransaction> {
1913 let fs = this.read_with(cx, |this, _| this.fs.clone());
1914 let mut operations = Vec::new();
1915 if let Some(document_changes) = edit.document_changes {
1916 match document_changes {
1917 lsp::DocumentChanges::Edits(edits) => {
1918 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1919 }
1920 lsp::DocumentChanges::Operations(ops) => operations = ops,
1921 }
1922 } else if let Some(changes) = edit.changes {
1923 operations.extend(changes.into_iter().map(|(uri, edits)| {
1924 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1925 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1926 uri,
1927 version: None,
1928 },
1929 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1930 })
1931 }));
1932 }
1933
1934 let mut project_transaction = ProjectTransaction::default();
1935 for operation in operations {
1936 match operation {
1937 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1938 let abs_path = op
1939 .uri
1940 .to_file_path()
1941 .map_err(|_| anyhow!("can't convert URI to path"))?;
1942
1943 if let Some(parent_path) = abs_path.parent() {
1944 fs.create_dir(parent_path).await?;
1945 }
1946 if abs_path.ends_with("/") {
1947 fs.create_dir(&abs_path).await?;
1948 } else {
1949 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1950 .await?;
1951 }
1952 }
1953 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1954 let source_abs_path = op
1955 .old_uri
1956 .to_file_path()
1957 .map_err(|_| anyhow!("can't convert URI to path"))?;
1958 let target_abs_path = op
1959 .new_uri
1960 .to_file_path()
1961 .map_err(|_| anyhow!("can't convert URI to path"))?;
1962 fs.rename(
1963 &source_abs_path,
1964 &target_abs_path,
1965 op.options.map(Into::into).unwrap_or_default(),
1966 )
1967 .await?;
1968 }
1969 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1970 let abs_path = op
1971 .uri
1972 .to_file_path()
1973 .map_err(|_| anyhow!("can't convert URI to path"))?;
1974 let options = op.options.map(Into::into).unwrap_or_default();
1975 if abs_path.ends_with("/") {
1976 fs.remove_dir(&abs_path, options).await?;
1977 } else {
1978 fs.remove_file(&abs_path, options).await?;
1979 }
1980 }
1981 lsp::DocumentChangeOperation::Edit(op) => {
1982 let buffer_to_edit = this
1983 .update(cx, |this, cx| {
1984 this.open_local_buffer_via_lsp(
1985 op.text_document.uri,
1986 language_name.clone(),
1987 language_server.clone(),
1988 cx,
1989 )
1990 })
1991 .await?;
1992
1993 let edits = buffer_to_edit
1994 .update(cx, |buffer, cx| {
1995 let edits = op.edits.into_iter().map(|edit| match edit {
1996 lsp::OneOf::Left(edit) => edit,
1997 lsp::OneOf::Right(edit) => edit.text_edit,
1998 });
1999 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2000 })
2001 .await?;
2002
2003 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2004 buffer.finalize_last_transaction();
2005 buffer.start_transaction();
2006 for (range, text) in edits {
2007 buffer.edit([range], text, cx);
2008 }
2009 let transaction = if buffer.end_transaction(cx).is_some() {
2010 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2011 if !push_to_history {
2012 buffer.forget_transaction(transaction.id);
2013 }
2014 Some(transaction)
2015 } else {
2016 None
2017 };
2018
2019 transaction
2020 });
2021 if let Some(transaction) = transaction {
2022 project_transaction.0.insert(buffer_to_edit, transaction);
2023 }
2024 }
2025 }
2026 }
2027
2028 Ok(project_transaction)
2029 }
2030
2031 pub fn prepare_rename<T: ToPointUtf16>(
2032 &self,
2033 buffer: ModelHandle<Buffer>,
2034 position: T,
2035 cx: &mut ModelContext<Self>,
2036 ) -> Task<Result<Option<Range<Anchor>>>> {
2037 let position = position.to_point_utf16(buffer.read(cx));
2038 self.request_lsp(buffer, PrepareRename { position }, cx)
2039 }
2040
2041 pub fn perform_rename<T: ToPointUtf16>(
2042 &self,
2043 buffer: ModelHandle<Buffer>,
2044 position: T,
2045 new_name: String,
2046 push_to_history: bool,
2047 cx: &mut ModelContext<Self>,
2048 ) -> Task<Result<ProjectTransaction>> {
2049 let position = position.to_point_utf16(buffer.read(cx));
2050 self.request_lsp(
2051 buffer,
2052 PerformRename {
2053 position,
2054 new_name,
2055 push_to_history,
2056 },
2057 cx,
2058 )
2059 }
2060
2061 pub fn search(
2062 &self,
2063 query: SearchQuery,
2064 cx: &mut ModelContext<Self>,
2065 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2066 if self.is_local() {
2067 let snapshots = self
2068 .visible_worktrees(cx)
2069 .filter_map(|tree| {
2070 let tree = tree.read(cx).as_local()?;
2071 Some(tree.snapshot())
2072 })
2073 .collect::<Vec<_>>();
2074
2075 let background = cx.background().clone();
2076 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2077 if path_count == 0 {
2078 return Task::ready(Ok(Default::default()));
2079 }
2080 let workers = background.num_cpus().min(path_count);
2081 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2082 cx.background()
2083 .spawn({
2084 let fs = self.fs.clone();
2085 let background = cx.background().clone();
2086 let query = query.clone();
2087 async move {
2088 let fs = &fs;
2089 let query = &query;
2090 let matching_paths_tx = &matching_paths_tx;
2091 let paths_per_worker = (path_count + workers - 1) / workers;
2092 let snapshots = &snapshots;
2093 background
2094 .scoped(|scope| {
2095 for worker_ix in 0..workers {
2096 let worker_start_ix = worker_ix * paths_per_worker;
2097 let worker_end_ix = worker_start_ix + paths_per_worker;
2098 scope.spawn(async move {
2099 let mut snapshot_start_ix = 0;
2100 let mut abs_path = PathBuf::new();
2101 for snapshot in snapshots {
2102 let snapshot_end_ix =
2103 snapshot_start_ix + snapshot.visible_file_count();
2104 if worker_end_ix <= snapshot_start_ix {
2105 break;
2106 } else if worker_start_ix > snapshot_end_ix {
2107 snapshot_start_ix = snapshot_end_ix;
2108 continue;
2109 } else {
2110 let start_in_snapshot = worker_start_ix
2111 .saturating_sub(snapshot_start_ix);
2112 let end_in_snapshot =
2113 cmp::min(worker_end_ix, snapshot_end_ix)
2114 - snapshot_start_ix;
2115
2116 for entry in snapshot
2117 .files(false, start_in_snapshot)
2118 .take(end_in_snapshot - start_in_snapshot)
2119 {
2120 if matching_paths_tx.is_closed() {
2121 break;
2122 }
2123
2124 abs_path.clear();
2125 abs_path.push(&snapshot.abs_path());
2126 abs_path.push(&entry.path);
2127 let matches = if let Some(file) =
2128 fs.open_sync(&abs_path).await.log_err()
2129 {
2130 query.detect(file).unwrap_or(false)
2131 } else {
2132 false
2133 };
2134
2135 if matches {
2136 let project_path =
2137 (snapshot.id(), entry.path.clone());
2138 if matching_paths_tx
2139 .send(project_path)
2140 .await
2141 .is_err()
2142 {
2143 break;
2144 }
2145 }
2146 }
2147
2148 snapshot_start_ix = snapshot_end_ix;
2149 }
2150 }
2151 });
2152 }
2153 })
2154 .await;
2155 }
2156 })
2157 .detach();
2158
2159 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2160 let open_buffers = self
2161 .opened_buffers
2162 .values()
2163 .filter_map(|b| b.upgrade(cx))
2164 .collect::<HashSet<_>>();
2165 cx.spawn(|this, cx| async move {
2166 for buffer in &open_buffers {
2167 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2168 buffers_tx.send((buffer.clone(), snapshot)).await?;
2169 }
2170
2171 let open_buffers = Rc::new(RefCell::new(open_buffers));
2172 while let Some(project_path) = matching_paths_rx.next().await {
2173 if buffers_tx.is_closed() {
2174 break;
2175 }
2176
2177 let this = this.clone();
2178 let open_buffers = open_buffers.clone();
2179 let buffers_tx = buffers_tx.clone();
2180 cx.spawn(|mut cx| async move {
2181 if let Some(buffer) = this
2182 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2183 .await
2184 .log_err()
2185 {
2186 if open_buffers.borrow_mut().insert(buffer.clone()) {
2187 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2188 buffers_tx.send((buffer, snapshot)).await?;
2189 }
2190 }
2191
2192 Ok::<_, anyhow::Error>(())
2193 })
2194 .detach();
2195 }
2196
2197 Ok::<_, anyhow::Error>(())
2198 })
2199 .detach_and_log_err(cx);
2200
2201 let background = cx.background().clone();
2202 cx.background().spawn(async move {
2203 let query = &query;
2204 let mut matched_buffers = Vec::new();
2205 for _ in 0..workers {
2206 matched_buffers.push(HashMap::default());
2207 }
2208 background
2209 .scoped(|scope| {
2210 for worker_matched_buffers in matched_buffers.iter_mut() {
2211 let mut buffers_rx = buffers_rx.clone();
2212 scope.spawn(async move {
2213 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2214 let buffer_matches = query
2215 .search(snapshot.as_rope())
2216 .await
2217 .iter()
2218 .map(|range| {
2219 snapshot.anchor_before(range.start)
2220 ..snapshot.anchor_after(range.end)
2221 })
2222 .collect::<Vec<_>>();
2223 if !buffer_matches.is_empty() {
2224 worker_matched_buffers
2225 .insert(buffer.clone(), buffer_matches);
2226 }
2227 }
2228 });
2229 }
2230 })
2231 .await;
2232 Ok(matched_buffers.into_iter().flatten().collect())
2233 })
2234 } else if let Some(project_id) = self.remote_id() {
2235 let request = self.client.request(query.to_proto(project_id));
2236 cx.spawn(|this, mut cx| async move {
2237 let response = request.await?;
2238 let mut result = HashMap::default();
2239 for location in response.locations {
2240 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2241 let target_buffer = this
2242 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2243 .await?;
2244 let start = location
2245 .start
2246 .and_then(deserialize_anchor)
2247 .ok_or_else(|| anyhow!("missing target start"))?;
2248 let end = location
2249 .end
2250 .and_then(deserialize_anchor)
2251 .ok_or_else(|| anyhow!("missing target end"))?;
2252 result
2253 .entry(target_buffer)
2254 .or_insert(Vec::new())
2255 .push(start..end)
2256 }
2257 Ok(result)
2258 })
2259 } else {
2260 Task::ready(Ok(Default::default()))
2261 }
2262 }
2263
2264 fn request_lsp<R: LspCommand>(
2265 &self,
2266 buffer_handle: ModelHandle<Buffer>,
2267 request: R,
2268 cx: &mut ModelContext<Self>,
2269 ) -> Task<Result<R::Response>>
2270 where
2271 <R::LspRequest as lsp::request::Request>::Result: Send,
2272 {
2273 let buffer = buffer_handle.read(cx);
2274 if self.is_local() {
2275 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2276 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2277 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2278 return cx.spawn(|this, cx| async move {
2279 let response = language_server
2280 .request::<R::LspRequest>(lsp_params)
2281 .await
2282 .context("lsp request failed")?;
2283 request
2284 .response_from_lsp(response, this, buffer_handle, cx)
2285 .await
2286 });
2287 }
2288 } else if let Some(project_id) = self.remote_id() {
2289 let rpc = self.client.clone();
2290 let message = request.to_proto(project_id, buffer);
2291 return cx.spawn(|this, cx| async move {
2292 let response = rpc.request(message).await?;
2293 request
2294 .response_from_proto(response, this, buffer_handle, cx)
2295 .await
2296 });
2297 }
2298 Task::ready(Ok(Default::default()))
2299 }
2300
2301 pub fn find_or_create_local_worktree(
2302 &self,
2303 abs_path: impl AsRef<Path>,
2304 visible: bool,
2305 cx: &mut ModelContext<Self>,
2306 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2307 let abs_path = abs_path.as_ref();
2308 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2309 Task::ready(Ok((tree.clone(), relative_path.into())))
2310 } else {
2311 let worktree = self.create_local_worktree(abs_path, visible, cx);
2312 cx.foreground()
2313 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2314 }
2315 }
2316
2317 pub fn find_local_worktree(
2318 &self,
2319 abs_path: &Path,
2320 cx: &AppContext,
2321 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2322 for tree in self.worktrees(cx) {
2323 if let Some(relative_path) = tree
2324 .read(cx)
2325 .as_local()
2326 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2327 {
2328 return Some((tree.clone(), relative_path.into()));
2329 }
2330 }
2331 None
2332 }
2333
2334 pub fn is_shared(&self) -> bool {
2335 match &self.client_state {
2336 ProjectClientState::Local { is_shared, .. } => *is_shared,
2337 ProjectClientState::Remote { .. } => false,
2338 }
2339 }
2340
2341 fn create_local_worktree(
2342 &self,
2343 abs_path: impl AsRef<Path>,
2344 visible: bool,
2345 cx: &mut ModelContext<Self>,
2346 ) -> Task<Result<ModelHandle<Worktree>>> {
2347 let fs = self.fs.clone();
2348 let client = self.client.clone();
2349 let path = Arc::from(abs_path.as_ref());
2350 cx.spawn(|project, mut cx| async move {
2351 let worktree = Worktree::local(client.clone(), path, visible, fs, &mut cx).await?;
2352
2353 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2354 project.add_worktree(&worktree, cx);
2355 (project.remote_id(), project.is_shared())
2356 });
2357
2358 if let Some(project_id) = remote_project_id {
2359 worktree
2360 .update(&mut cx, |worktree, cx| {
2361 worktree.as_local_mut().unwrap().register(project_id, cx)
2362 })
2363 .await?;
2364 if is_shared {
2365 worktree
2366 .update(&mut cx, |worktree, cx| {
2367 worktree.as_local_mut().unwrap().share(project_id, cx)
2368 })
2369 .await?;
2370 }
2371 }
2372
2373 Ok(worktree)
2374 })
2375 }
2376
2377 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2378 self.worktrees.retain(|worktree| {
2379 worktree
2380 .upgrade(cx)
2381 .map_or(false, |w| w.read(cx).id() != id)
2382 });
2383 cx.notify();
2384 }
2385
2386 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2387 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2388 if worktree.read(cx).is_local() {
2389 cx.subscribe(&worktree, |this, worktree, _, cx| {
2390 this.update_local_worktree_buffers(worktree, cx);
2391 })
2392 .detach();
2393 }
2394
2395 let push_strong_handle = {
2396 let worktree = worktree.read(cx);
2397 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2398 };
2399 if push_strong_handle {
2400 self.worktrees
2401 .push(WorktreeHandle::Strong(worktree.clone()));
2402 } else {
2403 cx.observe_release(&worktree, |this, cx| {
2404 this.worktrees
2405 .retain(|worktree| worktree.upgrade(cx).is_some());
2406 cx.notify();
2407 })
2408 .detach();
2409 self.worktrees
2410 .push(WorktreeHandle::Weak(worktree.downgrade()));
2411 }
2412 cx.notify();
2413 }
2414
2415 fn update_local_worktree_buffers(
2416 &mut self,
2417 worktree_handle: ModelHandle<Worktree>,
2418 cx: &mut ModelContext<Self>,
2419 ) {
2420 let snapshot = worktree_handle.read(cx).snapshot();
2421 let mut buffers_to_delete = Vec::new();
2422 for (buffer_id, buffer) in &self.opened_buffers {
2423 if let Some(buffer) = buffer.upgrade(cx) {
2424 buffer.update(cx, |buffer, cx| {
2425 if let Some(old_file) = File::from_dyn(buffer.file()) {
2426 if old_file.worktree != worktree_handle {
2427 return;
2428 }
2429
2430 let new_file = if let Some(entry) = old_file
2431 .entry_id
2432 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2433 {
2434 File {
2435 is_local: true,
2436 entry_id: Some(entry.id),
2437 mtime: entry.mtime,
2438 path: entry.path.clone(),
2439 worktree: worktree_handle.clone(),
2440 }
2441 } else if let Some(entry) =
2442 snapshot.entry_for_path(old_file.path().as_ref())
2443 {
2444 File {
2445 is_local: true,
2446 entry_id: Some(entry.id),
2447 mtime: entry.mtime,
2448 path: entry.path.clone(),
2449 worktree: worktree_handle.clone(),
2450 }
2451 } else {
2452 File {
2453 is_local: true,
2454 entry_id: None,
2455 path: old_file.path().clone(),
2456 mtime: old_file.mtime(),
2457 worktree: worktree_handle.clone(),
2458 }
2459 };
2460
2461 if let Some(project_id) = self.remote_id() {
2462 self.client
2463 .send(proto::UpdateBufferFile {
2464 project_id,
2465 buffer_id: *buffer_id as u64,
2466 file: Some(new_file.to_proto()),
2467 })
2468 .log_err();
2469 }
2470 buffer.file_updated(Box::new(new_file), cx).detach();
2471 }
2472 });
2473 } else {
2474 buffers_to_delete.push(*buffer_id);
2475 }
2476 }
2477
2478 for buffer_id in buffers_to_delete {
2479 self.opened_buffers.remove(&buffer_id);
2480 }
2481 }
2482
2483 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2484 let new_active_entry = entry.and_then(|project_path| {
2485 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2486 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2487 Some(ProjectEntry {
2488 worktree_id: project_path.worktree_id,
2489 entry_id: entry.id,
2490 })
2491 });
2492 if new_active_entry != self.active_entry {
2493 self.active_entry = new_active_entry;
2494 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2495 }
2496 }
2497
2498 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2499 self.language_servers_with_diagnostics_running > 0
2500 }
2501
2502 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2503 let mut summary = DiagnosticSummary::default();
2504 for (_, path_summary) in self.diagnostic_summaries(cx) {
2505 summary.error_count += path_summary.error_count;
2506 summary.warning_count += path_summary.warning_count;
2507 summary.info_count += path_summary.info_count;
2508 summary.hint_count += path_summary.hint_count;
2509 }
2510 summary
2511 }
2512
2513 pub fn diagnostic_summaries<'a>(
2514 &'a self,
2515 cx: &'a AppContext,
2516 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2517 self.worktrees(cx).flat_map(move |worktree| {
2518 let worktree = worktree.read(cx);
2519 let worktree_id = worktree.id();
2520 worktree
2521 .diagnostic_summaries()
2522 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2523 })
2524 }
2525
2526 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2527 self.language_servers_with_diagnostics_running += 1;
2528 if self.language_servers_with_diagnostics_running == 1 {
2529 cx.emit(Event::DiskBasedDiagnosticsStarted);
2530 }
2531 }
2532
2533 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2534 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2535 self.language_servers_with_diagnostics_running -= 1;
2536 if self.language_servers_with_diagnostics_running == 0 {
2537 cx.emit(Event::DiskBasedDiagnosticsFinished);
2538 }
2539 }
2540
2541 pub fn active_entry(&self) -> Option<ProjectEntry> {
2542 self.active_entry
2543 }
2544
2545 // RPC message handlers
2546
2547 async fn handle_unshare_project(
2548 this: ModelHandle<Self>,
2549 _: TypedEnvelope<proto::UnshareProject>,
2550 _: Arc<Client>,
2551 mut cx: AsyncAppContext,
2552 ) -> Result<()> {
2553 this.update(&mut cx, |this, cx| {
2554 if let ProjectClientState::Remote {
2555 sharing_has_stopped,
2556 ..
2557 } = &mut this.client_state
2558 {
2559 *sharing_has_stopped = true;
2560 this.collaborators.clear();
2561 cx.notify();
2562 } else {
2563 unreachable!()
2564 }
2565 });
2566
2567 Ok(())
2568 }
2569
2570 async fn handle_add_collaborator(
2571 this: ModelHandle<Self>,
2572 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2573 _: Arc<Client>,
2574 mut cx: AsyncAppContext,
2575 ) -> Result<()> {
2576 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2577 let collaborator = envelope
2578 .payload
2579 .collaborator
2580 .take()
2581 .ok_or_else(|| anyhow!("empty collaborator"))?;
2582
2583 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2584 this.update(&mut cx, |this, cx| {
2585 this.collaborators
2586 .insert(collaborator.peer_id, collaborator);
2587 cx.notify();
2588 });
2589
2590 Ok(())
2591 }
2592
2593 async fn handle_remove_collaborator(
2594 this: ModelHandle<Self>,
2595 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2596 _: Arc<Client>,
2597 mut cx: AsyncAppContext,
2598 ) -> Result<()> {
2599 this.update(&mut cx, |this, cx| {
2600 let peer_id = PeerId(envelope.payload.peer_id);
2601 let replica_id = this
2602 .collaborators
2603 .remove(&peer_id)
2604 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2605 .replica_id;
2606 for (_, buffer) in &this.opened_buffers {
2607 if let Some(buffer) = buffer.upgrade(cx) {
2608 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2609 }
2610 }
2611 cx.notify();
2612 Ok(())
2613 })
2614 }
2615
2616 async fn handle_register_worktree(
2617 this: ModelHandle<Self>,
2618 envelope: TypedEnvelope<proto::RegisterWorktree>,
2619 client: Arc<Client>,
2620 mut cx: AsyncAppContext,
2621 ) -> Result<()> {
2622 this.update(&mut cx, |this, cx| {
2623 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2624 let replica_id = this.replica_id();
2625 let worktree = proto::Worktree {
2626 id: envelope.payload.worktree_id,
2627 root_name: envelope.payload.root_name,
2628 entries: Default::default(),
2629 diagnostic_summaries: Default::default(),
2630 visible: envelope.payload.visible,
2631 };
2632 let (worktree, load_task) =
2633 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2634 this.add_worktree(&worktree, cx);
2635 load_task.detach();
2636 Ok(())
2637 })
2638 }
2639
2640 async fn handle_unregister_worktree(
2641 this: ModelHandle<Self>,
2642 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2643 _: Arc<Client>,
2644 mut cx: AsyncAppContext,
2645 ) -> Result<()> {
2646 this.update(&mut cx, |this, cx| {
2647 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2648 this.remove_worktree(worktree_id, cx);
2649 Ok(())
2650 })
2651 }
2652
2653 async fn handle_update_worktree(
2654 this: ModelHandle<Self>,
2655 envelope: TypedEnvelope<proto::UpdateWorktree>,
2656 _: Arc<Client>,
2657 mut cx: AsyncAppContext,
2658 ) -> Result<()> {
2659 this.update(&mut cx, |this, cx| {
2660 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2661 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2662 worktree.update(cx, |worktree, _| {
2663 let worktree = worktree.as_remote_mut().unwrap();
2664 worktree.update_from_remote(envelope)
2665 })?;
2666 }
2667 Ok(())
2668 })
2669 }
2670
2671 async fn handle_update_diagnostic_summary(
2672 this: ModelHandle<Self>,
2673 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2674 _: Arc<Client>,
2675 mut cx: AsyncAppContext,
2676 ) -> Result<()> {
2677 this.update(&mut cx, |this, cx| {
2678 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2679 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2680 if let Some(summary) = envelope.payload.summary {
2681 let project_path = ProjectPath {
2682 worktree_id,
2683 path: Path::new(&summary.path).into(),
2684 };
2685 worktree.update(cx, |worktree, _| {
2686 worktree
2687 .as_remote_mut()
2688 .unwrap()
2689 .update_diagnostic_summary(project_path.path.clone(), &summary);
2690 });
2691 cx.emit(Event::DiagnosticsUpdated(project_path));
2692 }
2693 }
2694 Ok(())
2695 })
2696 }
2697
2698 async fn handle_disk_based_diagnostics_updating(
2699 this: ModelHandle<Self>,
2700 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2701 _: Arc<Client>,
2702 mut cx: AsyncAppContext,
2703 ) -> Result<()> {
2704 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2705 Ok(())
2706 }
2707
2708 async fn handle_disk_based_diagnostics_updated(
2709 this: ModelHandle<Self>,
2710 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2711 _: Arc<Client>,
2712 mut cx: AsyncAppContext,
2713 ) -> Result<()> {
2714 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2715 Ok(())
2716 }
2717
2718 async fn handle_update_buffer(
2719 this: ModelHandle<Self>,
2720 envelope: TypedEnvelope<proto::UpdateBuffer>,
2721 _: Arc<Client>,
2722 mut cx: AsyncAppContext,
2723 ) -> Result<()> {
2724 this.update(&mut cx, |this, cx| {
2725 let payload = envelope.payload.clone();
2726 let buffer_id = payload.buffer_id;
2727 let ops = payload
2728 .operations
2729 .into_iter()
2730 .map(|op| language::proto::deserialize_operation(op))
2731 .collect::<Result<Vec<_>, _>>()?;
2732 match this.opened_buffers.entry(buffer_id) {
2733 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2734 OpenBuffer::Strong(buffer) => {
2735 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2736 }
2737 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2738 OpenBuffer::Weak(_) => {}
2739 },
2740 hash_map::Entry::Vacant(e) => {
2741 e.insert(OpenBuffer::Loading(ops));
2742 }
2743 }
2744 Ok(())
2745 })
2746 }
2747
2748 async fn handle_update_buffer_file(
2749 this: ModelHandle<Self>,
2750 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2751 _: Arc<Client>,
2752 mut cx: AsyncAppContext,
2753 ) -> Result<()> {
2754 this.update(&mut cx, |this, cx| {
2755 let payload = envelope.payload.clone();
2756 let buffer_id = payload.buffer_id;
2757 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2758 let worktree = this
2759 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2760 .ok_or_else(|| anyhow!("no such worktree"))?;
2761 let file = File::from_proto(file, worktree.clone(), cx)?;
2762 let buffer = this
2763 .opened_buffers
2764 .get_mut(&buffer_id)
2765 .and_then(|b| b.upgrade(cx))
2766 .ok_or_else(|| anyhow!("no such buffer"))?;
2767 buffer.update(cx, |buffer, cx| {
2768 buffer.file_updated(Box::new(file), cx).detach();
2769 });
2770 Ok(())
2771 })
2772 }
2773
2774 async fn handle_save_buffer(
2775 this: ModelHandle<Self>,
2776 envelope: TypedEnvelope<proto::SaveBuffer>,
2777 _: Arc<Client>,
2778 mut cx: AsyncAppContext,
2779 ) -> Result<proto::BufferSaved> {
2780 let buffer_id = envelope.payload.buffer_id;
2781 let requested_version = envelope.payload.version.try_into()?;
2782
2783 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2784 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2785 let buffer = this
2786 .opened_buffers
2787 .get(&buffer_id)
2788 .map(|buffer| buffer.upgrade(cx).unwrap())
2789 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2790 Ok::<_, anyhow::Error>((project_id, buffer))
2791 })?;
2792
2793 if !buffer
2794 .read_with(&cx, |buffer, _| buffer.version())
2795 .observed_all(&requested_version)
2796 {
2797 Err(anyhow!("save request depends on unreceived edits"))?;
2798 }
2799
2800 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2801 Ok(proto::BufferSaved {
2802 project_id,
2803 buffer_id,
2804 version: (&saved_version).into(),
2805 mtime: Some(mtime.into()),
2806 })
2807 }
2808
2809 async fn handle_format_buffers(
2810 this: ModelHandle<Self>,
2811 envelope: TypedEnvelope<proto::FormatBuffers>,
2812 _: Arc<Client>,
2813 mut cx: AsyncAppContext,
2814 ) -> Result<proto::FormatBuffersResponse> {
2815 let sender_id = envelope.original_sender_id()?;
2816 let format = this.update(&mut cx, |this, cx| {
2817 let mut buffers = HashSet::default();
2818 for buffer_id in &envelope.payload.buffer_ids {
2819 buffers.insert(
2820 this.opened_buffers
2821 .get(buffer_id)
2822 .map(|buffer| buffer.upgrade(cx).unwrap())
2823 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2824 );
2825 }
2826 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2827 })?;
2828
2829 let project_transaction = format.await?;
2830 let project_transaction = this.update(&mut cx, |this, cx| {
2831 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2832 });
2833 Ok(proto::FormatBuffersResponse {
2834 transaction: Some(project_transaction),
2835 })
2836 }
2837
2838 async fn handle_get_completions(
2839 this: ModelHandle<Self>,
2840 envelope: TypedEnvelope<proto::GetCompletions>,
2841 _: Arc<Client>,
2842 mut cx: AsyncAppContext,
2843 ) -> Result<proto::GetCompletionsResponse> {
2844 let position = envelope
2845 .payload
2846 .position
2847 .and_then(language::proto::deserialize_anchor)
2848 .ok_or_else(|| anyhow!("invalid position"))?;
2849 let version = clock::Global::from(envelope.payload.version);
2850 let buffer = this.read_with(&cx, |this, cx| {
2851 this.opened_buffers
2852 .get(&envelope.payload.buffer_id)
2853 .map(|buffer| buffer.upgrade(cx).unwrap())
2854 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2855 })?;
2856 if !buffer
2857 .read_with(&cx, |buffer, _| buffer.version())
2858 .observed_all(&version)
2859 {
2860 Err(anyhow!("completion request depends on unreceived edits"))?;
2861 }
2862 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2863 let completions = this
2864 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2865 .await?;
2866
2867 Ok(proto::GetCompletionsResponse {
2868 completions: completions
2869 .iter()
2870 .map(language::proto::serialize_completion)
2871 .collect(),
2872 version: (&version).into(),
2873 })
2874 }
2875
2876 async fn handle_apply_additional_edits_for_completion(
2877 this: ModelHandle<Self>,
2878 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2879 _: Arc<Client>,
2880 mut cx: AsyncAppContext,
2881 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2882 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2883 let buffer = this
2884 .opened_buffers
2885 .get(&envelope.payload.buffer_id)
2886 .map(|buffer| buffer.upgrade(cx).unwrap())
2887 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2888 let language = buffer.read(cx).language();
2889 let completion = language::proto::deserialize_completion(
2890 envelope
2891 .payload
2892 .completion
2893 .ok_or_else(|| anyhow!("invalid completion"))?,
2894 language,
2895 )?;
2896 Ok::<_, anyhow::Error>(
2897 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2898 )
2899 })?;
2900
2901 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2902 transaction: apply_additional_edits
2903 .await?
2904 .as_ref()
2905 .map(language::proto::serialize_transaction),
2906 })
2907 }
2908
2909 async fn handle_get_code_actions(
2910 this: ModelHandle<Self>,
2911 envelope: TypedEnvelope<proto::GetCodeActions>,
2912 _: Arc<Client>,
2913 mut cx: AsyncAppContext,
2914 ) -> Result<proto::GetCodeActionsResponse> {
2915 let start = envelope
2916 .payload
2917 .start
2918 .and_then(language::proto::deserialize_anchor)
2919 .ok_or_else(|| anyhow!("invalid start"))?;
2920 let end = envelope
2921 .payload
2922 .end
2923 .and_then(language::proto::deserialize_anchor)
2924 .ok_or_else(|| anyhow!("invalid end"))?;
2925 let buffer = this.update(&mut cx, |this, cx| {
2926 this.opened_buffers
2927 .get(&envelope.payload.buffer_id)
2928 .map(|buffer| buffer.upgrade(cx).unwrap())
2929 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2930 })?;
2931 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2932 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2933 Err(anyhow!("code action request references unreceived edits"))?;
2934 }
2935 let code_actions = this.update(&mut cx, |this, cx| {
2936 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2937 })?;
2938
2939 Ok(proto::GetCodeActionsResponse {
2940 actions: code_actions
2941 .await?
2942 .iter()
2943 .map(language::proto::serialize_code_action)
2944 .collect(),
2945 version: (&version).into(),
2946 })
2947 }
2948
2949 async fn handle_apply_code_action(
2950 this: ModelHandle<Self>,
2951 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2952 _: Arc<Client>,
2953 mut cx: AsyncAppContext,
2954 ) -> Result<proto::ApplyCodeActionResponse> {
2955 let sender_id = envelope.original_sender_id()?;
2956 let action = language::proto::deserialize_code_action(
2957 envelope
2958 .payload
2959 .action
2960 .ok_or_else(|| anyhow!("invalid action"))?,
2961 )?;
2962 let apply_code_action = this.update(&mut cx, |this, cx| {
2963 let buffer = this
2964 .opened_buffers
2965 .get(&envelope.payload.buffer_id)
2966 .map(|buffer| buffer.upgrade(cx).unwrap())
2967 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2968 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2969 })?;
2970
2971 let project_transaction = apply_code_action.await?;
2972 let project_transaction = this.update(&mut cx, |this, cx| {
2973 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2974 });
2975 Ok(proto::ApplyCodeActionResponse {
2976 transaction: Some(project_transaction),
2977 })
2978 }
2979
2980 async fn handle_lsp_command<T: LspCommand>(
2981 this: ModelHandle<Self>,
2982 envelope: TypedEnvelope<T::ProtoRequest>,
2983 _: Arc<Client>,
2984 mut cx: AsyncAppContext,
2985 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2986 where
2987 <T::LspRequest as lsp::request::Request>::Result: Send,
2988 {
2989 let sender_id = envelope.original_sender_id()?;
2990 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2991 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2992 let buffer_handle = this
2993 .opened_buffers
2994 .get(&buffer_id)
2995 .map(|buffer| buffer.upgrade(cx).unwrap())
2996 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2997 let buffer = buffer_handle.read(cx);
2998 let buffer_version = buffer.version();
2999 let request = T::from_proto(envelope.payload, this, buffer)?;
3000 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
3001 })?;
3002 let response = request.await?;
3003 this.update(&mut cx, |this, cx| {
3004 Ok(T::response_to_proto(
3005 response,
3006 this,
3007 sender_id,
3008 &buffer_version,
3009 cx,
3010 ))
3011 })
3012 }
3013
3014 async fn handle_get_project_symbols(
3015 this: ModelHandle<Self>,
3016 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3017 _: Arc<Client>,
3018 mut cx: AsyncAppContext,
3019 ) -> Result<proto::GetProjectSymbolsResponse> {
3020 let symbols = this
3021 .update(&mut cx, |this, cx| {
3022 this.symbols(&envelope.payload.query, cx)
3023 })
3024 .await?;
3025
3026 Ok(proto::GetProjectSymbolsResponse {
3027 symbols: symbols.iter().map(serialize_symbol).collect(),
3028 })
3029 }
3030
3031 async fn handle_search_project(
3032 this: ModelHandle<Self>,
3033 envelope: TypedEnvelope<proto::SearchProject>,
3034 _: Arc<Client>,
3035 mut cx: AsyncAppContext,
3036 ) -> Result<proto::SearchProjectResponse> {
3037 let peer_id = envelope.original_sender_id()?;
3038 let query = SearchQuery::from_proto(envelope.payload)?;
3039 let result = this
3040 .update(&mut cx, |this, cx| this.search(query, cx))
3041 .await?;
3042
3043 this.update(&mut cx, |this, cx| {
3044 let mut locations = Vec::new();
3045 for (buffer, ranges) in result {
3046 for range in ranges {
3047 let start = serialize_anchor(&range.start);
3048 let end = serialize_anchor(&range.end);
3049 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3050 locations.push(proto::Location {
3051 buffer: Some(buffer),
3052 start: Some(start),
3053 end: Some(end),
3054 });
3055 }
3056 }
3057 Ok(proto::SearchProjectResponse { locations })
3058 })
3059 }
3060
3061 async fn handle_open_buffer_for_symbol(
3062 this: ModelHandle<Self>,
3063 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3064 _: Arc<Client>,
3065 mut cx: AsyncAppContext,
3066 ) -> Result<proto::OpenBufferForSymbolResponse> {
3067 let peer_id = envelope.original_sender_id()?;
3068 let symbol = envelope
3069 .payload
3070 .symbol
3071 .ok_or_else(|| anyhow!("invalid symbol"))?;
3072 let symbol = this.read_with(&cx, |this, _| {
3073 let symbol = this.deserialize_symbol(symbol)?;
3074 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3075 if signature == symbol.signature {
3076 Ok(symbol)
3077 } else {
3078 Err(anyhow!("invalid symbol signature"))
3079 }
3080 })?;
3081 let buffer = this
3082 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3083 .await?;
3084
3085 Ok(proto::OpenBufferForSymbolResponse {
3086 buffer: Some(this.update(&mut cx, |this, cx| {
3087 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3088 })),
3089 })
3090 }
3091
3092 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3093 let mut hasher = Sha256::new();
3094 hasher.update(worktree_id.to_proto().to_be_bytes());
3095 hasher.update(path.to_string_lossy().as_bytes());
3096 hasher.update(self.nonce.to_be_bytes());
3097 hasher.finalize().as_slice().try_into().unwrap()
3098 }
3099
3100 async fn handle_open_buffer(
3101 this: ModelHandle<Self>,
3102 envelope: TypedEnvelope<proto::OpenBuffer>,
3103 _: Arc<Client>,
3104 mut cx: AsyncAppContext,
3105 ) -> Result<proto::OpenBufferResponse> {
3106 let peer_id = envelope.original_sender_id()?;
3107 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3108 let open_buffer = this.update(&mut cx, |this, cx| {
3109 this.open_buffer(
3110 ProjectPath {
3111 worktree_id,
3112 path: PathBuf::from(envelope.payload.path).into(),
3113 },
3114 cx,
3115 )
3116 });
3117
3118 let buffer = open_buffer.await?;
3119 this.update(&mut cx, |this, cx| {
3120 Ok(proto::OpenBufferResponse {
3121 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3122 })
3123 })
3124 }
3125
3126 fn serialize_project_transaction_for_peer(
3127 &mut self,
3128 project_transaction: ProjectTransaction,
3129 peer_id: PeerId,
3130 cx: &AppContext,
3131 ) -> proto::ProjectTransaction {
3132 let mut serialized_transaction = proto::ProjectTransaction {
3133 buffers: Default::default(),
3134 transactions: Default::default(),
3135 };
3136 for (buffer, transaction) in project_transaction.0 {
3137 serialized_transaction
3138 .buffers
3139 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3140 serialized_transaction
3141 .transactions
3142 .push(language::proto::serialize_transaction(&transaction));
3143 }
3144 serialized_transaction
3145 }
3146
3147 fn deserialize_project_transaction(
3148 &mut self,
3149 message: proto::ProjectTransaction,
3150 push_to_history: bool,
3151 cx: &mut ModelContext<Self>,
3152 ) -> Task<Result<ProjectTransaction>> {
3153 cx.spawn(|this, mut cx| async move {
3154 let mut project_transaction = ProjectTransaction::default();
3155 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3156 let buffer = this
3157 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3158 .await?;
3159 let transaction = language::proto::deserialize_transaction(transaction)?;
3160 project_transaction.0.insert(buffer, transaction);
3161 }
3162
3163 for (buffer, transaction) in &project_transaction.0 {
3164 buffer
3165 .update(&mut cx, |buffer, _| {
3166 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3167 })
3168 .await;
3169
3170 if push_to_history {
3171 buffer.update(&mut cx, |buffer, _| {
3172 buffer.push_transaction(transaction.clone(), Instant::now());
3173 });
3174 }
3175 }
3176
3177 Ok(project_transaction)
3178 })
3179 }
3180
3181 fn serialize_buffer_for_peer(
3182 &mut self,
3183 buffer: &ModelHandle<Buffer>,
3184 peer_id: PeerId,
3185 cx: &AppContext,
3186 ) -> proto::Buffer {
3187 let buffer_id = buffer.read(cx).remote_id();
3188 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3189 if shared_buffers.insert(buffer_id) {
3190 proto::Buffer {
3191 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3192 }
3193 } else {
3194 proto::Buffer {
3195 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3196 }
3197 }
3198 }
3199
3200 fn deserialize_buffer(
3201 &mut self,
3202 buffer: proto::Buffer,
3203 cx: &mut ModelContext<Self>,
3204 ) -> Task<Result<ModelHandle<Buffer>>> {
3205 let replica_id = self.replica_id();
3206
3207 let opened_buffer_tx = self.opened_buffer.0.clone();
3208 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3209 cx.spawn(|this, mut cx| async move {
3210 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3211 proto::buffer::Variant::Id(id) => {
3212 let buffer = loop {
3213 let buffer = this.read_with(&cx, |this, cx| {
3214 this.opened_buffers
3215 .get(&id)
3216 .and_then(|buffer| buffer.upgrade(cx))
3217 });
3218 if let Some(buffer) = buffer {
3219 break buffer;
3220 }
3221 opened_buffer_rx
3222 .next()
3223 .await
3224 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3225 };
3226 Ok(buffer)
3227 }
3228 proto::buffer::Variant::State(mut buffer) => {
3229 let mut buffer_worktree = None;
3230 let mut buffer_file = None;
3231 if let Some(file) = buffer.file.take() {
3232 this.read_with(&cx, |this, cx| {
3233 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3234 let worktree =
3235 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3236 anyhow!("no worktree found for id {}", file.worktree_id)
3237 })?;
3238 buffer_file =
3239 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3240 as Box<dyn language::File>);
3241 buffer_worktree = Some(worktree);
3242 Ok::<_, anyhow::Error>(())
3243 })?;
3244 }
3245
3246 let buffer = cx.add_model(|cx| {
3247 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3248 });
3249
3250 this.update(&mut cx, |this, cx| {
3251 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3252 })?;
3253
3254 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3255 Ok(buffer)
3256 }
3257 }
3258 })
3259 }
3260
3261 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3262 let language = self
3263 .languages
3264 .get_language(&serialized_symbol.language_name);
3265 let start = serialized_symbol
3266 .start
3267 .ok_or_else(|| anyhow!("invalid start"))?;
3268 let end = serialized_symbol
3269 .end
3270 .ok_or_else(|| anyhow!("invalid end"))?;
3271 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3272 Ok(Symbol {
3273 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3274 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3275 language_name: serialized_symbol.language_name.clone(),
3276 label: language
3277 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3278 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3279 name: serialized_symbol.name,
3280 path: PathBuf::from(serialized_symbol.path),
3281 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3282 kind,
3283 signature: serialized_symbol
3284 .signature
3285 .try_into()
3286 .map_err(|_| anyhow!("invalid signature"))?,
3287 })
3288 }
3289
3290 async fn handle_close_buffer(
3291 _: ModelHandle<Self>,
3292 _: TypedEnvelope<proto::CloseBuffer>,
3293 _: Arc<Client>,
3294 _: AsyncAppContext,
3295 ) -> Result<()> {
3296 // TODO: use this for following
3297 Ok(())
3298 }
3299
3300 async fn handle_buffer_saved(
3301 this: ModelHandle<Self>,
3302 envelope: TypedEnvelope<proto::BufferSaved>,
3303 _: Arc<Client>,
3304 mut cx: AsyncAppContext,
3305 ) -> Result<()> {
3306 let version = envelope.payload.version.try_into()?;
3307 let mtime = envelope
3308 .payload
3309 .mtime
3310 .ok_or_else(|| anyhow!("missing mtime"))?
3311 .into();
3312
3313 this.update(&mut cx, |this, cx| {
3314 let buffer = this
3315 .opened_buffers
3316 .get(&envelope.payload.buffer_id)
3317 .and_then(|buffer| buffer.upgrade(cx));
3318 if let Some(buffer) = buffer {
3319 buffer.update(cx, |buffer, cx| {
3320 buffer.did_save(version, mtime, None, cx);
3321 });
3322 }
3323 Ok(())
3324 })
3325 }
3326
3327 async fn handle_buffer_reloaded(
3328 this: ModelHandle<Self>,
3329 envelope: TypedEnvelope<proto::BufferReloaded>,
3330 _: Arc<Client>,
3331 mut cx: AsyncAppContext,
3332 ) -> Result<()> {
3333 let payload = envelope.payload.clone();
3334 let version = payload.version.try_into()?;
3335 let mtime = payload
3336 .mtime
3337 .ok_or_else(|| anyhow!("missing mtime"))?
3338 .into();
3339 this.update(&mut cx, |this, cx| {
3340 let buffer = this
3341 .opened_buffers
3342 .get(&payload.buffer_id)
3343 .and_then(|buffer| buffer.upgrade(cx));
3344 if let Some(buffer) = buffer {
3345 buffer.update(cx, |buffer, cx| {
3346 buffer.did_reload(version, mtime, cx);
3347 });
3348 }
3349 Ok(())
3350 })
3351 }
3352
3353 pub fn match_paths<'a>(
3354 &self,
3355 query: &'a str,
3356 include_ignored: bool,
3357 smart_case: bool,
3358 max_results: usize,
3359 cancel_flag: &'a AtomicBool,
3360 cx: &AppContext,
3361 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3362 let worktrees = self
3363 .worktrees(cx)
3364 .filter(|worktree| worktree.read(cx).is_visible())
3365 .collect::<Vec<_>>();
3366 let include_root_name = worktrees.len() > 1;
3367 let candidate_sets = worktrees
3368 .into_iter()
3369 .map(|worktree| CandidateSet {
3370 snapshot: worktree.read(cx).snapshot(),
3371 include_ignored,
3372 include_root_name,
3373 })
3374 .collect::<Vec<_>>();
3375
3376 let background = cx.background().clone();
3377 async move {
3378 fuzzy::match_paths(
3379 candidate_sets.as_slice(),
3380 query,
3381 smart_case,
3382 max_results,
3383 cancel_flag,
3384 background,
3385 )
3386 .await
3387 }
3388 }
3389}
3390
3391impl WorktreeHandle {
3392 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3393 match self {
3394 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3395 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3396 }
3397 }
3398}
3399
3400impl OpenBuffer {
3401 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3402 match self {
3403 OpenBuffer::Strong(handle) => Some(handle.clone()),
3404 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3405 OpenBuffer::Loading(_) => None,
3406 }
3407 }
3408}
3409
3410struct CandidateSet {
3411 snapshot: Snapshot,
3412 include_ignored: bool,
3413 include_root_name: bool,
3414}
3415
3416impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3417 type Candidates = CandidateSetIter<'a>;
3418
3419 fn id(&self) -> usize {
3420 self.snapshot.id().to_usize()
3421 }
3422
3423 fn len(&self) -> usize {
3424 if self.include_ignored {
3425 self.snapshot.file_count()
3426 } else {
3427 self.snapshot.visible_file_count()
3428 }
3429 }
3430
3431 fn prefix(&self) -> Arc<str> {
3432 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3433 self.snapshot.root_name().into()
3434 } else if self.include_root_name {
3435 format!("{}/", self.snapshot.root_name()).into()
3436 } else {
3437 "".into()
3438 }
3439 }
3440
3441 fn candidates(&'a self, start: usize) -> Self::Candidates {
3442 CandidateSetIter {
3443 traversal: self.snapshot.files(self.include_ignored, start),
3444 }
3445 }
3446}
3447
3448struct CandidateSetIter<'a> {
3449 traversal: Traversal<'a>,
3450}
3451
3452impl<'a> Iterator for CandidateSetIter<'a> {
3453 type Item = PathMatchCandidate<'a>;
3454
3455 fn next(&mut self) -> Option<Self::Item> {
3456 self.traversal.next().map(|entry| {
3457 if let EntryKind::File(char_bag) = entry.kind {
3458 PathMatchCandidate {
3459 path: &entry.path,
3460 char_bag,
3461 }
3462 } else {
3463 unreachable!()
3464 }
3465 })
3466 }
3467}
3468
3469impl Entity for Project {
3470 type Event = Event;
3471
3472 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3473 match &self.client_state {
3474 ProjectClientState::Local { remote_id_rx, .. } => {
3475 if let Some(project_id) = *remote_id_rx.borrow() {
3476 self.client
3477 .send(proto::UnregisterProject { project_id })
3478 .log_err();
3479 }
3480 }
3481 ProjectClientState::Remote { remote_id, .. } => {
3482 self.client
3483 .send(proto::LeaveProject {
3484 project_id: *remote_id,
3485 })
3486 .log_err();
3487 }
3488 }
3489 }
3490
3491 fn app_will_quit(
3492 &mut self,
3493 _: &mut MutableAppContext,
3494 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3495 let shutdown_futures = self
3496 .language_servers
3497 .drain()
3498 .filter_map(|(_, server)| server.shutdown())
3499 .collect::<Vec<_>>();
3500 Some(
3501 async move {
3502 futures::future::join_all(shutdown_futures).await;
3503 }
3504 .boxed(),
3505 )
3506 }
3507}
3508
3509impl Collaborator {
3510 fn from_proto(
3511 message: proto::Collaborator,
3512 user_store: &ModelHandle<UserStore>,
3513 cx: &mut AsyncAppContext,
3514 ) -> impl Future<Output = Result<Self>> {
3515 let user = user_store.update(cx, |user_store, cx| {
3516 user_store.fetch_user(message.user_id, cx)
3517 });
3518
3519 async move {
3520 Ok(Self {
3521 peer_id: PeerId(message.peer_id),
3522 user: user.await?,
3523 replica_id: message.replica_id as ReplicaId,
3524 })
3525 }
3526 }
3527}
3528
3529impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3530 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3531 Self {
3532 worktree_id,
3533 path: path.as_ref().into(),
3534 }
3535 }
3536}
3537
3538impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3539 fn from(options: lsp::CreateFileOptions) -> Self {
3540 Self {
3541 overwrite: options.overwrite.unwrap_or(false),
3542 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3543 }
3544 }
3545}
3546
3547impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3548 fn from(options: lsp::RenameFileOptions) -> Self {
3549 Self {
3550 overwrite: options.overwrite.unwrap_or(false),
3551 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3552 }
3553 }
3554}
3555
3556impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3557 fn from(options: lsp::DeleteFileOptions) -> Self {
3558 Self {
3559 recursive: options.recursive.unwrap_or(false),
3560 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3561 }
3562 }
3563}
3564
3565fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3566 proto::Symbol {
3567 source_worktree_id: symbol.source_worktree_id.to_proto(),
3568 worktree_id: symbol.worktree_id.to_proto(),
3569 language_name: symbol.language_name.clone(),
3570 name: symbol.name.clone(),
3571 kind: unsafe { mem::transmute(symbol.kind) },
3572 path: symbol.path.to_string_lossy().to_string(),
3573 start: Some(proto::Point {
3574 row: symbol.range.start.row,
3575 column: symbol.range.start.column,
3576 }),
3577 end: Some(proto::Point {
3578 row: symbol.range.end.row,
3579 column: symbol.range.end.column,
3580 }),
3581 signature: symbol.signature.to_vec(),
3582 }
3583}
3584
3585fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3586 let mut path_components = path.components();
3587 let mut base_components = base.components();
3588 let mut components: Vec<Component> = Vec::new();
3589 loop {
3590 match (path_components.next(), base_components.next()) {
3591 (None, None) => break,
3592 (Some(a), None) => {
3593 components.push(a);
3594 components.extend(path_components.by_ref());
3595 break;
3596 }
3597 (None, _) => components.push(Component::ParentDir),
3598 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3599 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3600 (Some(a), Some(_)) => {
3601 components.push(Component::ParentDir);
3602 for _ in base_components {
3603 components.push(Component::ParentDir);
3604 }
3605 components.push(a);
3606 components.extend(path_components.by_ref());
3607 break;
3608 }
3609 }
3610 }
3611 components.iter().map(|c| c.as_os_str()).collect()
3612}
3613
3614#[cfg(test)]
3615mod tests {
3616 use super::{Event, *};
3617 use fs::RealFs;
3618 use futures::StreamExt;
3619 use gpui::test::subscribe;
3620 use language::{
3621 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3622 };
3623 use lsp::Url;
3624 use serde_json::json;
3625 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3626 use unindent::Unindent as _;
3627 use util::test::temp_tree;
3628 use worktree::WorktreeHandle as _;
3629
3630 #[gpui::test]
3631 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3632 let dir = temp_tree(json!({
3633 "root": {
3634 "apple": "",
3635 "banana": {
3636 "carrot": {
3637 "date": "",
3638 "endive": "",
3639 }
3640 },
3641 "fennel": {
3642 "grape": "",
3643 }
3644 }
3645 }));
3646
3647 let root_link_path = dir.path().join("root_link");
3648 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3649 unix::fs::symlink(
3650 &dir.path().join("root/fennel"),
3651 &dir.path().join("root/finnochio"),
3652 )
3653 .unwrap();
3654
3655 let project = Project::test(Arc::new(RealFs), cx);
3656
3657 let (tree, _) = project
3658 .update(cx, |project, cx| {
3659 project.find_or_create_local_worktree(&root_link_path, true, cx)
3660 })
3661 .await
3662 .unwrap();
3663
3664 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3665 .await;
3666 cx.read(|cx| {
3667 let tree = tree.read(cx);
3668 assert_eq!(tree.file_count(), 5);
3669 assert_eq!(
3670 tree.inode_for_path("fennel/grape"),
3671 tree.inode_for_path("finnochio/grape")
3672 );
3673 });
3674
3675 let cancel_flag = Default::default();
3676 let results = project
3677 .read_with(cx, |project, cx| {
3678 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3679 })
3680 .await;
3681 assert_eq!(
3682 results
3683 .into_iter()
3684 .map(|result| result.path)
3685 .collect::<Vec<Arc<Path>>>(),
3686 vec![
3687 PathBuf::from("banana/carrot/date").into(),
3688 PathBuf::from("banana/carrot/endive").into(),
3689 ]
3690 );
3691 }
3692
3693 #[gpui::test]
3694 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3695 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3696 let progress_token = language_server_config
3697 .disk_based_diagnostics_progress_token
3698 .clone()
3699 .unwrap();
3700
3701 let language = Arc::new(Language::new(
3702 LanguageConfig {
3703 name: "Rust".into(),
3704 path_suffixes: vec!["rs".to_string()],
3705 language_server: Some(language_server_config),
3706 ..Default::default()
3707 },
3708 Some(tree_sitter_rust::language()),
3709 ));
3710
3711 let fs = FakeFs::new(cx.background());
3712 fs.insert_tree(
3713 "/dir",
3714 json!({
3715 "a.rs": "fn a() { A }",
3716 "b.rs": "const y: i32 = 1",
3717 }),
3718 )
3719 .await;
3720
3721 let project = Project::test(fs, cx);
3722 project.update(cx, |project, _| {
3723 Arc::get_mut(&mut project.languages).unwrap().add(language);
3724 });
3725
3726 let (tree, _) = project
3727 .update(cx, |project, cx| {
3728 project.find_or_create_local_worktree("/dir", true, cx)
3729 })
3730 .await
3731 .unwrap();
3732 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3733
3734 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3735 .await;
3736
3737 // Cause worktree to start the fake language server
3738 let _buffer = project
3739 .update(cx, |project, cx| {
3740 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3741 })
3742 .await
3743 .unwrap();
3744
3745 let mut events = subscribe(&project, cx);
3746
3747 let mut fake_server = fake_servers.next().await.unwrap();
3748 fake_server.start_progress(&progress_token).await;
3749 assert_eq!(
3750 events.next().await.unwrap(),
3751 Event::DiskBasedDiagnosticsStarted
3752 );
3753
3754 fake_server.start_progress(&progress_token).await;
3755 fake_server.end_progress(&progress_token).await;
3756 fake_server.start_progress(&progress_token).await;
3757
3758 fake_server
3759 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3760 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3761 version: None,
3762 diagnostics: vec![lsp::Diagnostic {
3763 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3764 severity: Some(lsp::DiagnosticSeverity::ERROR),
3765 message: "undefined variable 'A'".to_string(),
3766 ..Default::default()
3767 }],
3768 })
3769 .await;
3770 assert_eq!(
3771 events.next().await.unwrap(),
3772 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3773 );
3774
3775 fake_server.end_progress(&progress_token).await;
3776 fake_server.end_progress(&progress_token).await;
3777 assert_eq!(
3778 events.next().await.unwrap(),
3779 Event::DiskBasedDiagnosticsUpdated
3780 );
3781 assert_eq!(
3782 events.next().await.unwrap(),
3783 Event::DiskBasedDiagnosticsFinished
3784 );
3785
3786 let buffer = project
3787 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3788 .await
3789 .unwrap();
3790
3791 buffer.read_with(cx, |buffer, _| {
3792 let snapshot = buffer.snapshot();
3793 let diagnostics = snapshot
3794 .diagnostics_in_range::<_, Point>(0..buffer.len())
3795 .collect::<Vec<_>>();
3796 assert_eq!(
3797 diagnostics,
3798 &[DiagnosticEntry {
3799 range: Point::new(0, 9)..Point::new(0, 10),
3800 diagnostic: Diagnostic {
3801 severity: lsp::DiagnosticSeverity::ERROR,
3802 message: "undefined variable 'A'".to_string(),
3803 group_id: 0,
3804 is_primary: true,
3805 ..Default::default()
3806 }
3807 }]
3808 )
3809 });
3810 }
3811
3812 #[gpui::test]
3813 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3814 let dir = temp_tree(json!({
3815 "root": {
3816 "dir1": {},
3817 "dir2": {
3818 "dir3": {}
3819 }
3820 }
3821 }));
3822
3823 let project = Project::test(Arc::new(RealFs), cx);
3824 let (tree, _) = project
3825 .update(cx, |project, cx| {
3826 project.find_or_create_local_worktree(&dir.path(), true, cx)
3827 })
3828 .await
3829 .unwrap();
3830
3831 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3832 .await;
3833
3834 let cancel_flag = Default::default();
3835 let results = project
3836 .read_with(cx, |project, cx| {
3837 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3838 })
3839 .await;
3840
3841 assert!(results.is_empty());
3842 }
3843
3844 #[gpui::test]
3845 async fn test_definition(cx: &mut gpui::TestAppContext) {
3846 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3847 let language = Arc::new(Language::new(
3848 LanguageConfig {
3849 name: "Rust".into(),
3850 path_suffixes: vec!["rs".to_string()],
3851 language_server: Some(language_server_config),
3852 ..Default::default()
3853 },
3854 Some(tree_sitter_rust::language()),
3855 ));
3856
3857 let fs = FakeFs::new(cx.background());
3858 fs.insert_tree(
3859 "/dir",
3860 json!({
3861 "a.rs": "const fn a() { A }",
3862 "b.rs": "const y: i32 = crate::a()",
3863 }),
3864 )
3865 .await;
3866
3867 let project = Project::test(fs, cx);
3868 project.update(cx, |project, _| {
3869 Arc::get_mut(&mut project.languages).unwrap().add(language);
3870 });
3871
3872 let (tree, _) = project
3873 .update(cx, |project, cx| {
3874 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
3875 })
3876 .await
3877 .unwrap();
3878 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3879 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3880 .await;
3881
3882 let buffer = project
3883 .update(cx, |project, cx| {
3884 project.open_buffer(
3885 ProjectPath {
3886 worktree_id,
3887 path: Path::new("").into(),
3888 },
3889 cx,
3890 )
3891 })
3892 .await
3893 .unwrap();
3894
3895 let mut fake_server = fake_servers.next().await.unwrap();
3896 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3897 let params = params.text_document_position_params;
3898 assert_eq!(
3899 params.text_document.uri.to_file_path().unwrap(),
3900 Path::new("/dir/b.rs"),
3901 );
3902 assert_eq!(params.position, lsp::Position::new(0, 22));
3903
3904 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3905 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3906 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3907 )))
3908 });
3909
3910 let mut definitions = project
3911 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
3912 .await
3913 .unwrap();
3914
3915 assert_eq!(definitions.len(), 1);
3916 let definition = definitions.pop().unwrap();
3917 cx.update(|cx| {
3918 let target_buffer = definition.buffer.read(cx);
3919 assert_eq!(
3920 target_buffer
3921 .file()
3922 .unwrap()
3923 .as_local()
3924 .unwrap()
3925 .abs_path(cx),
3926 Path::new("/dir/a.rs"),
3927 );
3928 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
3929 assert_eq!(
3930 list_worktrees(&project, cx),
3931 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
3932 );
3933
3934 drop(definition);
3935 });
3936 cx.read(|cx| {
3937 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
3938 });
3939
3940 fn list_worktrees<'a>(
3941 project: &'a ModelHandle<Project>,
3942 cx: &'a AppContext,
3943 ) -> Vec<(&'a Path, bool)> {
3944 project
3945 .read(cx)
3946 .worktrees(cx)
3947 .map(|worktree| {
3948 let worktree = worktree.read(cx);
3949 (
3950 worktree.as_local().unwrap().abs_path().as_ref(),
3951 worktree.is_visible(),
3952 )
3953 })
3954 .collect::<Vec<_>>()
3955 }
3956 }
3957
3958 #[gpui::test]
3959 async fn test_save_file(cx: &mut gpui::TestAppContext) {
3960 let fs = FakeFs::new(cx.background());
3961 fs.insert_tree(
3962 "/dir",
3963 json!({
3964 "file1": "the old contents",
3965 }),
3966 )
3967 .await;
3968
3969 let project = Project::test(fs.clone(), cx);
3970 let worktree_id = project
3971 .update(cx, |p, cx| {
3972 p.find_or_create_local_worktree("/dir", true, cx)
3973 })
3974 .await
3975 .unwrap()
3976 .0
3977 .read_with(cx, |tree, _| tree.id());
3978
3979 let buffer = project
3980 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3981 .await
3982 .unwrap();
3983 buffer
3984 .update(cx, |buffer, cx| {
3985 assert_eq!(buffer.text(), "the old contents");
3986 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3987 buffer.save(cx)
3988 })
3989 .await
3990 .unwrap();
3991
3992 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3993 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
3994 }
3995
3996 #[gpui::test]
3997 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3998 let fs = FakeFs::new(cx.background());
3999 fs.insert_tree(
4000 "/dir",
4001 json!({
4002 "file1": "the old contents",
4003 }),
4004 )
4005 .await;
4006
4007 let project = Project::test(fs.clone(), cx);
4008 let worktree_id = project
4009 .update(cx, |p, cx| {
4010 p.find_or_create_local_worktree("/dir/file1", true, cx)
4011 })
4012 .await
4013 .unwrap()
4014 .0
4015 .read_with(cx, |tree, _| tree.id());
4016
4017 let buffer = project
4018 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4019 .await
4020 .unwrap();
4021 buffer
4022 .update(cx, |buffer, cx| {
4023 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4024 buffer.save(cx)
4025 })
4026 .await
4027 .unwrap();
4028
4029 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4030 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4031 }
4032
4033 #[gpui::test(retries = 5)]
4034 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4035 let dir = temp_tree(json!({
4036 "a": {
4037 "file1": "",
4038 "file2": "",
4039 "file3": "",
4040 },
4041 "b": {
4042 "c": {
4043 "file4": "",
4044 "file5": "",
4045 }
4046 }
4047 }));
4048
4049 let project = Project::test(Arc::new(RealFs), cx);
4050 let rpc = project.read_with(cx, |p, _| p.client.clone());
4051
4052 let (tree, _) = project
4053 .update(cx, |p, cx| {
4054 p.find_or_create_local_worktree(dir.path(), true, cx)
4055 })
4056 .await
4057 .unwrap();
4058 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4059
4060 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4061 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4062 async move { buffer.await.unwrap() }
4063 };
4064 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4065 tree.read_with(cx, |tree, _| {
4066 tree.entry_for_path(path)
4067 .expect(&format!("no entry for path {}", path))
4068 .id
4069 })
4070 };
4071
4072 let buffer2 = buffer_for_path("a/file2", cx).await;
4073 let buffer3 = buffer_for_path("a/file3", cx).await;
4074 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4075 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4076
4077 let file2_id = id_for_path("a/file2", &cx);
4078 let file3_id = id_for_path("a/file3", &cx);
4079 let file4_id = id_for_path("b/c/file4", &cx);
4080
4081 // Wait for the initial scan.
4082 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4083 .await;
4084
4085 // Create a remote copy of this worktree.
4086 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4087 let (remote, load_task) = cx.update(|cx| {
4088 Worktree::remote(
4089 1,
4090 1,
4091 initial_snapshot.to_proto(&Default::default(), true),
4092 rpc.clone(),
4093 cx,
4094 )
4095 });
4096 load_task.await;
4097
4098 cx.read(|cx| {
4099 assert!(!buffer2.read(cx).is_dirty());
4100 assert!(!buffer3.read(cx).is_dirty());
4101 assert!(!buffer4.read(cx).is_dirty());
4102 assert!(!buffer5.read(cx).is_dirty());
4103 });
4104
4105 // Rename and delete files and directories.
4106 tree.flush_fs_events(&cx).await;
4107 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4108 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4109 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4110 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4111 tree.flush_fs_events(&cx).await;
4112
4113 let expected_paths = vec![
4114 "a",
4115 "a/file1",
4116 "a/file2.new",
4117 "b",
4118 "d",
4119 "d/file3",
4120 "d/file4",
4121 ];
4122
4123 cx.read(|app| {
4124 assert_eq!(
4125 tree.read(app)
4126 .paths()
4127 .map(|p| p.to_str().unwrap())
4128 .collect::<Vec<_>>(),
4129 expected_paths
4130 );
4131
4132 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4133 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4134 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4135
4136 assert_eq!(
4137 buffer2.read(app).file().unwrap().path().as_ref(),
4138 Path::new("a/file2.new")
4139 );
4140 assert_eq!(
4141 buffer3.read(app).file().unwrap().path().as_ref(),
4142 Path::new("d/file3")
4143 );
4144 assert_eq!(
4145 buffer4.read(app).file().unwrap().path().as_ref(),
4146 Path::new("d/file4")
4147 );
4148 assert_eq!(
4149 buffer5.read(app).file().unwrap().path().as_ref(),
4150 Path::new("b/c/file5")
4151 );
4152
4153 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4154 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4155 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4156 assert!(buffer5.read(app).file().unwrap().is_deleted());
4157 });
4158
4159 // Update the remote worktree. Check that it becomes consistent with the
4160 // local worktree.
4161 remote.update(cx, |remote, cx| {
4162 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4163 &initial_snapshot,
4164 1,
4165 1,
4166 true,
4167 );
4168 remote
4169 .as_remote_mut()
4170 .unwrap()
4171 .snapshot
4172 .apply_remote_update(update_message)
4173 .unwrap();
4174
4175 assert_eq!(
4176 remote
4177 .paths()
4178 .map(|p| p.to_str().unwrap())
4179 .collect::<Vec<_>>(),
4180 expected_paths
4181 );
4182 });
4183 }
4184
4185 #[gpui::test]
4186 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4187 let fs = FakeFs::new(cx.background());
4188 fs.insert_tree(
4189 "/the-dir",
4190 json!({
4191 "a.txt": "a-contents",
4192 "b.txt": "b-contents",
4193 }),
4194 )
4195 .await;
4196
4197 let project = Project::test(fs.clone(), cx);
4198 let worktree_id = project
4199 .update(cx, |p, cx| {
4200 p.find_or_create_local_worktree("/the-dir", true, cx)
4201 })
4202 .await
4203 .unwrap()
4204 .0
4205 .read_with(cx, |tree, _| tree.id());
4206
4207 // Spawn multiple tasks to open paths, repeating some paths.
4208 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4209 (
4210 p.open_buffer((worktree_id, "a.txt"), cx),
4211 p.open_buffer((worktree_id, "b.txt"), cx),
4212 p.open_buffer((worktree_id, "a.txt"), cx),
4213 )
4214 });
4215
4216 let buffer_a_1 = buffer_a_1.await.unwrap();
4217 let buffer_a_2 = buffer_a_2.await.unwrap();
4218 let buffer_b = buffer_b.await.unwrap();
4219 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4220 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4221
4222 // There is only one buffer per path.
4223 let buffer_a_id = buffer_a_1.id();
4224 assert_eq!(buffer_a_2.id(), buffer_a_id);
4225
4226 // Open the same path again while it is still open.
4227 drop(buffer_a_1);
4228 let buffer_a_3 = project
4229 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4230 .await
4231 .unwrap();
4232
4233 // There's still only one buffer per path.
4234 assert_eq!(buffer_a_3.id(), buffer_a_id);
4235 }
4236
4237 #[gpui::test]
4238 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4239 use std::fs;
4240
4241 let dir = temp_tree(json!({
4242 "file1": "abc",
4243 "file2": "def",
4244 "file3": "ghi",
4245 }));
4246
4247 let project = Project::test(Arc::new(RealFs), cx);
4248 let (worktree, _) = project
4249 .update(cx, |p, cx| {
4250 p.find_or_create_local_worktree(dir.path(), true, cx)
4251 })
4252 .await
4253 .unwrap();
4254 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4255
4256 worktree.flush_fs_events(&cx).await;
4257 worktree
4258 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4259 .await;
4260
4261 let buffer1 = project
4262 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4263 .await
4264 .unwrap();
4265 let events = Rc::new(RefCell::new(Vec::new()));
4266
4267 // initially, the buffer isn't dirty.
4268 buffer1.update(cx, |buffer, cx| {
4269 cx.subscribe(&buffer1, {
4270 let events = events.clone();
4271 move |_, _, event, _| events.borrow_mut().push(event.clone())
4272 })
4273 .detach();
4274
4275 assert!(!buffer.is_dirty());
4276 assert!(events.borrow().is_empty());
4277
4278 buffer.edit(vec![1..2], "", cx);
4279 });
4280
4281 // after the first edit, the buffer is dirty, and emits a dirtied event.
4282 buffer1.update(cx, |buffer, cx| {
4283 assert!(buffer.text() == "ac");
4284 assert!(buffer.is_dirty());
4285 assert_eq!(
4286 *events.borrow(),
4287 &[language::Event::Edited, language::Event::Dirtied]
4288 );
4289 events.borrow_mut().clear();
4290 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4291 });
4292
4293 // after saving, the buffer is not dirty, and emits a saved event.
4294 buffer1.update(cx, |buffer, cx| {
4295 assert!(!buffer.is_dirty());
4296 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4297 events.borrow_mut().clear();
4298
4299 buffer.edit(vec![1..1], "B", cx);
4300 buffer.edit(vec![2..2], "D", cx);
4301 });
4302
4303 // after editing again, the buffer is dirty, and emits another dirty event.
4304 buffer1.update(cx, |buffer, cx| {
4305 assert!(buffer.text() == "aBDc");
4306 assert!(buffer.is_dirty());
4307 assert_eq!(
4308 *events.borrow(),
4309 &[
4310 language::Event::Edited,
4311 language::Event::Dirtied,
4312 language::Event::Edited,
4313 ],
4314 );
4315 events.borrow_mut().clear();
4316
4317 // TODO - currently, after restoring the buffer to its
4318 // previously-saved state, the is still considered dirty.
4319 buffer.edit([1..3], "", cx);
4320 assert!(buffer.text() == "ac");
4321 assert!(buffer.is_dirty());
4322 });
4323
4324 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4325
4326 // When a file is deleted, the buffer is considered dirty.
4327 let events = Rc::new(RefCell::new(Vec::new()));
4328 let buffer2 = project
4329 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4330 .await
4331 .unwrap();
4332 buffer2.update(cx, |_, cx| {
4333 cx.subscribe(&buffer2, {
4334 let events = events.clone();
4335 move |_, _, event, _| events.borrow_mut().push(event.clone())
4336 })
4337 .detach();
4338 });
4339
4340 fs::remove_file(dir.path().join("file2")).unwrap();
4341 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4342 assert_eq!(
4343 *events.borrow(),
4344 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4345 );
4346
4347 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4348 let events = Rc::new(RefCell::new(Vec::new()));
4349 let buffer3 = project
4350 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4351 .await
4352 .unwrap();
4353 buffer3.update(cx, |_, cx| {
4354 cx.subscribe(&buffer3, {
4355 let events = events.clone();
4356 move |_, _, event, _| events.borrow_mut().push(event.clone())
4357 })
4358 .detach();
4359 });
4360
4361 worktree.flush_fs_events(&cx).await;
4362 buffer3.update(cx, |buffer, cx| {
4363 buffer.edit(Some(0..0), "x", cx);
4364 });
4365 events.borrow_mut().clear();
4366 fs::remove_file(dir.path().join("file3")).unwrap();
4367 buffer3
4368 .condition(&cx, |_, _| !events.borrow().is_empty())
4369 .await;
4370 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4371 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4372 }
4373
4374 #[gpui::test]
4375 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4376 use std::fs;
4377
4378 let initial_contents = "aaa\nbbbbb\nc\n";
4379 let dir = temp_tree(json!({ "the-file": initial_contents }));
4380
4381 let project = Project::test(Arc::new(RealFs), cx);
4382 let (worktree, _) = project
4383 .update(cx, |p, cx| {
4384 p.find_or_create_local_worktree(dir.path(), true, cx)
4385 })
4386 .await
4387 .unwrap();
4388 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4389
4390 worktree
4391 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4392 .await;
4393
4394 let abs_path = dir.path().join("the-file");
4395 let buffer = project
4396 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4397 .await
4398 .unwrap();
4399
4400 // TODO
4401 // Add a cursor on each row.
4402 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4403 // assert!(!buffer.is_dirty());
4404 // buffer.add_selection_set(
4405 // &(0..3)
4406 // .map(|row| Selection {
4407 // id: row as usize,
4408 // start: Point::new(row, 1),
4409 // end: Point::new(row, 1),
4410 // reversed: false,
4411 // goal: SelectionGoal::None,
4412 // })
4413 // .collect::<Vec<_>>(),
4414 // cx,
4415 // )
4416 // });
4417
4418 // Change the file on disk, adding two new lines of text, and removing
4419 // one line.
4420 buffer.read_with(cx, |buffer, _| {
4421 assert!(!buffer.is_dirty());
4422 assert!(!buffer.has_conflict());
4423 });
4424 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4425 fs::write(&abs_path, new_contents).unwrap();
4426
4427 // Because the buffer was not modified, it is reloaded from disk. Its
4428 // contents are edited according to the diff between the old and new
4429 // file contents.
4430 buffer
4431 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4432 .await;
4433
4434 buffer.update(cx, |buffer, _| {
4435 assert_eq!(buffer.text(), new_contents);
4436 assert!(!buffer.is_dirty());
4437 assert!(!buffer.has_conflict());
4438
4439 // TODO
4440 // let cursor_positions = buffer
4441 // .selection_set(selection_set_id)
4442 // .unwrap()
4443 // .selections::<Point>(&*buffer)
4444 // .map(|selection| {
4445 // assert_eq!(selection.start, selection.end);
4446 // selection.start
4447 // })
4448 // .collect::<Vec<_>>();
4449 // assert_eq!(
4450 // cursor_positions,
4451 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4452 // );
4453 });
4454
4455 // Modify the buffer
4456 buffer.update(cx, |buffer, cx| {
4457 buffer.edit(vec![0..0], " ", cx);
4458 assert!(buffer.is_dirty());
4459 assert!(!buffer.has_conflict());
4460 });
4461
4462 // Change the file on disk again, adding blank lines to the beginning.
4463 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4464
4465 // Because the buffer is modified, it doesn't reload from disk, but is
4466 // marked as having a conflict.
4467 buffer
4468 .condition(&cx, |buffer, _| buffer.has_conflict())
4469 .await;
4470 }
4471
4472 #[gpui::test]
4473 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4474 let fs = FakeFs::new(cx.background());
4475 fs.insert_tree(
4476 "/the-dir",
4477 json!({
4478 "a.rs": "
4479 fn foo(mut v: Vec<usize>) {
4480 for x in &v {
4481 v.push(1);
4482 }
4483 }
4484 "
4485 .unindent(),
4486 }),
4487 )
4488 .await;
4489
4490 let project = Project::test(fs.clone(), cx);
4491 let (worktree, _) = project
4492 .update(cx, |p, cx| {
4493 p.find_or_create_local_worktree("/the-dir", true, cx)
4494 })
4495 .await
4496 .unwrap();
4497 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4498
4499 let buffer = project
4500 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4501 .await
4502 .unwrap();
4503
4504 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4505 let message = lsp::PublishDiagnosticsParams {
4506 uri: buffer_uri.clone(),
4507 diagnostics: vec![
4508 lsp::Diagnostic {
4509 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4510 severity: Some(DiagnosticSeverity::WARNING),
4511 message: "error 1".to_string(),
4512 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4513 location: lsp::Location {
4514 uri: buffer_uri.clone(),
4515 range: lsp::Range::new(
4516 lsp::Position::new(1, 8),
4517 lsp::Position::new(1, 9),
4518 ),
4519 },
4520 message: "error 1 hint 1".to_string(),
4521 }]),
4522 ..Default::default()
4523 },
4524 lsp::Diagnostic {
4525 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4526 severity: Some(DiagnosticSeverity::HINT),
4527 message: "error 1 hint 1".to_string(),
4528 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4529 location: lsp::Location {
4530 uri: buffer_uri.clone(),
4531 range: lsp::Range::new(
4532 lsp::Position::new(1, 8),
4533 lsp::Position::new(1, 9),
4534 ),
4535 },
4536 message: "original diagnostic".to_string(),
4537 }]),
4538 ..Default::default()
4539 },
4540 lsp::Diagnostic {
4541 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4542 severity: Some(DiagnosticSeverity::ERROR),
4543 message: "error 2".to_string(),
4544 related_information: Some(vec![
4545 lsp::DiagnosticRelatedInformation {
4546 location: lsp::Location {
4547 uri: buffer_uri.clone(),
4548 range: lsp::Range::new(
4549 lsp::Position::new(1, 13),
4550 lsp::Position::new(1, 15),
4551 ),
4552 },
4553 message: "error 2 hint 1".to_string(),
4554 },
4555 lsp::DiagnosticRelatedInformation {
4556 location: lsp::Location {
4557 uri: buffer_uri.clone(),
4558 range: lsp::Range::new(
4559 lsp::Position::new(1, 13),
4560 lsp::Position::new(1, 15),
4561 ),
4562 },
4563 message: "error 2 hint 2".to_string(),
4564 },
4565 ]),
4566 ..Default::default()
4567 },
4568 lsp::Diagnostic {
4569 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4570 severity: Some(DiagnosticSeverity::HINT),
4571 message: "error 2 hint 1".to_string(),
4572 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4573 location: lsp::Location {
4574 uri: buffer_uri.clone(),
4575 range: lsp::Range::new(
4576 lsp::Position::new(2, 8),
4577 lsp::Position::new(2, 17),
4578 ),
4579 },
4580 message: "original diagnostic".to_string(),
4581 }]),
4582 ..Default::default()
4583 },
4584 lsp::Diagnostic {
4585 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4586 severity: Some(DiagnosticSeverity::HINT),
4587 message: "error 2 hint 2".to_string(),
4588 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4589 location: lsp::Location {
4590 uri: buffer_uri.clone(),
4591 range: lsp::Range::new(
4592 lsp::Position::new(2, 8),
4593 lsp::Position::new(2, 17),
4594 ),
4595 },
4596 message: "original diagnostic".to_string(),
4597 }]),
4598 ..Default::default()
4599 },
4600 ],
4601 version: None,
4602 };
4603
4604 project
4605 .update(cx, |p, cx| {
4606 p.update_diagnostics(message, &Default::default(), cx)
4607 })
4608 .unwrap();
4609 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4610
4611 assert_eq!(
4612 buffer
4613 .diagnostics_in_range::<_, Point>(0..buffer.len())
4614 .collect::<Vec<_>>(),
4615 &[
4616 DiagnosticEntry {
4617 range: Point::new(1, 8)..Point::new(1, 9),
4618 diagnostic: Diagnostic {
4619 severity: DiagnosticSeverity::WARNING,
4620 message: "error 1".to_string(),
4621 group_id: 0,
4622 is_primary: true,
4623 ..Default::default()
4624 }
4625 },
4626 DiagnosticEntry {
4627 range: Point::new(1, 8)..Point::new(1, 9),
4628 diagnostic: Diagnostic {
4629 severity: DiagnosticSeverity::HINT,
4630 message: "error 1 hint 1".to_string(),
4631 group_id: 0,
4632 is_primary: false,
4633 ..Default::default()
4634 }
4635 },
4636 DiagnosticEntry {
4637 range: Point::new(1, 13)..Point::new(1, 15),
4638 diagnostic: Diagnostic {
4639 severity: DiagnosticSeverity::HINT,
4640 message: "error 2 hint 1".to_string(),
4641 group_id: 1,
4642 is_primary: false,
4643 ..Default::default()
4644 }
4645 },
4646 DiagnosticEntry {
4647 range: Point::new(1, 13)..Point::new(1, 15),
4648 diagnostic: Diagnostic {
4649 severity: DiagnosticSeverity::HINT,
4650 message: "error 2 hint 2".to_string(),
4651 group_id: 1,
4652 is_primary: false,
4653 ..Default::default()
4654 }
4655 },
4656 DiagnosticEntry {
4657 range: Point::new(2, 8)..Point::new(2, 17),
4658 diagnostic: Diagnostic {
4659 severity: DiagnosticSeverity::ERROR,
4660 message: "error 2".to_string(),
4661 group_id: 1,
4662 is_primary: true,
4663 ..Default::default()
4664 }
4665 }
4666 ]
4667 );
4668
4669 assert_eq!(
4670 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4671 &[
4672 DiagnosticEntry {
4673 range: Point::new(1, 8)..Point::new(1, 9),
4674 diagnostic: Diagnostic {
4675 severity: DiagnosticSeverity::WARNING,
4676 message: "error 1".to_string(),
4677 group_id: 0,
4678 is_primary: true,
4679 ..Default::default()
4680 }
4681 },
4682 DiagnosticEntry {
4683 range: Point::new(1, 8)..Point::new(1, 9),
4684 diagnostic: Diagnostic {
4685 severity: DiagnosticSeverity::HINT,
4686 message: "error 1 hint 1".to_string(),
4687 group_id: 0,
4688 is_primary: false,
4689 ..Default::default()
4690 }
4691 },
4692 ]
4693 );
4694 assert_eq!(
4695 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4696 &[
4697 DiagnosticEntry {
4698 range: Point::new(1, 13)..Point::new(1, 15),
4699 diagnostic: Diagnostic {
4700 severity: DiagnosticSeverity::HINT,
4701 message: "error 2 hint 1".to_string(),
4702 group_id: 1,
4703 is_primary: false,
4704 ..Default::default()
4705 }
4706 },
4707 DiagnosticEntry {
4708 range: Point::new(1, 13)..Point::new(1, 15),
4709 diagnostic: Diagnostic {
4710 severity: DiagnosticSeverity::HINT,
4711 message: "error 2 hint 2".to_string(),
4712 group_id: 1,
4713 is_primary: false,
4714 ..Default::default()
4715 }
4716 },
4717 DiagnosticEntry {
4718 range: Point::new(2, 8)..Point::new(2, 17),
4719 diagnostic: Diagnostic {
4720 severity: DiagnosticSeverity::ERROR,
4721 message: "error 2".to_string(),
4722 group_id: 1,
4723 is_primary: true,
4724 ..Default::default()
4725 }
4726 }
4727 ]
4728 );
4729 }
4730
4731 #[gpui::test]
4732 async fn test_rename(cx: &mut gpui::TestAppContext) {
4733 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4734 let language = Arc::new(Language::new(
4735 LanguageConfig {
4736 name: "Rust".into(),
4737 path_suffixes: vec!["rs".to_string()],
4738 language_server: Some(language_server_config),
4739 ..Default::default()
4740 },
4741 Some(tree_sitter_rust::language()),
4742 ));
4743
4744 let fs = FakeFs::new(cx.background());
4745 fs.insert_tree(
4746 "/dir",
4747 json!({
4748 "one.rs": "const ONE: usize = 1;",
4749 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4750 }),
4751 )
4752 .await;
4753
4754 let project = Project::test(fs.clone(), cx);
4755 project.update(cx, |project, _| {
4756 Arc::get_mut(&mut project.languages).unwrap().add(language);
4757 });
4758
4759 let (tree, _) = project
4760 .update(cx, |project, cx| {
4761 project.find_or_create_local_worktree("/dir", true, cx)
4762 })
4763 .await
4764 .unwrap();
4765 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4766 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4767 .await;
4768
4769 let buffer = project
4770 .update(cx, |project, cx| {
4771 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4772 })
4773 .await
4774 .unwrap();
4775
4776 let mut fake_server = fake_servers.next().await.unwrap();
4777
4778 let response = project.update(cx, |project, cx| {
4779 project.prepare_rename(buffer.clone(), 7, cx)
4780 });
4781 fake_server
4782 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4783 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4784 assert_eq!(params.position, lsp::Position::new(0, 7));
4785 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4786 lsp::Position::new(0, 6),
4787 lsp::Position::new(0, 9),
4788 )))
4789 })
4790 .next()
4791 .await
4792 .unwrap();
4793 let range = response.await.unwrap().unwrap();
4794 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4795 assert_eq!(range, 6..9);
4796
4797 let response = project.update(cx, |project, cx| {
4798 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4799 });
4800 fake_server
4801 .handle_request::<lsp::request::Rename, _>(|params, _| {
4802 assert_eq!(
4803 params.text_document_position.text_document.uri.as_str(),
4804 "file:///dir/one.rs"
4805 );
4806 assert_eq!(
4807 params.text_document_position.position,
4808 lsp::Position::new(0, 7)
4809 );
4810 assert_eq!(params.new_name, "THREE");
4811 Some(lsp::WorkspaceEdit {
4812 changes: Some(
4813 [
4814 (
4815 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4816 vec![lsp::TextEdit::new(
4817 lsp::Range::new(
4818 lsp::Position::new(0, 6),
4819 lsp::Position::new(0, 9),
4820 ),
4821 "THREE".to_string(),
4822 )],
4823 ),
4824 (
4825 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4826 vec![
4827 lsp::TextEdit::new(
4828 lsp::Range::new(
4829 lsp::Position::new(0, 24),
4830 lsp::Position::new(0, 27),
4831 ),
4832 "THREE".to_string(),
4833 ),
4834 lsp::TextEdit::new(
4835 lsp::Range::new(
4836 lsp::Position::new(0, 35),
4837 lsp::Position::new(0, 38),
4838 ),
4839 "THREE".to_string(),
4840 ),
4841 ],
4842 ),
4843 ]
4844 .into_iter()
4845 .collect(),
4846 ),
4847 ..Default::default()
4848 })
4849 })
4850 .next()
4851 .await
4852 .unwrap();
4853 let mut transaction = response.await.unwrap().0;
4854 assert_eq!(transaction.len(), 2);
4855 assert_eq!(
4856 transaction
4857 .remove_entry(&buffer)
4858 .unwrap()
4859 .0
4860 .read_with(cx, |buffer, _| buffer.text()),
4861 "const THREE: usize = 1;"
4862 );
4863 assert_eq!(
4864 transaction
4865 .into_keys()
4866 .next()
4867 .unwrap()
4868 .read_with(cx, |buffer, _| buffer.text()),
4869 "const TWO: usize = one::THREE + one::THREE;"
4870 );
4871 }
4872
4873 #[gpui::test]
4874 async fn test_search(cx: &mut gpui::TestAppContext) {
4875 let fs = FakeFs::new(cx.background());
4876 fs.insert_tree(
4877 "/dir",
4878 json!({
4879 "one.rs": "const ONE: usize = 1;",
4880 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4881 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4882 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4883 }),
4884 )
4885 .await;
4886 let project = Project::test(fs.clone(), cx);
4887 let (tree, _) = project
4888 .update(cx, |project, cx| {
4889 project.find_or_create_local_worktree("/dir", true, cx)
4890 })
4891 .await
4892 .unwrap();
4893 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4894 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4895 .await;
4896
4897 assert_eq!(
4898 search(&project, SearchQuery::text("TWO", false, true), cx)
4899 .await
4900 .unwrap(),
4901 HashMap::from_iter([
4902 ("two.rs".to_string(), vec![6..9]),
4903 ("three.rs".to_string(), vec![37..40])
4904 ])
4905 );
4906
4907 let buffer_4 = project
4908 .update(cx, |project, cx| {
4909 project.open_buffer((worktree_id, "four.rs"), cx)
4910 })
4911 .await
4912 .unwrap();
4913 buffer_4.update(cx, |buffer, cx| {
4914 buffer.edit([20..28, 31..43], "two::TWO", cx);
4915 });
4916
4917 assert_eq!(
4918 search(&project, SearchQuery::text("TWO", false, true), cx)
4919 .await
4920 .unwrap(),
4921 HashMap::from_iter([
4922 ("two.rs".to_string(), vec![6..9]),
4923 ("three.rs".to_string(), vec![37..40]),
4924 ("four.rs".to_string(), vec![25..28, 36..39])
4925 ])
4926 );
4927
4928 async fn search(
4929 project: &ModelHandle<Project>,
4930 query: SearchQuery,
4931 cx: &mut gpui::TestAppContext,
4932 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
4933 let results = project
4934 .update(cx, |project, cx| project.search(query, cx))
4935 .await?;
4936
4937 Ok(results
4938 .into_iter()
4939 .map(|(buffer, ranges)| {
4940 buffer.read_with(cx, |buffer, _| {
4941 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4942 let ranges = ranges
4943 .into_iter()
4944 .map(|range| range.to_offset(buffer))
4945 .collect::<Vec<_>>();
4946 (path, ranges)
4947 })
4948 })
4949 .collect())
4950 }
4951 }
4952}