pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;
52
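/// Anything that can be tracked as a project item and traced back to a
/// worktree entry. A minimal sketch of an implementor is shown below; the
/// `ImageItem` type is purely illustrative, and it assumes gpui's `Entity`
/// trait only needs an associated `Event` type here.
///
/// ```ignore
/// struct ImageItem {
///     entry_id: Option<ProjectEntryId>,
/// }
///
/// impl gpui::Entity for ImageItem {
///     type Event = ();
/// }
///
/// impl Item for ImageItem {
///     fn entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
///         self.entry_id
///     }
/// }
/// ```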
53pub trait Item: Entity {
54 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
55}
56
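/// The in-memory state of a project: its worktrees, open buffers, attached
/// language servers, and (when shared or joined) its collaborators. A project
/// is either local, backed by the filesystem through `Fs`, or remote, a
/// replica of another peer's project; which one is recorded in `client_state`.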
57pub struct Project {
58 worktrees: Vec<WorktreeHandle>,
59 active_entry: Option<ProjectEntryId>,
60 languages: Arc<LanguageRegistry>,
61 language_servers:
62 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
63 started_language_servers:
64 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
65 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
66 language_server_settings: Arc<Mutex<serde_json::Value>>,
67 next_language_server_id: usize,
68 client: Arc<client::Client>,
69 next_entry_id: Arc<AtomicUsize>,
70 user_store: ModelHandle<UserStore>,
71 fs: Arc<dyn Fs>,
72 client_state: ProjectClientState,
73 collaborators: HashMap<PeerId, Collaborator>,
74 subscriptions: Vec<client::Subscription>,
75 language_servers_with_diagnostics_running: isize,
76 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
77 shared_buffers: HashMap<PeerId, HashSet<u64>>,
78 loading_buffers: HashMap<
79 ProjectPath,
80 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
81 >,
82 loading_local_worktrees:
83 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
84 opened_buffers: HashMap<u64, OpenBuffer>,
85 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
86 nonce: u128,
87}
88
89enum OpenBuffer {
90 Strong(ModelHandle<Buffer>),
91 Weak(WeakModelHandle<Buffer>),
92 Loading(Vec<Operation>),
93}
94
95enum WorktreeHandle {
96 Strong(ModelHandle<Worktree>),
97 Weak(WeakModelHandle<Worktree>),
98}
99
100enum ProjectClientState {
101 Local {
102 is_shared: bool,
103 remote_id_tx: watch::Sender<Option<u64>>,
104 remote_id_rx: watch::Receiver<Option<u64>>,
105 _maintain_remote_id_task: Task<Option<()>>,
106 },
107 Remote {
108 sharing_has_stopped: bool,
109 remote_id: u64,
110 replica_id: ReplicaId,
111 _detect_unshare_task: Task<Option<()>>,
112 },
113}
114
115#[derive(Clone, Debug)]
116pub struct Collaborator {
117 pub user: Arc<User>,
118 pub peer_id: PeerId,
119 pub replica_id: ReplicaId,
120}
121
122#[derive(Clone, Debug, PartialEq)]
123pub enum Event {
124 ActiveEntryChanged(Option<ProjectEntryId>),
125 WorktreeRemoved(WorktreeId),
126 DiskBasedDiagnosticsStarted,
127 DiskBasedDiagnosticsUpdated,
128 DiskBasedDiagnosticsFinished,
129 DiagnosticsUpdated(ProjectPath),
130 RemoteIdChanged(Option<u64>),
131 CollaboratorLeft(PeerId),
132}
133
134enum LanguageServerEvent {
135 WorkStart {
136 token: String,
137 },
138 WorkProgress {
139 token: String,
140 progress: LanguageServerProgress,
141 },
142 WorkEnd {
143 token: String,
144 },
145 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
146}
147
148pub struct LanguageServerStatus {
149 pub name: String,
150 pub pending_work: BTreeMap<String, LanguageServerProgress>,
151 pending_diagnostic_updates: isize,
152}
153
154#[derive(Clone, Debug)]
155pub struct LanguageServerProgress {
156 pub message: Option<String>,
157 pub percentage: Option<usize>,
158 pub last_update_at: Instant,
159}
160
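/// A path to a file or directory, expressed relative to one of the project's
/// worktrees. A sketch of building one by hand (the literal path is
/// illustrative):
///
/// ```ignore
/// let project_path = ProjectPath {
///     worktree_id,
///     path: Path::new("src/main.rs").into(),
/// };
/// ```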
161#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
162pub struct ProjectPath {
163 pub worktree_id: WorktreeId,
164 pub path: Arc<Path>,
165}
166
167#[derive(Clone, Debug, Default, PartialEq)]
168pub struct DiagnosticSummary {
169 pub error_count: usize,
170 pub warning_count: usize,
171 pub info_count: usize,
172 pub hint_count: usize,
173}
174
175#[derive(Debug)]
176pub struct Location {
177 pub buffer: ModelHandle<Buffer>,
178 pub range: Range<language::Anchor>,
179}
180
181#[derive(Debug)]
182pub struct DocumentHighlight {
183 pub range: Range<language::Anchor>,
184 pub kind: DocumentHighlightKind,
185}
186
187#[derive(Clone, Debug)]
188pub struct Symbol {
189 pub source_worktree_id: WorktreeId,
190 pub worktree_id: WorktreeId,
191 pub language_server_name: LanguageServerName,
192 pub path: PathBuf,
193 pub label: CodeLabel,
194 pub name: String,
195 pub kind: lsp::SymbolKind,
196 pub range: Range<PointUtf16>,
197 pub signature: [u8; 32],
198}
199
200#[derive(Default)]
201pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
202
203impl DiagnosticSummary {
204 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
205 let mut this = Self {
206 error_count: 0,
207 warning_count: 0,
208 info_count: 0,
209 hint_count: 0,
210 };
211
212 for entry in diagnostics {
213 if entry.diagnostic.is_primary {
214 match entry.diagnostic.severity {
215 DiagnosticSeverity::ERROR => this.error_count += 1,
216 DiagnosticSeverity::WARNING => this.warning_count += 1,
217 DiagnosticSeverity::INFORMATION => this.info_count += 1,
218 DiagnosticSeverity::HINT => this.hint_count += 1,
219 _ => {}
220 }
221 }
222 }
223
224 this
225 }
226
227 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
228 proto::DiagnosticSummary {
229 path: path.to_string_lossy().to_string(),
230 error_count: self.error_count as u32,
231 warning_count: self.warning_count as u32,
232 info_count: self.info_count as u32,
233 hint_count: self.hint_count as u32,
234 }
235 }
236}
237
238#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
239pub struct ProjectEntryId(usize);
240
241impl ProjectEntryId {
242 pub fn new(counter: &AtomicUsize) -> Self {
243 Self(counter.fetch_add(1, SeqCst))
244 }
245
246 pub fn from_proto(id: u64) -> Self {
247 Self(id as usize)
248 }
249
250 pub fn to_proto(&self) -> u64 {
251 self.0 as u64
252 }
253
254 pub fn to_usize(&self) -> usize {
255 self.0
256 }
257}
258
259impl Project {
260 pub fn init(client: &Arc<Client>) {
261 client.add_model_message_handler(Self::handle_add_collaborator);
262 client.add_model_message_handler(Self::handle_buffer_reloaded);
263 client.add_model_message_handler(Self::handle_buffer_saved);
264 client.add_model_message_handler(Self::handle_start_language_server);
265 client.add_model_message_handler(Self::handle_update_language_server);
266 client.add_model_message_handler(Self::handle_remove_collaborator);
267 client.add_model_message_handler(Self::handle_register_worktree);
268 client.add_model_message_handler(Self::handle_unregister_worktree);
269 client.add_model_message_handler(Self::handle_unshare_project);
270 client.add_model_message_handler(Self::handle_update_buffer_file);
271 client.add_model_message_handler(Self::handle_update_buffer);
272 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
273 client.add_model_message_handler(Self::handle_update_worktree);
274 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
275 client.add_model_request_handler(Self::handle_apply_code_action);
276 client.add_model_request_handler(Self::handle_format_buffers);
277 client.add_model_request_handler(Self::handle_get_code_actions);
278 client.add_model_request_handler(Self::handle_get_completions);
279 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
280 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
281 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
282 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
283 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
284 client.add_model_request_handler(Self::handle_search_project);
285 client.add_model_request_handler(Self::handle_get_project_symbols);
286 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
287 client.add_model_request_handler(Self::handle_open_buffer_by_id);
288 client.add_model_request_handler(Self::handle_open_buffer_by_path);
289 client.add_model_request_handler(Self::handle_save_buffer);
290 }
291
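    /// Creates a project backed by the local filesystem. A rough usage
    /// sketch, mirroring what `Project::test` does below; the `client`,
    /// `http_client`, and `fs` values are assumed to come from the caller:
    ///
    /// ```ignore
    /// let languages = Arc::new(LanguageRegistry::test());
    /// let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
    /// let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
    /// ```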
292 pub fn local(
293 client: Arc<Client>,
294 user_store: ModelHandle<UserStore>,
295 languages: Arc<LanguageRegistry>,
296 fs: Arc<dyn Fs>,
297 cx: &mut MutableAppContext,
298 ) -> ModelHandle<Self> {
299 cx.add_model(|cx: &mut ModelContext<Self>| {
300 let (remote_id_tx, remote_id_rx) = watch::channel();
301 let _maintain_remote_id_task = cx.spawn_weak({
302 let rpc = client.clone();
303 move |this, mut cx| {
304 async move {
305 let mut status = rpc.status();
306 while let Some(status) = status.next().await {
307 if let Some(this) = this.upgrade(&cx) {
308 if status.is_connected() {
309 this.update(&mut cx, |this, cx| this.register(cx)).await?;
310 } else {
311 this.update(&mut cx, |this, cx| this.unregister(cx));
312 }
313 }
314 }
315 Ok(())
316 }
317 .log_err()
318 }
319 });
320
321 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
322 Self {
323 worktrees: Default::default(),
324 collaborators: Default::default(),
325 opened_buffers: Default::default(),
326 shared_buffers: Default::default(),
327 loading_buffers: Default::default(),
328 loading_local_worktrees: Default::default(),
329 buffer_snapshots: Default::default(),
330 client_state: ProjectClientState::Local {
331 is_shared: false,
332 remote_id_tx,
333 remote_id_rx,
334 _maintain_remote_id_task,
335 },
336 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
337 subscriptions: Vec::new(),
338 active_entry: None,
339 languages,
340 client,
341 user_store,
342 fs,
343 next_entry_id: Default::default(),
344 language_servers_with_diagnostics_running: 0,
345 language_servers: Default::default(),
346 started_language_servers: Default::default(),
347 language_server_statuses: Default::default(),
348 language_server_settings: Default::default(),
349 next_language_server_id: 0,
350 nonce: StdRng::from_entropy().gen(),
351 }
352 })
353 }
354
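    /// Joins a project hosted by another peer, downloading its worktrees and
    /// collaborators. A hedged sketch, run from an `AsyncAppContext` (for
    /// example in a test); `remote_id` and the other inputs are assumed to be
    /// available to the caller:
    ///
    /// ```ignore
    /// let project =
    ///     Project::remote(remote_id, client, user_store, languages, fs, &mut cx).await?;
    /// let replica_id = project.read_with(&cx, |project, _| project.replica_id());
    /// ```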
355 pub async fn remote(
356 remote_id: u64,
357 client: Arc<Client>,
358 user_store: ModelHandle<UserStore>,
359 languages: Arc<LanguageRegistry>,
360 fs: Arc<dyn Fs>,
361 cx: &mut AsyncAppContext,
362 ) -> Result<ModelHandle<Self>> {
363 client.authenticate_and_connect(true, &cx).await?;
364
365 let response = client
366 .request(proto::JoinProject {
367 project_id: remote_id,
368 })
369 .await?;
370
371 let replica_id = response.replica_id as ReplicaId;
372
373 let mut worktrees = Vec::new();
374 for worktree in response.worktrees {
375 let (worktree, load_task) = cx
376 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
377 worktrees.push(worktree);
378 load_task.detach();
379 }
380
381 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
382 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
383 let mut this = Self {
384 worktrees: Vec::new(),
385 loading_buffers: Default::default(),
386 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
387 shared_buffers: Default::default(),
388 loading_local_worktrees: Default::default(),
389 active_entry: None,
390 collaborators: Default::default(),
391 languages,
392 user_store: user_store.clone(),
393 fs,
394 next_entry_id: Default::default(),
395 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
396 client: client.clone(),
397 client_state: ProjectClientState::Remote {
398 sharing_has_stopped: false,
399 remote_id,
400 replica_id,
401 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
402 async move {
403 let mut status = client.status();
404 let is_connected =
405 status.next().await.map_or(false, |s| s.is_connected());
406 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
407 if !is_connected || status.next().await.is_some() {
408 if let Some(this) = this.upgrade(&cx) {
409 this.update(&mut cx, |this, cx| this.project_unshared(cx))
410 }
411 }
412 Ok(())
413 }
414 .log_err()
415 }),
416 },
417 language_servers_with_diagnostics_running: 0,
418 language_servers: Default::default(),
419 started_language_servers: Default::default(),
420 language_server_settings: Default::default(),
421 language_server_statuses: response
422 .language_servers
423 .into_iter()
424 .map(|server| {
425 (
426 server.id as usize,
427 LanguageServerStatus {
428 name: server.name,
429 pending_work: Default::default(),
430 pending_diagnostic_updates: 0,
431 },
432 )
433 })
434 .collect(),
435 next_language_server_id: 0,
436 opened_buffers: Default::default(),
437 buffer_snapshots: Default::default(),
438 nonce: StdRng::from_entropy().gen(),
439 };
440 for worktree in worktrees {
441 this.add_worktree(&worktree, cx);
442 }
443 this
444 });
445
446 let user_ids = response
447 .collaborators
448 .iter()
449 .map(|peer| peer.user_id)
450 .collect();
451 user_store
452 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
453 .await?;
454 let mut collaborators = HashMap::default();
455 for message in response.collaborators {
456 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
457 collaborators.insert(collaborator.peer_id, collaborator);
458 }
459
460 this.update(cx, |this, _| {
461 this.collaborators = collaborators;
462 });
463
464 Ok(this)
465 }
466
467 #[cfg(any(test, feature = "test-support"))]
468 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
469 let languages = Arc::new(LanguageRegistry::test());
470 let http_client = client::test::FakeHttpClient::with_404_response();
471 let client = client::Client::new(http_client.clone());
472 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
473 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
474 }
475
476 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
477 self.opened_buffers
478 .get(&remote_id)
479 .and_then(|buffer| buffer.upgrade(cx))
480 }
481
482 #[cfg(any(test, feature = "test-support"))]
483 pub fn languages(&self) -> &Arc<LanguageRegistry> {
484 &self.languages
485 }
486
487 #[cfg(any(test, feature = "test-support"))]
488 pub fn check_invariants(&self, cx: &AppContext) {
489 if self.is_local() {
490 let mut worktree_root_paths = HashMap::default();
491 for worktree in self.worktrees(cx) {
492 let worktree = worktree.read(cx);
493 let abs_path = worktree.as_local().unwrap().abs_path().clone();
494 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
495 assert_eq!(
496 prev_worktree_id,
497 None,
498 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
499 abs_path,
500 worktree.id(),
501 prev_worktree_id
502 )
503 }
504 } else {
505 let replica_id = self.replica_id();
506 for buffer in self.opened_buffers.values() {
507 if let Some(buffer) = buffer.upgrade(cx) {
508 let buffer = buffer.read(cx);
509 assert_eq!(
510 buffer.deferred_ops_len(),
511 0,
512 "replica {}, buffer {} has deferred operations",
513 replica_id,
514 buffer.remote_id()
515 );
516 }
517 }
518 }
519 }
520
521 #[cfg(any(test, feature = "test-support"))]
522 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
523 let path = path.into();
524 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
525 self.opened_buffers.iter().any(|(_, buffer)| {
526 if let Some(buffer) = buffer.upgrade(cx) {
527 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
528 if file.worktree == worktree && file.path() == &path.path {
529 return true;
530 }
531 }
532 }
533 false
534 })
535 } else {
536 false
537 }
538 }
539
540 pub fn fs(&self) -> &Arc<dyn Fs> {
541 &self.fs
542 }
543
544 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
545 self.unshare(cx);
546 for worktree in &self.worktrees {
547 if let Some(worktree) = worktree.upgrade(cx) {
548 worktree.update(cx, |worktree, _| {
549 worktree.as_local_mut().unwrap().unregister();
550 });
551 }
552 }
553
554 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
555 *remote_id_tx.borrow_mut() = None;
556 }
557
558 self.subscriptions.clear();
559 }
560
561 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
562 self.unregister(cx);
563
564 let response = self.client.request(proto::RegisterProject {});
565 cx.spawn(|this, mut cx| async move {
566 let remote_id = response.await?.project_id;
567
568 let mut registrations = Vec::new();
569 this.update(&mut cx, |this, cx| {
570 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
571 *remote_id_tx.borrow_mut() = Some(remote_id);
572 }
573
574 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
575
576 this.subscriptions
577 .push(this.client.add_model_for_remote_entity(remote_id, cx));
578
579 for worktree in &this.worktrees {
580 if let Some(worktree) = worktree.upgrade(cx) {
581 registrations.push(worktree.update(cx, |worktree, cx| {
582 let worktree = worktree.as_local_mut().unwrap();
583 worktree.register(remote_id, cx)
584 }));
585 }
586 }
587 });
588
589 futures::future::try_join_all(registrations).await?;
590 Ok(())
591 })
592 }
593
594 pub fn remote_id(&self) -> Option<u64> {
595 match &self.client_state {
596 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
597 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
598 }
599 }
600
601 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
602 let mut id = None;
603 let mut watch = None;
604 match &self.client_state {
605 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
606 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
607 }
608
609 async move {
610 if let Some(id) = id {
611 return id;
612 }
613 let mut watch = watch.unwrap();
614 loop {
615 let id = *watch.borrow();
616 if let Some(id) = id {
617 return id;
618 }
619 watch.next().await;
620 }
621 }
622 }
623
624 pub fn replica_id(&self) -> ReplicaId {
625 match &self.client_state {
626 ProjectClientState::Local { .. } => 0,
627 ProjectClientState::Remote { replica_id, .. } => *replica_id,
628 }
629 }
630
631 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
632 &self.collaborators
633 }
634
635 pub fn worktrees<'a>(
636 &'a self,
637 cx: &'a AppContext,
638 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
639 self.worktrees
640 .iter()
641 .filter_map(move |worktree| worktree.upgrade(cx))
642 }
643
644 pub fn visible_worktrees<'a>(
645 &'a self,
646 cx: &'a AppContext,
647 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
648 self.worktrees.iter().filter_map(|worktree| {
649 worktree.upgrade(cx).and_then(|worktree| {
650 if worktree.read(cx).is_visible() {
651 Some(worktree)
652 } else {
653 None
654 }
655 })
656 })
657 }
658
659 pub fn worktree_for_id(
660 &self,
661 id: WorktreeId,
662 cx: &AppContext,
663 ) -> Option<ModelHandle<Worktree>> {
664 self.worktrees(cx)
665 .find(|worktree| worktree.read(cx).id() == id)
666 }
667
668 pub fn worktree_for_entry(
669 &self,
670 entry_id: ProjectEntryId,
671 cx: &AppContext,
672 ) -> Option<ModelHandle<Worktree>> {
673 self.worktrees(cx)
674 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
675 }
676
677 pub fn worktree_id_for_entry(
678 &self,
679 entry_id: ProjectEntryId,
680 cx: &AppContext,
681 ) -> Option<WorktreeId> {
682 self.worktree_for_entry(entry_id, cx)
683 .map(|worktree| worktree.read(cx).id())
684 }
685
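    /// Makes a registered local project available to collaborators, upgrading
    /// its buffer and worktree handles to strong references. A sketch; it
    /// assumes the project has already been registered so a remote id exists:
    ///
    /// ```ignore
    /// project.update(cx, |project, cx| project.share(cx)).await?;
    /// ```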
686 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
687 let rpc = self.client.clone();
688 cx.spawn(|this, mut cx| async move {
689 let project_id = this.update(&mut cx, |this, cx| {
690 if let ProjectClientState::Local {
691 is_shared,
692 remote_id_rx,
693 ..
694 } = &mut this.client_state
695 {
696 *is_shared = true;
697
698 for open_buffer in this.opened_buffers.values_mut() {
699 match open_buffer {
700 OpenBuffer::Strong(_) => {}
701 OpenBuffer::Weak(buffer) => {
702 if let Some(buffer) = buffer.upgrade(cx) {
703 *open_buffer = OpenBuffer::Strong(buffer);
704 }
705 }
706 OpenBuffer::Loading(_) => unreachable!(),
707 }
708 }
709
710 for worktree_handle in this.worktrees.iter_mut() {
711 match worktree_handle {
712 WorktreeHandle::Strong(_) => {}
713 WorktreeHandle::Weak(worktree) => {
714 if let Some(worktree) = worktree.upgrade(cx) {
715 *worktree_handle = WorktreeHandle::Strong(worktree);
716 }
717 }
718 }
719 }
720
721 remote_id_rx
722 .borrow()
723 .ok_or_else(|| anyhow!("no project id"))
724 } else {
725 Err(anyhow!("can't share a remote project"))
726 }
727 })?;
728
729 rpc.request(proto::ShareProject { project_id }).await?;
730
731 let mut tasks = Vec::new();
732 this.update(&mut cx, |this, cx| {
733 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
734 worktree.update(cx, |worktree, cx| {
735 let worktree = worktree.as_local_mut().unwrap();
736 tasks.push(worktree.share(project_id, cx));
737 });
738 }
739 });
740 for task in tasks {
741 task.await?;
742 }
743 this.update(&mut cx, |_, cx| cx.notify());
744 Ok(())
745 })
746 }
747
748 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
749 let rpc = self.client.clone();
750
751 if let ProjectClientState::Local {
752 is_shared,
753 remote_id_rx,
754 ..
755 } = &mut self.client_state
756 {
757 if !*is_shared {
758 return;
759 }
760
761 *is_shared = false;
762 self.collaborators.clear();
763 self.shared_buffers.clear();
764 for worktree_handle in self.worktrees.iter_mut() {
765 if let WorktreeHandle::Strong(worktree) = worktree_handle {
766 let is_visible = worktree.update(cx, |worktree, _| {
767 worktree.as_local_mut().unwrap().unshare();
768 worktree.is_visible()
769 });
770 if !is_visible {
771 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
772 }
773 }
774 }
775
776 for open_buffer in self.opened_buffers.values_mut() {
777 match open_buffer {
778 OpenBuffer::Strong(buffer) => {
779 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
780 }
781 _ => {}
782 }
783 }
784
785 if let Some(project_id) = *remote_id_rx.borrow() {
786 rpc.send(proto::UnshareProject { project_id }).log_err();
787 }
788
789 cx.notify();
790 } else {
791 log::error!("attempted to unshare a remote project");
792 }
793 }
794
795 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
796 if let ProjectClientState::Remote {
797 sharing_has_stopped,
798 ..
799 } = &mut self.client_state
800 {
801 *sharing_has_stopped = true;
802 self.collaborators.clear();
803 cx.notify();
804 }
805 }
806
807 pub fn is_read_only(&self) -> bool {
808 match &self.client_state {
809 ProjectClientState::Local { .. } => false,
810 ProjectClientState::Remote {
811 sharing_has_stopped,
812 ..
813 } => *sharing_has_stopped,
814 }
815 }
816
817 pub fn is_local(&self) -> bool {
818 match &self.client_state {
819 ProjectClientState::Local { .. } => true,
820 ProjectClientState::Remote { .. } => false,
821 }
822 }
823
824 pub fn is_remote(&self) -> bool {
825 !self.is_local()
826 }
827
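    /// Creates an empty plain-text buffer. Only supported on local projects.
    /// A sketch:
    ///
    /// ```ignore
    /// let buffer = project.update(cx, |project, cx| project.create_buffer(cx))?;
    /// let text = buffer.read_with(cx, |buffer, _| buffer.text());
    /// ```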
828 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
829 if self.is_remote() {
830 return Err(anyhow!("creating buffers as a guest is not supported yet"));
831 }
832
833 let buffer = cx.add_model(|cx| {
834 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
835 });
836 self.register_buffer(&buffer, cx)?;
837 Ok(buffer)
838 }
839
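    /// Opens the item at the given project path, returning its worktree entry
    /// id together with an untyped handle to the opened model (currently
    /// always a buffer). A sketch:
    ///
    /// ```ignore
    /// let open = project.update(cx, |project, cx| project.open_path(project_path, cx));
    /// let (entry_id, item) = open.await?;
    /// ```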
840 pub fn open_path(
841 &mut self,
842 path: impl Into<ProjectPath>,
843 cx: &mut ModelContext<Self>,
844 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
845 let task = self.open_buffer(path, cx);
846 cx.spawn_weak(|_, cx| async move {
847 let buffer = task.await?;
848 let project_entry_id = buffer
849 .read_with(&cx, |buffer, cx| {
850 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
851 })
852 .ok_or_else(|| anyhow!("no project entry"))?;
853 Ok((project_entry_id, buffer.into()))
854 })
855 }
856
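    /// Opens (or returns an already-open) buffer for a path inside one of the
    /// project's worktrees, deduplicating concurrent loads of the same path.
    /// A sketch using a hand-built `ProjectPath` (the literal path is
    /// illustrative):
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| {
    ///         project.open_buffer(
    ///             ProjectPath {
    ///                 worktree_id,
    ///                 path: Path::new("src/lib.rs").into(),
    ///             },
    ///             cx,
    ///         )
    ///     })
    ///     .await?;
    /// ```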
857 pub fn open_buffer(
858 &mut self,
859 path: impl Into<ProjectPath>,
860 cx: &mut ModelContext<Self>,
861 ) -> Task<Result<ModelHandle<Buffer>>> {
862 let project_path = path.into();
863 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
864 worktree
865 } else {
866 return Task::ready(Err(anyhow!("no such worktree")));
867 };
868
869 // If there is already a buffer for the given path, then return it.
870 let existing_buffer = self.get_open_buffer(&project_path, cx);
871 if let Some(existing_buffer) = existing_buffer {
872 return Task::ready(Ok(existing_buffer));
873 }
874
875 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
876 // If the given path is already being loaded, then wait for that existing
877 // task to complete and return the same buffer.
878 hash_map::Entry::Occupied(e) => e.get().clone(),
879
880 // Otherwise, record the fact that this path is now being loaded.
881 hash_map::Entry::Vacant(entry) => {
882 let (mut tx, rx) = postage::watch::channel();
883 entry.insert(rx.clone());
884
885 let load_buffer = if worktree.read(cx).is_local() {
886 self.open_local_buffer(&project_path.path, &worktree, cx)
887 } else {
888 self.open_remote_buffer(&project_path.path, &worktree, cx)
889 };
890
891 cx.spawn(move |this, mut cx| async move {
892 let load_result = load_buffer.await;
893 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
894 // Record the fact that the buffer is no longer loading.
895 this.loading_buffers.remove(&project_path);
896 let buffer = load_result.map_err(Arc::new)?;
897 Ok(buffer)
898 }));
899 })
900 .detach();
901 rx
902 }
903 };
904
905 cx.foreground().spawn(async move {
906 loop {
907 if let Some(result) = loading_watch.borrow().as_ref() {
908 match result {
909 Ok(buffer) => return Ok(buffer.clone()),
910 Err(error) => return Err(anyhow!("{}", error)),
911 }
912 }
913 loading_watch.next().await;
914 }
915 })
916 }
917
918 fn open_local_buffer(
919 &mut self,
920 path: &Arc<Path>,
921 worktree: &ModelHandle<Worktree>,
922 cx: &mut ModelContext<Self>,
923 ) -> Task<Result<ModelHandle<Buffer>>> {
924 let load_buffer = worktree.update(cx, |worktree, cx| {
925 let worktree = worktree.as_local_mut().unwrap();
926 worktree.load_buffer(path, cx)
927 });
928 cx.spawn(|this, mut cx| async move {
929 let buffer = load_buffer.await?;
930 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
931 Ok(buffer)
932 })
933 }
934
935 fn open_remote_buffer(
936 &mut self,
937 path: &Arc<Path>,
938 worktree: &ModelHandle<Worktree>,
939 cx: &mut ModelContext<Self>,
940 ) -> Task<Result<ModelHandle<Buffer>>> {
941 let rpc = self.client.clone();
942 let project_id = self.remote_id().unwrap();
943 let remote_worktree_id = worktree.read(cx).id();
944 let path = path.clone();
945 let path_string = path.to_string_lossy().to_string();
946 cx.spawn(|this, mut cx| async move {
947 let response = rpc
948 .request(proto::OpenBufferByPath {
949 project_id,
950 worktree_id: remote_worktree_id.to_proto(),
951 path: path_string,
952 })
953 .await?;
954 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
955 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
956 .await
957 })
958 }
959
960 fn open_local_buffer_via_lsp(
961 &mut self,
962 abs_path: lsp::Url,
963 lsp_adapter: Arc<dyn LspAdapter>,
964 lsp_server: Arc<LanguageServer>,
965 cx: &mut ModelContext<Self>,
966 ) -> Task<Result<ModelHandle<Buffer>>> {
967 cx.spawn(|this, mut cx| async move {
968 let abs_path = abs_path
969 .to_file_path()
970 .map_err(|_| anyhow!("can't convert URI to path"))?;
971 let (worktree, relative_path) = if let Some(result) =
972 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
973 {
974 result
975 } else {
976 let worktree = this
977 .update(&mut cx, |this, cx| {
978 this.create_local_worktree(&abs_path, false, cx)
979 })
980 .await?;
981 this.update(&mut cx, |this, cx| {
982 this.language_servers.insert(
983 (worktree.read(cx).id(), lsp_adapter.name()),
984 (lsp_adapter, lsp_server),
985 );
986 });
987 (worktree, PathBuf::new())
988 };
989
990 let project_path = ProjectPath {
991 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
992 path: relative_path.into(),
993 };
994 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
995 .await
996 })
997 }
998
999 pub fn open_buffer_by_id(
1000 &mut self,
1001 id: u64,
1002 cx: &mut ModelContext<Self>,
1003 ) -> Task<Result<ModelHandle<Buffer>>> {
1004 if let Some(buffer) = self.buffer_for_id(id, cx) {
1005 Task::ready(Ok(buffer))
1006 } else if self.is_local() {
1007 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1008 } else if let Some(project_id) = self.remote_id() {
1009 let request = self
1010 .client
1011 .request(proto::OpenBufferById { project_id, id });
1012 cx.spawn(|this, mut cx| async move {
1013 let buffer = request
1014 .await?
1015 .buffer
1016 .ok_or_else(|| anyhow!("invalid buffer"))?;
1017 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1018 .await
1019 })
1020 } else {
1021 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1022 }
1023 }
1024
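    /// Saves a buffer under a new absolute path, creating a worktree for that
    /// path if necessary and re-detecting the buffer's language afterwards.
    /// A sketch (the path is illustrative):
    ///
    /// ```ignore
    /// project
    ///     .update(cx, |project, cx| {
    ///         project.save_buffer_as(buffer.clone(), "/tmp/notes.txt".into(), cx)
    ///     })
    ///     .await?;
    /// ```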
1025 pub fn save_buffer_as(
1026 &mut self,
1027 buffer: ModelHandle<Buffer>,
1028 abs_path: PathBuf,
1029 cx: &mut ModelContext<Project>,
1030 ) -> Task<Result<()>> {
1031 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1032 cx.spawn(|this, mut cx| async move {
1033 let (worktree, path) = worktree_task.await?;
1034 worktree
1035 .update(&mut cx, |worktree, cx| {
1036 worktree
1037 .as_local_mut()
1038 .unwrap()
1039 .save_buffer_as(buffer.clone(), path, cx)
1040 })
1041 .await?;
1042 this.update(&mut cx, |this, cx| {
1043 this.assign_language_to_buffer(&buffer, cx);
1044 this.register_buffer_with_language_server(&buffer, cx);
1045 });
1046 Ok(())
1047 })
1048 }
1049
1050 pub fn get_open_buffer(
1051 &mut self,
1052 path: &ProjectPath,
1053 cx: &mut ModelContext<Self>,
1054 ) -> Option<ModelHandle<Buffer>> {
1055 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1056 self.opened_buffers.values().find_map(|buffer| {
1057 let buffer = buffer.upgrade(cx)?;
1058 let file = File::from_dyn(buffer.read(cx).file())?;
1059 if file.worktree == worktree && file.path() == &path.path {
1060 Some(buffer)
1061 } else {
1062 None
1063 }
1064 })
1065 }
1066
1067 fn register_buffer(
1068 &mut self,
1069 buffer: &ModelHandle<Buffer>,
1070 cx: &mut ModelContext<Self>,
1071 ) -> Result<()> {
1072 let remote_id = buffer.read(cx).remote_id();
1073 let open_buffer = if self.is_remote() || self.is_shared() {
1074 OpenBuffer::Strong(buffer.clone())
1075 } else {
1076 OpenBuffer::Weak(buffer.downgrade())
1077 };
1078
1079 match self.opened_buffers.insert(remote_id, open_buffer) {
1080 None => {}
1081 Some(OpenBuffer::Loading(operations)) => {
1082 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1083 }
1084 Some(OpenBuffer::Weak(existing_handle)) => {
1085 if existing_handle.upgrade(cx).is_some() {
1086 Err(anyhow!(
1087 "already registered buffer with remote id {}",
1088 remote_id
1089 ))?
1090 }
1091 }
1092 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1093 "already registered buffer with remote id {}",
1094 remote_id
1095 ))?,
1096 }
1097 cx.subscribe(buffer, |this, buffer, event, cx| {
1098 this.on_buffer_event(buffer, event, cx);
1099 })
1100 .detach();
1101
1102 self.assign_language_to_buffer(buffer, cx);
1103 self.register_buffer_with_language_server(buffer, cx);
1104
1105 Ok(())
1106 }
1107
1108 fn register_buffer_with_language_server(
1109 &mut self,
1110 buffer_handle: &ModelHandle<Buffer>,
1111 cx: &mut ModelContext<Self>,
1112 ) {
1113 let buffer = buffer_handle.read(cx);
1114 let buffer_id = buffer.remote_id();
1115 if let Some(file) = File::from_dyn(buffer.file()) {
1116 if file.is_local() {
1117 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1118 let initial_snapshot = buffer.text_snapshot();
1119 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1120
1121 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1122 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1123 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1124 .log_err();
1125 }
1126 }
1127
1128 if let Some((_, server)) = language_server {
1129 server
1130 .notify::<lsp::notification::DidOpenTextDocument>(
1131 lsp::DidOpenTextDocumentParams {
1132 text_document: lsp::TextDocumentItem::new(
1133 uri,
1134 Default::default(),
1135 0,
1136 initial_snapshot.text(),
1137 ),
                            },
                        )
1141 .log_err();
1142 buffer_handle.update(cx, |buffer, cx| {
1143 buffer.set_completion_triggers(
1144 server
1145 .capabilities()
1146 .completion_provider
1147 .as_ref()
1148 .and_then(|provider| provider.trigger_characters.clone())
1149 .unwrap_or(Vec::new()),
1150 cx,
1151 )
1152 });
1153 self.buffer_snapshots
1154 .insert(buffer_id, vec![(0, initial_snapshot)]);
1155 }
1156
1157 cx.observe_release(buffer_handle, |this, buffer, cx| {
1158 if let Some(file) = File::from_dyn(buffer.file()) {
1159 if file.is_local() {
1160 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1161 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1162 server
1163 .notify::<lsp::notification::DidCloseTextDocument>(
1164 lsp::DidCloseTextDocumentParams {
1165 text_document: lsp::TextDocumentIdentifier::new(
1166 uri.clone(),
1167 ),
1168 },
1169 )
1170 .log_err();
1171 }
1172 }
1173 }
1174 })
1175 .detach();
1176 }
1177 }
1178 }
1179
1180 fn on_buffer_event(
1181 &mut self,
1182 buffer: ModelHandle<Buffer>,
1183 event: &BufferEvent,
1184 cx: &mut ModelContext<Self>,
1185 ) -> Option<()> {
1186 match event {
1187 BufferEvent::Operation(operation) => {
1188 let project_id = self.remote_id()?;
1189 let request = self.client.request(proto::UpdateBuffer {
1190 project_id,
1191 buffer_id: buffer.read(cx).remote_id(),
1192 operations: vec![language::proto::serialize_operation(&operation)],
1193 });
1194 cx.background().spawn(request).detach_and_log_err(cx);
1195 }
1196 BufferEvent::Edited { .. } => {
1197 let (_, language_server) = self
1198 .language_server_for_buffer(buffer.read(cx), cx)?
1199 .clone();
1200 let buffer = buffer.read(cx);
1201 let file = File::from_dyn(buffer.file())?;
1202 let abs_path = file.as_local()?.abs_path(cx);
1203 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1204 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1205 let (version, prev_snapshot) = buffer_snapshots.last()?;
1206 let next_snapshot = buffer.text_snapshot();
1207 let next_version = version + 1;
1208
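                // Convert the edits made since the last snapshot we sent into
                // incremental LSP content changes. The server applies them in
                // order, so each change's range starts at the edit's position
                // in the new snapshot and spans the length of the text it
                // replaced, while the replacement text comes from the new
                // snapshot.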
1209 let content_changes = buffer
1210 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1211 .map(|edit| {
1212 let edit_start = edit.new.start.0;
1213 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1214 let new_text = next_snapshot
1215 .text_for_range(edit.new.start.1..edit.new.end.1)
1216 .collect();
1217 lsp::TextDocumentContentChangeEvent {
1218 range: Some(lsp::Range::new(
1219 point_to_lsp(edit_start),
1220 point_to_lsp(edit_end),
1221 )),
1222 range_length: None,
1223 text: new_text,
1224 }
1225 })
1226 .collect();
1227
1228 buffer_snapshots.push((next_version, next_snapshot));
1229
1230 language_server
1231 .notify::<lsp::notification::DidChangeTextDocument>(
1232 lsp::DidChangeTextDocumentParams {
1233 text_document: lsp::VersionedTextDocumentIdentifier::new(
1234 uri,
1235 next_version,
1236 ),
1237 content_changes,
1238 },
1239 )
1240 .log_err();
1241 }
1242 BufferEvent::Saved => {
1243 let file = File::from_dyn(buffer.read(cx).file())?;
1244 let worktree_id = file.worktree_id(cx);
1245 let abs_path = file.as_local()?.abs_path(cx);
1246 let text_document = lsp::TextDocumentIdentifier {
1247 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1248 };
1249
1250 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1251 server
1252 .notify::<lsp::notification::DidSaveTextDocument>(
1253 lsp::DidSaveTextDocumentParams {
1254 text_document: text_document.clone(),
1255 text: None,
1256 },
1257 )
1258 .log_err();
1259 }
1260 }
1261 _ => {}
1262 }
1263
1264 None
1265 }
1266
1267 fn language_servers_for_worktree(
1268 &self,
1269 worktree_id: WorktreeId,
1270 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1271 self.language_servers.iter().filter_map(
1272 move |((language_server_worktree_id, _), server)| {
1273 if *language_server_worktree_id == worktree_id {
1274 Some(server)
1275 } else {
1276 None
1277 }
1278 },
1279 )
1280 }
1281
1282 fn assign_language_to_buffer(
1283 &mut self,
1284 buffer: &ModelHandle<Buffer>,
1285 cx: &mut ModelContext<Self>,
1286 ) -> Option<()> {
        // If a language matches the buffer's path, assign it to the buffer and
        // start that language's server if we haven't already.
1288 let full_path = buffer.read(cx).file()?.full_path(cx);
1289 let language = self.languages.select_language(&full_path)?;
1290 buffer.update(cx, |buffer, cx| {
1291 buffer.set_language(Some(language.clone()), cx);
1292 });
1293
1294 let file = File::from_dyn(buffer.read(cx).file())?;
1295 let worktree = file.worktree.read(cx).as_local()?;
1296 let worktree_id = worktree.id();
1297 let worktree_abs_path = worktree.abs_path().clone();
1298 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1299
1300 None
1301 }
1302
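    // Starts the language server for the given language at most once per
    // (worktree, adapter) pair. The spawned task boots and initializes the
    // server, forwards its diagnostics and progress notifications as
    // `LanguageServerEvent`s, and then sends a `didOpen` for every already
    // open buffer in the worktree that matches the language.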
1303 fn start_language_server(
1304 &mut self,
1305 worktree_id: WorktreeId,
1306 worktree_path: Arc<Path>,
1307 language: Arc<Language>,
1308 cx: &mut ModelContext<Self>,
1309 ) {
1310 let adapter = if let Some(adapter) = language.lsp_adapter() {
1311 adapter
1312 } else {
1313 return;
1314 };
1315 let key = (worktree_id, adapter.name());
1316 self.started_language_servers
1317 .entry(key.clone())
1318 .or_insert_with(|| {
1319 let server_id = post_inc(&mut self.next_language_server_id);
1320 let language_server = self.languages.start_language_server(
1321 server_id,
1322 language.clone(),
1323 worktree_path,
1324 self.client.http_client(),
1325 cx,
1326 );
1327 cx.spawn_weak(|this, mut cx| async move {
1328 let mut language_server = language_server?.await.log_err()?;
1329 let this = this.upgrade(&cx)?;
1330 let (language_server_events_tx, language_server_events_rx) =
1331 smol::channel::unbounded();
1332
1333 language_server
1334 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1335 let language_server_events_tx = language_server_events_tx.clone();
1336 move |params| {
1337 language_server_events_tx
1338 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1339 .ok();
1340 }
1341 })
1342 .detach();
1343
1344 language_server
1345 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1346 let settings = this
1347 .read_with(&cx, |this, _| this.language_server_settings.clone());
1348 move |params| {
1349 let settings = settings.lock();
1350 Ok(params
1351 .items
1352 .into_iter()
1353 .map(|item| {
1354 if let Some(section) = &item.section {
1355 settings
1356 .get(section)
1357 .cloned()
1358 .unwrap_or(serde_json::Value::Null)
1359 } else {
1360 settings.clone()
1361 }
1362 })
1363 .collect())
1364 }
1365 })
1366 .detach();
1367
1368 language_server
1369 .on_notification::<lsp::notification::Progress, _>(move |params| {
1370 let token = match params.token {
1371 lsp::NumberOrString::String(token) => token,
1372 lsp::NumberOrString::Number(token) => {
1373 log::info!("skipping numeric progress token {}", token);
1374 return;
1375 }
1376 };
1377
1378 match params.value {
1379 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1380 lsp::WorkDoneProgress::Begin(_) => {
1381 language_server_events_tx
1382 .try_send(LanguageServerEvent::WorkStart { token })
1383 .ok();
1384 }
1385 lsp::WorkDoneProgress::Report(report) => {
1386 language_server_events_tx
1387 .try_send(LanguageServerEvent::WorkProgress {
1388 token,
1389 progress: LanguageServerProgress {
1390 message: report.message,
1391 percentage: report
1392 .percentage
1393 .map(|p| p as usize),
1394 last_update_at: Instant::now(),
1395 },
1396 })
1397 .ok();
1398 }
1399 lsp::WorkDoneProgress::End(_) => {
1400 language_server_events_tx
1401 .try_send(LanguageServerEvent::WorkEnd { token })
1402 .ok();
1403 }
1404 },
1405 }
1406 })
1407 .detach();
1408
1409 // Process all the LSP events.
1410 cx.spawn(|mut cx| {
1411 let this = this.downgrade();
1412 async move {
1413 while let Ok(event) = language_server_events_rx.recv().await {
1414 let this = this.upgrade(&cx)?;
1415 this.update(&mut cx, |this, cx| {
1416 this.on_lsp_event(server_id, event, &language, cx)
1417 });
1418
1419 // Don't starve the main thread when lots of events arrive all at once.
1420 smol::future::yield_now().await;
1421 }
1422 Some(())
1423 }
1424 })
1425 .detach();
1426
1427 let language_server = language_server.initialize().await.log_err()?;
1428 this.update(&mut cx, |this, cx| {
1429 this.language_servers
1430 .insert(key.clone(), (adapter, language_server.clone()));
1431 this.language_server_statuses.insert(
1432 server_id,
1433 LanguageServerStatus {
1434 name: language_server.name().to_string(),
1435 pending_work: Default::default(),
1436 pending_diagnostic_updates: 0,
1437 },
1438 );
1439 language_server
1440 .notify::<lsp::notification::DidChangeConfiguration>(
1441 lsp::DidChangeConfigurationParams {
1442 settings: this.language_server_settings.lock().clone(),
1443 },
1444 )
1445 .ok();
1446
1447 if let Some(project_id) = this.remote_id() {
1448 this.client
1449 .send(proto::StartLanguageServer {
1450 project_id,
1451 server: Some(proto::LanguageServer {
1452 id: server_id as u64,
1453 name: language_server.name().to_string(),
1454 }),
1455 })
1456 .log_err();
1457 }
1458
1459 // Tell the language server about every open buffer in the worktree that matches the language.
1460 for buffer in this.opened_buffers.values() {
1461 if let Some(buffer_handle) = buffer.upgrade(cx) {
1462 let buffer = buffer_handle.read(cx);
1463 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1464 file
1465 } else {
1466 continue;
1467 };
1468 let language = if let Some(language) = buffer.language() {
1469 language
1470 } else {
1471 continue;
1472 };
1473 if file.worktree.read(cx).id() != key.0
1474 || language.lsp_adapter().map(|a| a.name())
1475 != Some(key.1.clone())
1476 {
1477 continue;
1478 }
1479
1480 let file = file.as_local()?;
1481 let versions = this
1482 .buffer_snapshots
1483 .entry(buffer.remote_id())
1484 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1485 let (version, initial_snapshot) = versions.last().unwrap();
1486 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1487 language_server
1488 .notify::<lsp::notification::DidOpenTextDocument>(
1489 lsp::DidOpenTextDocumentParams {
1490 text_document: lsp::TextDocumentItem::new(
1491 uri,
1492 Default::default(),
1493 *version,
1494 initial_snapshot.text(),
1495 ),
1496 },
1497 )
1498 .log_err()?;
1499 buffer_handle.update(cx, |buffer, cx| {
1500 buffer.set_completion_triggers(
1501 language_server
1502 .capabilities()
1503 .completion_provider
1504 .as_ref()
1505 .and_then(|provider| {
1506 provider.trigger_characters.clone()
1507 })
1508 .unwrap_or(Vec::new()),
1509 cx,
1510 )
1511 });
1512 }
1513 }
1514
1515 cx.notify();
1516 Some(())
1517 });
1518
1519 Some(language_server)
1520 })
1521 });
1522 }
1523
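    /// Shuts down and restarts the language servers backing the given buffers
    /// (for example after their configuration changes on disk). A sketch:
    ///
    /// ```ignore
    /// project.update(cx, |project, cx| {
    ///     project.restart_language_servers_for_buffers([buffer.clone()], cx);
    /// });
    /// ```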
1524 pub fn restart_language_servers_for_buffers(
1525 &mut self,
1526 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1527 cx: &mut ModelContext<Self>,
1528 ) -> Option<()> {
1529 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1530 .into_iter()
1531 .filter_map(|buffer| {
1532 let file = File::from_dyn(buffer.read(cx).file())?;
1533 let worktree = file.worktree.read(cx).as_local()?;
1534 let worktree_id = worktree.id();
1535 let worktree_abs_path = worktree.abs_path().clone();
1536 let full_path = file.full_path(cx);
1537 Some((worktree_id, worktree_abs_path, full_path))
1538 })
1539 .collect();
1540 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1541 let language = self.languages.select_language(&full_path)?;
1542 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1543 }
1544
1545 None
1546 }
1547
1548 fn restart_language_server(
1549 &mut self,
1550 worktree_id: WorktreeId,
1551 worktree_path: Arc<Path>,
1552 language: Arc<Language>,
1553 cx: &mut ModelContext<Self>,
1554 ) {
1555 let adapter = if let Some(adapter) = language.lsp_adapter() {
1556 adapter
1557 } else {
1558 return;
1559 };
1560 let key = (worktree_id, adapter.name());
1561 let server_to_shutdown = self.language_servers.remove(&key);
1562 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1566 cx.spawn_weak(|this, mut cx| async move {
1567 if let Some(this) = this.upgrade(&cx) {
1568 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1569 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1570 shutdown_task.await;
1571 }
1572 }
1573
1574 this.update(&mut cx, |this, cx| {
1575 this.start_language_server(worktree_id, worktree_path, language, cx);
1576 });
1577 }
1578 })
1579 .detach();
1580 }
1581
1582 fn on_lsp_event(
1583 &mut self,
1584 language_server_id: usize,
1585 event: LanguageServerEvent,
1586 language: &Arc<Language>,
1587 cx: &mut ModelContext<Self>,
1588 ) {
1589 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1590 let language_server_status =
1591 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1592 status
1593 } else {
1594 return;
1595 };
1596
1597 match event {
1598 LanguageServerEvent::WorkStart { token } => {
1599 if Some(token.as_str()) == disk_diagnostics_token {
1600 language_server_status.pending_diagnostic_updates += 1;
1601 if language_server_status.pending_diagnostic_updates == 1 {
1602 self.disk_based_diagnostics_started(cx);
1603 self.broadcast_language_server_update(
1604 language_server_id,
1605 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1606 proto::LspDiskBasedDiagnosticsUpdating {},
1607 ),
1608 );
1609 }
1610 } else {
1611 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1612 self.broadcast_language_server_update(
1613 language_server_id,
1614 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1615 token,
1616 }),
1617 );
1618 }
1619 }
1620 LanguageServerEvent::WorkProgress { token, progress } => {
1621 if Some(token.as_str()) != disk_diagnostics_token {
1622 self.on_lsp_work_progress(
1623 language_server_id,
1624 token.clone(),
1625 progress.clone(),
1626 cx,
1627 );
1628 self.broadcast_language_server_update(
1629 language_server_id,
1630 proto::update_language_server::Variant::WorkProgress(
1631 proto::LspWorkProgress {
1632 token,
1633 message: progress.message,
1634 percentage: progress.percentage.map(|p| p as u32),
1635 },
1636 ),
1637 );
1638 }
1639 }
1640 LanguageServerEvent::WorkEnd { token } => {
1641 if Some(token.as_str()) == disk_diagnostics_token {
1642 language_server_status.pending_diagnostic_updates -= 1;
1643 if language_server_status.pending_diagnostic_updates == 0 {
1644 self.disk_based_diagnostics_finished(cx);
1645 self.broadcast_language_server_update(
1646 language_server_id,
1647 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1648 proto::LspDiskBasedDiagnosticsUpdated {},
1649 ),
1650 );
1651 }
1652 } else {
1653 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1654 self.broadcast_language_server_update(
1655 language_server_id,
1656 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1657 token,
1658 }),
1659 );
1660 }
1661 }
1662 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1663 language.process_diagnostics(&mut params);
1664
1665 if disk_diagnostics_token.is_none() {
1666 self.disk_based_diagnostics_started(cx);
1667 self.broadcast_language_server_update(
1668 language_server_id,
1669 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1670 proto::LspDiskBasedDiagnosticsUpdating {},
1671 ),
1672 );
1673 }
1674 self.update_diagnostics(params, language.disk_based_diagnostic_sources(), cx)
1675 .log_err();
1676 if disk_diagnostics_token.is_none() {
1677 self.disk_based_diagnostics_finished(cx);
1678 self.broadcast_language_server_update(
1679 language_server_id,
1680 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1681 proto::LspDiskBasedDiagnosticsUpdated {},
1682 ),
1683 );
1684 }
1685 }
1686 }
1687 }
1688
1689 fn on_lsp_work_start(
1690 &mut self,
1691 language_server_id: usize,
1692 token: String,
1693 cx: &mut ModelContext<Self>,
1694 ) {
1695 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1696 status.pending_work.insert(
1697 token,
1698 LanguageServerProgress {
1699 message: None,
1700 percentage: None,
1701 last_update_at: Instant::now(),
1702 },
1703 );
1704 cx.notify();
1705 }
1706 }
1707
1708 fn on_lsp_work_progress(
1709 &mut self,
1710 language_server_id: usize,
1711 token: String,
1712 progress: LanguageServerProgress,
1713 cx: &mut ModelContext<Self>,
1714 ) {
1715 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1716 status.pending_work.insert(token, progress);
1717 cx.notify();
1718 }
1719 }
1720
1721 fn on_lsp_work_end(
1722 &mut self,
1723 language_server_id: usize,
1724 token: String,
1725 cx: &mut ModelContext<Self>,
1726 ) {
1727 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1728 status.pending_work.remove(&token);
1729 cx.notify();
1730 }
1731 }
1732
1733 fn broadcast_language_server_update(
1734 &self,
1735 language_server_id: usize,
1736 event: proto::update_language_server::Variant,
1737 ) {
1738 if let Some(project_id) = self.remote_id() {
1739 self.client
1740 .send(proto::UpdateLanguageServer {
1741 project_id,
1742 language_server_id: language_server_id as u64,
1743 variant: Some(event),
1744 })
1745 .log_err();
1746 }
1747 }
1748
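    /// Replaces the user-provided language server settings and pushes them to
    /// every running server via `workspace/didChangeConfiguration`. A sketch,
    /// assuming the `serde_json::json!` macro is available and that the
    /// settings shape is whatever the servers expect:
    ///
    /// ```ignore
    /// project.update(cx, |project, _| {
    ///     project.set_language_server_settings(serde_json::json!({
    ///         "rust-analyzer": { "checkOnSave": { "enable": false } }
    ///     }));
    /// });
    /// ```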
1749 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1750 for (_, server) in self.language_servers.values() {
1751 server
1752 .notify::<lsp::notification::DidChangeConfiguration>(
1753 lsp::DidChangeConfigurationParams {
1754 settings: settings.clone(),
1755 },
1756 )
1757 .ok();
1758 }
1759 *self.language_server_settings.lock() = settings;
1760 }
1761
1762 pub fn language_server_statuses(
1763 &self,
1764 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1765 self.language_server_statuses.values()
1766 }
1767
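    /// Ingests a `textDocument/publishDiagnostics` payload: supporting
    /// diagnostics are grouped with their primary entry, disk-based sources
    /// are flagged, and the result is forwarded to the matching worktree and
    /// any open buffer. A sketch feeding it a hand-built payload (the values
    /// are illustrative):
    ///
    /// ```ignore
    /// let params = lsp::PublishDiagnosticsParams {
    ///     uri: lsp::Url::from_file_path("/path/to/file.rs").unwrap(),
    ///     diagnostics: Vec::new(),
    ///     version: None,
    /// };
    /// project.update(cx, |project, cx| {
    ///     project.update_diagnostics(params, &["rustc"], cx)
    /// })?;
    /// ```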
1768 pub fn update_diagnostics(
1769 &mut self,
1770 params: lsp::PublishDiagnosticsParams,
1771 disk_based_sources: &[&str],
1772 cx: &mut ModelContext<Self>,
1773 ) -> Result<()> {
1774 let abs_path = params
1775 .uri
1776 .to_file_path()
1777 .map_err(|_| anyhow!("URI is not a file"))?;
1778 let mut next_group_id = 0;
1779 let mut diagnostics = Vec::default();
1780 let mut primary_diagnostic_group_ids = HashMap::default();
1781 let mut sources_by_group_id = HashMap::default();
1782 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1784 let source = diagnostic.source.as_ref();
1785 let code = diagnostic.code.as_ref().map(|code| match code {
1786 lsp::NumberOrString::Number(code) => code.to_string(),
1787 lsp::NumberOrString::String(code) => code.clone(),
1788 });
1789 let range = range_from_lsp(diagnostic.range);
1790 let is_supporting = diagnostic
1791 .related_information
1792 .as_ref()
1793 .map_or(false, |infos| {
1794 infos.iter().any(|info| {
1795 primary_diagnostic_group_ids.contains_key(&(
1796 source,
1797 code.clone(),
1798 range_from_lsp(info.location.range),
1799 ))
1800 })
1801 });
1802
1803 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1804 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1805 });
1806
1807 if is_supporting {
1808 supporting_diagnostics.insert(
1809 (source, code.clone(), range),
1810 (diagnostic.severity, is_unnecessary),
1811 );
1812 } else {
1813 let group_id = post_inc(&mut next_group_id);
1814 let is_disk_based = source.map_or(false, |source| {
1815 disk_based_sources.contains(&source.as_str())
1816 });
1817
1818 sources_by_group_id.insert(group_id, source);
1819 primary_diagnostic_group_ids
1820 .insert((source, code.clone(), range.clone()), group_id);
1821
1822 diagnostics.push(DiagnosticEntry {
1823 range,
1824 diagnostic: Diagnostic {
1825 code: code.clone(),
1826 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1827 message: diagnostic.message.clone(),
1828 group_id,
1829 is_primary: true,
1830 is_valid: true,
1831 is_disk_based,
1832 is_unnecessary,
1833 },
1834 });
1835 if let Some(infos) = &diagnostic.related_information {
1836 for info in infos {
1837 if info.location.uri == params.uri && !info.message.is_empty() {
1838 let range = range_from_lsp(info.location.range);
1839 diagnostics.push(DiagnosticEntry {
1840 range,
1841 diagnostic: Diagnostic {
1842 code: code.clone(),
1843 severity: DiagnosticSeverity::INFORMATION,
1844 message: info.message.clone(),
1845 group_id,
1846 is_primary: false,
1847 is_valid: true,
1848 is_disk_based,
1849 is_unnecessary: false,
1850 },
1851 });
1852 }
1853 }
1854 }
1855 }
1856 }
1857
1858 for entry in &mut diagnostics {
1859 let diagnostic = &mut entry.diagnostic;
1860 if !diagnostic.is_primary {
1861 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1862 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1863 source,
1864 diagnostic.code.clone(),
1865 entry.range.clone(),
1866 )) {
1867 if let Some(severity) = severity {
1868 diagnostic.severity = severity;
1869 }
1870 diagnostic.is_unnecessary = is_unnecessary;
1871 }
1872 }
1873 }
1874
1875 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1876 Ok(())
1877 }
1878
1879 pub fn update_diagnostic_entries(
1880 &mut self,
1881 abs_path: PathBuf,
1882 version: Option<i32>,
1883 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1884 cx: &mut ModelContext<Project>,
1885 ) -> Result<(), anyhow::Error> {
1886 let (worktree, relative_path) = self
1887 .find_local_worktree(&abs_path, cx)
1888 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1889 if !worktree.read(cx).is_visible() {
1890 return Ok(());
1891 }
1892
1893 let project_path = ProjectPath {
1894 worktree_id: worktree.read(cx).id(),
1895 path: relative_path.into(),
1896 };
1897
1898 for buffer in self.opened_buffers.values() {
1899 if let Some(buffer) = buffer.upgrade(cx) {
1900 if buffer
1901 .read(cx)
1902 .file()
1903 .map_or(false, |file| *file.path() == project_path.path)
1904 {
1905 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1906 break;
1907 }
1908 }
1909 }
1910 worktree.update(cx, |worktree, cx| {
1911 worktree
1912 .as_local_mut()
1913 .ok_or_else(|| anyhow!("not a local worktree"))?
1914 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1915 })?;
1916 cx.emit(Event::DiagnosticsUpdated(project_path));
1917 Ok(())
1918 }
1919
1920 fn update_buffer_diagnostics(
1921 &mut self,
1922 buffer: &ModelHandle<Buffer>,
1923 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1924 version: Option<i32>,
1925 cx: &mut ModelContext<Self>,
1926 ) -> Result<()> {
1927 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1928 Ordering::Equal
1929 .then_with(|| b.is_primary.cmp(&a.is_primary))
1930 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1931 .then_with(|| a.severity.cmp(&b.severity))
1932 .then_with(|| a.message.cmp(&b.message))
1933 }
1934
1935 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1936
1937 diagnostics.sort_unstable_by(|a, b| {
1938 Ordering::Equal
1939 .then_with(|| a.range.start.cmp(&b.range.start))
1940 .then_with(|| b.range.end.cmp(&a.range.end))
1941 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1942 });
1943
1944 let mut sanitized_diagnostics = Vec::new();
1945 let edits_since_save = Patch::new(
1946 snapshot
1947 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1948 .collect(),
1949 );
1950 for entry in diagnostics {
1951 let start;
1952 let end;
1953 if entry.diagnostic.is_disk_based {
1954 // Some diagnostics are based on files on disk instead of buffers'
1955 // current contents. Adjust these diagnostics' ranges to reflect
1956 // any unsaved edits.
1957 start = edits_since_save.old_to_new(entry.range.start);
1958 end = edits_since_save.old_to_new(entry.range.end);
1959 } else {
1960 start = entry.range.start;
1961 end = entry.range.end;
1962 }
1963
1964 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1965 ..snapshot.clip_point_utf16(end, Bias::Right);
1966
            // Expand empty ranges by one character so the diagnostic still spans
            // at least one character and remains visible.
1968 if range.start == range.end {
1969 range.end.column += 1;
1970 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1971 if range.start == range.end && range.end.column > 0 {
1972 range.start.column -= 1;
1973 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1974 }
1975 }
1976
1977 sanitized_diagnostics.push(DiagnosticEntry {
1978 range,
1979 diagnostic: entry.diagnostic,
1980 });
1981 }
1982 drop(edits_since_save);
1983
1984 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1985 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1986 Ok(())
1987 }
1988
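    /// Formats the given set of buffers and returns the resulting transactions as
    /// a `ProjectTransaction`. Local buffers are formatted through their language
    /// server; buffers in a remote project are formatted by the host via RPC.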
1989 pub fn format(
1990 &self,
1991 buffers: HashSet<ModelHandle<Buffer>>,
1992 push_to_history: bool,
1993 cx: &mut ModelContext<Project>,
1994 ) -> Task<Result<ProjectTransaction>> {
1995 let mut local_buffers = Vec::new();
1996 let mut remote_buffers = None;
1997 for buffer_handle in buffers {
1998 let buffer = buffer_handle.read(cx);
1999 if let Some(file) = File::from_dyn(buffer.file()) {
2000 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2001 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2002 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2003 }
2004 } else {
2005 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2006 }
2007 } else {
2008 return Task::ready(Ok(Default::default()));
2009 }
2010 }
2011
2012 let remote_buffers = self.remote_id().zip(remote_buffers);
2013 let client = self.client.clone();
2014
2015 cx.spawn(|this, mut cx| async move {
2016 let mut project_transaction = ProjectTransaction::default();
2017
2018 if let Some((project_id, remote_buffers)) = remote_buffers {
2019 let response = client
2020 .request(proto::FormatBuffers {
2021 project_id,
2022 buffer_ids: remote_buffers
2023 .iter()
2024 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2025 .collect(),
2026 })
2027 .await?
2028 .transaction
2029 .ok_or_else(|| anyhow!("missing transaction"))?;
2030 project_transaction = this
2031 .update(&mut cx, |this, cx| {
2032 this.deserialize_project_transaction(response, push_to_history, cx)
2033 })
2034 .await?;
2035 }
2036
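            // Format each local buffer via its language server, preferring full-document
            // formatting and falling back to range formatting over the entire buffer when
            // only that capability is advertised.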
2037 for (buffer, buffer_abs_path, language_server) in local_buffers {
2038 let text_document = lsp::TextDocumentIdentifier::new(
2039 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2040 );
2041 let capabilities = &language_server.capabilities();
2042 let lsp_edits = if capabilities
2043 .document_formatting_provider
2044 .as_ref()
2045 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2046 {
2047 language_server
2048 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2049 text_document,
2050 options: lsp::FormattingOptions {
2051 tab_size: 4,
2052 insert_spaces: true,
2053 insert_final_newline: Some(true),
2054 ..Default::default()
2055 },
2056 work_done_progress_params: Default::default(),
2057 })
2058 .await?
2059 } else if capabilities
2060 .document_range_formatting_provider
2061 .as_ref()
2062 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2063 {
2064 let buffer_start = lsp::Position::new(0, 0);
2065 let buffer_end =
2066 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2067 language_server
2068 .request::<lsp::request::RangeFormatting>(
2069 lsp::DocumentRangeFormattingParams {
2070 text_document,
2071 range: lsp::Range::new(buffer_start, buffer_end),
2072 options: lsp::FormattingOptions {
2073 tab_size: 4,
2074 insert_spaces: true,
2075 insert_final_newline: Some(true),
2076 ..Default::default()
2077 },
2078 work_done_progress_params: Default::default(),
2079 },
2080 )
2081 .await?
2082 } else {
2083 continue;
2084 };
2085
2086 if let Some(lsp_edits) = lsp_edits {
2087 let edits = this
2088 .update(&mut cx, |this, cx| {
2089 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2090 })
2091 .await?;
2092 buffer.update(&mut cx, |buffer, cx| {
2093 buffer.finalize_last_transaction();
2094 buffer.start_transaction();
2095 for (range, text) in edits {
2096 buffer.edit([range], text, cx);
2097 }
2098 if buffer.end_transaction(cx).is_some() {
2099 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2100 if !push_to_history {
2101 buffer.forget_transaction(transaction.id);
2102 }
2103 project_transaction.0.insert(cx.handle(), transaction);
2104 }
2105 });
2106 }
2107 }
2108
2109 Ok(project_transaction)
2110 })
2111 }
2112
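    /// Returns the locations defining the symbol at the given position, using the
    /// buffer's language server or, for remote projects, the host.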
2113 pub fn definition<T: ToPointUtf16>(
2114 &self,
2115 buffer: &ModelHandle<Buffer>,
2116 position: T,
2117 cx: &mut ModelContext<Self>,
2118 ) -> Task<Result<Vec<Location>>> {
2119 let position = position.to_point_utf16(buffer.read(cx));
2120 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2121 }
2122
2123 pub fn references<T: ToPointUtf16>(
2124 &self,
2125 buffer: &ModelHandle<Buffer>,
2126 position: T,
2127 cx: &mut ModelContext<Self>,
2128 ) -> Task<Result<Vec<Location>>> {
2129 let position = position.to_point_utf16(buffer.read(cx));
2130 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2131 }
2132
2133 pub fn document_highlights<T: ToPointUtf16>(
2134 &self,
2135 buffer: &ModelHandle<Buffer>,
2136 position: T,
2137 cx: &mut ModelContext<Self>,
2138 ) -> Task<Result<Vec<DocumentHighlight>>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2142 }
2143
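    /// Queries every running language server for workspace symbols matching
    /// `query`, resolving each result to a worktree-relative path. For remote
    /// projects, the request is forwarded to the host.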
2144 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2145 if self.is_local() {
2146 let mut language_servers = HashMap::default();
2147 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2148 if let Some(worktree) = self
2149 .worktree_for_id(*worktree_id, cx)
2150 .and_then(|worktree| worktree.read(cx).as_local())
2151 {
2152 language_servers
2153 .entry(Arc::as_ptr(language_server))
2154 .or_insert((
2155 lsp_adapter.clone(),
2156 language_server.clone(),
2157 *worktree_id,
2158 worktree.abs_path().clone(),
2159 ));
2160 }
2161 }
2162
2163 let mut requests = Vec::new();
2164 for (_, language_server, _, _) in language_servers.values() {
2165 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2166 lsp::WorkspaceSymbolParams {
2167 query: query.to_string(),
2168 ..Default::default()
2169 },
2170 ));
2171 }
2172
2173 cx.spawn_weak(|this, cx| async move {
2174 let responses = futures::future::try_join_all(requests).await?;
2175
2176 let mut symbols = Vec::new();
2177 if let Some(this) = this.upgrade(&cx) {
2178 this.read_with(&cx, |this, cx| {
2179 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2180 language_servers.into_values().zip(responses)
2181 {
2182 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2183 |lsp_symbol| {
2184 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2185 let mut worktree_id = source_worktree_id;
2186 let path;
2187 if let Some((worktree, rel_path)) =
2188 this.find_local_worktree(&abs_path, cx)
2189 {
2190 worktree_id = worktree.read(cx).id();
2191 path = rel_path;
2192 } else {
2193 path = relativize_path(&worktree_abs_path, &abs_path);
2194 }
2195
2196 let label = this
2197 .languages
2198 .select_language(&path)
2199 .and_then(|language| {
2200 language
2201 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2202 })
2203 .unwrap_or_else(|| {
2204 CodeLabel::plain(lsp_symbol.name.clone(), None)
2205 });
2206 let signature = this.symbol_signature(worktree_id, &path);
2207
2208 Some(Symbol {
2209 source_worktree_id,
2210 worktree_id,
2211 language_server_name: adapter.name(),
2212 name: lsp_symbol.name,
2213 kind: lsp_symbol.kind,
2214 label,
2215 path,
2216 range: range_from_lsp(lsp_symbol.location.range),
2217 signature,
2218 })
2219 },
2220 ));
2221 }
2222 })
2223 }
2224
2225 Ok(symbols)
2226 })
2227 } else if let Some(project_id) = self.remote_id() {
2228 let request = self.client.request(proto::GetProjectSymbols {
2229 project_id,
2230 query: query.to_string(),
2231 });
2232 cx.spawn_weak(|this, cx| async move {
2233 let response = request.await?;
2234 let mut symbols = Vec::new();
2235 if let Some(this) = this.upgrade(&cx) {
2236 this.read_with(&cx, |this, _| {
2237 symbols.extend(
2238 response
2239 .symbols
2240 .into_iter()
2241 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2242 );
2243 })
2244 }
2245 Ok(symbols)
2246 })
2247 } else {
2248 Task::ready(Ok(Default::default()))
2249 }
2250 }
2251
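    /// Opens the buffer containing the given workspace symbol, resolving the
    /// symbol's path against the worktree it was reported for.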
2252 pub fn open_buffer_for_symbol(
2253 &mut self,
2254 symbol: &Symbol,
2255 cx: &mut ModelContext<Self>,
2256 ) -> Task<Result<ModelHandle<Buffer>>> {
2257 if self.is_local() {
2258 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2259 symbol.source_worktree_id,
2260 symbol.language_server_name.clone(),
2261 )) {
2262 server.clone()
2263 } else {
2264 return Task::ready(Err(anyhow!(
2265 "language server for worktree and language not found"
2266 )));
2267 };
2268
2269 let worktree_abs_path = if let Some(worktree_abs_path) = self
2270 .worktree_for_id(symbol.worktree_id, cx)
2271 .and_then(|worktree| worktree.read(cx).as_local())
2272 .map(|local_worktree| local_worktree.abs_path())
2273 {
2274 worktree_abs_path
2275 } else {
2276 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2277 };
2278 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2279 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2280 uri
2281 } else {
2282 return Task::ready(Err(anyhow!("invalid symbol path")));
2283 };
2284
2285 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2286 } else if let Some(project_id) = self.remote_id() {
2287 let request = self.client.request(proto::OpenBufferForSymbol {
2288 project_id,
2289 symbol: Some(serialize_symbol(symbol)),
2290 });
2291 cx.spawn(|this, mut cx| async move {
2292 let response = request.await?;
2293 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2294 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2295 .await
2296 })
2297 } else {
2298 Task::ready(Err(anyhow!("project does not have a remote id")))
2299 }
2300 }
2301
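    /// Requests completions at the given position. Local buffers query their
    /// language server directly and convert the results into anchored
    /// `Completion`s; remote buffers ask the host over RPC.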
2302 pub fn completions<T: ToPointUtf16>(
2303 &self,
2304 source_buffer_handle: &ModelHandle<Buffer>,
2305 position: T,
2306 cx: &mut ModelContext<Self>,
2307 ) -> Task<Result<Vec<Completion>>> {
2308 let source_buffer_handle = source_buffer_handle.clone();
2309 let source_buffer = source_buffer_handle.read(cx);
2310 let buffer_id = source_buffer.remote_id();
2311 let language = source_buffer.language().cloned();
2312 let worktree;
2313 let buffer_abs_path;
2314 if let Some(file) = File::from_dyn(source_buffer.file()) {
2315 worktree = file.worktree.clone();
2316 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2317 } else {
2318 return Task::ready(Ok(Default::default()));
2319 };
2320
2321 let position = position.to_point_utf16(source_buffer);
2322 let anchor = source_buffer.anchor_after(position);
2323
2324 if worktree.read(cx).as_local().is_some() {
2325 let buffer_abs_path = buffer_abs_path.unwrap();
2326 let (_, lang_server) =
2327 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2328 server.clone()
2329 } else {
2330 return Task::ready(Ok(Default::default()));
2331 };
2332
2333 cx.spawn(|_, cx| async move {
2334 let clipped_position = source_buffer_handle
2335 .read_with(&cx, |this, _| this.clip_point_utf16(position, Bias::Left));
2336 if clipped_position != position {
2337 log::info!("Completion position out of date");
2338 return Ok(Default::default());
2339 }
2340
2341 let completions = lang_server
2342 .request::<lsp::request::Completion>(lsp::CompletionParams {
2343 text_document_position: lsp::TextDocumentPositionParams::new(
2344 lsp::TextDocumentIdentifier::new(
2345 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2346 ),
2347 point_to_lsp(position),
2348 ),
2349 context: Default::default(),
2350 work_done_progress_params: Default::default(),
2351 partial_result_params: Default::default(),
2352 })
2353 .await
2354 .context("lsp completion request failed")?;
2355
2356 let completions = if let Some(completions) = completions {
2357 match completions {
2358 lsp::CompletionResponse::Array(completions) => completions,
2359 lsp::CompletionResponse::List(list) => list.items,
2360 }
2361 } else {
2362 Default::default()
2363 };
2364
2365 source_buffer_handle.read_with(&cx, |this, _| {
2366 Ok(completions
2367 .into_iter()
2368 .filter_map(|lsp_completion| {
2369 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2370 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2371 (range_from_lsp(edit.range), edit.new_text.clone())
2372 }
2373 None => (
2374 this.common_prefix_at(position, &lsp_completion.label),
2375 lsp_completion.label.clone(),
2376 ),
2377 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2378 log::info!("unsupported insert/replace completion");
2379 return None;
2380 }
2381 };
2382
2383 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2384 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2385 if clipped_start == old_range.start && clipped_end == old_range.end {
2386 Some(Completion {
2387 old_range: this.anchor_before(old_range.start)
2388 ..this.anchor_after(old_range.end),
2389 new_text,
2390 label: language
2391 .as_ref()
2392 .and_then(|l| l.label_for_completion(&lsp_completion))
2393 .unwrap_or_else(|| {
2394 CodeLabel::plain(
2395 lsp_completion.label.clone(),
2396 lsp_completion.filter_text.as_deref(),
2397 )
2398 }),
2399 lsp_completion,
2400 })
2401 } else {
2402 log::info!("completion out of expected range");
2403 None
2404 }
2405 })
2406 .collect())
2407 })
2408 })
2409 } else if let Some(project_id) = self.remote_id() {
2410 let rpc = self.client.clone();
2411 let message = proto::GetCompletions {
2412 project_id,
2413 buffer_id,
2414 position: Some(language::proto::serialize_anchor(&anchor)),
2415 version: serialize_version(&source_buffer.version()),
2416 };
2417 cx.spawn_weak(|_, mut cx| async move {
2418 let response = rpc.request(message).await?;
2419
2420 source_buffer_handle
2421 .update(&mut cx, |buffer, _| {
2422 buffer.wait_for_version(deserialize_version(response.version))
2423 })
2424 .await;
2425
2426 response
2427 .completions
2428 .into_iter()
2429 .map(|completion| {
2430 language::proto::deserialize_completion(completion, language.as_ref())
2431 })
2432 .collect()
2433 })
2434 } else {
2435 Task::ready(Ok(Default::default()))
2436 }
2437 }
2438
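    /// Resolves the given completion with the language server and applies any
    /// additional text edits it carries, optionally pushing the resulting
    /// transaction onto the buffer's history.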
2439 pub fn apply_additional_edits_for_completion(
2440 &self,
2441 buffer_handle: ModelHandle<Buffer>,
2442 completion: Completion,
2443 push_to_history: bool,
2444 cx: &mut ModelContext<Self>,
2445 ) -> Task<Result<Option<Transaction>>> {
2446 let buffer = buffer_handle.read(cx);
2447 let buffer_id = buffer.remote_id();
2448
2449 if self.is_local() {
2450 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2451 {
2452 server.clone()
2453 } else {
2454 return Task::ready(Ok(Default::default()));
2455 };
2456
2457 cx.spawn(|this, mut cx| async move {
2458 let resolved_completion = lang_server
2459 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2460 .await?;
2461 if let Some(edits) = resolved_completion.additional_text_edits {
2462 let edits = this
2463 .update(&mut cx, |this, cx| {
2464 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2465 })
2466 .await?;
2467 buffer_handle.update(&mut cx, |buffer, cx| {
2468 buffer.finalize_last_transaction();
2469 buffer.start_transaction();
2470 for (range, text) in edits {
2471 buffer.edit([range], text, cx);
2472 }
2473 let transaction = if buffer.end_transaction(cx).is_some() {
2474 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2475 if !push_to_history {
2476 buffer.forget_transaction(transaction.id);
2477 }
2478 Some(transaction)
2479 } else {
2480 None
2481 };
2482 Ok(transaction)
2483 })
2484 } else {
2485 Ok(None)
2486 }
2487 })
2488 } else if let Some(project_id) = self.remote_id() {
2489 let client = self.client.clone();
2490 cx.spawn(|_, mut cx| async move {
2491 let response = client
2492 .request(proto::ApplyCompletionAdditionalEdits {
2493 project_id,
2494 buffer_id,
2495 completion: Some(language::proto::serialize_completion(&completion)),
2496 })
2497 .await?;
2498
2499 if let Some(transaction) = response.transaction {
2500 let transaction = language::proto::deserialize_transaction(transaction)?;
2501 buffer_handle
2502 .update(&mut cx, |buffer, _| {
2503 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2504 })
2505 .await;
2506 if push_to_history {
2507 buffer_handle.update(&mut cx, |buffer, _| {
2508 buffer.push_transaction(transaction.clone(), Instant::now());
2509 });
2510 }
2511 Ok(Some(transaction))
2512 } else {
2513 Ok(None)
2514 }
2515 })
2516 } else {
2517 Task::ready(Err(anyhow!("project does not have a remote id")))
2518 }
2519 }
2520
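    /// Requests the code actions available in the given range, restricted to
    /// quick fixes, refactorings, extractions, and source actions.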
2521 pub fn code_actions<T: Clone + ToOffset>(
2522 &self,
2523 buffer_handle: &ModelHandle<Buffer>,
2524 range: Range<T>,
2525 cx: &mut ModelContext<Self>,
2526 ) -> Task<Result<Vec<CodeAction>>> {
2527 let buffer_handle = buffer_handle.clone();
2528 let buffer = buffer_handle.read(cx);
2529 let snapshot = buffer.snapshot();
2530 let relevant_diagnostics = snapshot
2531 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2532 .map(|entry| entry.to_lsp_diagnostic_stub())
2533 .collect();
2534 let buffer_id = buffer.remote_id();
2535 let worktree;
2536 let buffer_abs_path;
2537 if let Some(file) = File::from_dyn(buffer.file()) {
2538 worktree = file.worktree.clone();
2539 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2540 } else {
2541 return Task::ready(Ok(Default::default()));
2542 };
2543 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2544
2545 if worktree.read(cx).as_local().is_some() {
2546 let buffer_abs_path = buffer_abs_path.unwrap();
2547 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2548 {
2549 server.clone()
2550 } else {
2551 return Task::ready(Ok(Default::default()));
2552 };
2553
2554 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2555 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2557 return Ok(Default::default());
2558 }
2559
2560 Ok(lang_server
2561 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2562 text_document: lsp::TextDocumentIdentifier::new(
2563 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2564 ),
2565 range: lsp_range,
2566 work_done_progress_params: Default::default(),
2567 partial_result_params: Default::default(),
2568 context: lsp::CodeActionContext {
2569 diagnostics: relevant_diagnostics,
2570 only: Some(vec![
2571 lsp::CodeActionKind::QUICKFIX,
2572 lsp::CodeActionKind::REFACTOR,
2573 lsp::CodeActionKind::REFACTOR_EXTRACT,
2574 lsp::CodeActionKind::SOURCE,
2575 ]),
2576 },
2577 })
2578 .await?
2579 .unwrap_or_default()
2580 .into_iter()
2581 .filter_map(|entry| {
2582 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2583 Some(CodeAction {
2584 range: range.clone(),
2585 lsp_action,
2586 })
2587 } else {
2588 None
2589 }
2590 })
2591 .collect())
2592 })
2593 } else if let Some(project_id) = self.remote_id() {
2594 let rpc = self.client.clone();
2595 let version = buffer.version();
2596 cx.spawn_weak(|_, mut cx| async move {
2597 let response = rpc
2598 .request(proto::GetCodeActions {
2599 project_id,
2600 buffer_id,
2601 start: Some(language::proto::serialize_anchor(&range.start)),
2602 end: Some(language::proto::serialize_anchor(&range.end)),
2603 version: serialize_version(&version),
2604 })
2605 .await?;
2606
2607 buffer_handle
2608 .update(&mut cx, |buffer, _| {
2609 buffer.wait_for_version(deserialize_version(response.version))
2610 })
2611 .await;
2612
2613 response
2614 .actions
2615 .into_iter()
2616 .map(language::proto::deserialize_code_action)
2617 .collect()
2618 })
2619 } else {
2620 Task::ready(Ok(Default::default()))
2621 }
2622 }
2623
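    /// Applies a code action, re-resolving it with the language server (or
    /// re-requesting it when it cannot be resolved), and returns the resulting
    /// project-wide transaction.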
2624 pub fn apply_code_action(
2625 &self,
2626 buffer_handle: ModelHandle<Buffer>,
2627 mut action: CodeAction,
2628 push_to_history: bool,
2629 cx: &mut ModelContext<Self>,
2630 ) -> Task<Result<ProjectTransaction>> {
2631 if self.is_local() {
2632 let buffer = buffer_handle.read(cx);
2633 let (lsp_adapter, lang_server) =
2634 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2635 server.clone()
2636 } else {
2637 return Task::ready(Ok(Default::default()));
2638 };
2639 let range = action.range.to_point_utf16(buffer);
2640
2641 cx.spawn(|this, mut cx| async move {
2642 if let Some(lsp_range) = action
2643 .lsp_action
2644 .data
2645 .as_mut()
2646 .and_then(|d| d.get_mut("codeActionParams"))
2647 .and_then(|d| d.get_mut("range"))
2648 {
2649 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2650 action.lsp_action = lang_server
2651 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2652 .await?;
2653 } else {
2654 let actions = this
2655 .update(&mut cx, |this, cx| {
2656 this.code_actions(&buffer_handle, action.range, cx)
2657 })
2658 .await?;
2659 action.lsp_action = actions
2660 .into_iter()
2661 .find(|a| a.lsp_action.title == action.lsp_action.title)
2662 .ok_or_else(|| anyhow!("code action is outdated"))?
2663 .lsp_action;
2664 }
2665
2666 if let Some(edit) = action.lsp_action.edit {
2667 Self::deserialize_workspace_edit(
2668 this,
2669 edit,
2670 push_to_history,
2671 lsp_adapter,
2672 lang_server,
2673 &mut cx,
2674 )
2675 .await
2676 } else {
2677 Ok(ProjectTransaction::default())
2678 }
2679 })
2680 } else if let Some(project_id) = self.remote_id() {
2681 let client = self.client.clone();
2682 let request = proto::ApplyCodeAction {
2683 project_id,
2684 buffer_id: buffer_handle.read(cx).remote_id(),
2685 action: Some(language::proto::serialize_code_action(&action)),
2686 };
2687 cx.spawn(|this, mut cx| async move {
2688 let response = client
2689 .request(request)
2690 .await?
2691 .transaction
2692 .ok_or_else(|| anyhow!("missing transaction"))?;
2693 this.update(&mut cx, |this, cx| {
2694 this.deserialize_project_transaction(response, push_to_history, cx)
2695 })
2696 .await
2697 })
2698 } else {
2699 Task::ready(Err(anyhow!("project does not have a remote id")))
2700 }
2701 }
2702
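    /// Converts an LSP workspace edit into filesystem operations (create, rename,
    /// delete) and buffer edits, applies them, and collects the resulting
    /// transactions into a `ProjectTransaction`.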
2703 async fn deserialize_workspace_edit(
2704 this: ModelHandle<Self>,
2705 edit: lsp::WorkspaceEdit,
2706 push_to_history: bool,
2707 lsp_adapter: Arc<dyn LspAdapter>,
2708 language_server: Arc<LanguageServer>,
2709 cx: &mut AsyncAppContext,
2710 ) -> Result<ProjectTransaction> {
2711 let fs = this.read_with(cx, |this, _| this.fs.clone());
2712 let mut operations = Vec::new();
2713 if let Some(document_changes) = edit.document_changes {
2714 match document_changes {
2715 lsp::DocumentChanges::Edits(edits) => {
2716 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2717 }
2718 lsp::DocumentChanges::Operations(ops) => operations = ops,
2719 }
2720 } else if let Some(changes) = edit.changes {
2721 operations.extend(changes.into_iter().map(|(uri, edits)| {
2722 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2723 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2724 uri,
2725 version: None,
2726 },
2727 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2728 })
2729 }));
2730 }
2731
2732 let mut project_transaction = ProjectTransaction::default();
2733 for operation in operations {
2734 match operation {
2735 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2736 let abs_path = op
2737 .uri
2738 .to_file_path()
2739 .map_err(|_| anyhow!("can't convert URI to path"))?;
2740
2741 if let Some(parent_path) = abs_path.parent() {
2742 fs.create_dir(parent_path).await?;
2743 }
2744 if abs_path.ends_with("/") {
2745 fs.create_dir(&abs_path).await?;
2746 } else {
2747 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2748 .await?;
2749 }
2750 }
2751 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2752 let source_abs_path = op
2753 .old_uri
2754 .to_file_path()
2755 .map_err(|_| anyhow!("can't convert URI to path"))?;
2756 let target_abs_path = op
2757 .new_uri
2758 .to_file_path()
2759 .map_err(|_| anyhow!("can't convert URI to path"))?;
2760 fs.rename(
2761 &source_abs_path,
2762 &target_abs_path,
2763 op.options.map(Into::into).unwrap_or_default(),
2764 )
2765 .await?;
2766 }
2767 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2768 let abs_path = op
2769 .uri
2770 .to_file_path()
2771 .map_err(|_| anyhow!("can't convert URI to path"))?;
2772 let options = op.options.map(Into::into).unwrap_or_default();
2773 if abs_path.ends_with("/") {
2774 fs.remove_dir(&abs_path, options).await?;
2775 } else {
2776 fs.remove_file(&abs_path, options).await?;
2777 }
2778 }
2779 lsp::DocumentChangeOperation::Edit(op) => {
2780 let buffer_to_edit = this
2781 .update(cx, |this, cx| {
2782 this.open_local_buffer_via_lsp(
2783 op.text_document.uri,
2784 lsp_adapter.clone(),
2785 language_server.clone(),
2786 cx,
2787 )
2788 })
2789 .await?;
2790
2791 let edits = this
2792 .update(cx, |this, cx| {
2793 let edits = op.edits.into_iter().map(|edit| match edit {
2794 lsp::OneOf::Left(edit) => edit,
2795 lsp::OneOf::Right(edit) => edit.text_edit,
2796 });
2797 this.edits_from_lsp(
2798 &buffer_to_edit,
2799 edits,
2800 op.text_document.version,
2801 cx,
2802 )
2803 })
2804 .await?;
2805
2806 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2807 buffer.finalize_last_transaction();
2808 buffer.start_transaction();
2809 for (range, text) in edits {
2810 buffer.edit([range], text, cx);
2811 }
2812 let transaction = if buffer.end_transaction(cx).is_some() {
2813 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2814 if !push_to_history {
2815 buffer.forget_transaction(transaction.id);
2816 }
2817 Some(transaction)
2818 } else {
2819 None
2820 };
2821
2822 transaction
2823 });
2824 if let Some(transaction) = transaction {
2825 project_transaction.0.insert(buffer_to_edit, transaction);
2826 }
2827 }
2828 }
2829 }
2830
2831 Ok(project_transaction)
2832 }
2833
2834 pub fn prepare_rename<T: ToPointUtf16>(
2835 &self,
2836 buffer: ModelHandle<Buffer>,
2837 position: T,
2838 cx: &mut ModelContext<Self>,
2839 ) -> Task<Result<Option<Range<Anchor>>>> {
2840 let position = position.to_point_utf16(buffer.read(cx));
2841 self.request_lsp(buffer, PrepareRename { position }, cx)
2842 }
2843
2844 pub fn perform_rename<T: ToPointUtf16>(
2845 &self,
2846 buffer: ModelHandle<Buffer>,
2847 position: T,
2848 new_name: String,
2849 push_to_history: bool,
2850 cx: &mut ModelContext<Self>,
2851 ) -> Task<Result<ProjectTransaction>> {
2852 let position = position.to_point_utf16(buffer.read(cx));
2853 self.request_lsp(
2854 buffer,
2855 PerformRename {
2856 position,
2857 new_name,
2858 push_to_history,
2859 },
2860 cx,
2861 )
2862 }
2863
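    /// Searches all visible worktrees for the given query, returning the matching
    /// ranges grouped by buffer. Locally the search fans out across background
    /// threads; remotely it is executed by the host.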
2864 pub fn search(
2865 &self,
2866 query: SearchQuery,
2867 cx: &mut ModelContext<Self>,
2868 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2869 if self.is_local() {
2870 let snapshots = self
2871 .visible_worktrees(cx)
2872 .filter_map(|tree| {
2873 let tree = tree.read(cx).as_local()?;
2874 Some(tree.snapshot())
2875 })
2876 .collect::<Vec<_>>();
2877
2878 let background = cx.background().clone();
2879 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2880 if path_count == 0 {
2881 return Task::ready(Ok(Default::default()));
2882 }
2883 let workers = background.num_cpus().min(path_count);
2884 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2885 cx.background()
2886 .spawn({
2887 let fs = self.fs.clone();
2888 let background = cx.background().clone();
2889 let query = query.clone();
2890 async move {
2891 let fs = &fs;
2892 let query = &query;
2893 let matching_paths_tx = &matching_paths_tx;
2894 let paths_per_worker = (path_count + workers - 1) / workers;
2895 let snapshots = &snapshots;
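                    // Partition the visible file paths evenly across workers; each worker
                    // scans its slice of every worktree snapshot and sends matching paths
                    // down the channel.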
2896 background
2897 .scoped(|scope| {
2898 for worker_ix in 0..workers {
2899 let worker_start_ix = worker_ix * paths_per_worker;
2900 let worker_end_ix = worker_start_ix + paths_per_worker;
2901 scope.spawn(async move {
2902 let mut snapshot_start_ix = 0;
2903 let mut abs_path = PathBuf::new();
2904 for snapshot in snapshots {
2905 let snapshot_end_ix =
2906 snapshot_start_ix + snapshot.visible_file_count();
2907 if worker_end_ix <= snapshot_start_ix {
2908 break;
2909 } else if worker_start_ix > snapshot_end_ix {
2910 snapshot_start_ix = snapshot_end_ix;
2911 continue;
2912 } else {
2913 let start_in_snapshot = worker_start_ix
2914 .saturating_sub(snapshot_start_ix);
2915 let end_in_snapshot =
2916 cmp::min(worker_end_ix, snapshot_end_ix)
2917 - snapshot_start_ix;
2918
2919 for entry in snapshot
2920 .files(false, start_in_snapshot)
2921 .take(end_in_snapshot - start_in_snapshot)
2922 {
2923 if matching_paths_tx.is_closed() {
2924 break;
2925 }
2926
2927 abs_path.clear();
2928 abs_path.push(&snapshot.abs_path());
2929 abs_path.push(&entry.path);
2930 let matches = if let Some(file) =
2931 fs.open_sync(&abs_path).await.log_err()
2932 {
2933 query.detect(file).unwrap_or(false)
2934 } else {
2935 false
2936 };
2937
2938 if matches {
2939 let project_path =
2940 (snapshot.id(), entry.path.clone());
2941 if matching_paths_tx
2942 .send(project_path)
2943 .await
2944 .is_err()
2945 {
2946 break;
2947 }
2948 }
2949 }
2950
2951 snapshot_start_ix = snapshot_end_ix;
2952 }
2953 }
2954 });
2955 }
2956 })
2957 .await;
2958 }
2959 })
2960 .detach();
2961
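            // Stream already-open buffers into the search immediately, then open a
            // buffer for each newly matched path as it arrives.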
2962 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2963 let open_buffers = self
2964 .opened_buffers
2965 .values()
2966 .filter_map(|b| b.upgrade(cx))
2967 .collect::<HashSet<_>>();
2968 cx.spawn(|this, cx| async move {
2969 for buffer in &open_buffers {
2970 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2971 buffers_tx.send((buffer.clone(), snapshot)).await?;
2972 }
2973
2974 let open_buffers = Rc::new(RefCell::new(open_buffers));
2975 while let Some(project_path) = matching_paths_rx.next().await {
2976 if buffers_tx.is_closed() {
2977 break;
2978 }
2979
2980 let this = this.clone();
2981 let open_buffers = open_buffers.clone();
2982 let buffers_tx = buffers_tx.clone();
2983 cx.spawn(|mut cx| async move {
2984 if let Some(buffer) = this
2985 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2986 .await
2987 .log_err()
2988 {
2989 if open_buffers.borrow_mut().insert(buffer.clone()) {
2990 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2991 buffers_tx.send((buffer, snapshot)).await?;
2992 }
2993 }
2994
2995 Ok::<_, anyhow::Error>(())
2996 })
2997 .detach();
2998 }
2999
3000 Ok::<_, anyhow::Error>(())
3001 })
3002 .detach_and_log_err(cx);
3003
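            // Run the text search over each buffer's snapshot on background threads,
            // collecting anchor ranges per buffer.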
3004 let background = cx.background().clone();
3005 cx.background().spawn(async move {
3006 let query = &query;
3007 let mut matched_buffers = Vec::new();
3008 for _ in 0..workers {
3009 matched_buffers.push(HashMap::default());
3010 }
3011 background
3012 .scoped(|scope| {
3013 for worker_matched_buffers in matched_buffers.iter_mut() {
3014 let mut buffers_rx = buffers_rx.clone();
3015 scope.spawn(async move {
3016 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3017 let buffer_matches = query
3018 .search(snapshot.as_rope())
3019 .await
3020 .iter()
3021 .map(|range| {
3022 snapshot.anchor_before(range.start)
3023 ..snapshot.anchor_after(range.end)
3024 })
3025 .collect::<Vec<_>>();
3026 if !buffer_matches.is_empty() {
3027 worker_matched_buffers
3028 .insert(buffer.clone(), buffer_matches);
3029 }
3030 }
3031 });
3032 }
3033 })
3034 .await;
3035 Ok(matched_buffers.into_iter().flatten().collect())
3036 })
3037 } else if let Some(project_id) = self.remote_id() {
3038 let request = self.client.request(query.to_proto(project_id));
3039 cx.spawn(|this, mut cx| async move {
3040 let response = request.await?;
3041 let mut result = HashMap::default();
3042 for location in response.locations {
3043 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3044 let target_buffer = this
3045 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3046 .await?;
3047 let start = location
3048 .start
3049 .and_then(deserialize_anchor)
3050 .ok_or_else(|| anyhow!("missing target start"))?;
3051 let end = location
3052 .end
3053 .and_then(deserialize_anchor)
3054 .ok_or_else(|| anyhow!("missing target end"))?;
3055 result
3056 .entry(target_buffer)
                        .or_default()
3058 .push(start..end)
3059 }
3060 Ok(result)
3061 })
3062 } else {
3063 Task::ready(Ok(Default::default()))
3064 }
3065 }
3066
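    /// Dispatches a typed LSP command to the buffer's local language server or,
    /// for remote projects, to the host over RPC. Returns a default response when
    /// no server is available or the capability is missing.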
3067 fn request_lsp<R: LspCommand>(
3068 &self,
3069 buffer_handle: ModelHandle<Buffer>,
3070 request: R,
3071 cx: &mut ModelContext<Self>,
3072 ) -> Task<Result<R::Response>>
3073 where
3074 <R::LspRequest as lsp::request::Request>::Result: Send,
3075 {
3076 let buffer = buffer_handle.read(cx);
3077 if self.is_local() {
3078 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3079 if let Some((file, (_, language_server))) =
3080 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3081 {
3082 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3083 return cx.spawn(|this, cx| async move {
3084 if !request.check_capabilities(&language_server.capabilities()) {
3085 return Ok(Default::default());
3086 }
3087
3088 let response = language_server
3089 .request::<R::LspRequest>(lsp_params)
3090 .await
3091 .context("lsp request failed")?;
3092 request
3093 .response_from_lsp(response, this, buffer_handle, cx)
3094 .await
3095 });
3096 }
3097 } else if let Some(project_id) = self.remote_id() {
3098 let rpc = self.client.clone();
3099 let message = request.to_proto(project_id, buffer);
3100 return cx.spawn(|this, cx| async move {
3101 let response = rpc.request(message).await?;
3102 request
3103 .response_from_proto(response, this, buffer_handle, cx)
3104 .await
3105 });
3106 }
3107 Task::ready(Ok(Default::default()))
3108 }
3109
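    /// Returns the worktree containing `abs_path` along with the path relative to
    /// that worktree, creating a new local worktree when no existing one contains
    /// it.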
3110 pub fn find_or_create_local_worktree(
3111 &mut self,
3112 abs_path: impl AsRef<Path>,
3113 visible: bool,
3114 cx: &mut ModelContext<Self>,
3115 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3116 let abs_path = abs_path.as_ref();
3117 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
            Task::ready(Ok((tree, relative_path)))
3119 } else {
3120 let worktree = self.create_local_worktree(abs_path, visible, cx);
3121 cx.foreground()
3122 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3123 }
3124 }
3125
3126 pub fn find_local_worktree(
3127 &self,
3128 abs_path: &Path,
3129 cx: &AppContext,
3130 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3131 for tree in self.worktrees(cx) {
3132 if let Some(relative_path) = tree
3133 .read(cx)
3134 .as_local()
3135 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3136 {
3137 return Some((tree.clone(), relative_path.into()));
3138 }
3139 }
3140 None
3141 }
3142
3143 pub fn is_shared(&self) -> bool {
3144 match &self.client_state {
3145 ProjectClientState::Local { is_shared, .. } => *is_shared,
3146 ProjectClientState::Remote { .. } => false,
3147 }
3148 }
3149
3150 fn create_local_worktree(
3151 &mut self,
3152 abs_path: impl AsRef<Path>,
3153 visible: bool,
3154 cx: &mut ModelContext<Self>,
3155 ) -> Task<Result<ModelHandle<Worktree>>> {
3156 let fs = self.fs.clone();
3157 let client = self.client.clone();
3158 let next_entry_id = self.next_entry_id.clone();
3159 let path: Arc<Path> = abs_path.as_ref().into();
3160 let task = self
3161 .loading_local_worktrees
3162 .entry(path.clone())
3163 .or_insert_with(|| {
3164 cx.spawn(|project, mut cx| {
3165 async move {
3166 let worktree = Worktree::local(
3167 client.clone(),
3168 path.clone(),
3169 visible,
3170 fs,
3171 next_entry_id,
3172 &mut cx,
3173 )
3174 .await;
3175 project.update(&mut cx, |project, _| {
3176 project.loading_local_worktrees.remove(&path);
3177 });
3178 let worktree = worktree?;
3179
3180 let (remote_project_id, is_shared) =
3181 project.update(&mut cx, |project, cx| {
3182 project.add_worktree(&worktree, cx);
3183 (project.remote_id(), project.is_shared())
3184 });
3185
3186 if let Some(project_id) = remote_project_id {
3187 if is_shared {
3188 worktree
3189 .update(&mut cx, |worktree, cx| {
3190 worktree.as_local_mut().unwrap().share(project_id, cx)
3191 })
3192 .await?;
3193 } else {
3194 worktree
3195 .update(&mut cx, |worktree, cx| {
3196 worktree.as_local_mut().unwrap().register(project_id, cx)
3197 })
3198 .await?;
3199 }
3200 }
3201
3202 Ok(worktree)
3203 }
                    .map_err(Arc::new)
3205 })
3206 .shared()
3207 })
3208 .clone();
3209 cx.foreground().spawn(async move {
3210 match task.await {
3211 Ok(worktree) => Ok(worktree),
3212 Err(err) => Err(anyhow!("{}", err)),
3213 }
3214 })
3215 }
3216
3217 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3218 self.worktrees.retain(|worktree| {
3219 worktree
3220 .upgrade(cx)
3221 .map_or(false, |w| w.read(cx).id() != id)
3222 });
3223 cx.notify();
3224 }
3225
3226 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3227 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3228 if worktree.read(cx).is_local() {
3229 cx.subscribe(&worktree, |this, worktree, _, cx| {
3230 this.update_local_worktree_buffers(worktree, cx);
3231 })
3232 .detach();
3233 }
3234
3235 let push_strong_handle = {
3236 let worktree = worktree.read(cx);
3237 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3238 };
3239 if push_strong_handle {
3240 self.worktrees
3241 .push(WorktreeHandle::Strong(worktree.clone()));
3242 } else {
3243 cx.observe_release(&worktree, |this, _, cx| {
3244 this.worktrees
3245 .retain(|worktree| worktree.upgrade(cx).is_some());
3246 cx.notify();
3247 })
3248 .detach();
3249 self.worktrees
3250 .push(WorktreeHandle::Weak(worktree.downgrade()));
3251 }
3252 cx.notify();
3253 }
3254
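    /// Reconciles open buffers with a local worktree's latest snapshot, refreshing
    /// each buffer's `File` and notifying collaborators of the change, and dropping
    /// entries for buffers that have been released.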
3255 fn update_local_worktree_buffers(
3256 &mut self,
3257 worktree_handle: ModelHandle<Worktree>,
3258 cx: &mut ModelContext<Self>,
3259 ) {
3260 let snapshot = worktree_handle.read(cx).snapshot();
3261 let mut buffers_to_delete = Vec::new();
3262 for (buffer_id, buffer) in &self.opened_buffers {
3263 if let Some(buffer) = buffer.upgrade(cx) {
3264 buffer.update(cx, |buffer, cx| {
3265 if let Some(old_file) = File::from_dyn(buffer.file()) {
3266 if old_file.worktree != worktree_handle {
3267 return;
3268 }
3269
3270 let new_file = if let Some(entry) = old_file
3271 .entry_id
3272 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3273 {
3274 File {
3275 is_local: true,
3276 entry_id: Some(entry.id),
3277 mtime: entry.mtime,
3278 path: entry.path.clone(),
3279 worktree: worktree_handle.clone(),
3280 }
3281 } else if let Some(entry) =
3282 snapshot.entry_for_path(old_file.path().as_ref())
3283 {
3284 File {
3285 is_local: true,
3286 entry_id: Some(entry.id),
3287 mtime: entry.mtime,
3288 path: entry.path.clone(),
3289 worktree: worktree_handle.clone(),
3290 }
3291 } else {
3292 File {
3293 is_local: true,
3294 entry_id: None,
3295 path: old_file.path().clone(),
3296 mtime: old_file.mtime(),
3297 worktree: worktree_handle.clone(),
3298 }
3299 };
3300
3301 if let Some(project_id) = self.remote_id() {
3302 self.client
3303 .send(proto::UpdateBufferFile {
3304 project_id,
3305 buffer_id: *buffer_id as u64,
3306 file: Some(new_file.to_proto()),
3307 })
3308 .log_err();
3309 }
3310 buffer.file_updated(Box::new(new_file), cx).detach();
3311 }
3312 });
3313 } else {
3314 buffers_to_delete.push(*buffer_id);
3315 }
3316 }
3317
3318 for buffer_id in buffers_to_delete {
3319 self.opened_buffers.remove(&buffer_id);
3320 }
3321 }
3322
3323 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3324 let new_active_entry = entry.and_then(|project_path| {
3325 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3326 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3327 Some(entry.id)
3328 });
3329 if new_active_entry != self.active_entry {
3330 self.active_entry = new_active_entry;
3331 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3332 }
3333 }
3334
3335 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3336 self.language_servers_with_diagnostics_running > 0
3337 }
3338
3339 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3340 let mut summary = DiagnosticSummary::default();
3341 for (_, path_summary) in self.diagnostic_summaries(cx) {
3342 summary.error_count += path_summary.error_count;
3343 summary.warning_count += path_summary.warning_count;
3344 summary.info_count += path_summary.info_count;
3345 summary.hint_count += path_summary.hint_count;
3346 }
3347 summary
3348 }
3349
3350 pub fn diagnostic_summaries<'a>(
3351 &'a self,
3352 cx: &'a AppContext,
3353 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3354 self.worktrees(cx).flat_map(move |worktree| {
3355 let worktree = worktree.read(cx);
3356 let worktree_id = worktree.id();
3357 worktree
3358 .diagnostic_summaries()
3359 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3360 })
3361 }
3362
3363 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3364 self.language_servers_with_diagnostics_running += 1;
3365 if self.language_servers_with_diagnostics_running == 1 {
3366 cx.emit(Event::DiskBasedDiagnosticsStarted);
3367 }
3368 }
3369
3370 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3371 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3372 self.language_servers_with_diagnostics_running -= 1;
3373 if self.language_servers_with_diagnostics_running == 0 {
3374 cx.emit(Event::DiskBasedDiagnosticsFinished);
3375 }
3376 }
3377
3378 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3379 self.active_entry
3380 }
3381
3382 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3383 self.worktree_for_id(path.worktree_id, cx)?
3384 .read(cx)
3385 .entry_for_path(&path.path)
3386 .map(|entry| entry.id)
3387 }
3388
3389 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3390 let worktree = self.worktree_for_entry(entry_id, cx)?;
3391 let worktree = worktree.read(cx);
3392 let worktree_id = worktree.id();
3393 let path = worktree.entry_for_id(entry_id)?.path.clone();
3394 Some(ProjectPath { worktree_id, path })
3395 }
3396
3397 // RPC message handlers
3398
3399 async fn handle_unshare_project(
3400 this: ModelHandle<Self>,
3401 _: TypedEnvelope<proto::UnshareProject>,
3402 _: Arc<Client>,
3403 mut cx: AsyncAppContext,
3404 ) -> Result<()> {
3405 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3406 Ok(())
3407 }
3408
3409 async fn handle_add_collaborator(
3410 this: ModelHandle<Self>,
3411 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3412 _: Arc<Client>,
3413 mut cx: AsyncAppContext,
3414 ) -> Result<()> {
3415 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3416 let collaborator = envelope
3417 .payload
3418 .collaborator
3419 .take()
3420 .ok_or_else(|| anyhow!("empty collaborator"))?;
3421
3422 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3423 this.update(&mut cx, |this, cx| {
3424 this.collaborators
3425 .insert(collaborator.peer_id, collaborator);
3426 cx.notify();
3427 });
3428
3429 Ok(())
3430 }
3431
3432 async fn handle_remove_collaborator(
3433 this: ModelHandle<Self>,
3434 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3435 _: Arc<Client>,
3436 mut cx: AsyncAppContext,
3437 ) -> Result<()> {
3438 this.update(&mut cx, |this, cx| {
3439 let peer_id = PeerId(envelope.payload.peer_id);
3440 let replica_id = this
3441 .collaborators
3442 .remove(&peer_id)
3443 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3444 .replica_id;
3445 for (_, buffer) in &this.opened_buffers {
3446 if let Some(buffer) = buffer.upgrade(cx) {
3447 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3448 }
3449 }
3450 cx.emit(Event::CollaboratorLeft(peer_id));
3451 cx.notify();
3452 Ok(())
3453 })
3454 }
3455
3456 async fn handle_register_worktree(
3457 this: ModelHandle<Self>,
3458 envelope: TypedEnvelope<proto::RegisterWorktree>,
3459 client: Arc<Client>,
3460 mut cx: AsyncAppContext,
3461 ) -> Result<()> {
3462 this.update(&mut cx, |this, cx| {
3463 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3464 let replica_id = this.replica_id();
3465 let worktree = proto::Worktree {
3466 id: envelope.payload.worktree_id,
3467 root_name: envelope.payload.root_name,
3468 entries: Default::default(),
3469 diagnostic_summaries: Default::default(),
3470 visible: envelope.payload.visible,
3471 };
3472 let (worktree, load_task) =
3473 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3474 this.add_worktree(&worktree, cx);
3475 load_task.detach();
3476 Ok(())
3477 })
3478 }
3479
3480 async fn handle_unregister_worktree(
3481 this: ModelHandle<Self>,
3482 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3483 _: Arc<Client>,
3484 mut cx: AsyncAppContext,
3485 ) -> Result<()> {
3486 this.update(&mut cx, |this, cx| {
3487 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3488 this.remove_worktree(worktree_id, cx);
3489 Ok(())
3490 })
3491 }
3492
3493 async fn handle_update_worktree(
3494 this: ModelHandle<Self>,
3495 envelope: TypedEnvelope<proto::UpdateWorktree>,
3496 _: Arc<Client>,
3497 mut cx: AsyncAppContext,
3498 ) -> Result<()> {
3499 this.update(&mut cx, |this, cx| {
3500 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3501 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3502 worktree.update(cx, |worktree, _| {
3503 let worktree = worktree.as_remote_mut().unwrap();
3504 worktree.update_from_remote(envelope)
3505 })?;
3506 }
3507 Ok(())
3508 })
3509 }
3510
3511 async fn handle_update_diagnostic_summary(
3512 this: ModelHandle<Self>,
3513 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3514 _: Arc<Client>,
3515 mut cx: AsyncAppContext,
3516 ) -> Result<()> {
3517 this.update(&mut cx, |this, cx| {
3518 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3519 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3520 if let Some(summary) = envelope.payload.summary {
3521 let project_path = ProjectPath {
3522 worktree_id,
3523 path: Path::new(&summary.path).into(),
3524 };
3525 worktree.update(cx, |worktree, _| {
3526 worktree
3527 .as_remote_mut()
3528 .unwrap()
3529 .update_diagnostic_summary(project_path.path.clone(), &summary);
3530 });
3531 cx.emit(Event::DiagnosticsUpdated(project_path));
3532 }
3533 }
3534 Ok(())
3535 })
3536 }
3537
3538 async fn handle_start_language_server(
3539 this: ModelHandle<Self>,
3540 envelope: TypedEnvelope<proto::StartLanguageServer>,
3541 _: Arc<Client>,
3542 mut cx: AsyncAppContext,
3543 ) -> Result<()> {
3544 let server = envelope
3545 .payload
3546 .server
3547 .ok_or_else(|| anyhow!("invalid server"))?;
3548 this.update(&mut cx, |this, cx| {
3549 this.language_server_statuses.insert(
3550 server.id as usize,
3551 LanguageServerStatus {
3552 name: server.name,
3553 pending_work: Default::default(),
3554 pending_diagnostic_updates: 0,
3555 },
3556 );
3557 cx.notify();
3558 });
3559 Ok(())
3560 }
3561
3562 async fn handle_update_language_server(
3563 this: ModelHandle<Self>,
3564 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3565 _: Arc<Client>,
3566 mut cx: AsyncAppContext,
3567 ) -> Result<()> {
3568 let language_server_id = envelope.payload.language_server_id as usize;
3569 match envelope
3570 .payload
3571 .variant
3572 .ok_or_else(|| anyhow!("invalid variant"))?
3573 {
3574 proto::update_language_server::Variant::WorkStart(payload) => {
3575 this.update(&mut cx, |this, cx| {
3576 this.on_lsp_work_start(language_server_id, payload.token, cx);
3577 })
3578 }
3579 proto::update_language_server::Variant::WorkProgress(payload) => {
3580 this.update(&mut cx, |this, cx| {
3581 this.on_lsp_work_progress(
3582 language_server_id,
3583 payload.token,
3584 LanguageServerProgress {
3585 message: payload.message,
3586 percentage: payload.percentage.map(|p| p as usize),
3587 last_update_at: Instant::now(),
3588 },
3589 cx,
3590 );
3591 })
3592 }
3593 proto::update_language_server::Variant::WorkEnd(payload) => {
3594 this.update(&mut cx, |this, cx| {
3595 this.on_lsp_work_end(language_server_id, payload.token, cx);
3596 })
3597 }
3598 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3599 this.update(&mut cx, |this, cx| {
3600 this.disk_based_diagnostics_started(cx);
3601 })
3602 }
3603 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3604 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3605 }
3606 }
3607
3608 Ok(())
3609 }
3610
3611 async fn handle_update_buffer(
3612 this: ModelHandle<Self>,
3613 envelope: TypedEnvelope<proto::UpdateBuffer>,
3614 _: Arc<Client>,
3615 mut cx: AsyncAppContext,
3616 ) -> Result<()> {
3617 this.update(&mut cx, |this, cx| {
3618 let payload = envelope.payload.clone();
3619 let buffer_id = payload.buffer_id;
3620 let ops = payload
3621 .operations
3622 .into_iter()
                .map(language::proto::deserialize_operation)
3624 .collect::<Result<Vec<_>, _>>()?;
3625 match this.opened_buffers.entry(buffer_id) {
3626 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3627 OpenBuffer::Strong(buffer) => {
3628 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3629 }
3630 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3631 OpenBuffer::Weak(_) => {}
3632 },
3633 hash_map::Entry::Vacant(e) => {
3634 e.insert(OpenBuffer::Loading(ops));
3635 }
3636 }
3637 Ok(())
3638 })
3639 }
3640
3641 async fn handle_update_buffer_file(
3642 this: ModelHandle<Self>,
3643 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3644 _: Arc<Client>,
3645 mut cx: AsyncAppContext,
3646 ) -> Result<()> {
3647 this.update(&mut cx, |this, cx| {
3648 let payload = envelope.payload.clone();
3649 let buffer_id = payload.buffer_id;
3650 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3651 let worktree = this
3652 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3653 .ok_or_else(|| anyhow!("no such worktree"))?;
3654 let file = File::from_proto(file, worktree.clone(), cx)?;
3655 let buffer = this
3656 .opened_buffers
3657 .get_mut(&buffer_id)
3658 .and_then(|b| b.upgrade(cx))
3659 .ok_or_else(|| anyhow!("no such buffer"))?;
3660 buffer.update(cx, |buffer, cx| {
3661 buffer.file_updated(Box::new(file), cx).detach();
3662 });
3663 Ok(())
3664 })
3665 }
3666
3667 async fn handle_save_buffer(
3668 this: ModelHandle<Self>,
3669 envelope: TypedEnvelope<proto::SaveBuffer>,
3670 _: Arc<Client>,
3671 mut cx: AsyncAppContext,
3672 ) -> Result<proto::BufferSaved> {
3673 let buffer_id = envelope.payload.buffer_id;
3674 let requested_version = deserialize_version(envelope.payload.version);
3675
3676 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3677 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3678 let buffer = this
3679 .opened_buffers
3680 .get(&buffer_id)
3681 .map(|buffer| buffer.upgrade(cx).unwrap())
3682 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3683 Ok::<_, anyhow::Error>((project_id, buffer))
3684 })?;
3685 buffer
3686 .update(&mut cx, |buffer, _| {
3687 buffer.wait_for_version(requested_version)
3688 })
3689 .await;
3690
3691 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3692 Ok(proto::BufferSaved {
3693 project_id,
3694 buffer_id,
3695 version: serialize_version(&saved_version),
3696 mtime: Some(mtime.into()),
3697 })
3698 }
3699
3700 async fn handle_format_buffers(
3701 this: ModelHandle<Self>,
3702 envelope: TypedEnvelope<proto::FormatBuffers>,
3703 _: Arc<Client>,
3704 mut cx: AsyncAppContext,
3705 ) -> Result<proto::FormatBuffersResponse> {
3706 let sender_id = envelope.original_sender_id()?;
3707 let format = this.update(&mut cx, |this, cx| {
3708 let mut buffers = HashSet::default();
3709 for buffer_id in &envelope.payload.buffer_ids {
3710 buffers.insert(
3711 this.opened_buffers
3712 .get(buffer_id)
3713 .map(|buffer| buffer.upgrade(cx).unwrap())
3714 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3715 );
3716 }
3717 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3718 })?;
3719
3720 let project_transaction = format.await?;
3721 let project_transaction = this.update(&mut cx, |this, cx| {
3722 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3723 });
3724 Ok(proto::FormatBuffersResponse {
3725 transaction: Some(project_transaction),
3726 })
3727 }
3728
3729 async fn handle_get_completions(
3730 this: ModelHandle<Self>,
3731 envelope: TypedEnvelope<proto::GetCompletions>,
3732 _: Arc<Client>,
3733 mut cx: AsyncAppContext,
3734 ) -> Result<proto::GetCompletionsResponse> {
3735 let position = envelope
3736 .payload
3737 .position
3738 .and_then(language::proto::deserialize_anchor)
3739 .ok_or_else(|| anyhow!("invalid position"))?;
3740 let version = deserialize_version(envelope.payload.version);
3741 let buffer = this.read_with(&cx, |this, cx| {
3742 this.opened_buffers
3743 .get(&envelope.payload.buffer_id)
3744 .map(|buffer| buffer.upgrade(cx).unwrap())
3745 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3746 })?;
3747 buffer
3748 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3749 .await;
3750 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3751 let completions = this
3752 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3753 .await?;
3754
3755 Ok(proto::GetCompletionsResponse {
3756 completions: completions
3757 .iter()
3758 .map(language::proto::serialize_completion)
3759 .collect(),
3760 version: serialize_version(&version),
3761 })
3762 }
3763
3764 async fn handle_apply_additional_edits_for_completion(
3765 this: ModelHandle<Self>,
3766 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3767 _: Arc<Client>,
3768 mut cx: AsyncAppContext,
3769 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3770 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3771 let buffer = this
3772 .opened_buffers
3773 .get(&envelope.payload.buffer_id)
3774 .map(|buffer| buffer.upgrade(cx).unwrap())
3775 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3776 let language = buffer.read(cx).language();
3777 let completion = language::proto::deserialize_completion(
3778 envelope
3779 .payload
3780 .completion
3781 .ok_or_else(|| anyhow!("invalid completion"))?,
3782 language,
3783 )?;
3784 Ok::<_, anyhow::Error>(
3785 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3786 )
3787 })?;
3788
3789 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3790 transaction: apply_additional_edits
3791 .await?
3792 .as_ref()
3793 .map(language::proto::serialize_transaction),
3794 })
3795 }
3796
3797 async fn handle_get_code_actions(
3798 this: ModelHandle<Self>,
3799 envelope: TypedEnvelope<proto::GetCodeActions>,
3800 _: Arc<Client>,
3801 mut cx: AsyncAppContext,
3802 ) -> Result<proto::GetCodeActionsResponse> {
3803 let start = envelope
3804 .payload
3805 .start
3806 .and_then(language::proto::deserialize_anchor)
3807 .ok_or_else(|| anyhow!("invalid start"))?;
3808 let end = envelope
3809 .payload
3810 .end
3811 .and_then(language::proto::deserialize_anchor)
3812 .ok_or_else(|| anyhow!("invalid end"))?;
3813 let buffer = this.update(&mut cx, |this, cx| {
3814 this.opened_buffers
3815 .get(&envelope.payload.buffer_id)
3816 .map(|buffer| buffer.upgrade(cx).unwrap())
3817 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3818 })?;
3819 buffer
3820 .update(&mut cx, |buffer, _| {
3821 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3822 })
3823 .await;
3824
3825 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3826 let code_actions = this.update(&mut cx, |this, cx| {
3827 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3828 })?;
3829
3830 Ok(proto::GetCodeActionsResponse {
3831 actions: code_actions
3832 .await?
3833 .iter()
3834 .map(language::proto::serialize_code_action)
3835 .collect(),
3836 version: serialize_version(&version),
3837 })
3838 }
3839
3840 async fn handle_apply_code_action(
3841 this: ModelHandle<Self>,
3842 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3843 _: Arc<Client>,
3844 mut cx: AsyncAppContext,
3845 ) -> Result<proto::ApplyCodeActionResponse> {
3846 let sender_id = envelope.original_sender_id()?;
3847 let action = language::proto::deserialize_code_action(
3848 envelope
3849 .payload
3850 .action
3851 .ok_or_else(|| anyhow!("invalid action"))?,
3852 )?;
3853 let apply_code_action = this.update(&mut cx, |this, cx| {
3854 let buffer = this
3855 .opened_buffers
3856 .get(&envelope.payload.buffer_id)
3857 .map(|buffer| buffer.upgrade(cx).unwrap())
3858 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3859 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3860 })?;
3861
3862 let project_transaction = apply_code_action.await?;
3863 let project_transaction = this.update(&mut cx, |this, cx| {
3864 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3865 });
3866 Ok(proto::ApplyCodeActionResponse {
3867 transaction: Some(project_transaction),
3868 })
3869 }
3870
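    /// Generic handler for LSP-backed requests forwarded by collaborators.
    ///
    /// Resolves the target buffer, reconstructs the typed request from its
    /// protobuf form, forwards it to the local language server via
    /// `request_lsp`, and serializes the response along with the buffer
    /// version it was computed against.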
3871 async fn handle_lsp_command<T: LspCommand>(
3872 this: ModelHandle<Self>,
3873 envelope: TypedEnvelope<T::ProtoRequest>,
3874 _: Arc<Client>,
3875 mut cx: AsyncAppContext,
3876 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3877 where
3878 <T::LspRequest as lsp::request::Request>::Result: Send,
3879 {
3880 let sender_id = envelope.original_sender_id()?;
3881 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3882 let buffer_handle = this.read_with(&cx, |this, _| {
3883 this.opened_buffers
3884 .get(&buffer_id)
3885 .and_then(|buffer| buffer.upgrade(&cx))
3886 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3887 })?;
3888 let request = T::from_proto(
3889 envelope.payload,
3890 this.clone(),
3891 buffer_handle.clone(),
3892 cx.clone(),
3893 )
3894 .await?;
3895 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3896 let response = this
3897 .update(&mut cx, |this, cx| {
3898 this.request_lsp(buffer_handle, request, cx)
3899 })
3900 .await?;
3901 this.update(&mut cx, |this, cx| {
3902 Ok(T::response_to_proto(
3903 response,
3904 this,
3905 sender_id,
3906 &buffer_version,
3907 cx,
3908 ))
3909 })
3910 }
3911
3912 async fn handle_get_project_symbols(
3913 this: ModelHandle<Self>,
3914 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3915 _: Arc<Client>,
3916 mut cx: AsyncAppContext,
3917 ) -> Result<proto::GetProjectSymbolsResponse> {
3918 let symbols = this
3919 .update(&mut cx, |this, cx| {
3920 this.symbols(&envelope.payload.query, cx)
3921 })
3922 .await?;
3923
3924 Ok(proto::GetProjectSymbolsResponse {
3925 symbols: symbols.iter().map(serialize_symbol).collect(),
3926 })
3927 }
3928
3929 async fn handle_search_project(
3930 this: ModelHandle<Self>,
3931 envelope: TypedEnvelope<proto::SearchProject>,
3932 _: Arc<Client>,
3933 mut cx: AsyncAppContext,
3934 ) -> Result<proto::SearchProjectResponse> {
3935 let peer_id = envelope.original_sender_id()?;
3936 let query = SearchQuery::from_proto(envelope.payload)?;
3937 let result = this
3938 .update(&mut cx, |this, cx| this.search(query, cx))
3939 .await?;
3940
3941 this.update(&mut cx, |this, cx| {
3942 let mut locations = Vec::new();
3943 for (buffer, ranges) in result {
3944 for range in ranges {
3945 let start = serialize_anchor(&range.start);
3946 let end = serialize_anchor(&range.end);
3947 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3948 locations.push(proto::Location {
3949 buffer: Some(buffer),
3950 start: Some(start),
3951 end: Some(end),
3952 });
3953 }
3954 }
3955 Ok(proto::SearchProjectResponse { locations })
3956 })
3957 }
3958
3959 async fn handle_open_buffer_for_symbol(
3960 this: ModelHandle<Self>,
3961 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3962 _: Arc<Client>,
3963 mut cx: AsyncAppContext,
3964 ) -> Result<proto::OpenBufferForSymbolResponse> {
3965 let peer_id = envelope.original_sender_id()?;
3966 let symbol = envelope
3967 .payload
3968 .symbol
3969 .ok_or_else(|| anyhow!("invalid symbol"))?;
3970 let symbol = this.read_with(&cx, |this, _| {
3971 let symbol = this.deserialize_symbol(symbol)?;
3972 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3973 if signature == symbol.signature {
3974 Ok(symbol)
3975 } else {
3976 Err(anyhow!("invalid symbol signature"))
3977 }
3978 })?;
3979 let buffer = this
3980 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3981 .await?;
3982
3983 Ok(proto::OpenBufferForSymbolResponse {
3984 buffer: Some(this.update(&mut cx, |this, cx| {
3985 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3986 })),
3987 })
3988 }
3989
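    /// Computes a SHA-256 digest over the worktree id, the symbol's path, and
    /// this project's private nonce. Symbols sent to peers carry this
    /// signature, and `handle_open_buffer_for_symbol` rejects any symbol whose
    /// signature doesn't match the locally recomputed one.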
3990 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3991 let mut hasher = Sha256::new();
3992 hasher.update(worktree_id.to_proto().to_be_bytes());
3993 hasher.update(path.to_string_lossy().as_bytes());
3994 hasher.update(self.nonce.to_be_bytes());
3995 hasher.finalize().as_slice().try_into().unwrap()
3996 }
3997
3998 async fn handle_open_buffer_by_id(
3999 this: ModelHandle<Self>,
4000 envelope: TypedEnvelope<proto::OpenBufferById>,
4001 _: Arc<Client>,
4002 mut cx: AsyncAppContext,
4003 ) -> Result<proto::OpenBufferResponse> {
4004 let peer_id = envelope.original_sender_id()?;
4005 let buffer = this
4006 .update(&mut cx, |this, cx| {
4007 this.open_buffer_by_id(envelope.payload.id, cx)
4008 })
4009 .await?;
4010 this.update(&mut cx, |this, cx| {
4011 Ok(proto::OpenBufferResponse {
4012 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4013 })
4014 })
4015 }
4016
4017 async fn handle_open_buffer_by_path(
4018 this: ModelHandle<Self>,
4019 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4020 _: Arc<Client>,
4021 mut cx: AsyncAppContext,
4022 ) -> Result<proto::OpenBufferResponse> {
4023 let peer_id = envelope.original_sender_id()?;
4024 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4025 let open_buffer = this.update(&mut cx, |this, cx| {
4026 this.open_buffer(
4027 ProjectPath {
4028 worktree_id,
4029 path: PathBuf::from(envelope.payload.path).into(),
4030 },
4031 cx,
4032 )
4033 });
4034
4035 let buffer = open_buffer.await?;
4036 this.update(&mut cx, |this, cx| {
4037 Ok(proto::OpenBufferResponse {
4038 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4039 })
4040 })
4041 }
4042
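    /// Serializes a `ProjectTransaction` for transmission to the given peer,
    /// pairing each affected buffer (serialized via
    /// `serialize_buffer_for_peer`) with its transaction.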
4043 fn serialize_project_transaction_for_peer(
4044 &mut self,
4045 project_transaction: ProjectTransaction,
4046 peer_id: PeerId,
4047 cx: &AppContext,
4048 ) -> proto::ProjectTransaction {
4049 let mut serialized_transaction = proto::ProjectTransaction {
4050 buffers: Default::default(),
4051 transactions: Default::default(),
4052 };
4053 for (buffer, transaction) in project_transaction.0 {
4054 serialized_transaction
4055 .buffers
4056 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4057 serialized_transaction
4058 .transactions
4059 .push(language::proto::serialize_transaction(&transaction));
4060 }
4061 serialized_transaction
4062 }
4063
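    /// Rebuilds a `ProjectTransaction` from its protobuf form: resolves each
    /// buffer, waits for the transaction's edits to arrive locally, and, when
    /// `push_to_history` is true, pushes each transaction onto its buffer's
    /// undo history.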
4064 fn deserialize_project_transaction(
4065 &mut self,
4066 message: proto::ProjectTransaction,
4067 push_to_history: bool,
4068 cx: &mut ModelContext<Self>,
4069 ) -> Task<Result<ProjectTransaction>> {
4070 cx.spawn(|this, mut cx| async move {
4071 let mut project_transaction = ProjectTransaction::default();
4072 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4073 let buffer = this
4074 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4075 .await?;
4076 let transaction = language::proto::deserialize_transaction(transaction)?;
4077 project_transaction.0.insert(buffer, transaction);
4078 }
4079
4080 for (buffer, transaction) in &project_transaction.0 {
4081 buffer
4082 .update(&mut cx, |buffer, _| {
4083 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4084 })
4085 .await;
4086
4087 if push_to_history {
4088 buffer.update(&mut cx, |buffer, _| {
4089 buffer.push_transaction(transaction.clone(), Instant::now());
4090 });
4091 }
4092 }
4093
4094 Ok(project_transaction)
4095 })
4096 }
4097
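    /// Serializes a buffer for the given peer. The first time a buffer is sent
    /// to a peer its full state is included; afterwards only its remote id is
    /// sent, since the peer already holds a replica.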
4098 fn serialize_buffer_for_peer(
4099 &mut self,
4100 buffer: &ModelHandle<Buffer>,
4101 peer_id: PeerId,
4102 cx: &AppContext,
4103 ) -> proto::Buffer {
4104 let buffer_id = buffer.read(cx).remote_id();
4105 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4106 if shared_buffers.insert(buffer_id) {
4107 proto::Buffer {
4108 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4109 }
4110 } else {
4111 proto::Buffer {
4112 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4113 }
4114 }
4115 }
4116
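    /// Resolves a protobuf buffer into a local buffer handle. An `Id` variant
    /// waits until the referenced buffer has been opened locally, while a
    /// `State` variant constructs a new replica (restoring its file, if any)
    /// and registers it with the project.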
4117 fn deserialize_buffer(
4118 &mut self,
4119 buffer: proto::Buffer,
4120 cx: &mut ModelContext<Self>,
4121 ) -> Task<Result<ModelHandle<Buffer>>> {
4122 let replica_id = self.replica_id();
4123
4124 let opened_buffer_tx = self.opened_buffer.0.clone();
4125 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4126 cx.spawn(|this, mut cx| async move {
4127 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4128 proto::buffer::Variant::Id(id) => {
4129 let buffer = loop {
4130 let buffer = this.read_with(&cx, |this, cx| {
4131 this.opened_buffers
4132 .get(&id)
4133 .and_then(|buffer| buffer.upgrade(cx))
4134 });
4135 if let Some(buffer) = buffer {
4136 break buffer;
4137 }
4138 opened_buffer_rx
4139 .next()
4140 .await
4141 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4142 };
4143 Ok(buffer)
4144 }
4145 proto::buffer::Variant::State(mut buffer) => {
4146 let mut buffer_worktree = None;
4147 let mut buffer_file = None;
4148 if let Some(file) = buffer.file.take() {
4149 this.read_with(&cx, |this, cx| {
4150 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4151 let worktree =
4152 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4153 anyhow!("no worktree found for id {}", file.worktree_id)
4154 })?;
4155 buffer_file =
4156 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4157 as Box<dyn language::File>);
4158 buffer_worktree = Some(worktree);
4159 Ok::<_, anyhow::Error>(())
4160 })?;
4161 }
4162
4163 let buffer = cx.add_model(|cx| {
4164 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4165 });
4166
4167 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4168
4169 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4170 Ok(buffer)
4171 }
4172 }
4173 })
4174 }
4175
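    /// Converts a `proto::Symbol` back into a `Symbol`, resolving its language
    /// from the path so a language-specific label can be produced.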
4176 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4177 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4178 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4179 let start = serialized_symbol
4180 .start
4181 .ok_or_else(|| anyhow!("invalid start"))?;
4182 let end = serialized_symbol
4183 .end
4184 .ok_or_else(|| anyhow!("invalid end"))?;
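        // Note: this transmute assumes the wire representation of the symbol
        // kind matches the in-memory enum's layout.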
4185 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4186 let path = PathBuf::from(serialized_symbol.path);
4187 let language = self.languages.select_language(&path);
4188 Ok(Symbol {
4189 source_worktree_id,
4190 worktree_id,
4191 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4192 label: language
4193 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4194 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4195 name: serialized_symbol.name,
4196 path,
4197 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4198 kind,
4199 signature: serialized_symbol
4200 .signature
4201 .try_into()
4202 .map_err(|_| anyhow!("invalid signature"))?,
4203 })
4204 }
4205
4206 async fn handle_buffer_saved(
4207 this: ModelHandle<Self>,
4208 envelope: TypedEnvelope<proto::BufferSaved>,
4209 _: Arc<Client>,
4210 mut cx: AsyncAppContext,
4211 ) -> Result<()> {
4212 let version = deserialize_version(envelope.payload.version);
4213 let mtime = envelope
4214 .payload
4215 .mtime
4216 .ok_or_else(|| anyhow!("missing mtime"))?
4217 .into();
4218
4219 this.update(&mut cx, |this, cx| {
4220 let buffer = this
4221 .opened_buffers
4222 .get(&envelope.payload.buffer_id)
4223 .and_then(|buffer| buffer.upgrade(cx));
4224 if let Some(buffer) = buffer {
4225 buffer.update(cx, |buffer, cx| {
4226 buffer.did_save(version, mtime, None, cx);
4227 });
4228 }
4229 Ok(())
4230 })
4231 }
4232
4233 async fn handle_buffer_reloaded(
4234 this: ModelHandle<Self>,
4235 envelope: TypedEnvelope<proto::BufferReloaded>,
4236 _: Arc<Client>,
4237 mut cx: AsyncAppContext,
4238 ) -> Result<()> {
4239 let payload = envelope.payload.clone();
4240 let version = deserialize_version(payload.version);
4241 let mtime = payload
4242 .mtime
4243 .ok_or_else(|| anyhow!("missing mtime"))?
4244 .into();
4245 this.update(&mut cx, |this, cx| {
4246 let buffer = this
4247 .opened_buffers
4248 .get(&payload.buffer_id)
4249 .and_then(|buffer| buffer.upgrade(cx));
4250 if let Some(buffer) = buffer {
4251 buffer.update(cx, |buffer, cx| {
4252 buffer.did_reload(version, mtime, cx);
4253 });
4254 }
4255 Ok(())
4256 })
4257 }
4258
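    /// Fuzzy-matches `query` against the file paths of all visible worktrees,
    /// returning up to `max_results` matches. The matching itself runs on the
    /// background executor and can be interrupted via `cancel_flag`.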
4259 pub fn match_paths<'a>(
4260 &self,
4261 query: &'a str,
4262 include_ignored: bool,
4263 smart_case: bool,
4264 max_results: usize,
4265 cancel_flag: &'a AtomicBool,
4266 cx: &AppContext,
4267 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4268 let worktrees = self
4269 .worktrees(cx)
4270 .filter(|worktree| worktree.read(cx).is_visible())
4271 .collect::<Vec<_>>();
4272 let include_root_name = worktrees.len() > 1;
4273 let candidate_sets = worktrees
4274 .into_iter()
4275 .map(|worktree| CandidateSet {
4276 snapshot: worktree.read(cx).snapshot(),
4277 include_ignored,
4278 include_root_name,
4279 })
4280 .collect::<Vec<_>>();
4281
4282 let background = cx.background().clone();
4283 async move {
4284 fuzzy::match_paths(
4285 candidate_sets.as_slice(),
4286 query,
4287 smart_case,
4288 max_results,
4289 cancel_flag,
4290 background,
4291 )
4292 .await
4293 }
4294 }
4295
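    /// Converts a sequence of LSP text edits, expressed against the given
    /// document `version`, into anchor ranges in the buffer. Adjacent or
    /// newline-separated edits are coalesced, and multi-line replacements are
    /// diffed against the old text so anchors in unchanged regions are
    /// preserved.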
4296 fn edits_from_lsp(
4297 &mut self,
4298 buffer: &ModelHandle<Buffer>,
4299 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4300 version: Option<i32>,
4301 cx: &mut ModelContext<Self>,
4302 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4303 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4304 cx.background().spawn(async move {
4305 let snapshot = snapshot?;
4306 let mut lsp_edits = lsp_edits
4307 .into_iter()
4308 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4309 .peekable();
4310
4311 let mut edits = Vec::new();
4312 while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also combine LSP edits that are separated from each other by only
                // a newline. This is important because, for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
4322 while let Some((next_range, next_text)) = lsp_edits.peek() {
4323 if next_range.start > range.end {
4324 if next_range.start.row > range.end.row + 1
4325 || next_range.start.column > 0
4326 || snapshot.clip_point_utf16(
4327 PointUtf16::new(range.end.row, u32::MAX),
4328 Bias::Left,
4329 ) > range.end
4330 {
4331 break;
4332 }
4333 new_text.push('\n');
4334 }
4335 range.end = next_range.end;
4336 new_text.push_str(&next_text);
4337 lsp_edits.next();
4338 }
4339
4340 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4341 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4342 {
4343 return Err(anyhow!("invalid edits received from language server"));
4344 }
4345
4346 // For multiline edits, perform a diff of the old and new text so that
4347 // we can identify the changes more precisely, preserving the locations
4348 // of any anchors positioned in the unchanged regions.
4349 if range.end.row > range.start.row {
4350 let mut offset = range.start.to_offset(&snapshot);
4351 let old_text = snapshot.text_for_range(range).collect::<String>();
4352
4353 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4354 let mut moved_since_edit = true;
4355 for change in diff.iter_all_changes() {
4356 let tag = change.tag();
4357 let value = change.value();
4358 match tag {
4359 ChangeTag::Equal => {
4360 offset += value.len();
4361 moved_since_edit = true;
4362 }
4363 ChangeTag::Delete => {
4364 let start = snapshot.anchor_after(offset);
4365 let end = snapshot.anchor_before(offset + value.len());
4366 if moved_since_edit {
4367 edits.push((start..end, String::new()));
4368 } else {
4369 edits.last_mut().unwrap().0.end = end;
4370 }
4371 offset += value.len();
4372 moved_since_edit = false;
4373 }
4374 ChangeTag::Insert => {
4375 if moved_since_edit {
4376 let anchor = snapshot.anchor_after(offset);
4377 edits.push((anchor.clone()..anchor, value.to_string()));
4378 } else {
4379 edits.last_mut().unwrap().1.push_str(value);
4380 }
4381 moved_since_edit = false;
4382 }
4383 }
4384 }
4385 } else if range.end == range.start {
4386 let anchor = snapshot.anchor_after(range.start);
4387 edits.push((anchor.clone()..anchor, new_text));
4388 } else {
4389 let edit_start = snapshot.anchor_after(range.start);
4390 let edit_end = snapshot.anchor_before(range.end);
4391 edits.push((edit_start..edit_end, new_text));
4392 }
4393 }
4394
4395 Ok(edits)
4396 })
4397 }
4398
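    /// Returns the buffer snapshot corresponding to the given LSP document
    /// version, discarding snapshots more than `OLD_VERSIONS_TO_RETAIN`
    /// versions older than the requested one. When no version is given, the
    /// buffer's current text snapshot is returned.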
4399 fn buffer_snapshot_for_lsp_version(
4400 &mut self,
4401 buffer: &ModelHandle<Buffer>,
4402 version: Option<i32>,
4403 cx: &AppContext,
4404 ) -> Result<TextBufferSnapshot> {
4405 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4406
4407 if let Some(version) = version {
4408 let buffer_id = buffer.read(cx).remote_id();
4409 let snapshots = self
4410 .buffer_snapshots
4411 .get_mut(&buffer_id)
4412 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4413 let mut found_snapshot = None;
4414 snapshots.retain(|(snapshot_version, snapshot)| {
4415 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4416 false
4417 } else {
4418 if *snapshot_version == version {
4419 found_snapshot = Some(snapshot.clone());
4420 }
4421 true
4422 }
4423 });
4424
4425 found_snapshot.ok_or_else(|| {
4426 anyhow!(
4427 "snapshot not found for buffer {} at version {}",
4428 buffer_id,
4429 version
4430 )
4431 })
4432 } else {
            Ok(buffer.read(cx).text_snapshot())
4434 }
4435 }
4436
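    /// Returns the LSP adapter and language server running for this buffer's
    /// language in the buffer's worktree, if any.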
4437 fn language_server_for_buffer(
4438 &self,
4439 buffer: &Buffer,
4440 cx: &AppContext,
4441 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4442 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4443 let worktree_id = file.worktree_id(cx);
4444 self.language_servers
4445 .get(&(worktree_id, language.lsp_adapter()?.name()))
4446 } else {
4447 None
4448 }
4449 }
4450}
4451
4452impl WorktreeHandle {
4453 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4454 match self {
4455 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4456 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4457 }
4458 }
4459}
4460
4461impl OpenBuffer {
4462 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4463 match self {
4464 OpenBuffer::Strong(handle) => Some(handle.clone()),
4465 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4466 OpenBuffer::Loading(_) => None,
4467 }
4468 }
4469}
4470
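/// Adapts a worktree snapshot into a candidate set that the fuzzy matcher can
/// traverse when matching project paths.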
4471struct CandidateSet {
4472 snapshot: Snapshot,
4473 include_ignored: bool,
4474 include_root_name: bool,
4475}
4476
4477impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4478 type Candidates = CandidateSetIter<'a>;
4479
4480 fn id(&self) -> usize {
4481 self.snapshot.id().to_usize()
4482 }
4483
4484 fn len(&self) -> usize {
4485 if self.include_ignored {
4486 self.snapshot.file_count()
4487 } else {
4488 self.snapshot.visible_file_count()
4489 }
4490 }
4491
4492 fn prefix(&self) -> Arc<str> {
4493 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4494 self.snapshot.root_name().into()
4495 } else if self.include_root_name {
4496 format!("{}/", self.snapshot.root_name()).into()
4497 } else {
4498 "".into()
4499 }
4500 }
4501
4502 fn candidates(&'a self, start: usize) -> Self::Candidates {
4503 CandidateSetIter {
4504 traversal: self.snapshot.files(self.include_ignored, start),
4505 }
4506 }
4507}
4508
4509struct CandidateSetIter<'a> {
4510 traversal: Traversal<'a>,
4511}
4512
4513impl<'a> Iterator for CandidateSetIter<'a> {
4514 type Item = PathMatchCandidate<'a>;
4515
4516 fn next(&mut self) -> Option<Self::Item> {
4517 self.traversal.next().map(|entry| {
4518 if let EntryKind::File(char_bag) = entry.kind {
4519 PathMatchCandidate {
4520 path: &entry.path,
4521 char_bag,
4522 }
4523 } else {
4524 unreachable!()
4525 }
4526 })
4527 }
4528}
4529
4530impl Entity for Project {
4531 type Event = Event;
4532
4533 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4534 match &self.client_state {
4535 ProjectClientState::Local { remote_id_rx, .. } => {
4536 if let Some(project_id) = *remote_id_rx.borrow() {
4537 self.client
4538 .send(proto::UnregisterProject { project_id })
4539 .log_err();
4540 }
4541 }
4542 ProjectClientState::Remote { remote_id, .. } => {
4543 self.client
4544 .send(proto::LeaveProject {
4545 project_id: *remote_id,
4546 })
4547 .log_err();
4548 }
4549 }
4550 }
4551
4552 fn app_will_quit(
4553 &mut self,
4554 _: &mut MutableAppContext,
4555 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4556 let shutdown_futures = self
4557 .language_servers
4558 .drain()
4559 .filter_map(|(_, (_, server))| server.shutdown())
4560 .collect::<Vec<_>>();
4561 Some(
4562 async move {
4563 futures::future::join_all(shutdown_futures).await;
4564 }
4565 .boxed(),
4566 )
4567 }
4568}
4569
4570impl Collaborator {
4571 fn from_proto(
4572 message: proto::Collaborator,
4573 user_store: &ModelHandle<UserStore>,
4574 cx: &mut AsyncAppContext,
4575 ) -> impl Future<Output = Result<Self>> {
4576 let user = user_store.update(cx, |user_store, cx| {
4577 user_store.fetch_user(message.user_id, cx)
4578 });
4579
4580 async move {
4581 Ok(Self {
4582 peer_id: PeerId(message.peer_id),
4583 user: user.await?,
4584 replica_id: message.replica_id as ReplicaId,
4585 })
4586 }
4587 }
4588}
4589
4590impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4591 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4592 Self {
4593 worktree_id,
4594 path: path.as_ref().into(),
4595 }
4596 }
4597}
4598
4599impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4600 fn from(options: lsp::CreateFileOptions) -> Self {
4601 Self {
4602 overwrite: options.overwrite.unwrap_or(false),
4603 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4604 }
4605 }
4606}
4607
4608impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4609 fn from(options: lsp::RenameFileOptions) -> Self {
4610 Self {
4611 overwrite: options.overwrite.unwrap_or(false),
4612 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4613 }
4614 }
4615}
4616
4617impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4618 fn from(options: lsp::DeleteFileOptions) -> Self {
4619 Self {
4620 recursive: options.recursive.unwrap_or(false),
4621 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4622 }
4623 }
4624}
4625
4626fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4627 proto::Symbol {
4628 source_worktree_id: symbol.source_worktree_id.to_proto(),
4629 worktree_id: symbol.worktree_id.to_proto(),
4630 language_server_name: symbol.language_server_name.0.to_string(),
4631 name: symbol.name.clone(),
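        // This transmute mirrors the one in `deserialize_symbol`; both sides
        // must agree on the kind's integer representation.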
4632 kind: unsafe { mem::transmute(symbol.kind) },
4633 path: symbol.path.to_string_lossy().to_string(),
4634 start: Some(proto::Point {
4635 row: symbol.range.start.row,
4636 column: symbol.range.start.column,
4637 }),
4638 end: Some(proto::Point {
4639 row: symbol.range.end.row,
4640 column: symbol.range.end.column,
4641 }),
4642 signature: symbol.signature.to_vec(),
4643 }
4644}
4645
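/// Computes `path` relative to `base`, emitting `..` components where the two
/// diverge. For example, relativizing `/a/c/d` against a base of `/a/b` yields
/// `../c/d`.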
4646fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4647 let mut path_components = path.components();
4648 let mut base_components = base.components();
4649 let mut components: Vec<Component> = Vec::new();
4650 loop {
4651 match (path_components.next(), base_components.next()) {
4652 (None, None) => break,
4653 (Some(a), None) => {
4654 components.push(a);
4655 components.extend(path_components.by_ref());
4656 break;
4657 }
4658 (None, _) => components.push(Component::ParentDir),
4659 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4660 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4661 (Some(a), Some(_)) => {
4662 components.push(Component::ParentDir);
4663 for _ in base_components {
4664 components.push(Component::ParentDir);
4665 }
4666 components.push(a);
4667 components.extend(path_components.by_ref());
4668 break;
4669 }
4670 }
4671 }
4672 components.iter().map(|c| c.as_os_str()).collect()
4673}
4674
4675impl Item for Buffer {
4676 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4677 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4678 }
4679}
4680
4681#[cfg(test)]
4682mod tests {
4683 use super::{Event, *};
4684 use fs::RealFs;
4685 use futures::{future, StreamExt};
4686 use gpui::test::subscribe;
4687 use language::{
4688 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4689 ToPoint,
4690 };
4691 use lsp::Url;
4692 use serde_json::json;
4693 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4694 use unindent::Unindent as _;
4695 use util::{assert_set_eq, test::temp_tree};
4696 use worktree::WorktreeHandle as _;
4697
4698 #[gpui::test]
4699 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4700 let dir = temp_tree(json!({
4701 "root": {
4702 "apple": "",
4703 "banana": {
4704 "carrot": {
4705 "date": "",
4706 "endive": "",
4707 }
4708 },
4709 "fennel": {
4710 "grape": "",
4711 }
4712 }
4713 }));
4714
4715 let root_link_path = dir.path().join("root_link");
4716 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4717 unix::fs::symlink(
4718 &dir.path().join("root/fennel"),
4719 &dir.path().join("root/finnochio"),
4720 )
4721 .unwrap();
4722
4723 let project = Project::test(Arc::new(RealFs), cx);
4724
4725 let (tree, _) = project
4726 .update(cx, |project, cx| {
4727 project.find_or_create_local_worktree(&root_link_path, true, cx)
4728 })
4729 .await
4730 .unwrap();
4731
4732 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4733 .await;
4734 cx.read(|cx| {
4735 let tree = tree.read(cx);
4736 assert_eq!(tree.file_count(), 5);
4737 assert_eq!(
4738 tree.inode_for_path("fennel/grape"),
4739 tree.inode_for_path("finnochio/grape")
4740 );
4741 });
4742
4743 let cancel_flag = Default::default();
4744 let results = project
4745 .read_with(cx, |project, cx| {
4746 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4747 })
4748 .await;
4749 assert_eq!(
4750 results
4751 .into_iter()
4752 .map(|result| result.path)
4753 .collect::<Vec<Arc<Path>>>(),
4754 vec![
4755 PathBuf::from("banana/carrot/date").into(),
4756 PathBuf::from("banana/carrot/endive").into(),
4757 ]
4758 );
4759 }
4760
4761 #[gpui::test]
4762 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4763 cx.foreground().forbid_parking();
4764
4765 let mut rust_language = Language::new(
4766 LanguageConfig {
4767 name: "Rust".into(),
4768 path_suffixes: vec!["rs".to_string()],
4769 ..Default::default()
4770 },
4771 Some(tree_sitter_rust::language()),
4772 );
4773 let mut json_language = Language::new(
4774 LanguageConfig {
4775 name: "JSON".into(),
4776 path_suffixes: vec!["json".to_string()],
4777 ..Default::default()
4778 },
4779 None,
4780 );
4781 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4782 name: "the-rust-language-server",
4783 capabilities: lsp::ServerCapabilities {
4784 completion_provider: Some(lsp::CompletionOptions {
4785 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4786 ..Default::default()
4787 }),
4788 ..Default::default()
4789 },
4790 ..Default::default()
4791 });
4792 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4793 name: "the-json-language-server",
4794 capabilities: lsp::ServerCapabilities {
4795 completion_provider: Some(lsp::CompletionOptions {
4796 trigger_characters: Some(vec![":".to_string()]),
4797 ..Default::default()
4798 }),
4799 ..Default::default()
4800 },
4801 ..Default::default()
4802 });
4803
4804 let fs = FakeFs::new(cx.background());
4805 fs.insert_tree(
4806 "/the-root",
4807 json!({
4808 "test.rs": "const A: i32 = 1;",
4809 "test2.rs": "",
4810 "Cargo.toml": "a = 1",
4811 "package.json": "{\"a\": 1}",
4812 }),
4813 )
4814 .await;
4815
4816 let project = Project::test(fs, cx);
4817 project.update(cx, |project, _| {
4818 project.languages.add(Arc::new(rust_language));
4819 project.languages.add(Arc::new(json_language));
4820 });
4821
4822 let worktree_id = project
4823 .update(cx, |project, cx| {
4824 project.find_or_create_local_worktree("/the-root", true, cx)
4825 })
4826 .await
4827 .unwrap()
4828 .0
4829 .read_with(cx, |tree, _| tree.id());
4830
4831 // Open a buffer without an associated language server.
4832 let toml_buffer = project
4833 .update(cx, |project, cx| {
4834 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4835 })
4836 .await
4837 .unwrap();
4838
4839 // Open a buffer with an associated language server.
4840 let rust_buffer = project
4841 .update(cx, |project, cx| {
4842 project.open_buffer((worktree_id, "test.rs"), cx)
4843 })
4844 .await
4845 .unwrap();
4846
        // A Rust language server is started and notified about the newly opened Rust file.
4848 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4849 assert_eq!(
4850 fake_rust_server
4851 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4852 .await
4853 .text_document,
4854 lsp::TextDocumentItem {
4855 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4856 version: 0,
4857 text: "const A: i32 = 1;".to_string(),
4858 language_id: Default::default()
4859 }
4860 );
4861
4862 // The buffer is configured based on the language server's capabilities.
4863 rust_buffer.read_with(cx, |buffer, _| {
4864 assert_eq!(
4865 buffer.completion_triggers(),
4866 &[".".to_string(), "::".to_string()]
4867 );
4868 });
4869 toml_buffer.read_with(cx, |buffer, _| {
4870 assert!(buffer.completion_triggers().is_empty());
4871 });
4872
4873 // Edit a buffer. The changes are reported to the language server.
4874 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4875 assert_eq!(
4876 fake_rust_server
4877 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4878 .await
4879 .text_document,
4880 lsp::VersionedTextDocumentIdentifier::new(
4881 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4882 1
4883 )
4884 );
4885
4886 // Open a third buffer with a different associated language server.
4887 let json_buffer = project
4888 .update(cx, |project, cx| {
4889 project.open_buffer((worktree_id, "package.json"), cx)
4890 })
4891 .await
4892 .unwrap();
4893
        // A JSON language server is started and notified only about the JSON buffer.
4895 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4896 assert_eq!(
4897 fake_json_server
4898 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4899 .await
4900 .text_document,
4901 lsp::TextDocumentItem {
4902 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4903 version: 0,
4904 text: "{\"a\": 1}".to_string(),
4905 language_id: Default::default()
4906 }
4907 );
4908
4909 // This buffer is configured based on the second language server's
4910 // capabilities.
4911 json_buffer.read_with(cx, |buffer, _| {
4912 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4913 });
4914
4915 // When opening another buffer whose language server is already running,
4916 // it is also configured based on the existing language server's capabilities.
4917 let rust_buffer2 = project
4918 .update(cx, |project, cx| {
4919 project.open_buffer((worktree_id, "test2.rs"), cx)
4920 })
4921 .await
4922 .unwrap();
4923 rust_buffer2.read_with(cx, |buffer, _| {
4924 assert_eq!(
4925 buffer.completion_triggers(),
4926 &[".".to_string(), "::".to_string()]
4927 );
4928 });
4929
4930 // Changes are reported only to servers matching the buffer's language.
4931 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4932 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4933 assert_eq!(
4934 fake_rust_server
4935 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4936 .await
4937 .text_document,
4938 lsp::VersionedTextDocumentIdentifier::new(
4939 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4940 1
4941 )
4942 );
4943
4944 // Save notifications are reported to all servers.
4945 toml_buffer
4946 .update(cx, |buffer, cx| buffer.save(cx))
4947 .await
4948 .unwrap();
4949 assert_eq!(
4950 fake_rust_server
4951 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4952 .await
4953 .text_document,
4954 lsp::TextDocumentIdentifier::new(
4955 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4956 )
4957 );
4958 assert_eq!(
4959 fake_json_server
4960 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4961 .await
4962 .text_document,
4963 lsp::TextDocumentIdentifier::new(
4964 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4965 )
4966 );
4967
4968 // Restart language servers
4969 project.update(cx, |project, cx| {
4970 project.restart_language_servers_for_buffers(
4971 vec![rust_buffer.clone(), json_buffer.clone()],
4972 cx,
4973 );
4974 });
4975
4976 let mut rust_shutdown_requests = fake_rust_server
4977 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
4978 let mut json_shutdown_requests = fake_json_server
4979 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
4980 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
4981
4982 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4983 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4984
        // Ensure both Rust documents are reopened in the new Rust language server, in either order.
4986 assert_set_eq!(
4987 [
4988 fake_rust_server
4989 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4990 .await
4991 .text_document,
4992 fake_rust_server
4993 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4994 .await
4995 .text_document,
4996 ],
4997 [
4998 lsp::TextDocumentItem {
4999 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5000 version: 1,
5001 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5002 language_id: Default::default()
5003 },
5004 lsp::TextDocumentItem {
5005 uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5006 version: 1,
5007 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5008 language_id: Default::default()
5009 },
5010 ]
5011 );
5012
        // Ensure the JSON document is reopened in the new JSON language server.
5014 assert_eq!(
5015 fake_json_server
5016 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5017 .await
5018 .text_document,
5019 lsp::TextDocumentItem {
5020 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5021 version: 0,
5022 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5023 language_id: Default::default()
5024 }
5025 );
5026
5027 // Close notifications are reported only to servers matching the buffer's language.
5028 cx.update(|_| drop(json_buffer));
5029 let close_message = lsp::DidCloseTextDocumentParams {
5030 text_document: lsp::TextDocumentIdentifier::new(
5031 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5032 ),
5033 };
5034 assert_eq!(
5035 fake_json_server
5036 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5037 .await,
5038 close_message,
5039 );
5040 }
5041
5042 #[gpui::test]
5043 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5044 cx.foreground().forbid_parking();
5045
5046 let progress_token = "the-progress-token";
5047 let mut language = Language::new(
5048 LanguageConfig {
5049 name: "Rust".into(),
5050 path_suffixes: vec!["rs".to_string()],
5051 ..Default::default()
5052 },
5053 Some(tree_sitter_rust::language()),
5054 );
5055 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5056 disk_based_diagnostics_progress_token: Some(progress_token),
5057 disk_based_diagnostics_sources: &["disk"],
5058 ..Default::default()
5059 });
5060
5061 let fs = FakeFs::new(cx.background());
5062 fs.insert_tree(
5063 "/dir",
5064 json!({
5065 "a.rs": "fn a() { A }",
5066 "b.rs": "const y: i32 = 1",
5067 }),
5068 )
5069 .await;
5070
5071 let project = Project::test(fs, cx);
5072 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5073
5074 let (tree, _) = project
5075 .update(cx, |project, cx| {
5076 project.find_or_create_local_worktree("/dir", true, cx)
5077 })
5078 .await
5079 .unwrap();
5080 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5081
5082 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5083 .await;
5084
        // Cause the worktree to start the fake language server.
5086 let _buffer = project
5087 .update(cx, |project, cx| {
5088 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5089 })
5090 .await
5091 .unwrap();
5092
5093 let mut events = subscribe(&project, cx);
5094
5095 let mut fake_server = fake_servers.next().await.unwrap();
5096 fake_server.start_progress(progress_token).await;
5097 assert_eq!(
5098 events.next().await.unwrap(),
5099 Event::DiskBasedDiagnosticsStarted
5100 );
5101
5102 fake_server.start_progress(progress_token).await;
5103 fake_server.end_progress(progress_token).await;
5104 fake_server.start_progress(progress_token).await;
5105
5106 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5107 lsp::PublishDiagnosticsParams {
5108 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5109 version: None,
5110 diagnostics: vec![lsp::Diagnostic {
5111 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5112 severity: Some(lsp::DiagnosticSeverity::ERROR),
5113 message: "undefined variable 'A'".to_string(),
5114 ..Default::default()
5115 }],
5116 },
5117 );
5118 assert_eq!(
5119 events.next().await.unwrap(),
5120 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5121 );
5122
5123 fake_server.end_progress(progress_token).await;
5124 fake_server.end_progress(progress_token).await;
5125 assert_eq!(
5126 events.next().await.unwrap(),
5127 Event::DiskBasedDiagnosticsUpdated
5128 );
5129 assert_eq!(
5130 events.next().await.unwrap(),
5131 Event::DiskBasedDiagnosticsFinished
5132 );
5133
5134 let buffer = project
5135 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5136 .await
5137 .unwrap();
5138
5139 buffer.read_with(cx, |buffer, _| {
5140 let snapshot = buffer.snapshot();
5141 let diagnostics = snapshot
5142 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5143 .collect::<Vec<_>>();
5144 assert_eq!(
5145 diagnostics,
5146 &[DiagnosticEntry {
5147 range: Point::new(0, 9)..Point::new(0, 10),
5148 diagnostic: Diagnostic {
5149 severity: lsp::DiagnosticSeverity::ERROR,
5150 message: "undefined variable 'A'".to_string(),
5151 group_id: 0,
5152 is_primary: true,
5153 ..Default::default()
5154 }
5155 }]
5156 )
5157 });
5158 }
5159
5160 #[gpui::test]
5161 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5162 cx.foreground().forbid_parking();
5163
5164 let mut language = Language::new(
5165 LanguageConfig {
5166 name: "Rust".into(),
5167 path_suffixes: vec!["rs".to_string()],
5168 ..Default::default()
5169 },
5170 Some(tree_sitter_rust::language()),
5171 );
5172 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5173 disk_based_diagnostics_sources: &["disk"],
5174 ..Default::default()
5175 });
5176
5177 let text = "
5178 fn a() { A }
5179 fn b() { BB }
5180 fn c() { CCC }
5181 "
5182 .unindent();
5183
5184 let fs = FakeFs::new(cx.background());
5185 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5186
5187 let project = Project::test(fs, cx);
5188 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5189
5190 let worktree_id = project
5191 .update(cx, |project, cx| {
5192 project.find_or_create_local_worktree("/dir", true, cx)
5193 })
5194 .await
5195 .unwrap()
5196 .0
5197 .read_with(cx, |tree, _| tree.id());
5198
5199 let buffer = project
5200 .update(cx, |project, cx| {
5201 project.open_buffer((worktree_id, "a.rs"), cx)
5202 })
5203 .await
5204 .unwrap();
5205
5206 let mut fake_server = fake_servers.next().await.unwrap();
5207 let open_notification = fake_server
5208 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5209 .await;
5210
5211 // Edit the buffer, moving the content down
5212 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5213 let change_notification_1 = fake_server
5214 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5215 .await;
5216 assert!(
5217 change_notification_1.text_document.version > open_notification.text_document.version
5218 );
5219
5220 // Report some diagnostics for the initial version of the buffer
5221 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5222 lsp::PublishDiagnosticsParams {
5223 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5224 version: Some(open_notification.text_document.version),
5225 diagnostics: vec![
5226 lsp::Diagnostic {
5227 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5228 severity: Some(DiagnosticSeverity::ERROR),
5229 message: "undefined variable 'A'".to_string(),
5230 source: Some("disk".to_string()),
5231 ..Default::default()
5232 },
5233 lsp::Diagnostic {
5234 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5235 severity: Some(DiagnosticSeverity::ERROR),
5236 message: "undefined variable 'BB'".to_string(),
5237 source: Some("disk".to_string()),
5238 ..Default::default()
5239 },
5240 lsp::Diagnostic {
5241 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5242 severity: Some(DiagnosticSeverity::ERROR),
5243 source: Some("disk".to_string()),
5244 message: "undefined variable 'CCC'".to_string(),
5245 ..Default::default()
5246 },
5247 ],
5248 },
5249 );
5250
5251 // The diagnostics have moved down since they were created.
5252 buffer.next_notification(cx).await;
5253 buffer.read_with(cx, |buffer, _| {
5254 assert_eq!(
5255 buffer
5256 .snapshot()
5257 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5258 .collect::<Vec<_>>(),
5259 &[
5260 DiagnosticEntry {
5261 range: Point::new(3, 9)..Point::new(3, 11),
5262 diagnostic: Diagnostic {
5263 severity: DiagnosticSeverity::ERROR,
5264 message: "undefined variable 'BB'".to_string(),
5265 is_disk_based: true,
5266 group_id: 1,
5267 is_primary: true,
5268 ..Default::default()
5269 },
5270 },
5271 DiagnosticEntry {
5272 range: Point::new(4, 9)..Point::new(4, 12),
5273 diagnostic: Diagnostic {
5274 severity: DiagnosticSeverity::ERROR,
5275 message: "undefined variable 'CCC'".to_string(),
5276 is_disk_based: true,
5277 group_id: 2,
5278 is_primary: true,
5279 ..Default::default()
5280 }
5281 }
5282 ]
5283 );
5284 assert_eq!(
5285 chunks_with_diagnostics(buffer, 0..buffer.len()),
5286 [
5287 ("\n\nfn a() { ".to_string(), None),
5288 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5289 (" }\nfn b() { ".to_string(), None),
5290 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5291 (" }\nfn c() { ".to_string(), None),
5292 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5293 (" }\n".to_string(), None),
5294 ]
5295 );
5296 assert_eq!(
5297 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5298 [
5299 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5300 (" }\nfn c() { ".to_string(), None),
5301 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5302 ]
5303 );
5304 });
5305
5306 // Ensure overlapping diagnostics are highlighted correctly.
5307 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5308 lsp::PublishDiagnosticsParams {
5309 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5310 version: Some(open_notification.text_document.version),
5311 diagnostics: vec![
5312 lsp::Diagnostic {
5313 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5314 severity: Some(DiagnosticSeverity::ERROR),
5315 message: "undefined variable 'A'".to_string(),
5316 source: Some("disk".to_string()),
5317 ..Default::default()
5318 },
5319 lsp::Diagnostic {
5320 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5321 severity: Some(DiagnosticSeverity::WARNING),
5322 message: "unreachable statement".to_string(),
5323 source: Some("disk".to_string()),
5324 ..Default::default()
5325 },
5326 ],
5327 },
5328 );
5329
5330 buffer.next_notification(cx).await;
5331 buffer.read_with(cx, |buffer, _| {
5332 assert_eq!(
5333 buffer
5334 .snapshot()
5335 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5336 .collect::<Vec<_>>(),
5337 &[
5338 DiagnosticEntry {
5339 range: Point::new(2, 9)..Point::new(2, 12),
5340 diagnostic: Diagnostic {
5341 severity: DiagnosticSeverity::WARNING,
5342 message: "unreachable statement".to_string(),
5343 is_disk_based: true,
5344 group_id: 1,
5345 is_primary: true,
5346 ..Default::default()
5347 }
5348 },
5349 DiagnosticEntry {
5350 range: Point::new(2, 9)..Point::new(2, 10),
5351 diagnostic: Diagnostic {
5352 severity: DiagnosticSeverity::ERROR,
5353 message: "undefined variable 'A'".to_string(),
5354 is_disk_based: true,
5355 group_id: 0,
5356 is_primary: true,
5357 ..Default::default()
5358 },
5359 }
5360 ]
5361 );
5362 assert_eq!(
5363 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5364 [
5365 ("fn a() { ".to_string(), None),
5366 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5367 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5368 ("\n".to_string(), None),
5369 ]
5370 );
5371 assert_eq!(
5372 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5373 [
5374 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5375 ("\n".to_string(), None),
5376 ]
5377 );
5378 });
5379
5380 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5381 // changes since the last save.
5382 buffer.update(cx, |buffer, cx| {
5383 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5384 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5385 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5386 });
5387 let change_notification_2 = fake_server
5388 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5389 .await;
5390 assert!(
5391 change_notification_2.text_document.version
5392 > change_notification_1.text_document.version
5393 );
5394
        // Ensure diagnostics are handled even when they arrive out of document order.
5396 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5397 lsp::PublishDiagnosticsParams {
5398 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5399 version: Some(change_notification_2.text_document.version),
5400 diagnostics: vec![
5401 lsp::Diagnostic {
5402 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5403 severity: Some(DiagnosticSeverity::ERROR),
5404 message: "undefined variable 'BB'".to_string(),
5405 source: Some("disk".to_string()),
5406 ..Default::default()
5407 },
5408 lsp::Diagnostic {
5409 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5410 severity: Some(DiagnosticSeverity::WARNING),
5411 message: "undefined variable 'A'".to_string(),
5412 source: Some("disk".to_string()),
5413 ..Default::default()
5414 },
5415 ],
5416 },
5417 );
5418
5419 buffer.next_notification(cx).await;
5420 buffer.read_with(cx, |buffer, _| {
5421 assert_eq!(
5422 buffer
5423 .snapshot()
5424 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5425 .collect::<Vec<_>>(),
5426 &[
5427 DiagnosticEntry {
5428 range: Point::new(2, 21)..Point::new(2, 22),
5429 diagnostic: Diagnostic {
5430 severity: DiagnosticSeverity::WARNING,
5431 message: "undefined variable 'A'".to_string(),
5432 is_disk_based: true,
5433 group_id: 1,
5434 is_primary: true,
5435 ..Default::default()
5436 }
5437 },
5438 DiagnosticEntry {
5439 range: Point::new(3, 9)..Point::new(3, 14),
5440 diagnostic: Diagnostic {
5441 severity: DiagnosticSeverity::ERROR,
5442 message: "undefined variable 'BB'".to_string(),
5443 is_disk_based: true,
5444 group_id: 0,
5445 is_primary: true,
5446 ..Default::default()
5447 },
5448 }
5449 ]
5450 );
5451 });
5452 }
5453
5454 #[gpui::test]
5455 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5456 cx.foreground().forbid_parking();
5457
5458 let text = concat!(
5459 "let one = ;\n", //
5460 "let two = \n",
5461 "let three = 3;\n",
5462 );
5463
5464 let fs = FakeFs::new(cx.background());
5465 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5466
5467 let project = Project::test(fs, cx);
5468 let worktree_id = project
5469 .update(cx, |project, cx| {
5470 project.find_or_create_local_worktree("/dir", true, cx)
5471 })
5472 .await
5473 .unwrap()
5474 .0
5475 .read_with(cx, |tree, _| tree.id());
5476
5477 let buffer = project
5478 .update(cx, |project, cx| {
5479 project.open_buffer((worktree_id, "a.rs"), cx)
5480 })
5481 .await
5482 .unwrap();
5483
5484 project.update(cx, |project, cx| {
5485 project
5486 .update_buffer_diagnostics(
5487 &buffer,
5488 vec![
5489 DiagnosticEntry {
5490 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5491 diagnostic: Diagnostic {
5492 severity: DiagnosticSeverity::ERROR,
5493 message: "syntax error 1".to_string(),
5494 ..Default::default()
5495 },
5496 },
5497 DiagnosticEntry {
5498 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5499 diagnostic: Diagnostic {
5500 severity: DiagnosticSeverity::ERROR,
5501 message: "syntax error 2".to_string(),
5502 ..Default::default()
5503 },
5504 },
5505 ],
5506 None,
5507 cx,
5508 )
5509 .unwrap();
5510 });
5511
5512 // An empty range is extended forward to include the following character.
5513 // At the end of a line, an empty range is extended backward to include
5514 // the preceding character.
5515 buffer.read_with(cx, |buffer, _| {
5516 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5517 assert_eq!(
5518 chunks
5519 .iter()
5520 .map(|(s, d)| (s.as_str(), *d))
5521 .collect::<Vec<_>>(),
5522 &[
5523 ("let one = ", None),
5524 (";", Some(DiagnosticSeverity::ERROR)),
5525 ("\nlet two =", None),
5526 (" ", Some(DiagnosticSeverity::ERROR)),
5527 ("\nlet three = 3;\n", None)
5528 ]
5529 );
5530 });
5531 }
5532
5533 #[gpui::test]
5534 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5535 cx.foreground().forbid_parking();
5536
5537 let mut language = Language::new(
5538 LanguageConfig {
5539 name: "Rust".into(),
5540 path_suffixes: vec!["rs".to_string()],
5541 ..Default::default()
5542 },
5543 Some(tree_sitter_rust::language()),
5544 );
5545 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5546
5547 let text = "
5548 fn a() {
5549 f1();
5550 }
5551 fn b() {
5552 f2();
5553 }
5554 fn c() {
5555 f3();
5556 }
5557 "
5558 .unindent();
5559
5560 let fs = FakeFs::new(cx.background());
5561 fs.insert_tree(
5562 "/dir",
5563 json!({
5564 "a.rs": text.clone(),
5565 }),
5566 )
5567 .await;
5568
5569 let project = Project::test(fs, cx);
5570 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5571
5572 let worktree_id = project
5573 .update(cx, |project, cx| {
5574 project.find_or_create_local_worktree("/dir", true, cx)
5575 })
5576 .await
5577 .unwrap()
5578 .0
5579 .read_with(cx, |tree, _| tree.id());
5580
5581 let buffer = project
5582 .update(cx, |project, cx| {
5583 project.open_buffer((worktree_id, "a.rs"), cx)
5584 })
5585 .await
5586 .unwrap();
5587
5588 let mut fake_server = fake_servers.next().await.unwrap();
5589 let lsp_document_version = fake_server
5590 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5591 .await
5592 .text_document
5593 .version;
5594
5595 // Simulate editing the buffer after the language server computes some edits.
5596 buffer.update(cx, |buffer, cx| {
5597 buffer.edit(
5598 [Point::new(0, 0)..Point::new(0, 0)],
5599 "// above first function\n",
5600 cx,
5601 );
5602 buffer.edit(
5603 [Point::new(2, 0)..Point::new(2, 0)],
5604 " // inside first function\n",
5605 cx,
5606 );
5607 buffer.edit(
5608 [Point::new(6, 4)..Point::new(6, 4)],
5609 "// inside second function ",
5610 cx,
5611 );
5612
5613 assert_eq!(
5614 buffer.text(),
5615 "
5616 // above first function
5617 fn a() {
5618 // inside first function
5619 f1();
5620 }
5621 fn b() {
5622 // inside second function f2();
5623 }
5624 fn c() {
5625 f3();
5626 }
5627 "
5628 .unindent()
5629 );
5630 });
5631
5632 let edits = project
5633 .update(cx, |project, cx| {
5634 project.edits_from_lsp(
5635 &buffer,
5636 vec![
5637 // replace body of first function
5638 lsp::TextEdit {
5639 range: lsp::Range::new(
5640 lsp::Position::new(0, 0),
5641 lsp::Position::new(3, 0),
5642 ),
5643 new_text: "
5644 fn a() {
5645 f10();
5646 }
5647 "
5648 .unindent(),
5649 },
5650 // edit inside second function
5651 lsp::TextEdit {
5652 range: lsp::Range::new(
5653 lsp::Position::new(4, 6),
5654 lsp::Position::new(4, 6),
5655 ),
5656 new_text: "00".into(),
5657 },
5658 // edit inside third function via two distinct edits
5659 lsp::TextEdit {
5660 range: lsp::Range::new(
5661 lsp::Position::new(7, 5),
5662 lsp::Position::new(7, 5),
5663 ),
5664 new_text: "4000".into(),
5665 },
5666 lsp::TextEdit {
5667 range: lsp::Range::new(
5668 lsp::Position::new(7, 5),
5669 lsp::Position::new(7, 6),
5670 ),
5671 new_text: "".into(),
5672 },
5673 ],
5674 Some(lsp_document_version),
5675 cx,
5676 )
5677 })
5678 .await
5679 .unwrap();
5680
5681 buffer.update(cx, |buffer, cx| {
5682 for (range, new_text) in edits {
5683 buffer.edit([range], new_text, cx);
5684 }
5685 assert_eq!(
5686 buffer.text(),
5687 "
5688 // above first function
5689 fn a() {
5690 // inside first function
5691 f10();
5692 }
5693 fn b() {
5694 // inside second function f200();
5695 }
5696 fn c() {
5697 f4000();
5698 }
5699 "
5700 .unindent()
5701 );
5702 });
5703 }
5704
5705 #[gpui::test]
5706 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5707 cx.foreground().forbid_parking();
5708
5709 let text = "
5710 use a::b;
5711 use a::c;
5712
5713 fn f() {
5714 b();
5715 c();
5716 }
5717 "
5718 .unindent();
5719
5720 let fs = FakeFs::new(cx.background());
5721 fs.insert_tree(
5722 "/dir",
5723 json!({
5724 "a.rs": text.clone(),
5725 }),
5726 )
5727 .await;
5728
5729 let project = Project::test(fs, cx);
5730 let worktree_id = project
5731 .update(cx, |project, cx| {
5732 project.find_or_create_local_worktree("/dir", true, cx)
5733 })
5734 .await
5735 .unwrap()
5736 .0
5737 .read_with(cx, |tree, _| tree.id());
5738
5739 let buffer = project
5740 .update(cx, |project, cx| {
5741 project.open_buffer((worktree_id, "a.rs"), cx)
5742 })
5743 .await
5744 .unwrap();
5745
5746 // Simulate the language server sending us a small edit in the form of a very large diff.
5747 // Rust-analyzer does this when performing a merge-imports code action.
5748 let edits = project
5749 .update(cx, |project, cx| {
5750 project.edits_from_lsp(
5751 &buffer,
5752 [
5753 // Replace the first use statement without editing the semicolon.
5754 lsp::TextEdit {
5755 range: lsp::Range::new(
5756 lsp::Position::new(0, 4),
5757 lsp::Position::new(0, 8),
5758 ),
5759 new_text: "a::{b, c}".into(),
5760 },
                        // Reinsert the remainder of the file between the semicolon and its
                        // final newline.
5763 lsp::TextEdit {
5764 range: lsp::Range::new(
5765 lsp::Position::new(0, 9),
5766 lsp::Position::new(0, 9),
5767 ),
5768 new_text: "\n\n".into(),
5769 },
5770 lsp::TextEdit {
5771 range: lsp::Range::new(
5772 lsp::Position::new(0, 9),
5773 lsp::Position::new(0, 9),
5774 ),
5775 new_text: "
5776 fn f() {
5777 b();
5778 c();
5779 }"
5780 .unindent(),
5781 },
5782 // Delete everything after the first newline of the file.
5783 lsp::TextEdit {
5784 range: lsp::Range::new(
5785 lsp::Position::new(1, 0),
5786 lsp::Position::new(7, 0),
5787 ),
5788 new_text: "".into(),
5789 },
5790 ],
5791 None,
5792 cx,
5793 )
5794 })
5795 .await
5796 .unwrap();
5797
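        // Although the server sent several overlapping edits that rewrite most of the file,
        // `edits_from_lsp` should diff them against the current text and reduce them to two
        // minimal edits.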
5798 buffer.update(cx, |buffer, cx| {
5799 let edits = edits
5800 .into_iter()
5801 .map(|(range, text)| {
5802 (
5803 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5804 text,
5805 )
5806 })
5807 .collect::<Vec<_>>();
5808
5809 assert_eq!(
5810 edits,
5811 [
5812 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5813 (Point::new(1, 0)..Point::new(2, 0), "".into())
5814 ]
5815 );
5816
5817 for (range, new_text) in edits {
5818 buffer.edit([range], new_text, cx);
5819 }
5820 assert_eq!(
5821 buffer.text(),
5822 "
5823 use a::{b, c};
5824
5825 fn f() {
5826 b();
5827 c();
5828 }
5829 "
5830 .unindent()
5831 );
5832 });
5833 }
5834
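    // Collects the chunks of `buffer` within `range`, merging adjacent chunks that share
    // the same diagnostic severity, so tests can assert on (text, severity) pairs.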
5835 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5836 buffer: &Buffer,
5837 range: Range<T>,
5838 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5839 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5840 for chunk in buffer.snapshot().chunks(range, true) {
5841 if chunks.last().map_or(false, |prev_chunk| {
5842 prev_chunk.1 == chunk.diagnostic_severity
5843 }) {
5844 chunks.last_mut().unwrap().0.push_str(chunk.text);
5845 } else {
5846 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5847 }
5848 }
5849 chunks
5850 }
5851
5852 #[gpui::test]
5853 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5854 let dir = temp_tree(json!({
5855 "root": {
5856 "dir1": {},
5857 "dir2": {
5858 "dir3": {}
5859 }
5860 }
5861 }));
5862
5863 let project = Project::test(Arc::new(RealFs), cx);
5864 let (tree, _) = project
5865 .update(cx, |project, cx| {
5866 project.find_or_create_local_worktree(&dir.path(), true, cx)
5867 })
5868 .await
5869 .unwrap();
5870
5871 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5872 .await;
5873
5874 let cancel_flag = Default::default();
5875 let results = project
5876 .read_with(cx, |project, cx| {
5877 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5878 })
5879 .await;
5880
5881 assert!(results.is_empty());
5882 }
5883
5884 #[gpui::test]
5885 async fn test_definition(cx: &mut gpui::TestAppContext) {
5886 let mut language = Language::new(
5887 LanguageConfig {
5888 name: "Rust".into(),
5889 path_suffixes: vec!["rs".to_string()],
5890 ..Default::default()
5891 },
5892 Some(tree_sitter_rust::language()),
5893 );
5894 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5895
5896 let fs = FakeFs::new(cx.background());
5897 fs.insert_tree(
5898 "/dir",
5899 json!({
5900 "a.rs": "const fn a() { A }",
5901 "b.rs": "const y: i32 = crate::a()",
5902 }),
5903 )
5904 .await;
5905
5906 let project = Project::test(fs, cx);
5907 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5908
5909 let (tree, _) = project
5910 .update(cx, |project, cx| {
5911 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5912 })
5913 .await
5914 .unwrap();
5915 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5916 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5917 .await;
5918
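        // The worktree was created for `/dir/b.rs` itself, so the file is addressed by an
        // empty relative path.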
5919 let buffer = project
5920 .update(cx, |project, cx| {
5921 project.open_buffer(
5922 ProjectPath {
5923 worktree_id,
5924 path: Path::new("").into(),
5925 },
5926 cx,
5927 )
5928 })
5929 .await
5930 .unwrap();
5931
5932 let mut fake_server = fake_servers.next().await.unwrap();
5933 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
5934 let params = params.text_document_position_params;
5935 assert_eq!(
5936 params.text_document.uri.to_file_path().unwrap(),
5937 Path::new("/dir/b.rs"),
5938 );
5939 assert_eq!(params.position, lsp::Position::new(0, 22));
5940
5941 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5942 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5943 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5944 )))
5945 });
5946
5947 let mut definitions = project
5948 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5949 .await
5950 .unwrap();
5951
5952 assert_eq!(definitions.len(), 1);
5953 let definition = definitions.pop().unwrap();
5954 cx.update(|cx| {
5955 let target_buffer = definition.buffer.read(cx);
5956 assert_eq!(
5957 target_buffer
5958 .file()
5959 .unwrap()
5960 .as_local()
5961 .unwrap()
5962 .abs_path(cx),
5963 Path::new("/dir/a.rs"),
5964 );
5965 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
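            // Jumping to the definition opened `/dir/a.rs` in a new, non-visible worktree.
            // It should be released once the definition (and the buffer it retains) is dropped.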
5966 assert_eq!(
5967 list_worktrees(&project, cx),
5968 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5969 );
5970
5971 drop(definition);
5972 });
5973 cx.read(|cx| {
5974 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5975 });
5976
5977 fn list_worktrees<'a>(
5978 project: &'a ModelHandle<Project>,
5979 cx: &'a AppContext,
5980 ) -> Vec<(&'a Path, bool)> {
5981 project
5982 .read(cx)
5983 .worktrees(cx)
5984 .map(|worktree| {
5985 let worktree = worktree.read(cx);
5986 (
5987 worktree.as_local().unwrap().abs_path().as_ref(),
5988 worktree.is_visible(),
5989 )
5990 })
5991 .collect::<Vec<_>>()
5992 }
5993 }
5994
5995 #[gpui::test]
5996 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5997 let fs = FakeFs::new(cx.background());
5998 fs.insert_tree(
5999 "/dir",
6000 json!({
6001 "file1": "the old contents",
6002 }),
6003 )
6004 .await;
6005
6006 let project = Project::test(fs.clone(), cx);
6007 let worktree_id = project
6008 .update(cx, |p, cx| {
6009 p.find_or_create_local_worktree("/dir", true, cx)
6010 })
6011 .await
6012 .unwrap()
6013 .0
6014 .read_with(cx, |tree, _| tree.id());
6015
6016 let buffer = project
6017 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6018 .await
6019 .unwrap();
6020 buffer
6021 .update(cx, |buffer, cx| {
6022 assert_eq!(buffer.text(), "the old contents");
6023 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6024 buffer.save(cx)
6025 })
6026 .await
6027 .unwrap();
6028
6029 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6030 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6031 }
6032
6033 #[gpui::test]
6034 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6035 let fs = FakeFs::new(cx.background());
6036 fs.insert_tree(
6037 "/dir",
6038 json!({
6039 "file1": "the old contents",
6040 }),
6041 )
6042 .await;
6043
6044 let project = Project::test(fs.clone(), cx);
6045 let worktree_id = project
6046 .update(cx, |p, cx| {
6047 p.find_or_create_local_worktree("/dir/file1", true, cx)
6048 })
6049 .await
6050 .unwrap()
6051 .0
6052 .read_with(cx, |tree, _| tree.id());
6053
6054 let buffer = project
6055 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6056 .await
6057 .unwrap();
6058 buffer
6059 .update(cx, |buffer, cx| {
6060 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6061 buffer.save(cx)
6062 })
6063 .await
6064 .unwrap();
6065
6066 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6067 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6068 }
6069
6070 #[gpui::test]
6071 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6072 let fs = FakeFs::new(cx.background());
6073 fs.insert_tree("/dir", json!({})).await;
6074
6075 let project = Project::test(fs.clone(), cx);
6076 let (worktree, _) = project
6077 .update(cx, |project, cx| {
6078 project.find_or_create_local_worktree("/dir", true, cx)
6079 })
6080 .await
6081 .unwrap();
6082 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6083
6084 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6085 buffer.update(cx, |buffer, cx| {
6086 buffer.edit([0..0], "abc", cx);
6087 assert!(buffer.is_dirty());
6088 assert!(!buffer.has_conflict());
6089 });
6090 project
6091 .update(cx, |project, cx| {
6092 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6093 })
6094 .await
6095 .unwrap();
6096 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6097 buffer.read_with(cx, |buffer, cx| {
6098 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6099 assert!(!buffer.is_dirty());
6100 assert!(!buffer.has_conflict());
6101 });
6102
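        // Opening the path we just saved to should return the same buffer, rather than
        // loading a new one.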
6103 let opened_buffer = project
6104 .update(cx, |project, cx| {
6105 project.open_buffer((worktree_id, "file1"), cx)
6106 })
6107 .await
6108 .unwrap();
6109 assert_eq!(opened_buffer, buffer);
6110 }
6111
6112 #[gpui::test(retries = 5)]
6113 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6114 let dir = temp_tree(json!({
6115 "a": {
6116 "file1": "",
6117 "file2": "",
6118 "file3": "",
6119 },
6120 "b": {
6121 "c": {
6122 "file4": "",
6123 "file5": "",
6124 }
6125 }
6126 }));
6127
6128 let project = Project::test(Arc::new(RealFs), cx);
6129 let rpc = project.read_with(cx, |p, _| p.client.clone());
6130
6131 let (tree, _) = project
6132 .update(cx, |p, cx| {
6133 p.find_or_create_local_worktree(dir.path(), true, cx)
6134 })
6135 .await
6136 .unwrap();
6137 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6138
6139 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6140 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6141 async move { buffer.await.unwrap() }
6142 };
6143 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6144 tree.read_with(cx, |tree, _| {
6145 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
6147 .id
6148 })
6149 };
6150
6151 let buffer2 = buffer_for_path("a/file2", cx).await;
6152 let buffer3 = buffer_for_path("a/file3", cx).await;
6153 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6154 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6155
6156 let file2_id = id_for_path("a/file2", &cx);
6157 let file3_id = id_for_path("a/file3", &cx);
6158 let file4_id = id_for_path("b/c/file4", &cx);
6159
6160 // Wait for the initial scan.
6161 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6162 .await;
6163
6164 // Create a remote copy of this worktree.
6165 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6166 let (remote, load_task) = cx.update(|cx| {
6167 Worktree::remote(
6168 1,
6169 1,
6170 initial_snapshot.to_proto(&Default::default(), true),
6171 rpc.clone(),
6172 cx,
6173 )
6174 });
6175 load_task.await;
6176
6177 cx.read(|cx| {
6178 assert!(!buffer2.read(cx).is_dirty());
6179 assert!(!buffer3.read(cx).is_dirty());
6180 assert!(!buffer4.read(cx).is_dirty());
6181 assert!(!buffer5.read(cx).is_dirty());
6182 });
6183
6184 // Rename and delete files and directories.
6185 tree.flush_fs_events(&cx).await;
6186 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6187 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6188 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6189 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6190 tree.flush_fs_events(&cx).await;
6191
6192 let expected_paths = vec![
6193 "a",
6194 "a/file1",
6195 "a/file2.new",
6196 "b",
6197 "d",
6198 "d/file3",
6199 "d/file4",
6200 ];
6201
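        // After the FS events settle, the worktree should reflect the renames and deletions,
        // entry ids should survive renames, and open buffers should track their files' new
        // paths (or deletion).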
6202 cx.read(|app| {
6203 assert_eq!(
6204 tree.read(app)
6205 .paths()
6206 .map(|p| p.to_str().unwrap())
6207 .collect::<Vec<_>>(),
6208 expected_paths
6209 );
6210
6211 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6212 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6213 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6214
6215 assert_eq!(
6216 buffer2.read(app).file().unwrap().path().as_ref(),
6217 Path::new("a/file2.new")
6218 );
6219 assert_eq!(
6220 buffer3.read(app).file().unwrap().path().as_ref(),
6221 Path::new("d/file3")
6222 );
6223 assert_eq!(
6224 buffer4.read(app).file().unwrap().path().as_ref(),
6225 Path::new("d/file4")
6226 );
6227 assert_eq!(
6228 buffer5.read(app).file().unwrap().path().as_ref(),
6229 Path::new("b/c/file5")
6230 );
6231
6232 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6233 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6234 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6235 assert!(buffer5.read(app).file().unwrap().is_deleted());
6236 });
6237
6238 // Update the remote worktree. Check that it becomes consistent with the
6239 // local worktree.
6240 remote.update(cx, |remote, cx| {
6241 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6242 &initial_snapshot,
6243 1,
6244 1,
6245 true,
6246 );
6247 remote
6248 .as_remote_mut()
6249 .unwrap()
6250 .snapshot
6251 .apply_remote_update(update_message)
6252 .unwrap();
6253
6254 assert_eq!(
6255 remote
6256 .paths()
6257 .map(|p| p.to_str().unwrap())
6258 .collect::<Vec<_>>(),
6259 expected_paths
6260 );
6261 });
6262 }
6263
6264 #[gpui::test]
6265 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6266 let fs = FakeFs::new(cx.background());
6267 fs.insert_tree(
6268 "/the-dir",
6269 json!({
6270 "a.txt": "a-contents",
6271 "b.txt": "b-contents",
6272 }),
6273 )
6274 .await;
6275
6276 let project = Project::test(fs.clone(), cx);
6277 let worktree_id = project
6278 .update(cx, |p, cx| {
6279 p.find_or_create_local_worktree("/the-dir", true, cx)
6280 })
6281 .await
6282 .unwrap()
6283 .0
6284 .read_with(cx, |tree, _| tree.id());
6285
6286 // Spawn multiple tasks to open paths, repeating some paths.
6287 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6288 (
6289 p.open_buffer((worktree_id, "a.txt"), cx),
6290 p.open_buffer((worktree_id, "b.txt"), cx),
6291 p.open_buffer((worktree_id, "a.txt"), cx),
6292 )
6293 });
6294
6295 let buffer_a_1 = buffer_a_1.await.unwrap();
6296 let buffer_a_2 = buffer_a_2.await.unwrap();
6297 let buffer_b = buffer_b.await.unwrap();
6298 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6299 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6300
6301 // There is only one buffer per path.
6302 let buffer_a_id = buffer_a_1.id();
6303 assert_eq!(buffer_a_2.id(), buffer_a_id);
6304
6305 // Open the same path again while it is still open.
6306 drop(buffer_a_1);
6307 let buffer_a_3 = project
6308 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6309 .await
6310 .unwrap();
6311
6312 // There's still only one buffer per path.
6313 assert_eq!(buffer_a_3.id(), buffer_a_id);
6314 }
6315
6316 #[gpui::test]
6317 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6318 use std::fs;
6319
6320 let dir = temp_tree(json!({
6321 "file1": "abc",
6322 "file2": "def",
6323 "file3": "ghi",
6324 }));
6325
6326 let project = Project::test(Arc::new(RealFs), cx);
6327 let (worktree, _) = project
6328 .update(cx, |p, cx| {
6329 p.find_or_create_local_worktree(dir.path(), true, cx)
6330 })
6331 .await
6332 .unwrap();
6333 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6334
6335 worktree.flush_fs_events(&cx).await;
6336 worktree
6337 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6338 .await;
6339
6340 let buffer1 = project
6341 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6342 .await
6343 .unwrap();
6344 let events = Rc::new(RefCell::new(Vec::new()));
6345
6346 // initially, the buffer isn't dirty.
6347 buffer1.update(cx, |buffer, cx| {
6348 cx.subscribe(&buffer1, {
6349 let events = events.clone();
6350 move |_, _, event, _| match event {
6351 BufferEvent::Operation(_) => {}
6352 _ => events.borrow_mut().push(event.clone()),
6353 }
6354 })
6355 .detach();
6356
6357 assert!(!buffer.is_dirty());
6358 assert!(events.borrow().is_empty());
6359
6360 buffer.edit(vec![1..2], "", cx);
6361 });
6362
6363 // after the first edit, the buffer is dirty, and emits a dirtied event.
6364 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6366 assert!(buffer.is_dirty());
6367 assert_eq!(
6368 *events.borrow(),
6369 &[language::Event::Edited, language::Event::Dirtied]
6370 );
6371 events.borrow_mut().clear();
6372 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6373 });
6374
6375 // after saving, the buffer is not dirty, and emits a saved event.
6376 buffer1.update(cx, |buffer, cx| {
6377 assert!(!buffer.is_dirty());
6378 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6379 events.borrow_mut().clear();
6380
6381 buffer.edit(vec![1..1], "B", cx);
6382 buffer.edit(vec![2..2], "D", cx);
6383 });
6384
6385 // after editing again, the buffer is dirty, and emits another dirty event.
6386 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
6388 assert!(buffer.is_dirty());
6389 assert_eq!(
6390 *events.borrow(),
6391 &[
6392 language::Event::Edited,
6393 language::Event::Dirtied,
6394 language::Event::Edited,
6395 ],
6396 );
6397 events.borrow_mut().clear();
6398
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
6403 assert!(buffer.is_dirty());
6404 });
6405
6406 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6407
6408 // When a file is deleted, the buffer is considered dirty.
6409 let events = Rc::new(RefCell::new(Vec::new()));
6410 let buffer2 = project
6411 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6412 .await
6413 .unwrap();
6414 buffer2.update(cx, |_, cx| {
6415 cx.subscribe(&buffer2, {
6416 let events = events.clone();
6417 move |_, _, event, _| events.borrow_mut().push(event.clone())
6418 })
6419 .detach();
6420 });
6421
6422 fs::remove_file(dir.path().join("file2")).unwrap();
6423 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6424 assert_eq!(
6425 *events.borrow(),
6426 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6427 );
6428
        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6430 let events = Rc::new(RefCell::new(Vec::new()));
6431 let buffer3 = project
6432 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6433 .await
6434 .unwrap();
6435 buffer3.update(cx, |_, cx| {
6436 cx.subscribe(&buffer3, {
6437 let events = events.clone();
6438 move |_, _, event, _| events.borrow_mut().push(event.clone())
6439 })
6440 .detach();
6441 });
6442
6443 worktree.flush_fs_events(&cx).await;
6444 buffer3.update(cx, |buffer, cx| {
6445 buffer.edit(Some(0..0), "x", cx);
6446 });
6447 events.borrow_mut().clear();
6448 fs::remove_file(dir.path().join("file3")).unwrap();
6449 buffer3
6450 .condition(&cx, |_, _| !events.borrow().is_empty())
6451 .await;
6452 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6453 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6454 }
6455
6456 #[gpui::test]
6457 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6458 use std::fs;
6459
6460 let initial_contents = "aaa\nbbbbb\nc\n";
6461 let dir = temp_tree(json!({ "the-file": initial_contents }));
6462
6463 let project = Project::test(Arc::new(RealFs), cx);
6464 let (worktree, _) = project
6465 .update(cx, |p, cx| {
6466 p.find_or_create_local_worktree(dir.path(), true, cx)
6467 })
6468 .await
6469 .unwrap();
6470 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6471
6472 worktree
6473 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6474 .await;
6475
6476 let abs_path = dir.path().join("the-file");
6477 let buffer = project
6478 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6479 .await
6480 .unwrap();
6481
6482 // TODO
6483 // Add a cursor on each row.
6484 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6485 // assert!(!buffer.is_dirty());
6486 // buffer.add_selection_set(
6487 // &(0..3)
6488 // .map(|row| Selection {
6489 // id: row as usize,
6490 // start: Point::new(row, 1),
6491 // end: Point::new(row, 1),
6492 // reversed: false,
6493 // goal: SelectionGoal::None,
6494 // })
6495 // .collect::<Vec<_>>(),
6496 // cx,
6497 // )
6498 // });
6499
6500 // Change the file on disk, adding two new lines of text, and removing
6501 // one line.
6502 buffer.read_with(cx, |buffer, _| {
6503 assert!(!buffer.is_dirty());
6504 assert!(!buffer.has_conflict());
6505 });
6506 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6507 fs::write(&abs_path, new_contents).unwrap();
6508
6509 // Because the buffer was not modified, it is reloaded from disk. Its
6510 // contents are edited according to the diff between the old and new
6511 // file contents.
6512 buffer
6513 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6514 .await;
6515
6516 buffer.update(cx, |buffer, _| {
6517 assert_eq!(buffer.text(), new_contents);
6518 assert!(!buffer.is_dirty());
6519 assert!(!buffer.has_conflict());
6520
6521 // TODO
6522 // let cursor_positions = buffer
6523 // .selection_set(selection_set_id)
6524 // .unwrap()
6525 // .selections::<Point>(&*buffer)
6526 // .map(|selection| {
6527 // assert_eq!(selection.start, selection.end);
6528 // selection.start
6529 // })
6530 // .collect::<Vec<_>>();
6531 // assert_eq!(
6532 // cursor_positions,
6533 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6534 // );
6535 });
6536
6537 // Modify the buffer
6538 buffer.update(cx, |buffer, cx| {
6539 buffer.edit(vec![0..0], " ", cx);
6540 assert!(buffer.is_dirty());
6541 assert!(!buffer.has_conflict());
6542 });
6543
6544 // Change the file on disk again, adding blank lines to the beginning.
6545 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6546
6547 // Because the buffer is modified, it doesn't reload from disk, but is
6548 // marked as having a conflict.
6549 buffer
6550 .condition(&cx, |buffer, _| buffer.has_conflict())
6551 .await;
6552 }
6553
6554 #[gpui::test]
6555 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6556 cx.foreground().forbid_parking();
6557
6558 let fs = FakeFs::new(cx.background());
6559 fs.insert_tree(
6560 "/the-dir",
6561 json!({
6562 "a.rs": "
6563 fn foo(mut v: Vec<usize>) {
6564 for x in &v {
6565 v.push(1);
6566 }
6567 }
6568 "
6569 .unindent(),
6570 }),
6571 )
6572 .await;
6573
6574 let project = Project::test(fs.clone(), cx);
6575 let (worktree, _) = project
6576 .update(cx, |p, cx| {
6577 p.find_or_create_local_worktree("/the-dir", true, cx)
6578 })
6579 .await
6580 .unwrap();
6581 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6582
6583 let buffer = project
6584 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6585 .await
6586 .unwrap();
6587
6588 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6589 let message = lsp::PublishDiagnosticsParams {
6590 uri: buffer_uri.clone(),
6591 diagnostics: vec![
6592 lsp::Diagnostic {
6593 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6594 severity: Some(DiagnosticSeverity::WARNING),
6595 message: "error 1".to_string(),
6596 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6597 location: lsp::Location {
6598 uri: buffer_uri.clone(),
6599 range: lsp::Range::new(
6600 lsp::Position::new(1, 8),
6601 lsp::Position::new(1, 9),
6602 ),
6603 },
6604 message: "error 1 hint 1".to_string(),
6605 }]),
6606 ..Default::default()
6607 },
6608 lsp::Diagnostic {
6609 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6610 severity: Some(DiagnosticSeverity::HINT),
6611 message: "error 1 hint 1".to_string(),
6612 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6613 location: lsp::Location {
6614 uri: buffer_uri.clone(),
6615 range: lsp::Range::new(
6616 lsp::Position::new(1, 8),
6617 lsp::Position::new(1, 9),
6618 ),
6619 },
6620 message: "original diagnostic".to_string(),
6621 }]),
6622 ..Default::default()
6623 },
6624 lsp::Diagnostic {
6625 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6626 severity: Some(DiagnosticSeverity::ERROR),
6627 message: "error 2".to_string(),
6628 related_information: Some(vec![
6629 lsp::DiagnosticRelatedInformation {
6630 location: lsp::Location {
6631 uri: buffer_uri.clone(),
6632 range: lsp::Range::new(
6633 lsp::Position::new(1, 13),
6634 lsp::Position::new(1, 15),
6635 ),
6636 },
6637 message: "error 2 hint 1".to_string(),
6638 },
6639 lsp::DiagnosticRelatedInformation {
6640 location: lsp::Location {
6641 uri: buffer_uri.clone(),
6642 range: lsp::Range::new(
6643 lsp::Position::new(1, 13),
6644 lsp::Position::new(1, 15),
6645 ),
6646 },
6647 message: "error 2 hint 2".to_string(),
6648 },
6649 ]),
6650 ..Default::default()
6651 },
6652 lsp::Diagnostic {
6653 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6654 severity: Some(DiagnosticSeverity::HINT),
6655 message: "error 2 hint 1".to_string(),
6656 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6657 location: lsp::Location {
6658 uri: buffer_uri.clone(),
6659 range: lsp::Range::new(
6660 lsp::Position::new(2, 8),
6661 lsp::Position::new(2, 17),
6662 ),
6663 },
6664 message: "original diagnostic".to_string(),
6665 }]),
6666 ..Default::default()
6667 },
6668 lsp::Diagnostic {
6669 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6670 severity: Some(DiagnosticSeverity::HINT),
6671 message: "error 2 hint 2".to_string(),
6672 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6673 location: lsp::Location {
6674 uri: buffer_uri.clone(),
6675 range: lsp::Range::new(
6676 lsp::Position::new(2, 8),
6677 lsp::Position::new(2, 17),
6678 ),
6679 },
6680 message: "original diagnostic".to_string(),
6681 }]),
6682 ..Default::default()
6683 },
6684 ],
6685 version: None,
6686 };
6687
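        // Publish the diagnostics. Entries connected through `related_information` should be
        // grouped: "error 1" and its hint share group 0, while "error 2" and its hints share
        // group 1.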
6688 project
6689 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
6690 .unwrap();
6691 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6692
6693 assert_eq!(
6694 buffer
6695 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6696 .collect::<Vec<_>>(),
6697 &[
6698 DiagnosticEntry {
6699 range: Point::new(1, 8)..Point::new(1, 9),
6700 diagnostic: Diagnostic {
6701 severity: DiagnosticSeverity::WARNING,
6702 message: "error 1".to_string(),
6703 group_id: 0,
6704 is_primary: true,
6705 ..Default::default()
6706 }
6707 },
6708 DiagnosticEntry {
6709 range: Point::new(1, 8)..Point::new(1, 9),
6710 diagnostic: Diagnostic {
6711 severity: DiagnosticSeverity::HINT,
6712 message: "error 1 hint 1".to_string(),
6713 group_id: 0,
6714 is_primary: false,
6715 ..Default::default()
6716 }
6717 },
6718 DiagnosticEntry {
6719 range: Point::new(1, 13)..Point::new(1, 15),
6720 diagnostic: Diagnostic {
6721 severity: DiagnosticSeverity::HINT,
6722 message: "error 2 hint 1".to_string(),
6723 group_id: 1,
6724 is_primary: false,
6725 ..Default::default()
6726 }
6727 },
6728 DiagnosticEntry {
6729 range: Point::new(1, 13)..Point::new(1, 15),
6730 diagnostic: Diagnostic {
6731 severity: DiagnosticSeverity::HINT,
6732 message: "error 2 hint 2".to_string(),
6733 group_id: 1,
6734 is_primary: false,
6735 ..Default::default()
6736 }
6737 },
6738 DiagnosticEntry {
6739 range: Point::new(2, 8)..Point::new(2, 17),
6740 diagnostic: Diagnostic {
6741 severity: DiagnosticSeverity::ERROR,
6742 message: "error 2".to_string(),
6743 group_id: 1,
6744 is_primary: true,
6745 ..Default::default()
6746 }
6747 }
6748 ]
6749 );
6750
6751 assert_eq!(
6752 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6753 &[
6754 DiagnosticEntry {
6755 range: Point::new(1, 8)..Point::new(1, 9),
6756 diagnostic: Diagnostic {
6757 severity: DiagnosticSeverity::WARNING,
6758 message: "error 1".to_string(),
6759 group_id: 0,
6760 is_primary: true,
6761 ..Default::default()
6762 }
6763 },
6764 DiagnosticEntry {
6765 range: Point::new(1, 8)..Point::new(1, 9),
6766 diagnostic: Diagnostic {
6767 severity: DiagnosticSeverity::HINT,
6768 message: "error 1 hint 1".to_string(),
6769 group_id: 0,
6770 is_primary: false,
6771 ..Default::default()
6772 }
6773 },
6774 ]
6775 );
6776 assert_eq!(
6777 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6778 &[
6779 DiagnosticEntry {
6780 range: Point::new(1, 13)..Point::new(1, 15),
6781 diagnostic: Diagnostic {
6782 severity: DiagnosticSeverity::HINT,
6783 message: "error 2 hint 1".to_string(),
6784 group_id: 1,
6785 is_primary: false,
6786 ..Default::default()
6787 }
6788 },
6789 DiagnosticEntry {
6790 range: Point::new(1, 13)..Point::new(1, 15),
6791 diagnostic: Diagnostic {
6792 severity: DiagnosticSeverity::HINT,
6793 message: "error 2 hint 2".to_string(),
6794 group_id: 1,
6795 is_primary: false,
6796 ..Default::default()
6797 }
6798 },
6799 DiagnosticEntry {
6800 range: Point::new(2, 8)..Point::new(2, 17),
6801 diagnostic: Diagnostic {
6802 severity: DiagnosticSeverity::ERROR,
6803 message: "error 2".to_string(),
6804 group_id: 1,
6805 is_primary: true,
6806 ..Default::default()
6807 }
6808 }
6809 ]
6810 );
6811 }
6812
6813 #[gpui::test]
6814 async fn test_rename(cx: &mut gpui::TestAppContext) {
6815 cx.foreground().forbid_parking();
6816
6817 let mut language = Language::new(
6818 LanguageConfig {
6819 name: "Rust".into(),
6820 path_suffixes: vec!["rs".to_string()],
6821 ..Default::default()
6822 },
6823 Some(tree_sitter_rust::language()),
6824 );
6825 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6826
6827 let fs = FakeFs::new(cx.background());
6828 fs.insert_tree(
6829 "/dir",
6830 json!({
6831 "one.rs": "const ONE: usize = 1;",
6832 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6833 }),
6834 )
6835 .await;
6836
6837 let project = Project::test(fs.clone(), cx);
6838 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6839
6840 let (tree, _) = project
6841 .update(cx, |project, cx| {
6842 project.find_or_create_local_worktree("/dir", true, cx)
6843 })
6844 .await
6845 .unwrap();
6846 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6847 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6848 .await;
6849
6850 let buffer = project
6851 .update(cx, |project, cx| {
6852 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6853 })
6854 .await
6855 .unwrap();
6856
6857 let mut fake_server = fake_servers.next().await.unwrap();
6858
6859 let response = project.update(cx, |project, cx| {
6860 project.prepare_rename(buffer.clone(), 7, cx)
6861 });
6862 fake_server
6863 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6864 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6865 assert_eq!(params.position, lsp::Position::new(0, 7));
6866 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6867 lsp::Position::new(0, 6),
6868 lsp::Position::new(0, 9),
6869 )))
6870 })
6871 .next()
6872 .await
6873 .unwrap();
6874 let range = response.await.unwrap().unwrap();
6875 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6876 assert_eq!(range, 6..9);
6877
6878 let response = project.update(cx, |project, cx| {
6879 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6880 });
6881 fake_server
6882 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
6883 assert_eq!(
6884 params.text_document_position.text_document.uri.as_str(),
6885 "file:///dir/one.rs"
6886 );
6887 assert_eq!(
6888 params.text_document_position.position,
6889 lsp::Position::new(0, 7)
6890 );
6891 assert_eq!(params.new_name, "THREE");
6892 Some(lsp::WorkspaceEdit {
6893 changes: Some(
6894 [
6895 (
6896 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6897 vec![lsp::TextEdit::new(
6898 lsp::Range::new(
6899 lsp::Position::new(0, 6),
6900 lsp::Position::new(0, 9),
6901 ),
6902 "THREE".to_string(),
6903 )],
6904 ),
6905 (
6906 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6907 vec![
6908 lsp::TextEdit::new(
6909 lsp::Range::new(
6910 lsp::Position::new(0, 24),
6911 lsp::Position::new(0, 27),
6912 ),
6913 "THREE".to_string(),
6914 ),
6915 lsp::TextEdit::new(
6916 lsp::Range::new(
6917 lsp::Position::new(0, 35),
6918 lsp::Position::new(0, 38),
6919 ),
6920 "THREE".to_string(),
6921 ),
6922 ],
6923 ),
6924 ]
6925 .into_iter()
6926 .collect(),
6927 ),
6928 ..Default::default()
6929 })
6930 })
6931 .next()
6932 .await
6933 .unwrap();
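        // The rename's workspace edit touches both files, so the resulting project
        // transaction should contain two buffers: the one we opened and `two.rs`, which the
        // project opens on demand.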
6934 let mut transaction = response.await.unwrap().0;
6935 assert_eq!(transaction.len(), 2);
6936 assert_eq!(
6937 transaction
6938 .remove_entry(&buffer)
6939 .unwrap()
6940 .0
6941 .read_with(cx, |buffer, _| buffer.text()),
6942 "const THREE: usize = 1;"
6943 );
6944 assert_eq!(
6945 transaction
6946 .into_keys()
6947 .next()
6948 .unwrap()
6949 .read_with(cx, |buffer, _| buffer.text()),
6950 "const TWO: usize = one::THREE + one::THREE;"
6951 );
6952 }
6953
6954 #[gpui::test]
6955 async fn test_search(cx: &mut gpui::TestAppContext) {
6956 let fs = FakeFs::new(cx.background());
6957 fs.insert_tree(
6958 "/dir",
6959 json!({
6960 "one.rs": "const ONE: usize = 1;",
6961 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6962 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6963 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6964 }),
6965 )
6966 .await;
6967 let project = Project::test(fs.clone(), cx);
6968 let (tree, _) = project
6969 .update(cx, |project, cx| {
6970 project.find_or_create_local_worktree("/dir", true, cx)
6971 })
6972 .await
6973 .unwrap();
6974 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6975 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6976 .await;
6977
6978 assert_eq!(
6979 search(&project, SearchQuery::text("TWO", false, true), cx)
6980 .await
6981 .unwrap(),
6982 HashMap::from_iter([
6983 ("two.rs".to_string(), vec![6..9]),
6984 ("three.rs".to_string(), vec![37..40])
6985 ])
6986 );
6987
6988 let buffer_4 = project
6989 .update(cx, |project, cx| {
6990 project.open_buffer((worktree_id, "four.rs"), cx)
6991 })
6992 .await
6993 .unwrap();
6994 buffer_4.update(cx, |buffer, cx| {
6995 buffer.edit([20..28, 31..43], "two::TWO", cx);
6996 });
6997
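        // The edit to `four.rs` hasn't been saved, but search runs against open buffers, so
        // the new occurrences of `TWO` are found at their updated offsets.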
6998 assert_eq!(
6999 search(&project, SearchQuery::text("TWO", false, true), cx)
7000 .await
7001 .unwrap(),
7002 HashMap::from_iter([
7003 ("two.rs".to_string(), vec![6..9]),
7004 ("three.rs".to_string(), vec![37..40]),
7005 ("four.rs".to_string(), vec![25..28, 36..39])
7006 ])
7007 );
7008
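        // Runs a project-wide search and flattens the results into (path, byte ranges) pairs
        // for easier comparison.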
7009 async fn search(
7010 project: &ModelHandle<Project>,
7011 query: SearchQuery,
7012 cx: &mut gpui::TestAppContext,
7013 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7014 let results = project
7015 .update(cx, |project, cx| project.search(query, cx))
7016 .await?;
7017
7018 Ok(results
7019 .into_iter()
7020 .map(|(buffer, ranges)| {
7021 buffer.read_with(cx, |buffer, _| {
7022 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7023 let ranges = ranges
7024 .into_iter()
7025 .map(|range| range.to_offset(buffer))
7026 .collect::<Vec<_>>();
7027 (path, ranges)
7028 })
7029 })
7030 .collect())
7031 }
7032 }
7033}