1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use postage::watch;
27use rand::prelude::*;
28use search::SearchQuery;
29use sha2::{Digest, Sha256};
30use similar::{ChangeTag, TextDiff};
31use std::{
32 cell::RefCell,
33 cmp::{self, Ordering},
34 convert::TryInto,
35 hash::Hash,
36 mem,
37 ops::Range,
38 path::{Component, Path, PathBuf},
39 rc::Rc,
40 sync::{atomic::AtomicBool, Arc},
41 time::Instant,
42};
43use util::{post_inc, ResultExt, TryFutureExt as _};
44
45pub use fs::*;
46pub use worktree::*;
47
/// Root state for one open project: the worktrees being edited, the
/// buffers opened from them, language-server bookkeeping, and the
/// local/remote collaboration state.
pub struct Project {
    // Held weakly or strongly depending on sharing state; see `WorktreeHandle`.
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and language name.
    language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    // In-flight server startups for the same keys as `language_servers`.
    started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    // Fired whenever a buffer finishes opening (see `open_buffer`).
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids that have been sent to each connected peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // De-duplicates concurrent `open_buffer` calls for the same path.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // De-duplicates concurrent local-worktree creation for the same path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Per-buffer history of (version, snapshot) pairs sent to language servers.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}
75
/// How the project retains an opened buffer.
enum OpenBuffer {
    /// Keeps the buffer alive (used while the project is shared or remote).
    Strong(ModelHandle<Buffer>),
    /// Lets the buffer drop once nothing else references it.
    Weak(WeakModelHandle<Buffer>),
    /// The buffer is still loading; operations received meanwhile are
    /// queued here and applied during `register_buffer`.
    Loading(Vec<Operation>),
}
81
/// How the project retains a worktree. `share` upgrades weak handles to
/// strong; `unshare` downgrades invisible worktrees back to weak.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
86
/// Whether this instance is the host of the project or a guest's view of
/// a project hosted elsewhere.
enum ProjectClientState {
    Local {
        is_shared: bool,
        // Server-assigned project id; `None` while disconnected.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Keeps the registration task alive for the project's lifetime.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Keeps the disconnect-detection task alive for the project's lifetime.
        _detect_unshare_task: Task<Option<()>>,
    },
}
101
/// Another peer participating in this project, identified by connection
/// (`peer_id`) and by their CRDT replica id.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
108
/// Events emitted by `Project` for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}
118
/// Internal events surfaced from a language server. The `Work*` variants
/// mirror LSP `$/progress` notifications, keyed by their progress token.
enum LanguageServerEvent {
    WorkStart {
        token: String,
    },
    WorkProgress {
        token: String,
        progress: LanguageServerProgress,
    },
    WorkEnd {
        token: String,
    },
    DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
}
132
/// Displayable status of one running language server.
pub struct LanguageServerStatus {
    pub name: String,
    // In-flight progress notifications, keyed by progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pending_diagnostic_updates: isize,
}
138
/// One unit of language-server progress: an optional message and an
/// optional percentage, matching the LSP progress payload.
#[derive(Clone, Default)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
}
144
/// A path addressed relative to one of the project's worktrees.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
150
/// Counts of primary diagnostics by severity for one file.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
158
/// An anchored range within a specific buffer (e.g. a definition site).
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
164
/// A highlighted range returned by an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
170
/// A project-wide symbol as reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    // Worktree containing the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // SHA-256 digest used to authenticate the symbol when it round-trips
    // through collaborators; NOTE(review): verify against the signing site.
    pub signature: [u8; 32],
}
183
/// A set of edits spanning multiple buffers, keyed by the buffer each
/// transaction applies to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
186
187impl DiagnosticSummary {
188 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
189 let mut this = Self {
190 error_count: 0,
191 warning_count: 0,
192 info_count: 0,
193 hint_count: 0,
194 };
195
196 for entry in diagnostics {
197 if entry.diagnostic.is_primary {
198 match entry.diagnostic.severity {
199 DiagnosticSeverity::ERROR => this.error_count += 1,
200 DiagnosticSeverity::WARNING => this.warning_count += 1,
201 DiagnosticSeverity::INFORMATION => this.info_count += 1,
202 DiagnosticSeverity::HINT => this.hint_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
212 proto::DiagnosticSummary {
213 path: path.to_string_lossy().to_string(),
214 error_count: self.error_count as u32,
215 warning_count: self.warning_count as u32,
216 info_count: self.info_count as u32,
217 hint_count: self.hint_count as u32,
218 }
219 }
220}
221
/// Identifies a single entry (file or directory) within a worktree.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
227
228impl Project {
    /// Registers every RPC message and request handler through which the
    /// server and collaborators drive `Project` models on this client.
    /// Call once at startup, before any project is created or joined.
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_start_language_server);
        client.add_entity_message_handler(Self::handle_update_language_server);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
259
    /// Creates a project hosted on this machine.
    ///
    /// Also spawns a background task that, on every connection-status
    /// change, registers the project with the server when connected
    /// (re-registering all worktrees under the new id) and clears the
    /// remote id when disconnected.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        // Track connection status for as long as the project
                        // model stays alive (the handle is weak).
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if status.is_connected() {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under the
                                    // new project id before publishing that id.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
340
    /// Joins a project hosted elsewhere as a guest.
    ///
    /// Connects and authenticates, requests to join `remote_id`, then
    /// builds the model from the server's response: remote worktrees,
    /// language-server statuses, and the collaborator list. A background
    /// task marks the project as no-longer-shared if the connection ever
    /// changes state after the initial check.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Instantiate every remote worktree; their load tasks run detached.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch user records for all collaborators before building the map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
450
451 #[cfg(any(test, feature = "test-support"))]
452 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
453 let languages = Arc::new(LanguageRegistry::test());
454 let http_client = client::test::FakeHttpClient::with_404_response();
455 let client = client::Client::new(http_client.clone());
456 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
457 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
458 }
459
460 #[cfg(any(test, feature = "test-support"))]
461 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
462 self.opened_buffers
463 .get(&remote_id)
464 .and_then(|buffer| buffer.upgrade(cx))
465 }
466
    /// The language registry backing this project (test accessor).
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
471
472 #[cfg(any(test, feature = "test-support"))]
473 pub fn check_invariants(&self, cx: &AppContext) {
474 if self.is_local() {
475 let mut worktree_root_paths = HashMap::default();
476 for worktree in self.worktrees(cx) {
477 let worktree = worktree.read(cx);
478 let abs_path = worktree.as_local().unwrap().abs_path().clone();
479 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
480 assert_eq!(
481 prev_worktree_id,
482 None,
483 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
484 abs_path,
485 worktree.id(),
486 prev_worktree_id
487 )
488 }
489 } else {
490 let replica_id = self.replica_id();
491 for buffer in self.opened_buffers.values() {
492 if let Some(buffer) = buffer.upgrade(cx) {
493 let buffer = buffer.read(cx);
494 assert_eq!(
495 buffer.deferred_ops_len(),
496 0,
497 "replica {}, buffer {} has deferred operations",
498 replica_id,
499 buffer.remote_id()
500 );
501 }
502 }
503 }
504 }
505
506 #[cfg(any(test, feature = "test-support"))]
507 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
508 let path = path.into();
509 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
510 self.opened_buffers.iter().any(|(_, buffer)| {
511 if let Some(buffer) = buffer.upgrade(cx) {
512 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
513 if file.worktree == worktree && file.path() == &path.path {
514 return true;
515 }
516 }
517 }
518 false
519 })
520 } else {
521 false
522 }
523 }
524
    /// The filesystem implementation this project reads and writes through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
528
529 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
530 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
531 *remote_id_tx.borrow_mut() = remote_id;
532 }
533
534 self.subscriptions.clear();
535 if let Some(remote_id) = remote_id {
536 self.subscriptions
537 .push(self.client.add_model_for_remote_entity(remote_id, cx));
538 }
539 }
540
541 pub fn remote_id(&self) -> Option<u64> {
542 match &self.client_state {
543 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
544 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
545 }
546 }
547
548 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
549 let mut id = None;
550 let mut watch = None;
551 match &self.client_state {
552 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
553 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
554 }
555
556 async move {
557 if let Some(id) = id {
558 return id;
559 }
560 let mut watch = watch.unwrap();
561 loop {
562 let id = *watch.borrow();
563 if let Some(id) = id {
564 return id;
565 }
566 watch.next().await;
567 }
568 }
569 }
570
571 pub fn replica_id(&self) -> ReplicaId {
572 match &self.client_state {
573 ProjectClientState::Local { .. } => 0,
574 ProjectClientState::Remote { replica_id, .. } => *replica_id,
575 }
576 }
577
    /// The peers currently collaborating on this project, by connection id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
581
    /// Iterates over the worktrees that are still alive, silently
    /// skipping weak handles whose models have been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
590
591 pub fn visible_worktrees<'a>(
592 &'a self,
593 cx: &'a AppContext,
594 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
595 self.worktrees.iter().filter_map(|worktree| {
596 worktree.upgrade(cx).and_then(|worktree| {
597 if worktree.read(cx).is_visible() {
598 Some(worktree)
599 } else {
600 None
601 }
602 })
603 })
604 }
605
    /// Looks up a live worktree by its id.
    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }
614
    /// Starts sharing this local project with collaborators.
    ///
    /// Upgrades all weakly-held buffers and worktrees to strong handles
    /// so they outlive local interest, announces the share to the
    /// server, then uploads every worktree. Fails if the project is
    /// remote or has no server-assigned id yet.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Keep every open buffer alive for guests.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Keep every worktree alive for guests.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            // Tell the server first; only then upload worktree contents.
            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
676
677 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
678 let rpc = self.client.clone();
679 cx.spawn(|this, mut cx| async move {
680 let project_id = this.update(&mut cx, |this, cx| {
681 if let ProjectClientState::Local {
682 is_shared,
683 remote_id_rx,
684 ..
685 } = &mut this.client_state
686 {
687 *is_shared = false;
688
689 for open_buffer in this.opened_buffers.values_mut() {
690 match open_buffer {
691 OpenBuffer::Strong(buffer) => {
692 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
693 }
694 _ => {}
695 }
696 }
697
698 for worktree_handle in this.worktrees.iter_mut() {
699 match worktree_handle {
700 WorktreeHandle::Strong(worktree) => {
701 if !worktree.read(cx).is_visible() {
702 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
703 }
704 }
705 _ => {}
706 }
707 }
708
709 remote_id_rx
710 .borrow()
711 .ok_or_else(|| anyhow!("no project id"))
712 } else {
713 Err(anyhow!("can't share a remote project"))
714 }
715 })?;
716
717 rpc.send(proto::UnshareProject { project_id })?;
718 this.update(&mut cx, |this, cx| {
719 this.collaborators.clear();
720 this.shared_buffers.clear();
721 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
722 worktree.update(cx, |worktree, _| {
723 worktree.as_local_mut().unwrap().unshare();
724 });
725 }
726 cx.notify()
727 });
728 Ok(())
729 })
730 }
731
    /// Marks a guest-side project as no longer shared by its host
    /// (e.g. after a disconnect) and drops the collaborator list.
    /// No-op for local projects.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
743
744 pub fn is_read_only(&self) -> bool {
745 match &self.client_state {
746 ProjectClientState::Local { .. } => false,
747 ProjectClientState::Remote {
748 sharing_has_stopped,
749 ..
750 } => *sharing_has_stopped,
751 }
752 }
753
754 pub fn is_local(&self) -> bool {
755 match &self.client_state {
756 ProjectClientState::Local { .. } => true,
757 ProjectClientState::Remote { .. } => false,
758 }
759 }
760
    /// Whether this instance is a guest's view of a remote project.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
764
765 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
766 if self.is_remote() {
767 return Err(anyhow!("creating buffers as a guest is not supported yet"));
768 }
769
770 let buffer = cx.add_model(|cx| {
771 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
772 });
773 self.register_buffer(&buffer, cx)?;
774 Ok(buffer)
775 }
776
    /// Opens the buffer at `path`, reusing an already-open buffer or an
    /// in-flight load for the same path so that concurrent callers all
    /// receive the same `Buffer` model.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones request the
                // buffer from the host over RPC.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load task publishes a result into the watch.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
837
    /// Loads a buffer from disk via the (local) worktree, then registers
    /// it with the project once loaded.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
854
    /// Requests a buffer from the host over RPC and deserializes the
    /// response into a local `Buffer` model.
    ///
    /// Panics if the project has no remote id yet (guest projects always
    /// have one).
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
879
    /// Opens the file a language server referred to by URI. If no
    /// existing worktree contains the path, a new (invisible) worktree
    /// is created for it and associated with the given language server.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: Arc<str>,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // Path lies outside all current worktrees: create one
                // rooted at the file itself (not user-visible).
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
916
    /// Saves `buffer` to a new absolute path, creating a visible local
    /// worktree for that path if needed, then re-resolves the buffer's
    /// language and language-server registration for its new location.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                // The file (and possibly its language) changed, so redo
                // language and server assignment.
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
941
942 pub fn get_open_buffer(
943 &mut self,
944 path: &ProjectPath,
945 cx: &mut ModelContext<Self>,
946 ) -> Option<ModelHandle<Buffer>> {
947 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
948 self.opened_buffers.values().find_map(|buffer| {
949 let buffer = buffer.upgrade(cx)?;
950 let file = File::from_dyn(buffer.read(cx).file())?;
951 if file.worktree == worktree && file.path() == &path.path {
952 Some(buffer)
953 } else {
954 None
955 }
956 })
957 }
958
    /// Starts tracking `buffer` under its remote id: applies any
    /// operations that were queued while it was loading, subscribes to
    /// its events, and wires it up to language services.
    ///
    /// Errors if a live buffer with the same remote id is already
    /// registered, or if applying queued operations fails.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Hold the buffer strongly while shared/remote so guests can rely
        // on it staying alive; otherwise let it drop with local interest.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Replay operations that arrived before the buffer finished loading.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak entry is fine to replace; a live one is a duplicate.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
999
    /// Introduces a local buffer to its language server: restores any
    /// previously-known diagnostics, sends `textDocument/didOpen`, seeds
    /// the snapshot history used for incremental sync, and arranges for
    /// `textDocument/didClose` when the buffer is released.
    ///
    /// No-op for non-local files; diagnostics are still restored even
    /// when no language server is running yet.
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();
                let language_server = self.language_server_for_buffer(buffer, cx).cloned();

                // Restore diagnostics the worktree already holds for this path.
                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some(server) = language_server {
                    // Announce the document at version 0 with its full text.
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    Default::default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    // Let the buffer know which characters trigger completions.
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    // Version 0 snapshot is the baseline for incremental edits.
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }

                // Pair the didOpen above with a didClose when the buffer dies.
                cx.observe_release(buffer_handle, |this, buffer, cx| {
                    if let Some(file) = File::from_dyn(buffer.file()) {
                        if file.is_local() {
                            let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                            if let Some(server) = this.language_server_for_buffer(buffer, cx) {
                                server
                                    .notify::<lsp::notification::DidCloseTextDocument>(
                                        lsp::DidCloseTextDocumentParams {
                                            text_document: lsp::TextDocumentIdentifier::new(
                                                uri.clone(),
                                            ),
                                        },
                                    )
                                    .log_err();
                            }
                        }
                    }
                })
                .detach();
            }
        }
    }
1071
    /// Central handler for buffer events: replicates operations to remote
    /// collaborators and keeps language servers in sync with edits and saves.
    ///
    /// Returns `None` when an early-exit condition is hit (no remote id, no
    /// language server, non-local file, …); the `Option` return exists only
    /// so `?` can be used internally.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Forward this buffer operation to the remote project.
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited => {
                // Send `textDocument/didChange` with incremental changes
                // computed against the snapshot the server last saw.
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Each edit becomes one LSP content change; positions are in
                // UTF-16 coordinates (PointUtf16), as LSP requires.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        // The replaced range keeps the old text's extent,
                        // re-anchored at the new start position.
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Remember the snapshot the server now has, so later edits
                // and diagnostics can be interpreted against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                // Notify every language server attached to this worktree
                // that the file was saved (`textDocument/didSave`).
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1158
1159 fn language_servers_for_worktree(
1160 &self,
1161 worktree_id: WorktreeId,
1162 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1163 self.language_servers.iter().filter_map(
1164 move |((language_server_worktree_id, language_name), server)| {
1165 if *language_server_worktree_id == worktree_id {
1166 Some((language_name.as_ref(), server))
1167 } else {
1168 None
1169 }
1170 },
1171 )
1172 }
1173
1174 fn assign_language_to_buffer(
1175 &mut self,
1176 buffer: &ModelHandle<Buffer>,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<()> {
1179 // If the buffer has a language, set it and start the language server if we haven't already.
1180 let full_path = buffer.read(cx).file()?.full_path(cx);
1181 let language = self.languages.select_language(&full_path)?;
1182 buffer.update(cx, |buffer, cx| {
1183 buffer.set_language(Some(language.clone()), cx);
1184 });
1185
1186 let file = File::from_dyn(buffer.read(cx).file())?;
1187 let worktree = file.worktree.read(cx).as_local()?;
1188 let worktree_id = worktree.id();
1189 let worktree_abs_path = worktree.abs_path().clone();
1190 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1191
1192 None
1193 }
1194
    /// Ensures a language server is running for the given worktree/language
    /// pair, spawning one if necessary. Launch is asynchronous; the server is
    /// recorded in `language_servers` only after `initialize()` succeeds.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        // One server per (worktree, language-name) key; repeated calls reuse
        // the already-started (possibly still-initializing) task.
        let key = (worktree_id, language.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let mut language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    // Notification callbacks run off the model; funnel their
                    // events through this channel so they can be handled with
                    // model access (see the spawn below).
                    let (language_server_events_tx, language_server_events_rx) =
                        smol::channel::unbounded();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let language_server_events_tx = language_server_events_tx.clone();
                            move |params| {
                                language_server_events_tx
                                    .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
                                    .ok();
                            }
                        })
                        .detach();

                    // Translate `$/progress` notifications into coarse
                    // work-start/progress/end events. Only string tokens are
                    // supported; numeric tokens are logged and dropped.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::String(token) => token,
                                lsp::NumberOrString::Number(token) => {
                                    log::info!("skipping numeric progress token {}", token);
                                    return;
                                }
                            };

                            match params.value {
                                lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                                    lsp::WorkDoneProgress::Begin(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkStart { token })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::Report(report) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkProgress {
                                                token,
                                                progress: LanguageServerProgress {
                                                    message: report.message,
                                                    percentage: report
                                                        .percentage
                                                        .map(|p| p as usize),
                                                },
                                            })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::End(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkEnd { token })
                                            .ok();
                                    }
                                },
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| {
                        let this = this.downgrade();
                        async move {
                            while let Ok(event) = language_server_events_rx.recv().await {
                                let this = this.upgrade(&cx)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_event(server_id, event, &language, cx)
                                });
                            }
                            Some(())
                        }
                    })
                    .detach();

                    let language_server = language_server.initialize().await.log_err()?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), language_server.clone());
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );

                        // Let collaborators know this server exists so they
                        // can mirror its status.
                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if (file.worktree.read(cx).id(), language.name()) != key {
                                    continue;
                                }

                                // NOTE(review): the `?`s below abort this
                                // whole closure on the first non-local file
                                // or failed notify, skipping the remaining
                                // buffers — confirm that's intended.
                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    // Mirror the server's completion trigger
                                    // characters onto the buffer.
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1371
    /// Dispatches an event received from a language server, updating local
    /// status bookkeeping and mirroring the change to collaborators.
    ///
    /// Work whose token equals the language's disk-based-diagnostics progress
    /// token toggles the project-wide "diagnostics updating" state (with a
    /// nesting counter) instead of per-token progress reporting.
    fn on_lsp_event(
        &mut self,
        language_server_id: usize,
        event: LanguageServerEvent,
        language: &Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                return;
            };

        match event {
            LanguageServerEvent::WorkStart { token } => {
                if Some(&token) == disk_diagnostics_token {
                    // Only the first nested start announces the transition.
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::WorkProgress { token, progress } => {
                // Progress on the disk-based diagnostics token is ignored;
                // only its start/end matter.
                if Some(&token) != disk_diagnostics_token {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        progress.clone(),
                        cx,
                    );
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: progress.message,
                                percentage: progress.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            LanguageServerEvent::WorkEnd { token } => {
                if Some(&token) == disk_diagnostics_token {
                    // Mirror of WorkStart: only the last nested end announces.
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::DiagnosticsUpdate(mut params) => {
                language.process_diagnostics(&mut params);

                // Servers that expose no disk-based-diagnostics progress
                // token get a synthetic started/finished pair around each
                // diagnostics batch.
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_started(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                }
                self.update_diagnostics(
                    params,
                    language
                        .disk_based_diagnostic_sources()
                        .unwrap_or(&Default::default()),
                    cx,
                )
                .log_err();
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_finished(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
        }
    }
1484
1485 fn on_lsp_work_start(
1486 &mut self,
1487 language_server_id: usize,
1488 token: String,
1489 cx: &mut ModelContext<Self>,
1490 ) {
1491 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1492 status.pending_work.insert(
1493 token,
1494 LanguageServerProgress {
1495 message: None,
1496 percentage: None,
1497 },
1498 );
1499 cx.notify();
1500 }
1501 }
1502
1503 fn on_lsp_work_progress(
1504 &mut self,
1505 language_server_id: usize,
1506 token: String,
1507 progress: LanguageServerProgress,
1508 cx: &mut ModelContext<Self>,
1509 ) {
1510 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1511 status.pending_work.insert(token, progress);
1512 cx.notify();
1513 }
1514 }
1515
1516 fn on_lsp_work_end(
1517 &mut self,
1518 language_server_id: usize,
1519 token: String,
1520 cx: &mut ModelContext<Self>,
1521 ) {
1522 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1523 status.pending_work.remove(&token);
1524 cx.notify();
1525 }
1526 }
1527
1528 fn broadcast_language_server_update(
1529 &self,
1530 language_server_id: usize,
1531 event: proto::update_language_server::Variant,
1532 ) {
1533 if let Some(project_id) = self.remote_id() {
1534 self.client
1535 .send(proto::UpdateLanguageServer {
1536 project_id,
1537 language_server_id: language_server_id as u64,
1538 variant: Some(event),
1539 })
1540 .log_err();
1541 }
1542 }
1543
    /// Iterates over the status (name, pending work, diagnostic-update count)
    /// of every language server, ordered by server id (BTreeMap key order).
    pub fn language_server_statuses(&self) -> impl Iterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
1547
1548 pub fn update_diagnostics(
1549 &mut self,
1550 params: lsp::PublishDiagnosticsParams,
1551 disk_based_sources: &HashSet<String>,
1552 cx: &mut ModelContext<Self>,
1553 ) -> Result<()> {
1554 let abs_path = params
1555 .uri
1556 .to_file_path()
1557 .map_err(|_| anyhow!("URI is not a file"))?;
1558 let mut next_group_id = 0;
1559 let mut diagnostics = Vec::default();
1560 let mut primary_diagnostic_group_ids = HashMap::default();
1561 let mut sources_by_group_id = HashMap::default();
1562 let mut supporting_diagnostic_severities = HashMap::default();
1563 for diagnostic in ¶ms.diagnostics {
1564 let source = diagnostic.source.as_ref();
1565 let code = diagnostic.code.as_ref().map(|code| match code {
1566 lsp::NumberOrString::Number(code) => code.to_string(),
1567 lsp::NumberOrString::String(code) => code.clone(),
1568 });
1569 let range = range_from_lsp(diagnostic.range);
1570 let is_supporting = diagnostic
1571 .related_information
1572 .as_ref()
1573 .map_or(false, |infos| {
1574 infos.iter().any(|info| {
1575 primary_diagnostic_group_ids.contains_key(&(
1576 source,
1577 code.clone(),
1578 range_from_lsp(info.location.range),
1579 ))
1580 })
1581 });
1582
1583 if is_supporting {
1584 if let Some(severity) = diagnostic.severity {
1585 supporting_diagnostic_severities
1586 .insert((source, code.clone(), range), severity);
1587 }
1588 } else {
1589 let group_id = post_inc(&mut next_group_id);
1590 let is_disk_based =
1591 source.map_or(false, |source| disk_based_sources.contains(source));
1592
1593 sources_by_group_id.insert(group_id, source);
1594 primary_diagnostic_group_ids
1595 .insert((source, code.clone(), range.clone()), group_id);
1596
1597 diagnostics.push(DiagnosticEntry {
1598 range,
1599 diagnostic: Diagnostic {
1600 code: code.clone(),
1601 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1602 message: diagnostic.message.clone(),
1603 group_id,
1604 is_primary: true,
1605 is_valid: true,
1606 is_disk_based,
1607 },
1608 });
1609 if let Some(infos) = &diagnostic.related_information {
1610 for info in infos {
1611 if info.location.uri == params.uri && !info.message.is_empty() {
1612 let range = range_from_lsp(info.location.range);
1613 diagnostics.push(DiagnosticEntry {
1614 range,
1615 diagnostic: Diagnostic {
1616 code: code.clone(),
1617 severity: DiagnosticSeverity::INFORMATION,
1618 message: info.message.clone(),
1619 group_id,
1620 is_primary: false,
1621 is_valid: true,
1622 is_disk_based,
1623 },
1624 });
1625 }
1626 }
1627 }
1628 }
1629 }
1630
1631 for entry in &mut diagnostics {
1632 let diagnostic = &mut entry.diagnostic;
1633 if !diagnostic.is_primary {
1634 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1635 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1636 source,
1637 diagnostic.code.clone(),
1638 entry.range.clone(),
1639 )) {
1640 diagnostic.severity = severity;
1641 }
1642 }
1643 }
1644
1645 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1646 Ok(())
1647 }
1648
1649 pub fn update_diagnostic_entries(
1650 &mut self,
1651 abs_path: PathBuf,
1652 version: Option<i32>,
1653 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1654 cx: &mut ModelContext<Project>,
1655 ) -> Result<(), anyhow::Error> {
1656 let (worktree, relative_path) = self
1657 .find_local_worktree(&abs_path, cx)
1658 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1659 if !worktree.read(cx).is_visible() {
1660 return Ok(());
1661 }
1662
1663 let project_path = ProjectPath {
1664 worktree_id: worktree.read(cx).id(),
1665 path: relative_path.into(),
1666 };
1667
1668 for buffer in self.opened_buffers.values() {
1669 if let Some(buffer) = buffer.upgrade(cx) {
1670 if buffer
1671 .read(cx)
1672 .file()
1673 .map_or(false, |file| *file.path() == project_path.path)
1674 {
1675 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1676 break;
1677 }
1678 }
1679 }
1680 worktree.update(cx, |worktree, cx| {
1681 worktree
1682 .as_local_mut()
1683 .ok_or_else(|| anyhow!("not a local worktree"))?
1684 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1685 })?;
1686 cx.emit(Event::DiagnosticsUpdated(project_path));
1687 Ok(())
1688 }
1689
    /// Applies a new diagnostic set to `buffer`, translating positions from
    /// the snapshot corresponding to the LSP `version` and clipping them to
    /// valid positions in the current buffer contents.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-breaker for diagnostics sharing the same range: primaries
        // first, then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending and end descending, so for equal starts a
        // containing range sorts before the ranges it contains.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let mut edits_since_save = snapshot
            .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        'outer: for entry in diagnostics {
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
            if entry.diagnostic.is_disk_based {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        // Edit lies entirely before the diagnostic: record
                        // the shift it introduces and advance.
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        // Edit overlaps the diagnostic: it's stale, drop it.
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                // Re-anchor the range relative to the last edit before it.
                let start_overshoot = start - last_edit_old_end;
                start = last_edit_new_end;
                start += start_overshoot;

                let end_overshoot = end - last_edit_old_end;
                end = last_edit_new_end;
                end += end_overshoot;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    // Couldn't grow forward (end of line); grow backward.
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
1773
    /// Formats the given buffers: remote buffers are formatted via an RPC to
    /// the host, local buffers via their language server's formatting (or
    /// range-formatting) request.
    ///
    /// Returns one transaction per edited buffer; when `push_to_history` is
    /// false, local transactions are removed from the undo history.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition buffers into local (have an abs path + server) and
        // remote. Buffers with a local path but no server are skipped.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // Any file-less buffer aborts the whole operation with an
                // empty transaction.
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Remote buffers first: one batched request to the host.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Prefer whole-document formatting; fall back to formatting
                // the buffer's entire range; skip servers supporting neither.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into a single
                        // transaction so they undo together.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1888
1889 pub fn definition<T: ToPointUtf16>(
1890 &self,
1891 buffer: &ModelHandle<Buffer>,
1892 position: T,
1893 cx: &mut ModelContext<Self>,
1894 ) -> Task<Result<Vec<Location>>> {
1895 let position = position.to_point_utf16(buffer.read(cx));
1896 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1897 }
1898
1899 pub fn references<T: ToPointUtf16>(
1900 &self,
1901 buffer: &ModelHandle<Buffer>,
1902 position: T,
1903 cx: &mut ModelContext<Self>,
1904 ) -> Task<Result<Vec<Location>>> {
1905 let position = position.to_point_utf16(buffer.read(cx));
1906 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1907 }
1908
1909 pub fn document_highlights<T: ToPointUtf16>(
1910 &self,
1911 buffer: &ModelHandle<Buffer>,
1912 position: T,
1913 cx: &mut ModelContext<Self>,
1914 ) -> Task<Result<Vec<DocumentHighlight>>> {
1915 let position = position.to_point_utf16(buffer.read(cx));
1916
1917 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1918 }
1919
    /// Workspace-symbol search across every language server in the project
    /// (local), or delegated to the host (remote). Result paths are resolved
    /// back to worktree-relative paths where possible.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate servers by pointer identity, since one server may
            // be registered under several (worktree, language) keys.
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            // One workspace/symbol request per distinct server.
            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // NOTE(review): pairing relies on the map's iteration
                        // order being the same here as when `requests` was
                        // built above (map is not mutated in between).
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer resolving the result into one of
                                    // our worktrees; otherwise keep it
                                    // relative to the server's worktree root.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host to run the symbol search.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2023
2024 pub fn open_buffer_for_symbol(
2025 &mut self,
2026 symbol: &Symbol,
2027 cx: &mut ModelContext<Self>,
2028 ) -> Task<Result<ModelHandle<Buffer>>> {
2029 if self.is_local() {
2030 let language_server = if let Some(server) = self.language_servers.get(&(
2031 symbol.source_worktree_id,
2032 Arc::from(symbol.language_name.as_str()),
2033 )) {
2034 server.clone()
2035 } else {
2036 return Task::ready(Err(anyhow!(
2037 "language server for worktree and language not found"
2038 )));
2039 };
2040
2041 let worktree_abs_path = if let Some(worktree_abs_path) = self
2042 .worktree_for_id(symbol.worktree_id, cx)
2043 .and_then(|worktree| worktree.read(cx).as_local())
2044 .map(|local_worktree| local_worktree.abs_path())
2045 {
2046 worktree_abs_path
2047 } else {
2048 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2049 };
2050 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2051 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2052 uri
2053 } else {
2054 return Task::ready(Err(anyhow!("invalid symbol path")));
2055 };
2056
2057 self.open_local_buffer_via_lsp(
2058 symbol_uri,
2059 Arc::from(symbol.language_name.as_str()),
2060 language_server,
2061 cx,
2062 )
2063 } else if let Some(project_id) = self.remote_id() {
2064 let request = self.client.request(proto::OpenBufferForSymbol {
2065 project_id,
2066 symbol: Some(serialize_symbol(symbol)),
2067 });
2068 cx.spawn(|this, mut cx| async move {
2069 let response = request.await?;
2070 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2071 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2072 .await
2073 })
2074 } else {
2075 Task::ready(Err(anyhow!("project does not have a remote id")))
2076 }
2077 }
2078
    /// Requests code completions at `position`, either from the buffer's
    /// language server (local) or from the host project (remote).
    ///
    /// Completions whose replacement range does not lie on valid positions
    /// in the current buffer contents are discarded.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // File-less buffers can't have completions; return empty.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor after the position so it stays put relative to text typed
        // at the cursor while the request is in flight.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Normalize both LSP response shapes into one flat list.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // Only plain text edits are supported; completions
                            // without one, or with insert/replace edits, are
                            // skipped.
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose edit range no longer
                            // maps onto valid buffer positions.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has caught up to the version the
                // host computed completions against.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2203
    /// Applies the additional text edits (e.g. auto-imports) that accompany an
    /// accepted completion.
    ///
    /// Locally, the completion is resolved through the buffer's language
    /// server first; on a remote project the request is forwarded to the host
    /// and the resulting transaction is replayed here. Returns the transaction
    /// covering the applied edits, or `None` when the resolved completion had
    /// no additional edits. When `push_to_history` is false the transaction is
    /// forgotten so it does not appear in the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            // Without a language server there is nothing to resolve; succeed
            // with no transaction.
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` fills in `additional_text_edits`,
                // which servers commonly omit from the initial completion list.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group every additional edit into one transaction,
                        // kept separate from any prior pending transaction.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // The edits were a no-op, so no transaction exists.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                // The host resolves and applies the edits; we replay the
                // resulting transaction on our replica.
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have streamed into this
                    // replica before reporting the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            // Neither local nor connected to a remote project.
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2284
2285 pub fn code_actions<T: ToOffset>(
2286 &self,
2287 buffer_handle: &ModelHandle<Buffer>,
2288 range: Range<T>,
2289 cx: &mut ModelContext<Self>,
2290 ) -> Task<Result<Vec<CodeAction>>> {
2291 let buffer_handle = buffer_handle.clone();
2292 let buffer = buffer_handle.read(cx);
2293 let buffer_id = buffer.remote_id();
2294 let worktree;
2295 let buffer_abs_path;
2296 if let Some(file) = File::from_dyn(buffer.file()) {
2297 worktree = file.worktree.clone();
2298 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2299 } else {
2300 return Task::ready(Ok(Default::default()));
2301 };
2302 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2303
2304 if worktree.read(cx).as_local().is_some() {
2305 let buffer_abs_path = buffer_abs_path.unwrap();
2306 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2307 server.clone()
2308 } else {
2309 return Task::ready(Ok(Default::default()));
2310 };
2311
2312 let lsp_range = lsp::Range::new(
2313 range.start.to_point_utf16(buffer).to_lsp_position(),
2314 range.end.to_point_utf16(buffer).to_lsp_position(),
2315 );
2316 cx.foreground().spawn(async move {
2317 if !lang_server.capabilities().code_action_provider.is_some() {
2318 return Ok(Default::default());
2319 }
2320
2321 Ok(lang_server
2322 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2323 text_document: lsp::TextDocumentIdentifier::new(
2324 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2325 ),
2326 range: lsp_range,
2327 work_done_progress_params: Default::default(),
2328 partial_result_params: Default::default(),
2329 context: lsp::CodeActionContext {
2330 diagnostics: Default::default(),
2331 only: Some(vec![
2332 lsp::CodeActionKind::QUICKFIX,
2333 lsp::CodeActionKind::REFACTOR,
2334 lsp::CodeActionKind::REFACTOR_EXTRACT,
2335 ]),
2336 },
2337 })
2338 .await?
2339 .unwrap_or_default()
2340 .into_iter()
2341 .filter_map(|entry| {
2342 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2343 Some(CodeAction {
2344 range: range.clone(),
2345 lsp_action,
2346 })
2347 } else {
2348 None
2349 }
2350 })
2351 .collect())
2352 })
2353 } else if let Some(project_id) = self.remote_id() {
2354 let rpc = self.client.clone();
2355 let version = buffer.version();
2356 cx.spawn_weak(|_, mut cx| async move {
2357 let response = rpc
2358 .request(proto::GetCodeActions {
2359 project_id,
2360 buffer_id,
2361 start: Some(language::proto::serialize_anchor(&range.start)),
2362 end: Some(language::proto::serialize_anchor(&range.end)),
2363 version: serialize_version(&version),
2364 })
2365 .await?;
2366
2367 buffer_handle
2368 .update(&mut cx, |buffer, _| {
2369 buffer.wait_for_version(deserialize_version(response.version))
2370 })
2371 .await;
2372
2373 response
2374 .actions
2375 .into_iter()
2376 .map(language::proto::deserialize_code_action)
2377 .collect()
2378 })
2379 } else {
2380 Task::ready(Ok(Default::default()))
2381 }
2382 }
2383
    /// Applies a previously fetched code action, returning the transactions
    /// for every buffer its workspace edit touched.
    ///
    /// Locally, the action is first resolved: if the server supplied `data`,
    /// the stored range is refreshed and `codeAction/resolve` is issued;
    /// otherwise the actions for the range are re-fetched and matched by
    /// title. Remotely the request is forwarded to the host and the resulting
    /// project transaction is replayed on this replica.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // Lazy resolution path: patch the stored range (the buffer
                    // may have changed since the action was fetched), then ask
                    // the server to resolve the action.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve support: re-request the actions for the range
                    // and locate the matching one by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    // An action without an edit changes no buffers.
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2470
2471 async fn deserialize_workspace_edit(
2472 this: ModelHandle<Self>,
2473 edit: lsp::WorkspaceEdit,
2474 push_to_history: bool,
2475 language_name: Arc<str>,
2476 language_server: Arc<LanguageServer>,
2477 cx: &mut AsyncAppContext,
2478 ) -> Result<ProjectTransaction> {
2479 let fs = this.read_with(cx, |this, _| this.fs.clone());
2480 let mut operations = Vec::new();
2481 if let Some(document_changes) = edit.document_changes {
2482 match document_changes {
2483 lsp::DocumentChanges::Edits(edits) => {
2484 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2485 }
2486 lsp::DocumentChanges::Operations(ops) => operations = ops,
2487 }
2488 } else if let Some(changes) = edit.changes {
2489 operations.extend(changes.into_iter().map(|(uri, edits)| {
2490 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2491 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2492 uri,
2493 version: None,
2494 },
2495 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2496 })
2497 }));
2498 }
2499
2500 let mut project_transaction = ProjectTransaction::default();
2501 for operation in operations {
2502 match operation {
2503 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2504 let abs_path = op
2505 .uri
2506 .to_file_path()
2507 .map_err(|_| anyhow!("can't convert URI to path"))?;
2508
2509 if let Some(parent_path) = abs_path.parent() {
2510 fs.create_dir(parent_path).await?;
2511 }
2512 if abs_path.ends_with("/") {
2513 fs.create_dir(&abs_path).await?;
2514 } else {
2515 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2516 .await?;
2517 }
2518 }
2519 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2520 let source_abs_path = op
2521 .old_uri
2522 .to_file_path()
2523 .map_err(|_| anyhow!("can't convert URI to path"))?;
2524 let target_abs_path = op
2525 .new_uri
2526 .to_file_path()
2527 .map_err(|_| anyhow!("can't convert URI to path"))?;
2528 fs.rename(
2529 &source_abs_path,
2530 &target_abs_path,
2531 op.options.map(Into::into).unwrap_or_default(),
2532 )
2533 .await?;
2534 }
2535 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2536 let abs_path = op
2537 .uri
2538 .to_file_path()
2539 .map_err(|_| anyhow!("can't convert URI to path"))?;
2540 let options = op.options.map(Into::into).unwrap_or_default();
2541 if abs_path.ends_with("/") {
2542 fs.remove_dir(&abs_path, options).await?;
2543 } else {
2544 fs.remove_file(&abs_path, options).await?;
2545 }
2546 }
2547 lsp::DocumentChangeOperation::Edit(op) => {
2548 let buffer_to_edit = this
2549 .update(cx, |this, cx| {
2550 this.open_local_buffer_via_lsp(
2551 op.text_document.uri,
2552 language_name.clone(),
2553 language_server.clone(),
2554 cx,
2555 )
2556 })
2557 .await?;
2558
2559 let edits = this
2560 .update(cx, |this, cx| {
2561 let edits = op.edits.into_iter().map(|edit| match edit {
2562 lsp::OneOf::Left(edit) => edit,
2563 lsp::OneOf::Right(edit) => edit.text_edit,
2564 });
2565 this.edits_from_lsp(
2566 &buffer_to_edit,
2567 edits,
2568 op.text_document.version,
2569 cx,
2570 )
2571 })
2572 .await?;
2573
2574 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2575 buffer.finalize_last_transaction();
2576 buffer.start_transaction();
2577 for (range, text) in edits {
2578 buffer.edit([range], text, cx);
2579 }
2580 let transaction = if buffer.end_transaction(cx).is_some() {
2581 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2582 if !push_to_history {
2583 buffer.forget_transaction(transaction.id);
2584 }
2585 Some(transaction)
2586 } else {
2587 None
2588 };
2589
2590 transaction
2591 });
2592 if let Some(transaction) = transaction {
2593 project_transaction.0.insert(buffer_to_edit, transaction);
2594 }
2595 }
2596 }
2597 }
2598
2599 Ok(project_transaction)
2600 }
2601
2602 pub fn prepare_rename<T: ToPointUtf16>(
2603 &self,
2604 buffer: ModelHandle<Buffer>,
2605 position: T,
2606 cx: &mut ModelContext<Self>,
2607 ) -> Task<Result<Option<Range<Anchor>>>> {
2608 let position = position.to_point_utf16(buffer.read(cx));
2609 self.request_lsp(buffer, PrepareRename { position }, cx)
2610 }
2611
2612 pub fn perform_rename<T: ToPointUtf16>(
2613 &self,
2614 buffer: ModelHandle<Buffer>,
2615 position: T,
2616 new_name: String,
2617 push_to_history: bool,
2618 cx: &mut ModelContext<Self>,
2619 ) -> Task<Result<ProjectTransaction>> {
2620 let position = position.to_point_utf16(buffer.read(cx));
2621 self.request_lsp(
2622 buffer,
2623 PerformRename {
2624 position,
2625 new_name,
2626 push_to_history,
2627 },
2628 cx,
2629 )
2630 }
2631
    /// Searches the project for `query`, returning matching ranges grouped by
    /// buffer.
    ///
    /// Locally this runs as a three-stage concurrent pipeline: (1) background
    /// workers scan files on disk and emit candidate paths, (2) candidate
    /// buffers are opened (already-open buffers are searched as-is, including
    /// unsaved edits), and (3) a pool of background workers searches each
    /// buffer snapshot for exact match ranges. On a remote project the search
    /// is delegated to the host over RPC.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: partition the project's file index range evenly across
            // `workers` scoped tasks and send every path whose on-disk
            // contents may match the query down `matching_paths_tx`.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        // Ceiling division so every path gets covered.
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            // Skip snapshots entirely outside
                                            // this worker's slice of the index.
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped: the
                                                    // search was abandoned.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf per
                                                    // worker instead of
                                                    // allocating per file.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed buffer snapshots to the searchers. Open buffers
            // are enqueued first; paths found on disk are opened on demand.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // `insert` returning false means the buffer was
                            // already searched in the loop above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search the buffer snapshots in parallel, collecting
            // anchored match ranges per buffer.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                // Buffers are disjoint across workers, so a flat merge is safe.
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote: the host performs the search and returns locations.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2834
    /// Routes an `LspCommand` either to the buffer's local language server
    /// or, for remote projects, over RPC to the host.
    ///
    /// Returns the response type's default when no server is available or the
    /// server lacks the required capability.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                // Build the LSP params synchronously, while the buffer can
                // still be read on this thread.
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    if !request.check_capabilities(&language_server.capabilities()) {
                        // Server doesn't support this request; succeed with
                        // the default response rather than erroring.
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        // Local buffer without a language server, or an offline project.
        Task::ready(Ok(Default::default()))
    }
2877
2878 pub fn find_or_create_local_worktree(
2879 &mut self,
2880 abs_path: impl AsRef<Path>,
2881 visible: bool,
2882 cx: &mut ModelContext<Self>,
2883 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2884 let abs_path = abs_path.as_ref();
2885 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2886 Task::ready(Ok((tree.clone(), relative_path.into())))
2887 } else {
2888 let worktree = self.create_local_worktree(abs_path, visible, cx);
2889 cx.foreground()
2890 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2891 }
2892 }
2893
2894 pub fn find_local_worktree(
2895 &self,
2896 abs_path: &Path,
2897 cx: &AppContext,
2898 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2899 for tree in self.worktrees(cx) {
2900 if let Some(relative_path) = tree
2901 .read(cx)
2902 .as_local()
2903 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2904 {
2905 return Some((tree.clone(), relative_path.into()));
2906 }
2907 }
2908 None
2909 }
2910
2911 pub fn is_shared(&self) -> bool {
2912 match &self.client_state {
2913 ProjectClientState::Local { is_shared, .. } => *is_shared,
2914 ProjectClientState::Remote { .. } => false,
2915 }
2916 }
2917
    /// Creates (or joins an in-flight load of) a local worktree rooted at
    /// `abs_path`.
    ///
    /// Concurrent calls for the same path share one loading task through
    /// `loading_local_worktrees`. Once loaded, the worktree is added to the
    /// project and — if the project is online — shared or registered with the
    /// server to match the project's current sharing state.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree =
                            Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
                                .await;
                        // Drop the in-flight entry before propagating any
                        // error, so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    // Shared futures require a cloneable error type.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            // Unwrap the shared result, converting the `Arc`'d error back
            // into a plain error for this caller.
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
2977
2978 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2979 self.worktrees.retain(|worktree| {
2980 worktree
2981 .upgrade(cx)
2982 .map_or(false, |w| w.read(cx).id() != id)
2983 });
2984 cx.notify();
2985 }
2986
2987 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2988 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2989 if worktree.read(cx).is_local() {
2990 cx.subscribe(&worktree, |this, worktree, _, cx| {
2991 this.update_local_worktree_buffers(worktree, cx);
2992 })
2993 .detach();
2994 }
2995
2996 let push_strong_handle = {
2997 let worktree = worktree.read(cx);
2998 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2999 };
3000 if push_strong_handle {
3001 self.worktrees
3002 .push(WorktreeHandle::Strong(worktree.clone()));
3003 } else {
3004 cx.observe_release(&worktree, |this, _, cx| {
3005 this.worktrees
3006 .retain(|worktree| worktree.upgrade(cx).is_some());
3007 cx.notify();
3008 })
3009 .detach();
3010 self.worktrees
3011 .push(WorktreeHandle::Weak(worktree.downgrade()));
3012 }
3013 cx.notify();
3014 }
3015
    /// Reconciles every open buffer belonging to `worktree_handle` with the
    /// worktree's latest snapshot after a filesystem scan.
    ///
    /// Each buffer's `File` is refreshed to point at the entry with the same
    /// id (tracking renames), else the entry at the same path, else marked
    /// as deleted (no entry id). File changes are broadcast to collaborators
    /// when the project is online, and dropped buffer handles are pruned
    /// from `opened_buffers`.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only reconcile buffers that belong to this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer matching by entry id (survives renames),
                        // then by path; otherwise the file is gone from disk.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // Keep the old path/mtime but drop the entry id
                            // to mark the file as deleted.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // Buffer handle is dead; record it for removal after the
                // loop (can't mutate the map while iterating it).
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }
    }
3083
3084 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3085 let new_active_entry = entry.and_then(|project_path| {
3086 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3087 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3088 Some(ProjectEntry {
3089 worktree_id: project_path.worktree_id,
3090 entry_id: entry.id,
3091 })
3092 });
3093 if new_active_entry != self.active_entry {
3094 self.active_entry = new_active_entry;
3095 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3096 }
3097 }
3098
    /// Whether any language server is currently running a disk-based
    /// diagnostics pass.
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
3102
3103 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3104 let mut summary = DiagnosticSummary::default();
3105 for (_, path_summary) in self.diagnostic_summaries(cx) {
3106 summary.error_count += path_summary.error_count;
3107 summary.warning_count += path_summary.warning_count;
3108 summary.info_count += path_summary.info_count;
3109 summary.hint_count += path_summary.hint_count;
3110 }
3111 summary
3112 }
3113
3114 pub fn diagnostic_summaries<'a>(
3115 &'a self,
3116 cx: &'a AppContext,
3117 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3118 self.worktrees(cx).flat_map(move |worktree| {
3119 let worktree = worktree.read(cx);
3120 let worktree_id = worktree.id();
3121 worktree
3122 .diagnostic_summaries()
3123 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3124 })
3125 }
3126
3127 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3128 self.language_servers_with_diagnostics_running += 1;
3129 if self.language_servers_with_diagnostics_running == 1 {
3130 cx.emit(Event::DiskBasedDiagnosticsStarted);
3131 }
3132 }
3133
    /// Notes that a language server finished a disk-based diagnostics pass.
    /// Always emits `DiskBasedDiagnosticsUpdated`; additionally emits
    /// `DiskBasedDiagnosticsFinished` when the last running pass completes.
    pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::DiskBasedDiagnosticsUpdated);
        self.language_servers_with_diagnostics_running -= 1;
        if self.language_servers_with_diagnostics_running == 0 {
            cx.emit(Event::DiskBasedDiagnosticsFinished);
        }
    }
3141
    /// The most recently activated project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
3145
3146 // RPC message handlers
3147
    /// Handles the host revoking this replica's access to the project.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.project_unshared(cx));
        Ok(())
    }
3157
3158 async fn handle_add_collaborator(
3159 this: ModelHandle<Self>,
3160 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3161 _: Arc<Client>,
3162 mut cx: AsyncAppContext,
3163 ) -> Result<()> {
3164 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3165 let collaborator = envelope
3166 .payload
3167 .collaborator
3168 .take()
3169 .ok_or_else(|| anyhow!("empty collaborator"))?;
3170
3171 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3172 this.update(&mut cx, |this, cx| {
3173 this.collaborators
3174 .insert(collaborator.peer_id, collaborator);
3175 cx.notify();
3176 });
3177
3178 Ok(())
3179 }
3180
    /// Handles a collaborator leaving the project: removes them from the
    /// collaborator map and clears their replica's state from every open
    /// buffer.
    async fn handle_remove_collaborator(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let peer_id = PeerId(envelope.payload.peer_id);
            let replica_id = this
                .collaborators
                .remove(&peer_id)
                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
                .replica_id;
            for (_, buffer) in &this.opened_buffers {
                if let Some(buffer) = buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
                }
            }
            cx.notify();
            Ok(())
        })
    }
3203
    /// Handles the host adding a worktree to the shared project: constructs a
    /// remote worktree shell from the metadata and adds it to this replica.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // Only metadata arrives here; entries and diagnostics stream in
            // via subsequent update messages.
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            load_task.detach();
            Ok(())
        })
    }
3227
3228 async fn handle_unregister_worktree(
3229 this: ModelHandle<Self>,
3230 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3231 _: Arc<Client>,
3232 mut cx: AsyncAppContext,
3233 ) -> Result<()> {
3234 this.update(&mut cx, |this, cx| {
3235 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3236 this.remove_worktree(worktree_id, cx);
3237 Ok(())
3238 })
3239 }
3240
3241 async fn handle_update_worktree(
3242 this: ModelHandle<Self>,
3243 envelope: TypedEnvelope<proto::UpdateWorktree>,
3244 _: Arc<Client>,
3245 mut cx: AsyncAppContext,
3246 ) -> Result<()> {
3247 this.update(&mut cx, |this, cx| {
3248 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3249 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3250 worktree.update(cx, |worktree, _| {
3251 let worktree = worktree.as_remote_mut().unwrap();
3252 worktree.update_from_remote(envelope)
3253 })?;
3254 }
3255 Ok(())
3256 })
3257 }
3258
3259 async fn handle_update_diagnostic_summary(
3260 this: ModelHandle<Self>,
3261 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3262 _: Arc<Client>,
3263 mut cx: AsyncAppContext,
3264 ) -> Result<()> {
3265 this.update(&mut cx, |this, cx| {
3266 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3267 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3268 if let Some(summary) = envelope.payload.summary {
3269 let project_path = ProjectPath {
3270 worktree_id,
3271 path: Path::new(&summary.path).into(),
3272 };
3273 worktree.update(cx, |worktree, _| {
3274 worktree
3275 .as_remote_mut()
3276 .unwrap()
3277 .update_diagnostic_summary(project_path.path.clone(), &summary);
3278 });
3279 cx.emit(Event::DiagnosticsUpdated(project_path));
3280 }
3281 }
3282 Ok(())
3283 })
3284 }
3285
3286 async fn handle_start_language_server(
3287 this: ModelHandle<Self>,
3288 envelope: TypedEnvelope<proto::StartLanguageServer>,
3289 _: Arc<Client>,
3290 mut cx: AsyncAppContext,
3291 ) -> Result<()> {
3292 let server = envelope
3293 .payload
3294 .server
3295 .ok_or_else(|| anyhow!("invalid server"))?;
3296 this.update(&mut cx, |this, cx| {
3297 this.language_server_statuses.insert(
3298 server.id as usize,
3299 LanguageServerStatus {
3300 name: server.name,
3301 pending_work: Default::default(),
3302 pending_diagnostic_updates: 0,
3303 },
3304 );
3305 cx.notify();
3306 });
3307 Ok(())
3308 }
3309
    /// Message handler: relays language-server status events from the host to
    /// this replica's per-server status (work progress tokens and disk-based
    /// diagnostic updates).
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            // Wire percentage is an integer; widen to usize.
                            percentage: payload.percentage.map(|p| p as usize),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3357
3358 async fn handle_update_buffer(
3359 this: ModelHandle<Self>,
3360 envelope: TypedEnvelope<proto::UpdateBuffer>,
3361 _: Arc<Client>,
3362 mut cx: AsyncAppContext,
3363 ) -> Result<()> {
3364 this.update(&mut cx, |this, cx| {
3365 let payload = envelope.payload.clone();
3366 let buffer_id = payload.buffer_id;
3367 let ops = payload
3368 .operations
3369 .into_iter()
3370 .map(|op| language::proto::deserialize_operation(op))
3371 .collect::<Result<Vec<_>, _>>()?;
3372 match this.opened_buffers.entry(buffer_id) {
3373 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3374 OpenBuffer::Strong(buffer) => {
3375 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3376 }
3377 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3378 OpenBuffer::Weak(_) => {}
3379 },
3380 hash_map::Entry::Vacant(e) => {
3381 e.insert(OpenBuffer::Loading(ops));
3382 }
3383 }
3384 Ok(())
3385 })
3386 }
3387
3388 async fn handle_update_buffer_file(
3389 this: ModelHandle<Self>,
3390 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3391 _: Arc<Client>,
3392 mut cx: AsyncAppContext,
3393 ) -> Result<()> {
3394 this.update(&mut cx, |this, cx| {
3395 let payload = envelope.payload.clone();
3396 let buffer_id = payload.buffer_id;
3397 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3398 let worktree = this
3399 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3400 .ok_or_else(|| anyhow!("no such worktree"))?;
3401 let file = File::from_proto(file, worktree.clone(), cx)?;
3402 let buffer = this
3403 .opened_buffers
3404 .get_mut(&buffer_id)
3405 .and_then(|b| b.upgrade(cx))
3406 .ok_or_else(|| anyhow!("no such buffer"))?;
3407 buffer.update(cx, |buffer, cx| {
3408 buffer.file_updated(Box::new(file), cx).detach();
3409 });
3410 Ok(())
3411 })
3412 }
3413
3414 async fn handle_save_buffer(
3415 this: ModelHandle<Self>,
3416 envelope: TypedEnvelope<proto::SaveBuffer>,
3417 _: Arc<Client>,
3418 mut cx: AsyncAppContext,
3419 ) -> Result<proto::BufferSaved> {
3420 let buffer_id = envelope.payload.buffer_id;
3421 let requested_version = deserialize_version(envelope.payload.version);
3422
3423 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3424 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3425 let buffer = this
3426 .opened_buffers
3427 .get(&buffer_id)
3428 .map(|buffer| buffer.upgrade(cx).unwrap())
3429 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3430 Ok::<_, anyhow::Error>((project_id, buffer))
3431 })?;
3432 buffer
3433 .update(&mut cx, |buffer, _| {
3434 buffer.wait_for_version(requested_version)
3435 })
3436 .await;
3437
3438 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3439 Ok(proto::BufferSaved {
3440 project_id,
3441 buffer_id,
3442 version: serialize_version(&saved_version),
3443 mtime: Some(mtime.into()),
3444 })
3445 }
3446
3447 async fn handle_format_buffers(
3448 this: ModelHandle<Self>,
3449 envelope: TypedEnvelope<proto::FormatBuffers>,
3450 _: Arc<Client>,
3451 mut cx: AsyncAppContext,
3452 ) -> Result<proto::FormatBuffersResponse> {
3453 let sender_id = envelope.original_sender_id()?;
3454 let format = this.update(&mut cx, |this, cx| {
3455 let mut buffers = HashSet::default();
3456 for buffer_id in &envelope.payload.buffer_ids {
3457 buffers.insert(
3458 this.opened_buffers
3459 .get(buffer_id)
3460 .map(|buffer| buffer.upgrade(cx).unwrap())
3461 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3462 );
3463 }
3464 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3465 })?;
3466
3467 let project_transaction = format.await?;
3468 let project_transaction = this.update(&mut cx, |this, cx| {
3469 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3470 });
3471 Ok(proto::FormatBuffersResponse {
3472 transaction: Some(project_transaction),
3473 })
3474 }
3475
3476 async fn handle_get_completions(
3477 this: ModelHandle<Self>,
3478 envelope: TypedEnvelope<proto::GetCompletions>,
3479 _: Arc<Client>,
3480 mut cx: AsyncAppContext,
3481 ) -> Result<proto::GetCompletionsResponse> {
3482 let position = envelope
3483 .payload
3484 .position
3485 .and_then(language::proto::deserialize_anchor)
3486 .ok_or_else(|| anyhow!("invalid position"))?;
3487 let version = deserialize_version(envelope.payload.version);
3488 let buffer = this.read_with(&cx, |this, cx| {
3489 this.opened_buffers
3490 .get(&envelope.payload.buffer_id)
3491 .map(|buffer| buffer.upgrade(cx).unwrap())
3492 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3493 })?;
3494 buffer
3495 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3496 .await;
3497 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3498 let completions = this
3499 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3500 .await?;
3501
3502 Ok(proto::GetCompletionsResponse {
3503 completions: completions
3504 .iter()
3505 .map(language::proto::serialize_completion)
3506 .collect(),
3507 version: serialize_version(&version),
3508 })
3509 }
3510
3511 async fn handle_apply_additional_edits_for_completion(
3512 this: ModelHandle<Self>,
3513 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3514 _: Arc<Client>,
3515 mut cx: AsyncAppContext,
3516 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3517 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3518 let buffer = this
3519 .opened_buffers
3520 .get(&envelope.payload.buffer_id)
3521 .map(|buffer| buffer.upgrade(cx).unwrap())
3522 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3523 let language = buffer.read(cx).language();
3524 let completion = language::proto::deserialize_completion(
3525 envelope
3526 .payload
3527 .completion
3528 .ok_or_else(|| anyhow!("invalid completion"))?,
3529 language,
3530 )?;
3531 Ok::<_, anyhow::Error>(
3532 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3533 )
3534 })?;
3535
3536 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3537 transaction: apply_additional_edits
3538 .await?
3539 .as_ref()
3540 .map(language::proto::serialize_transaction),
3541 })
3542 }
3543
3544 async fn handle_get_code_actions(
3545 this: ModelHandle<Self>,
3546 envelope: TypedEnvelope<proto::GetCodeActions>,
3547 _: Arc<Client>,
3548 mut cx: AsyncAppContext,
3549 ) -> Result<proto::GetCodeActionsResponse> {
3550 let start = envelope
3551 .payload
3552 .start
3553 .and_then(language::proto::deserialize_anchor)
3554 .ok_or_else(|| anyhow!("invalid start"))?;
3555 let end = envelope
3556 .payload
3557 .end
3558 .and_then(language::proto::deserialize_anchor)
3559 .ok_or_else(|| anyhow!("invalid end"))?;
3560 let buffer = this.update(&mut cx, |this, cx| {
3561 this.opened_buffers
3562 .get(&envelope.payload.buffer_id)
3563 .map(|buffer| buffer.upgrade(cx).unwrap())
3564 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3565 })?;
3566 buffer
3567 .update(&mut cx, |buffer, _| {
3568 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3569 })
3570 .await;
3571
3572 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3573 let code_actions = this.update(&mut cx, |this, cx| {
3574 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3575 })?;
3576
3577 Ok(proto::GetCodeActionsResponse {
3578 actions: code_actions
3579 .await?
3580 .iter()
3581 .map(language::proto::serialize_code_action)
3582 .collect(),
3583 version: serialize_version(&version),
3584 })
3585 }
3586
3587 async fn handle_apply_code_action(
3588 this: ModelHandle<Self>,
3589 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3590 _: Arc<Client>,
3591 mut cx: AsyncAppContext,
3592 ) -> Result<proto::ApplyCodeActionResponse> {
3593 let sender_id = envelope.original_sender_id()?;
3594 let action = language::proto::deserialize_code_action(
3595 envelope
3596 .payload
3597 .action
3598 .ok_or_else(|| anyhow!("invalid action"))?,
3599 )?;
3600 let apply_code_action = this.update(&mut cx, |this, cx| {
3601 let buffer = this
3602 .opened_buffers
3603 .get(&envelope.payload.buffer_id)
3604 .map(|buffer| buffer.upgrade(cx).unwrap())
3605 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3606 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3607 })?;
3608
3609 let project_transaction = apply_code_action.await?;
3610 let project_transaction = this.update(&mut cx, |this, cx| {
3611 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3612 });
3613 Ok(proto::ApplyCodeActionResponse {
3614 transaction: Some(project_transaction),
3615 })
3616 }
3617
    /// Generic message handler for LSP-backed requests forwarded by guests.
    ///
    /// `T` defines how to decode the proto request, which LSP request to run,
    /// and how to encode the response. The buffer's version is captured before
    /// the request runs so the response can be interpreted against the text it
    /// was computed from.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .map(|buffer| buffer.upgrade(&cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // Decoding is async: command types may wait on buffer state here.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3658
3659 async fn handle_get_project_symbols(
3660 this: ModelHandle<Self>,
3661 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3662 _: Arc<Client>,
3663 mut cx: AsyncAppContext,
3664 ) -> Result<proto::GetProjectSymbolsResponse> {
3665 let symbols = this
3666 .update(&mut cx, |this, cx| {
3667 this.symbols(&envelope.payload.query, cx)
3668 })
3669 .await?;
3670
3671 Ok(proto::GetProjectSymbolsResponse {
3672 symbols: symbols.iter().map(serialize_symbol).collect(),
3673 })
3674 }
3675
3676 async fn handle_search_project(
3677 this: ModelHandle<Self>,
3678 envelope: TypedEnvelope<proto::SearchProject>,
3679 _: Arc<Client>,
3680 mut cx: AsyncAppContext,
3681 ) -> Result<proto::SearchProjectResponse> {
3682 let peer_id = envelope.original_sender_id()?;
3683 let query = SearchQuery::from_proto(envelope.payload)?;
3684 let result = this
3685 .update(&mut cx, |this, cx| this.search(query, cx))
3686 .await?;
3687
3688 this.update(&mut cx, |this, cx| {
3689 let mut locations = Vec::new();
3690 for (buffer, ranges) in result {
3691 for range in ranges {
3692 let start = serialize_anchor(&range.start);
3693 let end = serialize_anchor(&range.end);
3694 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3695 locations.push(proto::Location {
3696 buffer: Some(buffer),
3697 start: Some(start),
3698 end: Some(end),
3699 });
3700 }
3701 }
3702 Ok(proto::SearchProjectResponse { locations })
3703 })
3704 }
3705
3706 async fn handle_open_buffer_for_symbol(
3707 this: ModelHandle<Self>,
3708 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3709 _: Arc<Client>,
3710 mut cx: AsyncAppContext,
3711 ) -> Result<proto::OpenBufferForSymbolResponse> {
3712 let peer_id = envelope.original_sender_id()?;
3713 let symbol = envelope
3714 .payload
3715 .symbol
3716 .ok_or_else(|| anyhow!("invalid symbol"))?;
3717 let symbol = this.read_with(&cx, |this, _| {
3718 let symbol = this.deserialize_symbol(symbol)?;
3719 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3720 if signature == symbol.signature {
3721 Ok(symbol)
3722 } else {
3723 Err(anyhow!("invalid symbol signature"))
3724 }
3725 })?;
3726 let buffer = this
3727 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3728 .await?;
3729
3730 Ok(proto::OpenBufferForSymbolResponse {
3731 buffer: Some(this.update(&mut cx, |this, cx| {
3732 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3733 })),
3734 })
3735 }
3736
3737 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3738 let mut hasher = Sha256::new();
3739 hasher.update(worktree_id.to_proto().to_be_bytes());
3740 hasher.update(path.to_string_lossy().as_bytes());
3741 hasher.update(self.nonce.to_be_bytes());
3742 hasher.finalize().as_slice().try_into().unwrap()
3743 }
3744
3745 async fn handle_open_buffer(
3746 this: ModelHandle<Self>,
3747 envelope: TypedEnvelope<proto::OpenBuffer>,
3748 _: Arc<Client>,
3749 mut cx: AsyncAppContext,
3750 ) -> Result<proto::OpenBufferResponse> {
3751 let peer_id = envelope.original_sender_id()?;
3752 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3753 let open_buffer = this.update(&mut cx, |this, cx| {
3754 this.open_buffer(
3755 ProjectPath {
3756 worktree_id,
3757 path: PathBuf::from(envelope.payload.path).into(),
3758 },
3759 cx,
3760 )
3761 });
3762
3763 let buffer = open_buffer.await?;
3764 this.update(&mut cx, |this, cx| {
3765 Ok(proto::OpenBufferResponse {
3766 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3767 })
3768 })
3769 }
3770
3771 fn serialize_project_transaction_for_peer(
3772 &mut self,
3773 project_transaction: ProjectTransaction,
3774 peer_id: PeerId,
3775 cx: &AppContext,
3776 ) -> proto::ProjectTransaction {
3777 let mut serialized_transaction = proto::ProjectTransaction {
3778 buffers: Default::default(),
3779 transactions: Default::default(),
3780 };
3781 for (buffer, transaction) in project_transaction.0 {
3782 serialized_transaction
3783 .buffers
3784 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3785 serialized_transaction
3786 .transactions
3787 .push(language::proto::serialize_transaction(&transaction));
3788 }
3789 serialized_transaction
3790 }
3791
    /// Reconstructs a `ProjectTransaction` received from a remote peer.
    ///
    /// Buffers and transactions arrive as parallel lists. Once every buffer is
    /// resolved, each one waits for the transaction's edits to actually be
    /// applied locally before the transaction is (optionally) pushed onto that
    /// buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The edits composing this transaction may still be in flight;
                // block until the buffer has applied all of them.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
3825
3826 fn serialize_buffer_for_peer(
3827 &mut self,
3828 buffer: &ModelHandle<Buffer>,
3829 peer_id: PeerId,
3830 cx: &AppContext,
3831 ) -> proto::Buffer {
3832 let buffer_id = buffer.read(cx).remote_id();
3833 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3834 if shared_buffers.insert(buffer_id) {
3835 proto::Buffer {
3836 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3837 }
3838 } else {
3839 proto::Buffer {
3840 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3841 }
3842 }
3843 }
3844
    /// Resolves a `proto::Buffer` into a live buffer model.
    ///
    /// The wire format is either a bare id (the full state was sent to us
    /// earlier) or the complete buffer state. For a bare id, this loops —
    /// waking on the `opened_buffer` watch channel — until some other task
    /// registers the buffer. For full state, the buffer model is created,
    /// associated with its worktree file (if any), registered with the
    /// project, and the watch is signalled so id-waiters can re-check.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        // Not opened yet: wait until *any* buffer is opened,
                        // then re-check. The receiver yields `None` only when
                        // the sending side (the project) has been dropped.
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Signal tasks blocked in the `Variant::Id` loop above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
3903
    /// Decodes a `proto::Symbol` received over the wire, recomputing its label
    /// from the local language definition when one is available and falling
    /// back to a plain code label otherwise.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let language = self
            .languages
            .get_language(&serialized_symbol.language_name);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): transmuting a wire-provided integer into a symbol-kind
        // value is unsound if a peer sends an out-of-range discriminant —
        // a checked conversion would be safer. TODO confirm the valid range.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        Ok(Symbol {
            source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
            worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
            language_name: serialized_symbol.language_name.clone(),
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path: PathBuf::from(serialized_symbol.path),
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
3932
3933 async fn handle_buffer_saved(
3934 this: ModelHandle<Self>,
3935 envelope: TypedEnvelope<proto::BufferSaved>,
3936 _: Arc<Client>,
3937 mut cx: AsyncAppContext,
3938 ) -> Result<()> {
3939 let version = deserialize_version(envelope.payload.version);
3940 let mtime = envelope
3941 .payload
3942 .mtime
3943 .ok_or_else(|| anyhow!("missing mtime"))?
3944 .into();
3945
3946 this.update(&mut cx, |this, cx| {
3947 let buffer = this
3948 .opened_buffers
3949 .get(&envelope.payload.buffer_id)
3950 .and_then(|buffer| buffer.upgrade(cx));
3951 if let Some(buffer) = buffer {
3952 buffer.update(cx, |buffer, cx| {
3953 buffer.did_save(version, mtime, None, cx);
3954 });
3955 }
3956 Ok(())
3957 })
3958 }
3959
3960 async fn handle_buffer_reloaded(
3961 this: ModelHandle<Self>,
3962 envelope: TypedEnvelope<proto::BufferReloaded>,
3963 _: Arc<Client>,
3964 mut cx: AsyncAppContext,
3965 ) -> Result<()> {
3966 let payload = envelope.payload.clone();
3967 let version = deserialize_version(payload.version);
3968 let mtime = payload
3969 .mtime
3970 .ok_or_else(|| anyhow!("missing mtime"))?
3971 .into();
3972 this.update(&mut cx, |this, cx| {
3973 let buffer = this
3974 .opened_buffers
3975 .get(&payload.buffer_id)
3976 .and_then(|buffer| buffer.upgrade(cx));
3977 if let Some(buffer) = buffer {
3978 buffer.update(cx, |buffer, cx| {
3979 buffer.did_reload(version, mtime, cx);
3980 });
3981 }
3982 Ok(())
3983 })
3984 }
3985
3986 pub fn match_paths<'a>(
3987 &self,
3988 query: &'a str,
3989 include_ignored: bool,
3990 smart_case: bool,
3991 max_results: usize,
3992 cancel_flag: &'a AtomicBool,
3993 cx: &AppContext,
3994 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3995 let worktrees = self
3996 .worktrees(cx)
3997 .filter(|worktree| worktree.read(cx).is_visible())
3998 .collect::<Vec<_>>();
3999 let include_root_name = worktrees.len() > 1;
4000 let candidate_sets = worktrees
4001 .into_iter()
4002 .map(|worktree| CandidateSet {
4003 snapshot: worktree.read(cx).snapshot(),
4004 include_ignored,
4005 include_root_name,
4006 })
4007 .collect::<Vec<_>>();
4008
4009 let background = cx.background().clone();
4010 async move {
4011 fuzzy::match_paths(
4012 candidate_sets.as_slice(),
4013 query,
4014 smart_case,
4015 max_results,
4016 cancel_flag,
4017 background,
4018 )
4019 .await
4020 }
4021 }
4022
    /// Converts LSP text edits into anchor-based buffer edits.
    ///
    /// Edits are resolved against the buffer snapshot matching `version` (or
    /// the current text if `version` is `None`), validated against the
    /// snapshot's bounds, and — for multiline replacements — refined with a
    /// line diff so anchors in unchanged regions are preserved. Runs on the
    /// background executor.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop merging unless the gap is exactly one newline:
                        // the next edit starts at column 0 of the following
                        // row and the current edit ends at end-of-line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose endpoints fall outside the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the previous change was an Equal run, in
                    // which case a new edit must be started rather than
                    // extending the last one.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor after the insertion point.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4125
    /// Returns the buffer snapshot matching the LSP document `version`, or the
    /// current text snapshot when no version is given.
    ///
    /// As a side effect, prunes stored snapshots more than
    /// `OLD_VERSIONS_TO_RETAIN` versions older than the requested one.
    fn buffer_snapshot_for_lsp_version(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        version: Option<i32>,
        cx: &AppContext,
    ) -> Result<TextBufferSnapshot> {
        // How many versions behind the requested one we keep around.
        const OLD_VERSIONS_TO_RETAIN: i32 = 10;

        if let Some(version) = version {
            let buffer_id = buffer.read(cx).remote_id();
            let snapshots = self
                .buffer_snapshots
                .get_mut(&buffer_id)
                .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
            let mut found_snapshot = None;
            // Single pass: capture the matching snapshot while dropping
            // entries that are too old to be useful anymore.
            snapshots.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    false
                } else {
                    if *snapshot_version == version {
                        found_snapshot = Some(snapshot.clone());
                    }
                    true
                }
            });

            found_snapshot.ok_or_else(|| {
                anyhow!(
                    "snapshot not found for buffer {} at version {}",
                    buffer_id,
                    version
                )
            })
        } else {
            Ok((buffer.read(cx)).text_snapshot())
        }
    }
4163
4164 fn language_server_for_buffer(
4165 &self,
4166 buffer: &Buffer,
4167 cx: &AppContext,
4168 ) -> Option<&Arc<LanguageServer>> {
4169 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4170 let worktree_id = file.worktree_id(cx);
4171 self.language_servers.get(&(worktree_id, language.name()))
4172 } else {
4173 None
4174 }
4175 }
4176}
4177
4178impl WorktreeHandle {
4179 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4180 match self {
4181 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4182 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4183 }
4184 }
4185}
4186
4187impl OpenBuffer {
4188 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4189 match self {
4190 OpenBuffer::Strong(handle) => Some(handle.clone()),
4191 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4192 OpenBuffer::Loading(_) => None,
4193 }
4194 }
4195}
4196
/// A fuzzy-matching candidate source backed by a single worktree snapshot.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored (e.g. gitignored) files participate in matching.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree root name.
    include_root_name: bool,
}
4202
4203impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4204 type Candidates = CandidateSetIter<'a>;
4205
4206 fn id(&self) -> usize {
4207 self.snapshot.id().to_usize()
4208 }
4209
4210 fn len(&self) -> usize {
4211 if self.include_ignored {
4212 self.snapshot.file_count()
4213 } else {
4214 self.snapshot.visible_file_count()
4215 }
4216 }
4217
4218 fn prefix(&self) -> Arc<str> {
4219 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4220 self.snapshot.root_name().into()
4221 } else if self.include_root_name {
4222 format!("{}/", self.snapshot.root_name()).into()
4223 } else {
4224 "".into()
4225 }
4226 }
4227
4228 fn candidates(&'a self, start: usize) -> Self::Candidates {
4229 CandidateSetIter {
4230 traversal: self.snapshot.files(self.include_ignored, start),
4231 }
4232 }
4233}
4234
/// Iterator adapter that walks a worktree file traversal and yields one
/// fuzzy-match candidate per file entry.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4238
4239impl<'a> Iterator for CandidateSetIter<'a> {
4240 type Item = PathMatchCandidate<'a>;
4241
4242 fn next(&mut self) -> Option<Self::Item> {
4243 self.traversal.next().map(|entry| {
4244 if let EntryKind::File(char_bag) = entry.kind {
4245 PathMatchCandidate {
4246 path: &entry.path,
4247 char_bag,
4248 }
4249 } else {
4250 unreachable!()
4251 }
4252 })
4253 }
4254}
4255
impl Entity for Project {
    type Event = Event;

    /// On drop: tell the server we're done with this project — unregister it
    /// if we were the host, or leave it if we were a guest. Best-effort; any
    /// send failure is only logged.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // A local project has a remote id only once it has been shared.
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Gives every running language server a chance to shut down cleanly
    /// before the application exits.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
4295
4296impl Collaborator {
4297 fn from_proto(
4298 message: proto::Collaborator,
4299 user_store: &ModelHandle<UserStore>,
4300 cx: &mut AsyncAppContext,
4301 ) -> impl Future<Output = Result<Self>> {
4302 let user = user_store.update(cx, |user_store, cx| {
4303 user_store.fetch_user(message.user_id, cx)
4304 });
4305
4306 async move {
4307 Ok(Self {
4308 peer_id: PeerId(message.peer_id),
4309 user: user.await?,
4310 replica_id: message.replica_id as ReplicaId,
4311 })
4312 }
4313 }
4314}
4315
4316impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4317 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4318 Self {
4319 worktree_id,
4320 path: path.as_ref().into(),
4321 }
4322 }
4323}
4324
4325impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4326 fn from(options: lsp::CreateFileOptions) -> Self {
4327 Self {
4328 overwrite: options.overwrite.unwrap_or(false),
4329 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4330 }
4331 }
4332}
4333
4334impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4335 fn from(options: lsp::RenameFileOptions) -> Self {
4336 Self {
4337 overwrite: options.overwrite.unwrap_or(false),
4338 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4339 }
4340 }
4341}
4342
4343impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4344 fn from(options: lsp::DeleteFileOptions) -> Self {
4345 Self {
4346 recursive: options.recursive.unwrap_or(false),
4347 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4348 }
4349 }
4350}
4351
/// Converts a `Symbol` into its protobuf representation for sending over RPC.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_name: symbol.language_name.clone(),
        name: symbol.name.clone(),
        // SAFETY: assumes `symbol.kind` and the proto `kind` field share an
        // identical in-memory representation — TODO(review): confirm, and
        // prefer an explicit checked conversion over `transmute` if possible.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Lossy conversion: non-UTF-8 path components are replaced.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4371
/// Computes the path of `path` relative to `base` by walking both component
/// streams in lockstep.
///
/// While the two paths share a common prefix, components are skipped. Once
/// they diverge, one `..` is emitted for each remaining `base` component and
/// the rest of `path` is appended verbatim. `.` components in `base` are
/// treated as transparent.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut anchor = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target.next(), anchor.next()) {
            // Both exhausted: the paths were identical (or we already
            // emitted everything needed).
            (None, None) => break,
            // Base exhausted: the remainder of `path` is the answer.
            (Some(t), None) => {
                result.push(t);
                result.extend(target.by_ref());
                break;
            }
            // Path exhausted: climb out of each remaining base component.
            (None, Some(_)) => result.push(Component::ParentDir),
            // Still inside the shared prefix.
            (Some(t), Some(a)) if result.is_empty() && t == a => {}
            // `.` in base carries no directory weight.
            (Some(t), Some(a)) if a == Component::CurDir => result.push(t),
            // Diverged: back out of the rest of `base`, then descend into
            // the rest of `path`.
            (Some(t), Some(_)) => {
                result.push(Component::ParentDir);
                result.extend(anchor.by_ref().map(|_| Component::ParentDir));
                result.push(t);
                result.extend(target.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
4400
4401#[cfg(test)]
4402mod tests {
4403 use super::{Event, *};
4404 use fs::RealFs;
4405 use futures::StreamExt;
4406 use gpui::test::subscribe;
4407 use language::{
4408 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4409 ToPoint,
4410 };
4411 use lsp::Url;
4412 use serde_json::json;
4413 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4414 use unindent::Unindent as _;
4415 use util::test::temp_tree;
4416 use worktree::WorktreeHandle as _;
4417
    /// End-to-end check that worktree scanning resolves symlinks and that
    /// fuzzy path matching returns the expected entries in order.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Symlink the root itself, and one subdirectory inside the tree, so
        // the scan has to traverse through links.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // Both paths reach the same file through the directory symlink.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" against the scanned paths.
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4480
    /// Exercises the full lifecycle of per-language servers: lazy startup on
    /// first matching buffer, capability-based buffer configuration, and
    /// routing of open/change/save/close notifications to the right server.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Two fake servers with distinct completion trigger characters so we
        // can tell which capabilities each buffer was configured from.
        let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
        let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
        rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });
        json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });

        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(rust_lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));
        let json_language = Arc::new(Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                language_server: Some(json_lsp_config),
                ..Default::default()
            },
            None,
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(rust_language);
            project.languages.add(json_language);
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // Another language server is started up, and it is notified about
        // all three open buffers.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
4700
    /// Verifies that nested start/end progress notifications carrying the
    /// disk-based-diagnostics token collapse into a single
    /// started/updated/finished event sequence, and that diagnostics published
    /// in between are applied to buffers.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested progress: starting again while already in progress must not
        // produce a second "started" event.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Only when the outermost progress ends do the "updated" and
        // "finished" events fire.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The diagnostic published above shows up in the newly opened buffer.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
4819
    /// Verifies that diagnostics published against an older buffer version are
    /// translated through subsequent edits, that overlapping diagnostics are
    /// highlighted correctly, and that stale (out-of-order) publishes are
    /// reconciled against the current buffer state.
    #[gpui::test]
    async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        // Mark "disk" as a disk-based diagnostic source so these diagnostics
        // get the disk-based translation treatment.
        lsp_config
            .disk_based_diagnostic_sources
            .insert("disk".to_string());
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() { A }
            fn b() { BB }
            fn c() { CCC }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        let open_notification = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await;

        // Edit the buffer, moving the content down
        buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
        let change_notification_1 = fake_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await;
        assert!(
            change_notification_1.text_document.version > open_notification.text_document.version
        );

        // Report some diagnostics for the initial version of the buffer
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        source: Some("disk".to_string()),
                        message: "undefined variable 'CCC'".to_string(),
                        ..Default::default()
                    },
                ],
            },
        );

        // The diagnostics have moved down since they were created.
        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Point::new(4, 9)..Point::new(4, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'CCC'".to_string(),
                            is_disk_based: true,
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        }
                    }
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, 0..buffer.len()),
                [
                    ("\n\nfn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn b() { ".to_string(), None),
                    ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
                [
                    ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
                ]
            );
        });

        // Ensure overlapping diagnostics are highlighted correctly.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "unreachable statement".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
                [
                    ("fn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
                [
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
        });

        // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
        // changes since the last save.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
            buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
        });
        let change_notification_2 =
            fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
        assert!(
            change_notification_2.await.text_document.version
                > change_notification_1.text_document.version
        );

        // Handle out-of-order diagnostics
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(0..buffer.len())
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 21)..Point::new(2, 22),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
        });
    }
5112
    /// Empty diagnostic ranges get widened so something visible is
    /// highlighted: forward to the next character, or backward to the
    /// preceding one when the range sits at the end of a line.
    #[gpui::test]
    async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Inject two zero-width diagnostics directly: one mid-line (col 10 of
        // line 0) and one at the very end of line 1.
        project.update(cx, |project, cx| {
            project
                .update_buffer_diagnostics(
                    &buffer,
                    vec![
                        DiagnosticEntry {
                            range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    None,
                    cx,
                )
                .unwrap();
        });

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        buffer.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let one = ", None),
                    (";", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet two =", None),
                    (" ", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet three = 3;\n", None)
                ]
            );
        });
    }
5191
    /// Verifies that `edits_from_lsp` translates edits computed against an
    /// older document version through the buffer's subsequent local edits, so
    /// that applying them produces the intended result in the current text.
    #[gpui::test]
    async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() {
                f1();
            }
            fn b() {
                f2();
            }
            fn c() {
                f3();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Remember the document version the server saw when the buffer opened;
        // the LSP edits below will be expressed against this version.
        let mut fake_server = fake_servers.next().await.unwrap();
        let lsp_document_version = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .version;

        // Simulate editing the buffer after the language server computes some edits.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [Point::new(0, 0)..Point::new(0, 0)],
                "// above first function\n",
                cx,
            );
            buffer.edit(
                [Point::new(2, 0)..Point::new(2, 0)],
                "    // inside first function\n",
                cx,
            );
            buffer.edit(
                [Point::new(6, 4)..Point::new(6, 4)],
                "// inside second function ",
                cx,
            );

            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f1();
                    }
                    fn b() {
                        // inside second function f2();
                    }
                    fn c() {
                        f3();
                    }
                "
                .unindent()
            );
        });

        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    vec![
                        // replace body of first function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 0),
                                lsp::Position::new(3, 0),
                            ),
                            new_text: "
                                fn a() {
                                    f10();
                                }
                            "
                            .unindent(),
                        },
                        // edit inside second function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(4, 6),
                                lsp::Position::new(4, 6),
                            ),
                            new_text: "00".into(),
                        },
                        // edit inside third function via two distinct edits
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 5),
                            ),
                            new_text: "4000".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 6),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    Some(lsp_document_version),
                    cx,
                )
            })
            .await
            .unwrap();

        // Applying the translated edits yields the server's intent merged with
        // the user's intervening edits.
        buffer.update(cx, |buffer, cx| {
            for (range, new_text) in edits {
                buffer.edit([range], new_text, cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f10();
                    }
                    fn b() {
                        // inside second function f200();
                    }
                    fn c() {
                        f4000();
                    }
                "
                .unindent()
            );
        });
    }
5364
    /// Verifies that `edits_from_lsp` minimizes a large LSP diff (delete-all /
    /// reinsert-all style, as rust-analyzer emits for merge-imports) down to
    /// the small set of actual changes.
    #[gpui::test]
    async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = "
            use a::b;
            use a::c;

            fn f() {
                b();
                c();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Simulate the language server sending us a small edit in the form of a very large diff.
        // Rust-analyzer does this when performing a merge-imports code action.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    [
                        // Replace the first use statement without editing the semicolon.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 8),
                            ),
                            new_text: "a::{b, c}".into(),
                        },
                        // Reinsert the remainder of the file between the semicolon and the final
                        // newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "\n\n".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "
                                fn f() {
                                    b();
                                    c();
                                }"
                            .unindent(),
                        },
                        // Delete everything after the first newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(1, 0),
                                lsp::Position::new(7, 0),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();

        buffer.update(cx, |buffer, cx| {
            let edits = edits
                .into_iter()
                .map(|(range, text)| {
                    (
                        range.start.to_point(&buffer)..range.end.to_point(&buffer),
                        text,
                    )
                })
                .collect::<Vec<_>>();

            // The big diff collapses to exactly two minimal edits.
            assert_eq!(
                edits,
                [
                    (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                    (Point::new(1, 0)..Point::new(2, 0), "".into())
                ]
            );

            for (range, new_text) in edits {
                buffer.edit([range], new_text, cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    use a::{b, c};

                    fn f() {
                        b();
                        c();
                    }
                "
                .unindent()
            );
        });
    }
5494
5495 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5496 buffer: &Buffer,
5497 range: Range<T>,
5498 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5499 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5500 for chunk in buffer.snapshot().chunks(range, true) {
5501 if chunks
5502 .last()
5503 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5504 {
5505 chunks.last_mut().unwrap().0.push_str(chunk.text);
5506 } else {
5507 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5508 }
5509 }
5510 chunks
5511 }
5512
5513 #[gpui::test]
5514 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5515 let dir = temp_tree(json!({
5516 "root": {
5517 "dir1": {},
5518 "dir2": {
5519 "dir3": {}
5520 }
5521 }
5522 }));
5523
5524 let project = Project::test(Arc::new(RealFs), cx);
5525 let (tree, _) = project
5526 .update(cx, |project, cx| {
5527 project.find_or_create_local_worktree(&dir.path(), true, cx)
5528 })
5529 .await
5530 .unwrap();
5531
5532 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5533 .await;
5534
5535 let cancel_flag = Default::default();
5536 let results = project
5537 .read_with(cx, |project, cx| {
5538 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5539 })
5540 .await;
5541
5542 assert!(results.is_empty());
5543 }
5544
    // End-to-end go-to-definition: the fake language server reports a location
    // in a file outside the open worktree. The project should open that file
    // in a new, invisible worktree, which is released again when the last
    // handle to the definition's buffer is dropped.
    #[gpui::test]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        // Only `b.rs` is part of the project initially; `a.rs` is not.
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir/b.rs", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // The fake server resolves the definition to `a.rs`, a file outside
        // the worktree.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            // `a.rs` was opened in a second, non-visible worktree.
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        // Dropping the last handle to the definition buffer releases the
        // invisible worktree that was created on demand.
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        // Helper: each worktree's absolute path and whether it is visible.
        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
5658
5659 #[gpui::test]
5660 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5661 let fs = FakeFs::new(cx.background());
5662 fs.insert_tree(
5663 "/dir",
5664 json!({
5665 "file1": "the old contents",
5666 }),
5667 )
5668 .await;
5669
5670 let project = Project::test(fs.clone(), cx);
5671 let worktree_id = project
5672 .update(cx, |p, cx| {
5673 p.find_or_create_local_worktree("/dir", true, cx)
5674 })
5675 .await
5676 .unwrap()
5677 .0
5678 .read_with(cx, |tree, _| tree.id());
5679
5680 let buffer = project
5681 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5682 .await
5683 .unwrap();
5684 buffer
5685 .update(cx, |buffer, cx| {
5686 assert_eq!(buffer.text(), "the old contents");
5687 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5688 buffer.save(cx)
5689 })
5690 .await
5691 .unwrap();
5692
5693 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5694 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5695 }
5696
5697 #[gpui::test]
5698 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5699 let fs = FakeFs::new(cx.background());
5700 fs.insert_tree(
5701 "/dir",
5702 json!({
5703 "file1": "the old contents",
5704 }),
5705 )
5706 .await;
5707
5708 let project = Project::test(fs.clone(), cx);
5709 let worktree_id = project
5710 .update(cx, |p, cx| {
5711 p.find_or_create_local_worktree("/dir/file1", true, cx)
5712 })
5713 .await
5714 .unwrap()
5715 .0
5716 .read_with(cx, |tree, _| tree.id());
5717
5718 let buffer = project
5719 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5720 .await
5721 .unwrap();
5722 buffer
5723 .update(cx, |buffer, cx| {
5724 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5725 buffer.save(cx)
5726 })
5727 .await
5728 .unwrap();
5729
5730 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5731 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5732 }
5733
    // Saving an untitled buffer to a new path should write the file, clear the
    // buffer's dirty state, and register the buffer under that path so that a
    // subsequent open returns the very same buffer.
    #[gpui::test]
    async fn test_save_as(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({})).await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        // Create an in-memory (fileless) buffer with unsaved edits.
        let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
        buffer.update(cx, |buffer, cx| {
            buffer.edit([0..0], "abc", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        // Save it under a brand-new path inside the worktree.
        project
            .update(cx, |project, cx| {
                project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
            })
            .await
            .unwrap();
        assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
        // The buffer now has a file and is no longer dirty.
        buffer.read_with(cx, |buffer, cx| {
            assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Re-opening the saved path must yield the same buffer, not a copy.
        let opened_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "file1"), cx)
            })
            .await
            .unwrap();
        assert_eq!(opened_buffer, buffer);
    }
5775
    // Exercises worktree rescanning after renames and deletions on a real
    // filesystem: entry ids and open buffers must track files across renames,
    // and a remote replica of the worktree must converge to the same state
    // after applying the resulting update message.
    #[gpui::test(retries = 5)]
    async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "a": {
                "file1": "",
                "file2": "",
                "file3": "",
            },
            "b": {
                "c": {
                    "file4": "",
                    "file5": "",
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let rpc = project.read_with(cx, |p, _| p.client.clone());

        let (tree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
            let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
            async move { buffer.await.unwrap() }
        };
        // Entry ids should be stable across renames; capture them up front.
        let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
            tree.read_with(cx, |tree, _| {
                tree.entry_for_path(path)
                    .expect(&format!("no entry for path {}", path))
                    .id
            })
        };

        let buffer2 = buffer_for_path("a/file2", cx).await;
        let buffer3 = buffer_for_path("a/file3", cx).await;
        let buffer4 = buffer_for_path("b/c/file4", cx).await;
        let buffer5 = buffer_for_path("b/c/file5", cx).await;

        let file2_id = id_for_path("a/file2", &cx);
        let file3_id = id_for_path("a/file3", &cx);
        let file4_id = id_for_path("b/c/file4", &cx);

        // Wait for the initial scan.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Create a remote copy of this worktree.
        let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        let (remote, load_task) = cx.update(|cx| {
            Worktree::remote(
                1,
                1,
                initial_snapshot.to_proto(&Default::default(), true),
                rpc.clone(),
                cx,
            )
        });
        load_task.await;

        cx.read(|cx| {
            assert!(!buffer2.read(cx).is_dirty());
            assert!(!buffer3.read(cx).is_dirty());
            assert!(!buffer4.read(cx).is_dirty());
            assert!(!buffer5.read(cx).is_dirty());
        });

        // Rename and delete files and directories.
        tree.flush_fs_events(&cx).await;
        std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
        std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
        std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
        std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
        tree.flush_fs_events(&cx).await;

        let expected_paths = vec![
            "a",
            "a/file1",
            "a/file2.new",
            "b",
            "d",
            "d/file3",
            "d/file4",
        ];

        cx.read(|app| {
            assert_eq!(
                tree.read(app)
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );

            // Entry ids survived the renames.
            assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
            assert_eq!(id_for_path("d/file3", &cx), file3_id);
            assert_eq!(id_for_path("d/file4", &cx), file4_id);

            // Open buffers track their files' new paths...
            assert_eq!(
                buffer2.read(app).file().unwrap().path().as_ref(),
                Path::new("a/file2.new")
            );
            assert_eq!(
                buffer3.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file3")
            );
            assert_eq!(
                buffer4.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file4")
            );
            // ...while the deleted file keeps its old path and is flagged deleted.
            assert_eq!(
                buffer5.read(app).file().unwrap().path().as_ref(),
                Path::new("b/c/file5")
            );

            assert!(!buffer2.read(app).file().unwrap().is_deleted());
            assert!(!buffer3.read(app).file().unwrap().is_deleted());
            assert!(!buffer4.read(app).file().unwrap().is_deleted());
            assert!(buffer5.read(app).file().unwrap().is_deleted());
        });

        // Update the remote worktree. Check that it becomes consistent with the
        // local worktree.
        remote.update(cx, |remote, cx| {
            let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
                &initial_snapshot,
                1,
                1,
                true,
            );
            remote
                .as_remote_mut()
                .unwrap()
                .snapshot
                .apply_remote_update(update_message)
                .unwrap();

            assert_eq!(
                remote
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );
        });
    }
5927
5928 #[gpui::test]
5929 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5930 let fs = FakeFs::new(cx.background());
5931 fs.insert_tree(
5932 "/the-dir",
5933 json!({
5934 "a.txt": "a-contents",
5935 "b.txt": "b-contents",
5936 }),
5937 )
5938 .await;
5939
5940 let project = Project::test(fs.clone(), cx);
5941 let worktree_id = project
5942 .update(cx, |p, cx| {
5943 p.find_or_create_local_worktree("/the-dir", true, cx)
5944 })
5945 .await
5946 .unwrap()
5947 .0
5948 .read_with(cx, |tree, _| tree.id());
5949
5950 // Spawn multiple tasks to open paths, repeating some paths.
5951 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5952 (
5953 p.open_buffer((worktree_id, "a.txt"), cx),
5954 p.open_buffer((worktree_id, "b.txt"), cx),
5955 p.open_buffer((worktree_id, "a.txt"), cx),
5956 )
5957 });
5958
5959 let buffer_a_1 = buffer_a_1.await.unwrap();
5960 let buffer_a_2 = buffer_a_2.await.unwrap();
5961 let buffer_b = buffer_b.await.unwrap();
5962 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
5963 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
5964
5965 // There is only one buffer per path.
5966 let buffer_a_id = buffer_a_1.id();
5967 assert_eq!(buffer_a_2.id(), buffer_a_id);
5968
5969 // Open the same path again while it is still open.
5970 drop(buffer_a_1);
5971 let buffer_a_3 = project
5972 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
5973 .await
5974 .unwrap();
5975
5976 // There's still only one buffer per path.
5977 assert_eq!(buffer_a_3.id(), buffer_a_id);
5978 }
5979
    // Tracks the buffer's dirty flag and the events it emits across edits,
    // saves, and on-disk deletion of the underlying file.
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Records every non-operation event the buffer emits.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        // Dirty the buffer *before* deleting its file on disk.
        buffer3.update(cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
6119
    // When the underlying file changes on disk: a clean buffer silently
    // reloads to match the disk contents, while a dirty buffer keeps its
    // edits and is flagged as having a conflict instead.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            // A reload from disk leaves the buffer clean and conflict-free.
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
6217
    // Diagnostics that reference each other via `related_information` should
    // be grouped: each primary diagnostic and its hints share a `group_id`,
    // with `is_primary` distinguishing the primary entry, and
    // `diagnostic_group` should return exactly the members of one group.
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Two diagnostic groups, mimicking rust-analyzer's style: each hint
        // points back at its primary via a "original diagnostic" relation, and
        // each primary lists its hints as related information.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                // Group 0: primary warning with a single hint.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Group 1: primary error with two hints.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All diagnostics, ordered by position, with group ids assigned.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying a single group returns exactly its members.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
6478
    // Exercises the two-phase LSP rename flow: `prepare_rename` resolves the
    // symbol's range, then `perform_rename` applies a multi-file
    // WorkspaceEdit, returning the transaction of edited buffers.
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        // Phase 1: prepare_rename returns the renameable range around offset 7.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Phase 2: perform_rename applies a WorkspaceEdit spanning both files.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params, _| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The transaction must cover both edited buffers: `one.rs` (already
        // open) and `two.rs` (opened on demand by the rename).
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
6622
6623 #[gpui::test]
6624 async fn test_search(cx: &mut gpui::TestAppContext) {
6625 let fs = FakeFs::new(cx.background());
6626 fs.insert_tree(
6627 "/dir",
6628 json!({
6629 "one.rs": "const ONE: usize = 1;",
6630 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6631 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6632 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6633 }),
6634 )
6635 .await;
6636 let project = Project::test(fs.clone(), cx);
6637 let (tree, _) = project
6638 .update(cx, |project, cx| {
6639 project.find_or_create_local_worktree("/dir", true, cx)
6640 })
6641 .await
6642 .unwrap();
6643 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6644 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6645 .await;
6646
6647 assert_eq!(
6648 search(&project, SearchQuery::text("TWO", false, true), cx)
6649 .await
6650 .unwrap(),
6651 HashMap::from_iter([
6652 ("two.rs".to_string(), vec![6..9]),
6653 ("three.rs".to_string(), vec![37..40])
6654 ])
6655 );
6656
6657 let buffer_4 = project
6658 .update(cx, |project, cx| {
6659 project.open_buffer((worktree_id, "four.rs"), cx)
6660 })
6661 .await
6662 .unwrap();
6663 buffer_4.update(cx, |buffer, cx| {
6664 buffer.edit([20..28, 31..43], "two::TWO", cx);
6665 });
6666
6667 assert_eq!(
6668 search(&project, SearchQuery::text("TWO", false, true), cx)
6669 .await
6670 .unwrap(),
6671 HashMap::from_iter([
6672 ("two.rs".to_string(), vec![6..9]),
6673 ("three.rs".to_string(), vec![37..40]),
6674 ("four.rs".to_string(), vec![25..28, 36..39])
6675 ])
6676 );
6677
6678 async fn search(
6679 project: &ModelHandle<Project>,
6680 query: SearchQuery,
6681 cx: &mut gpui::TestAppContext,
6682 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6683 let results = project
6684 .update(cx, |project, cx| project.search(query, cx))
6685 .await?;
6686
6687 Ok(results
6688 .into_iter()
6689 .map(|(buffer, ranges)| {
6690 buffer.read_with(cx, |buffer, _| {
6691 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6692 let ranges = ranges
6693 .into_iter()
6694 .map(|range| range.to_offset(buffer))
6695 .collect::<Vec<_>>();
6696 (path, ranges)
6697 })
6698 })
6699 .collect())
6700 }
6701 }
6702}