1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use postage::watch;
27use rand::prelude::*;
28use search::SearchQuery;
29use sha2::{Digest, Sha256};
30use similar::{ChangeTag, TextDiff};
31use std::{
32 cell::RefCell,
33 cmp::{self, Ordering},
34 convert::TryInto,
35 hash::Hash,
36 mem,
37 ops::Range,
38 path::{Component, Path, PathBuf},
39 rc::Rc,
40 sync::{atomic::AtomicBool, Arc},
41 time::Instant,
42};
43use util::{post_inc, ResultExt, TryFutureExt as _};
44
45pub use fs::*;
46pub use worktree::*;
47
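/// A project: the collection of worktrees, open buffers, collaborators, and
/// language servers that make up what the user currently has open, together
/// with the client state that distinguishes a locally hosted project from a
/// remote one joined as a guest.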
48pub struct Project {
49 worktrees: Vec<WorktreeHandle>,
50 active_entry: Option<ProjectEntry>,
51 languages: Arc<LanguageRegistry>,
52 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
53 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
54 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
55 next_language_server_id: usize,
56 client: Arc<client::Client>,
57 user_store: ModelHandle<UserStore>,
58 fs: Arc<dyn Fs>,
59 client_state: ProjectClientState,
60 collaborators: HashMap<PeerId, Collaborator>,
61 subscriptions: Vec<client::Subscription>,
62 language_servers_with_diagnostics_running: isize,
63 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
64 shared_buffers: HashMap<PeerId, HashSet<u64>>,
65 loading_buffers: HashMap<
66 ProjectPath,
67 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
68 >,
69 loading_local_worktrees:
70 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
71 opened_buffers: HashMap<u64, OpenBuffer>,
72 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
73 nonce: u128,
74}
75
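/// How the project retains an open buffer: strongly while the project is
/// shared or remote, weakly otherwise, or as a backlog of operations received
/// for a buffer that hasn't finished loading yet.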
76enum OpenBuffer {
77 Strong(ModelHandle<Buffer>),
78 Weak(WeakModelHandle<Buffer>),
79 Loading(Vec<Operation>),
80}
81
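/// Like `OpenBuffer`, worktrees are retained strongly while they must stay
/// alive (e.g. while the project is shared) and weakly otherwise.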
82enum WorktreeHandle {
83 Strong(ModelHandle<Worktree>),
84 Weak(WeakModelHandle<Worktree>),
85}
86
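/// Whether this project is hosted locally or was joined from another peer,
/// along with the connection state each case needs to track.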
87enum ProjectClientState {
88 Local {
89 is_shared: bool,
90 remote_id_tx: watch::Sender<Option<u64>>,
91 remote_id_rx: watch::Receiver<Option<u64>>,
92 _maintain_remote_id_task: Task<Option<()>>,
93 },
94 Remote {
95 sharing_has_stopped: bool,
96 remote_id: u64,
97 replica_id: ReplicaId,
98 _detect_unshare_task: Task<Option<()>>,
99 },
100}
101
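/// Another participant in a shared project, with the replica id used to
/// attribute their buffer operations.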
102#[derive(Clone, Debug)]
103pub struct Collaborator {
104 pub user: Arc<User>,
105 pub peer_id: PeerId,
106 pub replica_id: ReplicaId,
107}
108
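/// Events emitted by the project for observers such as the workspace UI.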
109#[derive(Clone, Debug, PartialEq)]
110pub enum Event {
111 ActiveEntryChanged(Option<ProjectEntry>),
112 WorktreeRemoved(WorktreeId),
113 DiskBasedDiagnosticsStarted,
114 DiskBasedDiagnosticsUpdated,
115 DiskBasedDiagnosticsFinished,
116 DiagnosticsUpdated(ProjectPath),
117}
118
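/// Notifications forwarded from a language server's IO task to the project on
/// the main thread: work-done progress reports and published diagnostics.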
119enum LanguageServerEvent {
120 WorkStart {
121 token: String,
122 },
123 WorkProgress {
124 token: String,
125 progress: LanguageServerProgress,
126 },
127 WorkEnd {
128 token: String,
129 },
130 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
131}
132
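/// Bookkeeping for a running language server: its name, in-flight progress
/// tokens, and the number of pending disk-based diagnostic updates.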
133pub struct LanguageServerStatus {
134 pub name: String,
135 pub pending_work: BTreeMap<String, LanguageServerProgress>,
136 pending_diagnostic_updates: isize,
137}
138
139#[derive(Clone, Debug)]
140pub struct LanguageServerProgress {
141 pub message: Option<String>,
142 pub percentage: Option<usize>,
143 pub last_update_at: Instant,
144}
145
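/// A path to an entry, expressed relative to the worktree that contains it.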
146#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
147pub struct ProjectPath {
148 pub worktree_id: WorktreeId,
149 pub path: Arc<Path>,
150}
151
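/// Counts of a path's primary diagnostics, broken down by severity.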
152#[derive(Clone, Debug, Default, PartialEq)]
153pub struct DiagnosticSummary {
154 pub error_count: usize,
155 pub warning_count: usize,
156 pub info_count: usize,
157 pub hint_count: usize,
158}
159
160#[derive(Debug)]
161pub struct Location {
162 pub buffer: ModelHandle<Buffer>,
163 pub range: Range<language::Anchor>,
164}
165
166#[derive(Debug)]
167pub struct DocumentHighlight {
168 pub range: Range<language::Anchor>,
169 pub kind: DocumentHighlightKind,
170}
171
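/// A result from a workspace-wide symbol search, including the worktree the
/// request originated from, the worktree and path that define the symbol, and
/// a signature used to validate later requests that refer back to this symbol.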
172#[derive(Clone, Debug)]
173pub struct Symbol {
174 pub source_worktree_id: WorktreeId,
175 pub worktree_id: WorktreeId,
176 pub language_name: String,
177 pub path: PathBuf,
178 pub label: CodeLabel,
179 pub name: String,
180 pub kind: lsp::SymbolKind,
181 pub range: Range<PointUtf16>,
182 pub signature: [u8; 32],
183}
184
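/// The per-buffer transactions produced by a project-wide operation such as
/// formatting or applying a code action.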
185#[derive(Default)]
186pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
187
188impl DiagnosticSummary {
189 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
190 let mut this = Self {
191 error_count: 0,
192 warning_count: 0,
193 info_count: 0,
194 hint_count: 0,
195 };
196
197 for entry in diagnostics {
198 if entry.diagnostic.is_primary {
199 match entry.diagnostic.severity {
200 DiagnosticSeverity::ERROR => this.error_count += 1,
201 DiagnosticSeverity::WARNING => this.warning_count += 1,
202 DiagnosticSeverity::INFORMATION => this.info_count += 1,
203 DiagnosticSeverity::HINT => this.hint_count += 1,
204 _ => {}
205 }
206 }
207 }
208
209 this
210 }
211
212 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
213 proto::DiagnosticSummary {
214 path: path.to_string_lossy().to_string(),
215 error_count: self.error_count as u32,
216 warning_count: self.warning_count as u32,
217 info_count: self.info_count as u32,
218 hint_count: self.hint_count as u32,
219 }
220 }
221}
222
223#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
224pub struct ProjectEntry {
225 pub worktree_id: WorktreeId,
226 pub entry_id: usize,
227}
228
229impl Project {
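    /// Registers the project's RPC message and request handlers with the client.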
230 pub fn init(client: &Arc<Client>) {
231 client.add_entity_message_handler(Self::handle_add_collaborator);
232 client.add_entity_message_handler(Self::handle_buffer_reloaded);
233 client.add_entity_message_handler(Self::handle_buffer_saved);
234 client.add_entity_message_handler(Self::handle_start_language_server);
235 client.add_entity_message_handler(Self::handle_update_language_server);
236 client.add_entity_message_handler(Self::handle_remove_collaborator);
237 client.add_entity_message_handler(Self::handle_register_worktree);
238 client.add_entity_message_handler(Self::handle_unregister_worktree);
239 client.add_entity_message_handler(Self::handle_unshare_project);
240 client.add_entity_message_handler(Self::handle_update_buffer_file);
241 client.add_entity_message_handler(Self::handle_update_buffer);
242 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
243 client.add_entity_message_handler(Self::handle_update_worktree);
244 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
245 client.add_entity_request_handler(Self::handle_apply_code_action);
246 client.add_entity_request_handler(Self::handle_format_buffers);
247 client.add_entity_request_handler(Self::handle_get_code_actions);
248 client.add_entity_request_handler(Self::handle_get_completions);
249 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
250 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
251 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
252 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
253 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
254 client.add_entity_request_handler(Self::handle_search_project);
255 client.add_entity_request_handler(Self::handle_get_project_symbols);
256 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
257 client.add_entity_request_handler(Self::handle_open_buffer);
258 client.add_entity_request_handler(Self::handle_save_buffer);
259 }
260
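    /// Creates a project hosted on this machine. A background task watches the
    /// client's connection status and registers the project (and its worktrees)
    /// with the server whenever a connection is established, so that it can
    /// later be shared.
    ///
    /// A minimal construction sketch, mirroring `Project::test` below; the fake
    /// HTTP client and test language registry are test-support helpers rather
    /// than production requirements, and an `fs: Arc<dyn Fs>` plus an app
    /// context `cx` are assumed to be in scope:
    ///
    /// ```ignore
    /// let languages = Arc::new(LanguageRegistry::test());
    /// let http_client = client::test::FakeHttpClient::with_404_response();
    /// let client = client::Client::new(http_client.clone());
    /// let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```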
261 pub fn local(
262 client: Arc<Client>,
263 user_store: ModelHandle<UserStore>,
264 languages: Arc<LanguageRegistry>,
265 fs: Arc<dyn Fs>,
266 cx: &mut MutableAppContext,
267 ) -> ModelHandle<Self> {
268 cx.add_model(|cx: &mut ModelContext<Self>| {
269 let (remote_id_tx, remote_id_rx) = watch::channel();
270 let _maintain_remote_id_task = cx.spawn_weak({
271 let rpc = client.clone();
272 move |this, mut cx| {
273 async move {
274 let mut status = rpc.status();
275 while let Some(status) = status.next().await {
276 if let Some(this) = this.upgrade(&cx) {
277 let remote_id = if status.is_connected() {
278 let response = rpc.request(proto::RegisterProject {}).await?;
279 Some(response.project_id)
280 } else {
281 None
282 };
283
284 if let Some(project_id) = remote_id {
285 let mut registrations = Vec::new();
286 this.update(&mut cx, |this, cx| {
287 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
288 registrations.push(worktree.update(
289 cx,
290 |worktree, cx| {
291 let worktree = worktree.as_local_mut().unwrap();
292 worktree.register(project_id, cx)
293 },
294 ));
295 }
296 });
297 for registration in registrations {
298 registration.await?;
299 }
300 }
301 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
302 }
303 }
304 Ok(())
305 }
306 .log_err()
307 }
308 });
309
310 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
311 Self {
312 worktrees: Default::default(),
313 collaborators: Default::default(),
314 opened_buffers: Default::default(),
315 shared_buffers: Default::default(),
316 loading_buffers: Default::default(),
317 loading_local_worktrees: Default::default(),
318 buffer_snapshots: Default::default(),
319 client_state: ProjectClientState::Local {
320 is_shared: false,
321 remote_id_tx,
322 remote_id_rx,
323 _maintain_remote_id_task,
324 },
325 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
326 subscriptions: Vec::new(),
327 active_entry: None,
328 languages,
329 client,
330 user_store,
331 fs,
332 language_servers_with_diagnostics_running: 0,
333 language_servers: Default::default(),
334 started_language_servers: Default::default(),
335 language_server_statuses: Default::default(),
336 next_language_server_id: 0,
337 nonce: StdRng::from_entropy().gen(),
338 }
339 })
340 }
341
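    /// Joins the project identified by `remote_id` as a guest: authenticates and
    /// connects, fetches the project's worktrees, language server statuses, and
    /// collaborators from the server, and builds a guest-side `Project` that
    /// mirrors the host's state.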
342 pub async fn remote(
343 remote_id: u64,
344 client: Arc<Client>,
345 user_store: ModelHandle<UserStore>,
346 languages: Arc<LanguageRegistry>,
347 fs: Arc<dyn Fs>,
348 cx: &mut AsyncAppContext,
349 ) -> Result<ModelHandle<Self>> {
350 client.authenticate_and_connect(&cx).await?;
351
352 let response = client
353 .request(proto::JoinProject {
354 project_id: remote_id,
355 })
356 .await?;
357
358 let replica_id = response.replica_id as ReplicaId;
359
360 let mut worktrees = Vec::new();
361 for worktree in response.worktrees {
362 let (worktree, load_task) = cx
363 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
364 worktrees.push(worktree);
365 load_task.detach();
366 }
367
368 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
369 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
370 let mut this = Self {
371 worktrees: Vec::new(),
372 loading_buffers: Default::default(),
373 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
374 shared_buffers: Default::default(),
375 loading_local_worktrees: Default::default(),
376 active_entry: None,
377 collaborators: Default::default(),
378 languages,
379 user_store: user_store.clone(),
380 fs,
381 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
382 client: client.clone(),
383 client_state: ProjectClientState::Remote {
384 sharing_has_stopped: false,
385 remote_id,
386 replica_id,
387 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
388 async move {
389 let mut status = client.status();
390 let is_connected =
391 status.next().await.map_or(false, |s| s.is_connected());
392 // Even if we're initially connected, any future status change means we were momentarily disconnected.
393 if !is_connected || status.next().await.is_some() {
394 if let Some(this) = this.upgrade(&cx) {
395 this.update(&mut cx, |this, cx| this.project_unshared(cx))
396 }
397 }
398 Ok(())
399 }
400 .log_err()
401 }),
402 },
403 language_servers_with_diagnostics_running: 0,
404 language_servers: Default::default(),
405 started_language_servers: Default::default(),
406 language_server_statuses: response
407 .language_servers
408 .into_iter()
409 .map(|server| {
410 (
411 server.id as usize,
412 LanguageServerStatus {
413 name: server.name,
414 pending_work: Default::default(),
415 pending_diagnostic_updates: 0,
416 },
417 )
418 })
419 .collect(),
420 next_language_server_id: 0,
421 opened_buffers: Default::default(),
422 buffer_snapshots: Default::default(),
423 nonce: StdRng::from_entropy().gen(),
424 };
425 for worktree in worktrees {
426 this.add_worktree(&worktree, cx);
427 }
428 this
429 });
430
431 let user_ids = response
432 .collaborators
433 .iter()
434 .map(|peer| peer.user_id)
435 .collect();
436 user_store
437 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
438 .await?;
439 let mut collaborators = HashMap::default();
440 for message in response.collaborators {
441 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
442 collaborators.insert(collaborator.peer_id, collaborator);
443 }
444
445 this.update(cx, |this, _| {
446 this.collaborators = collaborators;
447 });
448
449 Ok(this)
450 }
451
452 #[cfg(any(test, feature = "test-support"))]
453 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
454 let languages = Arc::new(LanguageRegistry::test());
455 let http_client = client::test::FakeHttpClient::with_404_response();
456 let client = client::Client::new(http_client.clone());
457 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
458 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
459 }
460
461 #[cfg(any(test, feature = "test-support"))]
462 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
463 self.opened_buffers
464 .get(&remote_id)
465 .and_then(|buffer| buffer.upgrade(cx))
466 }
467
468 #[cfg(any(test, feature = "test-support"))]
469 pub fn languages(&self) -> &Arc<LanguageRegistry> {
470 &self.languages
471 }
472
473 #[cfg(any(test, feature = "test-support"))]
474 pub fn check_invariants(&self, cx: &AppContext) {
475 if self.is_local() {
476 let mut worktree_root_paths = HashMap::default();
477 for worktree in self.worktrees(cx) {
478 let worktree = worktree.read(cx);
479 let abs_path = worktree.as_local().unwrap().abs_path().clone();
480 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
481 assert_eq!(
482 prev_worktree_id,
483 None,
484 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
485 abs_path,
486 worktree.id(),
487 prev_worktree_id
488 )
489 }
490 } else {
491 let replica_id = self.replica_id();
492 for buffer in self.opened_buffers.values() {
493 if let Some(buffer) = buffer.upgrade(cx) {
494 let buffer = buffer.read(cx);
495 assert_eq!(
496 buffer.deferred_ops_len(),
497 0,
498 "replica {}, buffer {} has deferred operations",
499 replica_id,
500 buffer.remote_id()
501 );
502 }
503 }
504 }
505 }
506
507 #[cfg(any(test, feature = "test-support"))]
508 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
509 let path = path.into();
510 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
511 self.opened_buffers.iter().any(|(_, buffer)| {
512 if let Some(buffer) = buffer.upgrade(cx) {
513 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
514 if file.worktree == worktree && file.path() == &path.path {
515 return true;
516 }
517 }
518 }
519 false
520 })
521 } else {
522 false
523 }
524 }
525
526 pub fn fs(&self) -> &Arc<dyn Fs> {
527 &self.fs
528 }
529
530 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
531 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
532 *remote_id_tx.borrow_mut() = remote_id;
533 }
534
535 self.subscriptions.clear();
536 if let Some(remote_id) = remote_id {
537 self.subscriptions
538 .push(self.client.add_model_for_remote_entity(remote_id, cx));
539 }
540 }
541
542 pub fn remote_id(&self) -> Option<u64> {
543 match &self.client_state {
544 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
545 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
546 }
547 }
548
549 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
550 let mut id = None;
551 let mut watch = None;
552 match &self.client_state {
553 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
554 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
555 }
556
557 async move {
558 if let Some(id) = id {
559 return id;
560 }
561 let mut watch = watch.unwrap();
562 loop {
563 let id = *watch.borrow();
564 if let Some(id) = id {
565 return id;
566 }
567 watch.next().await;
568 }
569 }
570 }
571
572 pub fn replica_id(&self) -> ReplicaId {
573 match &self.client_state {
574 ProjectClientState::Local { .. } => 0,
575 ProjectClientState::Remote { replica_id, .. } => *replica_id,
576 }
577 }
578
579 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
580 &self.collaborators
581 }
582
583 pub fn worktrees<'a>(
584 &'a self,
585 cx: &'a AppContext,
586 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
587 self.worktrees
588 .iter()
589 .filter_map(move |worktree| worktree.upgrade(cx))
590 }
591
592 pub fn visible_worktrees<'a>(
593 &'a self,
594 cx: &'a AppContext,
595 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
596 self.worktrees.iter().filter_map(|worktree| {
597 worktree.upgrade(cx).and_then(|worktree| {
598 if worktree.read(cx).is_visible() {
599 Some(worktree)
600 } else {
601 None
602 }
603 })
604 })
605 }
606
607 pub fn worktree_for_id(
608 &self,
609 id: WorktreeId,
610 cx: &AppContext,
611 ) -> Option<ModelHandle<Worktree>> {
612 self.worktrees(cx)
613 .find(|worktree| worktree.read(cx).id() == id)
614 }
615
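    /// Shares this local project: upgrades buffer and worktree handles to strong
    /// references, informs the server, and then shares each worktree's contents
    /// with collaborators.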
616 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
617 let rpc = self.client.clone();
618 cx.spawn(|this, mut cx| async move {
619 let project_id = this.update(&mut cx, |this, cx| {
620 if let ProjectClientState::Local {
621 is_shared,
622 remote_id_rx,
623 ..
624 } = &mut this.client_state
625 {
626 *is_shared = true;
627
628 for open_buffer in this.opened_buffers.values_mut() {
629 match open_buffer {
630 OpenBuffer::Strong(_) => {}
631 OpenBuffer::Weak(buffer) => {
632 if let Some(buffer) = buffer.upgrade(cx) {
633 *open_buffer = OpenBuffer::Strong(buffer);
634 }
635 }
636 OpenBuffer::Loading(_) => unreachable!(),
637 }
638 }
639
640 for worktree_handle in this.worktrees.iter_mut() {
641 match worktree_handle {
642 WorktreeHandle::Strong(_) => {}
643 WorktreeHandle::Weak(worktree) => {
644 if let Some(worktree) = worktree.upgrade(cx) {
645 *worktree_handle = WorktreeHandle::Strong(worktree);
646 }
647 }
648 }
649 }
650
651 remote_id_rx
652 .borrow()
653 .ok_or_else(|| anyhow!("no project id"))
654 } else {
655 Err(anyhow!("can't share a remote project"))
656 }
657 })?;
658
659 rpc.request(proto::ShareProject { project_id }).await?;
660
661 let mut tasks = Vec::new();
662 this.update(&mut cx, |this, cx| {
663 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
664 worktree.update(cx, |worktree, cx| {
665 let worktree = worktree.as_local_mut().unwrap();
666 tasks.push(worktree.share(project_id, cx));
667 });
668 }
669 });
670 for task in tasks {
671 task.await?;
672 }
673 this.update(&mut cx, |_, cx| cx.notify());
674 Ok(())
675 })
676 }
677
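    /// Stops sharing this local project: downgrades buffer handles (and handles
    /// of invisible worktrees) to weak references, notifies the server, and
    /// clears all collaborator and shared-buffer state.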
678 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
679 let rpc = self.client.clone();
680 cx.spawn(|this, mut cx| async move {
681 let project_id = this.update(&mut cx, |this, cx| {
682 if let ProjectClientState::Local {
683 is_shared,
684 remote_id_rx,
685 ..
686 } = &mut this.client_state
687 {
688 *is_shared = false;
689
690 for open_buffer in this.opened_buffers.values_mut() {
691 match open_buffer {
692 OpenBuffer::Strong(buffer) => {
693 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
694 }
695 _ => {}
696 }
697 }
698
699 for worktree_handle in this.worktrees.iter_mut() {
700 match worktree_handle {
701 WorktreeHandle::Strong(worktree) => {
702 if !worktree.read(cx).is_visible() {
703 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
704 }
705 }
706 _ => {}
707 }
708 }
709
710 remote_id_rx
711 .borrow()
712 .ok_or_else(|| anyhow!("no project id"))
713 } else {
714 Err(anyhow!("can't share a remote project"))
715 }
716 })?;
717
718 rpc.send(proto::UnshareProject { project_id })?;
719 this.update(&mut cx, |this, cx| {
720 this.collaborators.clear();
721 this.shared_buffers.clear();
722 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
723 worktree.update(cx, |worktree, _| {
724 worktree.as_local_mut().unwrap().unshare();
725 });
726 }
727 cx.notify()
728 });
729 Ok(())
730 })
731 }
732
733 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
734 if let ProjectClientState::Remote {
735 sharing_has_stopped,
736 ..
737 } = &mut self.client_state
738 {
739 *sharing_has_stopped = true;
740 self.collaborators.clear();
741 cx.notify();
742 }
743 }
744
745 pub fn is_read_only(&self) -> bool {
746 match &self.client_state {
747 ProjectClientState::Local { .. } => false,
748 ProjectClientState::Remote {
749 sharing_has_stopped,
750 ..
751 } => *sharing_has_stopped,
752 }
753 }
754
755 pub fn is_local(&self) -> bool {
756 match &self.client_state {
757 ProjectClientState::Local { .. } => true,
758 ProjectClientState::Remote { .. } => false,
759 }
760 }
761
762 pub fn is_remote(&self) -> bool {
763 !self.is_local()
764 }
765
766 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
767 if self.is_remote() {
768 return Err(anyhow!("creating buffers as a guest is not supported yet"));
769 }
770
771 let buffer = cx.add_model(|cx| {
772 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
773 });
774 self.register_buffer(&buffer, cx)?;
775 Ok(buffer)
776 }
777
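    /// Opens the buffer at the given project path, reusing an already-open
    /// buffer or an in-flight load for the same path when one exists.
    ///
    /// A usage sketch (assuming a `project` handle, a known `worktree_id`, and
    /// an async context `cx` are in scope):
    ///
    /// ```ignore
    /// let path = ProjectPath { worktree_id, path: Path::new("src/lib.rs").into() };
    /// let buffer = project
    ///     .update(&mut cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```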
778 pub fn open_buffer(
779 &mut self,
780 path: impl Into<ProjectPath>,
781 cx: &mut ModelContext<Self>,
782 ) -> Task<Result<ModelHandle<Buffer>>> {
783 let project_path = path.into();
784 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
785 worktree
786 } else {
787 return Task::ready(Err(anyhow!("no such worktree")));
788 };
789
790 // If there is already a buffer for the given path, then return it.
791 let existing_buffer = self.get_open_buffer(&project_path, cx);
792 if let Some(existing_buffer) = existing_buffer {
793 return Task::ready(Ok(existing_buffer));
794 }
795
796 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
797 // If the given path is already being loaded, then wait for that existing
798 // task to complete and return the same buffer.
799 hash_map::Entry::Occupied(e) => e.get().clone(),
800
801 // Otherwise, record the fact that this path is now being loaded.
802 hash_map::Entry::Vacant(entry) => {
803 let (mut tx, rx) = postage::watch::channel();
804 entry.insert(rx.clone());
805
806 let load_buffer = if worktree.read(cx).is_local() {
807 self.open_local_buffer(&project_path.path, &worktree, cx)
808 } else {
809 self.open_remote_buffer(&project_path.path, &worktree, cx)
810 };
811
812 cx.spawn(move |this, mut cx| async move {
813 let load_result = load_buffer.await;
814 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
815 // Record the fact that the buffer is no longer loading.
816 this.loading_buffers.remove(&project_path);
817 let buffer = load_result.map_err(Arc::new)?;
818 Ok(buffer)
819 }));
820 })
821 .detach();
822 rx
823 }
824 };
825
826 cx.foreground().spawn(async move {
827 loop {
828 if let Some(result) = loading_watch.borrow().as_ref() {
829 match result {
830 Ok(buffer) => return Ok(buffer.clone()),
831 Err(error) => return Err(anyhow!("{}", error)),
832 }
833 }
834 loading_watch.next().await;
835 }
836 })
837 }
838
839 fn open_local_buffer(
840 &mut self,
841 path: &Arc<Path>,
842 worktree: &ModelHandle<Worktree>,
843 cx: &mut ModelContext<Self>,
844 ) -> Task<Result<ModelHandle<Buffer>>> {
845 let load_buffer = worktree.update(cx, |worktree, cx| {
846 let worktree = worktree.as_local_mut().unwrap();
847 worktree.load_buffer(path, cx)
848 });
849 cx.spawn(|this, mut cx| async move {
850 let buffer = load_buffer.await?;
851 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
852 Ok(buffer)
853 })
854 }
855
856 fn open_remote_buffer(
857 &mut self,
858 path: &Arc<Path>,
859 worktree: &ModelHandle<Worktree>,
860 cx: &mut ModelContext<Self>,
861 ) -> Task<Result<ModelHandle<Buffer>>> {
862 let rpc = self.client.clone();
863 let project_id = self.remote_id().unwrap();
864 let remote_worktree_id = worktree.read(cx).id();
865 let path = path.clone();
866 let path_string = path.to_string_lossy().to_string();
867 cx.spawn(|this, mut cx| async move {
868 let response = rpc
869 .request(proto::OpenBuffer {
870 project_id,
871 worktree_id: remote_worktree_id.to_proto(),
872 path: path_string,
873 })
874 .await?;
875 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
876 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
877 .await
878 })
879 }
880
881 fn open_local_buffer_via_lsp(
882 &mut self,
883 abs_path: lsp::Url,
884 lang_name: Arc<str>,
885 lang_server: Arc<LanguageServer>,
886 cx: &mut ModelContext<Self>,
887 ) -> Task<Result<ModelHandle<Buffer>>> {
888 cx.spawn(|this, mut cx| async move {
889 let abs_path = abs_path
890 .to_file_path()
891 .map_err(|_| anyhow!("can't convert URI to path"))?;
892 let (worktree, relative_path) = if let Some(result) =
893 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
894 {
895 result
896 } else {
897 let worktree = this
898 .update(&mut cx, |this, cx| {
899 this.create_local_worktree(&abs_path, false, cx)
900 })
901 .await?;
902 this.update(&mut cx, |this, cx| {
903 this.language_servers
904 .insert((worktree.read(cx).id(), lang_name), lang_server);
905 });
906 (worktree, PathBuf::new())
907 };
908
909 let project_path = ProjectPath {
910 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
911 path: relative_path.into(),
912 };
913 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
914 .await
915 })
916 }
917
918 pub fn save_buffer_as(
919 &mut self,
920 buffer: ModelHandle<Buffer>,
921 abs_path: PathBuf,
922 cx: &mut ModelContext<Project>,
923 ) -> Task<Result<()>> {
924 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
925 cx.spawn(|this, mut cx| async move {
926 let (worktree, path) = worktree_task.await?;
927 worktree
928 .update(&mut cx, |worktree, cx| {
929 worktree
930 .as_local_mut()
931 .unwrap()
932 .save_buffer_as(buffer.clone(), path, cx)
933 })
934 .await?;
935 this.update(&mut cx, |this, cx| {
936 this.assign_language_to_buffer(&buffer, cx);
937 this.register_buffer_with_language_server(&buffer, cx);
938 });
939 Ok(())
940 })
941 }
942
943 pub fn get_open_buffer(
944 &mut self,
945 path: &ProjectPath,
946 cx: &mut ModelContext<Self>,
947 ) -> Option<ModelHandle<Buffer>> {
948 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
949 self.opened_buffers.values().find_map(|buffer| {
950 let buffer = buffer.upgrade(cx)?;
951 let file = File::from_dyn(buffer.read(cx).file())?;
952 if file.worktree == worktree && file.path() == &path.path {
953 Some(buffer)
954 } else {
955 None
956 }
957 })
958 }
959
960 fn register_buffer(
961 &mut self,
962 buffer: &ModelHandle<Buffer>,
963 cx: &mut ModelContext<Self>,
964 ) -> Result<()> {
965 let remote_id = buffer.read(cx).remote_id();
966 let open_buffer = if self.is_remote() || self.is_shared() {
967 OpenBuffer::Strong(buffer.clone())
968 } else {
969 OpenBuffer::Weak(buffer.downgrade())
970 };
971
972 match self.opened_buffers.insert(remote_id, open_buffer) {
973 None => {}
974 Some(OpenBuffer::Loading(operations)) => {
975 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
976 }
977 Some(OpenBuffer::Weak(existing_handle)) => {
978 if existing_handle.upgrade(cx).is_some() {
979 Err(anyhow!(
980 "already registered buffer with remote id {}",
981 remote_id
982 ))?
983 }
984 }
985 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
986 "already registered buffer with remote id {}",
987 remote_id
988 ))?,
989 }
990 cx.subscribe(buffer, |this, buffer, event, cx| {
991 this.on_buffer_event(buffer, event, cx);
992 })
993 .detach();
994
995 self.assign_language_to_buffer(buffer, cx);
996 self.register_buffer_with_language_server(buffer, cx);
997
998 Ok(())
999 }
1000
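    /// Introduces a newly registered local buffer to its language server: seeds
    /// any diagnostics the worktree already holds for the path, sends
    /// `textDocument/didOpen`, records the snapshot corresponding to LSP version
    /// 0, and arranges for `textDocument/didClose` to be sent when the buffer is
    /// released.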
1001 fn register_buffer_with_language_server(
1002 &mut self,
1003 buffer_handle: &ModelHandle<Buffer>,
1004 cx: &mut ModelContext<Self>,
1005 ) {
1006 let buffer = buffer_handle.read(cx);
1007 let buffer_id = buffer.remote_id();
1008 if let Some(file) = File::from_dyn(buffer.file()) {
1009 if file.is_local() {
1010 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1011 let initial_snapshot = buffer.text_snapshot();
1012 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1013
1014 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1015 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1016 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1017 .log_err();
1018 }
1019 }
1020
1021 if let Some(server) = language_server {
1022 server
1023 .notify::<lsp::notification::DidOpenTextDocument>(
1024 lsp::DidOpenTextDocumentParams {
1025 text_document: lsp::TextDocumentItem::new(
1026 uri,
1027 Default::default(),
1028 0,
1029 initial_snapshot.text(),
1030 ),
1031 },
1033 )
1034 .log_err();
1035 buffer_handle.update(cx, |buffer, cx| {
1036 buffer.set_completion_triggers(
1037 server
1038 .capabilities()
1039 .completion_provider
1040 .as_ref()
1041 .and_then(|provider| provider.trigger_characters.clone())
1042 .unwrap_or(Vec::new()),
1043 cx,
1044 )
1045 });
1046 self.buffer_snapshots
1047 .insert(buffer_id, vec![(0, initial_snapshot)]);
1048 }
1049
1050 cx.observe_release(buffer_handle, |this, buffer, cx| {
1051 if let Some(file) = File::from_dyn(buffer.file()) {
1052 if file.is_local() {
1053 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1054 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1055 server
1056 .notify::<lsp::notification::DidCloseTextDocument>(
1057 lsp::DidCloseTextDocumentParams {
1058 text_document: lsp::TextDocumentIdentifier::new(
1059 uri.clone(),
1060 ),
1061 },
1062 )
1063 .log_err();
1064 }
1065 }
1066 }
1067 })
1068 .detach();
1069 }
1070 }
1071 }
1072
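    /// Reacts to buffer events: broadcasts operations to collaborators over RPC,
    /// and converts edits and saves into incremental `didChange` / `didSave`
    /// notifications for the buffer's language server.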
1073 fn on_buffer_event(
1074 &mut self,
1075 buffer: ModelHandle<Buffer>,
1076 event: &BufferEvent,
1077 cx: &mut ModelContext<Self>,
1078 ) -> Option<()> {
1079 match event {
1080 BufferEvent::Operation(operation) => {
1081 let project_id = self.remote_id()?;
1082 let request = self.client.request(proto::UpdateBuffer {
1083 project_id,
1084 buffer_id: buffer.read(cx).remote_id(),
1085 operations: vec![language::proto::serialize_operation(&operation)],
1086 });
1087 cx.background().spawn(request).detach_and_log_err(cx);
1088 }
1089 BufferEvent::Edited => {
1090 let language_server = self
1091 .language_server_for_buffer(buffer.read(cx), cx)?
1092 .clone();
1093 let buffer = buffer.read(cx);
1094 let file = File::from_dyn(buffer.file())?;
1095 let abs_path = file.as_local()?.abs_path(cx);
1096 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1097 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1098 let (version, prev_snapshot) = buffer_snapshots.last()?;
1099 let next_snapshot = buffer.text_snapshot();
1100 let next_version = version + 1;
1101
1102 let content_changes = buffer
1103 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1104 .map(|edit| {
1105 let edit_start = edit.new.start.0;
1106 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1107 let new_text = next_snapshot
1108 .text_for_range(edit.new.start.1..edit.new.end.1)
1109 .collect();
1110 lsp::TextDocumentContentChangeEvent {
1111 range: Some(lsp::Range::new(
1112 edit_start.to_lsp_position(),
1113 edit_end.to_lsp_position(),
1114 )),
1115 range_length: None,
1116 text: new_text,
1117 }
1118 })
1119 .collect();
1120
1121 buffer_snapshots.push((next_version, next_snapshot));
1122
1123 language_server
1124 .notify::<lsp::notification::DidChangeTextDocument>(
1125 lsp::DidChangeTextDocumentParams {
1126 text_document: lsp::VersionedTextDocumentIdentifier::new(
1127 uri,
1128 next_version,
1129 ),
1130 content_changes,
1131 },
1132 )
1133 .log_err();
1134 }
1135 BufferEvent::Saved => {
1136 let file = File::from_dyn(buffer.read(cx).file())?;
1137 let worktree_id = file.worktree_id(cx);
1138 let abs_path = file.as_local()?.abs_path(cx);
1139 let text_document = lsp::TextDocumentIdentifier {
1140 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1141 };
1142
1143 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1144 server
1145 .notify::<lsp::notification::DidSaveTextDocument>(
1146 lsp::DidSaveTextDocumentParams {
1147 text_document: text_document.clone(),
1148 text: None,
1149 },
1150 )
1151 .log_err();
1152 }
1153 }
1154 _ => {}
1155 }
1156
1157 None
1158 }
1159
1160 fn language_servers_for_worktree(
1161 &self,
1162 worktree_id: WorktreeId,
1163 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1164 self.language_servers.iter().filter_map(
1165 move |((language_server_worktree_id, language_name), server)| {
1166 if *language_server_worktree_id == worktree_id {
1167 Some((language_name.as_ref(), server))
1168 } else {
1169 None
1170 }
1171 },
1172 )
1173 }
1174
1175 fn assign_language_to_buffer(
1176 &mut self,
1177 buffer: &ModelHandle<Buffer>,
1178 cx: &mut ModelContext<Self>,
1179 ) -> Option<()> {
1180 // If the buffer has a language, set it and start the language server if we haven't already.
1181 let full_path = buffer.read(cx).file()?.full_path(cx);
1182 let language = self.languages.select_language(&full_path)?;
1183 buffer.update(cx, |buffer, cx| {
1184 buffer.set_language(Some(language.clone()), cx);
1185 });
1186
1187 let file = File::from_dyn(buffer.read(cx).file())?;
1188 let worktree = file.worktree.read(cx).as_local()?;
1189 let worktree_id = worktree.id();
1190 let worktree_abs_path = worktree.abs_path().clone();
1191 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1192
1193 None
1194 }
1195
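    /// Starts the language server for the given language and worktree if one
    /// hasn't been started already, funnels its progress and diagnostics
    /// notifications into `LanguageServerEvent`s handled on the main thread, and
    /// sends `didOpen` for every already-open buffer the new server should know
    /// about.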
1196 fn start_language_server(
1197 &mut self,
1198 worktree_id: WorktreeId,
1199 worktree_path: Arc<Path>,
1200 language: Arc<Language>,
1201 cx: &mut ModelContext<Self>,
1202 ) {
1203 let key = (worktree_id, language.name());
1204 self.started_language_servers
1205 .entry(key.clone())
1206 .or_insert_with(|| {
1207 let server_id = post_inc(&mut self.next_language_server_id);
1208 let language_server = self.languages.start_language_server(
1209 language.clone(),
1210 worktree_path,
1211 self.client.http_client(),
1212 cx,
1213 );
1214 cx.spawn_weak(|this, mut cx| async move {
1215 let mut language_server = language_server?.await.log_err()?;
1216 let this = this.upgrade(&cx)?;
1217 let (language_server_events_tx, language_server_events_rx) =
1218 smol::channel::unbounded();
1219
1220 language_server
1221 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1222 let language_server_events_tx = language_server_events_tx.clone();
1223 move |params| {
1224 language_server_events_tx
1225 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1226 .ok();
1227 }
1228 })
1229 .detach();
1230
1231 language_server
1232 .on_notification::<lsp::notification::Progress, _>(move |params| {
1233 let token = match params.token {
1234 lsp::NumberOrString::String(token) => token,
1235 lsp::NumberOrString::Number(token) => {
1236 log::info!("skipping numeric progress token {}", token);
1237 return;
1238 }
1239 };
1240
1241 match params.value {
1242 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1243 lsp::WorkDoneProgress::Begin(_) => {
1244 language_server_events_tx
1245 .try_send(LanguageServerEvent::WorkStart { token })
1246 .ok();
1247 }
1248 lsp::WorkDoneProgress::Report(report) => {
1249 language_server_events_tx
1250 .try_send(LanguageServerEvent::WorkProgress {
1251 token,
1252 progress: LanguageServerProgress {
1253 message: report.message,
1254 percentage: report
1255 .percentage
1256 .map(|p| p as usize),
1257 last_update_at: Instant::now(),
1258 },
1259 })
1260 .ok();
1261 }
1262 lsp::WorkDoneProgress::End(_) => {
1263 language_server_events_tx
1264 .try_send(LanguageServerEvent::WorkEnd { token })
1265 .ok();
1266 }
1267 },
1268 }
1269 })
1270 .detach();
1271
1272 // Process all the LSP events.
1273 cx.spawn(|mut cx| {
1274 let this = this.downgrade();
1275 async move {
1276 while let Ok(event) = language_server_events_rx.recv().await {
1277 let this = this.upgrade(&cx)?;
1278 this.update(&mut cx, |this, cx| {
1279 this.on_lsp_event(server_id, event, &language, cx)
1280 });
1281
1282 // Don't starve the main thread when lots of events arrive all at once.
1283 smol::future::yield_now().await;
1284 }
1285 Some(())
1286 }
1287 })
1288 .detach();
1289
1290 let language_server = language_server.initialize().await.log_err()?;
1291 this.update(&mut cx, |this, cx| {
1292 this.language_servers
1293 .insert(key.clone(), language_server.clone());
1294 this.language_server_statuses.insert(
1295 server_id,
1296 LanguageServerStatus {
1297 name: language_server.name().to_string(),
1298 pending_work: Default::default(),
1299 pending_diagnostic_updates: 0,
1300 },
1301 );
1302
1303 if let Some(project_id) = this.remote_id() {
1304 this.client
1305 .send(proto::StartLanguageServer {
1306 project_id,
1307 server: Some(proto::LanguageServer {
1308 id: server_id as u64,
1309 name: language_server.name().to_string(),
1310 }),
1311 })
1312 .log_err();
1313 }
1314
1315 // Tell the language server about every open buffer in the worktree that matches the language.
1316 for buffer in this.opened_buffers.values() {
1317 if let Some(buffer_handle) = buffer.upgrade(cx) {
1318 let buffer = buffer_handle.read(cx);
1319 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1320 file
1321 } else {
1322 continue;
1323 };
1324 let language = if let Some(language) = buffer.language() {
1325 language
1326 } else {
1327 continue;
1328 };
1329 if (file.worktree.read(cx).id(), language.name()) != key {
1330 continue;
1331 }
1332
1333 let file = file.as_local()?;
1334 let versions = this
1335 .buffer_snapshots
1336 .entry(buffer.remote_id())
1337 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1338 let (version, initial_snapshot) = versions.last().unwrap();
1339 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1340 language_server
1341 .notify::<lsp::notification::DidOpenTextDocument>(
1342 lsp::DidOpenTextDocumentParams {
1343 text_document: lsp::TextDocumentItem::new(
1344 uri,
1345 Default::default(),
1346 *version,
1347 initial_snapshot.text(),
1348 ),
1349 },
1350 )
1351 .log_err()?;
1352 buffer_handle.update(cx, |buffer, cx| {
1353 buffer.set_completion_triggers(
1354 language_server
1355 .capabilities()
1356 .completion_provider
1357 .as_ref()
1358 .and_then(|provider| {
1359 provider.trigger_characters.clone()
1360 })
1361 .unwrap_or(Vec::new()),
1362 cx,
1363 )
1364 });
1365 }
1366 }
1367
1368 cx.notify();
1369 Some(())
1370 });
1371
1372 Some(language_server)
1373 })
1374 });
1375 }
1376
1377 fn on_lsp_event(
1378 &mut self,
1379 language_server_id: usize,
1380 event: LanguageServerEvent,
1381 language: &Arc<Language>,
1382 cx: &mut ModelContext<Self>,
1383 ) {
1384 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1385 let language_server_status =
1386 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1387 status
1388 } else {
1389 return;
1390 };
1391
1392 match event {
1393 LanguageServerEvent::WorkStart { token } => {
1394 if Some(&token) == disk_diagnostics_token {
1395 language_server_status.pending_diagnostic_updates += 1;
1396 if language_server_status.pending_diagnostic_updates == 1 {
1397 self.disk_based_diagnostics_started(cx);
1398 self.broadcast_language_server_update(
1399 language_server_id,
1400 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1401 proto::LspDiskBasedDiagnosticsUpdating {},
1402 ),
1403 );
1404 }
1405 } else {
1406 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1407 self.broadcast_language_server_update(
1408 language_server_id,
1409 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1410 token,
1411 }),
1412 );
1413 }
1414 }
1415 LanguageServerEvent::WorkProgress { token, progress } => {
1416 if Some(&token) != disk_diagnostics_token {
1417 self.on_lsp_work_progress(
1418 language_server_id,
1419 token.clone(),
1420 progress.clone(),
1421 cx,
1422 );
1423 self.broadcast_language_server_update(
1424 language_server_id,
1425 proto::update_language_server::Variant::WorkProgress(
1426 proto::LspWorkProgress {
1427 token,
1428 message: progress.message,
1429 percentage: progress.percentage.map(|p| p as u32),
1430 },
1431 ),
1432 );
1433 }
1434 }
1435 LanguageServerEvent::WorkEnd { token } => {
1436 if Some(&token) == disk_diagnostics_token {
1437 language_server_status.pending_diagnostic_updates -= 1;
1438 if language_server_status.pending_diagnostic_updates == 0 {
1439 self.disk_based_diagnostics_finished(cx);
1440 self.broadcast_language_server_update(
1441 language_server_id,
1442 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1443 proto::LspDiskBasedDiagnosticsUpdated {},
1444 ),
1445 );
1446 }
1447 } else {
1448 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1449 self.broadcast_language_server_update(
1450 language_server_id,
1451 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1452 token,
1453 }),
1454 );
1455 }
1456 }
1457 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1458 language.process_diagnostics(&mut params);
1459
1460 if disk_diagnostics_token.is_none() {
1461 self.disk_based_diagnostics_started(cx);
1462 self.broadcast_language_server_update(
1463 language_server_id,
1464 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1465 proto::LspDiskBasedDiagnosticsUpdating {},
1466 ),
1467 );
1468 }
1469 self.update_diagnostics(
1470 params,
1471 language
1472 .disk_based_diagnostic_sources()
1473 .unwrap_or(&Default::default()),
1474 cx,
1475 )
1476 .log_err();
1477 if disk_diagnostics_token.is_none() {
1478 self.disk_based_diagnostics_finished(cx);
1479 self.broadcast_language_server_update(
1480 language_server_id,
1481 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1482 proto::LspDiskBasedDiagnosticsUpdated {},
1483 ),
1484 );
1485 }
1486 }
1487 }
1488 }
1489
1490 fn on_lsp_work_start(
1491 &mut self,
1492 language_server_id: usize,
1493 token: String,
1494 cx: &mut ModelContext<Self>,
1495 ) {
1496 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1497 status.pending_work.insert(
1498 token,
1499 LanguageServerProgress {
1500 message: None,
1501 percentage: None,
1502 last_update_at: Instant::now(),
1503 },
1504 );
1505 cx.notify();
1506 }
1507 }
1508
1509 fn on_lsp_work_progress(
1510 &mut self,
1511 language_server_id: usize,
1512 token: String,
1513 progress: LanguageServerProgress,
1514 cx: &mut ModelContext<Self>,
1515 ) {
1516 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1517 status.pending_work.insert(token, progress);
1518 cx.notify();
1519 }
1520 }
1521
1522 fn on_lsp_work_end(
1523 &mut self,
1524 language_server_id: usize,
1525 token: String,
1526 cx: &mut ModelContext<Self>,
1527 ) {
1528 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1529 status.pending_work.remove(&token);
1530 cx.notify();
1531 }
1532 }
1533
1534 fn broadcast_language_server_update(
1535 &self,
1536 language_server_id: usize,
1537 event: proto::update_language_server::Variant,
1538 ) {
1539 if let Some(project_id) = self.remote_id() {
1540 self.client
1541 .send(proto::UpdateLanguageServer {
1542 project_id,
1543 language_server_id: language_server_id as u64,
1544 variant: Some(event),
1545 })
1546 .log_err();
1547 }
1548 }
1549
1550 pub fn language_server_statuses(
1551 &self,
1552 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1553 self.language_server_statuses.values()
1554 }
1555
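    /// Converts an LSP `publishDiagnostics` payload into grouped
    /// `DiagnosticEntry`s: each primary diagnostic gets its own group, entries
    /// derived from its related information join that group, and severities
    /// reported for supporting diagnostics override the defaults assigned to
    /// those entries.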
1556 pub fn update_diagnostics(
1557 &mut self,
1558 params: lsp::PublishDiagnosticsParams,
1559 disk_based_sources: &HashSet<String>,
1560 cx: &mut ModelContext<Self>,
1561 ) -> Result<()> {
1562 let abs_path = params
1563 .uri
1564 .to_file_path()
1565 .map_err(|_| anyhow!("URI is not a file"))?;
1566 let mut next_group_id = 0;
1567 let mut diagnostics = Vec::default();
1568 let mut primary_diagnostic_group_ids = HashMap::default();
1569 let mut sources_by_group_id = HashMap::default();
1570 let mut supporting_diagnostic_severities = HashMap::default();
1571 for diagnostic in &params.diagnostics {
1572 let source = diagnostic.source.as_ref();
1573 let code = diagnostic.code.as_ref().map(|code| match code {
1574 lsp::NumberOrString::Number(code) => code.to_string(),
1575 lsp::NumberOrString::String(code) => code.clone(),
1576 });
1577 let range = range_from_lsp(diagnostic.range);
1578 let is_supporting = diagnostic
1579 .related_information
1580 .as_ref()
1581 .map_or(false, |infos| {
1582 infos.iter().any(|info| {
1583 primary_diagnostic_group_ids.contains_key(&(
1584 source,
1585 code.clone(),
1586 range_from_lsp(info.location.range),
1587 ))
1588 })
1589 });
1590
1591 if is_supporting {
1592 if let Some(severity) = diagnostic.severity {
1593 supporting_diagnostic_severities
1594 .insert((source, code.clone(), range), severity);
1595 }
1596 } else {
1597 let group_id = post_inc(&mut next_group_id);
1598 let is_disk_based =
1599 source.map_or(false, |source| disk_based_sources.contains(source));
1600
1601 sources_by_group_id.insert(group_id, source);
1602 primary_diagnostic_group_ids
1603 .insert((source, code.clone(), range.clone()), group_id);
1604
1605 diagnostics.push(DiagnosticEntry {
1606 range,
1607 diagnostic: Diagnostic {
1608 code: code.clone(),
1609 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1610 message: diagnostic.message.clone(),
1611 group_id,
1612 is_primary: true,
1613 is_valid: true,
1614 is_disk_based,
1615 },
1616 });
1617 if let Some(infos) = &diagnostic.related_information {
1618 for info in infos {
1619 if info.location.uri == params.uri && !info.message.is_empty() {
1620 let range = range_from_lsp(info.location.range);
1621 diagnostics.push(DiagnosticEntry {
1622 range,
1623 diagnostic: Diagnostic {
1624 code: code.clone(),
1625 severity: DiagnosticSeverity::INFORMATION,
1626 message: info.message.clone(),
1627 group_id,
1628 is_primary: false,
1629 is_valid: true,
1630 is_disk_based,
1631 },
1632 });
1633 }
1634 }
1635 }
1636 }
1637 }
1638
1639 for entry in &mut diagnostics {
1640 let diagnostic = &mut entry.diagnostic;
1641 if !diagnostic.is_primary {
1642 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1643 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1644 source,
1645 diagnostic.code.clone(),
1646 entry.range.clone(),
1647 )) {
1648 diagnostic.severity = severity;
1649 }
1650 }
1651 }
1652
1653 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1654 Ok(())
1655 }
1656
1657 pub fn update_diagnostic_entries(
1658 &mut self,
1659 abs_path: PathBuf,
1660 version: Option<i32>,
1661 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1662 cx: &mut ModelContext<Project>,
1663 ) -> Result<(), anyhow::Error> {
1664 let (worktree, relative_path) = self
1665 .find_local_worktree(&abs_path, cx)
1666 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1667 if !worktree.read(cx).is_visible() {
1668 return Ok(());
1669 }
1670
1671 let project_path = ProjectPath {
1672 worktree_id: worktree.read(cx).id(),
1673 path: relative_path.into(),
1674 };
1675
1676 for buffer in self.opened_buffers.values() {
1677 if let Some(buffer) = buffer.upgrade(cx) {
1678 if buffer
1679 .read(cx)
1680 .file()
1681 .map_or(false, |file| *file.path() == project_path.path)
1682 {
1683 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1684 break;
1685 }
1686 }
1687 }
1688 worktree.update(cx, |worktree, cx| {
1689 worktree
1690 .as_local_mut()
1691 .ok_or_else(|| anyhow!("not a local worktree"))?
1692 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1693 })?;
1694 cx.emit(Event::DiagnosticsUpdated(project_path));
1695 Ok(())
1696 }
1697
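    /// Applies a set of diagnostics to a buffer. Diagnostics computed against
    /// the file on disk are shifted through any unsaved edits (and dropped when
    /// an edit overlaps them), empty ranges are widened to span one character,
    /// and the result replaces the buffer's diagnostic set.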
1698 fn update_buffer_diagnostics(
1699 &mut self,
1700 buffer: &ModelHandle<Buffer>,
1701 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1702 version: Option<i32>,
1703 cx: &mut ModelContext<Self>,
1704 ) -> Result<()> {
1705 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1706 Ordering::Equal
1707 .then_with(|| b.is_primary.cmp(&a.is_primary))
1708 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1709 .then_with(|| a.severity.cmp(&b.severity))
1710 .then_with(|| a.message.cmp(&b.message))
1711 }
1712
1713 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1714
1715 diagnostics.sort_unstable_by(|a, b| {
1716 Ordering::Equal
1717 .then_with(|| a.range.start.cmp(&b.range.start))
1718 .then_with(|| b.range.end.cmp(&a.range.end))
1719 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1720 });
1721
1722 let mut sanitized_diagnostics = Vec::new();
1723 let mut edits_since_save = snapshot
1724 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1725 .peekable();
1726 let mut last_edit_old_end = PointUtf16::zero();
1727 let mut last_edit_new_end = PointUtf16::zero();
1728 'outer: for entry in diagnostics {
1729 let mut start = entry.range.start;
1730 let mut end = entry.range.end;
1731
1732 // Some diagnostics are based on files on disk instead of buffers'
1733 // current contents. Adjust these diagnostics' ranges to reflect
1734 // any unsaved edits.
1735 if entry.diagnostic.is_disk_based {
1736 while let Some(edit) = edits_since_save.peek() {
1737 if edit.old.end <= start {
1738 last_edit_old_end = edit.old.end;
1739 last_edit_new_end = edit.new.end;
1740 edits_since_save.next();
1741 } else if edit.old.start <= end && edit.old.end >= start {
1742 continue 'outer;
1743 } else {
1744 break;
1745 }
1746 }
1747
1748 let start_overshoot = start - last_edit_old_end;
1749 start = last_edit_new_end;
1750 start += start_overshoot;
1751
1752 let end_overshoot = end - last_edit_old_end;
1753 end = last_edit_new_end;
1754 end += end_overshoot;
1755 }
1756
1757 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1758 ..snapshot.clip_point_utf16(end, Bias::Right);
1759
1760 // Expand empty ranges by one character
1761 if range.start == range.end {
1762 range.end.column += 1;
1763 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1764 if range.start == range.end && range.end.column > 0 {
1765 range.start.column -= 1;
1766 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1767 }
1768 }
1769
1770 sanitized_diagnostics.push(DiagnosticEntry {
1771 range,
1772 diagnostic: entry.diagnostic,
1773 });
1774 }
1775 drop(edits_since_save);
1776
1777 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1778 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1779 Ok(())
1780 }
1781
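    /// Formats the given buffers. Local buffers are formatted by requesting
    /// document (or, failing that, range) formatting edits from their language
    /// servers; buffers belonging to a remote project are formatted via an RPC
    /// request to the host. Returns the transactions that were applied, keyed by
    /// buffer.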
1782 pub fn format(
1783 &self,
1784 buffers: HashSet<ModelHandle<Buffer>>,
1785 push_to_history: bool,
1786 cx: &mut ModelContext<Project>,
1787 ) -> Task<Result<ProjectTransaction>> {
1788 let mut local_buffers = Vec::new();
1789 let mut remote_buffers = None;
1790 for buffer_handle in buffers {
1791 let buffer = buffer_handle.read(cx);
1792 if let Some(file) = File::from_dyn(buffer.file()) {
1793 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1794 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1795 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1796 }
1797 } else {
1798 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1799 }
1800 } else {
1801 return Task::ready(Ok(Default::default()));
1802 }
1803 }
1804
1805 let remote_buffers = self.remote_id().zip(remote_buffers);
1806 let client = self.client.clone();
1807
1808 cx.spawn(|this, mut cx| async move {
1809 let mut project_transaction = ProjectTransaction::default();
1810
1811 if let Some((project_id, remote_buffers)) = remote_buffers {
1812 let response = client
1813 .request(proto::FormatBuffers {
1814 project_id,
1815 buffer_ids: remote_buffers
1816 .iter()
1817 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1818 .collect(),
1819 })
1820 .await?
1821 .transaction
1822 .ok_or_else(|| anyhow!("missing transaction"))?;
1823 project_transaction = this
1824 .update(&mut cx, |this, cx| {
1825 this.deserialize_project_transaction(response, push_to_history, cx)
1826 })
1827 .await?;
1828 }
1829
1830 for (buffer, buffer_abs_path, language_server) in local_buffers {
1831 let text_document = lsp::TextDocumentIdentifier::new(
1832 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1833 );
1834 let capabilities = &language_server.capabilities();
1835 let lsp_edits = if capabilities
1836 .document_formatting_provider
1837 .as_ref()
1838 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1839 {
1840 language_server
1841 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1842 text_document,
1843 options: Default::default(),
1844 work_done_progress_params: Default::default(),
1845 })
1846 .await?
1847 } else if capabilities
1848 .document_range_formatting_provider
1849 .as_ref()
1850 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1851 {
1852 let buffer_start = lsp::Position::new(0, 0);
1853 let buffer_end = buffer
1854 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1855 .to_lsp_position();
1856 language_server
1857 .request::<lsp::request::RangeFormatting>(
1858 lsp::DocumentRangeFormattingParams {
1859 text_document,
1860 range: lsp::Range::new(buffer_start, buffer_end),
1861 options: Default::default(),
1862 work_done_progress_params: Default::default(),
1863 },
1864 )
1865 .await?
1866 } else {
1867 continue;
1868 };
1869
1870 if let Some(lsp_edits) = lsp_edits {
1871 let edits = this
1872 .update(&mut cx, |this, cx| {
1873 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1874 })
1875 .await?;
1876 buffer.update(&mut cx, |buffer, cx| {
1877 buffer.finalize_last_transaction();
1878 buffer.start_transaction();
1879 for (range, text) in edits {
1880 buffer.edit([range], text, cx);
1881 }
1882 if buffer.end_transaction(cx).is_some() {
1883 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1884 if !push_to_history {
1885 buffer.forget_transaction(transaction.id);
1886 }
1887 project_transaction.0.insert(cx.handle(), transaction);
1888 }
1889 });
1890 }
1891 }
1892
1893 Ok(project_transaction)
1894 })
1895 }
1896
1897 pub fn definition<T: ToPointUtf16>(
1898 &self,
1899 buffer: &ModelHandle<Buffer>,
1900 position: T,
1901 cx: &mut ModelContext<Self>,
1902 ) -> Task<Result<Vec<Location>>> {
1903 let position = position.to_point_utf16(buffer.read(cx));
1904 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1905 }
1906
1907 pub fn references<T: ToPointUtf16>(
1908 &self,
1909 buffer: &ModelHandle<Buffer>,
1910 position: T,
1911 cx: &mut ModelContext<Self>,
1912 ) -> Task<Result<Vec<Location>>> {
1913 let position = position.to_point_utf16(buffer.read(cx));
1914 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1915 }
1916
1917 pub fn document_highlights<T: ToPointUtf16>(
1918 &self,
1919 buffer: &ModelHandle<Buffer>,
1920 position: T,
1921 cx: &mut ModelContext<Self>,
1922 ) -> Task<Result<Vec<DocumentHighlight>>> {
1923 let position = position.to_point_utf16(buffer.read(cx));
1924
1925 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1926 }
1927
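/// Searches for workspace symbols matching `query`. Locally this fans out `workspace/symbol`
/// requests to each running language server (deduplicated when a server is shared across
/// worktrees); on remote projects it forwards a `GetProjectSymbols` RPC to the host.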
1928 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1929 if self.is_local() {
1930 let mut language_servers = HashMap::default();
1931 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1932 if let Some((worktree, language)) = self
1933 .worktree_for_id(*worktree_id, cx)
1934 .and_then(|worktree| worktree.read(cx).as_local())
1935 .zip(self.languages.get_language(language_name))
1936 {
1937 language_servers
1938 .entry(Arc::as_ptr(language_server))
1939 .or_insert((
1940 language_server.clone(),
1941 *worktree_id,
1942 worktree.abs_path().clone(),
1943 language.clone(),
1944 ));
1945 }
1946 }
1947
1948 let mut requests = Vec::new();
1949 for (language_server, _, _, _) in language_servers.values() {
1950 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1951 lsp::WorkspaceSymbolParams {
1952 query: query.to_string(),
1953 ..Default::default()
1954 },
1955 ));
1956 }
1957
1958 cx.spawn_weak(|this, cx| async move {
1959 let responses = futures::future::try_join_all(requests).await?;
1960
1961 let mut symbols = Vec::new();
1962 if let Some(this) = this.upgrade(&cx) {
1963 this.read_with(&cx, |this, cx| {
1964 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1965 language_servers.into_values().zip(responses)
1966 {
1967 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1968 |lsp_symbol| {
1969 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1970 let mut worktree_id = source_worktree_id;
1971 let path;
1972 if let Some((worktree, rel_path)) =
1973 this.find_local_worktree(&abs_path, cx)
1974 {
1975 worktree_id = worktree.read(cx).id();
1976 path = rel_path;
1977 } else {
1978 path = relativize_path(&worktree_abs_path, &abs_path);
1979 }
1980
1981 let label = language
1982 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1983 .unwrap_or_else(|| {
1984 CodeLabel::plain(lsp_symbol.name.clone(), None)
1985 });
1986 let signature = this.symbol_signature(worktree_id, &path);
1987
1988 Some(Symbol {
1989 source_worktree_id,
1990 worktree_id,
1991 language_name: language.name().to_string(),
1992 name: lsp_symbol.name,
1993 kind: lsp_symbol.kind,
1994 label,
1995 path,
1996 range: range_from_lsp(lsp_symbol.location.range),
1997 signature,
1998 })
1999 },
2000 ));
2001 }
2002 })
2003 }
2004
2005 Ok(symbols)
2006 })
2007 } else if let Some(project_id) = self.remote_id() {
2008 let request = self.client.request(proto::GetProjectSymbols {
2009 project_id,
2010 query: query.to_string(),
2011 });
2012 cx.spawn_weak(|this, cx| async move {
2013 let response = request.await?;
2014 let mut symbols = Vec::new();
2015 if let Some(this) = this.upgrade(&cx) {
2016 this.read_with(&cx, |this, _| {
2017 symbols.extend(
2018 response
2019 .symbols
2020 .into_iter()
2021 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2022 );
2023 })
2024 }
2025 Ok(symbols)
2026 })
2027 } else {
2028 Task::ready(Ok(Default::default()))
2029 }
2030 }
2031
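/// Opens the buffer containing `symbol`. Locally the symbol's path is resolved against its
/// worktree and opened via the owning language server; remotely the buffer is requested from
/// the host and deserialized.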
2032 pub fn open_buffer_for_symbol(
2033 &mut self,
2034 symbol: &Symbol,
2035 cx: &mut ModelContext<Self>,
2036 ) -> Task<Result<ModelHandle<Buffer>>> {
2037 if self.is_local() {
2038 let language_server = if let Some(server) = self.language_servers.get(&(
2039 symbol.source_worktree_id,
2040 Arc::from(symbol.language_name.as_str()),
2041 )) {
2042 server.clone()
2043 } else {
2044 return Task::ready(Err(anyhow!(
2045 "language server for worktree and language not found"
2046 )));
2047 };
2048
2049 let worktree_abs_path = if let Some(worktree_abs_path) = self
2050 .worktree_for_id(symbol.worktree_id, cx)
2051 .and_then(|worktree| worktree.read(cx).as_local())
2052 .map(|local_worktree| local_worktree.abs_path())
2053 {
2054 worktree_abs_path
2055 } else {
2056 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2057 };
2058 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2059 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2060 uri
2061 } else {
2062 return Task::ready(Err(anyhow!("invalid symbol path")));
2063 };
2064
2065 self.open_local_buffer_via_lsp(
2066 symbol_uri,
2067 Arc::from(symbol.language_name.as_str()),
2068 language_server,
2069 cx,
2070 )
2071 } else if let Some(project_id) = self.remote_id() {
2072 let request = self.client.request(proto::OpenBufferForSymbol {
2073 project_id,
2074 symbol: Some(serialize_symbol(symbol)),
2075 });
2076 cx.spawn(|this, mut cx| async move {
2077 let response = request.await?;
2078 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2079 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2080 .await
2081 })
2082 } else {
2083 Task::ready(Err(anyhow!("project does not have a remote id")))
2084 }
2085 }
2086
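/// Requests completions at `position`. For local buffers this sends `textDocument/completion`
/// to the buffer's language server and converts each returned text edit into an anchored
/// `Completion`; for remote buffers it round-trips a `GetCompletions` RPC and waits for the
/// buffer to catch up to the host's version.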
2087 pub fn completions<T: ToPointUtf16>(
2088 &self,
2089 source_buffer_handle: &ModelHandle<Buffer>,
2090 position: T,
2091 cx: &mut ModelContext<Self>,
2092 ) -> Task<Result<Vec<Completion>>> {
2093 let source_buffer_handle = source_buffer_handle.clone();
2094 let source_buffer = source_buffer_handle.read(cx);
2095 let buffer_id = source_buffer.remote_id();
2096 let language = source_buffer.language().cloned();
2097 let worktree;
2098 let buffer_abs_path;
2099 if let Some(file) = File::from_dyn(source_buffer.file()) {
2100 worktree = file.worktree.clone();
2101 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2102 } else {
2103 return Task::ready(Ok(Default::default()));
2104 };
2105
2106 let position = position.to_point_utf16(source_buffer);
2107 let anchor = source_buffer.anchor_after(position);
2108
2109 if worktree.read(cx).as_local().is_some() {
2110 let buffer_abs_path = buffer_abs_path.unwrap();
2111 let lang_server =
2112 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2113 server.clone()
2114 } else {
2115 return Task::ready(Ok(Default::default()));
2116 };
2117
2118 cx.spawn(|_, cx| async move {
2119 let completions = lang_server
2120 .request::<lsp::request::Completion>(lsp::CompletionParams {
2121 text_document_position: lsp::TextDocumentPositionParams::new(
2122 lsp::TextDocumentIdentifier::new(
2123 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2124 ),
2125 position.to_lsp_position(),
2126 ),
2127 context: Default::default(),
2128 work_done_progress_params: Default::default(),
2129 partial_result_params: Default::default(),
2130 })
2131 .await
2132 .context("lsp completion request failed")?;
2133
2134 let completions = if let Some(completions) = completions {
2135 match completions {
2136 lsp::CompletionResponse::Array(completions) => completions,
2137 lsp::CompletionResponse::List(list) => list.items,
2138 }
2139 } else {
2140 Default::default()
2141 };
2142
2143 source_buffer_handle.read_with(&cx, |this, _| {
2144 Ok(completions
2145 .into_iter()
2146 .filter_map(|lsp_completion| {
2147 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2148 lsp::CompletionTextEdit::Edit(edit) => {
2149 (range_from_lsp(edit.range), edit.new_text.clone())
2150 }
2151 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2152 log::info!("unsupported insert/replace completion");
2153 return None;
2154 }
2155 };
2156
2157 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2158 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2159 if clipped_start == old_range.start && clipped_end == old_range.end {
2160 Some(Completion {
2161 old_range: this.anchor_before(old_range.start)
2162 ..this.anchor_after(old_range.end),
2163 new_text,
2164 label: language
2165 .as_ref()
2166 .and_then(|l| l.label_for_completion(&lsp_completion))
2167 .unwrap_or_else(|| {
2168 CodeLabel::plain(
2169 lsp_completion.label.clone(),
2170 lsp_completion.filter_text.as_deref(),
2171 )
2172 }),
2173 lsp_completion,
2174 })
2175 } else {
2176 None
2177 }
2178 })
2179 .collect())
2180 })
2181 })
2182 } else if let Some(project_id) = self.remote_id() {
2183 let rpc = self.client.clone();
2184 let message = proto::GetCompletions {
2185 project_id,
2186 buffer_id,
2187 position: Some(language::proto::serialize_anchor(&anchor)),
2188 version: serialize_version(&source_buffer.version()),
2189 };
2190 cx.spawn_weak(|_, mut cx| async move {
2191 let response = rpc.request(message).await?;
2192
2193 source_buffer_handle
2194 .update(&mut cx, |buffer, _| {
2195 buffer.wait_for_version(deserialize_version(response.version))
2196 })
2197 .await;
2198
2199 response
2200 .completions
2201 .into_iter()
2202 .map(|completion| {
2203 language::proto::deserialize_completion(completion, language.as_ref())
2204 })
2205 .collect()
2206 })
2207 } else {
2208 Task::ready(Ok(Default::default()))
2209 }
2210 }
2211
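/// Resolves `completion` with the language server and applies any additional text edits it
/// reports, returning the resulting transaction. On remote projects the edits are applied by
/// the host and replayed locally once they arrive.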
2212 pub fn apply_additional_edits_for_completion(
2213 &self,
2214 buffer_handle: ModelHandle<Buffer>,
2215 completion: Completion,
2216 push_to_history: bool,
2217 cx: &mut ModelContext<Self>,
2218 ) -> Task<Result<Option<Transaction>>> {
2219 let buffer = buffer_handle.read(cx);
2220 let buffer_id = buffer.remote_id();
2221
2222 if self.is_local() {
2223 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2224 server.clone()
2225 } else {
2226 return Task::ready(Ok(Default::default()));
2227 };
2228
2229 cx.spawn(|this, mut cx| async move {
2230 let resolved_completion = lang_server
2231 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2232 .await?;
2233 if let Some(edits) = resolved_completion.additional_text_edits {
2234 let edits = this
2235 .update(&mut cx, |this, cx| {
2236 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2237 })
2238 .await?;
2239 buffer_handle.update(&mut cx, |buffer, cx| {
2240 buffer.finalize_last_transaction();
2241 buffer.start_transaction();
2242 for (range, text) in edits {
2243 buffer.edit([range], text, cx);
2244 }
2245 let transaction = if buffer.end_transaction(cx).is_some() {
2246 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2247 if !push_to_history {
2248 buffer.forget_transaction(transaction.id);
2249 }
2250 Some(transaction)
2251 } else {
2252 None
2253 };
2254 Ok(transaction)
2255 })
2256 } else {
2257 Ok(None)
2258 }
2259 })
2260 } else if let Some(project_id) = self.remote_id() {
2261 let client = self.client.clone();
2262 cx.spawn(|_, mut cx| async move {
2263 let response = client
2264 .request(proto::ApplyCompletionAdditionalEdits {
2265 project_id,
2266 buffer_id,
2267 completion: Some(language::proto::serialize_completion(&completion)),
2268 })
2269 .await?;
2270
2271 if let Some(transaction) = response.transaction {
2272 let transaction = language::proto::deserialize_transaction(transaction)?;
2273 buffer_handle
2274 .update(&mut cx, |buffer, _| {
2275 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2276 })
2277 .await;
2278 if push_to_history {
2279 buffer_handle.update(&mut cx, |buffer, _| {
2280 buffer.push_transaction(transaction.clone(), Instant::now());
2281 });
2282 }
2283 Ok(Some(transaction))
2284 } else {
2285 Ok(None)
2286 }
2287 })
2288 } else {
2289 Task::ready(Err(anyhow!("project does not have a remote id")))
2290 }
2291 }
2292
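/// Fetches the code actions available for `range`. Local buffers query the language server's
/// `textDocument/codeAction` endpoint (quickfix and refactor kinds only); remote buffers go
/// through a `GetCodeActions` RPC.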
2293 pub fn code_actions<T: ToOffset>(
2294 &self,
2295 buffer_handle: &ModelHandle<Buffer>,
2296 range: Range<T>,
2297 cx: &mut ModelContext<Self>,
2298 ) -> Task<Result<Vec<CodeAction>>> {
2299 let buffer_handle = buffer_handle.clone();
2300 let buffer = buffer_handle.read(cx);
2301 let buffer_id = buffer.remote_id();
2302 let worktree;
2303 let buffer_abs_path;
2304 if let Some(file) = File::from_dyn(buffer.file()) {
2305 worktree = file.worktree.clone();
2306 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2307 } else {
2308 return Task::ready(Ok(Default::default()));
2309 };
2310 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2311
2312 if worktree.read(cx).as_local().is_some() {
2313 let buffer_abs_path = buffer_abs_path.unwrap();
2314 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2315 server.clone()
2316 } else {
2317 return Task::ready(Ok(Default::default()));
2318 };
2319
2320 let lsp_range = lsp::Range::new(
2321 range.start.to_point_utf16(buffer).to_lsp_position(),
2322 range.end.to_point_utf16(buffer).to_lsp_position(),
2323 );
2324 cx.foreground().spawn(async move {
2325 if lang_server.capabilities().code_action_provider.is_none() {
2326 return Ok(Default::default());
2327 }
2328
2329 Ok(lang_server
2330 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2331 text_document: lsp::TextDocumentIdentifier::new(
2332 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2333 ),
2334 range: lsp_range,
2335 work_done_progress_params: Default::default(),
2336 partial_result_params: Default::default(),
2337 context: lsp::CodeActionContext {
2338 diagnostics: Default::default(),
2339 only: Some(vec![
2340 lsp::CodeActionKind::QUICKFIX,
2341 lsp::CodeActionKind::REFACTOR,
2342 lsp::CodeActionKind::REFACTOR_EXTRACT,
2343 ]),
2344 },
2345 })
2346 .await?
2347 .unwrap_or_default()
2348 .into_iter()
2349 .filter_map(|entry| {
2350 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2351 Some(CodeAction {
2352 range: range.clone(),
2353 lsp_action,
2354 })
2355 } else {
2356 None
2357 }
2358 })
2359 .collect())
2360 })
2361 } else if let Some(project_id) = self.remote_id() {
2362 let rpc = self.client.clone();
2363 let version = buffer.version();
2364 cx.spawn_weak(|_, mut cx| async move {
2365 let response = rpc
2366 .request(proto::GetCodeActions {
2367 project_id,
2368 buffer_id,
2369 start: Some(language::proto::serialize_anchor(&range.start)),
2370 end: Some(language::proto::serialize_anchor(&range.end)),
2371 version: serialize_version(&version),
2372 })
2373 .await?;
2374
2375 buffer_handle
2376 .update(&mut cx, |buffer, _| {
2377 buffer.wait_for_version(deserialize_version(response.version))
2378 })
2379 .await;
2380
2381 response
2382 .actions
2383 .into_iter()
2384 .map(language::proto::deserialize_code_action)
2385 .collect()
2386 })
2387 } else {
2388 Task::ready(Ok(Default::default()))
2389 }
2390 }
2391
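/// Applies `action` to the project. Locally the action is re-resolved (or re-requested when the
/// server provided no resolve data) so its workspace edit is current, and that edit is applied
/// across all affected buffers to produce a `ProjectTransaction`. On remote projects the host
/// applies the action and sends back the transaction.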
2392 pub fn apply_code_action(
2393 &self,
2394 buffer_handle: ModelHandle<Buffer>,
2395 mut action: CodeAction,
2396 push_to_history: bool,
2397 cx: &mut ModelContext<Self>,
2398 ) -> Task<Result<ProjectTransaction>> {
2399 if self.is_local() {
2400 let buffer = buffer_handle.read(cx);
2401 let lang_name = if let Some(lang) = buffer.language() {
2402 lang.name()
2403 } else {
2404 return Task::ready(Ok(Default::default()));
2405 };
2406 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2407 server.clone()
2408 } else {
2409 return Task::ready(Ok(Default::default()));
2410 };
2411 let range = action.range.to_point_utf16(buffer);
2412
2413 cx.spawn(|this, mut cx| async move {
2414 if let Some(lsp_range) = action
2415 .lsp_action
2416 .data
2417 .as_mut()
2418 .and_then(|d| d.get_mut("codeActionParams"))
2419 .and_then(|d| d.get_mut("range"))
2420 {
2421 *lsp_range = serde_json::to_value(&lsp::Range::new(
2422 range.start.to_lsp_position(),
2423 range.end.to_lsp_position(),
2424 ))
2425 .unwrap();
2426 action.lsp_action = lang_server
2427 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2428 .await?;
2429 } else {
2430 let actions = this
2431 .update(&mut cx, |this, cx| {
2432 this.code_actions(&buffer_handle, action.range, cx)
2433 })
2434 .await?;
2435 action.lsp_action = actions
2436 .into_iter()
2437 .find(|a| a.lsp_action.title == action.lsp_action.title)
2438 .ok_or_else(|| anyhow!("code action is outdated"))?
2439 .lsp_action;
2440 }
2441
2442 if let Some(edit) = action.lsp_action.edit {
2443 Self::deserialize_workspace_edit(
2444 this,
2445 edit,
2446 push_to_history,
2447 lang_name,
2448 lang_server,
2449 &mut cx,
2450 )
2451 .await
2452 } else {
2453 Ok(ProjectTransaction::default())
2454 }
2455 })
2456 } else if let Some(project_id) = self.remote_id() {
2457 let client = self.client.clone();
2458 let request = proto::ApplyCodeAction {
2459 project_id,
2460 buffer_id: buffer_handle.read(cx).remote_id(),
2461 action: Some(language::proto::serialize_code_action(&action)),
2462 };
2463 cx.spawn(|this, mut cx| async move {
2464 let response = client
2465 .request(request)
2466 .await?
2467 .transaction
2468 .ok_or_else(|| anyhow!("missing transaction"))?;
2469 this.update(&mut cx, |this, cx| {
2470 this.deserialize_project_transaction(response, push_to_history, cx)
2471 })
2472 .await
2473 })
2474 } else {
2475 Task::ready(Err(anyhow!("project does not have a remote id")))
2476 }
2477 }
2478
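/// Applies an LSP workspace edit: resource operations (create/rename/delete) are performed
/// through the project's `Fs`, while text document edits are applied to buffers opened via the
/// language server, collecting each buffer's transaction into the returned `ProjectTransaction`.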
2479 async fn deserialize_workspace_edit(
2480 this: ModelHandle<Self>,
2481 edit: lsp::WorkspaceEdit,
2482 push_to_history: bool,
2483 language_name: Arc<str>,
2484 language_server: Arc<LanguageServer>,
2485 cx: &mut AsyncAppContext,
2486 ) -> Result<ProjectTransaction> {
2487 let fs = this.read_with(cx, |this, _| this.fs.clone());
2488 let mut operations = Vec::new();
2489 if let Some(document_changes) = edit.document_changes {
2490 match document_changes {
2491 lsp::DocumentChanges::Edits(edits) => {
2492 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2493 }
2494 lsp::DocumentChanges::Operations(ops) => operations = ops,
2495 }
2496 } else if let Some(changes) = edit.changes {
2497 operations.extend(changes.into_iter().map(|(uri, edits)| {
2498 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2499 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2500 uri,
2501 version: None,
2502 },
2503 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2504 })
2505 }));
2506 }
2507
2508 let mut project_transaction = ProjectTransaction::default();
2509 for operation in operations {
2510 match operation {
2511 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2512 let abs_path = op
2513 .uri
2514 .to_file_path()
2515 .map_err(|_| anyhow!("can't convert URI to path"))?;
2516
2517 if let Some(parent_path) = abs_path.parent() {
2518 fs.create_dir(parent_path).await?;
2519 }
2520 if abs_path.ends_with("/") {
2521 fs.create_dir(&abs_path).await?;
2522 } else {
2523 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2524 .await?;
2525 }
2526 }
2527 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2528 let source_abs_path = op
2529 .old_uri
2530 .to_file_path()
2531 .map_err(|_| anyhow!("can't convert URI to path"))?;
2532 let target_abs_path = op
2533 .new_uri
2534 .to_file_path()
2535 .map_err(|_| anyhow!("can't convert URI to path"))?;
2536 fs.rename(
2537 &source_abs_path,
2538 &target_abs_path,
2539 op.options.map(Into::into).unwrap_or_default(),
2540 )
2541 .await?;
2542 }
2543 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2544 let abs_path = op
2545 .uri
2546 .to_file_path()
2547 .map_err(|_| anyhow!("can't convert URI to path"))?;
2548 let options = op.options.map(Into::into).unwrap_or_default();
2549 if abs_path.ends_with("/") {
2550 fs.remove_dir(&abs_path, options).await?;
2551 } else {
2552 fs.remove_file(&abs_path, options).await?;
2553 }
2554 }
2555 lsp::DocumentChangeOperation::Edit(op) => {
2556 let buffer_to_edit = this
2557 .update(cx, |this, cx| {
2558 this.open_local_buffer_via_lsp(
2559 op.text_document.uri,
2560 language_name.clone(),
2561 language_server.clone(),
2562 cx,
2563 )
2564 })
2565 .await?;
2566
2567 let edits = this
2568 .update(cx, |this, cx| {
2569 let edits = op.edits.into_iter().map(|edit| match edit {
2570 lsp::OneOf::Left(edit) => edit,
2571 lsp::OneOf::Right(edit) => edit.text_edit,
2572 });
2573 this.edits_from_lsp(
2574 &buffer_to_edit,
2575 edits,
2576 op.text_document.version,
2577 cx,
2578 )
2579 })
2580 .await?;
2581
2582 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2583 buffer.finalize_last_transaction();
2584 buffer.start_transaction();
2585 for (range, text) in edits {
2586 buffer.edit([range], text, cx);
2587 }
2588 let transaction = if buffer.end_transaction(cx).is_some() {
2589 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2590 if !push_to_history {
2591 buffer.forget_transaction(transaction.id);
2592 }
2593 Some(transaction)
2594 } else {
2595 None
2596 };
2597
2598 transaction
2599 });
2600 if let Some(transaction) = transaction {
2601 project_transaction.0.insert(buffer_to_edit, transaction);
2602 }
2603 }
2604 }
2605 }
2606
2607 Ok(project_transaction)
2608 }
2609
2610 pub fn prepare_rename<T: ToPointUtf16>(
2611 &self,
2612 buffer: ModelHandle<Buffer>,
2613 position: T,
2614 cx: &mut ModelContext<Self>,
2615 ) -> Task<Result<Option<Range<Anchor>>>> {
2616 let position = position.to_point_utf16(buffer.read(cx));
2617 self.request_lsp(buffer, PrepareRename { position }, cx)
2618 }
2619
2620 pub fn perform_rename<T: ToPointUtf16>(
2621 &self,
2622 buffer: ModelHandle<Buffer>,
2623 position: T,
2624 new_name: String,
2625 push_to_history: bool,
2626 cx: &mut ModelContext<Self>,
2627 ) -> Task<Result<ProjectTransaction>> {
2628 let position = position.to_point_utf16(buffer.read(cx));
2629 self.request_lsp(
2630 buffer,
2631 PerformRename {
2632 position,
2633 new_name,
2634 push_to_history,
2635 },
2636 cx,
2637 )
2638 }
2639
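/// Searches the project for `query`. Locally, background workers scan every visible file for
/// candidate paths, matching buffers are opened (or reused if already open), and a second pool
/// of workers searches each buffer's rope, returning anchor ranges per buffer. Remote projects
/// delegate the search to the host.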
2640 pub fn search(
2641 &self,
2642 query: SearchQuery,
2643 cx: &mut ModelContext<Self>,
2644 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2645 if self.is_local() {
2646 let snapshots = self
2647 .visible_worktrees(cx)
2648 .filter_map(|tree| {
2649 let tree = tree.read(cx).as_local()?;
2650 Some(tree.snapshot())
2651 })
2652 .collect::<Vec<_>>();
2653
2654 let background = cx.background().clone();
2655 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2656 if path_count == 0 {
2657 return Task::ready(Ok(Default::default()));
2658 }
2659 let workers = background.num_cpus().min(path_count);
2660 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2661 cx.background()
2662 .spawn({
2663 let fs = self.fs.clone();
2664 let background = cx.background().clone();
2665 let query = query.clone();
2666 async move {
2667 let fs = &fs;
2668 let query = &query;
2669 let matching_paths_tx = &matching_paths_tx;
2670 let paths_per_worker = (path_count + workers - 1) / workers;
2671 let snapshots = &snapshots;
2672 background
2673 .scoped(|scope| {
2674 for worker_ix in 0..workers {
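// Each worker handles a contiguous slice of the visible file indices,
// which may span multiple worktree snapshots.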
2675 let worker_start_ix = worker_ix * paths_per_worker;
2676 let worker_end_ix = worker_start_ix + paths_per_worker;
2677 scope.spawn(async move {
2678 let mut snapshot_start_ix = 0;
2679 let mut abs_path = PathBuf::new();
2680 for snapshot in snapshots {
2681 let snapshot_end_ix =
2682 snapshot_start_ix + snapshot.visible_file_count();
2683 if worker_end_ix <= snapshot_start_ix {
2684 break;
2685 } else if worker_start_ix > snapshot_end_ix {
2686 snapshot_start_ix = snapshot_end_ix;
2687 continue;
2688 } else {
2689 let start_in_snapshot = worker_start_ix
2690 .saturating_sub(snapshot_start_ix);
2691 let end_in_snapshot =
2692 cmp::min(worker_end_ix, snapshot_end_ix)
2693 - snapshot_start_ix;
2694
2695 for entry in snapshot
2696 .files(false, start_in_snapshot)
2697 .take(end_in_snapshot - start_in_snapshot)
2698 {
2699 if matching_paths_tx.is_closed() {
2700 break;
2701 }
2702
2703 abs_path.clear();
2704 abs_path.push(&snapshot.abs_path());
2705 abs_path.push(&entry.path);
2706 let matches = if let Some(file) =
2707 fs.open_sync(&abs_path).await.log_err()
2708 {
2709 query.detect(file).unwrap_or(false)
2710 } else {
2711 false
2712 };
2713
2714 if matches {
2715 let project_path =
2716 (snapshot.id(), entry.path.clone());
2717 if matching_paths_tx
2718 .send(project_path)
2719 .await
2720 .is_err()
2721 {
2722 break;
2723 }
2724 }
2725 }
2726
2727 snapshot_start_ix = snapshot_end_ix;
2728 }
2729 }
2730 });
2731 }
2732 })
2733 .await;
2734 }
2735 })
2736 .detach();
2737
2738 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2739 let open_buffers = self
2740 .opened_buffers
2741 .values()
2742 .filter_map(|b| b.upgrade(cx))
2743 .collect::<HashSet<_>>();
2744 cx.spawn(|this, cx| async move {
2745 for buffer in &open_buffers {
2746 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2747 buffers_tx.send((buffer.clone(), snapshot)).await?;
2748 }
2749
2750 let open_buffers = Rc::new(RefCell::new(open_buffers));
2751 while let Some(project_path) = matching_paths_rx.next().await {
2752 if buffers_tx.is_closed() {
2753 break;
2754 }
2755
2756 let this = this.clone();
2757 let open_buffers = open_buffers.clone();
2758 let buffers_tx = buffers_tx.clone();
2759 cx.spawn(|mut cx| async move {
2760 if let Some(buffer) = this
2761 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2762 .await
2763 .log_err()
2764 {
2765 if open_buffers.borrow_mut().insert(buffer.clone()) {
2766 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2767 buffers_tx.send((buffer, snapshot)).await?;
2768 }
2769 }
2770
2771 Ok::<_, anyhow::Error>(())
2772 })
2773 .detach();
2774 }
2775
2776 Ok::<_, anyhow::Error>(())
2777 })
2778 .detach_and_log_err(cx);
2779
2780 let background = cx.background().clone();
2781 cx.background().spawn(async move {
2782 let query = &query;
2783 let mut matched_buffers = Vec::new();
2784 for _ in 0..workers {
2785 matched_buffers.push(HashMap::default());
2786 }
2787 background
2788 .scoped(|scope| {
2789 for worker_matched_buffers in matched_buffers.iter_mut() {
2790 let mut buffers_rx = buffers_rx.clone();
2791 scope.spawn(async move {
2792 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2793 let buffer_matches = query
2794 .search(snapshot.as_rope())
2795 .await
2796 .iter()
2797 .map(|range| {
2798 snapshot.anchor_before(range.start)
2799 ..snapshot.anchor_after(range.end)
2800 })
2801 .collect::<Vec<_>>();
2802 if !buffer_matches.is_empty() {
2803 worker_matched_buffers
2804 .insert(buffer.clone(), buffer_matches);
2805 }
2806 }
2807 });
2808 }
2809 })
2810 .await;
2811 Ok(matched_buffers.into_iter().flatten().collect())
2812 })
2813 } else if let Some(project_id) = self.remote_id() {
2814 let request = self.client.request(query.to_proto(project_id));
2815 cx.spawn(|this, mut cx| async move {
2816 let response = request.await?;
2817 let mut result = HashMap::default();
2818 for location in response.locations {
2819 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2820 let target_buffer = this
2821 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2822 .await?;
2823 let start = location
2824 .start
2825 .and_then(deserialize_anchor)
2826 .ok_or_else(|| anyhow!("missing target start"))?;
2827 let end = location
2828 .end
2829 .and_then(deserialize_anchor)
2830 .ok_or_else(|| anyhow!("missing target end"))?;
2831 result
2832 .entry(target_buffer)
2833 .or_insert_with(Vec::new)
2834 .push(start..end);
2835 }
2836 Ok(result)
2837 })
2838 } else {
2839 Task::ready(Ok(Default::default()))
2840 }
2841 }
2842
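/// Issues `request` for the given buffer, either directly against the buffer's local language
/// server (after checking its capabilities) or as a proto request to the remote host, and
/// converts the response back into the command's result type.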
2843 fn request_lsp<R: LspCommand>(
2844 &self,
2845 buffer_handle: ModelHandle<Buffer>,
2846 request: R,
2847 cx: &mut ModelContext<Self>,
2848 ) -> Task<Result<R::Response>>
2849 where
2850 <R::LspRequest as lsp::request::Request>::Result: Send,
2851 {
2852 let buffer = buffer_handle.read(cx);
2853 if self.is_local() {
2854 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2855 if let Some((file, language_server)) =
2856 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2857 {
2858 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2859 return cx.spawn(|this, cx| async move {
2860 if !request.check_capabilities(&language_server.capabilities()) {
2861 return Ok(Default::default());
2862 }
2863
2864 let response = language_server
2865 .request::<R::LspRequest>(lsp_params)
2866 .await
2867 .context("lsp request failed")?;
2868 request
2869 .response_from_lsp(response, this, buffer_handle, cx)
2870 .await
2871 });
2872 }
2873 } else if let Some(project_id) = self.remote_id() {
2874 let rpc = self.client.clone();
2875 let message = request.to_proto(project_id, buffer);
2876 return cx.spawn(|this, cx| async move {
2877 let response = rpc.request(message).await?;
2878 request
2879 .response_from_proto(response, this, buffer_handle, cx)
2880 .await
2881 });
2882 }
2883 Task::ready(Ok(Default::default()))
2884 }
2885
2886 pub fn find_or_create_local_worktree(
2887 &mut self,
2888 abs_path: impl AsRef<Path>,
2889 visible: bool,
2890 cx: &mut ModelContext<Self>,
2891 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2892 let abs_path = abs_path.as_ref();
2893 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2894 Task::ready(Ok((tree.clone(), relative_path.into())))
2895 } else {
2896 let worktree = self.create_local_worktree(abs_path, visible, cx);
2897 cx.foreground()
2898 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2899 }
2900 }
2901
2902 pub fn find_local_worktree(
2903 &self,
2904 abs_path: &Path,
2905 cx: &AppContext,
2906 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2907 for tree in self.worktrees(cx) {
2908 if let Some(relative_path) = tree
2909 .read(cx)
2910 .as_local()
2911 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2912 {
2913 return Some((tree.clone(), relative_path.into()));
2914 }
2915 }
2916 None
2917 }
2918
2919 pub fn is_shared(&self) -> bool {
2920 match &self.client_state {
2921 ProjectClientState::Local { is_shared, .. } => *is_shared,
2922 ProjectClientState::Remote { .. } => false,
2923 }
2924 }
2925
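/// Starts loading a local worktree for `abs_path`, deduplicating concurrent requests through
/// `loading_local_worktrees`. Once loaded, the worktree is added to the project and, if the
/// project has a remote id, shared or registered with the server.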
2926 fn create_local_worktree(
2927 &mut self,
2928 abs_path: impl AsRef<Path>,
2929 visible: bool,
2930 cx: &mut ModelContext<Self>,
2931 ) -> Task<Result<ModelHandle<Worktree>>> {
2932 let fs = self.fs.clone();
2933 let client = self.client.clone();
2934 let path: Arc<Path> = abs_path.as_ref().into();
2935 let task = self
2936 .loading_local_worktrees
2937 .entry(path.clone())
2938 .or_insert_with(|| {
2939 cx.spawn(|project, mut cx| {
2940 async move {
2941 let worktree =
2942 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2943 .await;
2944 project.update(&mut cx, |project, _| {
2945 project.loading_local_worktrees.remove(&path);
2946 });
2947 let worktree = worktree?;
2948
2949 let (remote_project_id, is_shared) =
2950 project.update(&mut cx, |project, cx| {
2951 project.add_worktree(&worktree, cx);
2952 (project.remote_id(), project.is_shared())
2953 });
2954
2955 if let Some(project_id) = remote_project_id {
2956 if is_shared {
2957 worktree
2958 .update(&mut cx, |worktree, cx| {
2959 worktree.as_local_mut().unwrap().share(project_id, cx)
2960 })
2961 .await?;
2962 } else {
2963 worktree
2964 .update(&mut cx, |worktree, cx| {
2965 worktree.as_local_mut().unwrap().register(project_id, cx)
2966 })
2967 .await?;
2968 }
2969 }
2970
2971 Ok(worktree)
2972 }
2973 .map_err(Arc::new)
2974 })
2975 .shared()
2976 })
2977 .clone();
2978 cx.foreground().spawn(async move {
2979 match task.await {
2980 Ok(worktree) => Ok(worktree),
2981 Err(err) => Err(anyhow!("{}", err)),
2982 }
2983 })
2984 }
2985
2986 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2987 self.worktrees.retain(|worktree| {
2988 worktree
2989 .upgrade(cx)
2990 .map_or(false, |w| w.read(cx).id() != id)
2991 });
2992 cx.notify();
2993 }
2994
2995 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2996 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2997 if worktree.read(cx).is_local() {
2998 cx.subscribe(&worktree, |this, worktree, _, cx| {
2999 this.update_local_worktree_buffers(worktree, cx);
3000 })
3001 .detach();
3002 }
3003
3004 let push_strong_handle = {
3005 let worktree = worktree.read(cx);
3006 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3007 };
3008 if push_strong_handle {
3009 self.worktrees
3010 .push(WorktreeHandle::Strong(worktree.clone()));
3011 } else {
3012 cx.observe_release(&worktree, |this, _, cx| {
3013 this.worktrees
3014 .retain(|worktree| worktree.upgrade(cx).is_some());
3015 cx.notify();
3016 })
3017 .detach();
3018 self.worktrees
3019 .push(WorktreeHandle::Weak(worktree.downgrade()));
3020 }
3021 cx.notify();
3022 }
3023
3024 fn update_local_worktree_buffers(
3025 &mut self,
3026 worktree_handle: ModelHandle<Worktree>,
3027 cx: &mut ModelContext<Self>,
3028 ) {
3029 let snapshot = worktree_handle.read(cx).snapshot();
3030 let mut buffers_to_delete = Vec::new();
3031 for (buffer_id, buffer) in &self.opened_buffers {
3032 if let Some(buffer) = buffer.upgrade(cx) {
3033 buffer.update(cx, |buffer, cx| {
3034 if let Some(old_file) = File::from_dyn(buffer.file()) {
3035 if old_file.worktree != worktree_handle {
3036 return;
3037 }
3038
3039 let new_file = if let Some(entry) = old_file
3040 .entry_id
3041 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3042 {
3043 File {
3044 is_local: true,
3045 entry_id: Some(entry.id),
3046 mtime: entry.mtime,
3047 path: entry.path.clone(),
3048 worktree: worktree_handle.clone(),
3049 }
3050 } else if let Some(entry) =
3051 snapshot.entry_for_path(old_file.path().as_ref())
3052 {
3053 File {
3054 is_local: true,
3055 entry_id: Some(entry.id),
3056 mtime: entry.mtime,
3057 path: entry.path.clone(),
3058 worktree: worktree_handle.clone(),
3059 }
3060 } else {
3061 File {
3062 is_local: true,
3063 entry_id: None,
3064 path: old_file.path().clone(),
3065 mtime: old_file.mtime(),
3066 worktree: worktree_handle.clone(),
3067 }
3068 };
3069
3070 if let Some(project_id) = self.remote_id() {
3071 self.client
3072 .send(proto::UpdateBufferFile {
3073 project_id,
3074 buffer_id: *buffer_id as u64,
3075 file: Some(new_file.to_proto()),
3076 })
3077 .log_err();
3078 }
3079 buffer.file_updated(Box::new(new_file), cx).detach();
3080 }
3081 });
3082 } else {
3083 buffers_to_delete.push(*buffer_id);
3084 }
3085 }
3086
3087 for buffer_id in buffers_to_delete {
3088 self.opened_buffers.remove(&buffer_id);
3089 }
3090 }
3091
3092 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3093 let new_active_entry = entry.and_then(|project_path| {
3094 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3095 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3096 Some(ProjectEntry {
3097 worktree_id: project_path.worktree_id,
3098 entry_id: entry.id,
3099 })
3100 });
3101 if new_active_entry != self.active_entry {
3102 self.active_entry = new_active_entry;
3103 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3104 }
3105 }
3106
3107 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3108 self.language_servers_with_diagnostics_running > 0
3109 }
3110
3111 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3112 let mut summary = DiagnosticSummary::default();
3113 for (_, path_summary) in self.diagnostic_summaries(cx) {
3114 summary.error_count += path_summary.error_count;
3115 summary.warning_count += path_summary.warning_count;
3116 summary.info_count += path_summary.info_count;
3117 summary.hint_count += path_summary.hint_count;
3118 }
3119 summary
3120 }
3121
3122 pub fn diagnostic_summaries<'a>(
3123 &'a self,
3124 cx: &'a AppContext,
3125 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3126 self.worktrees(cx).flat_map(move |worktree| {
3127 let worktree = worktree.read(cx);
3128 let worktree_id = worktree.id();
3129 worktree
3130 .diagnostic_summaries()
3131 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3132 })
3133 }
3134
3135 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3136 self.language_servers_with_diagnostics_running += 1;
3137 if self.language_servers_with_diagnostics_running == 1 {
3138 cx.emit(Event::DiskBasedDiagnosticsStarted);
3139 }
3140 }
3141
3142 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3143 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3144 self.language_servers_with_diagnostics_running -= 1;
3145 if self.language_servers_with_diagnostics_running == 0 {
3146 cx.emit(Event::DiskBasedDiagnosticsFinished);
3147 }
3148 }
3149
3150 pub fn active_entry(&self) -> Option<ProjectEntry> {
3151 self.active_entry
3152 }
3153
3154 // RPC message handlers
3155
3156 async fn handle_unshare_project(
3157 this: ModelHandle<Self>,
3158 _: TypedEnvelope<proto::UnshareProject>,
3159 _: Arc<Client>,
3160 mut cx: AsyncAppContext,
3161 ) -> Result<()> {
3162 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3163 Ok(())
3164 }
3165
3166 async fn handle_add_collaborator(
3167 this: ModelHandle<Self>,
3168 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3169 _: Arc<Client>,
3170 mut cx: AsyncAppContext,
3171 ) -> Result<()> {
3172 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3173 let collaborator = envelope
3174 .payload
3175 .collaborator
3176 .take()
3177 .ok_or_else(|| anyhow!("empty collaborator"))?;
3178
3179 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3180 this.update(&mut cx, |this, cx| {
3181 this.collaborators
3182 .insert(collaborator.peer_id, collaborator);
3183 cx.notify();
3184 });
3185
3186 Ok(())
3187 }
3188
3189 async fn handle_remove_collaborator(
3190 this: ModelHandle<Self>,
3191 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3192 _: Arc<Client>,
3193 mut cx: AsyncAppContext,
3194 ) -> Result<()> {
3195 this.update(&mut cx, |this, cx| {
3196 let peer_id = PeerId(envelope.payload.peer_id);
3197 let replica_id = this
3198 .collaborators
3199 .remove(&peer_id)
3200 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3201 .replica_id;
3202 for (_, buffer) in &this.opened_buffers {
3203 if let Some(buffer) = buffer.upgrade(cx) {
3204 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3205 }
3206 }
3207 cx.notify();
3208 Ok(())
3209 })
3210 }
3211
3212 async fn handle_register_worktree(
3213 this: ModelHandle<Self>,
3214 envelope: TypedEnvelope<proto::RegisterWorktree>,
3215 client: Arc<Client>,
3216 mut cx: AsyncAppContext,
3217 ) -> Result<()> {
3218 this.update(&mut cx, |this, cx| {
3219 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3220 let replica_id = this.replica_id();
3221 let worktree = proto::Worktree {
3222 id: envelope.payload.worktree_id,
3223 root_name: envelope.payload.root_name,
3224 entries: Default::default(),
3225 diagnostic_summaries: Default::default(),
3226 visible: envelope.payload.visible,
3227 };
3228 let (worktree, load_task) =
3229 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3230 this.add_worktree(&worktree, cx);
3231 load_task.detach();
3232 Ok(())
3233 })
3234 }
3235
3236 async fn handle_unregister_worktree(
3237 this: ModelHandle<Self>,
3238 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3239 _: Arc<Client>,
3240 mut cx: AsyncAppContext,
3241 ) -> Result<()> {
3242 this.update(&mut cx, |this, cx| {
3243 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3244 this.remove_worktree(worktree_id, cx);
3245 Ok(())
3246 })
3247 }
3248
3249 async fn handle_update_worktree(
3250 this: ModelHandle<Self>,
3251 envelope: TypedEnvelope<proto::UpdateWorktree>,
3252 _: Arc<Client>,
3253 mut cx: AsyncAppContext,
3254 ) -> Result<()> {
3255 this.update(&mut cx, |this, cx| {
3256 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3257 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3258 worktree.update(cx, |worktree, _| {
3259 let worktree = worktree.as_remote_mut().unwrap();
3260 worktree.update_from_remote(envelope)
3261 })?;
3262 }
3263 Ok(())
3264 })
3265 }
3266
3267 async fn handle_update_diagnostic_summary(
3268 this: ModelHandle<Self>,
3269 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3270 _: Arc<Client>,
3271 mut cx: AsyncAppContext,
3272 ) -> Result<()> {
3273 this.update(&mut cx, |this, cx| {
3274 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3275 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3276 if let Some(summary) = envelope.payload.summary {
3277 let project_path = ProjectPath {
3278 worktree_id,
3279 path: Path::new(&summary.path).into(),
3280 };
3281 worktree.update(cx, |worktree, _| {
3282 worktree
3283 .as_remote_mut()
3284 .unwrap()
3285 .update_diagnostic_summary(project_path.path.clone(), &summary);
3286 });
3287 cx.emit(Event::DiagnosticsUpdated(project_path));
3288 }
3289 }
3290 Ok(())
3291 })
3292 }
3293
3294 async fn handle_start_language_server(
3295 this: ModelHandle<Self>,
3296 envelope: TypedEnvelope<proto::StartLanguageServer>,
3297 _: Arc<Client>,
3298 mut cx: AsyncAppContext,
3299 ) -> Result<()> {
3300 let server = envelope
3301 .payload
3302 .server
3303 .ok_or_else(|| anyhow!("invalid server"))?;
3304 this.update(&mut cx, |this, cx| {
3305 this.language_server_statuses.insert(
3306 server.id as usize,
3307 LanguageServerStatus {
3308 name: server.name,
3309 pending_work: Default::default(),
3310 pending_diagnostic_updates: 0,
3311 },
3312 );
3313 cx.notify();
3314 });
3315 Ok(())
3316 }
3317
3318 async fn handle_update_language_server(
3319 this: ModelHandle<Self>,
3320 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3321 _: Arc<Client>,
3322 mut cx: AsyncAppContext,
3323 ) -> Result<()> {
3324 let language_server_id = envelope.payload.language_server_id as usize;
3325 match envelope
3326 .payload
3327 .variant
3328 .ok_or_else(|| anyhow!("invalid variant"))?
3329 {
3330 proto::update_language_server::Variant::WorkStart(payload) => {
3331 this.update(&mut cx, |this, cx| {
3332 this.on_lsp_work_start(language_server_id, payload.token, cx);
3333 })
3334 }
3335 proto::update_language_server::Variant::WorkProgress(payload) => {
3336 this.update(&mut cx, |this, cx| {
3337 this.on_lsp_work_progress(
3338 language_server_id,
3339 payload.token,
3340 LanguageServerProgress {
3341 message: payload.message,
3342 percentage: payload.percentage.map(|p| p as usize),
3343 last_update_at: Instant::now(),
3344 },
3345 cx,
3346 );
3347 })
3348 }
3349 proto::update_language_server::Variant::WorkEnd(payload) => {
3350 this.update(&mut cx, |this, cx| {
3351 this.on_lsp_work_end(language_server_id, payload.token, cx);
3352 })
3353 }
3354 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3355 this.update(&mut cx, |this, cx| {
3356 this.disk_based_diagnostics_started(cx);
3357 })
3358 }
3359 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3360 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3361 }
3362 }
3363
3364 Ok(())
3365 }
3366
3367 async fn handle_update_buffer(
3368 this: ModelHandle<Self>,
3369 envelope: TypedEnvelope<proto::UpdateBuffer>,
3370 _: Arc<Client>,
3371 mut cx: AsyncAppContext,
3372 ) -> Result<()> {
3373 this.update(&mut cx, |this, cx| {
3374 let payload = envelope.payload.clone();
3375 let buffer_id = payload.buffer_id;
3376 let ops = payload
3377 .operations
3378 .into_iter()
3379 .map(language::proto::deserialize_operation)
3380 .collect::<Result<Vec<_>, _>>()?;
3381 match this.opened_buffers.entry(buffer_id) {
3382 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3383 OpenBuffer::Strong(buffer) => {
3384 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3385 }
3386 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3387 OpenBuffer::Weak(_) => {}
3388 },
3389 hash_map::Entry::Vacant(e) => {
3390 e.insert(OpenBuffer::Loading(ops));
3391 }
3392 }
3393 Ok(())
3394 })
3395 }
3396
3397 async fn handle_update_buffer_file(
3398 this: ModelHandle<Self>,
3399 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3400 _: Arc<Client>,
3401 mut cx: AsyncAppContext,
3402 ) -> Result<()> {
3403 this.update(&mut cx, |this, cx| {
3404 let payload = envelope.payload.clone();
3405 let buffer_id = payload.buffer_id;
3406 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3407 let worktree = this
3408 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3409 .ok_or_else(|| anyhow!("no such worktree"))?;
3410 let file = File::from_proto(file, worktree.clone(), cx)?;
3411 let buffer = this
3412 .opened_buffers
3413 .get_mut(&buffer_id)
3414 .and_then(|b| b.upgrade(cx))
3415 .ok_or_else(|| anyhow!("no such buffer"))?;
3416 buffer.update(cx, |buffer, cx| {
3417 buffer.file_updated(Box::new(file), cx).detach();
3418 });
3419 Ok(())
3420 })
3421 }
3422
3423 async fn handle_save_buffer(
3424 this: ModelHandle<Self>,
3425 envelope: TypedEnvelope<proto::SaveBuffer>,
3426 _: Arc<Client>,
3427 mut cx: AsyncAppContext,
3428 ) -> Result<proto::BufferSaved> {
3429 let buffer_id = envelope.payload.buffer_id;
3430 let requested_version = deserialize_version(envelope.payload.version);
3431
3432 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3433 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3434 let buffer = this
3435 .opened_buffers
3436 .get(&buffer_id)
3437 .map(|buffer| buffer.upgrade(cx).unwrap())
3438 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3439 Ok::<_, anyhow::Error>((project_id, buffer))
3440 })?;
3441 buffer
3442 .update(&mut cx, |buffer, _| {
3443 buffer.wait_for_version(requested_version)
3444 })
3445 .await;
3446
3447 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3448 Ok(proto::BufferSaved {
3449 project_id,
3450 buffer_id,
3451 version: serialize_version(&saved_version),
3452 mtime: Some(mtime.into()),
3453 })
3454 }
3455
3456 async fn handle_format_buffers(
3457 this: ModelHandle<Self>,
3458 envelope: TypedEnvelope<proto::FormatBuffers>,
3459 _: Arc<Client>,
3460 mut cx: AsyncAppContext,
3461 ) -> Result<proto::FormatBuffersResponse> {
3462 let sender_id = envelope.original_sender_id()?;
3463 let format = this.update(&mut cx, |this, cx| {
3464 let mut buffers = HashSet::default();
3465 for buffer_id in &envelope.payload.buffer_ids {
3466 buffers.insert(
3467 this.opened_buffers
3468 .get(buffer_id)
3469 .map(|buffer| buffer.upgrade(cx).unwrap())
3470 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3471 );
3472 }
3473 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3474 })?;
3475
3476 let project_transaction = format.await?;
3477 let project_transaction = this.update(&mut cx, |this, cx| {
3478 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3479 });
3480 Ok(proto::FormatBuffersResponse {
3481 transaction: Some(project_transaction),
3482 })
3483 }
3484
3485 async fn handle_get_completions(
3486 this: ModelHandle<Self>,
3487 envelope: TypedEnvelope<proto::GetCompletions>,
3488 _: Arc<Client>,
3489 mut cx: AsyncAppContext,
3490 ) -> Result<proto::GetCompletionsResponse> {
3491 let position = envelope
3492 .payload
3493 .position
3494 .and_then(language::proto::deserialize_anchor)
3495 .ok_or_else(|| anyhow!("invalid position"))?;
3496 let version = deserialize_version(envelope.payload.version);
3497 let buffer = this.read_with(&cx, |this, cx| {
3498 this.opened_buffers
3499 .get(&envelope.payload.buffer_id)
3500 .map(|buffer| buffer.upgrade(cx).unwrap())
3501 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3502 })?;
3503 buffer
3504 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3505 .await;
3506 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3507 let completions = this
3508 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3509 .await?;
3510
3511 Ok(proto::GetCompletionsResponse {
3512 completions: completions
3513 .iter()
3514 .map(language::proto::serialize_completion)
3515 .collect(),
3516 version: serialize_version(&version),
3517 })
3518 }
3519
3520 async fn handle_apply_additional_edits_for_completion(
3521 this: ModelHandle<Self>,
3522 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3523 _: Arc<Client>,
3524 mut cx: AsyncAppContext,
3525 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3526 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3527 let buffer = this
3528 .opened_buffers
3529 .get(&envelope.payload.buffer_id)
3530 .map(|buffer| buffer.upgrade(cx).unwrap())
3531 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3532 let language = buffer.read(cx).language();
3533 let completion = language::proto::deserialize_completion(
3534 envelope
3535 .payload
3536 .completion
3537 .ok_or_else(|| anyhow!("invalid completion"))?,
3538 language,
3539 )?;
3540 Ok::<_, anyhow::Error>(
3541 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3542 )
3543 })?;
3544
3545 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3546 transaction: apply_additional_edits
3547 .await?
3548 .as_ref()
3549 .map(language::proto::serialize_transaction),
3550 })
3551 }
3552
3553 async fn handle_get_code_actions(
3554 this: ModelHandle<Self>,
3555 envelope: TypedEnvelope<proto::GetCodeActions>,
3556 _: Arc<Client>,
3557 mut cx: AsyncAppContext,
3558 ) -> Result<proto::GetCodeActionsResponse> {
3559 let start = envelope
3560 .payload
3561 .start
3562 .and_then(language::proto::deserialize_anchor)
3563 .ok_or_else(|| anyhow!("invalid start"))?;
3564 let end = envelope
3565 .payload
3566 .end
3567 .and_then(language::proto::deserialize_anchor)
3568 .ok_or_else(|| anyhow!("invalid end"))?;
3569 let buffer = this.update(&mut cx, |this, cx| {
3570 this.opened_buffers
3571 .get(&envelope.payload.buffer_id)
3572 .map(|buffer| buffer.upgrade(cx).unwrap())
3573 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3574 })?;
3575 buffer
3576 .update(&mut cx, |buffer, _| {
3577 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3578 })
3579 .await;
3580
3581 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3582 let code_actions = this.update(&mut cx, |this, cx| {
3583 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3584 })?;
3585
3586 Ok(proto::GetCodeActionsResponse {
3587 actions: code_actions
3588 .await?
3589 .iter()
3590 .map(language::proto::serialize_code_action)
3591 .collect(),
3592 version: serialize_version(&version),
3593 })
3594 }
3595
3596 async fn handle_apply_code_action(
3597 this: ModelHandle<Self>,
3598 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3599 _: Arc<Client>,
3600 mut cx: AsyncAppContext,
3601 ) -> Result<proto::ApplyCodeActionResponse> {
3602 let sender_id = envelope.original_sender_id()?;
3603 let action = language::proto::deserialize_code_action(
3604 envelope
3605 .payload
3606 .action
3607 .ok_or_else(|| anyhow!("invalid action"))?,
3608 )?;
3609 let apply_code_action = this.update(&mut cx, |this, cx| {
3610 let buffer = this
3611 .opened_buffers
3612 .get(&envelope.payload.buffer_id)
3613 .map(|buffer| buffer.upgrade(cx).unwrap())
3614 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3615 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3616 })?;
3617
3618 let project_transaction = apply_code_action.await?;
3619 let project_transaction = this.update(&mut cx, |this, cx| {
3620 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3621 });
3622 Ok(proto::ApplyCodeActionResponse {
3623 transaction: Some(project_transaction),
3624 })
3625 }
3626
3627 async fn handle_lsp_command<T: LspCommand>(
3628 this: ModelHandle<Self>,
3629 envelope: TypedEnvelope<T::ProtoRequest>,
3630 _: Arc<Client>,
3631 mut cx: AsyncAppContext,
3632 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3633 where
3634 <T::LspRequest as lsp::request::Request>::Result: Send,
3635 {
3636 let sender_id = envelope.original_sender_id()?;
3637 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3638 let buffer_handle = this.read_with(&cx, |this, _| {
3639 this.opened_buffers
3640 .get(&buffer_id)
3641 .map(|buffer| buffer.upgrade(&cx).unwrap())
3642 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3643 })?;
3644 let request = T::from_proto(
3645 envelope.payload,
3646 this.clone(),
3647 buffer_handle.clone(),
3648 cx.clone(),
3649 )
3650 .await?;
3651 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3652 let response = this
3653 .update(&mut cx, |this, cx| {
3654 this.request_lsp(buffer_handle, request, cx)
3655 })
3656 .await?;
3657 this.update(&mut cx, |this, cx| {
3658 Ok(T::response_to_proto(
3659 response,
3660 this,
3661 sender_id,
3662 &buffer_version,
3663 cx,
3664 ))
3665 })
3666 }
3667
3668 async fn handle_get_project_symbols(
3669 this: ModelHandle<Self>,
3670 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3671 _: Arc<Client>,
3672 mut cx: AsyncAppContext,
3673 ) -> Result<proto::GetProjectSymbolsResponse> {
3674 let symbols = this
3675 .update(&mut cx, |this, cx| {
3676 this.symbols(&envelope.payload.query, cx)
3677 })
3678 .await?;
3679
3680 Ok(proto::GetProjectSymbolsResponse {
3681 symbols: symbols.iter().map(serialize_symbol).collect(),
3682 })
3683 }
3684
3685 async fn handle_search_project(
3686 this: ModelHandle<Self>,
3687 envelope: TypedEnvelope<proto::SearchProject>,
3688 _: Arc<Client>,
3689 mut cx: AsyncAppContext,
3690 ) -> Result<proto::SearchProjectResponse> {
3691 let peer_id = envelope.original_sender_id()?;
3692 let query = SearchQuery::from_proto(envelope.payload)?;
3693 let result = this
3694 .update(&mut cx, |this, cx| this.search(query, cx))
3695 .await?;
3696
3697 this.update(&mut cx, |this, cx| {
3698 let mut locations = Vec::new();
3699 for (buffer, ranges) in result {
3700 for range in ranges {
3701 let start = serialize_anchor(&range.start);
3702 let end = serialize_anchor(&range.end);
3703 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3704 locations.push(proto::Location {
3705 buffer: Some(buffer),
3706 start: Some(start),
3707 end: Some(end),
3708 });
3709 }
3710 }
3711 Ok(proto::SearchProjectResponse { locations })
3712 })
3713 }
3714
3715 async fn handle_open_buffer_for_symbol(
3716 this: ModelHandle<Self>,
3717 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3718 _: Arc<Client>,
3719 mut cx: AsyncAppContext,
3720 ) -> Result<proto::OpenBufferForSymbolResponse> {
3721 let peer_id = envelope.original_sender_id()?;
3722 let symbol = envelope
3723 .payload
3724 .symbol
3725 .ok_or_else(|| anyhow!("invalid symbol"))?;
3726 let symbol = this.read_with(&cx, |this, _| {
3727 let symbol = this.deserialize_symbol(symbol)?;
3728 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3729 if signature == symbol.signature {
3730 Ok(symbol)
3731 } else {
3732 Err(anyhow!("invalid symbol signature"))
3733 }
3734 })?;
3735 let buffer = this
3736 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3737 .await?;
3738
3739 Ok(proto::OpenBufferForSymbolResponse {
3740 buffer: Some(this.update(&mut cx, |this, cx| {
3741 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3742 })),
3743 })
3744 }
3745
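/// Computes a SHA-256 signature over the worktree id, the symbol's path, and this project's
/// nonce, so that symbols echoed back over the wire can be validated before their paths are
/// trusted.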
3746 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3747 let mut hasher = Sha256::new();
3748 hasher.update(worktree_id.to_proto().to_be_bytes());
3749 hasher.update(path.to_string_lossy().as_bytes());
3750 hasher.update(self.nonce.to_be_bytes());
3751 hasher.finalize().as_slice().try_into().unwrap()
3752 }
3753
3754 async fn handle_open_buffer(
3755 this: ModelHandle<Self>,
3756 envelope: TypedEnvelope<proto::OpenBuffer>,
3757 _: Arc<Client>,
3758 mut cx: AsyncAppContext,
3759 ) -> Result<proto::OpenBufferResponse> {
3760 let peer_id = envelope.original_sender_id()?;
3761 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3762 let open_buffer = this.update(&mut cx, |this, cx| {
3763 this.open_buffer(
3764 ProjectPath {
3765 worktree_id,
3766 path: PathBuf::from(envelope.payload.path).into(),
3767 },
3768 cx,
3769 )
3770 });
3771
3772 let buffer = open_buffer.await?;
3773 this.update(&mut cx, |this, cx| {
3774 Ok(proto::OpenBufferResponse {
3775 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3776 })
3777 })
3778 }
3779
3780 fn serialize_project_transaction_for_peer(
3781 &mut self,
3782 project_transaction: ProjectTransaction,
3783 peer_id: PeerId,
3784 cx: &AppContext,
3785 ) -> proto::ProjectTransaction {
3786 let mut serialized_transaction = proto::ProjectTransaction {
3787 buffers: Default::default(),
3788 transactions: Default::default(),
3789 };
3790 for (buffer, transaction) in project_transaction.0 {
3791 serialized_transaction
3792 .buffers
3793 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3794 serialized_transaction
3795 .transactions
3796 .push(language::proto::serialize_transaction(&transaction));
3797 }
3798 serialized_transaction
3799 }
3800
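    // Reconstructs a `ProjectTransaction` received from a peer: resolves each
    // serialized buffer, waits until the transaction's edits have been applied
    // locally, and optionally pushes the transaction onto the buffer's history.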
3801 fn deserialize_project_transaction(
3802 &mut self,
3803 message: proto::ProjectTransaction,
3804 push_to_history: bool,
3805 cx: &mut ModelContext<Self>,
3806 ) -> Task<Result<ProjectTransaction>> {
3807 cx.spawn(|this, mut cx| async move {
3808 let mut project_transaction = ProjectTransaction::default();
3809 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3810 let buffer = this
3811 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3812 .await?;
3813 let transaction = language::proto::deserialize_transaction(transaction)?;
3814 project_transaction.0.insert(buffer, transaction);
3815 }
3816
3817 for (buffer, transaction) in &project_transaction.0 {
3818 buffer
3819 .update(&mut cx, |buffer, _| {
3820 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3821 })
3822 .await;
3823
3824 if push_to_history {
3825 buffer.update(&mut cx, |buffer, _| {
3826 buffer.push_transaction(transaction.clone(), Instant::now());
3827 });
3828 }
3829 }
3830
3831 Ok(project_transaction)
3832 })
3833 }
3834
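    // Serializes a buffer for transmission to a peer. The first time a buffer is
    // shared with a given peer its full state is sent; afterwards only its id is
    // sent, since the peer already holds a replica:
    //
    //   first send:       proto::Buffer { variant: Some(Variant::State(..)) }
    //   subsequent sends: proto::Buffer { variant: Some(Variant::Id(buffer_id)) }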
3835 fn serialize_buffer_for_peer(
3836 &mut self,
3837 buffer: &ModelHandle<Buffer>,
3838 peer_id: PeerId,
3839 cx: &AppContext,
3840 ) -> proto::Buffer {
3841 let buffer_id = buffer.read(cx).remote_id();
3842 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3843 if shared_buffers.insert(buffer_id) {
3844 proto::Buffer {
3845 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3846 }
3847 } else {
3848 proto::Buffer {
3849 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3850 }
3851 }
3852 }
3853
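    // Resolves a `proto::Buffer` received from a peer. An `Id` variant refers to a
    // buffer this project has already opened (waiting, if necessary, until it is
    // registered), while a `State` variant carries the full contents of a buffer
    // that is constructed and registered locally.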
3854 fn deserialize_buffer(
3855 &mut self,
3856 buffer: proto::Buffer,
3857 cx: &mut ModelContext<Self>,
3858 ) -> Task<Result<ModelHandle<Buffer>>> {
3859 let replica_id = self.replica_id();
3860
3861 let opened_buffer_tx = self.opened_buffer.0.clone();
3862 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3863 cx.spawn(|this, mut cx| async move {
3864 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3865 proto::buffer::Variant::Id(id) => {
3866 let buffer = loop {
3867 let buffer = this.read_with(&cx, |this, cx| {
3868 this.opened_buffers
3869 .get(&id)
3870 .and_then(|buffer| buffer.upgrade(cx))
3871 });
3872 if let Some(buffer) = buffer {
3873 break buffer;
3874 }
3875 opened_buffer_rx
3876 .next()
3877 .await
3878 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3879 };
3880 Ok(buffer)
3881 }
3882 proto::buffer::Variant::State(mut buffer) => {
3883 let mut buffer_worktree = None;
3884 let mut buffer_file = None;
3885 if let Some(file) = buffer.file.take() {
3886 this.read_with(&cx, |this, cx| {
3887 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3888 let worktree =
3889 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3890 anyhow!("no worktree found for id {}", file.worktree_id)
3891 })?;
3892 buffer_file =
3893 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3894 as Box<dyn language::File>);
3895 buffer_worktree = Some(worktree);
3896 Ok::<_, anyhow::Error>(())
3897 })?;
3898 }
3899
3900 let buffer = cx.add_model(|cx| {
3901 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3902 });
3903
3904 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3905
3906 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3907 Ok(buffer)
3908 }
3909 }
3910 })
3911 }
3912
3913 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3914 let language = self
3915 .languages
3916 .get_language(&serialized_symbol.language_name);
3917 let start = serialized_symbol
3918 .start
3919 .ok_or_else(|| anyhow!("invalid start"))?;
3920 let end = serialized_symbol
3921 .end
3922 .ok_or_else(|| anyhow!("invalid end"))?;
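        // The symbol kind travels over the wire as a raw integer (see
        // `serialize_symbol`); reinterpret it as the LSP symbol-kind type here.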
3923 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3924 Ok(Symbol {
3925 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3926 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3927 language_name: serialized_symbol.language_name.clone(),
3928 label: language
3929 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3930 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3931 name: serialized_symbol.name,
3932 path: PathBuf::from(serialized_symbol.path),
3933 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3934 kind,
3935 signature: serialized_symbol
3936 .signature
3937 .try_into()
3938 .map_err(|_| anyhow!("invalid signature"))?,
3939 })
3940 }
3941
3942 async fn handle_buffer_saved(
3943 this: ModelHandle<Self>,
3944 envelope: TypedEnvelope<proto::BufferSaved>,
3945 _: Arc<Client>,
3946 mut cx: AsyncAppContext,
3947 ) -> Result<()> {
3948 let version = deserialize_version(envelope.payload.version);
3949 let mtime = envelope
3950 .payload
3951 .mtime
3952 .ok_or_else(|| anyhow!("missing mtime"))?
3953 .into();
3954
3955 this.update(&mut cx, |this, cx| {
3956 let buffer = this
3957 .opened_buffers
3958 .get(&envelope.payload.buffer_id)
3959 .and_then(|buffer| buffer.upgrade(cx));
3960 if let Some(buffer) = buffer {
3961 buffer.update(cx, |buffer, cx| {
3962 buffer.did_save(version, mtime, None, cx);
3963 });
3964 }
3965 Ok(())
3966 })
3967 }
3968
3969 async fn handle_buffer_reloaded(
3970 this: ModelHandle<Self>,
3971 envelope: TypedEnvelope<proto::BufferReloaded>,
3972 _: Arc<Client>,
3973 mut cx: AsyncAppContext,
3974 ) -> Result<()> {
3975 let payload = envelope.payload.clone();
3976 let version = deserialize_version(payload.version);
3977 let mtime = payload
3978 .mtime
3979 .ok_or_else(|| anyhow!("missing mtime"))?
3980 .into();
3981 this.update(&mut cx, |this, cx| {
3982 let buffer = this
3983 .opened_buffers
3984 .get(&payload.buffer_id)
3985 .and_then(|buffer| buffer.upgrade(cx));
3986 if let Some(buffer) = buffer {
3987 buffer.update(cx, |buffer, cx| {
3988 buffer.did_reload(version, mtime, cx);
3989 });
3990 }
3991 Ok(())
3992 })
3993 }
3994
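    // Fuzzy-matches `query` against the paths of all visible worktrees, returning a
    // future that runs the matching on the background executor. `include_ignored`
    // controls whether ignored files are considered.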
3995 pub fn match_paths<'a>(
3996 &self,
3997 query: &'a str,
3998 include_ignored: bool,
3999 smart_case: bool,
4000 max_results: usize,
4001 cancel_flag: &'a AtomicBool,
4002 cx: &AppContext,
4003 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4004 let worktrees = self
4005 .worktrees(cx)
4006 .filter(|worktree| worktree.read(cx).is_visible())
4007 .collect::<Vec<_>>();
4008 let include_root_name = worktrees.len() > 1;
4009 let candidate_sets = worktrees
4010 .into_iter()
4011 .map(|worktree| CandidateSet {
4012 snapshot: worktree.read(cx).snapshot(),
4013 include_ignored,
4014 include_root_name,
4015 })
4016 .collect::<Vec<_>>();
4017
4018 let background = cx.background().clone();
4019 async move {
4020 fuzzy::match_paths(
4021 candidate_sets.as_slice(),
4022 query,
4023 smart_case,
4024 max_results,
4025 cancel_flag,
4026 background,
4027 )
4028 .await
4029 }
4030 }
4031
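    // Converts a set of LSP text edits into anchored edits on the given buffer,
    // resolved against the buffer snapshot that corresponds to `version`. Adjacent
    // edits are merged, and multi-line replacements are diffed line by line so that
    // anchors in unchanged regions are preserved.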
4032 fn edits_from_lsp(
4033 &mut self,
4034 buffer: &ModelHandle<Buffer>,
4035 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4036 version: Option<i32>,
4037 cx: &mut ModelContext<Self>,
4038 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4039 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4040 cx.background().spawn(async move {
4041 let snapshot = snapshot?;
4042 let mut lsp_edits = lsp_edits
4043 .into_iter()
4044 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4045 .peekable();
4046
4047 let mut edits = Vec::new();
4048 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4049 // Combine any LSP edits that are adjacent.
4050 //
4051 // Also, combine LSP edits that are separated from each other by only
4052 // a newline. This is important because for some code actions,
4053 // Rust-analyzer rewrites the entire buffer via a series of edits that
4054 // are separated by unchanged newline characters.
4055 //
4056 // In order for the diffing logic below to work properly, any edits that
4057 // cancel each other out must be combined into one.
4058 while let Some((next_range, next_text)) = lsp_edits.peek() {
4059 if next_range.start > range.end {
4060 if next_range.start.row > range.end.row + 1
4061 || next_range.start.column > 0
4062 || snapshot.clip_point_utf16(
4063 PointUtf16::new(range.end.row, u32::MAX),
4064 Bias::Left,
4065 ) > range.end
4066 {
4067 break;
4068 }
4069 new_text.push('\n');
4070 }
4071 range.end = next_range.end;
4072 new_text.push_str(&next_text);
4073 lsp_edits.next();
4074 }
4075
4076 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4077 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4078 {
4079 return Err(anyhow!("invalid edits received from language server"));
4080 }
4081
4082 // For multiline edits, perform a diff of the old and new text so that
4083 // we can identify the changes more precisely, preserving the locations
4084 // of any anchors positioned in the unchanged regions.
4085 if range.end.row > range.start.row {
4086 let mut offset = range.start.to_offset(&snapshot);
4087 let old_text = snapshot.text_for_range(range).collect::<String>();
4088
4089 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4090 let mut moved_since_edit = true;
4091 for change in diff.iter_all_changes() {
4092 let tag = change.tag();
4093 let value = change.value();
4094 match tag {
4095 ChangeTag::Equal => {
4096 offset += value.len();
4097 moved_since_edit = true;
4098 }
4099 ChangeTag::Delete => {
4100 let start = snapshot.anchor_after(offset);
4101 let end = snapshot.anchor_before(offset + value.len());
4102 if moved_since_edit {
4103 edits.push((start..end, String::new()));
4104 } else {
4105 edits.last_mut().unwrap().0.end = end;
4106 }
4107 offset += value.len();
4108 moved_since_edit = false;
4109 }
4110 ChangeTag::Insert => {
4111 if moved_since_edit {
4112 let anchor = snapshot.anchor_after(offset);
4113 edits.push((anchor.clone()..anchor, value.to_string()));
4114 } else {
4115 edits.last_mut().unwrap().1.push_str(value);
4116 }
4117 moved_since_edit = false;
4118 }
4119 }
4120 }
4121 } else if range.end == range.start {
4122 let anchor = snapshot.anchor_after(range.start);
4123 edits.push((anchor.clone()..anchor, new_text));
4124 } else {
4125 let edit_start = snapshot.anchor_after(range.start);
4126 let edit_end = snapshot.anchor_before(range.end);
4127 edits.push((edit_start..edit_end, new_text));
4128 }
4129 }
4130
4131 Ok(edits)
4132 })
4133 }
4134
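    // Returns the buffer snapshot that was sent to the language server at the given
    // document version, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN`
    // versions older than the requested one. With no version, the buffer's current
    // snapshot is returned.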
4135 fn buffer_snapshot_for_lsp_version(
4136 &mut self,
4137 buffer: &ModelHandle<Buffer>,
4138 version: Option<i32>,
4139 cx: &AppContext,
4140 ) -> Result<TextBufferSnapshot> {
4141 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4142
4143 if let Some(version) = version {
4144 let buffer_id = buffer.read(cx).remote_id();
4145 let snapshots = self
4146 .buffer_snapshots
4147 .get_mut(&buffer_id)
4148 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4149 let mut found_snapshot = None;
4150 snapshots.retain(|(snapshot_version, snapshot)| {
4151 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4152 false
4153 } else {
4154 if *snapshot_version == version {
4155 found_snapshot = Some(snapshot.clone());
4156 }
4157 true
4158 }
4159 });
4160
4161 found_snapshot.ok_or_else(|| {
4162 anyhow!(
4163 "snapshot not found for buffer {} at version {}",
4164 buffer_id,
4165 version
4166 )
4167 })
4168 } else {
            Ok(buffer.read(cx).text_snapshot())
4170 }
4171 }
4172
4173 fn language_server_for_buffer(
4174 &self,
4175 buffer: &Buffer,
4176 cx: &AppContext,
4177 ) -> Option<&Arc<LanguageServer>> {
4178 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4179 let worktree_id = file.worktree_id(cx);
4180 self.language_servers.get(&(worktree_id, language.name()))
4181 } else {
4182 None
4183 }
4184 }
4185}
4186
4187impl WorktreeHandle {
4188 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4189 match self {
4190 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4191 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4192 }
4193 }
4194}
4195
4196impl OpenBuffer {
4197 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4198 match self {
4199 OpenBuffer::Strong(handle) => Some(handle.clone()),
4200 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4201 OpenBuffer::Loading(_) => None,
4202 }
4203 }
4204}
4205
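// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet`
// interface so that project-wide path matching can run across multiple worktrees.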
4206struct CandidateSet {
4207 snapshot: Snapshot,
4208 include_ignored: bool,
4209 include_root_name: bool,
4210}
4211
4212impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4213 type Candidates = CandidateSetIter<'a>;
4214
4215 fn id(&self) -> usize {
4216 self.snapshot.id().to_usize()
4217 }
4218
4219 fn len(&self) -> usize {
4220 if self.include_ignored {
4221 self.snapshot.file_count()
4222 } else {
4223 self.snapshot.visible_file_count()
4224 }
4225 }
4226
4227 fn prefix(&self) -> Arc<str> {
4228 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4229 self.snapshot.root_name().into()
4230 } else if self.include_root_name {
4231 format!("{}/", self.snapshot.root_name()).into()
4232 } else {
4233 "".into()
4234 }
4235 }
4236
4237 fn candidates(&'a self, start: usize) -> Self::Candidates {
4238 CandidateSetIter {
4239 traversal: self.snapshot.files(self.include_ignored, start),
4240 }
4241 }
4242}
4243
4244struct CandidateSetIter<'a> {
4245 traversal: Traversal<'a>,
4246}
4247
4248impl<'a> Iterator for CandidateSetIter<'a> {
4249 type Item = PathMatchCandidate<'a>;
4250
4251 fn next(&mut self) -> Option<Self::Item> {
4252 self.traversal.next().map(|entry| {
4253 if let EntryKind::File(char_bag) = entry.kind {
4254 PathMatchCandidate {
4255 path: &entry.path,
4256 char_bag,
4257 }
4258 } else {
4259 unreachable!()
4260 }
4261 })
4262 }
4263}
4264
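// When the project entity is released, the server is notified: a registered local
// project is unregistered, and a remote project is left. When the app quits, all
// language servers are asked to shut down before exit.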
4265impl Entity for Project {
4266 type Event = Event;
4267
4268 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4269 match &self.client_state {
4270 ProjectClientState::Local { remote_id_rx, .. } => {
4271 if let Some(project_id) = *remote_id_rx.borrow() {
4272 self.client
4273 .send(proto::UnregisterProject { project_id })
4274 .log_err();
4275 }
4276 }
4277 ProjectClientState::Remote { remote_id, .. } => {
4278 self.client
4279 .send(proto::LeaveProject {
4280 project_id: *remote_id,
4281 })
4282 .log_err();
4283 }
4284 }
4285 }
4286
4287 fn app_will_quit(
4288 &mut self,
4289 _: &mut MutableAppContext,
4290 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4291 let shutdown_futures = self
4292 .language_servers
4293 .drain()
4294 .filter_map(|(_, server)| server.shutdown())
4295 .collect::<Vec<_>>();
4296 Some(
4297 async move {
4298 futures::future::join_all(shutdown_futures).await;
4299 }
4300 .boxed(),
4301 )
4302 }
4303}
4304
4305impl Collaborator {
4306 fn from_proto(
4307 message: proto::Collaborator,
4308 user_store: &ModelHandle<UserStore>,
4309 cx: &mut AsyncAppContext,
4310 ) -> impl Future<Output = Result<Self>> {
4311 let user = user_store.update(cx, |user_store, cx| {
4312 user_store.fetch_user(message.user_id, cx)
4313 });
4314
4315 async move {
4316 Ok(Self {
4317 peer_id: PeerId(message.peer_id),
4318 user: user.await?,
4319 replica_id: message.replica_id as ReplicaId,
4320 })
4321 }
4322 }
4323}
4324
4325impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4326 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4327 Self {
4328 worktree_id,
4329 path: path.as_ref().into(),
4330 }
4331 }
4332}
4333
4334impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4335 fn from(options: lsp::CreateFileOptions) -> Self {
4336 Self {
4337 overwrite: options.overwrite.unwrap_or(false),
4338 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4339 }
4340 }
4341}
4342
4343impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4344 fn from(options: lsp::RenameFileOptions) -> Self {
4345 Self {
4346 overwrite: options.overwrite.unwrap_or(false),
4347 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4348 }
4349 }
4350}
4351
4352impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4353 fn from(options: lsp::DeleteFileOptions) -> Self {
4354 Self {
4355 recursive: options.recursive.unwrap_or(false),
4356 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4357 }
4358 }
4359}
4360
4361fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4362 proto::Symbol {
4363 source_worktree_id: symbol.source_worktree_id.to_proto(),
4364 worktree_id: symbol.worktree_id.to_proto(),
4365 language_name: symbol.language_name.clone(),
4366 name: symbol.name.clone(),
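        // Send the symbol kind as its raw integer representation; it is
        // reinterpreted on the receiving side in `deserialize_symbol`.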
4367 kind: unsafe { mem::transmute(symbol.kind) },
4368 path: symbol.path.to_string_lossy().to_string(),
4369 start: Some(proto::Point {
4370 row: symbol.range.start.row,
4371 column: symbol.range.start.column,
4372 }),
4373 end: Some(proto::Point {
4374 row: symbol.range.end.row,
4375 column: symbol.range.end.column,
4376 }),
4377 signature: symbol.signature.to_vec(),
4378 }
4379}
4380
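// Computes the path of `path` relative to `base`, inserting `..` components where
// the two diverge. For example, relativizing "/a/b/c" against a base of "/a/d"
// yields "../b/c".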
4381fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4382 let mut path_components = path.components();
4383 let mut base_components = base.components();
4384 let mut components: Vec<Component> = Vec::new();
4385 loop {
4386 match (path_components.next(), base_components.next()) {
4387 (None, None) => break,
4388 (Some(a), None) => {
4389 components.push(a);
4390 components.extend(path_components.by_ref());
4391 break;
4392 }
4393 (None, _) => components.push(Component::ParentDir),
4394 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4395 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4396 (Some(a), Some(_)) => {
4397 components.push(Component::ParentDir);
4398 for _ in base_components {
4399 components.push(Component::ParentDir);
4400 }
4401 components.push(a);
4402 components.extend(path_components.by_ref());
4403 break;
4404 }
4405 }
4406 }
4407 components.iter().map(|c| c.as_os_str()).collect()
4408}
4409
4410#[cfg(test)]
4411mod tests {
4412 use super::{Event, *};
4413 use fs::RealFs;
4414 use futures::StreamExt;
4415 use gpui::test::subscribe;
4416 use language::{
4417 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4418 ToPoint,
4419 };
4420 use lsp::Url;
4421 use serde_json::json;
4422 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4423 use unindent::Unindent as _;
4424 use util::test::temp_tree;
4425 use worktree::WorktreeHandle as _;
4426
4427 #[gpui::test]
4428 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4429 let dir = temp_tree(json!({
4430 "root": {
4431 "apple": "",
4432 "banana": {
4433 "carrot": {
4434 "date": "",
4435 "endive": "",
4436 }
4437 },
4438 "fennel": {
4439 "grape": "",
4440 }
4441 }
4442 }));
4443
4444 let root_link_path = dir.path().join("root_link");
4445 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4446 unix::fs::symlink(
4447 &dir.path().join("root/fennel"),
4448 &dir.path().join("root/finnochio"),
4449 )
4450 .unwrap();
4451
4452 let project = Project::test(Arc::new(RealFs), cx);
4453
4454 let (tree, _) = project
4455 .update(cx, |project, cx| {
4456 project.find_or_create_local_worktree(&root_link_path, true, cx)
4457 })
4458 .await
4459 .unwrap();
4460
4461 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4462 .await;
4463 cx.read(|cx| {
4464 let tree = tree.read(cx);
4465 assert_eq!(tree.file_count(), 5);
4466 assert_eq!(
4467 tree.inode_for_path("fennel/grape"),
4468 tree.inode_for_path("finnochio/grape")
4469 );
4470 });
4471
4472 let cancel_flag = Default::default();
4473 let results = project
4474 .read_with(cx, |project, cx| {
4475 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4476 })
4477 .await;
4478 assert_eq!(
4479 results
4480 .into_iter()
4481 .map(|result| result.path)
4482 .collect::<Vec<Arc<Path>>>(),
4483 vec![
4484 PathBuf::from("banana/carrot/date").into(),
4485 PathBuf::from("banana/carrot/endive").into(),
4486 ]
4487 );
4488 }
4489
4490 #[gpui::test]
4491 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4492 cx.foreground().forbid_parking();
4493
4494 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4495 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4496 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4497 completion_provider: Some(lsp::CompletionOptions {
4498 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4499 ..Default::default()
4500 }),
4501 ..Default::default()
4502 });
4503 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4504 completion_provider: Some(lsp::CompletionOptions {
4505 trigger_characters: Some(vec![":".to_string()]),
4506 ..Default::default()
4507 }),
4508 ..Default::default()
4509 });
4510
4511 let rust_language = Arc::new(Language::new(
4512 LanguageConfig {
4513 name: "Rust".into(),
4514 path_suffixes: vec!["rs".to_string()],
4515 language_server: Some(rust_lsp_config),
4516 ..Default::default()
4517 },
4518 Some(tree_sitter_rust::language()),
4519 ));
4520 let json_language = Arc::new(Language::new(
4521 LanguageConfig {
4522 name: "JSON".into(),
4523 path_suffixes: vec!["json".to_string()],
4524 language_server: Some(json_lsp_config),
4525 ..Default::default()
4526 },
4527 None,
4528 ));
4529
4530 let fs = FakeFs::new(cx.background());
4531 fs.insert_tree(
4532 "/the-root",
4533 json!({
4534 "test.rs": "const A: i32 = 1;",
4535 "test2.rs": "",
4536 "Cargo.toml": "a = 1",
4537 "package.json": "{\"a\": 1}",
4538 }),
4539 )
4540 .await;
4541
4542 let project = Project::test(fs, cx);
4543 project.update(cx, |project, _| {
4544 project.languages.add(rust_language);
4545 project.languages.add(json_language);
4546 });
4547
4548 let worktree_id = project
4549 .update(cx, |project, cx| {
4550 project.find_or_create_local_worktree("/the-root", true, cx)
4551 })
4552 .await
4553 .unwrap()
4554 .0
4555 .read_with(cx, |tree, _| tree.id());
4556
4557 // Open a buffer without an associated language server.
4558 let toml_buffer = project
4559 .update(cx, |project, cx| {
4560 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4561 })
4562 .await
4563 .unwrap();
4564
4565 // Open a buffer with an associated language server.
4566 let rust_buffer = project
4567 .update(cx, |project, cx| {
4568 project.open_buffer((worktree_id, "test.rs"), cx)
4569 })
4570 .await
4571 .unwrap();
4572
4573 // A server is started up, and it is notified about Rust files.
4574 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4575 assert_eq!(
4576 fake_rust_server
4577 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4578 .await
4579 .text_document,
4580 lsp::TextDocumentItem {
4581 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4582 version: 0,
4583 text: "const A: i32 = 1;".to_string(),
4584 language_id: Default::default()
4585 }
4586 );
4587
4588 // The buffer is configured based on the language server's capabilities.
4589 rust_buffer.read_with(cx, |buffer, _| {
4590 assert_eq!(
4591 buffer.completion_triggers(),
4592 &[".".to_string(), "::".to_string()]
4593 );
4594 });
4595 toml_buffer.read_with(cx, |buffer, _| {
4596 assert!(buffer.completion_triggers().is_empty());
4597 });
4598
4599 // Edit a buffer. The changes are reported to the language server.
4600 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4601 assert_eq!(
4602 fake_rust_server
4603 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4604 .await
4605 .text_document,
4606 lsp::VersionedTextDocumentIdentifier::new(
4607 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4608 1
4609 )
4610 );
4611
4612 // Open a third buffer with a different associated language server.
4613 let json_buffer = project
4614 .update(cx, |project, cx| {
4615 project.open_buffer((worktree_id, "package.json"), cx)
4616 })
4617 .await
4618 .unwrap();
4619
        // Another language server is started up, and it is notified about the
        // newly opened buffer that uses its language.
4622 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4623 assert_eq!(
4624 fake_json_server
4625 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4626 .await
4627 .text_document,
4628 lsp::TextDocumentItem {
4629 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4630 version: 0,
4631 text: "{\"a\": 1}".to_string(),
4632 language_id: Default::default()
4633 }
4634 );
4635
4636 // This buffer is configured based on the second language server's
4637 // capabilities.
4638 json_buffer.read_with(cx, |buffer, _| {
4639 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4640 });
4641
4642 // When opening another buffer whose language server is already running,
4643 // it is also configured based on the existing language server's capabilities.
4644 let rust_buffer2 = project
4645 .update(cx, |project, cx| {
4646 project.open_buffer((worktree_id, "test2.rs"), cx)
4647 })
4648 .await
4649 .unwrap();
4650 rust_buffer2.read_with(cx, |buffer, _| {
4651 assert_eq!(
4652 buffer.completion_triggers(),
4653 &[".".to_string(), "::".to_string()]
4654 );
4655 });
4656
4657 // Changes are reported only to servers matching the buffer's language.
4658 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4659 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4660 assert_eq!(
4661 fake_rust_server
4662 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4663 .await
4664 .text_document,
4665 lsp::VersionedTextDocumentIdentifier::new(
4666 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4667 1
4668 )
4669 );
4670
4671 // Save notifications are reported to all servers.
4672 toml_buffer
4673 .update(cx, |buffer, cx| buffer.save(cx))
4674 .await
4675 .unwrap();
4676 assert_eq!(
4677 fake_rust_server
4678 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4679 .await
4680 .text_document,
4681 lsp::TextDocumentIdentifier::new(
4682 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4683 )
4684 );
4685 assert_eq!(
4686 fake_json_server
4687 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4688 .await
4689 .text_document,
4690 lsp::TextDocumentIdentifier::new(
4691 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4692 )
4693 );
4694
4695 // Close notifications are reported only to servers matching the buffer's language.
4696 cx.update(|_| drop(json_buffer));
4697 let close_message = lsp::DidCloseTextDocumentParams {
4698 text_document: lsp::TextDocumentIdentifier::new(
4699 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4700 ),
4701 };
4702 assert_eq!(
4703 fake_json_server
4704 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4705 .await,
4706 close_message,
4707 );
4708 }
4709
4710 #[gpui::test]
4711 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4712 cx.foreground().forbid_parking();
4713
4714 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4715 let progress_token = language_server_config
4716 .disk_based_diagnostics_progress_token
4717 .clone()
4718 .unwrap();
4719
4720 let language = Arc::new(Language::new(
4721 LanguageConfig {
4722 name: "Rust".into(),
4723 path_suffixes: vec!["rs".to_string()],
4724 language_server: Some(language_server_config),
4725 ..Default::default()
4726 },
4727 Some(tree_sitter_rust::language()),
4728 ));
4729
4730 let fs = FakeFs::new(cx.background());
4731 fs.insert_tree(
4732 "/dir",
4733 json!({
4734 "a.rs": "fn a() { A }",
4735 "b.rs": "const y: i32 = 1",
4736 }),
4737 )
4738 .await;
4739
4740 let project = Project::test(fs, cx);
4741 project.update(cx, |project, _| project.languages.add(language));
4742
4743 let (tree, _) = project
4744 .update(cx, |project, cx| {
4745 project.find_or_create_local_worktree("/dir", true, cx)
4746 })
4747 .await
4748 .unwrap();
4749 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4750
4751 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4752 .await;
4753
        // Cause the worktree to start the fake language server
4755 let _buffer = project
4756 .update(cx, |project, cx| {
4757 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4758 })
4759 .await
4760 .unwrap();
4761
4762 let mut events = subscribe(&project, cx);
4763
4764 let mut fake_server = fake_servers.next().await.unwrap();
4765 fake_server.start_progress(&progress_token).await;
4766 assert_eq!(
4767 events.next().await.unwrap(),
4768 Event::DiskBasedDiagnosticsStarted
4769 );
4770
4771 fake_server.start_progress(&progress_token).await;
4772 fake_server.end_progress(&progress_token).await;
4773 fake_server.start_progress(&progress_token).await;
4774
4775 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4776 lsp::PublishDiagnosticsParams {
4777 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4778 version: None,
4779 diagnostics: vec![lsp::Diagnostic {
4780 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4781 severity: Some(lsp::DiagnosticSeverity::ERROR),
4782 message: "undefined variable 'A'".to_string(),
4783 ..Default::default()
4784 }],
4785 },
4786 );
4787 assert_eq!(
4788 events.next().await.unwrap(),
4789 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4790 );
4791
4792 fake_server.end_progress(&progress_token).await;
4793 fake_server.end_progress(&progress_token).await;
4794 assert_eq!(
4795 events.next().await.unwrap(),
4796 Event::DiskBasedDiagnosticsUpdated
4797 );
4798 assert_eq!(
4799 events.next().await.unwrap(),
4800 Event::DiskBasedDiagnosticsFinished
4801 );
4802
4803 let buffer = project
4804 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4805 .await
4806 .unwrap();
4807
4808 buffer.read_with(cx, |buffer, _| {
4809 let snapshot = buffer.snapshot();
4810 let diagnostics = snapshot
4811 .diagnostics_in_range::<_, Point>(0..buffer.len())
4812 .collect::<Vec<_>>();
4813 assert_eq!(
4814 diagnostics,
4815 &[DiagnosticEntry {
4816 range: Point::new(0, 9)..Point::new(0, 10),
4817 diagnostic: Diagnostic {
4818 severity: lsp::DiagnosticSeverity::ERROR,
4819 message: "undefined variable 'A'".to_string(),
4820 group_id: 0,
4821 is_primary: true,
4822 ..Default::default()
4823 }
4824 }]
4825 )
4826 });
4827 }
4828
4829 #[gpui::test]
4830 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4831 cx.foreground().forbid_parking();
4832
4833 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4834 lsp_config
4835 .disk_based_diagnostic_sources
4836 .insert("disk".to_string());
4837 let language = Arc::new(Language::new(
4838 LanguageConfig {
4839 name: "Rust".into(),
4840 path_suffixes: vec!["rs".to_string()],
4841 language_server: Some(lsp_config),
4842 ..Default::default()
4843 },
4844 Some(tree_sitter_rust::language()),
4845 ));
4846
4847 let text = "
4848 fn a() { A }
4849 fn b() { BB }
4850 fn c() { CCC }
4851 "
4852 .unindent();
4853
4854 let fs = FakeFs::new(cx.background());
4855 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4856
4857 let project = Project::test(fs, cx);
4858 project.update(cx, |project, _| project.languages.add(language));
4859
4860 let worktree_id = project
4861 .update(cx, |project, cx| {
4862 project.find_or_create_local_worktree("/dir", true, cx)
4863 })
4864 .await
4865 .unwrap()
4866 .0
4867 .read_with(cx, |tree, _| tree.id());
4868
4869 let buffer = project
4870 .update(cx, |project, cx| {
4871 project.open_buffer((worktree_id, "a.rs"), cx)
4872 })
4873 .await
4874 .unwrap();
4875
4876 let mut fake_server = fake_servers.next().await.unwrap();
4877 let open_notification = fake_server
4878 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4879 .await;
4880
4881 // Edit the buffer, moving the content down
4882 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4883 let change_notification_1 = fake_server
4884 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4885 .await;
4886 assert!(
4887 change_notification_1.text_document.version > open_notification.text_document.version
4888 );
4889
4890 // Report some diagnostics for the initial version of the buffer
4891 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4892 lsp::PublishDiagnosticsParams {
4893 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4894 version: Some(open_notification.text_document.version),
4895 diagnostics: vec![
4896 lsp::Diagnostic {
4897 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4898 severity: Some(DiagnosticSeverity::ERROR),
4899 message: "undefined variable 'A'".to_string(),
4900 source: Some("disk".to_string()),
4901 ..Default::default()
4902 },
4903 lsp::Diagnostic {
4904 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4905 severity: Some(DiagnosticSeverity::ERROR),
4906 message: "undefined variable 'BB'".to_string(),
4907 source: Some("disk".to_string()),
4908 ..Default::default()
4909 },
4910 lsp::Diagnostic {
4911 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4912 severity: Some(DiagnosticSeverity::ERROR),
4913 source: Some("disk".to_string()),
4914 message: "undefined variable 'CCC'".to_string(),
4915 ..Default::default()
4916 },
4917 ],
4918 },
4919 );
4920
4921 // The diagnostics have moved down since they were created.
4922 buffer.next_notification(cx).await;
4923 buffer.read_with(cx, |buffer, _| {
4924 assert_eq!(
4925 buffer
4926 .snapshot()
4927 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
4928 .collect::<Vec<_>>(),
4929 &[
4930 DiagnosticEntry {
4931 range: Point::new(3, 9)..Point::new(3, 11),
4932 diagnostic: Diagnostic {
4933 severity: DiagnosticSeverity::ERROR,
4934 message: "undefined variable 'BB'".to_string(),
4935 is_disk_based: true,
4936 group_id: 1,
4937 is_primary: true,
4938 ..Default::default()
4939 },
4940 },
4941 DiagnosticEntry {
4942 range: Point::new(4, 9)..Point::new(4, 12),
4943 diagnostic: Diagnostic {
4944 severity: DiagnosticSeverity::ERROR,
4945 message: "undefined variable 'CCC'".to_string(),
4946 is_disk_based: true,
4947 group_id: 2,
4948 is_primary: true,
4949 ..Default::default()
4950 }
4951 }
4952 ]
4953 );
4954 assert_eq!(
4955 chunks_with_diagnostics(buffer, 0..buffer.len()),
4956 [
4957 ("\n\nfn a() { ".to_string(), None),
4958 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
4959 (" }\nfn b() { ".to_string(), None),
4960 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
4961 (" }\nfn c() { ".to_string(), None),
4962 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
4963 (" }\n".to_string(), None),
4964 ]
4965 );
4966 assert_eq!(
4967 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
4968 [
4969 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
4970 (" }\nfn c() { ".to_string(), None),
4971 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
4972 ]
4973 );
4974 });
4975
4976 // Ensure overlapping diagnostics are highlighted correctly.
4977 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4978 lsp::PublishDiagnosticsParams {
4979 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4980 version: Some(open_notification.text_document.version),
4981 diagnostics: vec![
4982 lsp::Diagnostic {
4983 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4984 severity: Some(DiagnosticSeverity::ERROR),
4985 message: "undefined variable 'A'".to_string(),
4986 source: Some("disk".to_string()),
4987 ..Default::default()
4988 },
4989 lsp::Diagnostic {
4990 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
4991 severity: Some(DiagnosticSeverity::WARNING),
4992 message: "unreachable statement".to_string(),
4993 source: Some("disk".to_string()),
4994 ..Default::default()
4995 },
4996 ],
4997 },
4998 );
4999
5000 buffer.next_notification(cx).await;
5001 buffer.read_with(cx, |buffer, _| {
5002 assert_eq!(
5003 buffer
5004 .snapshot()
5005 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
5006 .collect::<Vec<_>>(),
5007 &[
5008 DiagnosticEntry {
5009 range: Point::new(2, 9)..Point::new(2, 12),
5010 diagnostic: Diagnostic {
5011 severity: DiagnosticSeverity::WARNING,
5012 message: "unreachable statement".to_string(),
5013 is_disk_based: true,
5014 group_id: 1,
5015 is_primary: true,
5016 ..Default::default()
5017 }
5018 },
5019 DiagnosticEntry {
5020 range: Point::new(2, 9)..Point::new(2, 10),
5021 diagnostic: Diagnostic {
5022 severity: DiagnosticSeverity::ERROR,
5023 message: "undefined variable 'A'".to_string(),
5024 is_disk_based: true,
5025 group_id: 0,
5026 is_primary: true,
5027 ..Default::default()
5028 },
5029 }
5030 ]
5031 );
5032 assert_eq!(
5033 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5034 [
5035 ("fn a() { ".to_string(), None),
5036 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5037 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5038 ("\n".to_string(), None),
5039 ]
5040 );
5041 assert_eq!(
5042 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5043 [
5044 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5045 ("\n".to_string(), None),
5046 ]
5047 );
5048 });
5049
        // Keep editing the buffer and ensure disk-based diagnostics get translated
        // forward through the edits made since the version they were reported for.
5052 buffer.update(cx, |buffer, cx| {
5053 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5054 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5055 });
5056 let change_notification_2 =
5057 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5058 assert!(
5059 change_notification_2.await.text_document.version
5060 > change_notification_1.text_document.version
5061 );
5062
5063 // Handle out-of-order diagnostics
5064 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5065 lsp::PublishDiagnosticsParams {
5066 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5067 version: Some(open_notification.text_document.version),
5068 diagnostics: vec![
5069 lsp::Diagnostic {
5070 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5071 severity: Some(DiagnosticSeverity::ERROR),
5072 message: "undefined variable 'BB'".to_string(),
5073 source: Some("disk".to_string()),
5074 ..Default::default()
5075 },
5076 lsp::Diagnostic {
5077 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5078 severity: Some(DiagnosticSeverity::WARNING),
5079 message: "undefined variable 'A'".to_string(),
5080 source: Some("disk".to_string()),
5081 ..Default::default()
5082 },
5083 ],
5084 },
5085 );
5086
5087 buffer.next_notification(cx).await;
5088 buffer.read_with(cx, |buffer, _| {
5089 assert_eq!(
5090 buffer
5091 .snapshot()
5092 .diagnostics_in_range::<_, Point>(0..buffer.len())
5093 .collect::<Vec<_>>(),
5094 &[
5095 DiagnosticEntry {
5096 range: Point::new(2, 21)..Point::new(2, 22),
5097 diagnostic: Diagnostic {
5098 severity: DiagnosticSeverity::WARNING,
5099 message: "undefined variable 'A'".to_string(),
5100 is_disk_based: true,
5101 group_id: 1,
5102 is_primary: true,
5103 ..Default::default()
5104 }
5105 },
5106 DiagnosticEntry {
5107 range: Point::new(3, 9)..Point::new(3, 11),
5108 diagnostic: Diagnostic {
5109 severity: DiagnosticSeverity::ERROR,
5110 message: "undefined variable 'BB'".to_string(),
5111 is_disk_based: true,
5112 group_id: 0,
5113 is_primary: true,
5114 ..Default::default()
5115 },
5116 }
5117 ]
5118 );
5119 });
5120 }
5121
5122 #[gpui::test]
5123 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5124 cx.foreground().forbid_parking();
5125
5126 let text = concat!(
5127 "let one = ;\n", //
5128 "let two = \n",
5129 "let three = 3;\n",
5130 );
5131
5132 let fs = FakeFs::new(cx.background());
5133 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5134
5135 let project = Project::test(fs, cx);
5136 let worktree_id = project
5137 .update(cx, |project, cx| {
5138 project.find_or_create_local_worktree("/dir", true, cx)
5139 })
5140 .await
5141 .unwrap()
5142 .0
5143 .read_with(cx, |tree, _| tree.id());
5144
5145 let buffer = project
5146 .update(cx, |project, cx| {
5147 project.open_buffer((worktree_id, "a.rs"), cx)
5148 })
5149 .await
5150 .unwrap();
5151
5152 project.update(cx, |project, cx| {
5153 project
5154 .update_buffer_diagnostics(
5155 &buffer,
5156 vec![
5157 DiagnosticEntry {
5158 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5159 diagnostic: Diagnostic {
5160 severity: DiagnosticSeverity::ERROR,
5161 message: "syntax error 1".to_string(),
5162 ..Default::default()
5163 },
5164 },
5165 DiagnosticEntry {
5166 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5167 diagnostic: Diagnostic {
5168 severity: DiagnosticSeverity::ERROR,
5169 message: "syntax error 2".to_string(),
5170 ..Default::default()
5171 },
5172 },
5173 ],
5174 None,
5175 cx,
5176 )
5177 .unwrap();
5178 });
5179
5180 // An empty range is extended forward to include the following character.
5181 // At the end of a line, an empty range is extended backward to include
5182 // the preceding character.
5183 buffer.read_with(cx, |buffer, _| {
5184 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5185 assert_eq!(
5186 chunks
5187 .iter()
5188 .map(|(s, d)| (s.as_str(), *d))
5189 .collect::<Vec<_>>(),
5190 &[
5191 ("let one = ", None),
5192 (";", Some(DiagnosticSeverity::ERROR)),
5193 ("\nlet two =", None),
5194 (" ", Some(DiagnosticSeverity::ERROR)),
5195 ("\nlet three = 3;\n", None)
5196 ]
5197 );
5198 });
5199 }
5200
5201 #[gpui::test]
5202 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5203 cx.foreground().forbid_parking();
5204
5205 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5206 let language = Arc::new(Language::new(
5207 LanguageConfig {
5208 name: "Rust".into(),
5209 path_suffixes: vec!["rs".to_string()],
5210 language_server: Some(lsp_config),
5211 ..Default::default()
5212 },
5213 Some(tree_sitter_rust::language()),
5214 ));
5215
5216 let text = "
5217 fn a() {
5218 f1();
5219 }
5220 fn b() {
5221 f2();
5222 }
5223 fn c() {
5224 f3();
5225 }
5226 "
5227 .unindent();
5228
5229 let fs = FakeFs::new(cx.background());
5230 fs.insert_tree(
5231 "/dir",
5232 json!({
5233 "a.rs": text.clone(),
5234 }),
5235 )
5236 .await;
5237
5238 let project = Project::test(fs, cx);
5239 project.update(cx, |project, _| project.languages.add(language));
5240
5241 let worktree_id = project
5242 .update(cx, |project, cx| {
5243 project.find_or_create_local_worktree("/dir", true, cx)
5244 })
5245 .await
5246 .unwrap()
5247 .0
5248 .read_with(cx, |tree, _| tree.id());
5249
5250 let buffer = project
5251 .update(cx, |project, cx| {
5252 project.open_buffer((worktree_id, "a.rs"), cx)
5253 })
5254 .await
5255 .unwrap();
5256
5257 let mut fake_server = fake_servers.next().await.unwrap();
5258 let lsp_document_version = fake_server
5259 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5260 .await
5261 .text_document
5262 .version;
5263
5264 // Simulate editing the buffer after the language server computes some edits.
5265 buffer.update(cx, |buffer, cx| {
5266 buffer.edit(
5267 [Point::new(0, 0)..Point::new(0, 0)],
5268 "// above first function\n",
5269 cx,
5270 );
5271 buffer.edit(
5272 [Point::new(2, 0)..Point::new(2, 0)],
5273 " // inside first function\n",
5274 cx,
5275 );
5276 buffer.edit(
5277 [Point::new(6, 4)..Point::new(6, 4)],
5278 "// inside second function ",
5279 cx,
5280 );
5281
5282 assert_eq!(
5283 buffer.text(),
5284 "
5285 // above first function
5286 fn a() {
5287 // inside first function
5288 f1();
5289 }
5290 fn b() {
5291 // inside second function f2();
5292 }
5293 fn c() {
5294 f3();
5295 }
5296 "
5297 .unindent()
5298 );
5299 });
5300
5301 let edits = project
5302 .update(cx, |project, cx| {
5303 project.edits_from_lsp(
5304 &buffer,
5305 vec![
5306 // replace body of first function
5307 lsp::TextEdit {
5308 range: lsp::Range::new(
5309 lsp::Position::new(0, 0),
5310 lsp::Position::new(3, 0),
5311 ),
5312 new_text: "
5313 fn a() {
5314 f10();
5315 }
5316 "
5317 .unindent(),
5318 },
5319 // edit inside second function
5320 lsp::TextEdit {
5321 range: lsp::Range::new(
5322 lsp::Position::new(4, 6),
5323 lsp::Position::new(4, 6),
5324 ),
5325 new_text: "00".into(),
5326 },
5327 // edit inside third function via two distinct edits
5328 lsp::TextEdit {
5329 range: lsp::Range::new(
5330 lsp::Position::new(7, 5),
5331 lsp::Position::new(7, 5),
5332 ),
5333 new_text: "4000".into(),
5334 },
5335 lsp::TextEdit {
5336 range: lsp::Range::new(
5337 lsp::Position::new(7, 5),
5338 lsp::Position::new(7, 6),
5339 ),
5340 new_text: "".into(),
5341 },
5342 ],
5343 Some(lsp_document_version),
5344 cx,
5345 )
5346 })
5347 .await
5348 .unwrap();
5349
5350 buffer.update(cx, |buffer, cx| {
5351 for (range, new_text) in edits {
5352 buffer.edit([range], new_text, cx);
5353 }
5354 assert_eq!(
5355 buffer.text(),
5356 "
5357 // above first function
5358 fn a() {
5359 // inside first function
5360 f10();
5361 }
5362 fn b() {
5363 // inside second function f200();
5364 }
5365 fn c() {
5366 f4000();
5367 }
5368 "
5369 .unindent()
5370 );
5371 });
5372 }
5373
5374 #[gpui::test]
5375 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5376 cx.foreground().forbid_parking();
5377
5378 let text = "
5379 use a::b;
5380 use a::c;
5381
5382 fn f() {
5383 b();
5384 c();
5385 }
5386 "
5387 .unindent();
5388
5389 let fs = FakeFs::new(cx.background());
5390 fs.insert_tree(
5391 "/dir",
5392 json!({
5393 "a.rs": text.clone(),
5394 }),
5395 )
5396 .await;
5397
5398 let project = Project::test(fs, cx);
5399 let worktree_id = project
5400 .update(cx, |project, cx| {
5401 project.find_or_create_local_worktree("/dir", true, cx)
5402 })
5403 .await
5404 .unwrap()
5405 .0
5406 .read_with(cx, |tree, _| tree.id());
5407
5408 let buffer = project
5409 .update(cx, |project, cx| {
5410 project.open_buffer((worktree_id, "a.rs"), cx)
5411 })
5412 .await
5413 .unwrap();
5414
5415 // Simulate the language server sending us a small edit in the form of a very large diff.
5416 // Rust-analyzer does this when performing a merge-imports code action.
5417 let edits = project
5418 .update(cx, |project, cx| {
5419 project.edits_from_lsp(
5420 &buffer,
5421 [
5422 // Replace the first use statement without editing the semicolon.
5423 lsp::TextEdit {
5424 range: lsp::Range::new(
5425 lsp::Position::new(0, 4),
5426 lsp::Position::new(0, 8),
5427 ),
5428 new_text: "a::{b, c}".into(),
5429 },
5430 // Reinsert the remainder of the file between the semicolon and the final
5431 // newline of the file.
5432 lsp::TextEdit {
5433 range: lsp::Range::new(
5434 lsp::Position::new(0, 9),
5435 lsp::Position::new(0, 9),
5436 ),
5437 new_text: "\n\n".into(),
5438 },
5439 lsp::TextEdit {
5440 range: lsp::Range::new(
5441 lsp::Position::new(0, 9),
5442 lsp::Position::new(0, 9),
5443 ),
5444 new_text: "
5445 fn f() {
5446 b();
5447 c();
5448 }"
5449 .unindent(),
5450 },
5451 // Delete everything after the first newline of the file.
5452 lsp::TextEdit {
5453 range: lsp::Range::new(
5454 lsp::Position::new(1, 0),
5455 lsp::Position::new(7, 0),
5456 ),
5457 new_text: "".into(),
5458 },
5459 ],
5460 None,
5461 cx,
5462 )
5463 })
5464 .await
5465 .unwrap();
5466
5467 buffer.update(cx, |buffer, cx| {
5468 let edits = edits
5469 .into_iter()
5470 .map(|(range, text)| {
5471 (
5472 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5473 text,
5474 )
5475 })
5476 .collect::<Vec<_>>();
5477
5478 assert_eq!(
5479 edits,
5480 [
5481 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5482 (Point::new(1, 0)..Point::new(2, 0), "".into())
5483 ]
5484 );
5485
5486 for (range, new_text) in edits {
5487 buffer.edit([range], new_text, cx);
5488 }
5489 assert_eq!(
5490 buffer.text(),
5491 "
5492 use a::{b, c};
5493
5494 fn f() {
5495 b();
5496 c();
5497 }
5498 "
5499 .unindent()
5500 );
5501 });
5502 }
5503
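    // Collects the buffer's highlighted chunks over `range`, coalescing adjacent
    // chunks that carry the same diagnostic severity into a single string.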
5504 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5505 buffer: &Buffer,
5506 range: Range<T>,
5507 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5508 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5509 for chunk in buffer.snapshot().chunks(range, true) {
5510 if chunks
5511 .last()
5512 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5513 {
5514 chunks.last_mut().unwrap().0.push_str(chunk.text);
5515 } else {
5516 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5517 }
5518 }
5519 chunks
5520 }
5521
5522 #[gpui::test]
5523 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5524 let dir = temp_tree(json!({
5525 "root": {
5526 "dir1": {},
5527 "dir2": {
5528 "dir3": {}
5529 }
5530 }
5531 }));
5532
5533 let project = Project::test(Arc::new(RealFs), cx);
5534 let (tree, _) = project
5535 .update(cx, |project, cx| {
5536 project.find_or_create_local_worktree(&dir.path(), true, cx)
5537 })
5538 .await
5539 .unwrap();
5540
5541 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5542 .await;
5543
5544 let cancel_flag = Default::default();
5545 let results = project
5546 .read_with(cx, |project, cx| {
5547 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5548 })
5549 .await;
5550
5551 assert!(results.is_empty());
5552 }
5553
5554 #[gpui::test]
5555 async fn test_definition(cx: &mut gpui::TestAppContext) {
5556 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5557 let language = Arc::new(Language::new(
5558 LanguageConfig {
5559 name: "Rust".into(),
5560 path_suffixes: vec!["rs".to_string()],
5561 language_server: Some(language_server_config),
5562 ..Default::default()
5563 },
5564 Some(tree_sitter_rust::language()),
5565 ));
5566
5567 let fs = FakeFs::new(cx.background());
5568 fs.insert_tree(
5569 "/dir",
5570 json!({
5571 "a.rs": "const fn a() { A }",
5572 "b.rs": "const y: i32 = crate::a()",
5573 }),
5574 )
5575 .await;
5576
5577 let project = Project::test(fs, cx);
5578 project.update(cx, |project, _| {
5579 Arc::get_mut(&mut project.languages).unwrap().add(language);
5580 });
5581
5582 let (tree, _) = project
5583 .update(cx, |project, cx| {
5584 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5585 })
5586 .await
5587 .unwrap();
5588 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5589 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5590 .await;
5591
5592 let buffer = project
5593 .update(cx, |project, cx| {
5594 project.open_buffer(
5595 ProjectPath {
5596 worktree_id,
5597 path: Path::new("").into(),
5598 },
5599 cx,
5600 )
5601 })
5602 .await
5603 .unwrap();
5604
5605 let mut fake_server = fake_servers.next().await.unwrap();
5606 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5607 let params = params.text_document_position_params;
5608 assert_eq!(
5609 params.text_document.uri.to_file_path().unwrap(),
5610 Path::new("/dir/b.rs"),
5611 );
5612 assert_eq!(params.position, lsp::Position::new(0, 22));
5613
5614 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5615 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5616 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5617 )))
5618 });
5619
5620 let mut definitions = project
5621 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5622 .await
5623 .unwrap();
5624
5625 assert_eq!(definitions.len(), 1);
5626 let definition = definitions.pop().unwrap();
5627 cx.update(|cx| {
5628 let target_buffer = definition.buffer.read(cx);
5629 assert_eq!(
5630 target_buffer
5631 .file()
5632 .unwrap()
5633 .as_local()
5634 .unwrap()
5635 .abs_path(cx),
5636 Path::new("/dir/a.rs"),
5637 );
5638 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5639 assert_eq!(
5640 list_worktrees(&project, cx),
5641 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5642 );
5643
5644 drop(definition);
5645 });
5646 cx.read(|cx| {
5647 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5648 });
5649
5650 fn list_worktrees<'a>(
5651 project: &'a ModelHandle<Project>,
5652 cx: &'a AppContext,
5653 ) -> Vec<(&'a Path, bool)> {
5654 project
5655 .read(cx)
5656 .worktrees(cx)
5657 .map(|worktree| {
5658 let worktree = worktree.read(cx);
5659 (
5660 worktree.as_local().unwrap().abs_path().as_ref(),
5661 worktree.is_visible(),
5662 )
5663 })
5664 .collect::<Vec<_>>()
5665 }
5666 }
5667
5668 #[gpui::test]
5669 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5670 let fs = FakeFs::new(cx.background());
5671 fs.insert_tree(
5672 "/dir",
5673 json!({
5674 "file1": "the old contents",
5675 }),
5676 )
5677 .await;
5678
5679 let project = Project::test(fs.clone(), cx);
5680 let worktree_id = project
5681 .update(cx, |p, cx| {
5682 p.find_or_create_local_worktree("/dir", true, cx)
5683 })
5684 .await
5685 .unwrap()
5686 .0
5687 .read_with(cx, |tree, _| tree.id());
5688
5689 let buffer = project
5690 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5691 .await
5692 .unwrap();
5693 buffer
5694 .update(cx, |buffer, cx| {
5695 assert_eq!(buffer.text(), "the old contents");
5696 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5697 buffer.save(cx)
5698 })
5699 .await
5700 .unwrap();
5701
5702 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5703 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5704 }
5705
5706 #[gpui::test]
5707 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5708 let fs = FakeFs::new(cx.background());
5709 fs.insert_tree(
5710 "/dir",
5711 json!({
5712 "file1": "the old contents",
5713 }),
5714 )
5715 .await;
5716
5717 let project = Project::test(fs.clone(), cx);
5718 let worktree_id = project
5719 .update(cx, |p, cx| {
5720 p.find_or_create_local_worktree("/dir/file1", true, cx)
5721 })
5722 .await
5723 .unwrap()
5724 .0
5725 .read_with(cx, |tree, _| tree.id());
5726
5727 let buffer = project
5728 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5729 .await
5730 .unwrap();
5731 buffer
5732 .update(cx, |buffer, cx| {
5733 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5734 buffer.save(cx)
5735 })
5736 .await
5737 .unwrap();
5738
5739 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5740 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5741 }
5742
5743 #[gpui::test]
5744 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5745 let fs = FakeFs::new(cx.background());
5746 fs.insert_tree("/dir", json!({})).await;
5747
5748 let project = Project::test(fs.clone(), cx);
5749 let (worktree, _) = project
5750 .update(cx, |project, cx| {
5751 project.find_or_create_local_worktree("/dir", true, cx)
5752 })
5753 .await
5754 .unwrap();
5755 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5756
5757 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5758 buffer.update(cx, |buffer, cx| {
5759 buffer.edit([0..0], "abc", cx);
5760 assert!(buffer.is_dirty());
5761 assert!(!buffer.has_conflict());
5762 });
5763 project
5764 .update(cx, |project, cx| {
5765 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5766 })
5767 .await
5768 .unwrap();
5769 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5770 buffer.read_with(cx, |buffer, cx| {
5771 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5772 assert!(!buffer.is_dirty());
5773 assert!(!buffer.has_conflict());
5774 });
5775
5776 let opened_buffer = project
5777 .update(cx, |project, cx| {
5778 project.open_buffer((worktree_id, "file1"), cx)
5779 })
5780 .await
5781 .unwrap();
5782 assert_eq!(opened_buffer, buffer);
5783 }
5784
5785 #[gpui::test(retries = 5)]
5786 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5787 let dir = temp_tree(json!({
5788 "a": {
5789 "file1": "",
5790 "file2": "",
5791 "file3": "",
5792 },
5793 "b": {
5794 "c": {
5795 "file4": "",
5796 "file5": "",
5797 }
5798 }
5799 }));
5800
5801 let project = Project::test(Arc::new(RealFs), cx);
5802 let rpc = project.read_with(cx, |p, _| p.client.clone());
5803
5804 let (tree, _) = project
5805 .update(cx, |p, cx| {
5806 p.find_or_create_local_worktree(dir.path(), true, cx)
5807 })
5808 .await
5809 .unwrap();
5810 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5811
5812 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5813 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5814 async move { buffer.await.unwrap() }
5815 };
5816 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5817 tree.read_with(cx, |tree, _| {
5818 tree.entry_for_path(path)
5819 .expect(&format!("no entry for path {}", path))
5820 .id
5821 })
5822 };
5823
5824 let buffer2 = buffer_for_path("a/file2", cx).await;
5825 let buffer3 = buffer_for_path("a/file3", cx).await;
5826 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5827 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5828
5829 let file2_id = id_for_path("a/file2", &cx);
5830 let file3_id = id_for_path("a/file3", &cx);
5831 let file4_id = id_for_path("b/c/file4", &cx);
5832
5833 // Wait for the initial scan.
5834 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5835 .await;
5836
5837 // Create a remote copy of this worktree.
5838 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5839 let (remote, load_task) = cx.update(|cx| {
5840 Worktree::remote(
5841 1,
5842 1,
5843 initial_snapshot.to_proto(&Default::default(), true),
5844 rpc.clone(),
5845 cx,
5846 )
5847 });
5848 load_task.await;
5849
5850 cx.read(|cx| {
5851 assert!(!buffer2.read(cx).is_dirty());
5852 assert!(!buffer3.read(cx).is_dirty());
5853 assert!(!buffer4.read(cx).is_dirty());
5854 assert!(!buffer5.read(cx).is_dirty());
5855 });
5856
5857 // Rename and delete files and directories.
5858 tree.flush_fs_events(&cx).await;
5859 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5860 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5861 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5862 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5863 tree.flush_fs_events(&cx).await;
5864
5865 let expected_paths = vec![
5866 "a",
5867 "a/file1",
5868 "a/file2.new",
5869 "b",
5870 "d",
5871 "d/file3",
5872 "d/file4",
5873 ];
5874
5875 cx.read(|app| {
5876 assert_eq!(
5877 tree.read(app)
5878 .paths()
5879 .map(|p| p.to_str().unwrap())
5880 .collect::<Vec<_>>(),
5881 expected_paths
5882 );
5883
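            // Entry ids are preserved across renames.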
5884 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5885 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5886 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5887
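            // Open buffers track their files' new paths; the deleted file keeps its old path.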
5888 assert_eq!(
5889 buffer2.read(app).file().unwrap().path().as_ref(),
5890 Path::new("a/file2.new")
5891 );
5892 assert_eq!(
5893 buffer3.read(app).file().unwrap().path().as_ref(),
5894 Path::new("d/file3")
5895 );
5896 assert_eq!(
5897 buffer4.read(app).file().unwrap().path().as_ref(),
5898 Path::new("d/file4")
5899 );
5900 assert_eq!(
5901 buffer5.read(app).file().unwrap().path().as_ref(),
5902 Path::new("b/c/file5")
5903 );
5904
5905 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5906 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5907 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5908 assert!(buffer5.read(app).file().unwrap().is_deleted());
5909 });
5910
5911 // Update the remote worktree. Check that it becomes consistent with the
5912 // local worktree.
5913 remote.update(cx, |remote, cx| {
5914 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5915 &initial_snapshot,
5916 1,
5917 1,
5918 true,
5919 );
5920 remote
5921 .as_remote_mut()
5922 .unwrap()
5923 .snapshot
5924 .apply_remote_update(update_message)
5925 .unwrap();
5926
5927 assert_eq!(
5928 remote
5929 .paths()
5930 .map(|p| p.to_str().unwrap())
5931 .collect::<Vec<_>>(),
5932 expected_paths
5933 );
5934 });
5935 }
5936
5937 #[gpui::test]
5938 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5939 let fs = FakeFs::new(cx.background());
5940 fs.insert_tree(
5941 "/the-dir",
5942 json!({
5943 "a.txt": "a-contents",
5944 "b.txt": "b-contents",
5945 }),
5946 )
5947 .await;
5948
5949 let project = Project::test(fs.clone(), cx);
5950 let worktree_id = project
5951 .update(cx, |p, cx| {
5952 p.find_or_create_local_worktree("/the-dir", true, cx)
5953 })
5954 .await
5955 .unwrap()
5956 .0
5957 .read_with(cx, |tree, _| tree.id());
5958
5959 // Spawn multiple tasks to open paths, repeating some paths.
5960 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5961 (
5962 p.open_buffer((worktree_id, "a.txt"), cx),
5963 p.open_buffer((worktree_id, "b.txt"), cx),
5964 p.open_buffer((worktree_id, "a.txt"), cx),
5965 )
5966 });
5967
5968 let buffer_a_1 = buffer_a_1.await.unwrap();
5969 let buffer_a_2 = buffer_a_2.await.unwrap();
5970 let buffer_b = buffer_b.await.unwrap();
5971 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
5972 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
5973
5974 // There is only one buffer per path.
5975 let buffer_a_id = buffer_a_1.id();
5976 assert_eq!(buffer_a_2.id(), buffer_a_id);
5977
5978 // Open the same path again while it is still open.
5979 drop(buffer_a_1);
5980 let buffer_a_3 = project
5981 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
5982 .await
5983 .unwrap();
5984
5985 // There's still only one buffer per path.
5986 assert_eq!(buffer_a_3.id(), buffer_a_id);
5987 }
5988
5989 #[gpui::test]
5990 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5991 use std::fs;
5992
5993 let dir = temp_tree(json!({
5994 "file1": "abc",
5995 "file2": "def",
5996 "file3": "ghi",
5997 }));
5998
5999 let project = Project::test(Arc::new(RealFs), cx);
6000 let (worktree, _) = project
6001 .update(cx, |p, cx| {
6002 p.find_or_create_local_worktree(dir.path(), true, cx)
6003 })
6004 .await
6005 .unwrap();
6006 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6007
6008 worktree.flush_fs_events(&cx).await;
6009 worktree
6010 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6011 .await;
6012
6013 let buffer1 = project
6014 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6015 .await
6016 .unwrap();
6017 let events = Rc::new(RefCell::new(Vec::new()));
6018
6019 // initially, the buffer isn't dirty.
6020 buffer1.update(cx, |buffer, cx| {
6021 cx.subscribe(&buffer1, {
6022 let events = events.clone();
6023 move |_, _, event, _| match event {
6024 BufferEvent::Operation(_) => {}
6025 _ => events.borrow_mut().push(event.clone()),
6026 }
6027 })
6028 .detach();
6029
6030 assert!(!buffer.is_dirty());
6031 assert!(events.borrow().is_empty());
6032
6033 buffer.edit(vec![1..2], "", cx);
6034 });
6035
6036 // after the first edit, the buffer is dirty, and emits a dirtied event.
6037 buffer1.update(cx, |buffer, cx| {
6038 assert!(buffer.text() == "ac");
6039 assert!(buffer.is_dirty());
6040 assert_eq!(
6041 *events.borrow(),
6042 &[language::Event::Edited, language::Event::Dirtied]
6043 );
6044 events.borrow_mut().clear();
6045 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6046 });
6047
6048 // after saving, the buffer is not dirty, and emits a saved event.
6049 buffer1.update(cx, |buffer, cx| {
6050 assert!(!buffer.is_dirty());
6051 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6052 events.borrow_mut().clear();
6053
6054 buffer.edit(vec![1..1], "B", cx);
6055 buffer.edit(vec![2..2], "D", cx);
6056 });
6057
6058 // after editing again, the buffer is dirty, and emits another dirty event.
6059 buffer1.update(cx, |buffer, cx| {
6060 assert!(buffer.text() == "aBDc");
6061 assert!(buffer.is_dirty());
6062 assert_eq!(
6063 *events.borrow(),
6064 &[
6065 language::Event::Edited,
6066 language::Event::Dirtied,
6067 language::Event::Edited,
6068 ],
6069 );
6070 events.borrow_mut().clear();
6071
6072 // TODO - currently, after restoring the buffer to its
6073 // previously-saved state, the buffer is still considered dirty.
6074 buffer.edit([1..3], "", cx);
6075 assert!(buffer.text() == "ac");
6076 assert!(buffer.is_dirty());
6077 });
6078
6079 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6080
6081 // When a file is deleted, the buffer is considered dirty.
6082 let events = Rc::new(RefCell::new(Vec::new()));
6083 let buffer2 = project
6084 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6085 .await
6086 .unwrap();
6087 buffer2.update(cx, |_, cx| {
6088 cx.subscribe(&buffer2, {
6089 let events = events.clone();
6090 move |_, _, event, _| events.borrow_mut().push(event.clone())
6091 })
6092 .detach();
6093 });
6094
6095 fs::remove_file(dir.path().join("file2")).unwrap();
6096 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6097 assert_eq!(
6098 *events.borrow(),
6099 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6100 );
6101
6102 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6103 let events = Rc::new(RefCell::new(Vec::new()));
6104 let buffer3 = project
6105 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6106 .await
6107 .unwrap();
6108 buffer3.update(cx, |_, cx| {
6109 cx.subscribe(&buffer3, {
6110 let events = events.clone();
6111 move |_, _, event, _| events.borrow_mut().push(event.clone())
6112 })
6113 .detach();
6114 });
6115
6116 worktree.flush_fs_events(&cx).await;
6117 buffer3.update(cx, |buffer, cx| {
6118 buffer.edit(Some(0..0), "x", cx);
6119 });
6120 events.borrow_mut().clear();
6121 fs::remove_file(dir.path().join("file3")).unwrap();
6122 buffer3
6123 .condition(&cx, |_, _| !events.borrow().is_empty())
6124 .await;
6125 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6126 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6127 }
6128
6129 #[gpui::test]
6130 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6131 use std::fs;
6132
6133 let initial_contents = "aaa\nbbbbb\nc\n";
6134 let dir = temp_tree(json!({ "the-file": initial_contents }));
6135
6136 let project = Project::test(Arc::new(RealFs), cx);
6137 let (worktree, _) = project
6138 .update(cx, |p, cx| {
6139 p.find_or_create_local_worktree(dir.path(), true, cx)
6140 })
6141 .await
6142 .unwrap();
6143 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6144
6145 worktree
6146 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6147 .await;
6148
6149 let abs_path = dir.path().join("the-file");
6150 let buffer = project
6151 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6152 .await
6153 .unwrap();
6154
6155 // TODO
6156 // Add a cursor on each row.
6157 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6158 // assert!(!buffer.is_dirty());
6159 // buffer.add_selection_set(
6160 // &(0..3)
6161 // .map(|row| Selection {
6162 // id: row as usize,
6163 // start: Point::new(row, 1),
6164 // end: Point::new(row, 1),
6165 // reversed: false,
6166 // goal: SelectionGoal::None,
6167 // })
6168 // .collect::<Vec<_>>(),
6169 // cx,
6170 // )
6171 // });
6172
6173 // Change the file on disk, adding two new lines of text, and removing
6174 // one line.
6175 buffer.read_with(cx, |buffer, _| {
6176 assert!(!buffer.is_dirty());
6177 assert!(!buffer.has_conflict());
6178 });
6179 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6180 fs::write(&abs_path, new_contents).unwrap();
6181
6182 // Because the buffer was not modified, it is reloaded from disk. Its
6183 // contents are edited according to the diff between the old and new
6184 // file contents.
6185 buffer
6186 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6187 .await;
6188
6189 buffer.update(cx, |buffer, _| {
6190 assert_eq!(buffer.text(), new_contents);
6191 assert!(!buffer.is_dirty());
6192 assert!(!buffer.has_conflict());
6193
6194 // TODO
6195 // let cursor_positions = buffer
6196 // .selection_set(selection_set_id)
6197 // .unwrap()
6198 // .selections::<Point>(&*buffer)
6199 // .map(|selection| {
6200 // assert_eq!(selection.start, selection.end);
6201 // selection.start
6202 // })
6203 // .collect::<Vec<_>>();
6204 // assert_eq!(
6205 // cursor_positions,
6206 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6207 // );
6208 });
6209
6210 // Modify the buffer
6211 buffer.update(cx, |buffer, cx| {
6212 buffer.edit(vec![0..0], " ", cx);
6213 assert!(buffer.is_dirty());
6214 assert!(!buffer.has_conflict());
6215 });
6216
6217 // Change the file on disk again, adding blank lines to the beginning.
6218 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6219
6220 // Because the buffer is modified, it doesn't reload from disk, but is
6221 // marked as having a conflict.
6222 buffer
6223 .condition(&cx, |buffer, _| buffer.has_conflict())
6224 .await;
6225 }
6226
6227 #[gpui::test]
6228 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6229 cx.foreground().forbid_parking();
6230
6231 let fs = FakeFs::new(cx.background());
6232 fs.insert_tree(
6233 "/the-dir",
6234 json!({
6235 "a.rs": "
6236 fn foo(mut v: Vec<usize>) {
6237 for x in &v {
6238 v.push(1);
6239 }
6240 }
6241 "
6242 .unindent(),
6243 }),
6244 )
6245 .await;
6246
6247 let project = Project::test(fs.clone(), cx);
6248 let (worktree, _) = project
6249 .update(cx, |p, cx| {
6250 p.find_or_create_local_worktree("/the-dir", true, cx)
6251 })
6252 .await
6253 .unwrap();
6254 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6255
6256 let buffer = project
6257 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6258 .await
6259 .unwrap();
6260
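        // Simulate an LSP diagnostics notification in which hint diagnostics and their primary errors reference each other via related information.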
6261 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6262 let message = lsp::PublishDiagnosticsParams {
6263 uri: buffer_uri.clone(),
6264 diagnostics: vec![
6265 lsp::Diagnostic {
6266 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6267 severity: Some(DiagnosticSeverity::WARNING),
6268 message: "error 1".to_string(),
6269 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6270 location: lsp::Location {
6271 uri: buffer_uri.clone(),
6272 range: lsp::Range::new(
6273 lsp::Position::new(1, 8),
6274 lsp::Position::new(1, 9),
6275 ),
6276 },
6277 message: "error 1 hint 1".to_string(),
6278 }]),
6279 ..Default::default()
6280 },
6281 lsp::Diagnostic {
6282 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6283 severity: Some(DiagnosticSeverity::HINT),
6284 message: "error 1 hint 1".to_string(),
6285 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6286 location: lsp::Location {
6287 uri: buffer_uri.clone(),
6288 range: lsp::Range::new(
6289 lsp::Position::new(1, 8),
6290 lsp::Position::new(1, 9),
6291 ),
6292 },
6293 message: "original diagnostic".to_string(),
6294 }]),
6295 ..Default::default()
6296 },
6297 lsp::Diagnostic {
6298 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6299 severity: Some(DiagnosticSeverity::ERROR),
6300 message: "error 2".to_string(),
6301 related_information: Some(vec![
6302 lsp::DiagnosticRelatedInformation {
6303 location: lsp::Location {
6304 uri: buffer_uri.clone(),
6305 range: lsp::Range::new(
6306 lsp::Position::new(1, 13),
6307 lsp::Position::new(1, 15),
6308 ),
6309 },
6310 message: "error 2 hint 1".to_string(),
6311 },
6312 lsp::DiagnosticRelatedInformation {
6313 location: lsp::Location {
6314 uri: buffer_uri.clone(),
6315 range: lsp::Range::new(
6316 lsp::Position::new(1, 13),
6317 lsp::Position::new(1, 15),
6318 ),
6319 },
6320 message: "error 2 hint 2".to_string(),
6321 },
6322 ]),
6323 ..Default::default()
6324 },
6325 lsp::Diagnostic {
6326 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6327 severity: Some(DiagnosticSeverity::HINT),
6328 message: "error 2 hint 1".to_string(),
6329 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6330 location: lsp::Location {
6331 uri: buffer_uri.clone(),
6332 range: lsp::Range::new(
6333 lsp::Position::new(2, 8),
6334 lsp::Position::new(2, 17),
6335 ),
6336 },
6337 message: "original diagnostic".to_string(),
6338 }]),
6339 ..Default::default()
6340 },
6341 lsp::Diagnostic {
6342 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6343 severity: Some(DiagnosticSeverity::HINT),
6344 message: "error 2 hint 2".to_string(),
6345 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6346 location: lsp::Location {
6347 uri: buffer_uri.clone(),
6348 range: lsp::Range::new(
6349 lsp::Position::new(2, 8),
6350 lsp::Position::new(2, 17),
6351 ),
6352 },
6353 message: "original diagnostic".to_string(),
6354 }]),
6355 ..Default::default()
6356 },
6357 ],
6358 version: None,
6359 };
6360
6361 project
6362 .update(cx, |p, cx| {
6363 p.update_diagnostics(message, &Default::default(), cx)
6364 })
6365 .unwrap();
6366 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6367
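        // Related diagnostics are grouped: each hint shares a group id with the primary diagnostic it belongs to.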
6368 assert_eq!(
6369 buffer
6370 .diagnostics_in_range::<_, Point>(0..buffer.len())
6371 .collect::<Vec<_>>(),
6372 &[
6373 DiagnosticEntry {
6374 range: Point::new(1, 8)..Point::new(1, 9),
6375 diagnostic: Diagnostic {
6376 severity: DiagnosticSeverity::WARNING,
6377 message: "error 1".to_string(),
6378 group_id: 0,
6379 is_primary: true,
6380 ..Default::default()
6381 }
6382 },
6383 DiagnosticEntry {
6384 range: Point::new(1, 8)..Point::new(1, 9),
6385 diagnostic: Diagnostic {
6386 severity: DiagnosticSeverity::HINT,
6387 message: "error 1 hint 1".to_string(),
6388 group_id: 0,
6389 is_primary: false,
6390 ..Default::default()
6391 }
6392 },
6393 DiagnosticEntry {
6394 range: Point::new(1, 13)..Point::new(1, 15),
6395 diagnostic: Diagnostic {
6396 severity: DiagnosticSeverity::HINT,
6397 message: "error 2 hint 1".to_string(),
6398 group_id: 1,
6399 is_primary: false,
6400 ..Default::default()
6401 }
6402 },
6403 DiagnosticEntry {
6404 range: Point::new(1, 13)..Point::new(1, 15),
6405 diagnostic: Diagnostic {
6406 severity: DiagnosticSeverity::HINT,
6407 message: "error 2 hint 2".to_string(),
6408 group_id: 1,
6409 is_primary: false,
6410 ..Default::default()
6411 }
6412 },
6413 DiagnosticEntry {
6414 range: Point::new(2, 8)..Point::new(2, 17),
6415 diagnostic: Diagnostic {
6416 severity: DiagnosticSeverity::ERROR,
6417 message: "error 2".to_string(),
6418 group_id: 1,
6419 is_primary: true,
6420 ..Default::default()
6421 }
6422 }
6423 ]
6424 );
6425
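        // Each group can also be fetched on its own by group id.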
6426 assert_eq!(
6427 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6428 &[
6429 DiagnosticEntry {
6430 range: Point::new(1, 8)..Point::new(1, 9),
6431 diagnostic: Diagnostic {
6432 severity: DiagnosticSeverity::WARNING,
6433 message: "error 1".to_string(),
6434 group_id: 0,
6435 is_primary: true,
6436 ..Default::default()
6437 }
6438 },
6439 DiagnosticEntry {
6440 range: Point::new(1, 8)..Point::new(1, 9),
6441 diagnostic: Diagnostic {
6442 severity: DiagnosticSeverity::HINT,
6443 message: "error 1 hint 1".to_string(),
6444 group_id: 0,
6445 is_primary: false,
6446 ..Default::default()
6447 }
6448 },
6449 ]
6450 );
6451 assert_eq!(
6452 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6453 &[
6454 DiagnosticEntry {
6455 range: Point::new(1, 13)..Point::new(1, 15),
6456 diagnostic: Diagnostic {
6457 severity: DiagnosticSeverity::HINT,
6458 message: "error 2 hint 1".to_string(),
6459 group_id: 1,
6460 is_primary: false,
6461 ..Default::default()
6462 }
6463 },
6464 DiagnosticEntry {
6465 range: Point::new(1, 13)..Point::new(1, 15),
6466 diagnostic: Diagnostic {
6467 severity: DiagnosticSeverity::HINT,
6468 message: "error 2 hint 2".to_string(),
6469 group_id: 1,
6470 is_primary: false,
6471 ..Default::default()
6472 }
6473 },
6474 DiagnosticEntry {
6475 range: Point::new(2, 8)..Point::new(2, 17),
6476 diagnostic: Diagnostic {
6477 severity: DiagnosticSeverity::ERROR,
6478 message: "error 2".to_string(),
6479 group_id: 1,
6480 is_primary: true,
6481 ..Default::default()
6482 }
6483 }
6484 ]
6485 );
6486 }
6487
6488 #[gpui::test]
6489 async fn test_rename(cx: &mut gpui::TestAppContext) {
6490 cx.foreground().forbid_parking();
6491
6492 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6493 let language = Arc::new(Language::new(
6494 LanguageConfig {
6495 name: "Rust".into(),
6496 path_suffixes: vec!["rs".to_string()],
6497 language_server: Some(language_server_config),
6498 ..Default::default()
6499 },
6500 Some(tree_sitter_rust::language()),
6501 ));
6502
6503 let fs = FakeFs::new(cx.background());
6504 fs.insert_tree(
6505 "/dir",
6506 json!({
6507 "one.rs": "const ONE: usize = 1;",
6508 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6509 }),
6510 )
6511 .await;
6512
6513 let project = Project::test(fs.clone(), cx);
6514 project.update(cx, |project, _| {
6515 Arc::get_mut(&mut project.languages).unwrap().add(language);
6516 });
6517
6518 let (tree, _) = project
6519 .update(cx, |project, cx| {
6520 project.find_or_create_local_worktree("/dir", true, cx)
6521 })
6522 .await
6523 .unwrap();
6524 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6525 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6526 .await;
6527
6528 let buffer = project
6529 .update(cx, |project, cx| {
6530 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6531 })
6532 .await
6533 .unwrap();
6534
6535 let mut fake_server = fake_servers.next().await.unwrap();
6536
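        // Request a prepared rename at an offset inside `ONE`; the fake server responds with the renameable range.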
6537 let response = project.update(cx, |project, cx| {
6538 project.prepare_rename(buffer.clone(), 7, cx)
6539 });
6540 fake_server
6541 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6542 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6543 assert_eq!(params.position, lsp::Position::new(0, 7));
6544 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6545 lsp::Position::new(0, 6),
6546 lsp::Position::new(0, 9),
6547 )))
6548 })
6549 .next()
6550 .await
6551 .unwrap();
6552 let range = response.await.unwrap().unwrap();
6553 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6554 assert_eq!(range, 6..9);
6555
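        // Perform the rename. The fake server's workspace edit touches both files, so the resulting project transaction contains two buffers.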
6556 let response = project.update(cx, |project, cx| {
6557 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6558 });
6559 fake_server
6560 .handle_request::<lsp::request::Rename, _>(|params, _| {
6561 assert_eq!(
6562 params.text_document_position.text_document.uri.as_str(),
6563 "file:///dir/one.rs"
6564 );
6565 assert_eq!(
6566 params.text_document_position.position,
6567 lsp::Position::new(0, 7)
6568 );
6569 assert_eq!(params.new_name, "THREE");
6570 Some(lsp::WorkspaceEdit {
6571 changes: Some(
6572 [
6573 (
6574 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6575 vec![lsp::TextEdit::new(
6576 lsp::Range::new(
6577 lsp::Position::new(0, 6),
6578 lsp::Position::new(0, 9),
6579 ),
6580 "THREE".to_string(),
6581 )],
6582 ),
6583 (
6584 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6585 vec![
6586 lsp::TextEdit::new(
6587 lsp::Range::new(
6588 lsp::Position::new(0, 24),
6589 lsp::Position::new(0, 27),
6590 ),
6591 "THREE".to_string(),
6592 ),
6593 lsp::TextEdit::new(
6594 lsp::Range::new(
6595 lsp::Position::new(0, 35),
6596 lsp::Position::new(0, 38),
6597 ),
6598 "THREE".to_string(),
6599 ),
6600 ],
6601 ),
6602 ]
6603 .into_iter()
6604 .collect(),
6605 ),
6606 ..Default::default()
6607 })
6608 })
6609 .next()
6610 .await
6611 .unwrap();
6612 let mut transaction = response.await.unwrap().0;
6613 assert_eq!(transaction.len(), 2);
6614 assert_eq!(
6615 transaction
6616 .remove_entry(&buffer)
6617 .unwrap()
6618 .0
6619 .read_with(cx, |buffer, _| buffer.text()),
6620 "const THREE: usize = 1;"
6621 );
6622 assert_eq!(
6623 transaction
6624 .into_keys()
6625 .next()
6626 .unwrap()
6627 .read_with(cx, |buffer, _| buffer.text()),
6628 "const TWO: usize = one::THREE + one::THREE;"
6629 );
6630 }
6631
6632 #[gpui::test]
6633 async fn test_search(cx: &mut gpui::TestAppContext) {
6634 let fs = FakeFs::new(cx.background());
6635 fs.insert_tree(
6636 "/dir",
6637 json!({
6638 "one.rs": "const ONE: usize = 1;",
6639 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6640 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6641 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6642 }),
6643 )
6644 .await;
6645 let project = Project::test(fs.clone(), cx);
6646 let (tree, _) = project
6647 .update(cx, |project, cx| {
6648 project.find_or_create_local_worktree("/dir", true, cx)
6649 })
6650 .await
6651 .unwrap();
6652 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6653 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6654 .await;
6655
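        // Searching for `TWO` initially matches the two files that contain it on disk.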
6656 assert_eq!(
6657 search(&project, SearchQuery::text("TWO", false, true), cx)
6658 .await
6659 .unwrap(),
6660 HashMap::from_iter([
6661 ("two.rs".to_string(), vec![6..9]),
6662 ("three.rs".to_string(), vec![37..40])
6663 ])
6664 );
6665
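        // Edit an open buffer without saving; subsequent searches should reflect the buffer's in-memory contents rather than the file on disk.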
6666 let buffer_4 = project
6667 .update(cx, |project, cx| {
6668 project.open_buffer((worktree_id, "four.rs"), cx)
6669 })
6670 .await
6671 .unwrap();
6672 buffer_4.update(cx, |buffer, cx| {
6673 buffer.edit([20..28, 31..43], "two::TWO", cx);
6674 });
6675
6676 assert_eq!(
6677 search(&project, SearchQuery::text("TWO", false, true), cx)
6678 .await
6679 .unwrap(),
6680 HashMap::from_iter([
6681 ("two.rs".to_string(), vec![6..9]),
6682 ("three.rs".to_string(), vec![37..40]),
6683 ("four.rs".to_string(), vec![25..28, 36..39])
6684 ])
6685 );
6686
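        // Helper that runs a project-wide search and flattens the results into a map from file path to match offset ranges.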
6687 async fn search(
6688 project: &ModelHandle<Project>,
6689 query: SearchQuery,
6690 cx: &mut gpui::TestAppContext,
6691 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6692 let results = project
6693 .update(cx, |project, cx| project.search(query, cx))
6694 .await?;
6695
6696 Ok(results
6697 .into_iter()
6698 .map(|(buffer, ranges)| {
6699 buffer.read_with(cx, |buffer, _| {
6700 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6701 let ranges = ranges
6702 .into_iter()
6703 .map(|range| range.to_offset(buffer))
6704 .collect::<Vec<_>>();
6705 (path, ranges)
6706 })
6707 })
6708 .collect())
6709 }
6710 }
6711}