1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
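/// Core state for a project: its worktrees, open buffers, collaborators, and the
/// language servers started for each (worktree, language) pair, in either the local
/// (host) or remote (guest) role.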
52pub struct Project {
53 worktrees: Vec<WorktreeHandle>,
54 active_entry: Option<ProjectEntry>,
55 languages: Arc<LanguageRegistry>,
56 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
57 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
58 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
59 language_server_settings: Arc<Mutex<serde_json::Value>>,
60 next_language_server_id: usize,
61 client: Arc<client::Client>,
62 next_entry_id: Arc<AtomicUsize>,
63 user_store: ModelHandle<UserStore>,
64 fs: Arc<dyn Fs>,
65 client_state: ProjectClientState,
66 collaborators: HashMap<PeerId, Collaborator>,
67 subscriptions: Vec<client::Subscription>,
68 language_servers_with_diagnostics_running: isize,
69 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
70 shared_buffers: HashMap<PeerId, HashSet<u64>>,
71 loading_buffers: HashMap<
72 ProjectPath,
73 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
74 >,
75 loading_local_worktrees:
76 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
77 opened_buffers: HashMap<u64, OpenBuffer>,
78 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
79 nonce: u128,
80}
81
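/// How the project retains an open buffer: strongly while the project is shared or
/// remote (so the buffer stays alive for collaborators), weakly otherwise, or as a
/// queue of operations that arrived before the buffer finished loading.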
82enum OpenBuffer {
83 Strong(ModelHandle<Buffer>),
84 Weak(WeakModelHandle<Buffer>),
85 Loading(Vec<Operation>),
86}
87
88enum WorktreeHandle {
89 Strong(ModelHandle<Worktree>),
90 Weak(WeakModelHandle<Worktree>),
91}
92
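/// Whether this replica is the local host of the project or a remote guest that
/// joined it over RPC.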
93enum ProjectClientState {
94 Local {
95 is_shared: bool,
96 remote_id_tx: watch::Sender<Option<u64>>,
97 remote_id_rx: watch::Receiver<Option<u64>>,
98 _maintain_remote_id_task: Task<Option<()>>,
99 },
100 Remote {
101 sharing_has_stopped: bool,
102 remote_id: u64,
103 replica_id: ReplicaId,
104 _detect_unshare_task: Task<Option<()>>,
105 },
106}
107
108#[derive(Clone, Debug)]
109pub struct Collaborator {
110 pub user: Arc<User>,
111 pub peer_id: PeerId,
112 pub replica_id: ReplicaId,
113}
114
115#[derive(Clone, Debug, PartialEq)]
116pub enum Event {
117 ActiveEntryChanged(Option<ProjectEntry>),
118 WorktreeRemoved(WorktreeId),
119 DiskBasedDiagnosticsStarted,
120 DiskBasedDiagnosticsUpdated,
121 DiskBasedDiagnosticsFinished,
122 DiagnosticsUpdated(ProjectPath),
123}
124
125enum LanguageServerEvent {
126 WorkStart {
127 token: String,
128 },
129 WorkProgress {
130 token: String,
131 progress: LanguageServerProgress,
132 },
133 WorkEnd {
134 token: String,
135 },
136 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
137}
138
139pub struct LanguageServerStatus {
140 pub name: String,
141 pub pending_work: BTreeMap<String, LanguageServerProgress>,
142 pending_diagnostic_updates: isize,
143}
144
145#[derive(Clone, Debug)]
146pub struct LanguageServerProgress {
147 pub message: Option<String>,
148 pub percentage: Option<usize>,
149 pub last_update_at: Instant,
150}
151
152#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
153pub struct ProjectPath {
154 pub worktree_id: WorktreeId,
155 pub path: Arc<Path>,
156}
157
158#[derive(Clone, Debug, Default, PartialEq)]
159pub struct DiagnosticSummary {
160 pub error_count: usize,
161 pub warning_count: usize,
162 pub info_count: usize,
163 pub hint_count: usize,
164}
165
166#[derive(Debug)]
167pub struct Location {
168 pub buffer: ModelHandle<Buffer>,
169 pub range: Range<language::Anchor>,
170}
171
172#[derive(Debug)]
173pub struct DocumentHighlight {
174 pub range: Range<language::Anchor>,
175 pub kind: DocumentHighlightKind,
176}
177
178#[derive(Clone, Debug)]
179pub struct Symbol {
180 pub source_worktree_id: WorktreeId,
181 pub worktree_id: WorktreeId,
182 pub language_name: String,
183 pub path: PathBuf,
184 pub label: CodeLabel,
185 pub name: String,
186 pub kind: lsp::SymbolKind,
187 pub range: Range<PointUtf16>,
188 pub signature: [u8; 32],
189}
190
191#[derive(Default)]
192pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
193
194impl DiagnosticSummary {
195 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
196 let mut this = Self {
197 error_count: 0,
198 warning_count: 0,
199 info_count: 0,
200 hint_count: 0,
201 };
202
203 for entry in diagnostics {
204 if entry.diagnostic.is_primary {
205 match entry.diagnostic.severity {
206 DiagnosticSeverity::ERROR => this.error_count += 1,
207 DiagnosticSeverity::WARNING => this.warning_count += 1,
208 DiagnosticSeverity::INFORMATION => this.info_count += 1,
209 DiagnosticSeverity::HINT => this.hint_count += 1,
210 _ => {}
211 }
212 }
213 }
214
215 this
216 }
217
218 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
219 proto::DiagnosticSummary {
220 path: path.to_string_lossy().to_string(),
221 error_count: self.error_count as u32,
222 warning_count: self.warning_count as u32,
223 info_count: self.info_count as u32,
224 hint_count: self.hint_count as u32,
225 }
226 }
227}
228
229#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
230pub struct ProjectEntry {
231 pub worktree_id: WorktreeId,
232 pub entry_id: usize,
233}
234
235impl Project {
236 pub fn init(client: &Arc<Client>) {
237 client.add_entity_message_handler(Self::handle_add_collaborator);
238 client.add_entity_message_handler(Self::handle_buffer_reloaded);
239 client.add_entity_message_handler(Self::handle_buffer_saved);
240 client.add_entity_message_handler(Self::handle_start_language_server);
241 client.add_entity_message_handler(Self::handle_update_language_server);
242 client.add_entity_message_handler(Self::handle_remove_collaborator);
243 client.add_entity_message_handler(Self::handle_register_worktree);
244 client.add_entity_message_handler(Self::handle_unregister_worktree);
245 client.add_entity_message_handler(Self::handle_unshare_project);
246 client.add_entity_message_handler(Self::handle_update_buffer_file);
247 client.add_entity_message_handler(Self::handle_update_buffer);
248 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
249 client.add_entity_message_handler(Self::handle_update_worktree);
250 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
251 client.add_entity_request_handler(Self::handle_apply_code_action);
252 client.add_entity_request_handler(Self::handle_format_buffers);
253 client.add_entity_request_handler(Self::handle_get_code_actions);
254 client.add_entity_request_handler(Self::handle_get_completions);
255 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
256 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
257 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
258 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
259 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
260 client.add_entity_request_handler(Self::handle_search_project);
261 client.add_entity_request_handler(Self::handle_get_project_symbols);
262 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
263 client.add_entity_request_handler(Self::handle_open_buffer);
264 client.add_entity_request_handler(Self::handle_save_buffer);
265 }
266
267 pub fn local(
268 client: Arc<Client>,
269 user_store: ModelHandle<UserStore>,
270 languages: Arc<LanguageRegistry>,
271 fs: Arc<dyn Fs>,
272 cx: &mut MutableAppContext,
273 ) -> ModelHandle<Self> {
274 cx.add_model(|cx: &mut ModelContext<Self>| {
275 let (remote_id_tx, remote_id_rx) = watch::channel();
276 let _maintain_remote_id_task = cx.spawn_weak({
277 let rpc = client.clone();
278 move |this, mut cx| {
279 async move {
280 let mut status = rpc.status();
281 while let Some(status) = status.next().await {
282 if let Some(this) = this.upgrade(&cx) {
283 let remote_id = if status.is_connected() {
284 let response = rpc.request(proto::RegisterProject {}).await?;
285 Some(response.project_id)
286 } else {
287 None
288 };
289
290 if let Some(project_id) = remote_id {
291 let mut registrations = Vec::new();
292 this.update(&mut cx, |this, cx| {
293 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
294 registrations.push(worktree.update(
295 cx,
296 |worktree, cx| {
297 let worktree = worktree.as_local_mut().unwrap();
298 worktree.register(project_id, cx)
299 },
300 ));
301 }
302 });
303 for registration in registrations {
304 registration.await?;
305 }
306 }
307 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
308 }
309 }
310 Ok(())
311 }
312 .log_err()
313 }
314 });
315
316 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
317 Self {
318 worktrees: Default::default(),
319 collaborators: Default::default(),
320 opened_buffers: Default::default(),
321 shared_buffers: Default::default(),
322 loading_buffers: Default::default(),
323 loading_local_worktrees: Default::default(),
324 buffer_snapshots: Default::default(),
325 client_state: ProjectClientState::Local {
326 is_shared: false,
327 remote_id_tx,
328 remote_id_rx,
329 _maintain_remote_id_task,
330 },
331 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
332 subscriptions: Vec::new(),
333 active_entry: None,
334 languages,
335 client,
336 user_store,
337 fs,
338 next_entry_id: Default::default(),
339 language_servers_with_diagnostics_running: 0,
340 language_servers: Default::default(),
341 started_language_servers: Default::default(),
342 language_server_statuses: Default::default(),
343 language_server_settings: Default::default(),
344 next_language_server_id: 0,
345 nonce: StdRng::from_entropy().gen(),
346 }
347 })
348 }
349
350 pub async fn remote(
351 remote_id: u64,
352 client: Arc<Client>,
353 user_store: ModelHandle<UserStore>,
354 languages: Arc<LanguageRegistry>,
355 fs: Arc<dyn Fs>,
356 cx: &mut AsyncAppContext,
357 ) -> Result<ModelHandle<Self>> {
358 client.authenticate_and_connect(&cx).await?;
359
360 let response = client
361 .request(proto::JoinProject {
362 project_id: remote_id,
363 })
364 .await?;
365
366 let replica_id = response.replica_id as ReplicaId;
367
368 let mut worktrees = Vec::new();
369 for worktree in response.worktrees {
370 let (worktree, load_task) = cx
371 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
372 worktrees.push(worktree);
373 load_task.detach();
374 }
375
376 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
377 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
378 let mut this = Self {
379 worktrees: Vec::new(),
380 loading_buffers: Default::default(),
381 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
382 shared_buffers: Default::default(),
383 loading_local_worktrees: Default::default(),
384 active_entry: None,
385 collaborators: Default::default(),
386 languages,
387 user_store: user_store.clone(),
388 fs,
389 next_entry_id: Default::default(),
390 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
391 client: client.clone(),
392 client_state: ProjectClientState::Remote {
393 sharing_has_stopped: false,
394 remote_id,
395 replica_id,
396 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
397 async move {
398 let mut status = client.status();
399 let is_connected =
400 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're connected right now, any later status change means we
                            // were momentarily disconnected, so treat the project as unshared.
402 if !is_connected || status.next().await.is_some() {
403 if let Some(this) = this.upgrade(&cx) {
404 this.update(&mut cx, |this, cx| this.project_unshared(cx))
405 }
406 }
407 Ok(())
408 }
409 .log_err()
410 }),
411 },
412 language_servers_with_diagnostics_running: 0,
413 language_servers: Default::default(),
414 started_language_servers: Default::default(),
415 language_server_settings: Default::default(),
416 language_server_statuses: response
417 .language_servers
418 .into_iter()
419 .map(|server| {
420 (
421 server.id as usize,
422 LanguageServerStatus {
423 name: server.name,
424 pending_work: Default::default(),
425 pending_diagnostic_updates: 0,
426 },
427 )
428 })
429 .collect(),
430 next_language_server_id: 0,
431 opened_buffers: Default::default(),
432 buffer_snapshots: Default::default(),
433 nonce: StdRng::from_entropy().gen(),
434 };
435 for worktree in worktrees {
436 this.add_worktree(&worktree, cx);
437 }
438 this
439 });
440
441 let user_ids = response
442 .collaborators
443 .iter()
444 .map(|peer| peer.user_id)
445 .collect();
446 user_store
447 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
448 .await?;
449 let mut collaborators = HashMap::default();
450 for message in response.collaborators {
451 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
452 collaborators.insert(collaborator.peer_id, collaborator);
453 }
454
455 this.update(cx, |this, _| {
456 this.collaborators = collaborators;
457 });
458
459 Ok(this)
460 }
461
462 #[cfg(any(test, feature = "test-support"))]
463 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
464 let languages = Arc::new(LanguageRegistry::test());
465 let http_client = client::test::FakeHttpClient::with_404_response();
466 let client = client::Client::new(http_client.clone());
467 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
468 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
469 }
470
471 #[cfg(any(test, feature = "test-support"))]
472 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
473 self.opened_buffers
474 .get(&remote_id)
475 .and_then(|buffer| buffer.upgrade(cx))
476 }
477
478 #[cfg(any(test, feature = "test-support"))]
479 pub fn languages(&self) -> &Arc<LanguageRegistry> {
480 &self.languages
481 }
482
483 #[cfg(any(test, feature = "test-support"))]
484 pub fn check_invariants(&self, cx: &AppContext) {
485 if self.is_local() {
486 let mut worktree_root_paths = HashMap::default();
487 for worktree in self.worktrees(cx) {
488 let worktree = worktree.read(cx);
489 let abs_path = worktree.as_local().unwrap().abs_path().clone();
490 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
491 assert_eq!(
492 prev_worktree_id,
493 None,
494 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
495 abs_path,
496 worktree.id(),
497 prev_worktree_id
498 )
499 }
500 } else {
501 let replica_id = self.replica_id();
502 for buffer in self.opened_buffers.values() {
503 if let Some(buffer) = buffer.upgrade(cx) {
504 let buffer = buffer.read(cx);
505 assert_eq!(
506 buffer.deferred_ops_len(),
507 0,
508 "replica {}, buffer {} has deferred operations",
509 replica_id,
510 buffer.remote_id()
511 );
512 }
513 }
514 }
515 }
516
517 #[cfg(any(test, feature = "test-support"))]
518 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
519 let path = path.into();
520 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
521 self.opened_buffers.iter().any(|(_, buffer)| {
522 if let Some(buffer) = buffer.upgrade(cx) {
523 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
524 if file.worktree == worktree && file.path() == &path.path {
525 return true;
526 }
527 }
528 }
529 false
530 })
531 } else {
532 false
533 }
534 }
535
536 pub fn fs(&self) -> &Arc<dyn Fs> {
537 &self.fs
538 }
539
540 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
541 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
542 *remote_id_tx.borrow_mut() = remote_id;
543 }
544
545 self.subscriptions.clear();
546 if let Some(remote_id) = remote_id {
547 self.subscriptions
548 .push(self.client.add_model_for_remote_entity(remote_id, cx));
549 }
550 }
551
552 pub fn remote_id(&self) -> Option<u64> {
553 match &self.client_state {
554 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
555 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
556 }
557 }
558
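    /// Resolves with the project's server-assigned id: immediately for remote projects,
    /// and once registration with the server completes for local ones.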
559 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
560 let mut id = None;
561 let mut watch = None;
562 match &self.client_state {
563 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
564 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
565 }
566
567 async move {
568 if let Some(id) = id {
569 return id;
570 }
571 let mut watch = watch.unwrap();
572 loop {
573 let id = *watch.borrow();
574 if let Some(id) = id {
575 return id;
576 }
577 watch.next().await;
578 }
579 }
580 }
581
582 pub fn replica_id(&self) -> ReplicaId {
583 match &self.client_state {
584 ProjectClientState::Local { .. } => 0,
585 ProjectClientState::Remote { replica_id, .. } => *replica_id,
586 }
587 }
588
589 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
590 &self.collaborators
591 }
592
593 pub fn worktrees<'a>(
594 &'a self,
595 cx: &'a AppContext,
596 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
597 self.worktrees
598 .iter()
599 .filter_map(move |worktree| worktree.upgrade(cx))
600 }
601
602 pub fn visible_worktrees<'a>(
603 &'a self,
604 cx: &'a AppContext,
605 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
606 self.worktrees.iter().filter_map(|worktree| {
607 worktree.upgrade(cx).and_then(|worktree| {
608 if worktree.read(cx).is_visible() {
609 Some(worktree)
610 } else {
611 None
612 }
613 })
614 })
615 }
616
617 pub fn worktree_for_id(
618 &self,
619 id: WorktreeId,
620 cx: &AppContext,
621 ) -> Option<ModelHandle<Worktree>> {
622 self.worktrees(cx)
623 .find(|worktree| worktree.read(cx).id() == id)
624 }
625
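    /// Sharing upgrades all weak buffer and worktree handles to strong ones so they stay
    /// alive for guests, then asks the server to mark the project as shared and registers
    /// each worktree with it.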
626 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
627 let rpc = self.client.clone();
628 cx.spawn(|this, mut cx| async move {
629 let project_id = this.update(&mut cx, |this, cx| {
630 if let ProjectClientState::Local {
631 is_shared,
632 remote_id_rx,
633 ..
634 } = &mut this.client_state
635 {
636 *is_shared = true;
637
638 for open_buffer in this.opened_buffers.values_mut() {
639 match open_buffer {
640 OpenBuffer::Strong(_) => {}
641 OpenBuffer::Weak(buffer) => {
642 if let Some(buffer) = buffer.upgrade(cx) {
643 *open_buffer = OpenBuffer::Strong(buffer);
644 }
645 }
646 OpenBuffer::Loading(_) => unreachable!(),
647 }
648 }
649
650 for worktree_handle in this.worktrees.iter_mut() {
651 match worktree_handle {
652 WorktreeHandle::Strong(_) => {}
653 WorktreeHandle::Weak(worktree) => {
654 if let Some(worktree) = worktree.upgrade(cx) {
655 *worktree_handle = WorktreeHandle::Strong(worktree);
656 }
657 }
658 }
659 }
660
661 remote_id_rx
662 .borrow()
663 .ok_or_else(|| anyhow!("no project id"))
664 } else {
665 Err(anyhow!("can't share a remote project"))
666 }
667 })?;
668
669 rpc.request(proto::ShareProject { project_id }).await?;
670
671 let mut tasks = Vec::new();
672 this.update(&mut cx, |this, cx| {
673 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
674 worktree.update(cx, |worktree, cx| {
675 let worktree = worktree.as_local_mut().unwrap();
676 tasks.push(worktree.share(project_id, cx));
677 });
678 }
679 });
680 for task in tasks {
681 task.await?;
682 }
683 this.update(&mut cx, |_, cx| cx.notify());
684 Ok(())
685 })
686 }
687
688 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
689 let rpc = self.client.clone();
690 cx.spawn(|this, mut cx| async move {
691 let project_id = this.update(&mut cx, |this, cx| {
692 if let ProjectClientState::Local {
693 is_shared,
694 remote_id_rx,
695 ..
696 } = &mut this.client_state
697 {
698 *is_shared = false;
699
700 for open_buffer in this.opened_buffers.values_mut() {
701 match open_buffer {
702 OpenBuffer::Strong(buffer) => {
703 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
704 }
705 _ => {}
706 }
707 }
708
709 for worktree_handle in this.worktrees.iter_mut() {
710 match worktree_handle {
711 WorktreeHandle::Strong(worktree) => {
712 if !worktree.read(cx).is_visible() {
713 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
714 }
715 }
716 _ => {}
717 }
718 }
719
720 remote_id_rx
721 .borrow()
722 .ok_or_else(|| anyhow!("no project id"))
723 } else {
724 Err(anyhow!("can't share a remote project"))
725 }
726 })?;
727
728 rpc.send(proto::UnshareProject { project_id })?;
729 this.update(&mut cx, |this, cx| {
730 this.collaborators.clear();
731 this.shared_buffers.clear();
732 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
733 worktree.update(cx, |worktree, _| {
734 worktree.as_local_mut().unwrap().unshare();
735 });
736 }
737 cx.notify()
738 });
739 Ok(())
740 })
741 }
742
743 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
744 if let ProjectClientState::Remote {
745 sharing_has_stopped,
746 ..
747 } = &mut self.client_state
748 {
749 *sharing_has_stopped = true;
750 self.collaborators.clear();
751 cx.notify();
752 }
753 }
754
755 pub fn is_read_only(&self) -> bool {
756 match &self.client_state {
757 ProjectClientState::Local { .. } => false,
758 ProjectClientState::Remote {
759 sharing_has_stopped,
760 ..
761 } => *sharing_has_stopped,
762 }
763 }
764
765 pub fn is_local(&self) -> bool {
766 match &self.client_state {
767 ProjectClientState::Local { .. } => true,
768 ProjectClientState::Remote { .. } => false,
769 }
770 }
771
772 pub fn is_remote(&self) -> bool {
773 !self.is_local()
774 }
775
776 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
777 if self.is_remote() {
778 return Err(anyhow!("creating buffers as a guest is not supported yet"));
779 }
780
781 let buffer = cx.add_model(|cx| {
782 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
783 });
784 self.register_buffer(&buffer, cx)?;
785 Ok(buffer)
786 }
787
788 pub fn open_buffer(
789 &mut self,
790 path: impl Into<ProjectPath>,
791 cx: &mut ModelContext<Self>,
792 ) -> Task<Result<ModelHandle<Buffer>>> {
793 let project_path = path.into();
794 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
795 worktree
796 } else {
797 return Task::ready(Err(anyhow!("no such worktree")));
798 };
799
800 // If there is already a buffer for the given path, then return it.
801 let existing_buffer = self.get_open_buffer(&project_path, cx);
802 if let Some(existing_buffer) = existing_buffer {
803 return Task::ready(Ok(existing_buffer));
804 }
805
806 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
807 // If the given path is already being loaded, then wait for that existing
808 // task to complete and return the same buffer.
809 hash_map::Entry::Occupied(e) => e.get().clone(),
810
811 // Otherwise, record the fact that this path is now being loaded.
812 hash_map::Entry::Vacant(entry) => {
813 let (mut tx, rx) = postage::watch::channel();
814 entry.insert(rx.clone());
815
816 let load_buffer = if worktree.read(cx).is_local() {
817 self.open_local_buffer(&project_path.path, &worktree, cx)
818 } else {
819 self.open_remote_buffer(&project_path.path, &worktree, cx)
820 };
821
822 cx.spawn(move |this, mut cx| async move {
823 let load_result = load_buffer.await;
824 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
825 // Record the fact that the buffer is no longer loading.
826 this.loading_buffers.remove(&project_path);
827 let buffer = load_result.map_err(Arc::new)?;
828 Ok(buffer)
829 }));
830 })
831 .detach();
832 rx
833 }
834 };
835
836 cx.foreground().spawn(async move {
837 loop {
838 if let Some(result) = loading_watch.borrow().as_ref() {
839 match result {
840 Ok(buffer) => return Ok(buffer.clone()),
841 Err(error) => return Err(anyhow!("{}", error)),
842 }
843 }
844 loading_watch.next().await;
845 }
846 })
847 }
848
849 fn open_local_buffer(
850 &mut self,
851 path: &Arc<Path>,
852 worktree: &ModelHandle<Worktree>,
853 cx: &mut ModelContext<Self>,
854 ) -> Task<Result<ModelHandle<Buffer>>> {
855 let load_buffer = worktree.update(cx, |worktree, cx| {
856 let worktree = worktree.as_local_mut().unwrap();
857 worktree.load_buffer(path, cx)
858 });
859 cx.spawn(|this, mut cx| async move {
860 let buffer = load_buffer.await?;
861 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
862 Ok(buffer)
863 })
864 }
865
866 fn open_remote_buffer(
867 &mut self,
868 path: &Arc<Path>,
869 worktree: &ModelHandle<Worktree>,
870 cx: &mut ModelContext<Self>,
871 ) -> Task<Result<ModelHandle<Buffer>>> {
872 let rpc = self.client.clone();
873 let project_id = self.remote_id().unwrap();
874 let remote_worktree_id = worktree.read(cx).id();
875 let path = path.clone();
876 let path_string = path.to_string_lossy().to_string();
877 cx.spawn(|this, mut cx| async move {
878 let response = rpc
879 .request(proto::OpenBuffer {
880 project_id,
881 worktree_id: remote_worktree_id.to_proto(),
882 path: path_string,
883 })
884 .await?;
885 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
886 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
887 .await
888 })
889 }
890
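    /// Opens a buffer for a location reported by a language server. If the path falls
    /// outside every existing worktree, a new worktree is created for it and the same
    /// language server is associated with that worktree.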
891 fn open_local_buffer_via_lsp(
892 &mut self,
893 abs_path: lsp::Url,
894 lang_name: Arc<str>,
895 lang_server: Arc<LanguageServer>,
896 cx: &mut ModelContext<Self>,
897 ) -> Task<Result<ModelHandle<Buffer>>> {
898 cx.spawn(|this, mut cx| async move {
899 let abs_path = abs_path
900 .to_file_path()
901 .map_err(|_| anyhow!("can't convert URI to path"))?;
902 let (worktree, relative_path) = if let Some(result) =
903 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
904 {
905 result
906 } else {
907 let worktree = this
908 .update(&mut cx, |this, cx| {
909 this.create_local_worktree(&abs_path, false, cx)
910 })
911 .await?;
912 this.update(&mut cx, |this, cx| {
913 this.language_servers
914 .insert((worktree.read(cx).id(), lang_name), lang_server);
915 });
916 (worktree, PathBuf::new())
917 };
918
919 let project_path = ProjectPath {
920 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
921 path: relative_path.into(),
922 };
923 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
924 .await
925 })
926 }
927
928 pub fn save_buffer_as(
929 &mut self,
930 buffer: ModelHandle<Buffer>,
931 abs_path: PathBuf,
932 cx: &mut ModelContext<Project>,
933 ) -> Task<Result<()>> {
934 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
935 cx.spawn(|this, mut cx| async move {
936 let (worktree, path) = worktree_task.await?;
937 worktree
938 .update(&mut cx, |worktree, cx| {
939 worktree
940 .as_local_mut()
941 .unwrap()
942 .save_buffer_as(buffer.clone(), path, cx)
943 })
944 .await?;
945 this.update(&mut cx, |this, cx| {
946 this.assign_language_to_buffer(&buffer, cx);
947 this.register_buffer_with_language_server(&buffer, cx);
948 });
949 Ok(())
950 })
951 }
952
953 pub fn get_open_buffer(
954 &mut self,
955 path: &ProjectPath,
956 cx: &mut ModelContext<Self>,
957 ) -> Option<ModelHandle<Buffer>> {
958 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
959 self.opened_buffers.values().find_map(|buffer| {
960 let buffer = buffer.upgrade(cx)?;
961 let file = File::from_dyn(buffer.read(cx).file())?;
962 if file.worktree == worktree && file.path() == &path.path {
963 Some(buffer)
964 } else {
965 None
966 }
967 })
968 }
969
970 fn register_buffer(
971 &mut self,
972 buffer: &ModelHandle<Buffer>,
973 cx: &mut ModelContext<Self>,
974 ) -> Result<()> {
975 let remote_id = buffer.read(cx).remote_id();
976 let open_buffer = if self.is_remote() || self.is_shared() {
977 OpenBuffer::Strong(buffer.clone())
978 } else {
979 OpenBuffer::Weak(buffer.downgrade())
980 };
981
982 match self.opened_buffers.insert(remote_id, open_buffer) {
983 None => {}
984 Some(OpenBuffer::Loading(operations)) => {
985 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
986 }
987 Some(OpenBuffer::Weak(existing_handle)) => {
988 if existing_handle.upgrade(cx).is_some() {
989 Err(anyhow!(
990 "already registered buffer with remote id {}",
991 remote_id
992 ))?
993 }
994 }
995 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
996 "already registered buffer with remote id {}",
997 remote_id
998 ))?,
999 }
1000 cx.subscribe(buffer, |this, buffer, event, cx| {
1001 this.on_buffer_event(buffer, event, cx);
1002 })
1003 .detach();
1004
1005 self.assign_language_to_buffer(buffer, cx);
1006 self.register_buffer_with_language_server(buffer, cx);
1007
1008 Ok(())
1009 }
1010
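    /// For a local buffer: seed any diagnostics the worktree already holds for its path,
    /// and if a language server is running, send textDocument/didOpen, record the snapshot
    /// as version 0 for later incremental didChange updates, and send didClose when the
    /// buffer is released.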
1011 fn register_buffer_with_language_server(
1012 &mut self,
1013 buffer_handle: &ModelHandle<Buffer>,
1014 cx: &mut ModelContext<Self>,
1015 ) {
1016 let buffer = buffer_handle.read(cx);
1017 let buffer_id = buffer.remote_id();
1018 if let Some(file) = File::from_dyn(buffer.file()) {
1019 if file.is_local() {
1020 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1021 let initial_snapshot = buffer.text_snapshot();
1022 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1023
1024 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1025 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1026 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1027 .log_err();
1028 }
1029 }
1030
1031 if let Some(server) = language_server {
1032 server
1033 .notify::<lsp::notification::DidOpenTextDocument>(
1034 lsp::DidOpenTextDocumentParams {
1035 text_document: lsp::TextDocumentItem::new(
1036 uri,
1037 Default::default(),
1038 0,
1039 initial_snapshot.text(),
1040 ),
                            },
                        )
1044 .log_err();
1045 buffer_handle.update(cx, |buffer, cx| {
1046 buffer.set_completion_triggers(
1047 server
1048 .capabilities()
1049 .completion_provider
1050 .as_ref()
1051 .and_then(|provider| provider.trigger_characters.clone())
1052 .unwrap_or(Vec::new()),
1053 cx,
1054 )
1055 });
1056 self.buffer_snapshots
1057 .insert(buffer_id, vec![(0, initial_snapshot)]);
1058 }
1059
1060 cx.observe_release(buffer_handle, |this, buffer, cx| {
1061 if let Some(file) = File::from_dyn(buffer.file()) {
1062 if file.is_local() {
1063 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1064 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1065 server
1066 .notify::<lsp::notification::DidCloseTextDocument>(
1067 lsp::DidCloseTextDocumentParams {
1068 text_document: lsp::TextDocumentIdentifier::new(
1069 uri.clone(),
1070 ),
1071 },
1072 )
1073 .log_err();
1074 }
1075 }
1076 }
1077 })
1078 .detach();
1079 }
1080 }
1081 }
1082
1083 fn on_buffer_event(
1084 &mut self,
1085 buffer: ModelHandle<Buffer>,
1086 event: &BufferEvent,
1087 cx: &mut ModelContext<Self>,
1088 ) -> Option<()> {
1089 match event {
1090 BufferEvent::Operation(operation) => {
1091 let project_id = self.remote_id()?;
1092 let request = self.client.request(proto::UpdateBuffer {
1093 project_id,
1094 buffer_id: buffer.read(cx).remote_id(),
1095 operations: vec![language::proto::serialize_operation(&operation)],
1096 });
1097 cx.background().spawn(request).detach_and_log_err(cx);
1098 }
1099 BufferEvent::Edited => {
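                // Convert the edits made since the last snapshot we sent into incremental
                // LSP content changes, bump the document version, and notify the server
                // via textDocument/didChange.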
1100 let language_server = self
1101 .language_server_for_buffer(buffer.read(cx), cx)?
1102 .clone();
1103 let buffer = buffer.read(cx);
1104 let file = File::from_dyn(buffer.file())?;
1105 let abs_path = file.as_local()?.abs_path(cx);
1106 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1107 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1108 let (version, prev_snapshot) = buffer_snapshots.last()?;
1109 let next_snapshot = buffer.text_snapshot();
1110 let next_version = version + 1;
1111
1112 let content_changes = buffer
1113 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1114 .map(|edit| {
1115 let edit_start = edit.new.start.0;
1116 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1117 let new_text = next_snapshot
1118 .text_for_range(edit.new.start.1..edit.new.end.1)
1119 .collect();
1120 lsp::TextDocumentContentChangeEvent {
1121 range: Some(lsp::Range::new(
1122 edit_start.to_lsp_position(),
1123 edit_end.to_lsp_position(),
1124 )),
1125 range_length: None,
1126 text: new_text,
1127 }
1128 })
1129 .collect();
1130
1131 buffer_snapshots.push((next_version, next_snapshot));
1132
1133 language_server
1134 .notify::<lsp::notification::DidChangeTextDocument>(
1135 lsp::DidChangeTextDocumentParams {
1136 text_document: lsp::VersionedTextDocumentIdentifier::new(
1137 uri,
1138 next_version,
1139 ),
1140 content_changes,
1141 },
1142 )
1143 .log_err();
1144 }
1145 BufferEvent::Saved => {
1146 let file = File::from_dyn(buffer.read(cx).file())?;
1147 let worktree_id = file.worktree_id(cx);
1148 let abs_path = file.as_local()?.abs_path(cx);
1149 let text_document = lsp::TextDocumentIdentifier {
1150 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1151 };
1152
1153 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1154 server
1155 .notify::<lsp::notification::DidSaveTextDocument>(
1156 lsp::DidSaveTextDocumentParams {
1157 text_document: text_document.clone(),
1158 text: None,
1159 },
1160 )
1161 .log_err();
1162 }
1163 }
1164 _ => {}
1165 }
1166
1167 None
1168 }
1169
1170 fn language_servers_for_worktree(
1171 &self,
1172 worktree_id: WorktreeId,
1173 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1174 self.language_servers.iter().filter_map(
1175 move |((language_server_worktree_id, language_name), server)| {
1176 if *language_server_worktree_id == worktree_id {
1177 Some((language_name.as_ref(), server))
1178 } else {
1179 None
1180 }
1181 },
1182 )
1183 }
1184
1185 fn assign_language_to_buffer(
1186 &mut self,
1187 buffer: &ModelHandle<Buffer>,
1188 cx: &mut ModelContext<Self>,
1189 ) -> Option<()> {
        // Determine the buffer's language from its path; if one matches, assign it and
        // start the corresponding language server if it isn't already running.
1191 let full_path = buffer.read(cx).file()?.full_path(cx);
1192 let language = self.languages.select_language(&full_path)?;
1193 buffer.update(cx, |buffer, cx| {
1194 buffer.set_language(Some(language.clone()), cx);
1195 });
1196
1197 let file = File::from_dyn(buffer.read(cx).file())?;
1198 let worktree = file.worktree.read(cx).as_local()?;
1199 let worktree_id = worktree.id();
1200 let worktree_abs_path = worktree.abs_path().clone();
1201 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1202
1203 None
1204 }
1205
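    /// Language servers are keyed by (worktree id, language name). Starting one is
    /// asynchronous, so the pending task is stored in `started_language_servers` to avoid
    /// spawning a second server for the same key while the first is still initializing.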
1206 fn start_language_server(
1207 &mut self,
1208 worktree_id: WorktreeId,
1209 worktree_path: Arc<Path>,
1210 language: Arc<Language>,
1211 cx: &mut ModelContext<Self>,
1212 ) {
1213 let key = (worktree_id, language.name());
1214 self.started_language_servers
1215 .entry(key.clone())
1216 .or_insert_with(|| {
1217 let server_id = post_inc(&mut self.next_language_server_id);
1218 let language_server = self.languages.start_language_server(
1219 language.clone(),
1220 worktree_path,
1221 self.client.http_client(),
1222 cx,
1223 );
1224 cx.spawn_weak(|this, mut cx| async move {
1225 let mut language_server = language_server?.await.log_err()?;
1226 let this = this.upgrade(&cx)?;
1227 let (language_server_events_tx, language_server_events_rx) =
1228 smol::channel::unbounded();
1229
1230 language_server
1231 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1232 let language_server_events_tx = language_server_events_tx.clone();
1233 move |params| {
1234 language_server_events_tx
1235 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1236 .ok();
1237 }
1238 })
1239 .detach();
1240
1241 language_server
1242 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1243 let settings = this
1244 .read_with(&cx, |this, _| this.language_server_settings.clone());
1245 move |params| {
1246 let settings = settings.lock();
1247 Ok(params
1248 .items
1249 .into_iter()
1250 .map(|item| {
1251 if let Some(section) = &item.section {
1252 settings
1253 .get(section)
1254 .cloned()
1255 .unwrap_or(serde_json::Value::Null)
1256 } else {
1257 settings.clone()
1258 }
1259 })
1260 .collect())
1261 }
1262 })
1263 .detach();
1264
1265 language_server
1266 .on_notification::<lsp::notification::Progress, _>(move |params| {
1267 let token = match params.token {
1268 lsp::NumberOrString::String(token) => token,
1269 lsp::NumberOrString::Number(token) => {
1270 log::info!("skipping numeric progress token {}", token);
1271 return;
1272 }
1273 };
1274
1275 match params.value {
1276 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1277 lsp::WorkDoneProgress::Begin(_) => {
1278 language_server_events_tx
1279 .try_send(LanguageServerEvent::WorkStart { token })
1280 .ok();
1281 }
1282 lsp::WorkDoneProgress::Report(report) => {
1283 language_server_events_tx
1284 .try_send(LanguageServerEvent::WorkProgress {
1285 token,
1286 progress: LanguageServerProgress {
1287 message: report.message,
1288 percentage: report
1289 .percentage
1290 .map(|p| p as usize),
1291 last_update_at: Instant::now(),
1292 },
1293 })
1294 .ok();
1295 }
1296 lsp::WorkDoneProgress::End(_) => {
1297 language_server_events_tx
1298 .try_send(LanguageServerEvent::WorkEnd { token })
1299 .ok();
1300 }
1301 },
1302 }
1303 })
1304 .detach();
1305
1306 // Process all the LSP events.
1307 cx.spawn(|mut cx| {
1308 let this = this.downgrade();
1309 async move {
1310 while let Ok(event) = language_server_events_rx.recv().await {
1311 let this = this.upgrade(&cx)?;
1312 this.update(&mut cx, |this, cx| {
1313 this.on_lsp_event(server_id, event, &language, cx)
1314 });
1315
1316 // Don't starve the main thread when lots of events arrive all at once.
1317 smol::future::yield_now().await;
1318 }
1319 Some(())
1320 }
1321 })
1322 .detach();
1323
1324 let language_server = language_server.initialize().await.log_err()?;
1325 this.update(&mut cx, |this, cx| {
1326 this.language_servers
1327 .insert(key.clone(), language_server.clone());
1328 this.language_server_statuses.insert(
1329 server_id,
1330 LanguageServerStatus {
1331 name: language_server.name().to_string(),
1332 pending_work: Default::default(),
1333 pending_diagnostic_updates: 0,
1334 },
1335 );
1336 language_server
1337 .notify::<lsp::notification::DidChangeConfiguration>(
1338 lsp::DidChangeConfigurationParams {
1339 settings: this.language_server_settings.lock().clone(),
1340 },
1341 )
1342 .ok();
1343
1344 if let Some(project_id) = this.remote_id() {
1345 this.client
1346 .send(proto::StartLanguageServer {
1347 project_id,
1348 server: Some(proto::LanguageServer {
1349 id: server_id as u64,
1350 name: language_server.name().to_string(),
1351 }),
1352 })
1353 .log_err();
1354 }
1355
1356 // Tell the language server about every open buffer in the worktree that matches the language.
1357 for buffer in this.opened_buffers.values() {
1358 if let Some(buffer_handle) = buffer.upgrade(cx) {
1359 let buffer = buffer_handle.read(cx);
1360 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1361 file
1362 } else {
1363 continue;
1364 };
1365 let language = if let Some(language) = buffer.language() {
1366 language
1367 } else {
1368 continue;
1369 };
1370 if (file.worktree.read(cx).id(), language.name()) != key {
1371 continue;
1372 }
1373
1374 let file = file.as_local()?;
1375 let versions = this
1376 .buffer_snapshots
1377 .entry(buffer.remote_id())
1378 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1379 let (version, initial_snapshot) = versions.last().unwrap();
1380 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1381 language_server
1382 .notify::<lsp::notification::DidOpenTextDocument>(
1383 lsp::DidOpenTextDocumentParams {
1384 text_document: lsp::TextDocumentItem::new(
1385 uri,
1386 Default::default(),
1387 *version,
1388 initial_snapshot.text(),
1389 ),
1390 },
1391 )
1392 .log_err()?;
1393 buffer_handle.update(cx, |buffer, cx| {
1394 buffer.set_completion_triggers(
1395 language_server
1396 .capabilities()
1397 .completion_provider
1398 .as_ref()
1399 .and_then(|provider| {
1400 provider.trigger_characters.clone()
1401 })
1402 .unwrap_or(Vec::new()),
1403 cx,
1404 )
1405 });
1406 }
1407 }
1408
1409 cx.notify();
1410 Some(())
1411 });
1412
1413 Some(language_server)
1414 })
1415 });
1416 }
1417
1418 fn on_lsp_event(
1419 &mut self,
1420 language_server_id: usize,
1421 event: LanguageServerEvent,
1422 language: &Arc<Language>,
1423 cx: &mut ModelContext<Self>,
1424 ) {
1425 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1426 let language_server_status =
1427 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1428 status
1429 } else {
1430 return;
1431 };
1432
1433 match event {
1434 LanguageServerEvent::WorkStart { token } => {
1435 if Some(&token) == disk_diagnostics_token {
1436 language_server_status.pending_diagnostic_updates += 1;
1437 if language_server_status.pending_diagnostic_updates == 1 {
1438 self.disk_based_diagnostics_started(cx);
1439 self.broadcast_language_server_update(
1440 language_server_id,
1441 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1442 proto::LspDiskBasedDiagnosticsUpdating {},
1443 ),
1444 );
1445 }
1446 } else {
1447 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1448 self.broadcast_language_server_update(
1449 language_server_id,
1450 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1451 token,
1452 }),
1453 );
1454 }
1455 }
1456 LanguageServerEvent::WorkProgress { token, progress } => {
1457 if Some(&token) != disk_diagnostics_token {
1458 self.on_lsp_work_progress(
1459 language_server_id,
1460 token.clone(),
1461 progress.clone(),
1462 cx,
1463 );
1464 self.broadcast_language_server_update(
1465 language_server_id,
1466 proto::update_language_server::Variant::WorkProgress(
1467 proto::LspWorkProgress {
1468 token,
1469 message: progress.message,
1470 percentage: progress.percentage.map(|p| p as u32),
1471 },
1472 ),
1473 );
1474 }
1475 }
1476 LanguageServerEvent::WorkEnd { token } => {
1477 if Some(&token) == disk_diagnostics_token {
1478 language_server_status.pending_diagnostic_updates -= 1;
1479 if language_server_status.pending_diagnostic_updates == 0 {
1480 self.disk_based_diagnostics_finished(cx);
1481 self.broadcast_language_server_update(
1482 language_server_id,
1483 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1484 proto::LspDiskBasedDiagnosticsUpdated {},
1485 ),
1486 );
1487 }
1488 } else {
1489 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1490 self.broadcast_language_server_update(
1491 language_server_id,
1492 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1493 token,
1494 }),
1495 );
1496 }
1497 }
1498 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1499 language.process_diagnostics(&mut params);
1500
1501 if disk_diagnostics_token.is_none() {
1502 self.disk_based_diagnostics_started(cx);
1503 self.broadcast_language_server_update(
1504 language_server_id,
1505 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1506 proto::LspDiskBasedDiagnosticsUpdating {},
1507 ),
1508 );
1509 }
1510 self.update_diagnostics(
1511 params,
1512 language
1513 .disk_based_diagnostic_sources()
1514 .unwrap_or(&Default::default()),
1515 cx,
1516 )
1517 .log_err();
1518 if disk_diagnostics_token.is_none() {
1519 self.disk_based_diagnostics_finished(cx);
1520 self.broadcast_language_server_update(
1521 language_server_id,
1522 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1523 proto::LspDiskBasedDiagnosticsUpdated {},
1524 ),
1525 );
1526 }
1527 }
1528 }
1529 }
1530
1531 fn on_lsp_work_start(
1532 &mut self,
1533 language_server_id: usize,
1534 token: String,
1535 cx: &mut ModelContext<Self>,
1536 ) {
1537 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1538 status.pending_work.insert(
1539 token,
1540 LanguageServerProgress {
1541 message: None,
1542 percentage: None,
1543 last_update_at: Instant::now(),
1544 },
1545 );
1546 cx.notify();
1547 }
1548 }
1549
1550 fn on_lsp_work_progress(
1551 &mut self,
1552 language_server_id: usize,
1553 token: String,
1554 progress: LanguageServerProgress,
1555 cx: &mut ModelContext<Self>,
1556 ) {
1557 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1558 status.pending_work.insert(token, progress);
1559 cx.notify();
1560 }
1561 }
1562
1563 fn on_lsp_work_end(
1564 &mut self,
1565 language_server_id: usize,
1566 token: String,
1567 cx: &mut ModelContext<Self>,
1568 ) {
1569 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1570 status.pending_work.remove(&token);
1571 cx.notify();
1572 }
1573 }
1574
1575 fn broadcast_language_server_update(
1576 &self,
1577 language_server_id: usize,
1578 event: proto::update_language_server::Variant,
1579 ) {
1580 if let Some(project_id) = self.remote_id() {
1581 self.client
1582 .send(proto::UpdateLanguageServer {
1583 project_id,
1584 language_server_id: language_server_id as u64,
1585 variant: Some(event),
1586 })
1587 .log_err();
1588 }
1589 }
1590
1591 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1592 for server in self.language_servers.values() {
1593 server
1594 .notify::<lsp::notification::DidChangeConfiguration>(
1595 lsp::DidChangeConfigurationParams {
1596 settings: settings.clone(),
1597 },
1598 )
1599 .ok();
1600 }
1601 *self.language_server_settings.lock() = settings;
1602 }
1603
1604 pub fn language_server_statuses(
1605 &self,
1606 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1607 self.language_server_statuses.values()
1608 }
1609
1610 pub fn update_diagnostics(
1611 &mut self,
1612 params: lsp::PublishDiagnosticsParams,
1613 disk_based_sources: &HashSet<String>,
1614 cx: &mut ModelContext<Self>,
1615 ) -> Result<()> {
1616 let abs_path = params
1617 .uri
1618 .to_file_path()
1619 .map_err(|_| anyhow!("URI is not a file"))?;
1620 let mut next_group_id = 0;
1621 let mut diagnostics = Vec::default();
1622 let mut primary_diagnostic_group_ids = HashMap::default();
1623 let mut sources_by_group_id = HashMap::default();
1624 let mut supporting_diagnostics = HashMap::default();
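        // First pass: a diagnostic whose related information points at an already-seen
        // primary diagnostic is recorded as a supporting diagnostic; every other diagnostic
        // starts a new group and becomes that group's primary entry.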
        for diagnostic in &params.diagnostics {
1626 let source = diagnostic.source.as_ref();
1627 let code = diagnostic.code.as_ref().map(|code| match code {
1628 lsp::NumberOrString::Number(code) => code.to_string(),
1629 lsp::NumberOrString::String(code) => code.clone(),
1630 });
1631 let range = range_from_lsp(diagnostic.range);
1632 let is_supporting = diagnostic
1633 .related_information
1634 .as_ref()
1635 .map_or(false, |infos| {
1636 infos.iter().any(|info| {
1637 primary_diagnostic_group_ids.contains_key(&(
1638 source,
1639 code.clone(),
1640 range_from_lsp(info.location.range),
1641 ))
1642 })
1643 });
1644
1645 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1646 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1647 });
1648
1649 if is_supporting {
1650 supporting_diagnostics.insert(
1651 (source, code.clone(), range),
1652 (diagnostic.severity, is_unnecessary),
1653 );
1654 } else {
1655 let group_id = post_inc(&mut next_group_id);
1656 let is_disk_based =
1657 source.map_or(false, |source| disk_based_sources.contains(source));
1658
1659 sources_by_group_id.insert(group_id, source);
1660 primary_diagnostic_group_ids
1661 .insert((source, code.clone(), range.clone()), group_id);
1662
1663 diagnostics.push(DiagnosticEntry {
1664 range,
1665 diagnostic: Diagnostic {
1666 code: code.clone(),
1667 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1668 message: diagnostic.message.clone(),
1669 group_id,
1670 is_primary: true,
1671 is_valid: true,
1672 is_disk_based,
1673 is_unnecessary,
1674 },
1675 });
1676 if let Some(infos) = &diagnostic.related_information {
1677 for info in infos {
1678 if info.location.uri == params.uri && !info.message.is_empty() {
1679 let range = range_from_lsp(info.location.range);
1680 diagnostics.push(DiagnosticEntry {
1681 range,
1682 diagnostic: Diagnostic {
1683 code: code.clone(),
1684 severity: DiagnosticSeverity::INFORMATION,
1685 message: info.message.clone(),
1686 group_id,
1687 is_primary: false,
1688 is_valid: true,
1689 is_disk_based,
1690 is_unnecessary: false,
1691 },
1692 });
1693 }
1694 }
1695 }
1696 }
1697 }
1698
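        // Second pass: copy the severity and the "unnecessary" flag from supporting
        // diagnostics onto the matching non-primary entries of each group.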
1699 for entry in &mut diagnostics {
1700 let diagnostic = &mut entry.diagnostic;
1701 if !diagnostic.is_primary {
1702 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1703 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1704 source,
1705 diagnostic.code.clone(),
1706 entry.range.clone(),
1707 )) {
1708 if let Some(severity) = severity {
1709 diagnostic.severity = severity;
1710 }
1711 diagnostic.is_unnecessary = is_unnecessary;
1712 }
1713 }
1714 }
1715
1716 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1717 Ok(())
1718 }
1719
1720 pub fn update_diagnostic_entries(
1721 &mut self,
1722 abs_path: PathBuf,
1723 version: Option<i32>,
1724 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1725 cx: &mut ModelContext<Project>,
1726 ) -> Result<(), anyhow::Error> {
1727 let (worktree, relative_path) = self
1728 .find_local_worktree(&abs_path, cx)
1729 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1730 if !worktree.read(cx).is_visible() {
1731 return Ok(());
1732 }
1733
1734 let project_path = ProjectPath {
1735 worktree_id: worktree.read(cx).id(),
1736 path: relative_path.into(),
1737 };
1738
1739 for buffer in self.opened_buffers.values() {
1740 if let Some(buffer) = buffer.upgrade(cx) {
1741 if buffer
1742 .read(cx)
1743 .file()
1744 .map_or(false, |file| *file.path() == project_path.path)
1745 {
1746 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1747 break;
1748 }
1749 }
1750 }
1751 worktree.update(cx, |worktree, cx| {
1752 worktree
1753 .as_local_mut()
1754 .ok_or_else(|| anyhow!("not a local worktree"))?
1755 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1756 })?;
1757 cx.emit(Event::DiagnosticsUpdated(project_path));
1758 Ok(())
1759 }
1760
1761 fn update_buffer_diagnostics(
1762 &mut self,
1763 buffer: &ModelHandle<Buffer>,
1764 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1765 version: Option<i32>,
1766 cx: &mut ModelContext<Self>,
1767 ) -> Result<()> {
1768 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1769 Ordering::Equal
1770 .then_with(|| b.is_primary.cmp(&a.is_primary))
1771 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1772 .then_with(|| a.severity.cmp(&b.severity))
1773 .then_with(|| a.message.cmp(&b.message))
1774 }
1775
1776 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1777
1778 diagnostics.sort_unstable_by(|a, b| {
1779 Ordering::Equal
1780 .then_with(|| a.range.start.cmp(&b.range.start))
1781 .then_with(|| b.range.end.cmp(&a.range.end))
1782 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1783 });
1784
1785 let mut sanitized_diagnostics = Vec::new();
1786 let mut edits_since_save = snapshot
1787 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1788 .peekable();
1789 let mut last_edit_old_end = PointUtf16::zero();
1790 let mut last_edit_new_end = PointUtf16::zero();
1791 'outer: for entry in diagnostics {
1792 let mut start = entry.range.start;
1793 let mut end = entry.range.end;
1794
1795 // Some diagnostics are based on files on disk instead of buffers'
1796 // current contents. Adjust these diagnostics' ranges to reflect
1797 // any unsaved edits.
1798 if entry.diagnostic.is_disk_based {
1799 while let Some(edit) = edits_since_save.peek() {
1800 if edit.old.end <= start {
1801 last_edit_old_end = edit.old.end;
1802 last_edit_new_end = edit.new.end;
1803 edits_since_save.next();
1804 } else if edit.old.start <= end && edit.old.end >= start {
1805 continue 'outer;
1806 } else {
1807 break;
1808 }
1809 }
1810
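                // Map the diagnostic from on-disk coordinates into the buffer's current
                // coordinates: keep its offset past the last edit's old end, but measure that
                // offset from the edit's new end. For example, if the last edit's old range
                // ended at row 5 and its new range ends at row 8, a disk-based diagnostic
                // starting at row 6 is moved to row 9.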
1811 let start_overshoot = start - last_edit_old_end;
1812 start = last_edit_new_end;
1813 start += start_overshoot;
1814
1815 let end_overshoot = end - last_edit_old_end;
1816 end = last_edit_new_end;
1817 end += end_overshoot;
1818 }
1819
1820 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1821 ..snapshot.clip_point_utf16(end, Bias::Right);
1822
1823 // Expand empty ranges by one character
1824 if range.start == range.end {
1825 range.end.column += 1;
1826 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1827 if range.start == range.end && range.end.column > 0 {
1828 range.start.column -= 1;
1829 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1830 }
1831 }
1832
1833 sanitized_diagnostics.push(DiagnosticEntry {
1834 range,
1835 diagnostic: entry.diagnostic,
1836 });
1837 }
1838 drop(edits_since_save);
1839
1840 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1841 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1842 Ok(())
1843 }
1844
1845 pub fn format(
1846 &self,
1847 buffers: HashSet<ModelHandle<Buffer>>,
1848 push_to_history: bool,
1849 cx: &mut ModelContext<Project>,
1850 ) -> Task<Result<ProjectTransaction>> {
1851 let mut local_buffers = Vec::new();
1852 let mut remote_buffers = None;
1853 for buffer_handle in buffers {
1854 let buffer = buffer_handle.read(cx);
1855 if let Some(file) = File::from_dyn(buffer.file()) {
1856 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1857 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1858 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1859 }
1860 } else {
1861 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1862 }
1863 } else {
1864 return Task::ready(Ok(Default::default()));
1865 }
1866 }
1867
1868 let remote_buffers = self.remote_id().zip(remote_buffers);
1869 let client = self.client.clone();
1870
1871 cx.spawn(|this, mut cx| async move {
1872 let mut project_transaction = ProjectTransaction::default();
1873
1874 if let Some((project_id, remote_buffers)) = remote_buffers {
1875 let response = client
1876 .request(proto::FormatBuffers {
1877 project_id,
1878 buffer_ids: remote_buffers
1879 .iter()
1880 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1881 .collect(),
1882 })
1883 .await?
1884 .transaction
1885 .ok_or_else(|| anyhow!("missing transaction"))?;
1886 project_transaction = this
1887 .update(&mut cx, |this, cx| {
1888 this.deserialize_project_transaction(response, push_to_history, cx)
1889 })
1890 .await?;
1891 }
1892
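            // For each local buffer, prefer whole-document formatting when the server
            // advertises it; otherwise fall back to range formatting over the entire buffer,
            // and skip the buffer if the server supports neither.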
1893 for (buffer, buffer_abs_path, language_server) in local_buffers {
1894 let text_document = lsp::TextDocumentIdentifier::new(
1895 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1896 );
1897 let capabilities = &language_server.capabilities();
1898 let lsp_edits = if capabilities
1899 .document_formatting_provider
1900 .as_ref()
1901 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1902 {
1903 language_server
1904 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1905 text_document,
1906 options: Default::default(),
1907 work_done_progress_params: Default::default(),
1908 })
1909 .await?
1910 } else if capabilities
1911 .document_range_formatting_provider
1912 .as_ref()
1913 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1914 {
1915 let buffer_start = lsp::Position::new(0, 0);
1916 let buffer_end = buffer
1917 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1918 .to_lsp_position();
1919 language_server
1920 .request::<lsp::request::RangeFormatting>(
1921 lsp::DocumentRangeFormattingParams {
1922 text_document,
1923 range: lsp::Range::new(buffer_start, buffer_end),
1924 options: Default::default(),
1925 work_done_progress_params: Default::default(),
1926 },
1927 )
1928 .await?
1929 } else {
1930 continue;
1931 };
1932
1933 if let Some(lsp_edits) = lsp_edits {
1934 let edits = this
1935 .update(&mut cx, |this, cx| {
1936 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1937 })
1938 .await?;
1939 buffer.update(&mut cx, |buffer, cx| {
1940 buffer.finalize_last_transaction();
1941 buffer.start_transaction();
1942 for (range, text) in edits {
1943 buffer.edit([range], text, cx);
1944 }
1945 if buffer.end_transaction(cx).is_some() {
1946 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1947 if !push_to_history {
1948 buffer.forget_transaction(transaction.id);
1949 }
1950 project_transaction.0.insert(cx.handle(), transaction);
1951 }
1952 });
1953 }
1954 }
1955
1956 Ok(project_transaction)
1957 })
1958 }
1959
1960 pub fn definition<T: ToPointUtf16>(
1961 &self,
1962 buffer: &ModelHandle<Buffer>,
1963 position: T,
1964 cx: &mut ModelContext<Self>,
1965 ) -> Task<Result<Vec<Location>>> {
1966 let position = position.to_point_utf16(buffer.read(cx));
1967 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1968 }
1969
1970 pub fn references<T: ToPointUtf16>(
1971 &self,
1972 buffer: &ModelHandle<Buffer>,
1973 position: T,
1974 cx: &mut ModelContext<Self>,
1975 ) -> Task<Result<Vec<Location>>> {
1976 let position = position.to_point_utf16(buffer.read(cx));
1977 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1978 }
1979
1980 pub fn document_highlights<T: ToPointUtf16>(
1981 &self,
1982 buffer: &ModelHandle<Buffer>,
1983 position: T,
1984 cx: &mut ModelContext<Self>,
1985 ) -> Task<Result<Vec<DocumentHighlight>>> {
1986 let position = position.to_point_utf16(buffer.read(cx));
1987
1988 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1989 }
1990
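    /// Queries every running language server for workspace symbols matching `query`,
    /// or forwards the request to the host when this project is remote.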
1991 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1992 if self.is_local() {
1993 let mut language_servers = HashMap::default();
1994 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1995 if let Some((worktree, language)) = self
1996 .worktree_for_id(*worktree_id, cx)
1997 .and_then(|worktree| worktree.read(cx).as_local())
1998 .zip(self.languages.get_language(language_name))
1999 {
2000 language_servers
2001 .entry(Arc::as_ptr(language_server))
2002 .or_insert((
2003 language_server.clone(),
2004 *worktree_id,
2005 worktree.abs_path().clone(),
2006 language.clone(),
2007 ));
2008 }
2009 }
2010
2011 let mut requests = Vec::new();
2012 for (language_server, _, _, _) in language_servers.values() {
2013 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2014 lsp::WorkspaceSymbolParams {
2015 query: query.to_string(),
2016 ..Default::default()
2017 },
2018 ));
2019 }
2020
2021 cx.spawn_weak(|this, cx| async move {
2022 let responses = futures::future::try_join_all(requests).await?;
2023
2024 let mut symbols = Vec::new();
2025 if let Some(this) = this.upgrade(&cx) {
2026 this.read_with(&cx, |this, cx| {
2027 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2028 language_servers.into_values().zip(responses)
2029 {
2030 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2031 |lsp_symbol| {
2032 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2033 let mut worktree_id = source_worktree_id;
2034 let path;
2035 if let Some((worktree, rel_path)) =
2036 this.find_local_worktree(&abs_path, cx)
2037 {
2038 worktree_id = worktree.read(cx).id();
2039 path = rel_path;
2040 } else {
2041 path = relativize_path(&worktree_abs_path, &abs_path);
2042 }
2043
2044 let label = language
2045 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2046 .unwrap_or_else(|| {
2047 CodeLabel::plain(lsp_symbol.name.clone(), None)
2048 });
2049 let signature = this.symbol_signature(worktree_id, &path);
2050
2051 Some(Symbol {
2052 source_worktree_id,
2053 worktree_id,
2054 language_name: language.name().to_string(),
2055 name: lsp_symbol.name,
2056 kind: lsp_symbol.kind,
2057 label,
2058 path,
2059 range: range_from_lsp(lsp_symbol.location.range),
2060 signature,
2061 })
2062 },
2063 ));
2064 }
2065 })
2066 }
2067
2068 Ok(symbols)
2069 })
2070 } else if let Some(project_id) = self.remote_id() {
2071 let request = self.client.request(proto::GetProjectSymbols {
2072 project_id,
2073 query: query.to_string(),
2074 });
2075 cx.spawn_weak(|this, cx| async move {
2076 let response = request.await?;
2077 let mut symbols = Vec::new();
2078 if let Some(this) = this.upgrade(&cx) {
2079 this.read_with(&cx, |this, _| {
2080 symbols.extend(
2081 response
2082 .symbols
2083 .into_iter()
2084 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2085 );
2086 })
2087 }
2088 Ok(symbols)
2089 })
2090 } else {
2091 Task::ready(Ok(Default::default()))
2092 }
2093 }
2094
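    /// Opens the buffer that a workspace symbol points at, using the language server that
    /// reported the symbol when the project is local, or the host over RPC when it is remote.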
2095 pub fn open_buffer_for_symbol(
2096 &mut self,
2097 symbol: &Symbol,
2098 cx: &mut ModelContext<Self>,
2099 ) -> Task<Result<ModelHandle<Buffer>>> {
2100 if self.is_local() {
2101 let language_server = if let Some(server) = self.language_servers.get(&(
2102 symbol.source_worktree_id,
2103 Arc::from(symbol.language_name.as_str()),
2104 )) {
2105 server.clone()
2106 } else {
2107 return Task::ready(Err(anyhow!(
2108 "language server for worktree and language not found"
2109 )));
2110 };
2111
2112 let worktree_abs_path = if let Some(worktree_abs_path) = self
2113 .worktree_for_id(symbol.worktree_id, cx)
2114 .and_then(|worktree| worktree.read(cx).as_local())
2115 .map(|local_worktree| local_worktree.abs_path())
2116 {
2117 worktree_abs_path
2118 } else {
2119 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2120 };
2121 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2122 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2123 uri
2124 } else {
2125 return Task::ready(Err(anyhow!("invalid symbol path")));
2126 };
2127
2128 self.open_local_buffer_via_lsp(
2129 symbol_uri,
2130 Arc::from(symbol.language_name.as_str()),
2131 language_server,
2132 cx,
2133 )
2134 } else if let Some(project_id) = self.remote_id() {
2135 let request = self.client.request(proto::OpenBufferForSymbol {
2136 project_id,
2137 symbol: Some(serialize_symbol(symbol)),
2138 });
2139 cx.spawn(|this, mut cx| async move {
2140 let response = request.await?;
2141 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2142 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2143 .await
2144 })
2145 } else {
2146 Task::ready(Err(anyhow!("project does not have a remote id")))
2147 }
2148 }
2149
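    /// Requests completions at the given position and converts the LSP text edits into
    /// anchored ranges, skipping insert-and-replace edits and edits whose ranges don't
    /// clip cleanly. Remote buffers are completed via the host.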
2150 pub fn completions<T: ToPointUtf16>(
2151 &self,
2152 source_buffer_handle: &ModelHandle<Buffer>,
2153 position: T,
2154 cx: &mut ModelContext<Self>,
2155 ) -> Task<Result<Vec<Completion>>> {
2156 let source_buffer_handle = source_buffer_handle.clone();
2157 let source_buffer = source_buffer_handle.read(cx);
2158 let buffer_id = source_buffer.remote_id();
2159 let language = source_buffer.language().cloned();
2160 let worktree;
2161 let buffer_abs_path;
2162 if let Some(file) = File::from_dyn(source_buffer.file()) {
2163 worktree = file.worktree.clone();
2164 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2165 } else {
2166 return Task::ready(Ok(Default::default()));
2167 };
2168
2169 let position = position.to_point_utf16(source_buffer);
2170 let anchor = source_buffer.anchor_after(position);
2171
2172 if worktree.read(cx).as_local().is_some() {
2173 let buffer_abs_path = buffer_abs_path.unwrap();
2174 let lang_server =
2175 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2176 server.clone()
2177 } else {
2178 return Task::ready(Ok(Default::default()));
2179 };
2180
2181 cx.spawn(|_, cx| async move {
2182 let completions = lang_server
2183 .request::<lsp::request::Completion>(lsp::CompletionParams {
2184 text_document_position: lsp::TextDocumentPositionParams::new(
2185 lsp::TextDocumentIdentifier::new(
2186 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2187 ),
2188 position.to_lsp_position(),
2189 ),
2190 context: Default::default(),
2191 work_done_progress_params: Default::default(),
2192 partial_result_params: Default::default(),
2193 })
2194 .await
2195 .context("lsp completion request failed")?;
2196
2197 let completions = if let Some(completions) = completions {
2198 match completions {
2199 lsp::CompletionResponse::Array(completions) => completions,
2200 lsp::CompletionResponse::List(list) => list.items,
2201 }
2202 } else {
2203 Default::default()
2204 };
2205
2206 source_buffer_handle.read_with(&cx, |this, _| {
2207 Ok(completions
2208 .into_iter()
2209 .filter_map(|lsp_completion| {
2210 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2211 lsp::CompletionTextEdit::Edit(edit) => {
2212 (range_from_lsp(edit.range), edit.new_text.clone())
2213 }
2214 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2215 log::info!("unsupported insert/replace completion");
2216 return None;
2217 }
2218 };
2219
2220 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2221 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2222 if clipped_start == old_range.start && clipped_end == old_range.end {
2223 Some(Completion {
2224 old_range: this.anchor_before(old_range.start)
2225 ..this.anchor_after(old_range.end),
2226 new_text,
2227 label: language
2228 .as_ref()
2229 .and_then(|l| l.label_for_completion(&lsp_completion))
2230 .unwrap_or_else(|| {
2231 CodeLabel::plain(
2232 lsp_completion.label.clone(),
2233 lsp_completion.filter_text.as_deref(),
2234 )
2235 }),
2236 lsp_completion,
2237 })
2238 } else {
2239 None
2240 }
2241 })
2242 .collect())
2243 })
2244 })
2245 } else if let Some(project_id) = self.remote_id() {
2246 let rpc = self.client.clone();
2247 let message = proto::GetCompletions {
2248 project_id,
2249 buffer_id,
2250 position: Some(language::proto::serialize_anchor(&anchor)),
2251 version: serialize_version(&source_buffer.version()),
2252 };
2253 cx.spawn_weak(|_, mut cx| async move {
2254 let response = rpc.request(message).await?;
2255
2256 source_buffer_handle
2257 .update(&mut cx, |buffer, _| {
2258 buffer.wait_for_version(deserialize_version(response.version))
2259 })
2260 .await;
2261
2262 response
2263 .completions
2264 .into_iter()
2265 .map(|completion| {
2266 language::proto::deserialize_completion(completion, language.as_ref())
2267 })
2268 .collect()
2269 })
2270 } else {
2271 Task::ready(Ok(Default::default()))
2272 }
2273 }
2274
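    /// Resolves the given completion with the language server and applies any additional
    /// text edits it carries (such as auto-imports), returning the resulting transaction.
    /// For remote projects the edits are applied by the host and awaited locally.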
2275 pub fn apply_additional_edits_for_completion(
2276 &self,
2277 buffer_handle: ModelHandle<Buffer>,
2278 completion: Completion,
2279 push_to_history: bool,
2280 cx: &mut ModelContext<Self>,
2281 ) -> Task<Result<Option<Transaction>>> {
2282 let buffer = buffer_handle.read(cx);
2283 let buffer_id = buffer.remote_id();
2284
2285 if self.is_local() {
2286 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2287 server.clone()
2288 } else {
2289 return Task::ready(Ok(Default::default()));
2290 };
2291
2292 cx.spawn(|this, mut cx| async move {
2293 let resolved_completion = lang_server
2294 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2295 .await?;
2296 if let Some(edits) = resolved_completion.additional_text_edits {
2297 let edits = this
2298 .update(&mut cx, |this, cx| {
2299 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2300 })
2301 .await?;
2302 buffer_handle.update(&mut cx, |buffer, cx| {
2303 buffer.finalize_last_transaction();
2304 buffer.start_transaction();
2305 for (range, text) in edits {
2306 buffer.edit([range], text, cx);
2307 }
2308 let transaction = if buffer.end_transaction(cx).is_some() {
2309 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2310 if !push_to_history {
2311 buffer.forget_transaction(transaction.id);
2312 }
2313 Some(transaction)
2314 } else {
2315 None
2316 };
2317 Ok(transaction)
2318 })
2319 } else {
2320 Ok(None)
2321 }
2322 })
2323 } else if let Some(project_id) = self.remote_id() {
2324 let client = self.client.clone();
2325 cx.spawn(|_, mut cx| async move {
2326 let response = client
2327 .request(proto::ApplyCompletionAdditionalEdits {
2328 project_id,
2329 buffer_id,
2330 completion: Some(language::proto::serialize_completion(&completion)),
2331 })
2332 .await?;
2333
2334 if let Some(transaction) = response.transaction {
2335 let transaction = language::proto::deserialize_transaction(transaction)?;
2336 buffer_handle
2337 .update(&mut cx, |buffer, _| {
2338 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2339 })
2340 .await;
2341 if push_to_history {
2342 buffer_handle.update(&mut cx, |buffer, _| {
2343 buffer.push_transaction(transaction.clone(), Instant::now());
2344 });
2345 }
2346 Ok(Some(transaction))
2347 } else {
2348 Ok(None)
2349 }
2350 })
2351 } else {
2352 Task::ready(Err(anyhow!("project does not have a remote id")))
2353 }
2354 }
2355
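    /// Fetches the code actions available for the given range, restricted to the quickfix
    /// and refactor kinds, either from the local language server or from the host over RPC.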
2356 pub fn code_actions<T: ToOffset>(
2357 &self,
2358 buffer_handle: &ModelHandle<Buffer>,
2359 range: Range<T>,
2360 cx: &mut ModelContext<Self>,
2361 ) -> Task<Result<Vec<CodeAction>>> {
2362 let buffer_handle = buffer_handle.clone();
2363 let buffer = buffer_handle.read(cx);
2364 let buffer_id = buffer.remote_id();
2365 let worktree;
2366 let buffer_abs_path;
2367 if let Some(file) = File::from_dyn(buffer.file()) {
2368 worktree = file.worktree.clone();
2369 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2370 } else {
2371 return Task::ready(Ok(Default::default()));
2372 };
2373 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2374
2375 if worktree.read(cx).as_local().is_some() {
2376 let buffer_abs_path = buffer_abs_path.unwrap();
2377 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2378 server.clone()
2379 } else {
2380 return Task::ready(Ok(Default::default()));
2381 };
2382
2383 let lsp_range = lsp::Range::new(
2384 range.start.to_point_utf16(buffer).to_lsp_position(),
2385 range.end.to_point_utf16(buffer).to_lsp_position(),
2386 );
2387 cx.foreground().spawn(async move {
2388                if lang_server.capabilities().code_action_provider.is_none() {
2389 return Ok(Default::default());
2390 }
2391
2392 Ok(lang_server
2393 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2394 text_document: lsp::TextDocumentIdentifier::new(
2395 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2396 ),
2397 range: lsp_range,
2398 work_done_progress_params: Default::default(),
2399 partial_result_params: Default::default(),
2400 context: lsp::CodeActionContext {
2401 diagnostics: Default::default(),
2402 only: Some(vec![
2403 lsp::CodeActionKind::QUICKFIX,
2404 lsp::CodeActionKind::REFACTOR,
2405 lsp::CodeActionKind::REFACTOR_EXTRACT,
2406 ]),
2407 },
2408 })
2409 .await?
2410 .unwrap_or_default()
2411 .into_iter()
2412 .filter_map(|entry| {
2413 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2414 Some(CodeAction {
2415 range: range.clone(),
2416 lsp_action,
2417 })
2418 } else {
2419 None
2420 }
2421 })
2422 .collect())
2423 })
2424 } else if let Some(project_id) = self.remote_id() {
2425 let rpc = self.client.clone();
2426 let version = buffer.version();
2427 cx.spawn_weak(|_, mut cx| async move {
2428 let response = rpc
2429 .request(proto::GetCodeActions {
2430 project_id,
2431 buffer_id,
2432 start: Some(language::proto::serialize_anchor(&range.start)),
2433 end: Some(language::proto::serialize_anchor(&range.end)),
2434 version: serialize_version(&version),
2435 })
2436 .await?;
2437
2438 buffer_handle
2439 .update(&mut cx, |buffer, _| {
2440 buffer.wait_for_version(deserialize_version(response.version))
2441 })
2442 .await;
2443
2444 response
2445 .actions
2446 .into_iter()
2447 .map(language::proto::deserialize_code_action)
2448 .collect()
2449 })
2450 } else {
2451 Task::ready(Ok(Default::default()))
2452 }
2453 }
2454
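    /// Applies a code action, first resolving it with the language server (or re-requesting
    /// it when it carries no resolve data), then applying its workspace edit and returning
    /// the resulting project-wide transaction.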
2455 pub fn apply_code_action(
2456 &self,
2457 buffer_handle: ModelHandle<Buffer>,
2458 mut action: CodeAction,
2459 push_to_history: bool,
2460 cx: &mut ModelContext<Self>,
2461 ) -> Task<Result<ProjectTransaction>> {
2462 if self.is_local() {
2463 let buffer = buffer_handle.read(cx);
2464 let lang_name = if let Some(lang) = buffer.language() {
2465 lang.name()
2466 } else {
2467 return Task::ready(Ok(Default::default()));
2468 };
2469 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2470 server.clone()
2471 } else {
2472 return Task::ready(Ok(Default::default()));
2473 };
2474 let range = action.range.to_point_utf16(buffer);
2475
2476 cx.spawn(|this, mut cx| async move {
2477 if let Some(lsp_range) = action
2478 .lsp_action
2479 .data
2480 .as_mut()
2481 .and_then(|d| d.get_mut("codeActionParams"))
2482 .and_then(|d| d.get_mut("range"))
2483 {
2484 *lsp_range = serde_json::to_value(&lsp::Range::new(
2485 range.start.to_lsp_position(),
2486 range.end.to_lsp_position(),
2487 ))
2488 .unwrap();
2489 action.lsp_action = lang_server
2490 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2491 .await?;
2492 } else {
2493 let actions = this
2494 .update(&mut cx, |this, cx| {
2495 this.code_actions(&buffer_handle, action.range, cx)
2496 })
2497 .await?;
2498 action.lsp_action = actions
2499 .into_iter()
2500 .find(|a| a.lsp_action.title == action.lsp_action.title)
2501 .ok_or_else(|| anyhow!("code action is outdated"))?
2502 .lsp_action;
2503 }
2504
2505 if let Some(edit) = action.lsp_action.edit {
2506 Self::deserialize_workspace_edit(
2507 this,
2508 edit,
2509 push_to_history,
2510 lang_name,
2511 lang_server,
2512 &mut cx,
2513 )
2514 .await
2515 } else {
2516 Ok(ProjectTransaction::default())
2517 }
2518 })
2519 } else if let Some(project_id) = self.remote_id() {
2520 let client = self.client.clone();
2521 let request = proto::ApplyCodeAction {
2522 project_id,
2523 buffer_id: buffer_handle.read(cx).remote_id(),
2524 action: Some(language::proto::serialize_code_action(&action)),
2525 };
2526 cx.spawn(|this, mut cx| async move {
2527 let response = client
2528 .request(request)
2529 .await?
2530 .transaction
2531 .ok_or_else(|| anyhow!("missing transaction"))?;
2532 this.update(&mut cx, |this, cx| {
2533 this.deserialize_project_transaction(response, push_to_history, cx)
2534 })
2535 .await
2536 })
2537 } else {
2538 Task::ready(Err(anyhow!("project does not have a remote id")))
2539 }
2540 }
2541
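    /// Applies an LSP workspace edit to the project: resource operations
    /// (create/rename/delete) go through the filesystem, text edits are applied to buffers
    /// opened via the language server, and one transaction is recorded per edited buffer.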
2542 async fn deserialize_workspace_edit(
2543 this: ModelHandle<Self>,
2544 edit: lsp::WorkspaceEdit,
2545 push_to_history: bool,
2546 language_name: Arc<str>,
2547 language_server: Arc<LanguageServer>,
2548 cx: &mut AsyncAppContext,
2549 ) -> Result<ProjectTransaction> {
2550 let fs = this.read_with(cx, |this, _| this.fs.clone());
2551 let mut operations = Vec::new();
2552 if let Some(document_changes) = edit.document_changes {
2553 match document_changes {
2554 lsp::DocumentChanges::Edits(edits) => {
2555 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2556 }
2557 lsp::DocumentChanges::Operations(ops) => operations = ops,
2558 }
2559 } else if let Some(changes) = edit.changes {
2560 operations.extend(changes.into_iter().map(|(uri, edits)| {
2561 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2562 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2563 uri,
2564 version: None,
2565 },
2566 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2567 })
2568 }));
2569 }
2570
2571 let mut project_transaction = ProjectTransaction::default();
2572 for operation in operations {
2573 match operation {
2574 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2575 let abs_path = op
2576 .uri
2577 .to_file_path()
2578 .map_err(|_| anyhow!("can't convert URI to path"))?;
2579
2580 if let Some(parent_path) = abs_path.parent() {
2581 fs.create_dir(parent_path).await?;
2582 }
                    // `Path::ends_with` matches whole components, never a trailing slash,
                    // so check the URI itself to distinguish directories from files.
2583                    if op.uri.as_str().ends_with('/') {
2584 fs.create_dir(&abs_path).await?;
2585 } else {
2586 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2587 .await?;
2588 }
2589 }
2590 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2591 let source_abs_path = op
2592 .old_uri
2593 .to_file_path()
2594 .map_err(|_| anyhow!("can't convert URI to path"))?;
2595 let target_abs_path = op
2596 .new_uri
2597 .to_file_path()
2598 .map_err(|_| anyhow!("can't convert URI to path"))?;
2599 fs.rename(
2600 &source_abs_path,
2601 &target_abs_path,
2602 op.options.map(Into::into).unwrap_or_default(),
2603 )
2604 .await?;
2605 }
2606 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2607 let abs_path = op
2608 .uri
2609 .to_file_path()
2610 .map_err(|_| anyhow!("can't convert URI to path"))?;
2611 let options = op.options.map(Into::into).unwrap_or_default();
                    // As above, use the URI (not the path) to detect a directory.
2612                    if op.uri.as_str().ends_with('/') {
2613 fs.remove_dir(&abs_path, options).await?;
2614 } else {
2615 fs.remove_file(&abs_path, options).await?;
2616 }
2617 }
2618 lsp::DocumentChangeOperation::Edit(op) => {
2619 let buffer_to_edit = this
2620 .update(cx, |this, cx| {
2621 this.open_local_buffer_via_lsp(
2622 op.text_document.uri,
2623 language_name.clone(),
2624 language_server.clone(),
2625 cx,
2626 )
2627 })
2628 .await?;
2629
2630 let edits = this
2631 .update(cx, |this, cx| {
2632 let edits = op.edits.into_iter().map(|edit| match edit {
2633 lsp::OneOf::Left(edit) => edit,
2634 lsp::OneOf::Right(edit) => edit.text_edit,
2635 });
2636 this.edits_from_lsp(
2637 &buffer_to_edit,
2638 edits,
2639 op.text_document.version,
2640 cx,
2641 )
2642 })
2643 .await?;
2644
2645 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2646 buffer.finalize_last_transaction();
2647 buffer.start_transaction();
2648 for (range, text) in edits {
2649 buffer.edit([range], text, cx);
2650 }
2651 let transaction = if buffer.end_transaction(cx).is_some() {
2652 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2653 if !push_to_history {
2654 buffer.forget_transaction(transaction.id);
2655 }
2656 Some(transaction)
2657 } else {
2658 None
2659 };
2660
2661 transaction
2662 });
2663 if let Some(transaction) = transaction {
2664 project_transaction.0.insert(buffer_to_edit, transaction);
2665 }
2666 }
2667 }
2668 }
2669
2670 Ok(project_transaction)
2671 }
2672
2673 pub fn prepare_rename<T: ToPointUtf16>(
2674 &self,
2675 buffer: ModelHandle<Buffer>,
2676 position: T,
2677 cx: &mut ModelContext<Self>,
2678 ) -> Task<Result<Option<Range<Anchor>>>> {
2679 let position = position.to_point_utf16(buffer.read(cx));
2680 self.request_lsp(buffer, PrepareRename { position }, cx)
2681 }
2682
2683 pub fn perform_rename<T: ToPointUtf16>(
2684 &self,
2685 buffer: ModelHandle<Buffer>,
2686 position: T,
2687 new_name: String,
2688 push_to_history: bool,
2689 cx: &mut ModelContext<Self>,
2690 ) -> Task<Result<ProjectTransaction>> {
2691 let position = position.to_point_utf16(buffer.read(cx));
2692 self.request_lsp(
2693 buffer,
2694 PerformRename {
2695 position,
2696 new_name,
2697 push_to_history,
2698 },
2699 cx,
2700 )
2701 }
2702
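    /// Searches the project for `query`. Locally, candidate paths are scanned in parallel
    /// on background threads, matching files are opened as buffers, and the matches are
    /// returned as anchor ranges per buffer; for remote projects the search runs on the host.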
2703 pub fn search(
2704 &self,
2705 query: SearchQuery,
2706 cx: &mut ModelContext<Self>,
2707 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2708 if self.is_local() {
2709 let snapshots = self
2710 .visible_worktrees(cx)
2711 .filter_map(|tree| {
2712 let tree = tree.read(cx).as_local()?;
2713 Some(tree.snapshot())
2714 })
2715 .collect::<Vec<_>>();
2716
2717 let background = cx.background().clone();
2718 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2719 if path_count == 0 {
2720 return Task::ready(Ok(Default::default()));
2721 }
2722 let workers = background.num_cpus().min(path_count);
2723 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2724 cx.background()
2725 .spawn({
2726 let fs = self.fs.clone();
2727 let background = cx.background().clone();
2728 let query = query.clone();
2729 async move {
2730 let fs = &fs;
2731 let query = &query;
2732 let matching_paths_tx = &matching_paths_tx;
2733 let paths_per_worker = (path_count + workers - 1) / workers;
2734 let snapshots = &snapshots;
2735 background
2736 .scoped(|scope| {
2737 for worker_ix in 0..workers {
2738 let worker_start_ix = worker_ix * paths_per_worker;
2739 let worker_end_ix = worker_start_ix + paths_per_worker;
2740 scope.spawn(async move {
2741 let mut snapshot_start_ix = 0;
2742 let mut abs_path = PathBuf::new();
2743 for snapshot in snapshots {
2744 let snapshot_end_ix =
2745 snapshot_start_ix + snapshot.visible_file_count();
2746 if worker_end_ix <= snapshot_start_ix {
2747 break;
2748 } else if worker_start_ix > snapshot_end_ix {
2749 snapshot_start_ix = snapshot_end_ix;
2750 continue;
2751 } else {
2752 let start_in_snapshot = worker_start_ix
2753 .saturating_sub(snapshot_start_ix);
2754 let end_in_snapshot =
2755 cmp::min(worker_end_ix, snapshot_end_ix)
2756 - snapshot_start_ix;
2757
2758 for entry in snapshot
2759 .files(false, start_in_snapshot)
2760 .take(end_in_snapshot - start_in_snapshot)
2761 {
2762 if matching_paths_tx.is_closed() {
2763 break;
2764 }
2765
2766 abs_path.clear();
2767 abs_path.push(&snapshot.abs_path());
2768 abs_path.push(&entry.path);
2769 let matches = if let Some(file) =
2770 fs.open_sync(&abs_path).await.log_err()
2771 {
2772 query.detect(file).unwrap_or(false)
2773 } else {
2774 false
2775 };
2776
2777 if matches {
2778 let project_path =
2779 (snapshot.id(), entry.path.clone());
2780 if matching_paths_tx
2781 .send(project_path)
2782 .await
2783 .is_err()
2784 {
2785 break;
2786 }
2787 }
2788 }
2789
2790 snapshot_start_ix = snapshot_end_ix;
2791 }
2792 }
2793 });
2794 }
2795 })
2796 .await;
2797 }
2798 })
2799 .detach();
2800
2801 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2802 let open_buffers = self
2803 .opened_buffers
2804 .values()
2805 .filter_map(|b| b.upgrade(cx))
2806 .collect::<HashSet<_>>();
2807 cx.spawn(|this, cx| async move {
2808 for buffer in &open_buffers {
2809 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2810 buffers_tx.send((buffer.clone(), snapshot)).await?;
2811 }
2812
2813 let open_buffers = Rc::new(RefCell::new(open_buffers));
2814 while let Some(project_path) = matching_paths_rx.next().await {
2815 if buffers_tx.is_closed() {
2816 break;
2817 }
2818
2819 let this = this.clone();
2820 let open_buffers = open_buffers.clone();
2821 let buffers_tx = buffers_tx.clone();
2822 cx.spawn(|mut cx| async move {
2823 if let Some(buffer) = this
2824 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2825 .await
2826 .log_err()
2827 {
2828 if open_buffers.borrow_mut().insert(buffer.clone()) {
2829 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2830 buffers_tx.send((buffer, snapshot)).await?;
2831 }
2832 }
2833
2834 Ok::<_, anyhow::Error>(())
2835 })
2836 .detach();
2837 }
2838
2839 Ok::<_, anyhow::Error>(())
2840 })
2841 .detach_and_log_err(cx);
2842
2843 let background = cx.background().clone();
2844 cx.background().spawn(async move {
2845 let query = &query;
2846 let mut matched_buffers = Vec::new();
2847 for _ in 0..workers {
2848 matched_buffers.push(HashMap::default());
2849 }
2850 background
2851 .scoped(|scope| {
2852 for worker_matched_buffers in matched_buffers.iter_mut() {
2853 let mut buffers_rx = buffers_rx.clone();
2854 scope.spawn(async move {
2855 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2856 let buffer_matches = query
2857 .search(snapshot.as_rope())
2858 .await
2859 .iter()
2860 .map(|range| {
2861 snapshot.anchor_before(range.start)
2862 ..snapshot.anchor_after(range.end)
2863 })
2864 .collect::<Vec<_>>();
2865 if !buffer_matches.is_empty() {
2866 worker_matched_buffers
2867 .insert(buffer.clone(), buffer_matches);
2868 }
2869 }
2870 });
2871 }
2872 })
2873 .await;
2874 Ok(matched_buffers.into_iter().flatten().collect())
2875 })
2876 } else if let Some(project_id) = self.remote_id() {
2877 let request = self.client.request(query.to_proto(project_id));
2878 cx.spawn(|this, mut cx| async move {
2879 let response = request.await?;
2880 let mut result = HashMap::default();
2881 for location in response.locations {
2882 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2883 let target_buffer = this
2884 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2885 .await?;
2886 let start = location
2887 .start
2888 .and_then(deserialize_anchor)
2889 .ok_or_else(|| anyhow!("missing target start"))?;
2890 let end = location
2891 .end
2892 .and_then(deserialize_anchor)
2893 .ok_or_else(|| anyhow!("missing target end"))?;
2894 result
2895 .entry(target_buffer)
2896                        .or_default()
2897                        .push(start..end);
2898 }
2899 Ok(result)
2900 })
2901 } else {
2902 Task::ready(Ok(Default::default()))
2903 }
2904 }
2905
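    /// Runs an `LspCommand` against the buffer's language server when the project is local,
    /// or sends the command's proto representation to the host when it is remote.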
2906 fn request_lsp<R: LspCommand>(
2907 &self,
2908 buffer_handle: ModelHandle<Buffer>,
2909 request: R,
2910 cx: &mut ModelContext<Self>,
2911 ) -> Task<Result<R::Response>>
2912 where
2913 <R::LspRequest as lsp::request::Request>::Result: Send,
2914 {
2915 let buffer = buffer_handle.read(cx);
2916 if self.is_local() {
2917 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2918 if let Some((file, language_server)) =
2919 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2920 {
2921 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2922 return cx.spawn(|this, cx| async move {
2923 if !request.check_capabilities(&language_server.capabilities()) {
2924 return Ok(Default::default());
2925 }
2926
2927 let response = language_server
2928 .request::<R::LspRequest>(lsp_params)
2929 .await
2930 .context("lsp request failed")?;
2931 request
2932 .response_from_lsp(response, this, buffer_handle, cx)
2933 .await
2934 });
2935 }
2936 } else if let Some(project_id) = self.remote_id() {
2937 let rpc = self.client.clone();
2938 let message = request.to_proto(project_id, buffer);
2939 return cx.spawn(|this, cx| async move {
2940 let response = rpc.request(message).await?;
2941 request
2942 .response_from_proto(response, this, buffer_handle, cx)
2943 .await
2944 });
2945 }
2946 Task::ready(Ok(Default::default()))
2947 }
2948
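    /// Returns the worktree containing `abs_path` along with the path relative to its root,
    /// creating a new worktree rooted at `abs_path` (with an empty relative path) if none
    /// contains it.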
2949 pub fn find_or_create_local_worktree(
2950 &mut self,
2951 abs_path: impl AsRef<Path>,
2952 visible: bool,
2953 cx: &mut ModelContext<Self>,
2954 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2955 let abs_path = abs_path.as_ref();
2956 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2957 Task::ready(Ok((tree.clone(), relative_path.into())))
2958 } else {
2959 let worktree = self.create_local_worktree(abs_path, visible, cx);
2960 cx.foreground()
2961 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2962 }
2963 }
2964
2965 pub fn find_local_worktree(
2966 &self,
2967 abs_path: &Path,
2968 cx: &AppContext,
2969 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2970 for tree in self.worktrees(cx) {
2971 if let Some(relative_path) = tree
2972 .read(cx)
2973 .as_local()
2974 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2975 {
2976 return Some((tree.clone(), relative_path.into()));
2977 }
2978 }
2979 None
2980 }
2981
2982 pub fn is_shared(&self) -> bool {
2983 match &self.client_state {
2984 ProjectClientState::Local { is_shared, .. } => *is_shared,
2985 ProjectClientState::Remote { .. } => false,
2986 }
2987 }
2988
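    /// Starts loading a local worktree for `abs_path`, deduplicating concurrent requests for
    /// the same path, and registers or shares the new worktree with the server if the project
    /// already has a remote id.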
2989 fn create_local_worktree(
2990 &mut self,
2991 abs_path: impl AsRef<Path>,
2992 visible: bool,
2993 cx: &mut ModelContext<Self>,
2994 ) -> Task<Result<ModelHandle<Worktree>>> {
2995 let fs = self.fs.clone();
2996 let client = self.client.clone();
2997 let next_entry_id = self.next_entry_id.clone();
2998 let path: Arc<Path> = abs_path.as_ref().into();
2999 let task = self
3000 .loading_local_worktrees
3001 .entry(path.clone())
3002 .or_insert_with(|| {
3003 cx.spawn(|project, mut cx| {
3004 async move {
3005 let worktree = Worktree::local(
3006 client.clone(),
3007 path.clone(),
3008 visible,
3009 fs,
3010 next_entry_id,
3011 &mut cx,
3012 )
3013 .await;
3014 project.update(&mut cx, |project, _| {
3015 project.loading_local_worktrees.remove(&path);
3016 });
3017 let worktree = worktree?;
3018
3019 let (remote_project_id, is_shared) =
3020 project.update(&mut cx, |project, cx| {
3021 project.add_worktree(&worktree, cx);
3022 (project.remote_id(), project.is_shared())
3023 });
3024
3025 if let Some(project_id) = remote_project_id {
3026 if is_shared {
3027 worktree
3028 .update(&mut cx, |worktree, cx| {
3029 worktree.as_local_mut().unwrap().share(project_id, cx)
3030 })
3031 .await?;
3032 } else {
3033 worktree
3034 .update(&mut cx, |worktree, cx| {
3035 worktree.as_local_mut().unwrap().register(project_id, cx)
3036 })
3037 .await?;
3038 }
3039 }
3040
3041 Ok(worktree)
3042 }
3043                    .map_err(Arc::new)
3044 })
3045 .shared()
3046 })
3047 .clone();
3048 cx.foreground().spawn(async move {
3049 match task.await {
3050 Ok(worktree) => Ok(worktree),
3051 Err(err) => Err(anyhow!("{}", err)),
3052 }
3053 })
3054 }
3055
3056 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3057 self.worktrees.retain(|worktree| {
3058 worktree
3059 .upgrade(cx)
3060 .map_or(false, |w| w.read(cx).id() != id)
3061 });
3062 cx.notify();
3063 }
3064
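    /// Adds a worktree to the project. Visible and remote worktrees (or any worktree while
    /// the project is shared) are held strongly; other worktrees are held weakly so they can
    /// be released once nothing else uses them.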
3065 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3066 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3067 if worktree.read(cx).is_local() {
3068 cx.subscribe(&worktree, |this, worktree, _, cx| {
3069 this.update_local_worktree_buffers(worktree, cx);
3070 })
3071 .detach();
3072 }
3073
3074 let push_strong_handle = {
3075 let worktree = worktree.read(cx);
3076 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3077 };
3078 if push_strong_handle {
3079 self.worktrees
3080 .push(WorktreeHandle::Strong(worktree.clone()));
3081 } else {
3082 cx.observe_release(&worktree, |this, _, cx| {
3083 this.worktrees
3084 .retain(|worktree| worktree.upgrade(cx).is_some());
3085 cx.notify();
3086 })
3087 .detach();
3088 self.worktrees
3089 .push(WorktreeHandle::Weak(worktree.downgrade()));
3090 }
3091 cx.notify();
3092 }
3093
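    /// Reconciles open buffers with an updated local worktree snapshot, pointing each
    /// buffer's `File` at its new entry (or at no entry if the file is gone), and informing
    /// collaborators via `UpdateBufferFile` when the project has a remote id.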
3094 fn update_local_worktree_buffers(
3095 &mut self,
3096 worktree_handle: ModelHandle<Worktree>,
3097 cx: &mut ModelContext<Self>,
3098 ) {
3099 let snapshot = worktree_handle.read(cx).snapshot();
3100 let mut buffers_to_delete = Vec::new();
3101 for (buffer_id, buffer) in &self.opened_buffers {
3102 if let Some(buffer) = buffer.upgrade(cx) {
3103 buffer.update(cx, |buffer, cx| {
3104 if let Some(old_file) = File::from_dyn(buffer.file()) {
3105 if old_file.worktree != worktree_handle {
3106 return;
3107 }
3108
3109 let new_file = if let Some(entry) = old_file
3110 .entry_id
3111 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3112 {
3113 File {
3114 is_local: true,
3115 entry_id: Some(entry.id),
3116 mtime: entry.mtime,
3117 path: entry.path.clone(),
3118 worktree: worktree_handle.clone(),
3119 }
3120 } else if let Some(entry) =
3121 snapshot.entry_for_path(old_file.path().as_ref())
3122 {
3123 File {
3124 is_local: true,
3125 entry_id: Some(entry.id),
3126 mtime: entry.mtime,
3127 path: entry.path.clone(),
3128 worktree: worktree_handle.clone(),
3129 }
3130 } else {
3131 File {
3132 is_local: true,
3133 entry_id: None,
3134 path: old_file.path().clone(),
3135 mtime: old_file.mtime(),
3136 worktree: worktree_handle.clone(),
3137 }
3138 };
3139
3140 if let Some(project_id) = self.remote_id() {
3141 self.client
3142 .send(proto::UpdateBufferFile {
3143 project_id,
3144 buffer_id: *buffer_id as u64,
3145 file: Some(new_file.to_proto()),
3146 })
3147 .log_err();
3148 }
3149 buffer.file_updated(Box::new(new_file), cx).detach();
3150 }
3151 });
3152 } else {
3153 buffers_to_delete.push(*buffer_id);
3154 }
3155 }
3156
3157 for buffer_id in buffers_to_delete {
3158 self.opened_buffers.remove(&buffer_id);
3159 }
3160 }
3161
3162 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3163 let new_active_entry = entry.and_then(|project_path| {
3164 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3165 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3166 Some(ProjectEntry {
3167 worktree_id: project_path.worktree_id,
3168 entry_id: entry.id,
3169 })
3170 });
3171 if new_active_entry != self.active_entry {
3172 self.active_entry = new_active_entry;
3173 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3174 }
3175 }
3176
3177 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3178 self.language_servers_with_diagnostics_running > 0
3179 }
3180
3181 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3182 let mut summary = DiagnosticSummary::default();
3183 for (_, path_summary) in self.diagnostic_summaries(cx) {
3184 summary.error_count += path_summary.error_count;
3185 summary.warning_count += path_summary.warning_count;
3186 summary.info_count += path_summary.info_count;
3187 summary.hint_count += path_summary.hint_count;
3188 }
3189 summary
3190 }
3191
3192 pub fn diagnostic_summaries<'a>(
3193 &'a self,
3194 cx: &'a AppContext,
3195 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3196 self.worktrees(cx).flat_map(move |worktree| {
3197 let worktree = worktree.read(cx);
3198 let worktree_id = worktree.id();
3199 worktree
3200 .diagnostic_summaries()
3201 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3202 })
3203 }
3204
3205 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3206 self.language_servers_with_diagnostics_running += 1;
3207 if self.language_servers_with_diagnostics_running == 1 {
3208 cx.emit(Event::DiskBasedDiagnosticsStarted);
3209 }
3210 }
3211
3212 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3213 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3214 self.language_servers_with_diagnostics_running -= 1;
3215 if self.language_servers_with_diagnostics_running == 0 {
3216 cx.emit(Event::DiskBasedDiagnosticsFinished);
3217 }
3218 }
3219
3220 pub fn active_entry(&self) -> Option<ProjectEntry> {
3221 self.active_entry
3222 }
3223
3224 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntry> {
3225 self.worktree_for_id(path.worktree_id, cx)?
3226 .read(cx)
3227 .entry_for_path(&path.path)
3228 .map(|entry| ProjectEntry {
3229 worktree_id: path.worktree_id,
3230 entry_id: entry.id,
3231 })
3232 }
3233
3234 // RPC message handlers
3235
3236 async fn handle_unshare_project(
3237 this: ModelHandle<Self>,
3238 _: TypedEnvelope<proto::UnshareProject>,
3239 _: Arc<Client>,
3240 mut cx: AsyncAppContext,
3241 ) -> Result<()> {
3242 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3243 Ok(())
3244 }
3245
3246 async fn handle_add_collaborator(
3247 this: ModelHandle<Self>,
3248 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3249 _: Arc<Client>,
3250 mut cx: AsyncAppContext,
3251 ) -> Result<()> {
3252 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3253 let collaborator = envelope
3254 .payload
3255 .collaborator
3256 .take()
3257 .ok_or_else(|| anyhow!("empty collaborator"))?;
3258
3259 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3260 this.update(&mut cx, |this, cx| {
3261 this.collaborators
3262 .insert(collaborator.peer_id, collaborator);
3263 cx.notify();
3264 });
3265
3266 Ok(())
3267 }
3268
3269 async fn handle_remove_collaborator(
3270 this: ModelHandle<Self>,
3271 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3272 _: Arc<Client>,
3273 mut cx: AsyncAppContext,
3274 ) -> Result<()> {
3275 this.update(&mut cx, |this, cx| {
3276 let peer_id = PeerId(envelope.payload.peer_id);
3277 let replica_id = this
3278 .collaborators
3279 .remove(&peer_id)
3280 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3281 .replica_id;
3282 for (_, buffer) in &this.opened_buffers {
3283 if let Some(buffer) = buffer.upgrade(cx) {
3284 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3285 }
3286 }
3287 cx.notify();
3288 Ok(())
3289 })
3290 }
3291
3292 async fn handle_register_worktree(
3293 this: ModelHandle<Self>,
3294 envelope: TypedEnvelope<proto::RegisterWorktree>,
3295 client: Arc<Client>,
3296 mut cx: AsyncAppContext,
3297 ) -> Result<()> {
3298 this.update(&mut cx, |this, cx| {
3299 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3300 let replica_id = this.replica_id();
3301 let worktree = proto::Worktree {
3302 id: envelope.payload.worktree_id,
3303 root_name: envelope.payload.root_name,
3304 entries: Default::default(),
3305 diagnostic_summaries: Default::default(),
3306 visible: envelope.payload.visible,
3307 };
3308 let (worktree, load_task) =
3309 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3310 this.add_worktree(&worktree, cx);
3311 load_task.detach();
3312 Ok(())
3313 })
3314 }
3315
3316 async fn handle_unregister_worktree(
3317 this: ModelHandle<Self>,
3318 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3319 _: Arc<Client>,
3320 mut cx: AsyncAppContext,
3321 ) -> Result<()> {
3322 this.update(&mut cx, |this, cx| {
3323 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3324 this.remove_worktree(worktree_id, cx);
3325 Ok(())
3326 })
3327 }
3328
3329 async fn handle_update_worktree(
3330 this: ModelHandle<Self>,
3331 envelope: TypedEnvelope<proto::UpdateWorktree>,
3332 _: Arc<Client>,
3333 mut cx: AsyncAppContext,
3334 ) -> Result<()> {
3335 this.update(&mut cx, |this, cx| {
3336 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3337 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3338 worktree.update(cx, |worktree, _| {
3339 let worktree = worktree.as_remote_mut().unwrap();
3340 worktree.update_from_remote(envelope)
3341 })?;
3342 }
3343 Ok(())
3344 })
3345 }
3346
3347 async fn handle_update_diagnostic_summary(
3348 this: ModelHandle<Self>,
3349 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3350 _: Arc<Client>,
3351 mut cx: AsyncAppContext,
3352 ) -> Result<()> {
3353 this.update(&mut cx, |this, cx| {
3354 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3355 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3356 if let Some(summary) = envelope.payload.summary {
3357 let project_path = ProjectPath {
3358 worktree_id,
3359 path: Path::new(&summary.path).into(),
3360 };
3361 worktree.update(cx, |worktree, _| {
3362 worktree
3363 .as_remote_mut()
3364 .unwrap()
3365 .update_diagnostic_summary(project_path.path.clone(), &summary);
3366 });
3367 cx.emit(Event::DiagnosticsUpdated(project_path));
3368 }
3369 }
3370 Ok(())
3371 })
3372 }
3373
3374 async fn handle_start_language_server(
3375 this: ModelHandle<Self>,
3376 envelope: TypedEnvelope<proto::StartLanguageServer>,
3377 _: Arc<Client>,
3378 mut cx: AsyncAppContext,
3379 ) -> Result<()> {
3380 let server = envelope
3381 .payload
3382 .server
3383 .ok_or_else(|| anyhow!("invalid server"))?;
3384 this.update(&mut cx, |this, cx| {
3385 this.language_server_statuses.insert(
3386 server.id as usize,
3387 LanguageServerStatus {
3388 name: server.name,
3389 pending_work: Default::default(),
3390 pending_diagnostic_updates: 0,
3391 },
3392 );
3393 cx.notify();
3394 });
3395 Ok(())
3396 }
3397
3398 async fn handle_update_language_server(
3399 this: ModelHandle<Self>,
3400 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3401 _: Arc<Client>,
3402 mut cx: AsyncAppContext,
3403 ) -> Result<()> {
3404 let language_server_id = envelope.payload.language_server_id as usize;
3405 match envelope
3406 .payload
3407 .variant
3408 .ok_or_else(|| anyhow!("invalid variant"))?
3409 {
3410 proto::update_language_server::Variant::WorkStart(payload) => {
3411 this.update(&mut cx, |this, cx| {
3412 this.on_lsp_work_start(language_server_id, payload.token, cx);
3413 })
3414 }
3415 proto::update_language_server::Variant::WorkProgress(payload) => {
3416 this.update(&mut cx, |this, cx| {
3417 this.on_lsp_work_progress(
3418 language_server_id,
3419 payload.token,
3420 LanguageServerProgress {
3421 message: payload.message,
3422 percentage: payload.percentage.map(|p| p as usize),
3423 last_update_at: Instant::now(),
3424 },
3425 cx,
3426 );
3427 })
3428 }
3429 proto::update_language_server::Variant::WorkEnd(payload) => {
3430 this.update(&mut cx, |this, cx| {
3431 this.on_lsp_work_end(language_server_id, payload.token, cx);
3432 })
3433 }
3434 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3435 this.update(&mut cx, |this, cx| {
3436 this.disk_based_diagnostics_started(cx);
3437 })
3438 }
3439 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3440 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3441 }
3442 }
3443
3444 Ok(())
3445 }
3446
3447 async fn handle_update_buffer(
3448 this: ModelHandle<Self>,
3449 envelope: TypedEnvelope<proto::UpdateBuffer>,
3450 _: Arc<Client>,
3451 mut cx: AsyncAppContext,
3452 ) -> Result<()> {
3453 this.update(&mut cx, |this, cx| {
3454 let payload = envelope.payload.clone();
3455 let buffer_id = payload.buffer_id;
3456 let ops = payload
3457 .operations
3458 .into_iter()
3459 .map(|op| language::proto::deserialize_operation(op))
3460 .collect::<Result<Vec<_>, _>>()?;
3461 match this.opened_buffers.entry(buffer_id) {
3462 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3463 OpenBuffer::Strong(buffer) => {
3464 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3465 }
3466 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3467 OpenBuffer::Weak(_) => {}
3468 },
3469 hash_map::Entry::Vacant(e) => {
3470 e.insert(OpenBuffer::Loading(ops));
3471 }
3472 }
3473 Ok(())
3474 })
3475 }
3476
3477 async fn handle_update_buffer_file(
3478 this: ModelHandle<Self>,
3479 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3480 _: Arc<Client>,
3481 mut cx: AsyncAppContext,
3482 ) -> Result<()> {
3483 this.update(&mut cx, |this, cx| {
3484 let payload = envelope.payload.clone();
3485 let buffer_id = payload.buffer_id;
3486 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3487 let worktree = this
3488 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3489 .ok_or_else(|| anyhow!("no such worktree"))?;
3490 let file = File::from_proto(file, worktree.clone(), cx)?;
3491 let buffer = this
3492 .opened_buffers
3493 .get_mut(&buffer_id)
3494 .and_then(|b| b.upgrade(cx))
3495 .ok_or_else(|| anyhow!("no such buffer"))?;
3496 buffer.update(cx, |buffer, cx| {
3497 buffer.file_updated(Box::new(file), cx).detach();
3498 });
3499 Ok(())
3500 })
3501 }
3502
3503 async fn handle_save_buffer(
3504 this: ModelHandle<Self>,
3505 envelope: TypedEnvelope<proto::SaveBuffer>,
3506 _: Arc<Client>,
3507 mut cx: AsyncAppContext,
3508 ) -> Result<proto::BufferSaved> {
3509 let buffer_id = envelope.payload.buffer_id;
3510 let requested_version = deserialize_version(envelope.payload.version);
3511
3512 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3513 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3514 let buffer = this
3515 .opened_buffers
3516 .get(&buffer_id)
3517 .map(|buffer| buffer.upgrade(cx).unwrap())
3518 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3519 Ok::<_, anyhow::Error>((project_id, buffer))
3520 })?;
3521 buffer
3522 .update(&mut cx, |buffer, _| {
3523 buffer.wait_for_version(requested_version)
3524 })
3525 .await;
3526
3527 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3528 Ok(proto::BufferSaved {
3529 project_id,
3530 buffer_id,
3531 version: serialize_version(&saved_version),
3532 mtime: Some(mtime.into()),
3533 })
3534 }
3535
3536 async fn handle_format_buffers(
3537 this: ModelHandle<Self>,
3538 envelope: TypedEnvelope<proto::FormatBuffers>,
3539 _: Arc<Client>,
3540 mut cx: AsyncAppContext,
3541 ) -> Result<proto::FormatBuffersResponse> {
3542 let sender_id = envelope.original_sender_id()?;
3543 let format = this.update(&mut cx, |this, cx| {
3544 let mut buffers = HashSet::default();
3545 for buffer_id in &envelope.payload.buffer_ids {
3546 buffers.insert(
3547 this.opened_buffers
3548 .get(buffer_id)
3549 .map(|buffer| buffer.upgrade(cx).unwrap())
3550 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3551 );
3552 }
3553 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3554 })?;
3555
3556 let project_transaction = format.await?;
3557 let project_transaction = this.update(&mut cx, |this, cx| {
3558 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3559 });
3560 Ok(proto::FormatBuffersResponse {
3561 transaction: Some(project_transaction),
3562 })
3563 }
3564
3565 async fn handle_get_completions(
3566 this: ModelHandle<Self>,
3567 envelope: TypedEnvelope<proto::GetCompletions>,
3568 _: Arc<Client>,
3569 mut cx: AsyncAppContext,
3570 ) -> Result<proto::GetCompletionsResponse> {
3571 let position = envelope
3572 .payload
3573 .position
3574 .and_then(language::proto::deserialize_anchor)
3575 .ok_or_else(|| anyhow!("invalid position"))?;
3576 let version = deserialize_version(envelope.payload.version);
3577 let buffer = this.read_with(&cx, |this, cx| {
3578 this.opened_buffers
3579 .get(&envelope.payload.buffer_id)
3580 .map(|buffer| buffer.upgrade(cx).unwrap())
3581 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3582 })?;
3583 buffer
3584 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3585 .await;
3586 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3587 let completions = this
3588 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3589 .await?;
3590
3591 Ok(proto::GetCompletionsResponse {
3592 completions: completions
3593 .iter()
3594 .map(language::proto::serialize_completion)
3595 .collect(),
3596 version: serialize_version(&version),
3597 })
3598 }
3599
3600 async fn handle_apply_additional_edits_for_completion(
3601 this: ModelHandle<Self>,
3602 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3603 _: Arc<Client>,
3604 mut cx: AsyncAppContext,
3605 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3606 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3607 let buffer = this
3608 .opened_buffers
3609 .get(&envelope.payload.buffer_id)
3610 .map(|buffer| buffer.upgrade(cx).unwrap())
3611 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3612 let language = buffer.read(cx).language();
3613 let completion = language::proto::deserialize_completion(
3614 envelope
3615 .payload
3616 .completion
3617 .ok_or_else(|| anyhow!("invalid completion"))?,
3618 language,
3619 )?;
3620 Ok::<_, anyhow::Error>(
3621 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3622 )
3623 })?;
3624
3625 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3626 transaction: apply_additional_edits
3627 .await?
3628 .as_ref()
3629 .map(language::proto::serialize_transaction),
3630 })
3631 }
3632
3633 async fn handle_get_code_actions(
3634 this: ModelHandle<Self>,
3635 envelope: TypedEnvelope<proto::GetCodeActions>,
3636 _: Arc<Client>,
3637 mut cx: AsyncAppContext,
3638 ) -> Result<proto::GetCodeActionsResponse> {
3639 let start = envelope
3640 .payload
3641 .start
3642 .and_then(language::proto::deserialize_anchor)
3643 .ok_or_else(|| anyhow!("invalid start"))?;
3644 let end = envelope
3645 .payload
3646 .end
3647 .and_then(language::proto::deserialize_anchor)
3648 .ok_or_else(|| anyhow!("invalid end"))?;
3649 let buffer = this.update(&mut cx, |this, cx| {
3650 this.opened_buffers
3651 .get(&envelope.payload.buffer_id)
3652 .map(|buffer| buffer.upgrade(cx).unwrap())
3653 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3654 })?;
3655 buffer
3656 .update(&mut cx, |buffer, _| {
3657 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3658 })
3659 .await;
3660
3661 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3662 let code_actions = this.update(&mut cx, |this, cx| {
3663 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3664 })?;
3665
3666 Ok(proto::GetCodeActionsResponse {
3667 actions: code_actions
3668 .await?
3669 .iter()
3670 .map(language::proto::serialize_code_action)
3671 .collect(),
3672 version: serialize_version(&version),
3673 })
3674 }
3675
3676 async fn handle_apply_code_action(
3677 this: ModelHandle<Self>,
3678 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3679 _: Arc<Client>,
3680 mut cx: AsyncAppContext,
3681 ) -> Result<proto::ApplyCodeActionResponse> {
3682 let sender_id = envelope.original_sender_id()?;
3683 let action = language::proto::deserialize_code_action(
3684 envelope
3685 .payload
3686 .action
3687 .ok_or_else(|| anyhow!("invalid action"))?,
3688 )?;
3689 let apply_code_action = this.update(&mut cx, |this, cx| {
3690 let buffer = this
3691 .opened_buffers
3692 .get(&envelope.payload.buffer_id)
3693 .map(|buffer| buffer.upgrade(cx).unwrap())
3694 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3695 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3696 })?;
3697
3698 let project_transaction = apply_code_action.await?;
3699 let project_transaction = this.update(&mut cx, |this, cx| {
3700 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3701 });
3702 Ok(proto::ApplyCodeActionResponse {
3703 transaction: Some(project_transaction),
3704 })
3705 }
3706
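    /// Generic handler for LSP-backed requests forwarded by guests: deserializes the
    /// request, runs it through `request_lsp`, and serializes the response for the sender.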
3707 async fn handle_lsp_command<T: LspCommand>(
3708 this: ModelHandle<Self>,
3709 envelope: TypedEnvelope<T::ProtoRequest>,
3710 _: Arc<Client>,
3711 mut cx: AsyncAppContext,
3712 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3713 where
3714 <T::LspRequest as lsp::request::Request>::Result: Send,
3715 {
3716 let sender_id = envelope.original_sender_id()?;
3717 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3718 let buffer_handle = this.read_with(&cx, |this, _| {
3719 this.opened_buffers
3720 .get(&buffer_id)
3721 .and_then(|buffer| buffer.upgrade(&cx))
3722 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3723 })?;
3724 let request = T::from_proto(
3725 envelope.payload,
3726 this.clone(),
3727 buffer_handle.clone(),
3728 cx.clone(),
3729 )
3730 .await?;
3731 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3732 let response = this
3733 .update(&mut cx, |this, cx| {
3734 this.request_lsp(buffer_handle, request, cx)
3735 })
3736 .await?;
3737 this.update(&mut cx, |this, cx| {
3738 Ok(T::response_to_proto(
3739 response,
3740 this,
3741 sender_id,
3742 &buffer_version,
3743 cx,
3744 ))
3745 })
3746 }
3747
3748 async fn handle_get_project_symbols(
3749 this: ModelHandle<Self>,
3750 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3751 _: Arc<Client>,
3752 mut cx: AsyncAppContext,
3753 ) -> Result<proto::GetProjectSymbolsResponse> {
3754 let symbols = this
3755 .update(&mut cx, |this, cx| {
3756 this.symbols(&envelope.payload.query, cx)
3757 })
3758 .await?;
3759
3760 Ok(proto::GetProjectSymbolsResponse {
3761 symbols: symbols.iter().map(serialize_symbol).collect(),
3762 })
3763 }
3764
3765 async fn handle_search_project(
3766 this: ModelHandle<Self>,
3767 envelope: TypedEnvelope<proto::SearchProject>,
3768 _: Arc<Client>,
3769 mut cx: AsyncAppContext,
3770 ) -> Result<proto::SearchProjectResponse> {
3771 let peer_id = envelope.original_sender_id()?;
3772 let query = SearchQuery::from_proto(envelope.payload)?;
3773 let result = this
3774 .update(&mut cx, |this, cx| this.search(query, cx))
3775 .await?;
3776
3777 this.update(&mut cx, |this, cx| {
3778 let mut locations = Vec::new();
3779 for (buffer, ranges) in result {
3780 for range in ranges {
3781 let start = serialize_anchor(&range.start);
3782 let end = serialize_anchor(&range.end);
3783 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3784 locations.push(proto::Location {
3785 buffer: Some(buffer),
3786 start: Some(start),
3787 end: Some(end),
3788 });
3789 }
3790 }
3791 Ok(proto::SearchProjectResponse { locations })
3792 })
3793 }
3794
3795 async fn handle_open_buffer_for_symbol(
3796 this: ModelHandle<Self>,
3797 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3798 _: Arc<Client>,
3799 mut cx: AsyncAppContext,
3800 ) -> Result<proto::OpenBufferForSymbolResponse> {
3801 let peer_id = envelope.original_sender_id()?;
3802 let symbol = envelope
3803 .payload
3804 .symbol
3805 .ok_or_else(|| anyhow!("invalid symbol"))?;
3806 let symbol = this.read_with(&cx, |this, _| {
3807 let symbol = this.deserialize_symbol(symbol)?;
3808 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3809 if signature == symbol.signature {
3810 Ok(symbol)
3811 } else {
3812 Err(anyhow!("invalid symbol signature"))
3813 }
3814 })?;
3815 let buffer = this
3816 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3817 .await?;
3818
3819 Ok(proto::OpenBufferForSymbolResponse {
3820 buffer: Some(this.update(&mut cx, |this, cx| {
3821 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3822 })),
3823 })
3824 }
3825
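// Hashes the worktree id and path together with this project's private nonce. The result
// is attached to symbols sent to peers and re-checked in `handle_open_buffer_for_symbol`.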
3826 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3827 let mut hasher = Sha256::new();
3828 hasher.update(worktree_id.to_proto().to_be_bytes());
3829 hasher.update(path.to_string_lossy().as_bytes());
3830 hasher.update(self.nonce.to_be_bytes());
3831 hasher.finalize().as_slice().try_into().unwrap()
3832 }
3833
3834 async fn handle_open_buffer(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::OpenBuffer>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<proto::OpenBufferResponse> {
3840 let peer_id = envelope.original_sender_id()?;
3841 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3842 let open_buffer = this.update(&mut cx, |this, cx| {
3843 this.open_buffer(
3844 ProjectPath {
3845 worktree_id,
3846 path: PathBuf::from(envelope.payload.path).into(),
3847 },
3848 cx,
3849 )
3850 });
3851
3852 let buffer = open_buffer.await?;
3853 this.update(&mut cx, |this, cx| {
3854 Ok(proto::OpenBufferResponse {
3855 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3856 })
3857 })
3858 }
3859
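// Converts a project-wide transaction into its protobuf form, serializing each affected
// buffer for the given peer alongside its transaction.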
3860 fn serialize_project_transaction_for_peer(
3861 &mut self,
3862 project_transaction: ProjectTransaction,
3863 peer_id: PeerId,
3864 cx: &AppContext,
3865 ) -> proto::ProjectTransaction {
3866 let mut serialized_transaction = proto::ProjectTransaction {
3867 buffers: Default::default(),
3868 transactions: Default::default(),
3869 };
3870 for (buffer, transaction) in project_transaction.0 {
3871 serialized_transaction
3872 .buffers
3873 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3874 serialized_transaction
3875 .transactions
3876 .push(language::proto::serialize_transaction(&transaction));
3877 }
3878 serialized_transaction
3879 }
3880
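// Rebuilds a project transaction received from a peer: deserializes each buffer and
// transaction, waits for the referenced edits to arrive, and optionally pushes the
// transactions onto the buffers' undo histories.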
3881 fn deserialize_project_transaction(
3882 &mut self,
3883 message: proto::ProjectTransaction,
3884 push_to_history: bool,
3885 cx: &mut ModelContext<Self>,
3886 ) -> Task<Result<ProjectTransaction>> {
3887 cx.spawn(|this, mut cx| async move {
3888 let mut project_transaction = ProjectTransaction::default();
3889 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3890 let buffer = this
3891 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3892 .await?;
3893 let transaction = language::proto::deserialize_transaction(transaction)?;
3894 project_transaction.0.insert(buffer, transaction);
3895 }
3896
3897 for (buffer, transaction) in &project_transaction.0 {
3898 buffer
3899 .update(&mut cx, |buffer, _| {
3900 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3901 })
3902 .await;
3903
3904 if push_to_history {
3905 buffer.update(&mut cx, |buffer, _| {
3906 buffer.push_transaction(transaction.clone(), Instant::now());
3907 });
3908 }
3909 }
3910
3911 Ok(project_transaction)
3912 })
3913 }
3914
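// The first time a buffer is shared with a peer its full state is sent; on subsequent
// shares only the buffer id is sent.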
3915 fn serialize_buffer_for_peer(
3916 &mut self,
3917 buffer: &ModelHandle<Buffer>,
3918 peer_id: PeerId,
3919 cx: &AppContext,
3920 ) -> proto::Buffer {
3921 let buffer_id = buffer.read(cx).remote_id();
3922 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3923 if shared_buffers.insert(buffer_id) {
3924 proto::Buffer {
3925 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3926 }
3927 } else {
3928 proto::Buffer {
3929 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3930 }
3931 }
3932 }
3933
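// The inverse of `serialize_buffer_for_peer`: for an id-only message, waits until a buffer
// with that id has been opened locally; for a full state message, constructs the buffer
// and registers it with this project.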
3934 fn deserialize_buffer(
3935 &mut self,
3936 buffer: proto::Buffer,
3937 cx: &mut ModelContext<Self>,
3938 ) -> Task<Result<ModelHandle<Buffer>>> {
3939 let replica_id = self.replica_id();
3940
3941 let opened_buffer_tx = self.opened_buffer.0.clone();
3942 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3943 cx.spawn(|this, mut cx| async move {
3944 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3945 proto::buffer::Variant::Id(id) => {
3946 let buffer = loop {
3947 let buffer = this.read_with(&cx, |this, cx| {
3948 this.opened_buffers
3949 .get(&id)
3950 .and_then(|buffer| buffer.upgrade(cx))
3951 });
3952 if let Some(buffer) = buffer {
3953 break buffer;
3954 }
3955 opened_buffer_rx
3956 .next()
3957 .await
3958 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3959 };
3960 Ok(buffer)
3961 }
3962 proto::buffer::Variant::State(mut buffer) => {
3963 let mut buffer_worktree = None;
3964 let mut buffer_file = None;
3965 if let Some(file) = buffer.file.take() {
3966 this.read_with(&cx, |this, cx| {
3967 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3968 let worktree =
3969 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3970 anyhow!("no worktree found for id {}", file.worktree_id)
3971 })?;
3972 buffer_file =
3973 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3974 as Box<dyn language::File>);
3975 buffer_worktree = Some(worktree);
3976 Ok::<_, anyhow::Error>(())
3977 })?;
3978 }
3979
3980 let buffer = cx.add_model(|cx| {
3981 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3982 });
3983
3984 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3985
3986 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3987 Ok(buffer)
3988 }
3989 }
3990 })
3991 }
3992
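// Reconstructs a `Symbol` from its protobuf form, resolving its language (if loaded) to
// compute a display label and validating the length of its signature.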
3993 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3994 let language = self
3995 .languages
3996 .get_language(&serialized_symbol.language_name);
3997 let start = serialized_symbol
3998 .start
3999 .ok_or_else(|| anyhow!("invalid start"))?;
4000 let end = serialized_symbol
4001 .end
4002 .ok_or_else(|| anyhow!("invalid end"))?;
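// The symbol kind travels over the wire as the raw `lsp::SymbolKind` representation;
// `serialize_symbol` performs the matching transmute in the other direction.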
4003 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4004 Ok(Symbol {
4005 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4006 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4007 language_name: serialized_symbol.language_name.clone(),
4008 label: language
4009 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4010 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4011 name: serialized_symbol.name,
4012 path: PathBuf::from(serialized_symbol.path),
4013 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4014 kind,
4015 signature: serialized_symbol
4016 .signature
4017 .try_into()
4018 .map_err(|_| anyhow!("invalid signature"))?,
4019 })
4020 }
4021
4022 async fn handle_buffer_saved(
4023 this: ModelHandle<Self>,
4024 envelope: TypedEnvelope<proto::BufferSaved>,
4025 _: Arc<Client>,
4026 mut cx: AsyncAppContext,
4027 ) -> Result<()> {
4028 let version = deserialize_version(envelope.payload.version);
4029 let mtime = envelope
4030 .payload
4031 .mtime
4032 .ok_or_else(|| anyhow!("missing mtime"))?
4033 .into();
4034
4035 this.update(&mut cx, |this, cx| {
4036 let buffer = this
4037 .opened_buffers
4038 .get(&envelope.payload.buffer_id)
4039 .and_then(|buffer| buffer.upgrade(cx));
4040 if let Some(buffer) = buffer {
4041 buffer.update(cx, |buffer, cx| {
4042 buffer.did_save(version, mtime, None, cx);
4043 });
4044 }
4045 Ok(())
4046 })
4047 }
4048
4049 async fn handle_buffer_reloaded(
4050 this: ModelHandle<Self>,
4051 envelope: TypedEnvelope<proto::BufferReloaded>,
4052 _: Arc<Client>,
4053 mut cx: AsyncAppContext,
4054 ) -> Result<()> {
4055 let payload = envelope.payload.clone();
4056 let version = deserialize_version(payload.version);
4057 let mtime = payload
4058 .mtime
4059 .ok_or_else(|| anyhow!("missing mtime"))?
4060 .into();
4061 this.update(&mut cx, |this, cx| {
4062 let buffer = this
4063 .opened_buffers
4064 .get(&payload.buffer_id)
4065 .and_then(|buffer| buffer.upgrade(cx));
4066 if let Some(buffer) = buffer {
4067 buffer.update(cx, |buffer, cx| {
4068 buffer.did_reload(version, mtime, cx);
4069 });
4070 }
4071 Ok(())
4072 })
4073 }
4074
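// Fuzzy-matches `query` against the paths of all visible worktrees, returning at most
// `max_results` matches. The matching runs on the background executor and honors
// `cancel_flag`.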
4075 pub fn match_paths<'a>(
4076 &self,
4077 query: &'a str,
4078 include_ignored: bool,
4079 smart_case: bool,
4080 max_results: usize,
4081 cancel_flag: &'a AtomicBool,
4082 cx: &AppContext,
4083 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4084 let worktrees = self
4085 .worktrees(cx)
4086 .filter(|worktree| worktree.read(cx).is_visible())
4087 .collect::<Vec<_>>();
4088 let include_root_name = worktrees.len() > 1;
4089 let candidate_sets = worktrees
4090 .into_iter()
4091 .map(|worktree| CandidateSet {
4092 snapshot: worktree.read(cx).snapshot(),
4093 include_ignored,
4094 include_root_name,
4095 })
4096 .collect::<Vec<_>>();
4097
4098 let background = cx.background().clone();
4099 async move {
4100 fuzzy::match_paths(
4101 candidate_sets.as_slice(),
4102 query,
4103 smart_case,
4104 max_results,
4105 cancel_flag,
4106 background,
4107 )
4108 .await
4109 }
4110 }
4111
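// Converts LSP text edits into anchored buffer edits, resolved against the snapshot that
// was sent to the server at the given document version. Edits that are adjacent (or
// separated only by a newline) are merged, and multi-line edits are diffed against the old
// text so that anchors in unchanged regions keep their positions.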
4112 fn edits_from_lsp(
4113 &mut self,
4114 buffer: &ModelHandle<Buffer>,
4115 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4116 version: Option<i32>,
4117 cx: &mut ModelContext<Self>,
4118 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4119 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4120 cx.background().spawn(async move {
4121 let snapshot = snapshot?;
4122 let mut lsp_edits = lsp_edits
4123 .into_iter()
4124 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4125 .peekable();
4126
4127 let mut edits = Vec::new();
4128 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4129 // Combine any LSP edits that are adjacent.
4130 //
4131 // Also, combine LSP edits that are separated from each other by only
4132 // a newline. This is important because for some code actions,
4133 // Rust-analyzer rewrites the entire buffer via a series of edits that
4134 // are separated by unchanged newline characters.
4135 //
4136 // In order for the diffing logic below to work properly, any edits that
4137 // cancel each other out must be combined into one.
4138 while let Some((next_range, next_text)) = lsp_edits.peek() {
4139 if next_range.start > range.end {
4140 if next_range.start.row > range.end.row + 1
4141 || next_range.start.column > 0
4142 || snapshot.clip_point_utf16(
4143 PointUtf16::new(range.end.row, u32::MAX),
4144 Bias::Left,
4145 ) > range.end
4146 {
4147 break;
4148 }
4149 new_text.push('\n');
4150 }
4151 range.end = next_range.end;
4152 new_text.push_str(&next_text);
4153 lsp_edits.next();
4154 }
4155
4156 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4157 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4158 {
4159 return Err(anyhow!("invalid edits received from language server"));
4160 }
4161
4162 // For multiline edits, perform a diff of the old and new text so that
4163 // we can identify the changes more precisely, preserving the locations
4164 // of any anchors positioned in the unchanged regions.
4165 if range.end.row > range.start.row {
4166 let mut offset = range.start.to_offset(&snapshot);
4167 let old_text = snapshot.text_for_range(range).collect::<String>();
4168
4169 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4170 let mut moved_since_edit = true;
4171 for change in diff.iter_all_changes() {
4172 let tag = change.tag();
4173 let value = change.value();
4174 match tag {
4175 ChangeTag::Equal => {
4176 offset += value.len();
4177 moved_since_edit = true;
4178 }
4179 ChangeTag::Delete => {
4180 let start = snapshot.anchor_after(offset);
4181 let end = snapshot.anchor_before(offset + value.len());
4182 if moved_since_edit {
4183 edits.push((start..end, String::new()));
4184 } else {
4185 edits.last_mut().unwrap().0.end = end;
4186 }
4187 offset += value.len();
4188 moved_since_edit = false;
4189 }
4190 ChangeTag::Insert => {
4191 if moved_since_edit {
4192 let anchor = snapshot.anchor_after(offset);
4193 edits.push((anchor.clone()..anchor, value.to_string()));
4194 } else {
4195 edits.last_mut().unwrap().1.push_str(value);
4196 }
4197 moved_since_edit = false;
4198 }
4199 }
4200 }
4201 } else if range.end == range.start {
4202 let anchor = snapshot.anchor_after(range.start);
4203 edits.push((anchor.clone()..anchor, new_text));
4204 } else {
4205 let edit_start = snapshot.anchor_after(range.start);
4206 let edit_end = snapshot.anchor_before(range.end);
4207 edits.push((edit_start..edit_end, new_text));
4208 }
4209 }
4210
4211 Ok(edits)
4212 })
4213 }
4214
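// Returns the buffer snapshot that was sent to the language server at `version`,
// discarding snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the one
// requested. With no version, the buffer's current text snapshot is returned.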
4215 fn buffer_snapshot_for_lsp_version(
4216 &mut self,
4217 buffer: &ModelHandle<Buffer>,
4218 version: Option<i32>,
4219 cx: &AppContext,
4220 ) -> Result<TextBufferSnapshot> {
4221 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4222
4223 if let Some(version) = version {
4224 let buffer_id = buffer.read(cx).remote_id();
4225 let snapshots = self
4226 .buffer_snapshots
4227 .get_mut(&buffer_id)
4228 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4229 let mut found_snapshot = None;
4230 snapshots.retain(|(snapshot_version, snapshot)| {
4231 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4232 false
4233 } else {
4234 if *snapshot_version == version {
4235 found_snapshot = Some(snapshot.clone());
4236 }
4237 true
4238 }
4239 });
4240
4241 found_snapshot.ok_or_else(|| {
4242 anyhow!(
4243 "snapshot not found for buffer {} at version {}",
4244 buffer_id,
4245 version
4246 )
4247 })
4248 } else {
4249 Ok(buffer.read(cx).text_snapshot())
4250 }
4251 }
4252
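// Returns the language server running for the buffer's worktree and language, if any.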
4253 fn language_server_for_buffer(
4254 &self,
4255 buffer: &Buffer,
4256 cx: &AppContext,
4257 ) -> Option<&Arc<LanguageServer>> {
4258 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4259 let worktree_id = file.worktree_id(cx);
4260 self.language_servers.get(&(worktree_id, language.name()))
4261 } else {
4262 None
4263 }
4264 }
4265}
4266
4267impl WorktreeHandle {
4268 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4269 match self {
4270 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4271 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4272 }
4273 }
4274}
4275
4276impl OpenBuffer {
4277 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4278 match self {
4279 OpenBuffer::Strong(handle) => Some(handle.clone()),
4280 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4281 OpenBuffer::Loading(_) => None,
4282 }
4283 }
4284}
4285
4286struct CandidateSet {
4287 snapshot: Snapshot,
4288 include_ignored: bool,
4289 include_root_name: bool,
4290}
4291
4292impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4293 type Candidates = CandidateSetIter<'a>;
4294
4295 fn id(&self) -> usize {
4296 self.snapshot.id().to_usize()
4297 }
4298
4299 fn len(&self) -> usize {
4300 if self.include_ignored {
4301 self.snapshot.file_count()
4302 } else {
4303 self.snapshot.visible_file_count()
4304 }
4305 }
4306
4307 fn prefix(&self) -> Arc<str> {
4308 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4309 self.snapshot.root_name().into()
4310 } else if self.include_root_name {
4311 format!("{}/", self.snapshot.root_name()).into()
4312 } else {
4313 "".into()
4314 }
4315 }
4316
4317 fn candidates(&'a self, start: usize) -> Self::Candidates {
4318 CandidateSetIter {
4319 traversal: self.snapshot.files(self.include_ignored, start),
4320 }
4321 }
4322}
4323
4324struct CandidateSetIter<'a> {
4325 traversal: Traversal<'a>,
4326}
4327
4328impl<'a> Iterator for CandidateSetIter<'a> {
4329 type Item = PathMatchCandidate<'a>;
4330
4331 fn next(&mut self) -> Option<Self::Item> {
4332 self.traversal.next().map(|entry| {
4333 if let EntryKind::File(char_bag) = entry.kind {
4334 PathMatchCandidate {
4335 path: &entry.path,
4336 char_bag,
4337 }
4338 } else {
4339 unreachable!()
4340 }
4341 })
4342 }
4343}
4344
4345impl Entity for Project {
4346 type Event = Event;
4347
4348 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4349 match &self.client_state {
4350 ProjectClientState::Local { remote_id_rx, .. } => {
4351 if let Some(project_id) = *remote_id_rx.borrow() {
4352 self.client
4353 .send(proto::UnregisterProject { project_id })
4354 .log_err();
4355 }
4356 }
4357 ProjectClientState::Remote { remote_id, .. } => {
4358 self.client
4359 .send(proto::LeaveProject {
4360 project_id: *remote_id,
4361 })
4362 .log_err();
4363 }
4364 }
4365 }
4366
4367 fn app_will_quit(
4368 &mut self,
4369 _: &mut MutableAppContext,
4370 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4371 let shutdown_futures = self
4372 .language_servers
4373 .drain()
4374 .filter_map(|(_, server)| server.shutdown())
4375 .collect::<Vec<_>>();
4376 Some(
4377 async move {
4378 futures::future::join_all(shutdown_futures).await;
4379 }
4380 .boxed(),
4381 )
4382 }
4383}
4384
4385impl Collaborator {
4386 fn from_proto(
4387 message: proto::Collaborator,
4388 user_store: &ModelHandle<UserStore>,
4389 cx: &mut AsyncAppContext,
4390 ) -> impl Future<Output = Result<Self>> {
4391 let user = user_store.update(cx, |user_store, cx| {
4392 user_store.fetch_user(message.user_id, cx)
4393 });
4394
4395 async move {
4396 Ok(Self {
4397 peer_id: PeerId(message.peer_id),
4398 user: user.await?,
4399 replica_id: message.replica_id as ReplicaId,
4400 })
4401 }
4402 }
4403}
4404
4405impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4406 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4407 Self {
4408 worktree_id,
4409 path: path.as_ref().into(),
4410 }
4411 }
4412}
4413
4414impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4415 fn from(options: lsp::CreateFileOptions) -> Self {
4416 Self {
4417 overwrite: options.overwrite.unwrap_or(false),
4418 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4419 }
4420 }
4421}
4422
4423impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4424 fn from(options: lsp::RenameFileOptions) -> Self {
4425 Self {
4426 overwrite: options.overwrite.unwrap_or(false),
4427 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4428 }
4429 }
4430}
4431
4432impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4433 fn from(options: lsp::DeleteFileOptions) -> Self {
4434 Self {
4435 recursive: options.recursive.unwrap_or(false),
4436 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4437 }
4438 }
4439}
4440
4441fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4442 proto::Symbol {
4443 source_worktree_id: symbol.source_worktree_id.to_proto(),
4444 worktree_id: symbol.worktree_id.to_proto(),
4445 language_name: symbol.language_name.clone(),
4446 name: symbol.name.clone(),
4447 kind: unsafe { mem::transmute(symbol.kind) },
4448 path: symbol.path.to_string_lossy().to_string(),
4449 start: Some(proto::Point {
4450 row: symbol.range.start.row,
4451 column: symbol.range.start.column,
4452 }),
4453 end: Some(proto::Point {
4454 row: symbol.range.end.row,
4455 column: symbol.range.end.column,
4456 }),
4457 signature: symbol.signature.to_vec(),
4458 }
4459}
4460
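// Computes `path` relative to `base`, emitting `..` components where the two diverge.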
4461fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4462 let mut path_components = path.components();
4463 let mut base_components = base.components();
4464 let mut components: Vec<Component> = Vec::new();
4465 loop {
4466 match (path_components.next(), base_components.next()) {
4467 (None, None) => break,
4468 (Some(a), None) => {
4469 components.push(a);
4470 components.extend(path_components.by_ref());
4471 break;
4472 }
4473 (None, _) => components.push(Component::ParentDir),
4474 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4475 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4476 (Some(a), Some(_)) => {
4477 components.push(Component::ParentDir);
4478 for _ in base_components {
4479 components.push(Component::ParentDir);
4480 }
4481 components.push(a);
4482 components.extend(path_components.by_ref());
4483 break;
4484 }
4485 }
4486 }
4487 components.iter().map(|c| c.as_os_str()).collect()
4488}
4489
4490#[cfg(test)]
4491mod tests {
4492 use super::{Event, *};
4493 use fs::RealFs;
4494 use futures::StreamExt;
4495 use gpui::test::subscribe;
4496 use language::{
4497 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4498 ToPoint,
4499 };
4500 use lsp::Url;
4501 use serde_json::json;
4502 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4503 use unindent::Unindent as _;
4504 use util::test::temp_tree;
4505 use worktree::WorktreeHandle as _;
4506
4507 #[gpui::test]
4508 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4509 let dir = temp_tree(json!({
4510 "root": {
4511 "apple": "",
4512 "banana": {
4513 "carrot": {
4514 "date": "",
4515 "endive": "",
4516 }
4517 },
4518 "fennel": {
4519 "grape": "",
4520 }
4521 }
4522 }));
4523
4524 let root_link_path = dir.path().join("root_link");
4525 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4526 unix::fs::symlink(
4527 &dir.path().join("root/fennel"),
4528 &dir.path().join("root/finnochio"),
4529 )
4530 .unwrap();
4531
4532 let project = Project::test(Arc::new(RealFs), cx);
4533
4534 let (tree, _) = project
4535 .update(cx, |project, cx| {
4536 project.find_or_create_local_worktree(&root_link_path, true, cx)
4537 })
4538 .await
4539 .unwrap();
4540
4541 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4542 .await;
4543 cx.read(|cx| {
4544 let tree = tree.read(cx);
4545 assert_eq!(tree.file_count(), 5);
4546 assert_eq!(
4547 tree.inode_for_path("fennel/grape"),
4548 tree.inode_for_path("finnochio/grape")
4549 );
4550 });
4551
4552 let cancel_flag = Default::default();
4553 let results = project
4554 .read_with(cx, |project, cx| {
4555 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4556 })
4557 .await;
4558 assert_eq!(
4559 results
4560 .into_iter()
4561 .map(|result| result.path)
4562 .collect::<Vec<Arc<Path>>>(),
4563 vec![
4564 PathBuf::from("banana/carrot/date").into(),
4565 PathBuf::from("banana/carrot/endive").into(),
4566 ]
4567 );
4568 }
4569
4570 #[gpui::test]
4571 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4572 cx.foreground().forbid_parking();
4573
4574 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4575 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4576 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4577 completion_provider: Some(lsp::CompletionOptions {
4578 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4579 ..Default::default()
4580 }),
4581 ..Default::default()
4582 });
4583 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4584 completion_provider: Some(lsp::CompletionOptions {
4585 trigger_characters: Some(vec![":".to_string()]),
4586 ..Default::default()
4587 }),
4588 ..Default::default()
4589 });
4590
4591 let rust_language = Arc::new(Language::new(
4592 LanguageConfig {
4593 name: "Rust".into(),
4594 path_suffixes: vec!["rs".to_string()],
4595 language_server: Some(rust_lsp_config),
4596 ..Default::default()
4597 },
4598 Some(tree_sitter_rust::language()),
4599 ));
4600 let json_language = Arc::new(Language::new(
4601 LanguageConfig {
4602 name: "JSON".into(),
4603 path_suffixes: vec!["json".to_string()],
4604 language_server: Some(json_lsp_config),
4605 ..Default::default()
4606 },
4607 None,
4608 ));
4609
4610 let fs = FakeFs::new(cx.background());
4611 fs.insert_tree(
4612 "/the-root",
4613 json!({
4614 "test.rs": "const A: i32 = 1;",
4615 "test2.rs": "",
4616 "Cargo.toml": "a = 1",
4617 "package.json": "{\"a\": 1}",
4618 }),
4619 )
4620 .await;
4621
4622 let project = Project::test(fs, cx);
4623 project.update(cx, |project, _| {
4624 project.languages.add(rust_language);
4625 project.languages.add(json_language);
4626 });
4627
4628 let worktree_id = project
4629 .update(cx, |project, cx| {
4630 project.find_or_create_local_worktree("/the-root", true, cx)
4631 })
4632 .await
4633 .unwrap()
4634 .0
4635 .read_with(cx, |tree, _| tree.id());
4636
4637 // Open a buffer without an associated language server.
4638 let toml_buffer = project
4639 .update(cx, |project, cx| {
4640 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4641 })
4642 .await
4643 .unwrap();
4644
4645 // Open a buffer with an associated language server.
4646 let rust_buffer = project
4647 .update(cx, |project, cx| {
4648 project.open_buffer((worktree_id, "test.rs"), cx)
4649 })
4650 .await
4651 .unwrap();
4652
4653 // A server is started up, and it is notified about Rust files.
4654 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4655 assert_eq!(
4656 fake_rust_server
4657 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4658 .await
4659 .text_document,
4660 lsp::TextDocumentItem {
4661 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4662 version: 0,
4663 text: "const A: i32 = 1;".to_string(),
4664 language_id: Default::default()
4665 }
4666 );
4667
4668 // The buffer is configured based on the language server's capabilities.
4669 rust_buffer.read_with(cx, |buffer, _| {
4670 assert_eq!(
4671 buffer.completion_triggers(),
4672 &[".".to_string(), "::".to_string()]
4673 );
4674 });
4675 toml_buffer.read_with(cx, |buffer, _| {
4676 assert!(buffer.completion_triggers().is_empty());
4677 });
4678
4679 // Edit a buffer. The changes are reported to the language server.
4680 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4681 assert_eq!(
4682 fake_rust_server
4683 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4684 .await
4685 .text_document,
4686 lsp::VersionedTextDocumentIdentifier::new(
4687 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4688 1
4689 )
4690 );
4691
4692 // Open a third buffer with a different associated language server.
4693 let json_buffer = project
4694 .update(cx, |project, cx| {
4695 project.open_buffer((worktree_id, "package.json"), cx)
4696 })
4697 .await
4698 .unwrap();
4699
4700 // Another language server is started up, and it is notified about the
4701 // newly opened JSON buffer.
4702 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4703 assert_eq!(
4704 fake_json_server
4705 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4706 .await
4707 .text_document,
4708 lsp::TextDocumentItem {
4709 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4710 version: 0,
4711 text: "{\"a\": 1}".to_string(),
4712 language_id: Default::default()
4713 }
4714 );
4715
4716 // This buffer is configured based on the second language server's
4717 // capabilities.
4718 json_buffer.read_with(cx, |buffer, _| {
4719 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4720 });
4721
4722 // When opening another buffer whose language server is already running,
4723 // it is also configured based on the existing language server's capabilities.
4724 let rust_buffer2 = project
4725 .update(cx, |project, cx| {
4726 project.open_buffer((worktree_id, "test2.rs"), cx)
4727 })
4728 .await
4729 .unwrap();
4730 rust_buffer2.read_with(cx, |buffer, _| {
4731 assert_eq!(
4732 buffer.completion_triggers(),
4733 &[".".to_string(), "::".to_string()]
4734 );
4735 });
4736
4737 // Changes are reported only to servers matching the buffer's language.
4738 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4739 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4740 assert_eq!(
4741 fake_rust_server
4742 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4743 .await
4744 .text_document,
4745 lsp::VersionedTextDocumentIdentifier::new(
4746 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4747 1
4748 )
4749 );
4750
4751 // Save notifications are reported to all servers.
4752 toml_buffer
4753 .update(cx, |buffer, cx| buffer.save(cx))
4754 .await
4755 .unwrap();
4756 assert_eq!(
4757 fake_rust_server
4758 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4759 .await
4760 .text_document,
4761 lsp::TextDocumentIdentifier::new(
4762 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4763 )
4764 );
4765 assert_eq!(
4766 fake_json_server
4767 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4768 .await
4769 .text_document,
4770 lsp::TextDocumentIdentifier::new(
4771 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4772 )
4773 );
4774
4775 // Close notifications are reported only to servers matching the buffer's language.
4776 cx.update(|_| drop(json_buffer));
4777 let close_message = lsp::DidCloseTextDocumentParams {
4778 text_document: lsp::TextDocumentIdentifier::new(
4779 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4780 ),
4781 };
4782 assert_eq!(
4783 fake_json_server
4784 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4785 .await,
4786 close_message,
4787 );
4788 }
4789
4790 #[gpui::test]
4791 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4792 cx.foreground().forbid_parking();
4793
4794 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4795 let progress_token = language_server_config
4796 .disk_based_diagnostics_progress_token
4797 .clone()
4798 .unwrap();
4799
4800 let language = Arc::new(Language::new(
4801 LanguageConfig {
4802 name: "Rust".into(),
4803 path_suffixes: vec!["rs".to_string()],
4804 language_server: Some(language_server_config),
4805 ..Default::default()
4806 },
4807 Some(tree_sitter_rust::language()),
4808 ));
4809
4810 let fs = FakeFs::new(cx.background());
4811 fs.insert_tree(
4812 "/dir",
4813 json!({
4814 "a.rs": "fn a() { A }",
4815 "b.rs": "const y: i32 = 1",
4816 }),
4817 )
4818 .await;
4819
4820 let project = Project::test(fs, cx);
4821 project.update(cx, |project, _| project.languages.add(language));
4822
4823 let (tree, _) = project
4824 .update(cx, |project, cx| {
4825 project.find_or_create_local_worktree("/dir", true, cx)
4826 })
4827 .await
4828 .unwrap();
4829 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4830
4831 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4832 .await;
4833
4834 // Cause the worktree to start the fake language server
4835 let _buffer = project
4836 .update(cx, |project, cx| {
4837 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4838 })
4839 .await
4840 .unwrap();
4841
4842 let mut events = subscribe(&project, cx);
4843
4844 let mut fake_server = fake_servers.next().await.unwrap();
4845 fake_server.start_progress(&progress_token).await;
4846 assert_eq!(
4847 events.next().await.unwrap(),
4848 Event::DiskBasedDiagnosticsStarted
4849 );
4850
4851 fake_server.start_progress(&progress_token).await;
4852 fake_server.end_progress(&progress_token).await;
4853 fake_server.start_progress(&progress_token).await;
4854
4855 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4856 lsp::PublishDiagnosticsParams {
4857 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4858 version: None,
4859 diagnostics: vec![lsp::Diagnostic {
4860 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4861 severity: Some(lsp::DiagnosticSeverity::ERROR),
4862 message: "undefined variable 'A'".to_string(),
4863 ..Default::default()
4864 }],
4865 },
4866 );
4867 assert_eq!(
4868 events.next().await.unwrap(),
4869 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4870 );
4871
4872 fake_server.end_progress(&progress_token).await;
4873 fake_server.end_progress(&progress_token).await;
4874 assert_eq!(
4875 events.next().await.unwrap(),
4876 Event::DiskBasedDiagnosticsUpdated
4877 );
4878 assert_eq!(
4879 events.next().await.unwrap(),
4880 Event::DiskBasedDiagnosticsFinished
4881 );
4882
4883 let buffer = project
4884 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4885 .await
4886 .unwrap();
4887
4888 buffer.read_with(cx, |buffer, _| {
4889 let snapshot = buffer.snapshot();
4890 let diagnostics = snapshot
4891 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4892 .collect::<Vec<_>>();
4893 assert_eq!(
4894 diagnostics,
4895 &[DiagnosticEntry {
4896 range: Point::new(0, 9)..Point::new(0, 10),
4897 diagnostic: Diagnostic {
4898 severity: lsp::DiagnosticSeverity::ERROR,
4899 message: "undefined variable 'A'".to_string(),
4900 group_id: 0,
4901 is_primary: true,
4902 ..Default::default()
4903 }
4904 }]
4905 )
4906 });
4907 }
4908
4909 #[gpui::test]
4910 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4911 cx.foreground().forbid_parking();
4912
4913 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4914 lsp_config
4915 .disk_based_diagnostic_sources
4916 .insert("disk".to_string());
4917 let language = Arc::new(Language::new(
4918 LanguageConfig {
4919 name: "Rust".into(),
4920 path_suffixes: vec!["rs".to_string()],
4921 language_server: Some(lsp_config),
4922 ..Default::default()
4923 },
4924 Some(tree_sitter_rust::language()),
4925 ));
4926
4927 let text = "
4928 fn a() { A }
4929 fn b() { BB }
4930 fn c() { CCC }
4931 "
4932 .unindent();
4933
4934 let fs = FakeFs::new(cx.background());
4935 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4936
4937 let project = Project::test(fs, cx);
4938 project.update(cx, |project, _| project.languages.add(language));
4939
4940 let worktree_id = project
4941 .update(cx, |project, cx| {
4942 project.find_or_create_local_worktree("/dir", true, cx)
4943 })
4944 .await
4945 .unwrap()
4946 .0
4947 .read_with(cx, |tree, _| tree.id());
4948
4949 let buffer = project
4950 .update(cx, |project, cx| {
4951 project.open_buffer((worktree_id, "a.rs"), cx)
4952 })
4953 .await
4954 .unwrap();
4955
4956 let mut fake_server = fake_servers.next().await.unwrap();
4957 let open_notification = fake_server
4958 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4959 .await;
4960
4961 // Edit the buffer, moving the content down
4962 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4963 let change_notification_1 = fake_server
4964 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4965 .await;
4966 assert!(
4967 change_notification_1.text_document.version > open_notification.text_document.version
4968 );
4969
4970 // Report some diagnostics for the initial version of the buffer
4971 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4972 lsp::PublishDiagnosticsParams {
4973 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4974 version: Some(open_notification.text_document.version),
4975 diagnostics: vec![
4976 lsp::Diagnostic {
4977 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4978 severity: Some(DiagnosticSeverity::ERROR),
4979 message: "undefined variable 'A'".to_string(),
4980 source: Some("disk".to_string()),
4981 ..Default::default()
4982 },
4983 lsp::Diagnostic {
4984 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4985 severity: Some(DiagnosticSeverity::ERROR),
4986 message: "undefined variable 'BB'".to_string(),
4987 source: Some("disk".to_string()),
4988 ..Default::default()
4989 },
4990 lsp::Diagnostic {
4991 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4992 severity: Some(DiagnosticSeverity::ERROR),
4993 source: Some("disk".to_string()),
4994 message: "undefined variable 'CCC'".to_string(),
4995 ..Default::default()
4996 },
4997 ],
4998 },
4999 );
5000
5001 // The diagnostics have moved down since they were created.
5002 buffer.next_notification(cx).await;
5003 buffer.read_with(cx, |buffer, _| {
5004 assert_eq!(
5005 buffer
5006 .snapshot()
5007 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5008 .collect::<Vec<_>>(),
5009 &[
5010 DiagnosticEntry {
5011 range: Point::new(3, 9)..Point::new(3, 11),
5012 diagnostic: Diagnostic {
5013 severity: DiagnosticSeverity::ERROR,
5014 message: "undefined variable 'BB'".to_string(),
5015 is_disk_based: true,
5016 group_id: 1,
5017 is_primary: true,
5018 ..Default::default()
5019 },
5020 },
5021 DiagnosticEntry {
5022 range: Point::new(4, 9)..Point::new(4, 12),
5023 diagnostic: Diagnostic {
5024 severity: DiagnosticSeverity::ERROR,
5025 message: "undefined variable 'CCC'".to_string(),
5026 is_disk_based: true,
5027 group_id: 2,
5028 is_primary: true,
5029 ..Default::default()
5030 }
5031 }
5032 ]
5033 );
5034 assert_eq!(
5035 chunks_with_diagnostics(buffer, 0..buffer.len()),
5036 [
5037 ("\n\nfn a() { ".to_string(), None),
5038 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5039 (" }\nfn b() { ".to_string(), None),
5040 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5041 (" }\nfn c() { ".to_string(), None),
5042 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5043 (" }\n".to_string(), None),
5044 ]
5045 );
5046 assert_eq!(
5047 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5048 [
5049 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5050 (" }\nfn c() { ".to_string(), None),
5051 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5052 ]
5053 );
5054 });
5055
5056 // Ensure overlapping diagnostics are highlighted correctly.
5057 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5058 lsp::PublishDiagnosticsParams {
5059 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5060 version: Some(open_notification.text_document.version),
5061 diagnostics: vec![
5062 lsp::Diagnostic {
5063 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5064 severity: Some(DiagnosticSeverity::ERROR),
5065 message: "undefined variable 'A'".to_string(),
5066 source: Some("disk".to_string()),
5067 ..Default::default()
5068 },
5069 lsp::Diagnostic {
5070 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5071 severity: Some(DiagnosticSeverity::WARNING),
5072 message: "unreachable statement".to_string(),
5073 source: Some("disk".to_string()),
5074 ..Default::default()
5075 },
5076 ],
5077 },
5078 );
5079
5080 buffer.next_notification(cx).await;
5081 buffer.read_with(cx, |buffer, _| {
5082 assert_eq!(
5083 buffer
5084 .snapshot()
5085 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5086 .collect::<Vec<_>>(),
5087 &[
5088 DiagnosticEntry {
5089 range: Point::new(2, 9)..Point::new(2, 12),
5090 diagnostic: Diagnostic {
5091 severity: DiagnosticSeverity::WARNING,
5092 message: "unreachable statement".to_string(),
5093 is_disk_based: true,
5094 group_id: 1,
5095 is_primary: true,
5096 ..Default::default()
5097 }
5098 },
5099 DiagnosticEntry {
5100 range: Point::new(2, 9)..Point::new(2, 10),
5101 diagnostic: Diagnostic {
5102 severity: DiagnosticSeverity::ERROR,
5103 message: "undefined variable 'A'".to_string(),
5104 is_disk_based: true,
5105 group_id: 0,
5106 is_primary: true,
5107 ..Default::default()
5108 },
5109 }
5110 ]
5111 );
5112 assert_eq!(
5113 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5114 [
5115 ("fn a() { ".to_string(), None),
5116 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5117 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5118 ("\n".to_string(), None),
5119 ]
5120 );
5121 assert_eq!(
5122 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5123 [
5124 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5125 ("\n".to_string(), None),
5126 ]
5127 );
5128 });
5129
5130 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5131 // changes since the last save.
5132 buffer.update(cx, |buffer, cx| {
5133 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5134 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5135 });
5136 let change_notification_2 =
5137 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5138 assert!(
5139 change_notification_2.await.text_document.version
5140 > change_notification_1.text_document.version
5141 );
5142
5143 // Handle out-of-order diagnostics
5144 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5145 lsp::PublishDiagnosticsParams {
5146 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5147 version: Some(open_notification.text_document.version),
5148 diagnostics: vec![
5149 lsp::Diagnostic {
5150 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5151 severity: Some(DiagnosticSeverity::ERROR),
5152 message: "undefined variable 'BB'".to_string(),
5153 source: Some("disk".to_string()),
5154 ..Default::default()
5155 },
5156 lsp::Diagnostic {
5157 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5158 severity: Some(DiagnosticSeverity::WARNING),
5159 message: "undefined variable 'A'".to_string(),
5160 source: Some("disk".to_string()),
5161 ..Default::default()
5162 },
5163 ],
5164 },
5165 );
5166
5167 buffer.next_notification(cx).await;
5168 buffer.read_with(cx, |buffer, _| {
5169 assert_eq!(
5170 buffer
5171 .snapshot()
5172 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5173 .collect::<Vec<_>>(),
5174 &[
5175 DiagnosticEntry {
5176 range: Point::new(2, 21)..Point::new(2, 22),
5177 diagnostic: Diagnostic {
5178 severity: DiagnosticSeverity::WARNING,
5179 message: "undefined variable 'A'".to_string(),
5180 is_disk_based: true,
5181 group_id: 1,
5182 is_primary: true,
5183 ..Default::default()
5184 }
5185 },
5186 DiagnosticEntry {
5187 range: Point::new(3, 9)..Point::new(3, 11),
5188 diagnostic: Diagnostic {
5189 severity: DiagnosticSeverity::ERROR,
5190 message: "undefined variable 'BB'".to_string(),
5191 is_disk_based: true,
5192 group_id: 0,
5193 is_primary: true,
5194 ..Default::default()
5195 },
5196 }
5197 ]
5198 );
5199 });
5200 }
5201
5202 #[gpui::test]
5203 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5204 cx.foreground().forbid_parking();
5205
5206 let text = concat!(
5207 "let one = ;\n", //
5208 "let two = \n",
5209 "let three = 3;\n",
5210 );
5211
5212 let fs = FakeFs::new(cx.background());
5213 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5214
5215 let project = Project::test(fs, cx);
5216 let worktree_id = project
5217 .update(cx, |project, cx| {
5218 project.find_or_create_local_worktree("/dir", true, cx)
5219 })
5220 .await
5221 .unwrap()
5222 .0
5223 .read_with(cx, |tree, _| tree.id());
5224
5225 let buffer = project
5226 .update(cx, |project, cx| {
5227 project.open_buffer((worktree_id, "a.rs"), cx)
5228 })
5229 .await
5230 .unwrap();
5231
5232 project.update(cx, |project, cx| {
5233 project
5234 .update_buffer_diagnostics(
5235 &buffer,
5236 vec![
5237 DiagnosticEntry {
5238 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5239 diagnostic: Diagnostic {
5240 severity: DiagnosticSeverity::ERROR,
5241 message: "syntax error 1".to_string(),
5242 ..Default::default()
5243 },
5244 },
5245 DiagnosticEntry {
5246 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5247 diagnostic: Diagnostic {
5248 severity: DiagnosticSeverity::ERROR,
5249 message: "syntax error 2".to_string(),
5250 ..Default::default()
5251 },
5252 },
5253 ],
5254 None,
5255 cx,
5256 )
5257 .unwrap();
5258 });
5259
5260 // An empty range is extended forward to include the following character.
5261 // At the end of a line, an empty range is extended backward to include
5262 // the preceding character.
5263 buffer.read_with(cx, |buffer, _| {
5264 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5265 assert_eq!(
5266 chunks
5267 .iter()
5268 .map(|(s, d)| (s.as_str(), *d))
5269 .collect::<Vec<_>>(),
5270 &[
5271 ("let one = ", None),
5272 (";", Some(DiagnosticSeverity::ERROR)),
5273 ("\nlet two =", None),
5274 (" ", Some(DiagnosticSeverity::ERROR)),
5275 ("\nlet three = 3;\n", None)
5276 ]
5277 );
5278 });
5279 }
5280
5281 #[gpui::test]
5282 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5283 cx.foreground().forbid_parking();
5284
5285 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5286 let language = Arc::new(Language::new(
5287 LanguageConfig {
5288 name: "Rust".into(),
5289 path_suffixes: vec!["rs".to_string()],
5290 language_server: Some(lsp_config),
5291 ..Default::default()
5292 },
5293 Some(tree_sitter_rust::language()),
5294 ));
5295
5296 let text = "
5297 fn a() {
5298 f1();
5299 }
5300 fn b() {
5301 f2();
5302 }
5303 fn c() {
5304 f3();
5305 }
5306 "
5307 .unindent();
5308
5309 let fs = FakeFs::new(cx.background());
5310 fs.insert_tree(
5311 "/dir",
5312 json!({
5313 "a.rs": text.clone(),
5314 }),
5315 )
5316 .await;
5317
5318 let project = Project::test(fs, cx);
5319 project.update(cx, |project, _| project.languages.add(language));
5320
5321 let worktree_id = project
5322 .update(cx, |project, cx| {
5323 project.find_or_create_local_worktree("/dir", true, cx)
5324 })
5325 .await
5326 .unwrap()
5327 .0
5328 .read_with(cx, |tree, _| tree.id());
5329
5330 let buffer = project
5331 .update(cx, |project, cx| {
5332 project.open_buffer((worktree_id, "a.rs"), cx)
5333 })
5334 .await
5335 .unwrap();
5336
5337 let mut fake_server = fake_servers.next().await.unwrap();
5338 let lsp_document_version = fake_server
5339 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5340 .await
5341 .text_document
5342 .version;
5343
5344 // Simulate editing the buffer after the language server computes some edits.
5345 buffer.update(cx, |buffer, cx| {
5346 buffer.edit(
5347 [Point::new(0, 0)..Point::new(0, 0)],
5348 "// above first function\n",
5349 cx,
5350 );
5351 buffer.edit(
5352 [Point::new(2, 0)..Point::new(2, 0)],
5353 " // inside first function\n",
5354 cx,
5355 );
5356 buffer.edit(
5357 [Point::new(6, 4)..Point::new(6, 4)],
5358 "// inside second function ",
5359 cx,
5360 );
5361
5362 assert_eq!(
5363 buffer.text(),
5364 "
5365 // above first function
5366 fn a() {
5367 // inside first function
5368 f1();
5369 }
5370 fn b() {
5371 // inside second function f2();
5372 }
5373 fn c() {
5374 f3();
5375 }
5376 "
5377 .unindent()
5378 );
5379 });
5380
5381 let edits = project
5382 .update(cx, |project, cx| {
5383 project.edits_from_lsp(
5384 &buffer,
5385 vec![
5386 // replace body of first function
5387 lsp::TextEdit {
5388 range: lsp::Range::new(
5389 lsp::Position::new(0, 0),
5390 lsp::Position::new(3, 0),
5391 ),
5392 new_text: "
5393 fn a() {
5394 f10();
5395 }
5396 "
5397 .unindent(),
5398 },
5399 // edit inside second function
5400 lsp::TextEdit {
5401 range: lsp::Range::new(
5402 lsp::Position::new(4, 6),
5403 lsp::Position::new(4, 6),
5404 ),
5405 new_text: "00".into(),
5406 },
5407 // edit inside third function via two distinct edits
5408 lsp::TextEdit {
5409 range: lsp::Range::new(
5410 lsp::Position::new(7, 5),
5411 lsp::Position::new(7, 5),
5412 ),
5413 new_text: "4000".into(),
5414 },
5415 lsp::TextEdit {
5416 range: lsp::Range::new(
5417 lsp::Position::new(7, 5),
5418 lsp::Position::new(7, 6),
5419 ),
5420 new_text: "".into(),
5421 },
5422 ],
5423 Some(lsp_document_version),
5424 cx,
5425 )
5426 })
5427 .await
5428 .unwrap();
5429
5430 buffer.update(cx, |buffer, cx| {
5431 for (range, new_text) in edits {
5432 buffer.edit([range], new_text, cx);
5433 }
5434 assert_eq!(
5435 buffer.text(),
5436 "
5437 // above first function
5438 fn a() {
5439 // inside first function
5440 f10();
5441 }
5442 fn b() {
5443 // inside second function f200();
5444 }
5445 fn c() {
5446 f4000();
5447 }
5448 "
5449 .unindent()
5450 );
5451 });
5452 }
5453
5454 #[gpui::test]
5455 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5456 cx.foreground().forbid_parking();
5457
5458 let text = "
5459 use a::b;
5460 use a::c;
5461
5462 fn f() {
5463 b();
5464 c();
5465 }
5466 "
5467 .unindent();
5468
5469 let fs = FakeFs::new(cx.background());
5470 fs.insert_tree(
5471 "/dir",
5472 json!({
5473 "a.rs": text.clone(),
5474 }),
5475 )
5476 .await;
5477
5478 let project = Project::test(fs, cx);
5479 let worktree_id = project
5480 .update(cx, |project, cx| {
5481 project.find_or_create_local_worktree("/dir", true, cx)
5482 })
5483 .await
5484 .unwrap()
5485 .0
5486 .read_with(cx, |tree, _| tree.id());
5487
5488 let buffer = project
5489 .update(cx, |project, cx| {
5490 project.open_buffer((worktree_id, "a.rs"), cx)
5491 })
5492 .await
5493 .unwrap();
5494
5495 // Simulate the language server sending us a small edit in the form of a very large diff.
5496 // Rust-analyzer does this when performing a merge-imports code action.
5497 let edits = project
5498 .update(cx, |project, cx| {
5499 project.edits_from_lsp(
5500 &buffer,
5501 [
5502 // Replace the first use statement without editing the semicolon.
5503 lsp::TextEdit {
5504 range: lsp::Range::new(
5505 lsp::Position::new(0, 4),
5506 lsp::Position::new(0, 8),
5507 ),
5508 new_text: "a::{b, c}".into(),
5509 },
5510 // Reinsert the remainder of the file between the semicolon and the final
5511 // newline of the file.
5512 lsp::TextEdit {
5513 range: lsp::Range::new(
5514 lsp::Position::new(0, 9),
5515 lsp::Position::new(0, 9),
5516 ),
5517 new_text: "\n\n".into(),
5518 },
5519 lsp::TextEdit {
5520 range: lsp::Range::new(
5521 lsp::Position::new(0, 9),
5522 lsp::Position::new(0, 9),
5523 ),
5524 new_text: "
5525 fn f() {
5526 b();
5527 c();
5528 }"
5529 .unindent(),
5530 },
5531 // Delete everything after the first newline of the file.
5532 lsp::TextEdit {
5533 range: lsp::Range::new(
5534 lsp::Position::new(1, 0),
5535 lsp::Position::new(7, 0),
5536 ),
5537 new_text: "".into(),
5538 },
5539 ],
5540 None,
5541 cx,
5542 )
5543 })
5544 .await
5545 .unwrap();
5546
5547 buffer.update(cx, |buffer, cx| {
5548 let edits = edits
5549 .into_iter()
5550 .map(|(range, text)| {
5551 (
5552 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5553 text,
5554 )
5555 })
5556 .collect::<Vec<_>>();
5557
5558 assert_eq!(
5559 edits,
5560 [
5561 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5562 (Point::new(1, 0)..Point::new(2, 0), "".into())
5563 ]
5564 );
5565
5566 for (range, new_text) in edits {
5567 buffer.edit([range], new_text, cx);
5568 }
5569 assert_eq!(
5570 buffer.text(),
5571 "
5572 use a::{b, c};
5573
5574 fn f() {
5575 b();
5576 c();
5577 }
5578 "
5579 .unindent()
5580 );
5581 });
5582 }
5583
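// Collects the buffer's chunks over `range`, coalescing adjacent chunks that share the
// same diagnostic severity.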
5584 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5585 buffer: &Buffer,
5586 range: Range<T>,
5587 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5588 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5589 for chunk in buffer.snapshot().chunks(range, true) {
5590 if chunks.last().map_or(false, |prev_chunk| {
5591 prev_chunk.1 == chunk.diagnostic_severity
5592 }) {
5593 chunks.last_mut().unwrap().0.push_str(chunk.text);
5594 } else {
5595 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5596 }
5597 }
5598 chunks
5599 }
5600
5601 #[gpui::test]
5602 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5603 let dir = temp_tree(json!({
5604 "root": {
5605 "dir1": {},
5606 "dir2": {
5607 "dir3": {}
5608 }
5609 }
5610 }));
5611
5612 let project = Project::test(Arc::new(RealFs), cx);
5613 let (tree, _) = project
5614 .update(cx, |project, cx| {
5615 project.find_or_create_local_worktree(&dir.path(), true, cx)
5616 })
5617 .await
5618 .unwrap();
5619
5620 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5621 .await;
5622
5623 let cancel_flag = Default::default();
5624 let results = project
5625 .read_with(cx, |project, cx| {
5626 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5627 })
5628 .await;
5629
5630 assert!(results.is_empty());
5631 }
5632
5633 #[gpui::test]
5634 async fn test_definition(cx: &mut gpui::TestAppContext) {
5635 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5636 let language = Arc::new(Language::new(
5637 LanguageConfig {
5638 name: "Rust".into(),
5639 path_suffixes: vec!["rs".to_string()],
5640 language_server: Some(language_server_config),
5641 ..Default::default()
5642 },
5643 Some(tree_sitter_rust::language()),
5644 ));
5645
5646 let fs = FakeFs::new(cx.background());
5647 fs.insert_tree(
5648 "/dir",
5649 json!({
5650 "a.rs": "const fn a() { A }",
5651 "b.rs": "const y: i32 = crate::a()",
5652 }),
5653 )
5654 .await;
5655
5656 let project = Project::test(fs, cx);
5657 project.update(cx, |project, _| {
5658 Arc::get_mut(&mut project.languages).unwrap().add(language);
5659 });
5660
5661 let (tree, _) = project
5662 .update(cx, |project, cx| {
5663 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5664 })
5665 .await
5666 .unwrap();
5667 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5668 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5669 .await;
5670
5671 let buffer = project
5672 .update(cx, |project, cx| {
5673 project.open_buffer(
5674 ProjectPath {
5675 worktree_id,
5676 path: Path::new("").into(),
5677 },
5678 cx,
5679 )
5680 })
5681 .await
5682 .unwrap();
5683
5684 let mut fake_server = fake_servers.next().await.unwrap();
5685 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5686 let params = params.text_document_position_params;
5687 assert_eq!(
5688 params.text_document.uri.to_file_path().unwrap(),
5689 Path::new("/dir/b.rs"),
5690 );
5691 assert_eq!(params.position, lsp::Position::new(0, 22));
5692
5693 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5694 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5695 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5696 )))
5697 });
5698
5699 let mut definitions = project
5700 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5701 .await
5702 .unwrap();
5703
5704 assert_eq!(definitions.len(), 1);
5705 let definition = definitions.pop().unwrap();
5706 cx.update(|cx| {
5707 let target_buffer = definition.buffer.read(cx);
5708 assert_eq!(
5709 target_buffer
5710 .file()
5711 .unwrap()
5712 .as_local()
5713 .unwrap()
5714 .abs_path(cx),
5715 Path::new("/dir/a.rs"),
5716 );
5717 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5718 assert_eq!(
5719 list_worktrees(&project, cx),
5720 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5721 );
5722
5723 drop(definition);
5724 });
5725 cx.read(|cx| {
5726 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5727 });
5728
5729 fn list_worktrees<'a>(
5730 project: &'a ModelHandle<Project>,
5731 cx: &'a AppContext,
5732 ) -> Vec<(&'a Path, bool)> {
5733 project
5734 .read(cx)
5735 .worktrees(cx)
5736 .map(|worktree| {
5737 let worktree = worktree.read(cx);
5738 (
5739 worktree.as_local().unwrap().abs_path().as_ref(),
5740 worktree.is_visible(),
5741 )
5742 })
5743 .collect::<Vec<_>>()
5744 }
5745 }
5746
5747 #[gpui::test]
5748 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5749 let fs = FakeFs::new(cx.background());
5750 fs.insert_tree(
5751 "/dir",
5752 json!({
5753 "file1": "the old contents",
5754 }),
5755 )
5756 .await;
5757
5758 let project = Project::test(fs.clone(), cx);
5759 let worktree_id = project
5760 .update(cx, |p, cx| {
5761 p.find_or_create_local_worktree("/dir", true, cx)
5762 })
5763 .await
5764 .unwrap()
5765 .0
5766 .read_with(cx, |tree, _| tree.id());
5767
5768 let buffer = project
5769 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5770 .await
5771 .unwrap();
5772 buffer
5773 .update(cx, |buffer, cx| {
5774 assert_eq!(buffer.text(), "the old contents");
5775 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5776 buffer.save(cx)
5777 })
5778 .await
5779 .unwrap();
5780
5781 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5782 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5783 }
5784
5785 #[gpui::test]
5786 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5787 let fs = FakeFs::new(cx.background());
5788 fs.insert_tree(
5789 "/dir",
5790 json!({
5791 "file1": "the old contents",
5792 }),
5793 )
5794 .await;
5795
5796 let project = Project::test(fs.clone(), cx);
5797 let worktree_id = project
5798 .update(cx, |p, cx| {
5799 p.find_or_create_local_worktree("/dir/file1", true, cx)
5800 })
5801 .await
5802 .unwrap()
5803 .0
5804 .read_with(cx, |tree, _| tree.id());
5805
5806 let buffer = project
5807 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5808 .await
5809 .unwrap();
5810 buffer
5811 .update(cx, |buffer, cx| {
5812 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5813 buffer.save(cx)
5814 })
5815 .await
5816 .unwrap();
5817
5818 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5819 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5820 }
5821
5822 #[gpui::test]
5823 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5824 let fs = FakeFs::new(cx.background());
5825 fs.insert_tree("/dir", json!({})).await;
5826
5827 let project = Project::test(fs.clone(), cx);
5828 let (worktree, _) = project
5829 .update(cx, |project, cx| {
5830 project.find_or_create_local_worktree("/dir", true, cx)
5831 })
5832 .await
5833 .unwrap();
5834 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5835
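        // Create an untitled buffer that is not yet associated with a file on disk.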
5836 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5837 buffer.update(cx, |buffer, cx| {
5838 buffer.edit([0..0], "abc", cx);
5839 assert!(buffer.is_dirty());
5840 assert!(!buffer.has_conflict());
5841 });
5842 project
5843 .update(cx, |project, cx| {
5844 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5845 })
5846 .await
5847 .unwrap();
5848 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5849 buffer.read_with(cx, |buffer, cx| {
5850 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5851 assert!(!buffer.is_dirty());
5852 assert!(!buffer.has_conflict());
5853 });
5854
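        // Opening the path the buffer was saved to should return the same buffer.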
5855 let opened_buffer = project
5856 .update(cx, |project, cx| {
5857 project.open_buffer((worktree_id, "file1"), cx)
5858 })
5859 .await
5860 .unwrap();
5861 assert_eq!(opened_buffer, buffer);
5862 }
5863
5864 #[gpui::test(retries = 5)]
5865 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5866 let dir = temp_tree(json!({
5867 "a": {
5868 "file1": "",
5869 "file2": "",
5870 "file3": "",
5871 },
5872 "b": {
5873 "c": {
5874 "file4": "",
5875 "file5": "",
5876 }
5877 }
5878 }));
5879
5880 let project = Project::test(Arc::new(RealFs), cx);
5881 let rpc = project.read_with(cx, |p, _| p.client.clone());
5882
5883 let (tree, _) = project
5884 .update(cx, |p, cx| {
5885 p.find_or_create_local_worktree(dir.path(), true, cx)
5886 })
5887 .await
5888 .unwrap();
5889 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5890
5891 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5892 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5893 async move { buffer.await.unwrap() }
5894 };
5895 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5896 tree.read_with(cx, |tree, _| {
5897 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
5899 .id
5900 })
5901 };
5902
5903 let buffer2 = buffer_for_path("a/file2", cx).await;
5904 let buffer3 = buffer_for_path("a/file3", cx).await;
5905 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5906 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5907
5908 let file2_id = id_for_path("a/file2", &cx);
5909 let file3_id = id_for_path("a/file3", &cx);
5910 let file4_id = id_for_path("b/c/file4", &cx);
5911
5912 // Wait for the initial scan.
5913 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5914 .await;
5915
5916 // Create a remote copy of this worktree.
5917 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5918 let (remote, load_task) = cx.update(|cx| {
5919 Worktree::remote(
5920 1,
5921 1,
5922 initial_snapshot.to_proto(&Default::default(), true),
5923 rpc.clone(),
5924 cx,
5925 )
5926 });
5927 load_task.await;
5928
5929 cx.read(|cx| {
5930 assert!(!buffer2.read(cx).is_dirty());
5931 assert!(!buffer3.read(cx).is_dirty());
5932 assert!(!buffer4.read(cx).is_dirty());
5933 assert!(!buffer5.read(cx).is_dirty());
5934 });
5935
5936 // Rename and delete files and directories.
5937 tree.flush_fs_events(&cx).await;
5938 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5939 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5940 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5941 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5942 tree.flush_fs_events(&cx).await;
5943
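        // These are the paths the worktree should contain after the renames and deletions above.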
5944 let expected_paths = vec![
5945 "a",
5946 "a/file1",
5947 "a/file2.new",
5948 "b",
5949 "d",
5950 "d/file3",
5951 "d/file4",
5952 ];
5953
5954 cx.read(|app| {
5955 assert_eq!(
5956 tree.read(app)
5957 .paths()
5958 .map(|p| p.to_str().unwrap())
5959 .collect::<Vec<_>>(),
5960 expected_paths
5961 );
5962
5963 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5964 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5965 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5966
5967 assert_eq!(
5968 buffer2.read(app).file().unwrap().path().as_ref(),
5969 Path::new("a/file2.new")
5970 );
5971 assert_eq!(
5972 buffer3.read(app).file().unwrap().path().as_ref(),
5973 Path::new("d/file3")
5974 );
5975 assert_eq!(
5976 buffer4.read(app).file().unwrap().path().as_ref(),
5977 Path::new("d/file4")
5978 );
5979 assert_eq!(
5980 buffer5.read(app).file().unwrap().path().as_ref(),
5981 Path::new("b/c/file5")
5982 );
5983
5984 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5985 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5986 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5987 assert!(buffer5.read(app).file().unwrap().is_deleted());
5988 });
5989
5990 // Update the remote worktree. Check that it becomes consistent with the
5991 // local worktree.
5992 remote.update(cx, |remote, cx| {
5993 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5994 &initial_snapshot,
5995 1,
5996 1,
5997 true,
5998 );
5999 remote
6000 .as_remote_mut()
6001 .unwrap()
6002 .snapshot
6003 .apply_remote_update(update_message)
6004 .unwrap();
6005
6006 assert_eq!(
6007 remote
6008 .paths()
6009 .map(|p| p.to_str().unwrap())
6010 .collect::<Vec<_>>(),
6011 expected_paths
6012 );
6013 });
6014 }
6015
6016 #[gpui::test]
6017 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6018 let fs = FakeFs::new(cx.background());
6019 fs.insert_tree(
6020 "/the-dir",
6021 json!({
6022 "a.txt": "a-contents",
6023 "b.txt": "b-contents",
6024 }),
6025 )
6026 .await;
6027
6028 let project = Project::test(fs.clone(), cx);
6029 let worktree_id = project
6030 .update(cx, |p, cx| {
6031 p.find_or_create_local_worktree("/the-dir", true, cx)
6032 })
6033 .await
6034 .unwrap()
6035 .0
6036 .read_with(cx, |tree, _| tree.id());
6037
6038 // Spawn multiple tasks to open paths, repeating some paths.
6039 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6040 (
6041 p.open_buffer((worktree_id, "a.txt"), cx),
6042 p.open_buffer((worktree_id, "b.txt"), cx),
6043 p.open_buffer((worktree_id, "a.txt"), cx),
6044 )
6045 });
6046
6047 let buffer_a_1 = buffer_a_1.await.unwrap();
6048 let buffer_a_2 = buffer_a_2.await.unwrap();
6049 let buffer_b = buffer_b.await.unwrap();
6050 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6051 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6052
6053 // There is only one buffer per path.
6054 let buffer_a_id = buffer_a_1.id();
6055 assert_eq!(buffer_a_2.id(), buffer_a_id);
6056
        // Drop one handle to the buffer, then open the same path again while another handle still keeps it open.
6058 drop(buffer_a_1);
6059 let buffer_a_3 = project
6060 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6061 .await
6062 .unwrap();
6063
6064 // There's still only one buffer per path.
6065 assert_eq!(buffer_a_3.id(), buffer_a_id);
6066 }
6067
6068 #[gpui::test]
6069 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6070 use std::fs;
6071
6072 let dir = temp_tree(json!({
6073 "file1": "abc",
6074 "file2": "def",
6075 "file3": "ghi",
6076 }));
6077
6078 let project = Project::test(Arc::new(RealFs), cx);
6079 let (worktree, _) = project
6080 .update(cx, |p, cx| {
6081 p.find_or_create_local_worktree(dir.path(), true, cx)
6082 })
6083 .await
6084 .unwrap();
6085 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6086
6087 worktree.flush_fs_events(&cx).await;
6088 worktree
6089 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6090 .await;
6091
6092 let buffer1 = project
6093 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6094 .await
6095 .unwrap();
6096 let events = Rc::new(RefCell::new(Vec::new()));
6097
        // Initially, the buffer isn't dirty.
6099 buffer1.update(cx, |buffer, cx| {
6100 cx.subscribe(&buffer1, {
6101 let events = events.clone();
6102 move |_, _, event, _| match event {
6103 BufferEvent::Operation(_) => {}
6104 _ => events.borrow_mut().push(event.clone()),
6105 }
6106 })
6107 .detach();
6108
6109 assert!(!buffer.is_dirty());
6110 assert!(events.borrow().is_empty());
6111
6112 buffer.edit(vec![1..2], "", cx);
6113 });
6114
        // After the first edit, the buffer is dirty, and emits both an edited and a dirtied event.
6116 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6118 assert!(buffer.is_dirty());
6119 assert_eq!(
6120 *events.borrow(),
6121 &[language::Event::Edited, language::Event::Dirtied]
6122 );
6123 events.borrow_mut().clear();
6124 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6125 });
6126
        // After saving, the buffer is not dirty, and emits a saved event.
6128 buffer1.update(cx, |buffer, cx| {
6129 assert!(!buffer.is_dirty());
6130 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6131 events.borrow_mut().clear();
6132
6133 buffer.edit(vec![1..1], "B", cx);
6134 buffer.edit(vec![2..2], "D", cx);
6135 });
6136
        // After editing again, the buffer is dirty and emits edited and dirtied events;
        // the second edit emits only an edited event, since the buffer is already dirty.
6138 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
6140 assert!(buffer.is_dirty());
6141 assert_eq!(
6142 *events.borrow(),
6143 &[
6144 language::Event::Edited,
6145 language::Event::Dirtied,
6146 language::Event::Edited,
6147 ],
6148 );
6149 events.borrow_mut().clear();
6150
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
6153 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
6155 assert!(buffer.is_dirty());
6156 });
6157
6158 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6159
6160 // When a file is deleted, the buffer is considered dirty.
6161 let events = Rc::new(RefCell::new(Vec::new()));
6162 let buffer2 = project
6163 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6164 .await
6165 .unwrap();
6166 buffer2.update(cx, |_, cx| {
6167 cx.subscribe(&buffer2, {
6168 let events = events.clone();
6169 move |_, _, event, _| events.borrow_mut().push(event.clone())
6170 })
6171 .detach();
6172 });
6173
6174 fs::remove_file(dir.path().join("file2")).unwrap();
6175 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6176 assert_eq!(
6177 *events.borrow(),
6178 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6179 );
6180
        // When a buffer that is already dirty has its file deleted, no additional Dirtied event is emitted.
6182 let events = Rc::new(RefCell::new(Vec::new()));
6183 let buffer3 = project
6184 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6185 .await
6186 .unwrap();
6187 buffer3.update(cx, |_, cx| {
6188 cx.subscribe(&buffer3, {
6189 let events = events.clone();
6190 move |_, _, event, _| events.borrow_mut().push(event.clone())
6191 })
6192 .detach();
6193 });
6194
6195 worktree.flush_fs_events(&cx).await;
6196 buffer3.update(cx, |buffer, cx| {
6197 buffer.edit(Some(0..0), "x", cx);
6198 });
6199 events.borrow_mut().clear();
6200 fs::remove_file(dir.path().join("file3")).unwrap();
6201 buffer3
6202 .condition(&cx, |_, _| !events.borrow().is_empty())
6203 .await;
6204 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6205 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6206 }
6207
6208 #[gpui::test]
6209 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6210 use std::fs;
6211
6212 let initial_contents = "aaa\nbbbbb\nc\n";
6213 let dir = temp_tree(json!({ "the-file": initial_contents }));
6214
6215 let project = Project::test(Arc::new(RealFs), cx);
6216 let (worktree, _) = project
6217 .update(cx, |p, cx| {
6218 p.find_or_create_local_worktree(dir.path(), true, cx)
6219 })
6220 .await
6221 .unwrap();
6222 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6223
6224 worktree
6225 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6226 .await;
6227
6228 let abs_path = dir.path().join("the-file");
6229 let buffer = project
6230 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6231 .await
6232 .unwrap();
6233
6234 // TODO
6235 // Add a cursor on each row.
6236 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6237 // assert!(!buffer.is_dirty());
6238 // buffer.add_selection_set(
6239 // &(0..3)
6240 // .map(|row| Selection {
6241 // id: row as usize,
6242 // start: Point::new(row, 1),
6243 // end: Point::new(row, 1),
6244 // reversed: false,
6245 // goal: SelectionGoal::None,
6246 // })
6247 // .collect::<Vec<_>>(),
6248 // cx,
6249 // )
6250 // });
6251
6252 // Change the file on disk, adding two new lines of text, and removing
6253 // one line.
6254 buffer.read_with(cx, |buffer, _| {
6255 assert!(!buffer.is_dirty());
6256 assert!(!buffer.has_conflict());
6257 });
6258 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6259 fs::write(&abs_path, new_contents).unwrap();
6260
6261 // Because the buffer was not modified, it is reloaded from disk. Its
6262 // contents are edited according to the diff between the old and new
6263 // file contents.
6264 buffer
6265 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6266 .await;
6267
6268 buffer.update(cx, |buffer, _| {
6269 assert_eq!(buffer.text(), new_contents);
6270 assert!(!buffer.is_dirty());
6271 assert!(!buffer.has_conflict());
6272
6273 // TODO
6274 // let cursor_positions = buffer
6275 // .selection_set(selection_set_id)
6276 // .unwrap()
6277 // .selections::<Point>(&*buffer)
6278 // .map(|selection| {
6279 // assert_eq!(selection.start, selection.end);
6280 // selection.start
6281 // })
6282 // .collect::<Vec<_>>();
6283 // assert_eq!(
6284 // cursor_positions,
6285 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6286 // );
6287 });
6288
6289 // Modify the buffer
6290 buffer.update(cx, |buffer, cx| {
6291 buffer.edit(vec![0..0], " ", cx);
6292 assert!(buffer.is_dirty());
6293 assert!(!buffer.has_conflict());
6294 });
6295
6296 // Change the file on disk again, adding blank lines to the beginning.
6297 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6298
6299 // Because the buffer is modified, it doesn't reload from disk, but is
6300 // marked as having a conflict.
6301 buffer
6302 .condition(&cx, |buffer, _| buffer.has_conflict())
6303 .await;
6304 }
6305
6306 #[gpui::test]
6307 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6308 cx.foreground().forbid_parking();
6309
6310 let fs = FakeFs::new(cx.background());
6311 fs.insert_tree(
6312 "/the-dir",
6313 json!({
6314 "a.rs": "
6315 fn foo(mut v: Vec<usize>) {
6316 for x in &v {
6317 v.push(1);
6318 }
6319 }
6320 "
6321 .unindent(),
6322 }),
6323 )
6324 .await;
6325
6326 let project = Project::test(fs.clone(), cx);
6327 let (worktree, _) = project
6328 .update(cx, |p, cx| {
6329 p.find_or_create_local_worktree("/the-dir", true, cx)
6330 })
6331 .await
6332 .unwrap();
6333 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6334
6335 let buffer = project
6336 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6337 .await
6338 .unwrap();
6339
6340 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6341 let message = lsp::PublishDiagnosticsParams {
6342 uri: buffer_uri.clone(),
6343 diagnostics: vec![
6344 lsp::Diagnostic {
6345 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6346 severity: Some(DiagnosticSeverity::WARNING),
6347 message: "error 1".to_string(),
6348 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6349 location: lsp::Location {
6350 uri: buffer_uri.clone(),
6351 range: lsp::Range::new(
6352 lsp::Position::new(1, 8),
6353 lsp::Position::new(1, 9),
6354 ),
6355 },
6356 message: "error 1 hint 1".to_string(),
6357 }]),
6358 ..Default::default()
6359 },
6360 lsp::Diagnostic {
6361 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6362 severity: Some(DiagnosticSeverity::HINT),
6363 message: "error 1 hint 1".to_string(),
6364 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6365 location: lsp::Location {
6366 uri: buffer_uri.clone(),
6367 range: lsp::Range::new(
6368 lsp::Position::new(1, 8),
6369 lsp::Position::new(1, 9),
6370 ),
6371 },
6372 message: "original diagnostic".to_string(),
6373 }]),
6374 ..Default::default()
6375 },
6376 lsp::Diagnostic {
6377 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6378 severity: Some(DiagnosticSeverity::ERROR),
6379 message: "error 2".to_string(),
6380 related_information: Some(vec![
6381 lsp::DiagnosticRelatedInformation {
6382 location: lsp::Location {
6383 uri: buffer_uri.clone(),
6384 range: lsp::Range::new(
6385 lsp::Position::new(1, 13),
6386 lsp::Position::new(1, 15),
6387 ),
6388 },
6389 message: "error 2 hint 1".to_string(),
6390 },
6391 lsp::DiagnosticRelatedInformation {
6392 location: lsp::Location {
6393 uri: buffer_uri.clone(),
6394 range: lsp::Range::new(
6395 lsp::Position::new(1, 13),
6396 lsp::Position::new(1, 15),
6397 ),
6398 },
6399 message: "error 2 hint 2".to_string(),
6400 },
6401 ]),
6402 ..Default::default()
6403 },
6404 lsp::Diagnostic {
6405 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6406 severity: Some(DiagnosticSeverity::HINT),
6407 message: "error 2 hint 1".to_string(),
6408 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6409 location: lsp::Location {
6410 uri: buffer_uri.clone(),
6411 range: lsp::Range::new(
6412 lsp::Position::new(2, 8),
6413 lsp::Position::new(2, 17),
6414 ),
6415 },
6416 message: "original diagnostic".to_string(),
6417 }]),
6418 ..Default::default()
6419 },
6420 lsp::Diagnostic {
6421 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6422 severity: Some(DiagnosticSeverity::HINT),
6423 message: "error 2 hint 2".to_string(),
6424 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6425 location: lsp::Location {
6426 uri: buffer_uri.clone(),
6427 range: lsp::Range::new(
6428 lsp::Position::new(2, 8),
6429 lsp::Position::new(2, 17),
6430 ),
6431 },
6432 message: "original diagnostic".to_string(),
6433 }]),
6434 ..Default::default()
6435 },
6436 ],
6437 version: None,
6438 };
6439
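        // Apply the published diagnostics to the project so they show up in the open buffer.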
6440 project
6441 .update(cx, |p, cx| {
6442 p.update_diagnostics(message, &Default::default(), cx)
6443 })
6444 .unwrap();
6445 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6446
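        // Each related-information hint is grouped with its primary diagnostic via `group_id`.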
6447 assert_eq!(
6448 buffer
6449 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6450 .collect::<Vec<_>>(),
6451 &[
6452 DiagnosticEntry {
6453 range: Point::new(1, 8)..Point::new(1, 9),
6454 diagnostic: Diagnostic {
6455 severity: DiagnosticSeverity::WARNING,
6456 message: "error 1".to_string(),
6457 group_id: 0,
6458 is_primary: true,
6459 ..Default::default()
6460 }
6461 },
6462 DiagnosticEntry {
6463 range: Point::new(1, 8)..Point::new(1, 9),
6464 diagnostic: Diagnostic {
6465 severity: DiagnosticSeverity::HINT,
6466 message: "error 1 hint 1".to_string(),
6467 group_id: 0,
6468 is_primary: false,
6469 ..Default::default()
6470 }
6471 },
6472 DiagnosticEntry {
6473 range: Point::new(1, 13)..Point::new(1, 15),
6474 diagnostic: Diagnostic {
6475 severity: DiagnosticSeverity::HINT,
6476 message: "error 2 hint 1".to_string(),
6477 group_id: 1,
6478 is_primary: false,
6479 ..Default::default()
6480 }
6481 },
6482 DiagnosticEntry {
6483 range: Point::new(1, 13)..Point::new(1, 15),
6484 diagnostic: Diagnostic {
6485 severity: DiagnosticSeverity::HINT,
6486 message: "error 2 hint 2".to_string(),
6487 group_id: 1,
6488 is_primary: false,
6489 ..Default::default()
6490 }
6491 },
6492 DiagnosticEntry {
6493 range: Point::new(2, 8)..Point::new(2, 17),
6494 diagnostic: Diagnostic {
6495 severity: DiagnosticSeverity::ERROR,
6496 message: "error 2".to_string(),
6497 group_id: 1,
6498 is_primary: true,
6499 ..Default::default()
6500 }
6501 }
6502 ]
6503 );
6504
6505 assert_eq!(
6506 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6507 &[
6508 DiagnosticEntry {
6509 range: Point::new(1, 8)..Point::new(1, 9),
6510 diagnostic: Diagnostic {
6511 severity: DiagnosticSeverity::WARNING,
6512 message: "error 1".to_string(),
6513 group_id: 0,
6514 is_primary: true,
6515 ..Default::default()
6516 }
6517 },
6518 DiagnosticEntry {
6519 range: Point::new(1, 8)..Point::new(1, 9),
6520 diagnostic: Diagnostic {
6521 severity: DiagnosticSeverity::HINT,
6522 message: "error 1 hint 1".to_string(),
6523 group_id: 0,
6524 is_primary: false,
6525 ..Default::default()
6526 }
6527 },
6528 ]
6529 );
6530 assert_eq!(
6531 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6532 &[
6533 DiagnosticEntry {
6534 range: Point::new(1, 13)..Point::new(1, 15),
6535 diagnostic: Diagnostic {
6536 severity: DiagnosticSeverity::HINT,
6537 message: "error 2 hint 1".to_string(),
6538 group_id: 1,
6539 is_primary: false,
6540 ..Default::default()
6541 }
6542 },
6543 DiagnosticEntry {
6544 range: Point::new(1, 13)..Point::new(1, 15),
6545 diagnostic: Diagnostic {
6546 severity: DiagnosticSeverity::HINT,
6547 message: "error 2 hint 2".to_string(),
6548 group_id: 1,
6549 is_primary: false,
6550 ..Default::default()
6551 }
6552 },
6553 DiagnosticEntry {
6554 range: Point::new(2, 8)..Point::new(2, 17),
6555 diagnostic: Diagnostic {
6556 severity: DiagnosticSeverity::ERROR,
6557 message: "error 2".to_string(),
6558 group_id: 1,
6559 is_primary: true,
6560 ..Default::default()
6561 }
6562 }
6563 ]
6564 );
6565 }
6566
6567 #[gpui::test]
6568 async fn test_rename(cx: &mut gpui::TestAppContext) {
6569 cx.foreground().forbid_parking();
6570
6571 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6572 let language = Arc::new(Language::new(
6573 LanguageConfig {
6574 name: "Rust".into(),
6575 path_suffixes: vec!["rs".to_string()],
6576 language_server: Some(language_server_config),
6577 ..Default::default()
6578 },
6579 Some(tree_sitter_rust::language()),
6580 ));
6581
6582 let fs = FakeFs::new(cx.background());
6583 fs.insert_tree(
6584 "/dir",
6585 json!({
6586 "one.rs": "const ONE: usize = 1;",
6587 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6588 }),
6589 )
6590 .await;
6591
6592 let project = Project::test(fs.clone(), cx);
6593 project.update(cx, |project, _| {
6594 Arc::get_mut(&mut project.languages).unwrap().add(language);
6595 });
6596
6597 let (tree, _) = project
6598 .update(cx, |project, cx| {
6599 project.find_or_create_local_worktree("/dir", true, cx)
6600 })
6601 .await
6602 .unwrap();
6603 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6604 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6605 .await;
6606
6607 let buffer = project
6608 .update(cx, |project, cx| {
6609 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6610 })
6611 .await
6612 .unwrap();
6613
6614 let mut fake_server = fake_servers.next().await.unwrap();
6615
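        // Prepare a rename of the symbol at offset 7; the fake server reports that the range 6..9 can be renamed.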
6616 let response = project.update(cx, |project, cx| {
6617 project.prepare_rename(buffer.clone(), 7, cx)
6618 });
6619 fake_server
6620 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6621 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6622 assert_eq!(params.position, lsp::Position::new(0, 7));
6623 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6624 lsp::Position::new(0, 6),
6625 lsp::Position::new(0, 9),
6626 )))
6627 })
6628 .next()
6629 .await
6630 .unwrap();
6631 let range = response.await.unwrap().unwrap();
6632 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6633 assert_eq!(range, 6..9);
6634
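        // Perform the rename; the fake server responds with a workspace edit spanning both one.rs and two.rs.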
6635 let response = project.update(cx, |project, cx| {
6636 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6637 });
6638 fake_server
6639 .handle_request::<lsp::request::Rename, _>(|params, _| {
6640 assert_eq!(
6641 params.text_document_position.text_document.uri.as_str(),
6642 "file:///dir/one.rs"
6643 );
6644 assert_eq!(
6645 params.text_document_position.position,
6646 lsp::Position::new(0, 7)
6647 );
6648 assert_eq!(params.new_name, "THREE");
6649 Some(lsp::WorkspaceEdit {
6650 changes: Some(
6651 [
6652 (
6653 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6654 vec![lsp::TextEdit::new(
6655 lsp::Range::new(
6656 lsp::Position::new(0, 6),
6657 lsp::Position::new(0, 9),
6658 ),
6659 "THREE".to_string(),
6660 )],
6661 ),
6662 (
6663 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6664 vec![
6665 lsp::TextEdit::new(
6666 lsp::Range::new(
6667 lsp::Position::new(0, 24),
6668 lsp::Position::new(0, 27),
6669 ),
6670 "THREE".to_string(),
6671 ),
6672 lsp::TextEdit::new(
6673 lsp::Range::new(
6674 lsp::Position::new(0, 35),
6675 lsp::Position::new(0, 38),
6676 ),
6677 "THREE".to_string(),
6678 ),
6679 ],
6680 ),
6681 ]
6682 .into_iter()
6683 .collect(),
6684 ),
6685 ..Default::default()
6686 })
6687 })
6688 .next()
6689 .await
6690 .unwrap();
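        // The resulting transaction should contain edits to both buffers.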
6691 let mut transaction = response.await.unwrap().0;
6692 assert_eq!(transaction.len(), 2);
6693 assert_eq!(
6694 transaction
6695 .remove_entry(&buffer)
6696 .unwrap()
6697 .0
6698 .read_with(cx, |buffer, _| buffer.text()),
6699 "const THREE: usize = 1;"
6700 );
6701 assert_eq!(
6702 transaction
6703 .into_keys()
6704 .next()
6705 .unwrap()
6706 .read_with(cx, |buffer, _| buffer.text()),
6707 "const TWO: usize = one::THREE + one::THREE;"
6708 );
6709 }
6710
6711 #[gpui::test]
6712 async fn test_search(cx: &mut gpui::TestAppContext) {
6713 let fs = FakeFs::new(cx.background());
6714 fs.insert_tree(
6715 "/dir",
6716 json!({
6717 "one.rs": "const ONE: usize = 1;",
6718 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6719 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6720 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6721 }),
6722 )
6723 .await;
6724 let project = Project::test(fs.clone(), cx);
6725 let (tree, _) = project
6726 .update(cx, |project, cx| {
6727 project.find_or_create_local_worktree("/dir", true, cx)
6728 })
6729 .await
6730 .unwrap();
6731 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6732 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6733 .await;
6734
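        // Searching the project finds matches in files that are not open in any buffer.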
6735 assert_eq!(
6736 search(&project, SearchQuery::text("TWO", false, true), cx)
6737 .await
6738 .unwrap(),
6739 HashMap::from_iter([
6740 ("two.rs".to_string(), vec![6..9]),
6741 ("three.rs".to_string(), vec![37..40])
6742 ])
6743 );
6744
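        // Open four.rs and edit it so its in-memory contents differ from the file on disk.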
6745 let buffer_4 = project
6746 .update(cx, |project, cx| {
6747 project.open_buffer((worktree_id, "four.rs"), cx)
6748 })
6749 .await
6750 .unwrap();
6751 buffer_4.update(cx, |buffer, cx| {
6752 buffer.edit([20..28, 31..43], "two::TWO", cx);
6753 });
6754
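        // The search results should reflect the buffer's unsaved edits.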
6755 assert_eq!(
6756 search(&project, SearchQuery::text("TWO", false, true), cx)
6757 .await
6758 .unwrap(),
6759 HashMap::from_iter([
6760 ("two.rs".to_string(), vec![6..9]),
6761 ("three.rs".to_string(), vec![37..40]),
6762 ("four.rs".to_string(), vec![25..28, 36..39])
6763 ])
6764 );
6765
6766 async fn search(
6767 project: &ModelHandle<Project>,
6768 query: SearchQuery,
6769 cx: &mut gpui::TestAppContext,
6770 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6771 let results = project
6772 .update(cx, |project, cx| project.search(query, cx))
6773 .await?;
6774
6775 Ok(results
6776 .into_iter()
6777 .map(|(buffer, ranges)| {
6778 buffer.read_with(cx, |buffer, _| {
6779 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6780 let ranges = ranges
6781 .into_iter()
6782 .map(|range| range.to_offset(buffer))
6783 .collect::<Vec<_>>();
6784 (path, ranges)
6785 })
6786 })
6787 .collect())
6788 }
6789 }
6790}