1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
/// Root model of a workspace: owns the worktrees, open buffers, collaborators,
/// and language-server state for one project, which is either hosted locally
/// (`ProjectClientState::Local`) or joined as a guest over RPC
/// (`ProjectClientState::Remote`).
pub struct Project {
    // Handles may be weak so unshared worktrees can be dropped when no longer
    // referenced elsewhere (see `WorktreeHandle` and `share`/`unshare`).
    worktrees: Vec<WorktreeHandle>,
    // The entry currently active in the UI, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by (worktree, language name).
    language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
    // Per-server-id status (name plus pending work/diagnostics counters).
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Signed counter of servers currently producing disk-based diagnostics.
    // NOTE(review): `isize` presumably tolerates unbalanced start/finish
    // events without underflow — confirm against the update handlers.
    language_servers_with_diagnostics_running: isize,
    // Broadcasts a notification whenever a buffer is opened; the sender is
    // wrapped in `Rc<RefCell<..>>` so spawned tasks can share it.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Per-peer sets of buffer ids; cleared on `unshare`.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // De-duplicates concurrent `open_buffer` calls for the same path: late
    // callers await the receiver instead of loading again.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // De-duplicates concurrent local-worktree creation for the same path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All open buffers, keyed by buffer remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Per-buffer history of (version, snapshot) pairs reported to language
    // servers (seeded in `register_buffer_with_language_server`).
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random per-instance value seeded from entropy in `local`/`remote`.
    nonce: u128,
}
81
/// The project's handle on an open buffer.
enum OpenBuffer {
    /// Keeps the buffer alive (used while the project is shared or remote).
    Strong(ModelHandle<Buffer>),
    /// Lets the buffer drop when no one else references it.
    Weak(WeakModelHandle<Buffer>),
    /// The buffer is still loading; operations received in the meantime are
    /// queued here and applied in `register_buffer` once it arrives.
    Loading(Vec<Operation>),
}
87
/// The project's handle on a worktree: strong while shared/visible, weak
/// otherwise so unreferenced worktrees can be released (see `share`/`unshare`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
92
/// Whether this project is hosted by this instance or joined from a host.
enum ProjectClientState {
    /// This instance hosts the project from the local filesystem.
    Local {
        is_shared: bool,
        // Remote id assigned by the server while connected; `None` offline.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Keeps the project registered with the server across reconnects.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    /// This instance joined someone else's project as a guest.
    Remote {
        // Set when the host stops sharing; makes the project read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Flags the project as unshared if the connection ever drops.
        _detect_unshare_task: Task<Option<()>>,
    },
}
107
/// A peer participating in a shared project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    /// The CRDT replica id this peer uses for buffer operations.
    pub replica_id: ReplicaId,
}
114
/// Events emitted by a [`Project`] for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    /// Diagnostics changed for the given path.
    DiagnosticsUpdated(ProjectPath),
}
124
/// Internal events surfaced from a language server, mirroring the LSP
/// `$/progress` work-done notifications plus published diagnostics.
enum LanguageServerEvent {
    WorkStart {
        token: String,
    },
    WorkProgress {
        token: String,
        progress: LanguageServerProgress,
    },
    WorkEnd {
        token: String,
    },
    DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
}
138
/// Displayable status of one running language server.
pub struct LanguageServerStatus {
    pub name: String,
    /// In-flight work items, keyed by LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    // Count of outstanding disk-based diagnostic updates for this server.
    pending_diagnostic_updates: isize,
}
144
/// A single progress report from a language server's work-done progress.
#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    /// Percent complete, when the server reports one.
    pub percentage: Option<usize>,
    /// When this progress entry was last refreshed.
    pub last_update_at: Instant,
}
151
/// A path to a file or directory, scoped to one of the project's worktrees.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
157
/// Counts of primary diagnostics by severity (see `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
165
/// A range within a specific buffer (e.g. a definition or reference target).
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
171
/// A highlighted range produced by an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    /// Whether the occurrence is a read, write, or plain text match.
    pub kind: DocumentHighlightKind,
}
177
/// A workspace symbol reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server produced this symbol.
    // NOTE(review): presumably may differ from `worktree_id` when the symbol
    // lives outside that worktree — confirm against the symbol handlers.
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    /// Styled label for display in symbol pickers.
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte digest; presumably a Sha256 (imported above) used to validate
    // symbol requests between peers — TODO confirm where it is checked.
    pub signature: [u8; 32],
}
190
/// A set of buffer transactions keyed by buffer, used to report (and undo)
/// multi-buffer edits such as code actions or project-wide renames.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
193
194impl DiagnosticSummary {
195 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
196 let mut this = Self {
197 error_count: 0,
198 warning_count: 0,
199 info_count: 0,
200 hint_count: 0,
201 };
202
203 for entry in diagnostics {
204 if entry.diagnostic.is_primary {
205 match entry.diagnostic.severity {
206 DiagnosticSeverity::ERROR => this.error_count += 1,
207 DiagnosticSeverity::WARNING => this.warning_count += 1,
208 DiagnosticSeverity::INFORMATION => this.info_count += 1,
209 DiagnosticSeverity::HINT => this.hint_count += 1,
210 _ => {}
211 }
212 }
213 }
214
215 this
216 }
217
218 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
219 proto::DiagnosticSummary {
220 path: path.to_string_lossy().to_string(),
221 error_count: self.error_count as u32,
222 warning_count: self.warning_count as u32,
223 info_count: self.info_count as u32,
224 hint_count: self.hint_count as u32,
225 }
226 }
227}
228
/// A unique identifier for an entry within a project's worktrees, allocated
/// from a shared atomic counter and convertible to/from its wire form.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Allocates the next id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        Self(id)
    }

    /// Reconstructs an id from its wire representation.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Returns the wire representation of this id.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw counter value backing this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
249
250impl Project {
    /// Registers all of `Project`'s RPC message and request handlers on the
    /// client. Call once at startup before any project receives messages.
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_start_language_server);
        client.add_entity_message_handler(Self::handle_update_language_server);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        // LSP commands share one generic handler, instantiated per command type.
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
281
    /// Creates a new, unshared project backed by the local filesystem.
    ///
    /// Spawns a background task that follows the client's connection status:
    /// while connected it registers the project (and each of its worktrees)
    /// with the server to obtain a remote id, and it clears the remote id on
    /// disconnect. The task holds only a weak handle, so it stops once the
    /// project is dropped.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        // Re-run on every connection status change.
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if status.is_connected() {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register all current worktrees under the new
                                    // project id before publishing the id below.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
364
    /// Joins an existing shared project with the given `remote_id` as a guest.
    ///
    /// Connects and sends a `JoinProject` request, then builds the project
    /// from the response: remote worktrees, per-server language-server
    /// statuses, and the collaborator list. A background task watches the
    /// connection and marks the project unshared if it ever drops.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Instantiate each remote worktree; their load tasks run detached.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot; pending work
                // arrives later via update messages.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Load the collaborating users before constructing the collaborators,
        // since `Collaborator::from_proto` resolves user ids via the store.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
476
    /// Builds a local project for tests, with an empty language registry and
    /// a fake HTTP client that answers 404 to every request.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
485
486 #[cfg(any(test, feature = "test-support"))]
487 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
488 self.opened_buffers
489 .get(&remote_id)
490 .and_then(|buffer| buffer.upgrade(cx))
491 }
492
    /// Returns the project's language registry (test-support accessor).
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
497
    /// Test-only sanity checks: local projects must not contain two worktrees
    /// rooted at the same absolute path; remote projects must have no open
    /// buffers with deferred (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
531
532 #[cfg(any(test, feature = "test-support"))]
533 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
534 let path = path.into();
535 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
536 self.opened_buffers.iter().any(|(_, buffer)| {
537 if let Some(buffer) = buffer.upgrade(cx) {
538 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
539 if file.worktree == worktree && file.path() == &path.path {
540 return true;
541 }
542 }
543 }
544 false
545 })
546 } else {
547 false
548 }
549 }
550
    /// Returns the filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
554
555 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
556 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
557 *remote_id_tx.borrow_mut() = remote_id;
558 }
559
560 self.subscriptions.clear();
561 if let Some(remote_id) = remote_id {
562 self.subscriptions
563 .push(self.client.add_model_for_remote_entity(remote_id, cx));
564 }
565 }
566
    /// Returns the project's server-assigned id: always known for remote
    /// projects, and `Some` for local projects only while connected.
    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }
573
    /// Returns a future resolving to the project's remote id, waiting on the
    /// id watch channel if a local project hasn't been assigned one yet.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        // Capture either the known id (remote) or the watch (local) up front,
        // so the returned future doesn't borrow `self`.
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: poll the watch until an id is published.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
596
    /// Returns this instance's CRDT replica id: the host is always replica 0,
    /// guests use the id assigned when joining.
    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }
603
    /// Returns the current collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
607
608 pub fn worktrees<'a>(
609 &'a self,
610 cx: &'a AppContext,
611 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
612 self.worktrees
613 .iter()
614 .filter_map(move |worktree| worktree.upgrade(cx))
615 }
616
617 pub fn visible_worktrees<'a>(
618 &'a self,
619 cx: &'a AppContext,
620 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
621 self.worktrees.iter().filter_map(|worktree| {
622 worktree.upgrade(cx).and_then(|worktree| {
623 if worktree.read(cx).is_visible() {
624 Some(worktree)
625 } else {
626 None
627 }
628 })
629 })
630 }
631
    /// Finds the live worktree with the given id, if any.
    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }
640
    /// Finds the live worktree containing the given entry, if any.
    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }
649
    /// Returns the id of the worktree containing the given entry, if any.
    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }
658
    /// Starts sharing this local project with collaborators.
    ///
    /// Marks the project shared, upgrades buffer and worktree handles to
    /// strong so they outlive local interest, then asks the server to share
    /// the project and shares each worktree under the project id. Fails if
    /// the project is remote or has no server-assigned id yet.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Hold all open buffers strongly while shared, so guests
                    // can still reference them after the host closes them.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            // Loading placeholders are resolved in
                            // `register_buffer` before sharing can occur.
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise keep every live worktree alive while shared.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share each worktree's contents with the server.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
720
721 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
722 let rpc = self.client.clone();
723 cx.spawn(|this, mut cx| async move {
724 let project_id = this.update(&mut cx, |this, cx| {
725 if let ProjectClientState::Local {
726 is_shared,
727 remote_id_rx,
728 ..
729 } = &mut this.client_state
730 {
731 *is_shared = false;
732
733 for open_buffer in this.opened_buffers.values_mut() {
734 match open_buffer {
735 OpenBuffer::Strong(buffer) => {
736 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
737 }
738 _ => {}
739 }
740 }
741
742 for worktree_handle in this.worktrees.iter_mut() {
743 match worktree_handle {
744 WorktreeHandle::Strong(worktree) => {
745 if !worktree.read(cx).is_visible() {
746 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
747 }
748 }
749 _ => {}
750 }
751 }
752
753 remote_id_rx
754 .borrow()
755 .ok_or_else(|| anyhow!("no project id"))
756 } else {
757 Err(anyhow!("can't share a remote project"))
758 }
759 })?;
760
761 rpc.send(proto::UnshareProject { project_id })?;
762 this.update(&mut cx, |this, cx| {
763 this.collaborators.clear();
764 this.shared_buffers.clear();
765 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
766 worktree.update(cx, |worktree, _| {
767 worktree.as_local_mut().unwrap().unshare();
768 });
769 }
770 cx.notify()
771 });
772 Ok(())
773 })
774 }
775
    /// Called on a guest when the host stops sharing (or the connection
    /// drops): marks the project read-only and clears the collaborator list.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
787
788 pub fn is_read_only(&self) -> bool {
789 match &self.client_state {
790 ProjectClientState::Local { .. } => false,
791 ProjectClientState::Remote {
792 sharing_has_stopped,
793 ..
794 } => *sharing_has_stopped,
795 }
796 }
797
798 pub fn is_local(&self) -> bool {
799 match &self.client_state {
800 ProjectClientState::Local { .. } => true,
801 ProjectClientState::Remote { .. } => false,
802 }
803 }
804
    /// Returns whether this project was joined from another host.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
808
    /// Creates a new, empty plain-text buffer and registers it with the
    /// project. Only supported on local projects for now.
    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
820
    /// Opens the buffer for the given project path, loading it locally or
    /// requesting it from the host as appropriate.
    ///
    /// Concurrent calls for the same path are de-duplicated through
    /// `loading_buffers`: the first call performs the load, later callers
    /// await the shared watch channel and receive the same buffer.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the shared load above publishes a result.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        // The stored error is shared; re-wrap it for this caller.
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
881
    /// Loads a buffer from the local worktree's filesystem and registers it
    /// with the project once loaded.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
898
    /// Requests a buffer from the host over RPC and deserializes the response
    /// into a local replica.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Only called for remote projects, which always have a remote id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
923
    /// Opens a buffer for a file URI produced by a language server (e.g. a
    /// go-to-definition target outside any current worktree).
    ///
    /// If the path isn't covered by an existing worktree, a new non-visible
    /// worktree is created for it and associated with the given language
    /// server so the file participates in that server's sessions.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: Arc<str>,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // Create a hidden (`visible: false`) worktree rooted at the
                // file itself; the relative path within it is empty.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
960
    /// Saves `buffer` to a new absolute path, creating (or reusing) a local
    /// worktree that contains it, then re-resolves the buffer's language and
    /// re-registers it with the matching language server for the new path.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
985
986 pub fn get_open_buffer(
987 &mut self,
988 path: &ProjectPath,
989 cx: &mut ModelContext<Self>,
990 ) -> Option<ModelHandle<Buffer>> {
991 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
992 self.opened_buffers.values().find_map(|buffer| {
993 let buffer = buffer.upgrade(cx)?;
994 let file = File::from_dyn(buffer.read(cx).file())?;
995 if file.worktree == worktree && file.path() == &path.path {
996 Some(buffer)
997 } else {
998 None
999 }
1000 })
1001 }
1002
    /// Records a newly opened buffer in `opened_buffers`, wires up event
    /// forwarding, and assigns its language / language server.
    ///
    /// If operations for this buffer arrived before it finished loading
    /// (`OpenBuffer::Loading`), they are applied now. Registering a buffer id
    /// that already has a live handle is an error.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Hold the buffer strongly when collaborating so peers can reach it.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Apply any operations queued while the buffer was loading.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak handle is fine to replace; a live one is a bug.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
1043
    /// Introduces a local buffer to its language server.
    ///
    /// Applies any diagnostics the worktree already holds for the path, sends
    /// `textDocument/didOpen` with the initial snapshot (stored as version 0
    /// in `buffer_snapshots`), copies the server's completion trigger
    /// characters onto the buffer, and arranges for `didClose` to be sent
    /// when the buffer is released. Non-local buffers are ignored.
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();
                let language_server = self.language_server_for_buffer(buffer, cx).cloned();

                // Seed the buffer with diagnostics the worktree already knows.
                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some(server) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    Default::default(),
                                    // Initial document version, matching the
                                    // version-0 snapshot stored below.
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }

                // Tell the server when the buffer goes away.
                cx.observe_release(buffer_handle, |this, buffer, cx| {
                    if let Some(file) = File::from_dyn(buffer.file()) {
                        if file.is_local() {
                            let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                            if let Some(server) = this.language_server_for_buffer(buffer, cx) {
                                server
                                    .notify::<lsp::notification::DidCloseTextDocument>(
                                        lsp::DidCloseTextDocumentParams {
                                            text_document: lsp::TextDocumentIdentifier::new(
                                                uri.clone(),
                                            ),
                                        },
                                    )
                                    .log_err();
                            }
                        }
                    }
                })
                .detach();
            }
        }
    }
1115
    /// Reacts to events emitted by an open buffer: replicates CRDT operations
    /// to collaborators and keeps language servers in sync with edits/saves.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exit; the
    /// return value carries no meaning and is ignored by the subscriber.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Only forward operations when this project is shared/remote.
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited => {
                // Diff against the last snapshot sent to the server and emit a
                // `textDocument/didChange` with incremental content changes.
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        // Per LSP incremental sync, each change's range is
                        // expressed in the document state after the preceding
                        // changes: new start + old extent = replaced range.
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot so versioned diagnostics can later be
                // mapped back onto the exact contents the server saw.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                // Notify every language server attached to this buffer's
                // worktree that the file was written to disk.
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1202
1203 fn language_servers_for_worktree(
1204 &self,
1205 worktree_id: WorktreeId,
1206 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1207 self.language_servers.iter().filter_map(
1208 move |((language_server_worktree_id, language_name), server)| {
1209 if *language_server_worktree_id == worktree_id {
1210 Some((language_name.as_ref(), server))
1211 } else {
1212 None
1213 }
1214 },
1215 )
1216 }
1217
1218 fn assign_language_to_buffer(
1219 &mut self,
1220 buffer: &ModelHandle<Buffer>,
1221 cx: &mut ModelContext<Self>,
1222 ) -> Option<()> {
1223 // If the buffer has a language, set it and start the language server if we haven't already.
1224 let full_path = buffer.read(cx).file()?.full_path(cx);
1225 let language = self.languages.select_language(&full_path)?;
1226 buffer.update(cx, |buffer, cx| {
1227 buffer.set_language(Some(language.clone()), cx);
1228 });
1229
1230 let file = File::from_dyn(buffer.read(cx).file())?;
1231 let worktree = file.worktree.read(cx).as_local()?;
1232 let worktree_id = worktree.id();
1233 let worktree_abs_path = worktree.abs_path().clone();
1234 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1235
1236 None
1237 }
1238
    /// Starts (at most once per `(worktree, language)` pair) a language server
    /// for `language` rooted at `worktree_path`, wiring up its notifications,
    /// progress reporting, settings, and the initial open-document state.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let key = (worktree_id, language.name());
        // `started_language_servers` deduplicates starts: the entry holds the
        // in-flight task, so a second call with the same key is a no-op.
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let mut language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    // All server events are funneled through one channel so
                    // they can be handled serially on the main thread below.
                    let (language_server_events_tx, language_server_events_rx) =
                        smol::channel::unbounded();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let language_server_events_tx = language_server_events_tx.clone();
                            move |params| {
                                language_server_events_tx
                                    .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
                                    .ok();
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` requests from the
                    // project's stored language-server settings, either a
                    // named section or the whole settings object.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params| {
                                let settings = settings.lock();
                                Ok(params
                                    .items
                                    .into_iter()
                                    .map(|item| {
                                        if let Some(section) = &item.section {
                                            settings
                                                .get(section)
                                                .cloned()
                                                .unwrap_or(serde_json::Value::Null)
                                        } else {
                                            settings.clone()
                                        }
                                    })
                                    .collect())
                            }
                        })
                        .detach();

                    // Translate `$/progress` notifications into work events.
                    // Only string tokens are supported; numeric tokens are
                    // logged and dropped.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::String(token) => token,
                                lsp::NumberOrString::Number(token) => {
                                    log::info!("skipping numeric progress token {}", token);
                                    return;
                                }
                            };

                            match params.value {
                                lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                                    lsp::WorkDoneProgress::Begin(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkStart { token })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::Report(report) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkProgress {
                                                token,
                                                progress: LanguageServerProgress {
                                                    message: report.message,
                                                    percentage: report
                                                        .percentage
                                                        .map(|p| p as usize),
                                                    last_update_at: Instant::now(),
                                                },
                                            })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::End(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkEnd { token })
                                            .ok();
                                    }
                                },
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| {
                        let this = this.downgrade();
                        async move {
                            while let Ok(event) = language_server_events_rx.recv().await {
                                let this = this.upgrade(&cx)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_event(server_id, event, &language, cx)
                                });

                                // Don't starve the main thread when lots of events arrive all at once.
                                smol::future::yield_now().await;
                            }
                            Some(())
                        }
                    })
                    .detach();

                    let language_server = language_server.initialize().await.log_err()?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), language_server.clone());
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings to the freshly-initialized server.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if (file.worktree.read(cx).id(), language.name()) != key {
                                    continue;
                                }

                                // NOTE(review): `?` here aborts the whole closure
                                // (skipping remaining buffers and `cx.notify`) on
                                // the first non-local file — presumably every file
                                // in a local worktree is local; confirm.
                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                // NOTE(review): a failed notify also aborts the
                                // loop via `?`, leaving later buffers unopened.
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1450
    /// Handles one event from a language server, updating the local status
    /// model and mirroring the event to collaborators.
    ///
    /// Work whose token equals the language's disk-based-diagnostics progress
    /// token is folded into the coarser "diagnostics updating / updated"
    /// lifecycle instead of being reported as ordinary progress.
    fn on_lsp_event(
        &mut self,
        language_server_id: usize,
        event: LanguageServerEvent,
        language: &Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                // Unknown server (e.g. already shut down): drop the event.
                return;
            };

        match event {
            LanguageServerEvent::WorkStart { token } => {
                if Some(&token) == disk_diagnostics_token {
                    // Count nested disk-diagnostic runs; only the first one
                    // transitions into the "updating" state.
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::WorkProgress { token, progress } => {
                // Progress for the disk-diagnostics token is intentionally
                // swallowed; only ordinary work tokens are surfaced.
                if Some(&token) != disk_diagnostics_token {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        progress.clone(),
                        cx,
                    );
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: progress.message,
                                percentage: progress.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            LanguageServerEvent::WorkEnd { token } => {
                if Some(&token) == disk_diagnostics_token {
                    // Mirror of WorkStart: only the last nested run finishes
                    // the "updating" state.
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::DiagnosticsUpdate(mut params) => {
                language.process_diagnostics(&mut params);

                // Languages without a dedicated progress token bracket each
                // publish with a started/finished pair so the UI still shows
                // an updating state.
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_started(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                }
                self.update_diagnostics(
                    params,
                    language
                        .disk_based_diagnostic_sources()
                        .unwrap_or(&Default::default()),
                    cx,
                )
                .log_err();
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_finished(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
        }
    }
1563
1564 fn on_lsp_work_start(
1565 &mut self,
1566 language_server_id: usize,
1567 token: String,
1568 cx: &mut ModelContext<Self>,
1569 ) {
1570 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1571 status.pending_work.insert(
1572 token,
1573 LanguageServerProgress {
1574 message: None,
1575 percentage: None,
1576 last_update_at: Instant::now(),
1577 },
1578 );
1579 cx.notify();
1580 }
1581 }
1582
1583 fn on_lsp_work_progress(
1584 &mut self,
1585 language_server_id: usize,
1586 token: String,
1587 progress: LanguageServerProgress,
1588 cx: &mut ModelContext<Self>,
1589 ) {
1590 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1591 status.pending_work.insert(token, progress);
1592 cx.notify();
1593 }
1594 }
1595
1596 fn on_lsp_work_end(
1597 &mut self,
1598 language_server_id: usize,
1599 token: String,
1600 cx: &mut ModelContext<Self>,
1601 ) {
1602 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1603 status.pending_work.remove(&token);
1604 cx.notify();
1605 }
1606 }
1607
1608 fn broadcast_language_server_update(
1609 &self,
1610 language_server_id: usize,
1611 event: proto::update_language_server::Variant,
1612 ) {
1613 if let Some(project_id) = self.remote_id() {
1614 self.client
1615 .send(proto::UpdateLanguageServer {
1616 project_id,
1617 language_server_id: language_server_id as u64,
1618 variant: Some(event),
1619 })
1620 .log_err();
1621 }
1622 }
1623
1624 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1625 for server in self.language_servers.values() {
1626 server
1627 .notify::<lsp::notification::DidChangeConfiguration>(
1628 lsp::DidChangeConfigurationParams {
1629 settings: settings.clone(),
1630 },
1631 )
1632 .ok();
1633 }
1634 *self.language_server_settings.lock() = settings;
1635 }
1636
    /// Returns the statuses of all known language servers, ordered by server
    /// id (the map is a `BTreeMap`).
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
1642
1643 pub fn update_diagnostics(
1644 &mut self,
1645 params: lsp::PublishDiagnosticsParams,
1646 disk_based_sources: &HashSet<String>,
1647 cx: &mut ModelContext<Self>,
1648 ) -> Result<()> {
1649 let abs_path = params
1650 .uri
1651 .to_file_path()
1652 .map_err(|_| anyhow!("URI is not a file"))?;
1653 let mut next_group_id = 0;
1654 let mut diagnostics = Vec::default();
1655 let mut primary_diagnostic_group_ids = HashMap::default();
1656 let mut sources_by_group_id = HashMap::default();
1657 let mut supporting_diagnostics = HashMap::default();
1658 for diagnostic in ¶ms.diagnostics {
1659 let source = diagnostic.source.as_ref();
1660 let code = diagnostic.code.as_ref().map(|code| match code {
1661 lsp::NumberOrString::Number(code) => code.to_string(),
1662 lsp::NumberOrString::String(code) => code.clone(),
1663 });
1664 let range = range_from_lsp(diagnostic.range);
1665 let is_supporting = diagnostic
1666 .related_information
1667 .as_ref()
1668 .map_or(false, |infos| {
1669 infos.iter().any(|info| {
1670 primary_diagnostic_group_ids.contains_key(&(
1671 source,
1672 code.clone(),
1673 range_from_lsp(info.location.range),
1674 ))
1675 })
1676 });
1677
1678 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1679 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1680 });
1681
1682 if is_supporting {
1683 supporting_diagnostics.insert(
1684 (source, code.clone(), range),
1685 (diagnostic.severity, is_unnecessary),
1686 );
1687 } else {
1688 let group_id = post_inc(&mut next_group_id);
1689 let is_disk_based =
1690 source.map_or(false, |source| disk_based_sources.contains(source));
1691
1692 sources_by_group_id.insert(group_id, source);
1693 primary_diagnostic_group_ids
1694 .insert((source, code.clone(), range.clone()), group_id);
1695
1696 diagnostics.push(DiagnosticEntry {
1697 range,
1698 diagnostic: Diagnostic {
1699 code: code.clone(),
1700 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1701 message: diagnostic.message.clone(),
1702 group_id,
1703 is_primary: true,
1704 is_valid: true,
1705 is_disk_based,
1706 is_unnecessary,
1707 },
1708 });
1709 if let Some(infos) = &diagnostic.related_information {
1710 for info in infos {
1711 if info.location.uri == params.uri && !info.message.is_empty() {
1712 let range = range_from_lsp(info.location.range);
1713 diagnostics.push(DiagnosticEntry {
1714 range,
1715 diagnostic: Diagnostic {
1716 code: code.clone(),
1717 severity: DiagnosticSeverity::INFORMATION,
1718 message: info.message.clone(),
1719 group_id,
1720 is_primary: false,
1721 is_valid: true,
1722 is_disk_based,
1723 is_unnecessary: false,
1724 },
1725 });
1726 }
1727 }
1728 }
1729 }
1730 }
1731
1732 for entry in &mut diagnostics {
1733 let diagnostic = &mut entry.diagnostic;
1734 if !diagnostic.is_primary {
1735 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1736 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1737 source,
1738 diagnostic.code.clone(),
1739 entry.range.clone(),
1740 )) {
1741 if let Some(severity) = severity {
1742 diagnostic.severity = severity;
1743 }
1744 diagnostic.is_unnecessary = is_unnecessary;
1745 }
1746 }
1747 }
1748
1749 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1750 Ok(())
1751 }
1752
    /// Applies a fresh set of diagnostics for `abs_path`: updates any open
    /// buffer for that path, stores the diagnostics in the owning worktree,
    /// and emits a `DiagnosticsUpdated` event.
    ///
    /// Diagnostics for paths outside any worktree are an error; diagnostics
    /// for invisible worktrees are silently dropped.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // Update the first open buffer whose file matches the path.
        // NOTE(review): the match is by relative path only, not worktree id —
        // presumably two worktrees rarely contain the same relative path, but
        // confirm this can't update a buffer from a different worktree.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
                    break;
                }
            }
        }
        // Persist the diagnostics on the worktree so they survive buffer
        // closes and can be served to late-opened buffers.
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1793
    /// Installs `diagnostics` on an open buffer, remapping their coordinates
    /// from the LSP-versioned snapshot (and, for disk-based diagnostics, from
    /// the last saved contents) onto the buffer's current contents.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tiebreaker for entries with identical ranges: primaries first, then
        // non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // The snapshot matching the version the server diagnosed, so ranges
        // can be interpreted against the text the server actually saw.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, end descending (containing ranges first).
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let mut edits_since_save = snapshot
            .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        'outer: for entry in diagnostics {
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
            if entry.diagnostic.is_disk_based {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        // Edit entirely before the diagnostic: remember its
                        // old/new endpoints so the range can be shifted.
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        // Edit overlaps the diagnostic: it is stale, drop it.
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                // Shift the range by the net displacement of preceding edits.
                let start_overshoot = start - last_edit_old_end;
                start = last_edit_new_end;
                start += start_overshoot;

                let end_overshoot = end - last_edit_old_end;
                end = last_edit_new_end;
                end += end_overshoot;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
1877
    /// Formats the given buffers, returning the per-buffer transactions that
    /// the formatting produced.
    ///
    /// Remote buffers are formatted via an RPC request to the host; local
    /// buffers use the buffer's language server (full-document formatting,
    /// falling back to range formatting over the whole document). When
    /// `push_to_history` is false, the formatting transactions are forgotten
    /// so they can't be undone from the buffer's history.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    // Local buffers without a language server are skipped.
                    if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // A buffer with no file at all can't be formatted; bail with
                // an empty transaction set.
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote buffers can only be formatted if the project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to format the remote buffers and replay the
            // resulting transactions locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Prefer whole-document formatting; fall back to formatting
                // the full range; skip buffers whose server supports neither.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    // Apply all edits as one transaction so formatting undoes
                    // atomically.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1992
1993 pub fn definition<T: ToPointUtf16>(
1994 &self,
1995 buffer: &ModelHandle<Buffer>,
1996 position: T,
1997 cx: &mut ModelContext<Self>,
1998 ) -> Task<Result<Vec<Location>>> {
1999 let position = position.to_point_utf16(buffer.read(cx));
2000 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2001 }
2002
2003 pub fn references<T: ToPointUtf16>(
2004 &self,
2005 buffer: &ModelHandle<Buffer>,
2006 position: T,
2007 cx: &mut ModelContext<Self>,
2008 ) -> Task<Result<Vec<Location>>> {
2009 let position = position.to_point_utf16(buffer.read(cx));
2010 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2011 }
2012
2013 pub fn document_highlights<T: ToPointUtf16>(
2014 &self,
2015 buffer: &ModelHandle<Buffer>,
2016 position: T,
2017 cx: &mut ModelContext<Self>,
2018 ) -> Task<Result<Vec<DocumentHighlight>>> {
2019 let position = position.to_point_utf16(buffer.read(cx));
2020
2021 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2022 }
2023
    /// Searches all workspaces for symbols matching `query`.
    ///
    /// Local projects fan the query out to every distinct language server
    /// (deduplicated by server pointer) via `workspace/symbol` and convert the
    /// results into project symbols; remote projects forward the query to the
    /// host over RPC.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate servers shared across worktrees/languages, keeping
            // the context needed to interpret their results.
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // NOTE(review): this zip relies on `into_values()`
                        // visiting entries in the same order as the earlier
                        // `values()` pass — the map is not mutated in between,
                        // so the pairing presumably holds; confirm.
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer resolving the symbol into one of
                                    // our worktrees; otherwise keep a path
                                    // relative to the server's worktree root.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host to run the symbol search.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2127
    /// Opens the buffer containing `symbol`.
    ///
    /// Local projects resolve the symbol's path against its worktree and open
    /// the file through the language server that produced the symbol; remote
    /// projects ask the host for the buffer over RPC.
    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            // The server that reported the symbol is looked up by the
            // symbol's source worktree and language.
            let language_server = if let Some(server) = self.language_servers.get(&(
                symbol.source_worktree_id,
                Arc::from(symbol.language_name.as_str()),
            )) {
                server.clone()
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(
                symbol_uri,
                Arc::from(symbol.language_name.as_str()),
                language_server,
                cx,
            )
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2182
    /// Requests code completions at `position` in the given buffer.
    ///
    /// For buffers in a local worktree the request goes directly to the
    /// buffer's language server; for remote projects it is forwarded to the
    /// host over RPC. Returns an empty list when the buffer has no backing
    /// file or no language server is available.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // A buffer without a file can't have completions.
            return Task::ready(Ok(Default::default()));
        };

        // Anchor *after* the position so the anchor stays at the insertion
        // point as completion text is typed.
        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Local worktree files always have an absolute path.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The LSP spec allows either a flat array or a list object.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose edit range no longer
                            // lies inside the buffer (it may have changed while
                            // the request was in flight).
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    // Prefer a language-specific label; fall
                                    // back to the raw LSP label text.
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Catch this replica up to the host's buffer version before
                // deserializing anchors that reference it.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2307
    /// Applies any `additionalTextEdits` attached to a completion (for
    /// example an auto-inserted import), after asking the language server to
    /// resolve the completion item.
    ///
    /// Returns the resulting transaction, or `None` when there was nothing to
    /// apply. When `push_to_history` is false the edits are applied but kept
    /// out of the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` may fill in additional_text_edits
                // that were omitted from the initial completion response.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Seal off any pending transaction so the additional
                        // edits form their own undo step.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // The edits turned out to be no-ops.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edit operations have replicated
                    // locally before exposing the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2388
2389 pub fn code_actions<T: ToOffset>(
2390 &self,
2391 buffer_handle: &ModelHandle<Buffer>,
2392 range: Range<T>,
2393 cx: &mut ModelContext<Self>,
2394 ) -> Task<Result<Vec<CodeAction>>> {
2395 let buffer_handle = buffer_handle.clone();
2396 let buffer = buffer_handle.read(cx);
2397 let buffer_id = buffer.remote_id();
2398 let worktree;
2399 let buffer_abs_path;
2400 if let Some(file) = File::from_dyn(buffer.file()) {
2401 worktree = file.worktree.clone();
2402 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2403 } else {
2404 return Task::ready(Ok(Default::default()));
2405 };
2406 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2407
2408 if worktree.read(cx).as_local().is_some() {
2409 let buffer_abs_path = buffer_abs_path.unwrap();
2410 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2411 server.clone()
2412 } else {
2413 return Task::ready(Ok(Default::default()));
2414 };
2415
2416 let lsp_range = lsp::Range::new(
2417 range.start.to_point_utf16(buffer).to_lsp_position(),
2418 range.end.to_point_utf16(buffer).to_lsp_position(),
2419 );
2420 cx.foreground().spawn(async move {
2421 if !lang_server.capabilities().code_action_provider.is_some() {
2422 return Ok(Default::default());
2423 }
2424
2425 Ok(lang_server
2426 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2427 text_document: lsp::TextDocumentIdentifier::new(
2428 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2429 ),
2430 range: lsp_range,
2431 work_done_progress_params: Default::default(),
2432 partial_result_params: Default::default(),
2433 context: lsp::CodeActionContext {
2434 diagnostics: Default::default(),
2435 only: Some(vec![
2436 lsp::CodeActionKind::QUICKFIX,
2437 lsp::CodeActionKind::REFACTOR,
2438 lsp::CodeActionKind::REFACTOR_EXTRACT,
2439 ]),
2440 },
2441 })
2442 .await?
2443 .unwrap_or_default()
2444 .into_iter()
2445 .filter_map(|entry| {
2446 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2447 Some(CodeAction {
2448 range: range.clone(),
2449 lsp_action,
2450 })
2451 } else {
2452 None
2453 }
2454 })
2455 .collect())
2456 })
2457 } else if let Some(project_id) = self.remote_id() {
2458 let rpc = self.client.clone();
2459 let version = buffer.version();
2460 cx.spawn_weak(|_, mut cx| async move {
2461 let response = rpc
2462 .request(proto::GetCodeActions {
2463 project_id,
2464 buffer_id,
2465 start: Some(language::proto::serialize_anchor(&range.start)),
2466 end: Some(language::proto::serialize_anchor(&range.end)),
2467 version: serialize_version(&version),
2468 })
2469 .await?;
2470
2471 buffer_handle
2472 .update(&mut cx, |buffer, _| {
2473 buffer.wait_for_version(deserialize_version(response.version))
2474 })
2475 .await;
2476
2477 response
2478 .actions
2479 .into_iter()
2480 .map(language::proto::deserialize_code_action)
2481 .collect()
2482 })
2483 } else {
2484 Task::ready(Ok(Default::default()))
2485 }
2486 }
2487
    /// Applies a previously-fetched code action.
    ///
    /// Locally the action is first resolved with the language server — either
    /// via `codeAction/resolve` (refreshing the stale range embedded in the
    /// action's `data`) or, when the server gave us no `data`, by re-querying
    /// code actions and matching on title. The resulting workspace edit is
    /// then applied across all affected buffers. On remote projects the
    /// action is sent to the host, and the host's transaction is replayed
    /// locally.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            // Re-resolve the action's anchored range against the current
            // buffer contents.
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // Patch the range stored in the action's opaque `data`
                    // payload, then ask the server to resolve the action.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-fetch the actions for this range and
                    // pick the one with a matching title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2574
    /// Applies an LSP `WorkspaceEdit` to the project, performing any file
    /// creations/renames/deletions and buffer edits it describes.
    ///
    /// Returns a `ProjectTransaction` mapping each edited buffer to the
    /// transaction that was applied to it. When `push_to_history` is false,
    /// the per-buffer transactions are applied but removed from undo history.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        language_name: Arc<str>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        let mut operations = Vec::new();
        // Per the LSP spec, `document_changes` supersedes `changes` when both
        // are present; plain `changes` are normalized into edit operations.
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with("/")` compares path
                    // components, not a trailing separator, so this branch
                    // looks like it never fires for paths produced by
                    // `to_file_path` — confirm whether directory-creation ops
                    // are ever hit here.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same `ends_with("/")` caveat as above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open (or look up) the buffer targeted by this edit via
                    // the same language server that produced the edit.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                language_name.clone(),
                                language_server.clone(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        // Group this document's edits into a standalone
                        // transaction, optionally hidden from undo history.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
2705
2706 pub fn prepare_rename<T: ToPointUtf16>(
2707 &self,
2708 buffer: ModelHandle<Buffer>,
2709 position: T,
2710 cx: &mut ModelContext<Self>,
2711 ) -> Task<Result<Option<Range<Anchor>>>> {
2712 let position = position.to_point_utf16(buffer.read(cx));
2713 self.request_lsp(buffer, PrepareRename { position }, cx)
2714 }
2715
2716 pub fn perform_rename<T: ToPointUtf16>(
2717 &self,
2718 buffer: ModelHandle<Buffer>,
2719 position: T,
2720 new_name: String,
2721 push_to_history: bool,
2722 cx: &mut ModelContext<Self>,
2723 ) -> Task<Result<ProjectTransaction>> {
2724 let position = position.to_point_utf16(buffer.read(cx));
2725 self.request_lsp(
2726 buffer,
2727 PerformRename {
2728 position,
2729 new_name,
2730 push_to_history,
2731 },
2732 cx,
2733 )
2734 }
2735
    /// Searches the project for `query`, returning the matching ranges in
    /// each buffer that contains at least one match.
    ///
    /// Locally this runs a multi-stage pipeline: worker tasks scan worktree
    /// files on disk to find candidate paths, candidates are opened as
    /// buffers, and a second pool of workers searches each buffer snapshot.
    /// Already-open buffers are searched as well. On remote projects the
    /// query is forwarded to the host.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan files on disk, splitting the combined file list
            // evenly across `workers` tasks; each matching path is sent down
            // `matching_paths_tx`.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    // Each worker owns the half-open range of
                                    // global file indices
                                    // [worker_start_ix, worker_end_ix).
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                // All remaining snapshots are
                                                // beyond this worker's range.
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                // This snapshot is entirely
                                                // before the worker's range.
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // Translate the worker's global
                                                // range into indices within
                                                // this snapshot.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf for all
                                                    // entries in this worker.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed buffer snapshots to the searchers — first all
            // already-open buffers, then buffers opened for each matching
            // path (skipping any path whose buffer was already sent).
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // `insert` returning true means this buffer hasn't
                            // been sent to the searchers yet.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search each incoming buffer snapshot; each worker
            // accumulates its own map, and the maps are merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2938
    /// Dispatches a typed LSP command for `buffer_handle`.
    ///
    /// Locally the command is translated into an LSP request for the buffer's
    /// language server; on remote projects it is serialized and forwarded to
    /// the host. Returns a default response when neither path applies or the
    /// server lacks the required capability.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Commands can opt out when the server doesn't advertise
                    // the capability they require.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        // Local but no file/server: fall through to an empty response.
        Task::ready(Ok(Default::default()))
    }
2981
2982 pub fn find_or_create_local_worktree(
2983 &mut self,
2984 abs_path: impl AsRef<Path>,
2985 visible: bool,
2986 cx: &mut ModelContext<Self>,
2987 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2988 let abs_path = abs_path.as_ref();
2989 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2990 Task::ready(Ok((tree.clone(), relative_path.into())))
2991 } else {
2992 let worktree = self.create_local_worktree(abs_path, visible, cx);
2993 cx.foreground()
2994 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2995 }
2996 }
2997
2998 pub fn find_local_worktree(
2999 &self,
3000 abs_path: &Path,
3001 cx: &AppContext,
3002 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3003 for tree in self.worktrees(cx) {
3004 if let Some(relative_path) = tree
3005 .read(cx)
3006 .as_local()
3007 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3008 {
3009 return Some((tree.clone(), relative_path.into()));
3010 }
3011 }
3012 None
3013 }
3014
3015 pub fn is_shared(&self) -> bool {
3016 match &self.client_state {
3017 ProjectClientState::Local { is_shared, .. } => *is_shared,
3018 ProjectClientState::Remote { .. } => false,
3019 }
3020 }
3021
3022 fn create_local_worktree(
3023 &mut self,
3024 abs_path: impl AsRef<Path>,
3025 visible: bool,
3026 cx: &mut ModelContext<Self>,
3027 ) -> Task<Result<ModelHandle<Worktree>>> {
3028 let fs = self.fs.clone();
3029 let client = self.client.clone();
3030 let next_entry_id = self.next_entry_id.clone();
3031 let path: Arc<Path> = abs_path.as_ref().into();
3032 let task = self
3033 .loading_local_worktrees
3034 .entry(path.clone())
3035 .or_insert_with(|| {
3036 cx.spawn(|project, mut cx| {
3037 async move {
3038 let worktree = Worktree::local(
3039 client.clone(),
3040 path.clone(),
3041 visible,
3042 fs,
3043 next_entry_id,
3044 &mut cx,
3045 )
3046 .await;
3047 project.update(&mut cx, |project, _| {
3048 project.loading_local_worktrees.remove(&path);
3049 });
3050 let worktree = worktree?;
3051
3052 let (remote_project_id, is_shared) =
3053 project.update(&mut cx, |project, cx| {
3054 project.add_worktree(&worktree, cx);
3055 (project.remote_id(), project.is_shared())
3056 });
3057
3058 if let Some(project_id) = remote_project_id {
3059 if is_shared {
3060 worktree
3061 .update(&mut cx, |worktree, cx| {
3062 worktree.as_local_mut().unwrap().share(project_id, cx)
3063 })
3064 .await?;
3065 } else {
3066 worktree
3067 .update(&mut cx, |worktree, cx| {
3068 worktree.as_local_mut().unwrap().register(project_id, cx)
3069 })
3070 .await?;
3071 }
3072 }
3073
3074 Ok(worktree)
3075 }
3076 .map_err(|err| Arc::new(err))
3077 })
3078 .shared()
3079 })
3080 .clone();
3081 cx.foreground().spawn(async move {
3082 match task.await {
3083 Ok(worktree) => Ok(worktree),
3084 Err(err) => Err(anyhow!("{}", err)),
3085 }
3086 })
3087 }
3088
3089 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3090 self.worktrees.retain(|worktree| {
3091 worktree
3092 .upgrade(cx)
3093 .map_or(false, |w| w.read(cx).id() != id)
3094 });
3095 cx.notify();
3096 }
3097
3098 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3099 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3100 if worktree.read(cx).is_local() {
3101 cx.subscribe(&worktree, |this, worktree, _, cx| {
3102 this.update_local_worktree_buffers(worktree, cx);
3103 })
3104 .detach();
3105 }
3106
3107 let push_strong_handle = {
3108 let worktree = worktree.read(cx);
3109 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3110 };
3111 if push_strong_handle {
3112 self.worktrees
3113 .push(WorktreeHandle::Strong(worktree.clone()));
3114 } else {
3115 cx.observe_release(&worktree, |this, _, cx| {
3116 this.worktrees
3117 .retain(|worktree| worktree.upgrade(cx).is_some());
3118 cx.notify();
3119 })
3120 .detach();
3121 self.worktrees
3122 .push(WorktreeHandle::Weak(worktree.downgrade()));
3123 }
3124 cx.notify();
3125 }
3126
    /// Reconciles every open buffer's `File` with the latest snapshot of the
    /// given worktree after a filesystem change.
    ///
    /// Each buffer's file is re-resolved first by entry id, then by path, and
    /// finally marked as having no entry (e.g. deleted on disk). Updated file
    /// metadata is broadcast to collaborators when the project is remote.
    /// Buffers whose handles can no longer be upgraded are dropped from
    /// `opened_buffers`.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only touch buffers belonging to this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            // The entry still exists (possibly renamed).
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            // The original entry is gone but another entry now
                            // occupies the same path.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // The file no longer exists on disk; keep the old
                            // path and mtime but detach from any entry.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // Propagate the new file metadata to collaborators.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }
    }
3194
3195 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3196 let new_active_entry = entry.and_then(|project_path| {
3197 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3198 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3199 Some(entry.id)
3200 });
3201 if new_active_entry != self.active_entry {
3202 self.active_entry = new_active_entry;
3203 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3204 }
3205 }
3206
    /// Whether any language server is currently running a disk-based
    /// diagnostics pass (tracked via the started/finished counter below).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
3210
3211 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3212 let mut summary = DiagnosticSummary::default();
3213 for (_, path_summary) in self.diagnostic_summaries(cx) {
3214 summary.error_count += path_summary.error_count;
3215 summary.warning_count += path_summary.warning_count;
3216 summary.info_count += path_summary.info_count;
3217 summary.hint_count += path_summary.hint_count;
3218 }
3219 summary
3220 }
3221
3222 pub fn diagnostic_summaries<'a>(
3223 &'a self,
3224 cx: &'a AppContext,
3225 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3226 self.worktrees(cx).flat_map(move |worktree| {
3227 let worktree = worktree.read(cx);
3228 let worktree_id = worktree.id();
3229 worktree
3230 .diagnostic_summaries()
3231 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3232 })
3233 }
3234
3235 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3236 self.language_servers_with_diagnostics_running += 1;
3237 if self.language_servers_with_diagnostics_running == 1 {
3238 cx.emit(Event::DiskBasedDiagnosticsStarted);
3239 }
3240 }
3241
    /// Notes that a language server finished a disk-based diagnostics pass.
    /// Every completion emits an update event; the last server to finish also
    /// emits the overall finished event.
    pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::DiskBasedDiagnosticsUpdated);
        self.language_servers_with_diagnostics_running -= 1;
        if self.language_servers_with_diagnostics_running == 0 {
            cx.emit(Event::DiskBasedDiagnosticsFinished);
        }
    }
3249
    /// The id of the most recently activated project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3253
3254 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3255 self.worktree_for_id(path.worktree_id, cx)?
3256 .read(cx)
3257 .entry_for_path(&path.path)
3258 .map(|entry| entry.id)
3259 }
3260
3261 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3262 let worktree = self.worktree_for_entry(entry_id, cx)?;
3263 let worktree = worktree.read(cx);
3264 let worktree_id = worktree.id();
3265 let path = worktree.entry_for_id(entry_id)?.path.clone();
3266 Some(ProjectPath { worktree_id, path })
3267 }
3268
3269 // RPC message handlers
3270
    /// Handles the `UnshareProject` message by delegating to
    /// `project_unshared`, which tears down this replica's shared state.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.project_unshared(cx));
        Ok(())
    }
3280
3281 async fn handle_add_collaborator(
3282 this: ModelHandle<Self>,
3283 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3284 _: Arc<Client>,
3285 mut cx: AsyncAppContext,
3286 ) -> Result<()> {
3287 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3288 let collaborator = envelope
3289 .payload
3290 .collaborator
3291 .take()
3292 .ok_or_else(|| anyhow!("empty collaborator"))?;
3293
3294 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3295 this.update(&mut cx, |this, cx| {
3296 this.collaborators
3297 .insert(collaborator.peer_id, collaborator);
3298 cx.notify();
3299 });
3300
3301 Ok(())
3302 }
3303
3304 async fn handle_remove_collaborator(
3305 this: ModelHandle<Self>,
3306 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3307 _: Arc<Client>,
3308 mut cx: AsyncAppContext,
3309 ) -> Result<()> {
3310 this.update(&mut cx, |this, cx| {
3311 let peer_id = PeerId(envelope.payload.peer_id);
3312 let replica_id = this
3313 .collaborators
3314 .remove(&peer_id)
3315 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3316 .replica_id;
3317 for (_, buffer) in &this.opened_buffers {
3318 if let Some(buffer) = buffer.upgrade(cx) {
3319 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3320 }
3321 }
3322 cx.notify();
3323 Ok(())
3324 })
3325 }
3326
    /// Handles the host announcing a new worktree: creates a remote worktree
    /// with empty contents and kicks off the task that loads it.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // Entries and diagnostic summaries start empty; they are filled
            // in later (see `handle_update_worktree` and
            // `handle_update_diagnostic_summary`).
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            // The load task runs to completion on its own.
            load_task.detach();
            Ok(())
        })
    }
3350
3351 async fn handle_unregister_worktree(
3352 this: ModelHandle<Self>,
3353 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3354 _: Arc<Client>,
3355 mut cx: AsyncAppContext,
3356 ) -> Result<()> {
3357 this.update(&mut cx, |this, cx| {
3358 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3359 this.remove_worktree(worktree_id, cx);
3360 Ok(())
3361 })
3362 }
3363
    /// Handles an incremental worktree update from the host by applying it
    /// to the matching remote worktree, if that worktree still exists.
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // Only remote worktrees receive these messages; a local
                    // worktree here would be a protocol violation.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
3381
    /// Handles a per-path diagnostic summary pushed by the host: records it
    /// on the remote worktree and emits `DiagnosticsUpdated` for that path.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            // Silently ignore summaries for worktrees we no longer have.
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
3408
3409 async fn handle_start_language_server(
3410 this: ModelHandle<Self>,
3411 envelope: TypedEnvelope<proto::StartLanguageServer>,
3412 _: Arc<Client>,
3413 mut cx: AsyncAppContext,
3414 ) -> Result<()> {
3415 let server = envelope
3416 .payload
3417 .server
3418 .ok_or_else(|| anyhow!("invalid server"))?;
3419 this.update(&mut cx, |this, cx| {
3420 this.language_server_statuses.insert(
3421 server.id as usize,
3422 LanguageServerStatus {
3423 name: server.name,
3424 pending_work: Default::default(),
3425 pending_diagnostic_updates: 0,
3426 },
3427 );
3428 cx.notify();
3429 });
3430 Ok(())
3431 }
3432
    /// Handles language-server status updates forwarded by the host,
    /// mirroring work-progress and disk-based-diagnostics state into this
    /// replica.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            // A unit of server work, identified by `token`, began.
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            // Progress for an existing unit of work.
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            // A unit of work completed.
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            // Disk-based diagnostics pass started/finished on the host.
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3481
3482 async fn handle_update_buffer(
3483 this: ModelHandle<Self>,
3484 envelope: TypedEnvelope<proto::UpdateBuffer>,
3485 _: Arc<Client>,
3486 mut cx: AsyncAppContext,
3487 ) -> Result<()> {
3488 this.update(&mut cx, |this, cx| {
3489 let payload = envelope.payload.clone();
3490 let buffer_id = payload.buffer_id;
3491 let ops = payload
3492 .operations
3493 .into_iter()
3494 .map(|op| language::proto::deserialize_operation(op))
3495 .collect::<Result<Vec<_>, _>>()?;
3496 match this.opened_buffers.entry(buffer_id) {
3497 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3498 OpenBuffer::Strong(buffer) => {
3499 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3500 }
3501 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3502 OpenBuffer::Weak(_) => {}
3503 },
3504 hash_map::Entry::Vacant(e) => {
3505 e.insert(OpenBuffer::Loading(ops));
3506 }
3507 }
3508 Ok(())
3509 })
3510 }
3511
3512 async fn handle_update_buffer_file(
3513 this: ModelHandle<Self>,
3514 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3515 _: Arc<Client>,
3516 mut cx: AsyncAppContext,
3517 ) -> Result<()> {
3518 this.update(&mut cx, |this, cx| {
3519 let payload = envelope.payload.clone();
3520 let buffer_id = payload.buffer_id;
3521 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3522 let worktree = this
3523 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3524 .ok_or_else(|| anyhow!("no such worktree"))?;
3525 let file = File::from_proto(file, worktree.clone(), cx)?;
3526 let buffer = this
3527 .opened_buffers
3528 .get_mut(&buffer_id)
3529 .and_then(|b| b.upgrade(cx))
3530 .ok_or_else(|| anyhow!("no such buffer"))?;
3531 buffer.update(cx, |buffer, cx| {
3532 buffer.file_updated(Box::new(file), cx).detach();
3533 });
3534 Ok(())
3535 })
3536 }
3537
3538 async fn handle_save_buffer(
3539 this: ModelHandle<Self>,
3540 envelope: TypedEnvelope<proto::SaveBuffer>,
3541 _: Arc<Client>,
3542 mut cx: AsyncAppContext,
3543 ) -> Result<proto::BufferSaved> {
3544 let buffer_id = envelope.payload.buffer_id;
3545 let requested_version = deserialize_version(envelope.payload.version);
3546
3547 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3548 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3549 let buffer = this
3550 .opened_buffers
3551 .get(&buffer_id)
3552 .map(|buffer| buffer.upgrade(cx).unwrap())
3553 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3554 Ok::<_, anyhow::Error>((project_id, buffer))
3555 })?;
3556 buffer
3557 .update(&mut cx, |buffer, _| {
3558 buffer.wait_for_version(requested_version)
3559 })
3560 .await;
3561
3562 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3563 Ok(proto::BufferSaved {
3564 project_id,
3565 buffer_id,
3566 version: serialize_version(&saved_version),
3567 mtime: Some(mtime.into()),
3568 })
3569 }
3570
3571 async fn handle_format_buffers(
3572 this: ModelHandle<Self>,
3573 envelope: TypedEnvelope<proto::FormatBuffers>,
3574 _: Arc<Client>,
3575 mut cx: AsyncAppContext,
3576 ) -> Result<proto::FormatBuffersResponse> {
3577 let sender_id = envelope.original_sender_id()?;
3578 let format = this.update(&mut cx, |this, cx| {
3579 let mut buffers = HashSet::default();
3580 for buffer_id in &envelope.payload.buffer_ids {
3581 buffers.insert(
3582 this.opened_buffers
3583 .get(buffer_id)
3584 .map(|buffer| buffer.upgrade(cx).unwrap())
3585 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3586 );
3587 }
3588 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3589 })?;
3590
3591 let project_transaction = format.await?;
3592 let project_transaction = this.update(&mut cx, |this, cx| {
3593 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3594 });
3595 Ok(proto::FormatBuffersResponse {
3596 transaction: Some(project_transaction),
3597 })
3598 }
3599
3600 async fn handle_get_completions(
3601 this: ModelHandle<Self>,
3602 envelope: TypedEnvelope<proto::GetCompletions>,
3603 _: Arc<Client>,
3604 mut cx: AsyncAppContext,
3605 ) -> Result<proto::GetCompletionsResponse> {
3606 let position = envelope
3607 .payload
3608 .position
3609 .and_then(language::proto::deserialize_anchor)
3610 .ok_or_else(|| anyhow!("invalid position"))?;
3611 let version = deserialize_version(envelope.payload.version);
3612 let buffer = this.read_with(&cx, |this, cx| {
3613 this.opened_buffers
3614 .get(&envelope.payload.buffer_id)
3615 .map(|buffer| buffer.upgrade(cx).unwrap())
3616 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3617 })?;
3618 buffer
3619 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3620 .await;
3621 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3622 let completions = this
3623 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3624 .await?;
3625
3626 Ok(proto::GetCompletionsResponse {
3627 completions: completions
3628 .iter()
3629 .map(language::proto::serialize_completion)
3630 .collect(),
3631 version: serialize_version(&version),
3632 })
3633 }
3634
3635 async fn handle_apply_additional_edits_for_completion(
3636 this: ModelHandle<Self>,
3637 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3638 _: Arc<Client>,
3639 mut cx: AsyncAppContext,
3640 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3641 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3642 let buffer = this
3643 .opened_buffers
3644 .get(&envelope.payload.buffer_id)
3645 .map(|buffer| buffer.upgrade(cx).unwrap())
3646 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3647 let language = buffer.read(cx).language();
3648 let completion = language::proto::deserialize_completion(
3649 envelope
3650 .payload
3651 .completion
3652 .ok_or_else(|| anyhow!("invalid completion"))?,
3653 language,
3654 )?;
3655 Ok::<_, anyhow::Error>(
3656 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3657 )
3658 })?;
3659
3660 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3661 transaction: apply_additional_edits
3662 .await?
3663 .as_ref()
3664 .map(language::proto::serialize_transaction),
3665 })
3666 }
3667
3668 async fn handle_get_code_actions(
3669 this: ModelHandle<Self>,
3670 envelope: TypedEnvelope<proto::GetCodeActions>,
3671 _: Arc<Client>,
3672 mut cx: AsyncAppContext,
3673 ) -> Result<proto::GetCodeActionsResponse> {
3674 let start = envelope
3675 .payload
3676 .start
3677 .and_then(language::proto::deserialize_anchor)
3678 .ok_or_else(|| anyhow!("invalid start"))?;
3679 let end = envelope
3680 .payload
3681 .end
3682 .and_then(language::proto::deserialize_anchor)
3683 .ok_or_else(|| anyhow!("invalid end"))?;
3684 let buffer = this.update(&mut cx, |this, cx| {
3685 this.opened_buffers
3686 .get(&envelope.payload.buffer_id)
3687 .map(|buffer| buffer.upgrade(cx).unwrap())
3688 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3689 })?;
3690 buffer
3691 .update(&mut cx, |buffer, _| {
3692 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3693 })
3694 .await;
3695
3696 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3697 let code_actions = this.update(&mut cx, |this, cx| {
3698 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3699 })?;
3700
3701 Ok(proto::GetCodeActionsResponse {
3702 actions: code_actions
3703 .await?
3704 .iter()
3705 .map(language::proto::serialize_code_action)
3706 .collect(),
3707 version: serialize_version(&version),
3708 })
3709 }
3710
3711 async fn handle_apply_code_action(
3712 this: ModelHandle<Self>,
3713 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3714 _: Arc<Client>,
3715 mut cx: AsyncAppContext,
3716 ) -> Result<proto::ApplyCodeActionResponse> {
3717 let sender_id = envelope.original_sender_id()?;
3718 let action = language::proto::deserialize_code_action(
3719 envelope
3720 .payload
3721 .action
3722 .ok_or_else(|| anyhow!("invalid action"))?,
3723 )?;
3724 let apply_code_action = this.update(&mut cx, |this, cx| {
3725 let buffer = this
3726 .opened_buffers
3727 .get(&envelope.payload.buffer_id)
3728 .map(|buffer| buffer.upgrade(cx).unwrap())
3729 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3730 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3731 })?;
3732
3733 let project_transaction = apply_code_action.await?;
3734 let project_transaction = this.update(&mut cx, |this, cx| {
3735 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3736 });
3737 Ok(proto::ApplyCodeActionResponse {
3738 transaction: Some(project_transaction),
3739 })
3740 }
3741
    /// Generic handler for LSP-backed requests proxied from guests.
    ///
    /// Deserializes the request via `T::from_proto`, runs it against the
    /// local language server through `request_lsp`, and serializes the
    /// response at the buffer version captured before the request ran.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before running the request so the response's
        // anchors can be interpreted against it by the guest.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3782
3783 async fn handle_get_project_symbols(
3784 this: ModelHandle<Self>,
3785 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3786 _: Arc<Client>,
3787 mut cx: AsyncAppContext,
3788 ) -> Result<proto::GetProjectSymbolsResponse> {
3789 let symbols = this
3790 .update(&mut cx, |this, cx| {
3791 this.symbols(&envelope.payload.query, cx)
3792 })
3793 .await?;
3794
3795 Ok(proto::GetProjectSymbolsResponse {
3796 symbols: symbols.iter().map(serialize_symbol).collect(),
3797 })
3798 }
3799
3800 async fn handle_search_project(
3801 this: ModelHandle<Self>,
3802 envelope: TypedEnvelope<proto::SearchProject>,
3803 _: Arc<Client>,
3804 mut cx: AsyncAppContext,
3805 ) -> Result<proto::SearchProjectResponse> {
3806 let peer_id = envelope.original_sender_id()?;
3807 let query = SearchQuery::from_proto(envelope.payload)?;
3808 let result = this
3809 .update(&mut cx, |this, cx| this.search(query, cx))
3810 .await?;
3811
3812 this.update(&mut cx, |this, cx| {
3813 let mut locations = Vec::new();
3814 for (buffer, ranges) in result {
3815 for range in ranges {
3816 let start = serialize_anchor(&range.start);
3817 let end = serialize_anchor(&range.end);
3818 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3819 locations.push(proto::Location {
3820 buffer: Some(buffer),
3821 start: Some(start),
3822 end: Some(end),
3823 });
3824 }
3825 }
3826 Ok(proto::SearchProjectResponse { locations })
3827 })
3828 }
3829
    /// Handles a guest asking to open the buffer that contains a project
    /// symbol previously handed out by this host.
    ///
    /// The symbol carries a signature minted by `symbol_signature` (which
    /// mixes in a per-project nonce); it is re-derived and compared here so
    /// guests cannot fabricate requests for arbitrary paths.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
3860
3861 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3862 let mut hasher = Sha256::new();
3863 hasher.update(worktree_id.to_proto().to_be_bytes());
3864 hasher.update(path.to_string_lossy().as_bytes());
3865 hasher.update(self.nonce.to_be_bytes());
3866 hasher.finalize().as_slice().try_into().unwrap()
3867 }
3868
3869 async fn handle_open_buffer(
3870 this: ModelHandle<Self>,
3871 envelope: TypedEnvelope<proto::OpenBuffer>,
3872 _: Arc<Client>,
3873 mut cx: AsyncAppContext,
3874 ) -> Result<proto::OpenBufferResponse> {
3875 let peer_id = envelope.original_sender_id()?;
3876 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3877 let open_buffer = this.update(&mut cx, |this, cx| {
3878 this.open_buffer(
3879 ProjectPath {
3880 worktree_id,
3881 path: PathBuf::from(envelope.payload.path).into(),
3882 },
3883 cx,
3884 )
3885 });
3886
3887 let buffer = open_buffer.await?;
3888 this.update(&mut cx, |this, cx| {
3889 Ok(proto::OpenBufferResponse {
3890 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3891 })
3892 })
3893 }
3894
3895 fn serialize_project_transaction_for_peer(
3896 &mut self,
3897 project_transaction: ProjectTransaction,
3898 peer_id: PeerId,
3899 cx: &AppContext,
3900 ) -> proto::ProjectTransaction {
3901 let mut serialized_transaction = proto::ProjectTransaction {
3902 buffers: Default::default(),
3903 transactions: Default::default(),
3904 };
3905 for (buffer, transaction) in project_transaction.0 {
3906 serialized_transaction
3907 .buffers
3908 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3909 serialized_transaction
3910 .transactions
3911 .push(language::proto::serialize_transaction(&transaction));
3912 }
3913 serialized_transaction
3914 }
3915
    /// Reconstructs a `ProjectTransaction` received from a peer.
    ///
    /// Resolves each referenced buffer, then waits until every edit in each
    /// transaction has been applied locally before (optionally) pushing the
    /// transactions onto the buffers' undo histories.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel lists.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            // Only after all buffers exist, wait for their edits to land so
            // the returned transaction is fully applied locally.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
3949
3950 fn serialize_buffer_for_peer(
3951 &mut self,
3952 buffer: &ModelHandle<Buffer>,
3953 peer_id: PeerId,
3954 cx: &AppContext,
3955 ) -> proto::Buffer {
3956 let buffer_id = buffer.read(cx).remote_id();
3957 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3958 if shared_buffers.insert(buffer_id) {
3959 proto::Buffer {
3960 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3961 }
3962 } else {
3963 proto::Buffer {
3964 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3965 }
3966 }
3967 }
3968
    /// Resolves a wire-format buffer into a local buffer model.
    ///
    /// An `Id` variant refers to a buffer whose state was sent previously:
    /// the task waits (via the `opened_buffer` watch channel) until that
    /// buffer is registered locally. A `State` variant carries the full
    /// buffer contents and is instantiated and registered here.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, sleeping on the watch channel
                    // between attempts; each registration wakes us up.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Attach the buffer's file, if any, resolving its worktree.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting on the `Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4027
    /// Reconstructs a `Symbol` from its wire representation, recomputing its
    /// display label with the local language registry when the language is
    /// known.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let language = self
            .languages
            .get_language(&serialized_symbol.language_name);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): transmuting a raw integer received from the network
        // into a symbol-kind enum is unsound if a peer sends an out-of-range
        // discriminant — consider validating the value instead of
        // transmuting. TODO confirm the wire format guarantees valid kinds.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        Ok(Symbol {
            source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
            worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
            language_name: serialized_symbol.language_name.clone(),
            // Fall back to a plain label when no language can style the symbol.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path: PathBuf::from(serialized_symbol.path),
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4056
4057 async fn handle_buffer_saved(
4058 this: ModelHandle<Self>,
4059 envelope: TypedEnvelope<proto::BufferSaved>,
4060 _: Arc<Client>,
4061 mut cx: AsyncAppContext,
4062 ) -> Result<()> {
4063 let version = deserialize_version(envelope.payload.version);
4064 let mtime = envelope
4065 .payload
4066 .mtime
4067 .ok_or_else(|| anyhow!("missing mtime"))?
4068 .into();
4069
4070 this.update(&mut cx, |this, cx| {
4071 let buffer = this
4072 .opened_buffers
4073 .get(&envelope.payload.buffer_id)
4074 .and_then(|buffer| buffer.upgrade(cx));
4075 if let Some(buffer) = buffer {
4076 buffer.update(cx, |buffer, cx| {
4077 buffer.did_save(version, mtime, None, cx);
4078 });
4079 }
4080 Ok(())
4081 })
4082 }
4083
4084 async fn handle_buffer_reloaded(
4085 this: ModelHandle<Self>,
4086 envelope: TypedEnvelope<proto::BufferReloaded>,
4087 _: Arc<Client>,
4088 mut cx: AsyncAppContext,
4089 ) -> Result<()> {
4090 let payload = envelope.payload.clone();
4091 let version = deserialize_version(payload.version);
4092 let mtime = payload
4093 .mtime
4094 .ok_or_else(|| anyhow!("missing mtime"))?
4095 .into();
4096 this.update(&mut cx, |this, cx| {
4097 let buffer = this
4098 .opened_buffers
4099 .get(&payload.buffer_id)
4100 .and_then(|buffer| buffer.upgrade(cx));
4101 if let Some(buffer) = buffer {
4102 buffer.update(cx, |buffer, cx| {
4103 buffer.did_reload(version, mtime, cx);
4104 });
4105 }
4106 Ok(())
4107 })
4108 }
4109
    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning at most `max_results` matches. The matching itself runs on
    /// the background executor and can be aborted via `cancel_flag`.
    pub fn match_paths<'a>(
        &self,
        query: &'a str,
        include_ignored: bool,
        smart_case: bool,
        max_results: usize,
        cancel_flag: &'a AtomicBool,
        cx: &AppContext,
    ) -> impl 'a + Future<Output = Vec<PathMatch>> {
        let worktrees = self
            .worktrees(cx)
            .filter(|worktree| worktree.read(cx).is_visible())
            .collect::<Vec<_>>();
        // Prefix matches with the root name only when multiple worktrees
        // exist; a single root is unambiguous.
        let include_root_name = worktrees.len() > 1;
        // Snapshot each worktree now so the background match sees a
        // consistent view without holding references into the app state.
        let candidate_sets = worktrees
            .into_iter()
            .map(|worktree| CandidateSet {
                snapshot: worktree.read(cx).snapshot(),
                include_ignored,
                include_root_name,
            })
            .collect::<Vec<_>>();

        let background = cx.background().clone();
        async move {
            fuzzy::match_paths(
                candidate_sets.as_slice(),
                query,
                smart_case,
                max_results,
                cancel_flag,
                background,
            )
            .await
        }
    }
4146
    /// Converts a batch of LSP `TextEdit`s into anchor-ranged edits against
    /// the buffer snapshot corresponding to `version` (or the current
    /// snapshot when `version` is `None`).
    ///
    /// Returns an error if any edit falls outside the snapshot's bounds.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop merging unless the gap is exactly one newline:
                        // the next edit must start at column 0 of the very
                        // next row, and the current edit must end at the end
                        // of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose endpoints are not valid positions in
                // the snapshot (clipping would move them).
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether an unchanged run separates us from the
                    // previous delete/insert, i.e. whether to start a new edit
                    // or extend the last one.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same point.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4249
4250 fn buffer_snapshot_for_lsp_version(
4251 &mut self,
4252 buffer: &ModelHandle<Buffer>,
4253 version: Option<i32>,
4254 cx: &AppContext,
4255 ) -> Result<TextBufferSnapshot> {
4256 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4257
4258 if let Some(version) = version {
4259 let buffer_id = buffer.read(cx).remote_id();
4260 let snapshots = self
4261 .buffer_snapshots
4262 .get_mut(&buffer_id)
4263 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4264 let mut found_snapshot = None;
4265 snapshots.retain(|(snapshot_version, snapshot)| {
4266 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4267 false
4268 } else {
4269 if *snapshot_version == version {
4270 found_snapshot = Some(snapshot.clone());
4271 }
4272 true
4273 }
4274 });
4275
4276 found_snapshot.ok_or_else(|| {
4277 anyhow!(
4278 "snapshot not found for buffer {} at version {}",
4279 buffer_id,
4280 version
4281 )
4282 })
4283 } else {
4284 Ok((buffer.read(cx)).text_snapshot())
4285 }
4286 }
4287
4288 fn language_server_for_buffer(
4289 &self,
4290 buffer: &Buffer,
4291 cx: &AppContext,
4292 ) -> Option<&Arc<LanguageServer>> {
4293 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4294 let worktree_id = file.worktree_id(cx);
4295 self.language_servers.get(&(worktree_id, language.name()))
4296 } else {
4297 None
4298 }
4299 }
4300}
4301
4302impl WorktreeHandle {
4303 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4304 match self {
4305 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4306 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4307 }
4308 }
4309}
4310
4311impl OpenBuffer {
4312 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4313 match self {
4314 OpenBuffer::Strong(handle) => Some(handle.clone()),
4315 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4316 OpenBuffer::Loading(_) => None,
4317 }
4318 }
4319}
4320
/// A set of file paths drawn from a single worktree snapshot, used as the
/// input to fuzzy path matching.
struct CandidateSet {
    // Immutable view of the worktree whose files are candidates.
    snapshot: Snapshot,
    // Whether ignored files are included as candidates.
    include_ignored: bool,
    // Whether matches should carry the worktree's root name as a prefix.
    include_root_name: bool,
}
4326
4327impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4328 type Candidates = CandidateSetIter<'a>;
4329
4330 fn id(&self) -> usize {
4331 self.snapshot.id().to_usize()
4332 }
4333
4334 fn len(&self) -> usize {
4335 if self.include_ignored {
4336 self.snapshot.file_count()
4337 } else {
4338 self.snapshot.visible_file_count()
4339 }
4340 }
4341
4342 fn prefix(&self) -> Arc<str> {
4343 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4344 self.snapshot.root_name().into()
4345 } else if self.include_root_name {
4346 format!("{}/", self.snapshot.root_name()).into()
4347 } else {
4348 "".into()
4349 }
4350 }
4351
4352 fn candidates(&'a self, start: usize) -> Self::Candidates {
4353 CandidateSetIter {
4354 traversal: self.snapshot.files(self.include_ignored, start),
4355 }
4356 }
4357}
4358
/// Iterator adapter turning a worktree file traversal into fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4362
4363impl<'a> Iterator for CandidateSetIter<'a> {
4364 type Item = PathMatchCandidate<'a>;
4365
4366 fn next(&mut self) -> Option<Self::Item> {
4367 self.traversal.next().map(|entry| {
4368 if let EntryKind::File(char_bag) = entry.kind {
4369 PathMatchCandidate {
4370 path: &entry.path,
4371 char_bag,
4372 }
4373 } else {
4374 unreachable!()
4375 }
4376 })
4377 }
4378}
4379
4380impl Entity for Project {
4381 type Event = Event;
4382
4383 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4384 match &self.client_state {
4385 ProjectClientState::Local { remote_id_rx, .. } => {
4386 if let Some(project_id) = *remote_id_rx.borrow() {
4387 self.client
4388 .send(proto::UnregisterProject { project_id })
4389 .log_err();
4390 }
4391 }
4392 ProjectClientState::Remote { remote_id, .. } => {
4393 self.client
4394 .send(proto::LeaveProject {
4395 project_id: *remote_id,
4396 })
4397 .log_err();
4398 }
4399 }
4400 }
4401
4402 fn app_will_quit(
4403 &mut self,
4404 _: &mut MutableAppContext,
4405 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4406 let shutdown_futures = self
4407 .language_servers
4408 .drain()
4409 .filter_map(|(_, server)| server.shutdown())
4410 .collect::<Vec<_>>();
4411 Some(
4412 async move {
4413 futures::future::join_all(shutdown_futures).await;
4414 }
4415 .boxed(),
4416 )
4417 }
4418}
4419
4420impl Collaborator {
4421 fn from_proto(
4422 message: proto::Collaborator,
4423 user_store: &ModelHandle<UserStore>,
4424 cx: &mut AsyncAppContext,
4425 ) -> impl Future<Output = Result<Self>> {
4426 let user = user_store.update(cx, |user_store, cx| {
4427 user_store.fetch_user(message.user_id, cx)
4428 });
4429
4430 async move {
4431 Ok(Self {
4432 peer_id: PeerId(message.peer_id),
4433 user: user.await?,
4434 replica_id: message.replica_id as ReplicaId,
4435 })
4436 }
4437 }
4438}
4439
4440impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4441 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4442 Self {
4443 worktree_id,
4444 path: path.as_ref().into(),
4445 }
4446 }
4447}
4448
4449impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4450 fn from(options: lsp::CreateFileOptions) -> Self {
4451 Self {
4452 overwrite: options.overwrite.unwrap_or(false),
4453 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4454 }
4455 }
4456}
4457
4458impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4459 fn from(options: lsp::RenameFileOptions) -> Self {
4460 Self {
4461 overwrite: options.overwrite.unwrap_or(false),
4462 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4463 }
4464 }
4465}
4466
4467impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4468 fn from(options: lsp::DeleteFileOptions) -> Self {
4469 Self {
4470 recursive: options.recursive.unwrap_or(false),
4471 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4472 }
4473 }
4474}
4475
/// Converts a `Symbol` into its protobuf wire representation for sending to
/// remote collaborators.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_name: symbol.language_name.clone(),
        name: symbol.name.clone(),
        // SAFETY: reinterprets the LSP symbol kind's bits as the proto kind
        // field. This assumes the two types have the same size and layout —
        // NOTE(review): confirm they stay in sync across lsp-types upgrades.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Lossy conversion: non-UTF-8 path bytes are replaced.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4495
/// Computes the relative path from `base` to `path`, inserting `..`
/// components wherever `path` does not live underneath `base`.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_iter = path.components();
    let mut base_iter = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (path_iter.next(), base_iter.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // Base exhausted: append the remainder of `path` verbatim.
            (Some(component), None) => {
                result.push(component);
                result.extend(path_iter.by_ref());
                break;
            }
            // Path exhausted: climb up once per remaining base component.
            (None, _) => result.push(Component::ParentDir),
            // Matching leading components are skipped entirely.
            (Some(a), Some(b)) if result.is_empty() && a == b => {}
            // A `.` in the base consumes nothing from the relative result.
            (Some(a), Some(b)) if b == Component::CurDir => result.push(a),
            // First divergence: climb out of the rest of `base`, then descend
            // into the rest of `path`.
            (Some(a), Some(_)) => {
                result.push(Component::ParentDir);
                result.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                result.push(a);
                result.extend(path_iter.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
4524
4525#[cfg(test)]
4526mod tests {
4527 use super::{Event, *};
4528 use fs::RealFs;
4529 use futures::StreamExt;
4530 use gpui::test::subscribe;
4531 use language::{
4532 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4533 ToPoint,
4534 };
4535 use lsp::Url;
4536 use serde_json::json;
4537 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4538 use unindent::Unindent as _;
4539 use util::test::temp_tree;
4540 use worktree::WorktreeHandle as _;
4541
    // Verifies that scanning a real directory tree (including symlinks)
    // populates the worktree and that fuzzy path matching works against it.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        // Build a temporary directory on the real file system.
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Symlink to the root, and a symlinked directory inside the root.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        // Open the worktree through the symlink to the root.
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying files.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match paths against the query "bna".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4604
    // Verifies that a language server is started per (worktree, language)
    // pair, and that open/change/save/close notifications are routed only to
    // servers whose language matches the buffer (save goes to all servers).
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Two fake servers with different completion trigger characters, so
        // we can observe which server configured each buffer.
        let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
        let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
        rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });
        json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });

        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(rust_lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));
        let json_language = Arc::new(Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                language_server: Some(json_lsp_config),
                ..Default::default()
            },
            None,
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(rust_language);
            project.languages.add(json_language);
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // Another language server is started up, and it is notified about
        // all three open buffers.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
4824
    // Verifies that nested disk-based-diagnostics progress notifications are
    // collapsed into a single Started/Updated/Finished event sequence, and
    // that published diagnostics land in the buffer.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested progress: starting again while already in progress must not
        // emit another Started event.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Only once all nested progress spans end does the project report
        // Updated and Finished.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is visible in the newly opened buffer.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
4943
    // Verifies that diagnostics published against an older buffer version are
    // translated through the edits made since that version, including
    // overlapping and out-of-order diagnostics.
    #[gpui::test]
    async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        lsp_config
            .disk_based_diagnostic_sources
            .insert("disk".to_string());
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() { A }
            fn b() { BB }
            fn c() { CCC }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        let open_notification = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await;

        // Edit the buffer, moving the content down
        buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
        let change_notification_1 = fake_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await;
        assert!(
            change_notification_1.text_document.version > open_notification.text_document.version
        );

        // Report some diagnostics for the initial version of the buffer
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        source: Some("disk".to_string()),
                        message: "undefined variable 'CCC'".to_string(),
                        ..Default::default()
                    },
                ],
            },
        );

        // The diagnostics have moved down since they were created.
        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Point::new(4, 9)..Point::new(4, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'CCC'".to_string(),
                            is_disk_based: true,
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        }
                    }
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, 0..buffer.len()),
                [
                    ("\n\nfn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn b() { ".to_string(), None),
                    ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
                [
                    ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
                ]
            );
        });

        // Ensure overlapping diagnostics are highlighted correctly.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "unreachable statement".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
                [
                    ("fn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
                [
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
        });

        // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
        // changes since the last save.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
            buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
        });
        let change_notification_2 =
            fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
        assert!(
            change_notification_2.await.text_document.version
                > change_notification_1.text_document.version
        );

        // Handle out-of-order diagnostics
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 21)..Point::new(2, 22),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
        });
    }
5236
    // Verifies how zero-width diagnostic ranges are rendered: extended forward
    // to cover the next character, or backward at end of line.
    #[gpui::test]
    async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Inject two diagnostics with empty ranges directly, bypassing any
        // language server.
        project.update(cx, |project, cx| {
            project
                .update_buffer_diagnostics(
                    &buffer,
                    vec![
                        DiagnosticEntry {
                            range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    None,
                    cx,
                )
                .unwrap();
        });

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        buffer.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let one = ", None),
                    (";", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet two =", None),
                    (" ", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet three = 3;\n", None)
                ]
            );
        });
    }
5315
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::edits_from_lsp` rebases LSP edits that were
    // computed against an *older* document version onto the current buffer
    // contents, after the user has edited the buffer concurrently.
    cx.foreground().forbid_parking();

    let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
    let language = Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            language_server: Some(lsp_config),
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    ));

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| project.languages.add(language));

    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the language server saw on open; the LSP
    // edits below are expressed relative to this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All positions in these edits refer to the *original* ten-line document
    // (via `lsp_document_version`), not the buffer's current contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's concurrent edits
    // while still landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
5488
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::edits_from_lsp` minimizes a sprawling,
    // whole-file LSP diff down to the small set of spans that actually change.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits collapse into exactly two spans:
        // the rewritten import list and the deleted `use a::c;` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
5618
5619 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5620 buffer: &Buffer,
5621 range: Range<T>,
5622 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5623 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5624 for chunk in buffer.snapshot().chunks(range, true) {
5625 if chunks.last().map_or(false, |prev_chunk| {
5626 prev_chunk.1 == chunk.diagnostic_severity
5627 }) {
5628 chunks.last_mut().unwrap().0.push_str(chunk.text);
5629 } else {
5630 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5631 }
5632 }
5633 chunks
5634 }
5635
5636 #[gpui::test]
5637 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5638 let dir = temp_tree(json!({
5639 "root": {
5640 "dir1": {},
5641 "dir2": {
5642 "dir3": {}
5643 }
5644 }
5645 }));
5646
5647 let project = Project::test(Arc::new(RealFs), cx);
5648 let (tree, _) = project
5649 .update(cx, |project, cx| {
5650 project.find_or_create_local_worktree(&dir.path(), true, cx)
5651 })
5652 .await
5653 .unwrap();
5654
5655 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5656 .await;
5657
5658 let cancel_flag = Default::default();
5659 let results = project
5660 .read_with(cx, |project, cx| {
5661 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5662 })
5663 .await;
5664
5665 assert!(results.is_empty());
5666 }
5667
#[gpui::test]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: the target file lives outside
    // the opened worktree, so the project must transparently add it as an
    // invisible worktree, and release that worktree once the last handle to
    // the definition's buffer is dropped.
    let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
    let language = Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            language_server: Some(language_server_config),
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    ));

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| {
        Arc::get_mut(&mut project.languages).unwrap().add(language);
    });

    // Only `b.rs` is opened as a (single-file) worktree; `a.rs` is not part
    // of any worktree yet.
    let (tree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir/b.rs", true, cx)
        })
        .await
        .unwrap();
    let worktree_id = tree.read_with(cx, |tree, _| tree.id());
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: Path::new("").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();

    // Stub the server: any definition request for `b.rs` at offset 22 points
    // into `a.rs`, which lies outside the visible worktree.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
            lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as a second, non-visible worktree to host the
        // definition's buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the last handle to the target buffer releases the invisible
    // worktree that was created for it.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: all worktrees of `project` as `(absolute path, is_visible)`.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
5781
5782 #[gpui::test]
5783 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5784 let fs = FakeFs::new(cx.background());
5785 fs.insert_tree(
5786 "/dir",
5787 json!({
5788 "file1": "the old contents",
5789 }),
5790 )
5791 .await;
5792
5793 let project = Project::test(fs.clone(), cx);
5794 let worktree_id = project
5795 .update(cx, |p, cx| {
5796 p.find_or_create_local_worktree("/dir", true, cx)
5797 })
5798 .await
5799 .unwrap()
5800 .0
5801 .read_with(cx, |tree, _| tree.id());
5802
5803 let buffer = project
5804 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5805 .await
5806 .unwrap();
5807 buffer
5808 .update(cx, |buffer, cx| {
5809 assert_eq!(buffer.text(), "the old contents");
5810 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5811 buffer.save(cx)
5812 })
5813 .await
5814 .unwrap();
5815
5816 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5817 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5818 }
5819
5820 #[gpui::test]
5821 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5822 let fs = FakeFs::new(cx.background());
5823 fs.insert_tree(
5824 "/dir",
5825 json!({
5826 "file1": "the old contents",
5827 }),
5828 )
5829 .await;
5830
5831 let project = Project::test(fs.clone(), cx);
5832 let worktree_id = project
5833 .update(cx, |p, cx| {
5834 p.find_or_create_local_worktree("/dir/file1", true, cx)
5835 })
5836 .await
5837 .unwrap()
5838 .0
5839 .read_with(cx, |tree, _| tree.id());
5840
5841 let buffer = project
5842 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5843 .await
5844 .unwrap();
5845 buffer
5846 .update(cx, |buffer, cx| {
5847 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5848 buffer.save(cx)
5849 })
5850 .await
5851 .unwrap();
5852
5853 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5854 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5855 }
5856
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // Verifies "save as": an untitled buffer saved to a path becomes clean,
    // is associated with the target file, and is registered so that opening
    // the same path afterwards yields the very same buffer.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), cx);
    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

    // Start from an in-memory buffer with no backing file.
    let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
    buffer.update(cx, |buffer, cx| {
        buffer.edit([0..0], "abc", cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    project
        .update(cx, |project, cx| {
            project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
    // After saving, the buffer is backed by the new file and no longer dirty.
    buffer.read_with(cx, |buffer, cx| {
        assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Opening the saved path must dedupe to the existing buffer.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "file1"), cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5898
5899 #[gpui::test(retries = 5)]
5900 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5901 let dir = temp_tree(json!({
5902 "a": {
5903 "file1": "",
5904 "file2": "",
5905 "file3": "",
5906 },
5907 "b": {
5908 "c": {
5909 "file4": "",
5910 "file5": "",
5911 }
5912 }
5913 }));
5914
5915 let project = Project::test(Arc::new(RealFs), cx);
5916 let rpc = project.read_with(cx, |p, _| p.client.clone());
5917
5918 let (tree, _) = project
5919 .update(cx, |p, cx| {
5920 p.find_or_create_local_worktree(dir.path(), true, cx)
5921 })
5922 .await
5923 .unwrap();
5924 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5925
5926 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5927 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5928 async move { buffer.await.unwrap() }
5929 };
5930 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5931 tree.read_with(cx, |tree, _| {
5932 tree.entry_for_path(path)
5933 .expect(&format!("no entry for path {}", path))
5934 .id
5935 })
5936 };
5937
5938 let buffer2 = buffer_for_path("a/file2", cx).await;
5939 let buffer3 = buffer_for_path("a/file3", cx).await;
5940 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5941 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5942
5943 let file2_id = id_for_path("a/file2", &cx);
5944 let file3_id = id_for_path("a/file3", &cx);
5945 let file4_id = id_for_path("b/c/file4", &cx);
5946
5947 // Wait for the initial scan.
5948 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5949 .await;
5950
5951 // Create a remote copy of this worktree.
5952 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5953 let (remote, load_task) = cx.update(|cx| {
5954 Worktree::remote(
5955 1,
5956 1,
5957 initial_snapshot.to_proto(&Default::default(), true),
5958 rpc.clone(),
5959 cx,
5960 )
5961 });
5962 load_task.await;
5963
5964 cx.read(|cx| {
5965 assert!(!buffer2.read(cx).is_dirty());
5966 assert!(!buffer3.read(cx).is_dirty());
5967 assert!(!buffer4.read(cx).is_dirty());
5968 assert!(!buffer5.read(cx).is_dirty());
5969 });
5970
5971 // Rename and delete files and directories.
5972 tree.flush_fs_events(&cx).await;
5973 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5974 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5975 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5976 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5977 tree.flush_fs_events(&cx).await;
5978
5979 let expected_paths = vec![
5980 "a",
5981 "a/file1",
5982 "a/file2.new",
5983 "b",
5984 "d",
5985 "d/file3",
5986 "d/file4",
5987 ];
5988
5989 cx.read(|app| {
5990 assert_eq!(
5991 tree.read(app)
5992 .paths()
5993 .map(|p| p.to_str().unwrap())
5994 .collect::<Vec<_>>(),
5995 expected_paths
5996 );
5997
5998 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5999 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6000 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6001
6002 assert_eq!(
6003 buffer2.read(app).file().unwrap().path().as_ref(),
6004 Path::new("a/file2.new")
6005 );
6006 assert_eq!(
6007 buffer3.read(app).file().unwrap().path().as_ref(),
6008 Path::new("d/file3")
6009 );
6010 assert_eq!(
6011 buffer4.read(app).file().unwrap().path().as_ref(),
6012 Path::new("d/file4")
6013 );
6014 assert_eq!(
6015 buffer5.read(app).file().unwrap().path().as_ref(),
6016 Path::new("b/c/file5")
6017 );
6018
6019 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6020 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6021 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6022 assert!(buffer5.read(app).file().unwrap().is_deleted());
6023 });
6024
6025 // Update the remote worktree. Check that it becomes consistent with the
6026 // local worktree.
6027 remote.update(cx, |remote, cx| {
6028 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6029 &initial_snapshot,
6030 1,
6031 1,
6032 true,
6033 );
6034 remote
6035 .as_remote_mut()
6036 .unwrap()
6037 .snapshot
6038 .apply_remote_update(update_message)
6039 .unwrap();
6040
6041 assert_eq!(
6042 remote
6043 .paths()
6044 .map(|p| p.to_str().unwrap())
6045 .collect::<Vec<_>>(),
6046 expected_paths
6047 );
6048 });
6049 }
6050
6051 #[gpui::test]
6052 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6053 let fs = FakeFs::new(cx.background());
6054 fs.insert_tree(
6055 "/the-dir",
6056 json!({
6057 "a.txt": "a-contents",
6058 "b.txt": "b-contents",
6059 }),
6060 )
6061 .await;
6062
6063 let project = Project::test(fs.clone(), cx);
6064 let worktree_id = project
6065 .update(cx, |p, cx| {
6066 p.find_or_create_local_worktree("/the-dir", true, cx)
6067 })
6068 .await
6069 .unwrap()
6070 .0
6071 .read_with(cx, |tree, _| tree.id());
6072
6073 // Spawn multiple tasks to open paths, repeating some paths.
6074 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6075 (
6076 p.open_buffer((worktree_id, "a.txt"), cx),
6077 p.open_buffer((worktree_id, "b.txt"), cx),
6078 p.open_buffer((worktree_id, "a.txt"), cx),
6079 )
6080 });
6081
6082 let buffer_a_1 = buffer_a_1.await.unwrap();
6083 let buffer_a_2 = buffer_a_2.await.unwrap();
6084 let buffer_b = buffer_b.await.unwrap();
6085 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6086 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6087
6088 // There is only one buffer per path.
6089 let buffer_a_id = buffer_a_1.id();
6090 assert_eq!(buffer_a_2.id(), buffer_a_id);
6091
6092 // Open the same path again while it is still open.
6093 drop(buffer_a_1);
6094 let buffer_a_3 = project
6095 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6096 .await
6097 .unwrap();
6098
6099 // There's still only one buffer per path.
6100 assert_eq!(buffer_a_3.id(), buffer_a_id);
6101 }
6102
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-state machine: editing dirties, saving
    // cleans, and deleting the underlying file dirties (emitting a `Dirtied`
    // event only if the buffer wasn't already dirty).
    use std::fs;

    let dir = temp_tree(json!({
        "file1": "abc",
        "file2": "def",
        "file3": "ghi",
    }));

    let project = Project::test(Arc::new(RealFs), cx);
    let (worktree, _) = project
        .update(cx, |p, cx| {
            p.find_or_create_local_worktree(dir.path(), true, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

    worktree.flush_fs_events(&cx).await;
    worktree
        .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
        .await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
        .await
        .unwrap();
    // Accumulates every non-operation event the buffer emits.
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            // Operation events are routine sync traffic; record the rest.
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit(vec![1..2], "", cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::Dirtied]
        );
        events.borrow_mut().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit(vec![1..1], "B", cx);
        buffer.edit(vec![2..2], "D", cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::Dirtied,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // TODO - currently, after restoring the buffer to its
        // previously-saved state, the buffer is still considered dirty.
        buffer.edit([1..3], "", cx);
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
    });

    assert_eq!(*events.borrow(), &[language::Event::Edited]);

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs::remove_file(dir.path().join("file2")).unwrap();
    buffer2.condition(&cx, |b, _| b.is_dirty()).await;
    assert_eq!(
        *events.borrow(),
        &[language::Event::Dirtied, language::Event::FileHandleChanged]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    worktree.flush_fs_events(&cx).await;
    buffer3.update(cx, |buffer, cx| {
        buffer.edit(Some(0..0), "x", cx);
    });
    events.borrow_mut().clear();
    fs::remove_file(dir.path().join("file3")).unwrap();
    buffer3
        .condition(&cx, |_, _| !events.borrow().is_empty())
        .await;
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
6242
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk: a clean
    // buffer silently reloads via a diff; a dirty buffer keeps its edits and
    // is flagged as conflicted instead.
    use std::fs;

    let initial_contents = "aaa\nbbbbb\nc\n";
    let dir = temp_tree(json!({ "the-file": initial_contents }));

    let project = Project::test(Arc::new(RealFs), cx);
    let (worktree, _) = project
        .update(cx, |p, cx| {
            p.find_or_create_local_worktree(dir.path(), true, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    worktree
        .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
        .await;

    let abs_path = dir.path().join("the-file");
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
        .await
        .unwrap();

    // TODO
    // Add a cursor on each row.
    // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
    //     assert!(!buffer.is_dirty());
    //     buffer.add_selection_set(
    //         &(0..3)
    //             .map(|row| Selection {
    //                 id: row as usize,
    //                 start: Point::new(row, 1),
    //                 end: Point::new(row, 1),
    //                 reversed: false,
    //                 goal: SelectionGoal::None,
    //             })
    //             .collect::<Vec<_>>(),
    //         cx,
    //     )
    // });

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs::write(&abs_path, new_contents).unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    buffer
        .condition(&cx, |buffer, _| buffer.text() == new_contents)
        .await;

    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        // A reload from disk leaves the buffer clean and conflict-free.
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // TODO
        // let cursor_positions = buffer
        //     .selection_set(selection_set_id)
        //     .unwrap()
        //     .selections::<Point>(&*buffer)
        //     .map(|selection| {
        //         assert_eq!(selection.start, selection.end);
        //         selection.start
        //     })
        //     .collect::<Vec<_>>();
        // assert_eq!(
        //     cursor_positions,
        //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
        // );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![0..0], " ", cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    buffer
        .condition(&cx, |buffer, _| buffer.has_conflict())
        .await;
}
6340
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics connected via `related_information` are
    // grouped: each group has one primary entry plus its hints, sharing a
    // `group_id`, and `diagnostic_group` returns a group's entries together.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), cx);
    let (worktree, _) = project
        .update(cx, |p, cx| {
            p.find_or_create_local_worktree("/the-dir", true, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
        .await
        .unwrap();

    // Two logical errors, each accompanied by HINT diagnostics that point
    // back at the primary via "original diagnostic" related-information —
    // mirroring how rust-analyzer publishes grouped diagnostics.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(message, &Default::default(), cx)
        })
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order: group 0 is the warning plus its hint,
    // group 1 is the error plus its two hints; exactly one primary per group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Querying by group id yields only that group's entries.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
6601
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // Verifies the two-step LSP rename flow: `prepare_rename` resolves the
        // range of the symbol under the cursor, then `perform_rename` applies a
        // multi-file `WorkspaceEdit` returned by the server.
        cx.foreground().forbid_parking();

        // Register a fake "Rust" language whose language server is the fake
        // server harness, so we can hand-serve LSP responses below.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        // Two files: `one.rs` defines `ONE`, `two.rs` references it twice.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        // Wait for the initial filesystem scan before opening buffers.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Opening a `.rs` buffer starts the fake language server.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        // Kick off `prepare_rename` at offset 7 (inside `ONE`), then serve the
        // request from the fake server. The request must be issued before
        // `handle_request` is awaited, since the server only responds to an
        // in-flight request.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        // The server's LSP range (0,6)..(0,9) maps back to buffer offsets 6..9,
        // i.e. the identifier `ONE`.
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Now perform the rename to "THREE" and serve a WorkspaceEdit touching
        // both files — including `two.rs`, which was never opened explicitly.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params, _| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The rename result is a transaction mapping each edited buffer to its
        // undo entry; both files should be present with the edits applied.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        // The one remaining entry is the implicitly-opened `two.rs` buffer.
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
6745
6746 #[gpui::test]
6747 async fn test_search(cx: &mut gpui::TestAppContext) {
6748 let fs = FakeFs::new(cx.background());
6749 fs.insert_tree(
6750 "/dir",
6751 json!({
6752 "one.rs": "const ONE: usize = 1;",
6753 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6754 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6755 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6756 }),
6757 )
6758 .await;
6759 let project = Project::test(fs.clone(), cx);
6760 let (tree, _) = project
6761 .update(cx, |project, cx| {
6762 project.find_or_create_local_worktree("/dir", true, cx)
6763 })
6764 .await
6765 .unwrap();
6766 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6767 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6768 .await;
6769
6770 assert_eq!(
6771 search(&project, SearchQuery::text("TWO", false, true), cx)
6772 .await
6773 .unwrap(),
6774 HashMap::from_iter([
6775 ("two.rs".to_string(), vec![6..9]),
6776 ("three.rs".to_string(), vec![37..40])
6777 ])
6778 );
6779
6780 let buffer_4 = project
6781 .update(cx, |project, cx| {
6782 project.open_buffer((worktree_id, "four.rs"), cx)
6783 })
6784 .await
6785 .unwrap();
6786 buffer_4.update(cx, |buffer, cx| {
6787 buffer.edit([20..28, 31..43], "two::TWO", cx);
6788 });
6789
6790 assert_eq!(
6791 search(&project, SearchQuery::text("TWO", false, true), cx)
6792 .await
6793 .unwrap(),
6794 HashMap::from_iter([
6795 ("two.rs".to_string(), vec![6..9]),
6796 ("three.rs".to_string(), vec![37..40]),
6797 ("four.rs".to_string(), vec![25..28, 36..39])
6798 ])
6799 );
6800
6801 async fn search(
6802 project: &ModelHandle<Project>,
6803 query: SearchQuery,
6804 cx: &mut gpui::TestAppContext,
6805 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6806 let results = project
6807 .update(cx, |project, cx| project.search(query, cx))
6808 .await?;
6809
6810 Ok(results
6811 .into_iter()
6812 .map(|(buffer, ranges)| {
6813 buffer.read_with(cx, |buffer, _| {
6814 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6815 let ranges = ranges
6816 .into_iter()
6817 .map(|range| range.to_offset(buffer))
6818 .collect::<Vec<_>>();
6819 (path, ranges)
6820 })
6821 })
6822 .collect())
6823 }
6824 }
6825}