1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{atomic::AtomicBool, Arc},
42 time::Instant,
43};
44use util::{post_inc, ResultExt, TryFutureExt as _};
45
46pub use fs::*;
47pub use worktree::*;
48
49pub struct Project {
50 worktrees: Vec<WorktreeHandle>,
51 active_entry: Option<ProjectEntry>,
52 languages: Arc<LanguageRegistry>,
53 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
54 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
55 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
56 language_server_settings: Arc<Mutex<serde_json::Value>>,
57 next_language_server_id: usize,
58 client: Arc<client::Client>,
59 user_store: ModelHandle<UserStore>,
60 fs: Arc<dyn Fs>,
61 client_state: ProjectClientState,
62 collaborators: HashMap<PeerId, Collaborator>,
63 subscriptions: Vec<client::Subscription>,
64 language_servers_with_diagnostics_running: isize,
65 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
66 shared_buffers: HashMap<PeerId, HashSet<u64>>,
67 loading_buffers: HashMap<
68 ProjectPath,
69 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
70 >,
71 loading_local_worktrees:
72 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
73 opened_buffers: HashMap<u64, OpenBuffer>,
74 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
75 nonce: u128,
76}
77
78enum OpenBuffer {
79 Strong(ModelHandle<Buffer>),
80 Weak(WeakModelHandle<Buffer>),
81 Loading(Vec<Operation>),
82}
83
84enum WorktreeHandle {
85 Strong(ModelHandle<Worktree>),
86 Weak(WeakModelHandle<Worktree>),
87}
88
89enum ProjectClientState {
90 Local {
91 is_shared: bool,
92 remote_id_tx: watch::Sender<Option<u64>>,
93 remote_id_rx: watch::Receiver<Option<u64>>,
94 _maintain_remote_id_task: Task<Option<()>>,
95 },
96 Remote {
97 sharing_has_stopped: bool,
98 remote_id: u64,
99 replica_id: ReplicaId,
100 _detect_unshare_task: Task<Option<()>>,
101 },
102}
103
104#[derive(Clone, Debug)]
105pub struct Collaborator {
106 pub user: Arc<User>,
107 pub peer_id: PeerId,
108 pub replica_id: ReplicaId,
109}
110
111#[derive(Clone, Debug, PartialEq)]
112pub enum Event {
113 ActiveEntryChanged(Option<ProjectEntry>),
114 WorktreeRemoved(WorktreeId),
115 DiskBasedDiagnosticsStarted,
116 DiskBasedDiagnosticsUpdated,
117 DiskBasedDiagnosticsFinished,
118 DiagnosticsUpdated(ProjectPath),
119}
120
121enum LanguageServerEvent {
122 WorkStart {
123 token: String,
124 },
125 WorkProgress {
126 token: String,
127 progress: LanguageServerProgress,
128 },
129 WorkEnd {
130 token: String,
131 },
132 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
133}
134
135pub struct LanguageServerStatus {
136 pub name: String,
137 pub pending_work: BTreeMap<String, LanguageServerProgress>,
138 pending_diagnostic_updates: isize,
139}
140
141#[derive(Clone, Debug)]
142pub struct LanguageServerProgress {
143 pub message: Option<String>,
144 pub percentage: Option<usize>,
145 pub last_update_at: Instant,
146}
147
148#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
149pub struct ProjectPath {
150 pub worktree_id: WorktreeId,
151 pub path: Arc<Path>,
152}
153
154#[derive(Clone, Debug, Default, PartialEq)]
155pub struct DiagnosticSummary {
156 pub error_count: usize,
157 pub warning_count: usize,
158 pub info_count: usize,
159 pub hint_count: usize,
160}
161
162#[derive(Debug)]
163pub struct Location {
164 pub buffer: ModelHandle<Buffer>,
165 pub range: Range<language::Anchor>,
166}
167
168#[derive(Debug)]
169pub struct DocumentHighlight {
170 pub range: Range<language::Anchor>,
171 pub kind: DocumentHighlightKind,
172}
173
174#[derive(Clone, Debug)]
175pub struct Symbol {
176 pub source_worktree_id: WorktreeId,
177 pub worktree_id: WorktreeId,
178 pub language_name: String,
179 pub path: PathBuf,
180 pub label: CodeLabel,
181 pub name: String,
182 pub kind: lsp::SymbolKind,
183 pub range: Range<PointUtf16>,
184 pub signature: [u8; 32],
185}
186
187#[derive(Default)]
188pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
225#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
226pub struct ProjectEntry {
227 pub worktree_id: WorktreeId,
228 pub entry_id: usize,
229}
230
231impl Project {
232 pub fn init(client: &Arc<Client>) {
233 client.add_entity_message_handler(Self::handle_add_collaborator);
234 client.add_entity_message_handler(Self::handle_buffer_reloaded);
235 client.add_entity_message_handler(Self::handle_buffer_saved);
236 client.add_entity_message_handler(Self::handle_start_language_server);
237 client.add_entity_message_handler(Self::handle_update_language_server);
238 client.add_entity_message_handler(Self::handle_remove_collaborator);
239 client.add_entity_message_handler(Self::handle_register_worktree);
240 client.add_entity_message_handler(Self::handle_unregister_worktree);
241 client.add_entity_message_handler(Self::handle_unshare_project);
242 client.add_entity_message_handler(Self::handle_update_buffer_file);
243 client.add_entity_message_handler(Self::handle_update_buffer);
244 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
245 client.add_entity_message_handler(Self::handle_update_worktree);
246 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
247 client.add_entity_request_handler(Self::handle_apply_code_action);
248 client.add_entity_request_handler(Self::handle_format_buffers);
249 client.add_entity_request_handler(Self::handle_get_code_actions);
250 client.add_entity_request_handler(Self::handle_get_completions);
251 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
252 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
253 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
254 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
255 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
256 client.add_entity_request_handler(Self::handle_search_project);
257 client.add_entity_request_handler(Self::handle_get_project_symbols);
258 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
259 client.add_entity_request_handler(Self::handle_open_buffer);
260 client.add_entity_request_handler(Self::handle_save_buffer);
261 }
262
263 pub fn local(
264 client: Arc<Client>,
265 user_store: ModelHandle<UserStore>,
266 languages: Arc<LanguageRegistry>,
267 fs: Arc<dyn Fs>,
268 cx: &mut MutableAppContext,
269 ) -> ModelHandle<Self> {
270 cx.add_model(|cx: &mut ModelContext<Self>| {
271 let (remote_id_tx, remote_id_rx) = watch::channel();
272 let _maintain_remote_id_task = cx.spawn_weak({
273 let rpc = client.clone();
274 move |this, mut cx| {
275 async move {
276 let mut status = rpc.status();
277 while let Some(status) = status.next().await {
278 if let Some(this) = this.upgrade(&cx) {
279 let remote_id = if status.is_connected() {
280 let response = rpc.request(proto::RegisterProject {}).await?;
281 Some(response.project_id)
282 } else {
283 None
284 };
285
286 if let Some(project_id) = remote_id {
287 let mut registrations = Vec::new();
288 this.update(&mut cx, |this, cx| {
289 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
290 registrations.push(worktree.update(
291 cx,
292 |worktree, cx| {
293 let worktree = worktree.as_local_mut().unwrap();
294 worktree.register(project_id, cx)
295 },
296 ));
297 }
298 });
299 for registration in registrations {
300 registration.await?;
301 }
302 }
303 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
304 }
305 }
306 Ok(())
307 }
308 .log_err()
309 }
310 });
311
312 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
313 Self {
314 worktrees: Default::default(),
315 collaborators: Default::default(),
316 opened_buffers: Default::default(),
317 shared_buffers: Default::default(),
318 loading_buffers: Default::default(),
319 loading_local_worktrees: Default::default(),
320 buffer_snapshots: Default::default(),
321 client_state: ProjectClientState::Local {
322 is_shared: false,
323 remote_id_tx,
324 remote_id_rx,
325 _maintain_remote_id_task,
326 },
327 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
328 subscriptions: Vec::new(),
329 active_entry: None,
330 languages,
331 client,
332 user_store,
333 fs,
334 language_servers_with_diagnostics_running: 0,
335 language_servers: Default::default(),
336 started_language_servers: Default::default(),
337 language_server_statuses: Default::default(),
338 language_server_settings: Default::default(),
339 next_language_server_id: 0,
340 nonce: StdRng::from_entropy().gen(),
341 }
342 })
343 }
344
345 pub async fn remote(
346 remote_id: u64,
347 client: Arc<Client>,
348 user_store: ModelHandle<UserStore>,
349 languages: Arc<LanguageRegistry>,
350 fs: Arc<dyn Fs>,
351 cx: &mut AsyncAppContext,
352 ) -> Result<ModelHandle<Self>> {
353 client.authenticate_and_connect(&cx).await?;
354
355 let response = client
356 .request(proto::JoinProject {
357 project_id: remote_id,
358 })
359 .await?;
360
361 let replica_id = response.replica_id as ReplicaId;
362
363 let mut worktrees = Vec::new();
364 for worktree in response.worktrees {
365 let (worktree, load_task) = cx
366 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
367 worktrees.push(worktree);
368 load_task.detach();
369 }
370
371 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
372 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
373 let mut this = Self {
374 worktrees: Vec::new(),
375 loading_buffers: Default::default(),
376 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
377 shared_buffers: Default::default(),
378 loading_local_worktrees: Default::default(),
379 active_entry: None,
380 collaborators: Default::default(),
381 languages,
382 user_store: user_store.clone(),
383 fs,
384 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
385 client: client.clone(),
386 client_state: ProjectClientState::Remote {
387 sharing_has_stopped: false,
388 remote_id,
389 replica_id,
390 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
391 async move {
392 let mut status = client.status();
393 let is_connected =
394 status.next().await.map_or(false, |s| s.is_connected());
395 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
396 if !is_connected || status.next().await.is_some() {
397 if let Some(this) = this.upgrade(&cx) {
398 this.update(&mut cx, |this, cx| this.project_unshared(cx))
399 }
400 }
401 Ok(())
402 }
403 .log_err()
404 }),
405 },
406 language_servers_with_diagnostics_running: 0,
407 language_servers: Default::default(),
408 started_language_servers: Default::default(),
409 language_server_settings: Default::default(),
410 language_server_statuses: response
411 .language_servers
412 .into_iter()
413 .map(|server| {
414 (
415 server.id as usize,
416 LanguageServerStatus {
417 name: server.name,
418 pending_work: Default::default(),
419 pending_diagnostic_updates: 0,
420 },
421 )
422 })
423 .collect(),
424 next_language_server_id: 0,
425 opened_buffers: Default::default(),
426 buffer_snapshots: Default::default(),
427 nonce: StdRng::from_entropy().gen(),
428 };
429 for worktree in worktrees {
430 this.add_worktree(&worktree, cx);
431 }
432 this
433 });
434
435 let user_ids = response
436 .collaborators
437 .iter()
438 .map(|peer| peer.user_id)
439 .collect();
440 user_store
441 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
442 .await?;
443 let mut collaborators = HashMap::default();
444 for message in response.collaborators {
445 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
446 collaborators.insert(collaborator.peer_id, collaborator);
447 }
448
449 this.update(cx, |this, _| {
450 this.collaborators = collaborators;
451 });
452
453 Ok(this)
454 }
455
456 #[cfg(any(test, feature = "test-support"))]
457 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
458 let languages = Arc::new(LanguageRegistry::test());
459 let http_client = client::test::FakeHttpClient::with_404_response();
460 let client = client::Client::new(http_client.clone());
461 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
462 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
463 }
464
465 #[cfg(any(test, feature = "test-support"))]
466 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
467 self.opened_buffers
468 .get(&remote_id)
469 .and_then(|buffer| buffer.upgrade(cx))
470 }
471
472 #[cfg(any(test, feature = "test-support"))]
473 pub fn languages(&self) -> &Arc<LanguageRegistry> {
474 &self.languages
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn check_invariants(&self, cx: &AppContext) {
479 if self.is_local() {
480 let mut worktree_root_paths = HashMap::default();
481 for worktree in self.worktrees(cx) {
482 let worktree = worktree.read(cx);
483 let abs_path = worktree.as_local().unwrap().abs_path().clone();
484 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
485 assert_eq!(
486 prev_worktree_id,
487 None,
488 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
489 abs_path,
490 worktree.id(),
491 prev_worktree_id
492 )
493 }
494 } else {
495 let replica_id = self.replica_id();
496 for buffer in self.opened_buffers.values() {
497 if let Some(buffer) = buffer.upgrade(cx) {
498 let buffer = buffer.read(cx);
499 assert_eq!(
500 buffer.deferred_ops_len(),
501 0,
502 "replica {}, buffer {} has deferred operations",
503 replica_id,
504 buffer.remote_id()
505 );
506 }
507 }
508 }
509 }
510
511 #[cfg(any(test, feature = "test-support"))]
512 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
513 let path = path.into();
514 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
515 self.opened_buffers.iter().any(|(_, buffer)| {
516 if let Some(buffer) = buffer.upgrade(cx) {
517 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
518 if file.worktree == worktree && file.path() == &path.path {
519 return true;
520 }
521 }
522 }
523 false
524 })
525 } else {
526 false
527 }
528 }
529
530 pub fn fs(&self) -> &Arc<dyn Fs> {
531 &self.fs
532 }
533
534 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
535 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
536 *remote_id_tx.borrow_mut() = remote_id;
537 }
538
539 self.subscriptions.clear();
540 if let Some(remote_id) = remote_id {
541 self.subscriptions
542 .push(self.client.add_model_for_remote_entity(remote_id, cx));
543 }
544 }
545
546 pub fn remote_id(&self) -> Option<u64> {
547 match &self.client_state {
548 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
549 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
550 }
551 }
552
553 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
554 let mut id = None;
555 let mut watch = None;
556 match &self.client_state {
557 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
558 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
559 }
560
561 async move {
562 if let Some(id) = id {
563 return id;
564 }
565 let mut watch = watch.unwrap();
566 loop {
567 let id = *watch.borrow();
568 if let Some(id) = id {
569 return id;
570 }
571 watch.next().await;
572 }
573 }
574 }
575
576 pub fn replica_id(&self) -> ReplicaId {
577 match &self.client_state {
578 ProjectClientState::Local { .. } => 0,
579 ProjectClientState::Remote { replica_id, .. } => *replica_id,
580 }
581 }
582
583 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
584 &self.collaborators
585 }
586
587 pub fn worktrees<'a>(
588 &'a self,
589 cx: &'a AppContext,
590 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
591 self.worktrees
592 .iter()
593 .filter_map(move |worktree| worktree.upgrade(cx))
594 }
595
596 pub fn visible_worktrees<'a>(
597 &'a self,
598 cx: &'a AppContext,
599 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
600 self.worktrees.iter().filter_map(|worktree| {
601 worktree.upgrade(cx).and_then(|worktree| {
602 if worktree.read(cx).is_visible() {
603 Some(worktree)
604 } else {
605 None
606 }
607 })
608 })
609 }
610
611 pub fn worktree_for_id(
612 &self,
613 id: WorktreeId,
614 cx: &AppContext,
615 ) -> Option<ModelHandle<Worktree>> {
616 self.worktrees(cx)
617 .find(|worktree| worktree.read(cx).id() == id)
618 }
619
620 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
621 let rpc = self.client.clone();
622 cx.spawn(|this, mut cx| async move {
623 let project_id = this.update(&mut cx, |this, cx| {
624 if let ProjectClientState::Local {
625 is_shared,
626 remote_id_rx,
627 ..
628 } = &mut this.client_state
629 {
630 *is_shared = true;
631
632 for open_buffer in this.opened_buffers.values_mut() {
633 match open_buffer {
634 OpenBuffer::Strong(_) => {}
635 OpenBuffer::Weak(buffer) => {
636 if let Some(buffer) = buffer.upgrade(cx) {
637 *open_buffer = OpenBuffer::Strong(buffer);
638 }
639 }
640 OpenBuffer::Loading(_) => unreachable!(),
641 }
642 }
643
644 for worktree_handle in this.worktrees.iter_mut() {
645 match worktree_handle {
646 WorktreeHandle::Strong(_) => {}
647 WorktreeHandle::Weak(worktree) => {
648 if let Some(worktree) = worktree.upgrade(cx) {
649 *worktree_handle = WorktreeHandle::Strong(worktree);
650 }
651 }
652 }
653 }
654
655 remote_id_rx
656 .borrow()
657 .ok_or_else(|| anyhow!("no project id"))
658 } else {
659 Err(anyhow!("can't share a remote project"))
660 }
661 })?;
662
663 rpc.request(proto::ShareProject { project_id }).await?;
664
665 let mut tasks = Vec::new();
666 this.update(&mut cx, |this, cx| {
667 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
668 worktree.update(cx, |worktree, cx| {
669 let worktree = worktree.as_local_mut().unwrap();
670 tasks.push(worktree.share(project_id, cx));
671 });
672 }
673 });
674 for task in tasks {
675 task.await?;
676 }
677 this.update(&mut cx, |_, cx| cx.notify());
678 Ok(())
679 })
680 }
681
682 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
683 let rpc = self.client.clone();
684 cx.spawn(|this, mut cx| async move {
685 let project_id = this.update(&mut cx, |this, cx| {
686 if let ProjectClientState::Local {
687 is_shared,
688 remote_id_rx,
689 ..
690 } = &mut this.client_state
691 {
692 *is_shared = false;
693
694 for open_buffer in this.opened_buffers.values_mut() {
695 match open_buffer {
696 OpenBuffer::Strong(buffer) => {
697 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
698 }
699 _ => {}
700 }
701 }
702
703 for worktree_handle in this.worktrees.iter_mut() {
704 match worktree_handle {
705 WorktreeHandle::Strong(worktree) => {
706 if !worktree.read(cx).is_visible() {
707 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
708 }
709 }
710 _ => {}
711 }
712 }
713
714 remote_id_rx
715 .borrow()
716 .ok_or_else(|| anyhow!("no project id"))
717 } else {
718 Err(anyhow!("can't share a remote project"))
719 }
720 })?;
721
722 rpc.send(proto::UnshareProject { project_id })?;
723 this.update(&mut cx, |this, cx| {
724 this.collaborators.clear();
725 this.shared_buffers.clear();
726 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
727 worktree.update(cx, |worktree, _| {
728 worktree.as_local_mut().unwrap().unshare();
729 });
730 }
731 cx.notify()
732 });
733 Ok(())
734 })
735 }
736
737 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
738 if let ProjectClientState::Remote {
739 sharing_has_stopped,
740 ..
741 } = &mut self.client_state
742 {
743 *sharing_has_stopped = true;
744 self.collaborators.clear();
745 cx.notify();
746 }
747 }
748
749 pub fn is_read_only(&self) -> bool {
750 match &self.client_state {
751 ProjectClientState::Local { .. } => false,
752 ProjectClientState::Remote {
753 sharing_has_stopped,
754 ..
755 } => *sharing_has_stopped,
756 }
757 }
758
759 pub fn is_local(&self) -> bool {
760 match &self.client_state {
761 ProjectClientState::Local { .. } => true,
762 ProjectClientState::Remote { .. } => false,
763 }
764 }
765
766 pub fn is_remote(&self) -> bool {
767 !self.is_local()
768 }
769
770 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
771 if self.is_remote() {
772 return Err(anyhow!("creating buffers as a guest is not supported yet"));
773 }
774
775 let buffer = cx.add_model(|cx| {
776 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
777 });
778 self.register_buffer(&buffer, cx)?;
779 Ok(buffer)
780 }
781
782 pub fn open_buffer(
783 &mut self,
784 path: impl Into<ProjectPath>,
785 cx: &mut ModelContext<Self>,
786 ) -> Task<Result<ModelHandle<Buffer>>> {
787 let project_path = path.into();
788 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
789 worktree
790 } else {
791 return Task::ready(Err(anyhow!("no such worktree")));
792 };
793
794 // If there is already a buffer for the given path, then return it.
795 let existing_buffer = self.get_open_buffer(&project_path, cx);
796 if let Some(existing_buffer) = existing_buffer {
797 return Task::ready(Ok(existing_buffer));
798 }
799
800 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
801 // If the given path is already being loaded, then wait for that existing
802 // task to complete and return the same buffer.
803 hash_map::Entry::Occupied(e) => e.get().clone(),
804
805 // Otherwise, record the fact that this path is now being loaded.
806 hash_map::Entry::Vacant(entry) => {
807 let (mut tx, rx) = postage::watch::channel();
808 entry.insert(rx.clone());
809
810 let load_buffer = if worktree.read(cx).is_local() {
811 self.open_local_buffer(&project_path.path, &worktree, cx)
812 } else {
813 self.open_remote_buffer(&project_path.path, &worktree, cx)
814 };
815
816 cx.spawn(move |this, mut cx| async move {
817 let load_result = load_buffer.await;
818 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
819 // Record the fact that the buffer is no longer loading.
820 this.loading_buffers.remove(&project_path);
821 let buffer = load_result.map_err(Arc::new)?;
822 Ok(buffer)
823 }));
824 })
825 .detach();
826 rx
827 }
828 };
829
830 cx.foreground().spawn(async move {
831 loop {
832 if let Some(result) = loading_watch.borrow().as_ref() {
833 match result {
834 Ok(buffer) => return Ok(buffer.clone()),
835 Err(error) => return Err(anyhow!("{}", error)),
836 }
837 }
838 loading_watch.next().await;
839 }
840 })
841 }
842
843 fn open_local_buffer(
844 &mut self,
845 path: &Arc<Path>,
846 worktree: &ModelHandle<Worktree>,
847 cx: &mut ModelContext<Self>,
848 ) -> Task<Result<ModelHandle<Buffer>>> {
849 let load_buffer = worktree.update(cx, |worktree, cx| {
850 let worktree = worktree.as_local_mut().unwrap();
851 worktree.load_buffer(path, cx)
852 });
853 cx.spawn(|this, mut cx| async move {
854 let buffer = load_buffer.await?;
855 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
856 Ok(buffer)
857 })
858 }
859
860 fn open_remote_buffer(
861 &mut self,
862 path: &Arc<Path>,
863 worktree: &ModelHandle<Worktree>,
864 cx: &mut ModelContext<Self>,
865 ) -> Task<Result<ModelHandle<Buffer>>> {
866 let rpc = self.client.clone();
867 let project_id = self.remote_id().unwrap();
868 let remote_worktree_id = worktree.read(cx).id();
869 let path = path.clone();
870 let path_string = path.to_string_lossy().to_string();
871 cx.spawn(|this, mut cx| async move {
872 let response = rpc
873 .request(proto::OpenBuffer {
874 project_id,
875 worktree_id: remote_worktree_id.to_proto(),
876 path: path_string,
877 })
878 .await?;
879 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
880 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
881 .await
882 })
883 }
884
885 fn open_local_buffer_via_lsp(
886 &mut self,
887 abs_path: lsp::Url,
888 lang_name: Arc<str>,
889 lang_server: Arc<LanguageServer>,
890 cx: &mut ModelContext<Self>,
891 ) -> Task<Result<ModelHandle<Buffer>>> {
892 cx.spawn(|this, mut cx| async move {
893 let abs_path = abs_path
894 .to_file_path()
895 .map_err(|_| anyhow!("can't convert URI to path"))?;
896 let (worktree, relative_path) = if let Some(result) =
897 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
898 {
899 result
900 } else {
901 let worktree = this
902 .update(&mut cx, |this, cx| {
903 this.create_local_worktree(&abs_path, false, cx)
904 })
905 .await?;
906 this.update(&mut cx, |this, cx| {
907 this.language_servers
908 .insert((worktree.read(cx).id(), lang_name), lang_server);
909 });
910 (worktree, PathBuf::new())
911 };
912
913 let project_path = ProjectPath {
914 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
915 path: relative_path.into(),
916 };
917 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
918 .await
919 })
920 }
921
922 pub fn save_buffer_as(
923 &mut self,
924 buffer: ModelHandle<Buffer>,
925 abs_path: PathBuf,
926 cx: &mut ModelContext<Project>,
927 ) -> Task<Result<()>> {
928 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
929 cx.spawn(|this, mut cx| async move {
930 let (worktree, path) = worktree_task.await?;
931 worktree
932 .update(&mut cx, |worktree, cx| {
933 worktree
934 .as_local_mut()
935 .unwrap()
936 .save_buffer_as(buffer.clone(), path, cx)
937 })
938 .await?;
939 this.update(&mut cx, |this, cx| {
940 this.assign_language_to_buffer(&buffer, cx);
941 this.register_buffer_with_language_server(&buffer, cx);
942 });
943 Ok(())
944 })
945 }
946
947 pub fn get_open_buffer(
948 &mut self,
949 path: &ProjectPath,
950 cx: &mut ModelContext<Self>,
951 ) -> Option<ModelHandle<Buffer>> {
952 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
953 self.opened_buffers.values().find_map(|buffer| {
954 let buffer = buffer.upgrade(cx)?;
955 let file = File::from_dyn(buffer.read(cx).file())?;
956 if file.worktree == worktree && file.path() == &path.path {
957 Some(buffer)
958 } else {
959 None
960 }
961 })
962 }
963
964 fn register_buffer(
965 &mut self,
966 buffer: &ModelHandle<Buffer>,
967 cx: &mut ModelContext<Self>,
968 ) -> Result<()> {
969 let remote_id = buffer.read(cx).remote_id();
970 let open_buffer = if self.is_remote() || self.is_shared() {
971 OpenBuffer::Strong(buffer.clone())
972 } else {
973 OpenBuffer::Weak(buffer.downgrade())
974 };
975
976 match self.opened_buffers.insert(remote_id, open_buffer) {
977 None => {}
978 Some(OpenBuffer::Loading(operations)) => {
979 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
980 }
981 Some(OpenBuffer::Weak(existing_handle)) => {
982 if existing_handle.upgrade(cx).is_some() {
983 Err(anyhow!(
984 "already registered buffer with remote id {}",
985 remote_id
986 ))?
987 }
988 }
989 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
990 "already registered buffer with remote id {}",
991 remote_id
992 ))?,
993 }
994 cx.subscribe(buffer, |this, buffer, event, cx| {
995 this.on_buffer_event(buffer, event, cx);
996 })
997 .detach();
998
999 self.assign_language_to_buffer(buffer, cx);
1000 self.register_buffer_with_language_server(buffer, cx);
1001
1002 Ok(())
1003 }
1004
1005 fn register_buffer_with_language_server(
1006 &mut self,
1007 buffer_handle: &ModelHandle<Buffer>,
1008 cx: &mut ModelContext<Self>,
1009 ) {
1010 let buffer = buffer_handle.read(cx);
1011 let buffer_id = buffer.remote_id();
1012 if let Some(file) = File::from_dyn(buffer.file()) {
1013 if file.is_local() {
1014 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1015 let initial_snapshot = buffer.text_snapshot();
1016 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1017
1018 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1019 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1020 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1021 .log_err();
1022 }
1023 }
1024
1025 if let Some(server) = language_server {
1026 server
1027 .notify::<lsp::notification::DidOpenTextDocument>(
1028 lsp::DidOpenTextDocumentParams {
1029 text_document: lsp::TextDocumentItem::new(
1030 uri,
1031 Default::default(),
1032 0,
1033 initial_snapshot.text(),
1034 ),
1035 }
1036 .clone(),
1037 )
1038 .log_err();
1039 buffer_handle.update(cx, |buffer, cx| {
1040 buffer.set_completion_triggers(
1041 server
1042 .capabilities()
1043 .completion_provider
1044 .as_ref()
1045 .and_then(|provider| provider.trigger_characters.clone())
1046 .unwrap_or(Vec::new()),
1047 cx,
1048 )
1049 });
1050 self.buffer_snapshots
1051 .insert(buffer_id, vec![(0, initial_snapshot)]);
1052 }
1053
1054 cx.observe_release(buffer_handle, |this, buffer, cx| {
1055 if let Some(file) = File::from_dyn(buffer.file()) {
1056 if file.is_local() {
1057 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1058 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1059 server
1060 .notify::<lsp::notification::DidCloseTextDocument>(
1061 lsp::DidCloseTextDocumentParams {
1062 text_document: lsp::TextDocumentIdentifier::new(
1063 uri.clone(),
1064 ),
1065 },
1066 )
1067 .log_err();
1068 }
1069 }
1070 }
1071 })
1072 .detach();
1073 }
1074 }
1075 }
1076
1077 fn on_buffer_event(
1078 &mut self,
1079 buffer: ModelHandle<Buffer>,
1080 event: &BufferEvent,
1081 cx: &mut ModelContext<Self>,
1082 ) -> Option<()> {
1083 match event {
1084 BufferEvent::Operation(operation) => {
1085 let project_id = self.remote_id()?;
1086 let request = self.client.request(proto::UpdateBuffer {
1087 project_id,
1088 buffer_id: buffer.read(cx).remote_id(),
1089 operations: vec![language::proto::serialize_operation(&operation)],
1090 });
1091 cx.background().spawn(request).detach_and_log_err(cx);
1092 }
1093 BufferEvent::Edited => {
1094 let language_server = self
1095 .language_server_for_buffer(buffer.read(cx), cx)?
1096 .clone();
1097 let buffer = buffer.read(cx);
1098 let file = File::from_dyn(buffer.file())?;
1099 let abs_path = file.as_local()?.abs_path(cx);
1100 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1101 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1102 let (version, prev_snapshot) = buffer_snapshots.last()?;
1103 let next_snapshot = buffer.text_snapshot();
1104 let next_version = version + 1;
1105
1106 let content_changes = buffer
1107 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1108 .map(|edit| {
1109 let edit_start = edit.new.start.0;
1110 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1111 let new_text = next_snapshot
1112 .text_for_range(edit.new.start.1..edit.new.end.1)
1113 .collect();
1114 lsp::TextDocumentContentChangeEvent {
1115 range: Some(lsp::Range::new(
1116 edit_start.to_lsp_position(),
1117 edit_end.to_lsp_position(),
1118 )),
1119 range_length: None,
1120 text: new_text,
1121 }
1122 })
1123 .collect();
1124
1125 buffer_snapshots.push((next_version, next_snapshot));
1126
1127 language_server
1128 .notify::<lsp::notification::DidChangeTextDocument>(
1129 lsp::DidChangeTextDocumentParams {
1130 text_document: lsp::VersionedTextDocumentIdentifier::new(
1131 uri,
1132 next_version,
1133 ),
1134 content_changes,
1135 },
1136 )
1137 .log_err();
1138 }
1139 BufferEvent::Saved => {
1140 let file = File::from_dyn(buffer.read(cx).file())?;
1141 let worktree_id = file.worktree_id(cx);
1142 let abs_path = file.as_local()?.abs_path(cx);
1143 let text_document = lsp::TextDocumentIdentifier {
1144 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1145 };
1146
1147 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1148 server
1149 .notify::<lsp::notification::DidSaveTextDocument>(
1150 lsp::DidSaveTextDocumentParams {
1151 text_document: text_document.clone(),
1152 text: None,
1153 },
1154 )
1155 .log_err();
1156 }
1157 }
1158 _ => {}
1159 }
1160
1161 None
1162 }
1163
1164 fn language_servers_for_worktree(
1165 &self,
1166 worktree_id: WorktreeId,
1167 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1168 self.language_servers.iter().filter_map(
1169 move |((language_server_worktree_id, language_name), server)| {
1170 if *language_server_worktree_id == worktree_id {
1171 Some((language_name.as_ref(), server))
1172 } else {
1173 None
1174 }
1175 },
1176 )
1177 }
1178
1179 fn assign_language_to_buffer(
1180 &mut self,
1181 buffer: &ModelHandle<Buffer>,
1182 cx: &mut ModelContext<Self>,
1183 ) -> Option<()> {
1184 // If the buffer has a language, set it and start the language server if we haven't already.
1185 let full_path = buffer.read(cx).file()?.full_path(cx);
1186 let language = self.languages.select_language(&full_path)?;
1187 buffer.update(cx, |buffer, cx| {
1188 buffer.set_language(Some(language.clone()), cx);
1189 });
1190
1191 let file = File::from_dyn(buffer.read(cx).file())?;
1192 let worktree = file.worktree.read(cx).as_local()?;
1193 let worktree_id = worktree.id();
1194 let worktree_abs_path = worktree.abs_path().clone();
1195 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1196
1197 None
1198 }
1199
1200 fn start_language_server(
1201 &mut self,
1202 worktree_id: WorktreeId,
1203 worktree_path: Arc<Path>,
1204 language: Arc<Language>,
1205 cx: &mut ModelContext<Self>,
1206 ) {
1207 let key = (worktree_id, language.name());
1208 self.started_language_servers
1209 .entry(key.clone())
1210 .or_insert_with(|| {
1211 let server_id = post_inc(&mut self.next_language_server_id);
1212 let language_server = self.languages.start_language_server(
1213 language.clone(),
1214 worktree_path,
1215 self.client.http_client(),
1216 cx,
1217 );
1218 cx.spawn_weak(|this, mut cx| async move {
1219 let mut language_server = language_server?.await.log_err()?;
1220 let this = this.upgrade(&cx)?;
1221 let (language_server_events_tx, language_server_events_rx) =
1222 smol::channel::unbounded();
1223
1224 language_server
1225 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1226 let language_server_events_tx = language_server_events_tx.clone();
1227 move |params| {
1228 language_server_events_tx
1229 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1230 .ok();
1231 }
1232 })
1233 .detach();
1234
1235 language_server
1236 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1237 let settings = this
1238 .read_with(&cx, |this, _| this.language_server_settings.clone());
1239 move |params| {
1240 let settings = settings.lock();
1241 Ok(params
1242 .items
1243 .into_iter()
1244 .map(|item| {
1245 if let Some(section) = &item.section {
1246 settings
1247 .get(section)
1248 .cloned()
1249 .unwrap_or(serde_json::Value::Null)
1250 } else {
1251 settings.clone()
1252 }
1253 })
1254 .collect())
1255 }
1256 })
1257 .detach();
1258
1259 language_server
1260 .on_notification::<lsp::notification::Progress, _>(move |params| {
1261 let token = match params.token {
1262 lsp::NumberOrString::String(token) => token,
1263 lsp::NumberOrString::Number(token) => {
1264 log::info!("skipping numeric progress token {}", token);
1265 return;
1266 }
1267 };
1268
1269 match params.value {
1270 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1271 lsp::WorkDoneProgress::Begin(_) => {
1272 language_server_events_tx
1273 .try_send(LanguageServerEvent::WorkStart { token })
1274 .ok();
1275 }
1276 lsp::WorkDoneProgress::Report(report) => {
1277 language_server_events_tx
1278 .try_send(LanguageServerEvent::WorkProgress {
1279 token,
1280 progress: LanguageServerProgress {
1281 message: report.message,
1282 percentage: report
1283 .percentage
1284 .map(|p| p as usize),
1285 last_update_at: Instant::now(),
1286 },
1287 })
1288 .ok();
1289 }
1290 lsp::WorkDoneProgress::End(_) => {
1291 language_server_events_tx
1292 .try_send(LanguageServerEvent::WorkEnd { token })
1293 .ok();
1294 }
1295 },
1296 }
1297 })
1298 .detach();
1299
1300 // Process all the LSP events.
1301 cx.spawn(|mut cx| {
1302 let this = this.downgrade();
1303 async move {
1304 while let Ok(event) = language_server_events_rx.recv().await {
1305 let this = this.upgrade(&cx)?;
1306 this.update(&mut cx, |this, cx| {
1307 this.on_lsp_event(server_id, event, &language, cx)
1308 });
1309
1310 // Don't starve the main thread when lots of events arrive all at once.
1311 smol::future::yield_now().await;
1312 }
1313 Some(())
1314 }
1315 })
1316 .detach();
1317
1318 let language_server = language_server.initialize().await.log_err()?;
1319 this.update(&mut cx, |this, cx| {
1320 this.language_servers
1321 .insert(key.clone(), language_server.clone());
1322 this.language_server_statuses.insert(
1323 server_id,
1324 LanguageServerStatus {
1325 name: language_server.name().to_string(),
1326 pending_work: Default::default(),
1327 pending_diagnostic_updates: 0,
1328 },
1329 );
1330 language_server
1331 .notify::<lsp::notification::DidChangeConfiguration>(
1332 lsp::DidChangeConfigurationParams {
1333 settings: this.language_server_settings.lock().clone(),
1334 },
1335 )
1336 .ok();
1337
1338 if let Some(project_id) = this.remote_id() {
1339 this.client
1340 .send(proto::StartLanguageServer {
1341 project_id,
1342 server: Some(proto::LanguageServer {
1343 id: server_id as u64,
1344 name: language_server.name().to_string(),
1345 }),
1346 })
1347 .log_err();
1348 }
1349
1350 // Tell the language server about every open buffer in the worktree that matches the language.
1351 for buffer in this.opened_buffers.values() {
1352 if let Some(buffer_handle) = buffer.upgrade(cx) {
1353 let buffer = buffer_handle.read(cx);
1354 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1355 file
1356 } else {
1357 continue;
1358 };
1359 let language = if let Some(language) = buffer.language() {
1360 language
1361 } else {
1362 continue;
1363 };
1364 if (file.worktree.read(cx).id(), language.name()) != key {
1365 continue;
1366 }
1367
1368 let file = file.as_local()?;
1369 let versions = this
1370 .buffer_snapshots
1371 .entry(buffer.remote_id())
1372 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1373 let (version, initial_snapshot) = versions.last().unwrap();
1374 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1375 language_server
1376 .notify::<lsp::notification::DidOpenTextDocument>(
1377 lsp::DidOpenTextDocumentParams {
1378 text_document: lsp::TextDocumentItem::new(
1379 uri,
1380 Default::default(),
1381 *version,
1382 initial_snapshot.text(),
1383 ),
1384 },
1385 )
1386 .log_err()?;
1387 buffer_handle.update(cx, |buffer, cx| {
1388 buffer.set_completion_triggers(
1389 language_server
1390 .capabilities()
1391 .completion_provider
1392 .as_ref()
1393 .and_then(|provider| {
1394 provider.trigger_characters.clone()
1395 })
1396 .unwrap_or(Vec::new()),
1397 cx,
1398 )
1399 });
1400 }
1401 }
1402
1403 cx.notify();
1404 Some(())
1405 });
1406
1407 Some(language_server)
1408 })
1409 });
1410 }
1411
1412 fn on_lsp_event(
1413 &mut self,
1414 language_server_id: usize,
1415 event: LanguageServerEvent,
1416 language: &Arc<Language>,
1417 cx: &mut ModelContext<Self>,
1418 ) {
1419 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1420 let language_server_status =
1421 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1422 status
1423 } else {
1424 return;
1425 };
1426
1427 match event {
1428 LanguageServerEvent::WorkStart { token } => {
1429 if Some(&token) == disk_diagnostics_token {
1430 language_server_status.pending_diagnostic_updates += 1;
1431 if language_server_status.pending_diagnostic_updates == 1 {
1432 self.disk_based_diagnostics_started(cx);
1433 self.broadcast_language_server_update(
1434 language_server_id,
1435 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1436 proto::LspDiskBasedDiagnosticsUpdating {},
1437 ),
1438 );
1439 }
1440 } else {
1441 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1442 self.broadcast_language_server_update(
1443 language_server_id,
1444 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1445 token,
1446 }),
1447 );
1448 }
1449 }
1450 LanguageServerEvent::WorkProgress { token, progress } => {
1451 if Some(&token) != disk_diagnostics_token {
1452 self.on_lsp_work_progress(
1453 language_server_id,
1454 token.clone(),
1455 progress.clone(),
1456 cx,
1457 );
1458 self.broadcast_language_server_update(
1459 language_server_id,
1460 proto::update_language_server::Variant::WorkProgress(
1461 proto::LspWorkProgress {
1462 token,
1463 message: progress.message,
1464 percentage: progress.percentage.map(|p| p as u32),
1465 },
1466 ),
1467 );
1468 }
1469 }
1470 LanguageServerEvent::WorkEnd { token } => {
1471 if Some(&token) == disk_diagnostics_token {
1472 language_server_status.pending_diagnostic_updates -= 1;
1473 if language_server_status.pending_diagnostic_updates == 0 {
1474 self.disk_based_diagnostics_finished(cx);
1475 self.broadcast_language_server_update(
1476 language_server_id,
1477 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1478 proto::LspDiskBasedDiagnosticsUpdated {},
1479 ),
1480 );
1481 }
1482 } else {
1483 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1484 self.broadcast_language_server_update(
1485 language_server_id,
1486 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1487 token,
1488 }),
1489 );
1490 }
1491 }
1492 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1493 language.process_diagnostics(&mut params);
1494
1495 if disk_diagnostics_token.is_none() {
1496 self.disk_based_diagnostics_started(cx);
1497 self.broadcast_language_server_update(
1498 language_server_id,
1499 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1500 proto::LspDiskBasedDiagnosticsUpdating {},
1501 ),
1502 );
1503 }
1504 self.update_diagnostics(
1505 params,
1506 language
1507 .disk_based_diagnostic_sources()
1508 .unwrap_or(&Default::default()),
1509 cx,
1510 )
1511 .log_err();
1512 if disk_diagnostics_token.is_none() {
1513 self.disk_based_diagnostics_finished(cx);
1514 self.broadcast_language_server_update(
1515 language_server_id,
1516 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1517 proto::LspDiskBasedDiagnosticsUpdated {},
1518 ),
1519 );
1520 }
1521 }
1522 }
1523 }
1524
1525 fn on_lsp_work_start(
1526 &mut self,
1527 language_server_id: usize,
1528 token: String,
1529 cx: &mut ModelContext<Self>,
1530 ) {
1531 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1532 status.pending_work.insert(
1533 token,
1534 LanguageServerProgress {
1535 message: None,
1536 percentage: None,
1537 last_update_at: Instant::now(),
1538 },
1539 );
1540 cx.notify();
1541 }
1542 }
1543
1544 fn on_lsp_work_progress(
1545 &mut self,
1546 language_server_id: usize,
1547 token: String,
1548 progress: LanguageServerProgress,
1549 cx: &mut ModelContext<Self>,
1550 ) {
1551 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1552 status.pending_work.insert(token, progress);
1553 cx.notify();
1554 }
1555 }
1556
1557 fn on_lsp_work_end(
1558 &mut self,
1559 language_server_id: usize,
1560 token: String,
1561 cx: &mut ModelContext<Self>,
1562 ) {
1563 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1564 status.pending_work.remove(&token);
1565 cx.notify();
1566 }
1567 }
1568
1569 fn broadcast_language_server_update(
1570 &self,
1571 language_server_id: usize,
1572 event: proto::update_language_server::Variant,
1573 ) {
1574 if let Some(project_id) = self.remote_id() {
1575 self.client
1576 .send(proto::UpdateLanguageServer {
1577 project_id,
1578 language_server_id: language_server_id as u64,
1579 variant: Some(event),
1580 })
1581 .log_err();
1582 }
1583 }
1584
1585 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1586 for server in self.language_servers.values() {
1587 server
1588 .notify::<lsp::notification::DidChangeConfiguration>(
1589 lsp::DidChangeConfigurationParams {
1590 settings: settings.clone(),
1591 },
1592 )
1593 .ok();
1594 }
1595 *self.language_server_settings.lock() = settings;
1596 }
1597
1598 pub fn language_server_statuses(
1599 &self,
1600 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1601 self.language_server_statuses.values()
1602 }
1603
1604 pub fn update_diagnostics(
1605 &mut self,
1606 params: lsp::PublishDiagnosticsParams,
1607 disk_based_sources: &HashSet<String>,
1608 cx: &mut ModelContext<Self>,
1609 ) -> Result<()> {
1610 let abs_path = params
1611 .uri
1612 .to_file_path()
1613 .map_err(|_| anyhow!("URI is not a file"))?;
1614 let mut next_group_id = 0;
1615 let mut diagnostics = Vec::default();
1616 let mut primary_diagnostic_group_ids = HashMap::default();
1617 let mut sources_by_group_id = HashMap::default();
1618 let mut supporting_diagnostic_severities = HashMap::default();
1619 for diagnostic in ¶ms.diagnostics {
1620 let source = diagnostic.source.as_ref();
1621 let code = diagnostic.code.as_ref().map(|code| match code {
1622 lsp::NumberOrString::Number(code) => code.to_string(),
1623 lsp::NumberOrString::String(code) => code.clone(),
1624 });
1625 let range = range_from_lsp(diagnostic.range);
1626 let is_supporting = diagnostic
1627 .related_information
1628 .as_ref()
1629 .map_or(false, |infos| {
1630 infos.iter().any(|info| {
1631 primary_diagnostic_group_ids.contains_key(&(
1632 source,
1633 code.clone(),
1634 range_from_lsp(info.location.range),
1635 ))
1636 })
1637 });
1638
1639 if is_supporting {
1640 if let Some(severity) = diagnostic.severity {
1641 supporting_diagnostic_severities
1642 .insert((source, code.clone(), range), severity);
1643 }
1644 } else {
1645 let group_id = post_inc(&mut next_group_id);
1646 let is_disk_based =
1647 source.map_or(false, |source| disk_based_sources.contains(source));
1648
1649 sources_by_group_id.insert(group_id, source);
1650 primary_diagnostic_group_ids
1651 .insert((source, code.clone(), range.clone()), group_id);
1652
1653 diagnostics.push(DiagnosticEntry {
1654 range,
1655 diagnostic: Diagnostic {
1656 code: code.clone(),
1657 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1658 message: diagnostic.message.clone(),
1659 group_id,
1660 is_primary: true,
1661 is_valid: true,
1662 is_disk_based,
1663 },
1664 });
1665 if let Some(infos) = &diagnostic.related_information {
1666 for info in infos {
1667 if info.location.uri == params.uri && !info.message.is_empty() {
1668 let range = range_from_lsp(info.location.range);
1669 diagnostics.push(DiagnosticEntry {
1670 range,
1671 diagnostic: Diagnostic {
1672 code: code.clone(),
1673 severity: DiagnosticSeverity::INFORMATION,
1674 message: info.message.clone(),
1675 group_id,
1676 is_primary: false,
1677 is_valid: true,
1678 is_disk_based,
1679 },
1680 });
1681 }
1682 }
1683 }
1684 }
1685 }
1686
1687 for entry in &mut diagnostics {
1688 let diagnostic = &mut entry.diagnostic;
1689 if !diagnostic.is_primary {
1690 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1691 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1692 source,
1693 diagnostic.code.clone(),
1694 entry.range.clone(),
1695 )) {
1696 diagnostic.severity = severity;
1697 }
1698 }
1699 }
1700
1701 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1702 Ok(())
1703 }
1704
1705 pub fn update_diagnostic_entries(
1706 &mut self,
1707 abs_path: PathBuf,
1708 version: Option<i32>,
1709 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1710 cx: &mut ModelContext<Project>,
1711 ) -> Result<(), anyhow::Error> {
1712 let (worktree, relative_path) = self
1713 .find_local_worktree(&abs_path, cx)
1714 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1715 if !worktree.read(cx).is_visible() {
1716 return Ok(());
1717 }
1718
1719 let project_path = ProjectPath {
1720 worktree_id: worktree.read(cx).id(),
1721 path: relative_path.into(),
1722 };
1723
1724 for buffer in self.opened_buffers.values() {
1725 if let Some(buffer) = buffer.upgrade(cx) {
1726 if buffer
1727 .read(cx)
1728 .file()
1729 .map_or(false, |file| *file.path() == project_path.path)
1730 {
1731 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1732 break;
1733 }
1734 }
1735 }
1736 worktree.update(cx, |worktree, cx| {
1737 worktree
1738 .as_local_mut()
1739 .ok_or_else(|| anyhow!("not a local worktree"))?
1740 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1741 })?;
1742 cx.emit(Event::DiagnosticsUpdated(project_path));
1743 Ok(())
1744 }
1745
1746 fn update_buffer_diagnostics(
1747 &mut self,
1748 buffer: &ModelHandle<Buffer>,
1749 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1750 version: Option<i32>,
1751 cx: &mut ModelContext<Self>,
1752 ) -> Result<()> {
1753 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1754 Ordering::Equal
1755 .then_with(|| b.is_primary.cmp(&a.is_primary))
1756 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1757 .then_with(|| a.severity.cmp(&b.severity))
1758 .then_with(|| a.message.cmp(&b.message))
1759 }
1760
1761 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1762
1763 diagnostics.sort_unstable_by(|a, b| {
1764 Ordering::Equal
1765 .then_with(|| a.range.start.cmp(&b.range.start))
1766 .then_with(|| b.range.end.cmp(&a.range.end))
1767 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1768 });
1769
1770 let mut sanitized_diagnostics = Vec::new();
1771 let mut edits_since_save = snapshot
1772 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1773 .peekable();
1774 let mut last_edit_old_end = PointUtf16::zero();
1775 let mut last_edit_new_end = PointUtf16::zero();
1776 'outer: for entry in diagnostics {
1777 let mut start = entry.range.start;
1778 let mut end = entry.range.end;
1779
1780 // Some diagnostics are based on files on disk instead of buffers'
1781 // current contents. Adjust these diagnostics' ranges to reflect
1782 // any unsaved edits.
1783 if entry.diagnostic.is_disk_based {
1784 while let Some(edit) = edits_since_save.peek() {
1785 if edit.old.end <= start {
1786 last_edit_old_end = edit.old.end;
1787 last_edit_new_end = edit.new.end;
1788 edits_since_save.next();
1789 } else if edit.old.start <= end && edit.old.end >= start {
1790 continue 'outer;
1791 } else {
1792 break;
1793 }
1794 }
1795
1796 let start_overshoot = start - last_edit_old_end;
1797 start = last_edit_new_end;
1798 start += start_overshoot;
1799
1800 let end_overshoot = end - last_edit_old_end;
1801 end = last_edit_new_end;
1802 end += end_overshoot;
1803 }
1804
1805 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1806 ..snapshot.clip_point_utf16(end, Bias::Right);
1807
1808 // Expand empty ranges by one character
1809 if range.start == range.end {
1810 range.end.column += 1;
1811 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1812 if range.start == range.end && range.end.column > 0 {
1813 range.start.column -= 1;
1814 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1815 }
1816 }
1817
1818 sanitized_diagnostics.push(DiagnosticEntry {
1819 range,
1820 diagnostic: entry.diagnostic,
1821 });
1822 }
1823 drop(edits_since_save);
1824
1825 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1826 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1827 Ok(())
1828 }
1829
1830 pub fn format(
1831 &self,
1832 buffers: HashSet<ModelHandle<Buffer>>,
1833 push_to_history: bool,
1834 cx: &mut ModelContext<Project>,
1835 ) -> Task<Result<ProjectTransaction>> {
1836 let mut local_buffers = Vec::new();
1837 let mut remote_buffers = None;
1838 for buffer_handle in buffers {
1839 let buffer = buffer_handle.read(cx);
1840 if let Some(file) = File::from_dyn(buffer.file()) {
1841 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1842 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1843 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1844 }
1845 } else {
1846 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1847 }
1848 } else {
1849 return Task::ready(Ok(Default::default()));
1850 }
1851 }
1852
1853 let remote_buffers = self.remote_id().zip(remote_buffers);
1854 let client = self.client.clone();
1855
1856 cx.spawn(|this, mut cx| async move {
1857 let mut project_transaction = ProjectTransaction::default();
1858
1859 if let Some((project_id, remote_buffers)) = remote_buffers {
1860 let response = client
1861 .request(proto::FormatBuffers {
1862 project_id,
1863 buffer_ids: remote_buffers
1864 .iter()
1865 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1866 .collect(),
1867 })
1868 .await?
1869 .transaction
1870 .ok_or_else(|| anyhow!("missing transaction"))?;
1871 project_transaction = this
1872 .update(&mut cx, |this, cx| {
1873 this.deserialize_project_transaction(response, push_to_history, cx)
1874 })
1875 .await?;
1876 }
1877
1878 for (buffer, buffer_abs_path, language_server) in local_buffers {
1879 let text_document = lsp::TextDocumentIdentifier::new(
1880 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1881 );
1882 let capabilities = &language_server.capabilities();
1883 let lsp_edits = if capabilities
1884 .document_formatting_provider
1885 .as_ref()
1886 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1887 {
1888 language_server
1889 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1890 text_document,
1891 options: Default::default(),
1892 work_done_progress_params: Default::default(),
1893 })
1894 .await?
1895 } else if capabilities
1896 .document_range_formatting_provider
1897 .as_ref()
1898 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1899 {
1900 let buffer_start = lsp::Position::new(0, 0);
1901 let buffer_end = buffer
1902 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1903 .to_lsp_position();
1904 language_server
1905 .request::<lsp::request::RangeFormatting>(
1906 lsp::DocumentRangeFormattingParams {
1907 text_document,
1908 range: lsp::Range::new(buffer_start, buffer_end),
1909 options: Default::default(),
1910 work_done_progress_params: Default::default(),
1911 },
1912 )
1913 .await?
1914 } else {
1915 continue;
1916 };
1917
1918 if let Some(lsp_edits) = lsp_edits {
1919 let edits = this
1920 .update(&mut cx, |this, cx| {
1921 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1922 })
1923 .await?;
1924 buffer.update(&mut cx, |buffer, cx| {
1925 buffer.finalize_last_transaction();
1926 buffer.start_transaction();
1927 for (range, text) in edits {
1928 buffer.edit([range], text, cx);
1929 }
1930 if buffer.end_transaction(cx).is_some() {
1931 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1932 if !push_to_history {
1933 buffer.forget_transaction(transaction.id);
1934 }
1935 project_transaction.0.insert(cx.handle(), transaction);
1936 }
1937 });
1938 }
1939 }
1940
1941 Ok(project_transaction)
1942 })
1943 }
1944
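    /// Requests the definition(s) of the symbol at `position`, going through
    /// the buffer's language server locally or the remote host when
    /// collaborating.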
1945 pub fn definition<T: ToPointUtf16>(
1946 &self,
1947 buffer: &ModelHandle<Buffer>,
1948 position: T,
1949 cx: &mut ModelContext<Self>,
1950 ) -> Task<Result<Vec<Location>>> {
1951 let position = position.to_point_utf16(buffer.read(cx));
1952 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1953 }
1954
1955 pub fn references<T: ToPointUtf16>(
1956 &self,
1957 buffer: &ModelHandle<Buffer>,
1958 position: T,
1959 cx: &mut ModelContext<Self>,
1960 ) -> Task<Result<Vec<Location>>> {
1961 let position = position.to_point_utf16(buffer.read(cx));
1962 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1963 }
1964
1965 pub fn document_highlights<T: ToPointUtf16>(
1966 &self,
1967 buffer: &ModelHandle<Buffer>,
1968 position: T,
1969 cx: &mut ModelContext<Self>,
1970 ) -> Task<Result<Vec<DocumentHighlight>>> {
1971 let position = position.to_point_utf16(buffer.read(cx));
1972
1973 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1974 }
1975
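    /// Queries every running language server for workspace symbols matching
    /// `query` (or forwards a `GetProjectSymbols` request to the host on
    /// remote projects). Each symbol carries a signature so that
    /// [`Self::open_buffer_for_symbol`] can later verify it.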
1976 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1977 if self.is_local() {
1978 let mut language_servers = HashMap::default();
1979 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1980 if let Some((worktree, language)) = self
1981 .worktree_for_id(*worktree_id, cx)
1982 .and_then(|worktree| worktree.read(cx).as_local())
1983 .zip(self.languages.get_language(language_name))
1984 {
1985 language_servers
1986 .entry(Arc::as_ptr(language_server))
1987 .or_insert((
1988 language_server.clone(),
1989 *worktree_id,
1990 worktree.abs_path().clone(),
1991 language.clone(),
1992 ));
1993 }
1994 }
1995
1996 let mut requests = Vec::new();
1997 for (language_server, _, _, _) in language_servers.values() {
1998 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1999 lsp::WorkspaceSymbolParams {
2000 query: query.to_string(),
2001 ..Default::default()
2002 },
2003 ));
2004 }
2005
2006 cx.spawn_weak(|this, cx| async move {
2007 let responses = futures::future::try_join_all(requests).await?;
2008
2009 let mut symbols = Vec::new();
2010 if let Some(this) = this.upgrade(&cx) {
2011 this.read_with(&cx, |this, cx| {
2012 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2013 language_servers.into_values().zip(responses)
2014 {
2015 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2016 |lsp_symbol| {
2017 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2018 let mut worktree_id = source_worktree_id;
2019 let path;
2020 if let Some((worktree, rel_path)) =
2021 this.find_local_worktree(&abs_path, cx)
2022 {
2023 worktree_id = worktree.read(cx).id();
2024 path = rel_path;
2025 } else {
2026 path = relativize_path(&worktree_abs_path, &abs_path);
2027 }
2028
2029 let label = language
2030 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2031 .unwrap_or_else(|| {
2032 CodeLabel::plain(lsp_symbol.name.clone(), None)
2033 });
2034 let signature = this.symbol_signature(worktree_id, &path);
2035
2036 Some(Symbol {
2037 source_worktree_id,
2038 worktree_id,
2039 language_name: language.name().to_string(),
2040 name: lsp_symbol.name,
2041 kind: lsp_symbol.kind,
2042 label,
2043 path,
2044 range: range_from_lsp(lsp_symbol.location.range),
2045 signature,
2046 })
2047 },
2048 ));
2049 }
2050 })
2051 }
2052
2053 Ok(symbols)
2054 })
2055 } else if let Some(project_id) = self.remote_id() {
2056 let request = self.client.request(proto::GetProjectSymbols {
2057 project_id,
2058 query: query.to_string(),
2059 });
2060 cx.spawn_weak(|this, cx| async move {
2061 let response = request.await?;
2062 let mut symbols = Vec::new();
2063 if let Some(this) = this.upgrade(&cx) {
2064 this.read_with(&cx, |this, _| {
2065 symbols.extend(
2066 response
2067 .symbols
2068 .into_iter()
2069 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2070 );
2071 })
2072 }
2073 Ok(symbols)
2074 })
2075 } else {
2076 Task::ready(Ok(Default::default()))
2077 }
2078 }
2079
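    /// Opens the buffer containing `symbol`, either through the language
    /// server that reported it (locally) or by asking the remote host for it.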
2080 pub fn open_buffer_for_symbol(
2081 &mut self,
2082 symbol: &Symbol,
2083 cx: &mut ModelContext<Self>,
2084 ) -> Task<Result<ModelHandle<Buffer>>> {
2085 if self.is_local() {
2086 let language_server = if let Some(server) = self.language_servers.get(&(
2087 symbol.source_worktree_id,
2088 Arc::from(symbol.language_name.as_str()),
2089 )) {
2090 server.clone()
2091 } else {
2092 return Task::ready(Err(anyhow!(
2093 "language server for worktree and language not found"
2094 )));
2095 };
2096
2097 let worktree_abs_path = if let Some(worktree_abs_path) = self
2098 .worktree_for_id(symbol.worktree_id, cx)
2099 .and_then(|worktree| worktree.read(cx).as_local())
2100 .map(|local_worktree| local_worktree.abs_path())
2101 {
2102 worktree_abs_path
2103 } else {
2104 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2105 };
2106 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2107 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2108 uri
2109 } else {
2110 return Task::ready(Err(anyhow!("invalid symbol path")));
2111 };
2112
2113 self.open_local_buffer_via_lsp(
2114 symbol_uri,
2115 Arc::from(symbol.language_name.as_str()),
2116 language_server,
2117 cx,
2118 )
2119 } else if let Some(project_id) = self.remote_id() {
2120 let request = self.client.request(proto::OpenBufferForSymbol {
2121 project_id,
2122 symbol: Some(serialize_symbol(symbol)),
2123 });
2124 cx.spawn(|this, mut cx| async move {
2125 let response = request.await?;
2126 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2127 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2128 .await
2129 })
2130 } else {
2131 Task::ready(Err(anyhow!("project does not have a remote id")))
2132 }
2133 }
2134
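    /// Requests completions at `position` and converts the language server's
    /// text edits into anchored ranges. Completions whose edit ranges no
    /// longer fit the current buffer contents are dropped, and
    /// insert-and-replace edits are currently unsupported.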
2135 pub fn completions<T: ToPointUtf16>(
2136 &self,
2137 source_buffer_handle: &ModelHandle<Buffer>,
2138 position: T,
2139 cx: &mut ModelContext<Self>,
2140 ) -> Task<Result<Vec<Completion>>> {
2141 let source_buffer_handle = source_buffer_handle.clone();
2142 let source_buffer = source_buffer_handle.read(cx);
2143 let buffer_id = source_buffer.remote_id();
2144 let language = source_buffer.language().cloned();
2145 let worktree;
2146 let buffer_abs_path;
2147 if let Some(file) = File::from_dyn(source_buffer.file()) {
2148 worktree = file.worktree.clone();
2149 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2150 } else {
2151 return Task::ready(Ok(Default::default()));
2152 };
2153
2154 let position = position.to_point_utf16(source_buffer);
2155 let anchor = source_buffer.anchor_after(position);
2156
2157 if worktree.read(cx).as_local().is_some() {
2158 let buffer_abs_path = buffer_abs_path.unwrap();
2159 let lang_server =
2160 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2161 server.clone()
2162 } else {
2163 return Task::ready(Ok(Default::default()));
2164 };
2165
2166 cx.spawn(|_, cx| async move {
2167 let completions = lang_server
2168 .request::<lsp::request::Completion>(lsp::CompletionParams {
2169 text_document_position: lsp::TextDocumentPositionParams::new(
2170 lsp::TextDocumentIdentifier::new(
2171 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2172 ),
2173 position.to_lsp_position(),
2174 ),
2175 context: Default::default(),
2176 work_done_progress_params: Default::default(),
2177 partial_result_params: Default::default(),
2178 })
2179 .await
2180 .context("lsp completion request failed")?;
2181
2182 let completions = if let Some(completions) = completions {
2183 match completions {
2184 lsp::CompletionResponse::Array(completions) => completions,
2185 lsp::CompletionResponse::List(list) => list.items,
2186 }
2187 } else {
2188 Default::default()
2189 };
2190
2191 source_buffer_handle.read_with(&cx, |this, _| {
2192 Ok(completions
2193 .into_iter()
2194 .filter_map(|lsp_completion| {
2195 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2196 lsp::CompletionTextEdit::Edit(edit) => {
2197 (range_from_lsp(edit.range), edit.new_text.clone())
2198 }
2199 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2200 log::info!("unsupported insert/replace completion");
2201 return None;
2202 }
2203 };
2204
2205 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2206 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2207 if clipped_start == old_range.start && clipped_end == old_range.end {
2208 Some(Completion {
2209 old_range: this.anchor_before(old_range.start)
2210 ..this.anchor_after(old_range.end),
2211 new_text,
2212 label: language
2213 .as_ref()
2214 .and_then(|l| l.label_for_completion(&lsp_completion))
2215 .unwrap_or_else(|| {
2216 CodeLabel::plain(
2217 lsp_completion.label.clone(),
2218 lsp_completion.filter_text.as_deref(),
2219 )
2220 }),
2221 lsp_completion,
2222 })
2223 } else {
2224 None
2225 }
2226 })
2227 .collect())
2228 })
2229 })
2230 } else if let Some(project_id) = self.remote_id() {
2231 let rpc = self.client.clone();
2232 let message = proto::GetCompletions {
2233 project_id,
2234 buffer_id,
2235 position: Some(language::proto::serialize_anchor(&anchor)),
2236 version: serialize_version(&source_buffer.version()),
2237 };
2238 cx.spawn_weak(|_, mut cx| async move {
2239 let response = rpc.request(message).await?;
2240
2241 source_buffer_handle
2242 .update(&mut cx, |buffer, _| {
2243 buffer.wait_for_version(deserialize_version(response.version))
2244 })
2245 .await;
2246
2247 response
2248 .completions
2249 .into_iter()
2250 .map(|completion| {
2251 language::proto::deserialize_completion(completion, language.as_ref())
2252 })
2253 .collect()
2254 })
2255 } else {
2256 Task::ready(Ok(Default::default()))
2257 }
2258 }
2259
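    /// Resolves `completion` with the language server and applies any
    /// `additional_text_edits` it returns, optionally keeping the resulting
    /// transaction in the buffer's undo history.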
2260 pub fn apply_additional_edits_for_completion(
2261 &self,
2262 buffer_handle: ModelHandle<Buffer>,
2263 completion: Completion,
2264 push_to_history: bool,
2265 cx: &mut ModelContext<Self>,
2266 ) -> Task<Result<Option<Transaction>>> {
2267 let buffer = buffer_handle.read(cx);
2268 let buffer_id = buffer.remote_id();
2269
2270 if self.is_local() {
2271 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2272 server.clone()
2273 } else {
2274 return Task::ready(Ok(Default::default()));
2275 };
2276
2277 cx.spawn(|this, mut cx| async move {
2278 let resolved_completion = lang_server
2279 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2280 .await?;
2281 if let Some(edits) = resolved_completion.additional_text_edits {
2282 let edits = this
2283 .update(&mut cx, |this, cx| {
2284 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2285 })
2286 .await?;
2287 buffer_handle.update(&mut cx, |buffer, cx| {
2288 buffer.finalize_last_transaction();
2289 buffer.start_transaction();
2290 for (range, text) in edits {
2291 buffer.edit([range], text, cx);
2292 }
2293 let transaction = if buffer.end_transaction(cx).is_some() {
2294 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2295 if !push_to_history {
2296 buffer.forget_transaction(transaction.id);
2297 }
2298 Some(transaction)
2299 } else {
2300 None
2301 };
2302 Ok(transaction)
2303 })
2304 } else {
2305 Ok(None)
2306 }
2307 })
2308 } else if let Some(project_id) = self.remote_id() {
2309 let client = self.client.clone();
2310 cx.spawn(|_, mut cx| async move {
2311 let response = client
2312 .request(proto::ApplyCompletionAdditionalEdits {
2313 project_id,
2314 buffer_id,
2315 completion: Some(language::proto::serialize_completion(&completion)),
2316 })
2317 .await?;
2318
2319 if let Some(transaction) = response.transaction {
2320 let transaction = language::proto::deserialize_transaction(transaction)?;
2321 buffer_handle
2322 .update(&mut cx, |buffer, _| {
2323 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2324 })
2325 .await;
2326 if push_to_history {
2327 buffer_handle.update(&mut cx, |buffer, _| {
2328 buffer.push_transaction(transaction.clone(), Instant::now());
2329 });
2330 }
2331 Ok(Some(transaction))
2332 } else {
2333 Ok(None)
2334 }
2335 })
2336 } else {
2337 Task::ready(Err(anyhow!("project does not have a remote id")))
2338 }
2339 }
2340
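    /// Requests the code actions available for `range`, limited to the
    /// quickfix, refactor, and refactor-extract kinds, from the buffer's
    /// language server or from the remote host.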
2341 pub fn code_actions<T: ToOffset>(
2342 &self,
2343 buffer_handle: &ModelHandle<Buffer>,
2344 range: Range<T>,
2345 cx: &mut ModelContext<Self>,
2346 ) -> Task<Result<Vec<CodeAction>>> {
2347 let buffer_handle = buffer_handle.clone();
2348 let buffer = buffer_handle.read(cx);
2349 let buffer_id = buffer.remote_id();
2350 let worktree;
2351 let buffer_abs_path;
2352 if let Some(file) = File::from_dyn(buffer.file()) {
2353 worktree = file.worktree.clone();
2354 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2355 } else {
2356 return Task::ready(Ok(Default::default()));
2357 };
2358 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2359
2360 if worktree.read(cx).as_local().is_some() {
2361 let buffer_abs_path = buffer_abs_path.unwrap();
2362 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2363 server.clone()
2364 } else {
2365 return Task::ready(Ok(Default::default()));
2366 };
2367
2368 let lsp_range = lsp::Range::new(
2369 range.start.to_point_utf16(buffer).to_lsp_position(),
2370 range.end.to_point_utf16(buffer).to_lsp_position(),
2371 );
2372 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2374 return Ok(Default::default());
2375 }
2376
2377 Ok(lang_server
2378 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2379 text_document: lsp::TextDocumentIdentifier::new(
2380 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2381 ),
2382 range: lsp_range,
2383 work_done_progress_params: Default::default(),
2384 partial_result_params: Default::default(),
2385 context: lsp::CodeActionContext {
2386 diagnostics: Default::default(),
2387 only: Some(vec![
2388 lsp::CodeActionKind::QUICKFIX,
2389 lsp::CodeActionKind::REFACTOR,
2390 lsp::CodeActionKind::REFACTOR_EXTRACT,
2391 ]),
2392 },
2393 })
2394 .await?
2395 .unwrap_or_default()
2396 .into_iter()
2397 .filter_map(|entry| {
2398 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2399 Some(CodeAction {
2400 range: range.clone(),
2401 lsp_action,
2402 })
2403 } else {
2404 None
2405 }
2406 })
2407 .collect())
2408 })
2409 } else if let Some(project_id) = self.remote_id() {
2410 let rpc = self.client.clone();
2411 let version = buffer.version();
2412 cx.spawn_weak(|_, mut cx| async move {
2413 let response = rpc
2414 .request(proto::GetCodeActions {
2415 project_id,
2416 buffer_id,
2417 start: Some(language::proto::serialize_anchor(&range.start)),
2418 end: Some(language::proto::serialize_anchor(&range.end)),
2419 version: serialize_version(&version),
2420 })
2421 .await?;
2422
2423 buffer_handle
2424 .update(&mut cx, |buffer, _| {
2425 buffer.wait_for_version(deserialize_version(response.version))
2426 })
2427 .await;
2428
2429 response
2430 .actions
2431 .into_iter()
2432 .map(language::proto::deserialize_code_action)
2433 .collect()
2434 })
2435 } else {
2436 Task::ready(Ok(Default::default()))
2437 }
2438 }
2439
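    /// Applies `action`: when the action carries resolve data its range is
    /// refreshed and it is resolved with the language server, otherwise the
    /// actions for its range are re-requested and matched by title. Any
    /// resulting workspace edit is then materialized into a
    /// [`ProjectTransaction`].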
2440 pub fn apply_code_action(
2441 &self,
2442 buffer_handle: ModelHandle<Buffer>,
2443 mut action: CodeAction,
2444 push_to_history: bool,
2445 cx: &mut ModelContext<Self>,
2446 ) -> Task<Result<ProjectTransaction>> {
2447 if self.is_local() {
2448 let buffer = buffer_handle.read(cx);
2449 let lang_name = if let Some(lang) = buffer.language() {
2450 lang.name()
2451 } else {
2452 return Task::ready(Ok(Default::default()));
2453 };
2454 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2455 server.clone()
2456 } else {
2457 return Task::ready(Ok(Default::default()));
2458 };
2459 let range = action.range.to_point_utf16(buffer);
2460
2461 cx.spawn(|this, mut cx| async move {
2462 if let Some(lsp_range) = action
2463 .lsp_action
2464 .data
2465 .as_mut()
2466 .and_then(|d| d.get_mut("codeActionParams"))
2467 .and_then(|d| d.get_mut("range"))
2468 {
2469 *lsp_range = serde_json::to_value(&lsp::Range::new(
2470 range.start.to_lsp_position(),
2471 range.end.to_lsp_position(),
2472 ))
2473 .unwrap();
2474 action.lsp_action = lang_server
2475 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2476 .await?;
2477 } else {
2478 let actions = this
2479 .update(&mut cx, |this, cx| {
2480 this.code_actions(&buffer_handle, action.range, cx)
2481 })
2482 .await?;
2483 action.lsp_action = actions
2484 .into_iter()
2485 .find(|a| a.lsp_action.title == action.lsp_action.title)
2486 .ok_or_else(|| anyhow!("code action is outdated"))?
2487 .lsp_action;
2488 }
2489
2490 if let Some(edit) = action.lsp_action.edit {
2491 Self::deserialize_workspace_edit(
2492 this,
2493 edit,
2494 push_to_history,
2495 lang_name,
2496 lang_server,
2497 &mut cx,
2498 )
2499 .await
2500 } else {
2501 Ok(ProjectTransaction::default())
2502 }
2503 })
2504 } else if let Some(project_id) = self.remote_id() {
2505 let client = self.client.clone();
2506 let request = proto::ApplyCodeAction {
2507 project_id,
2508 buffer_id: buffer_handle.read(cx).remote_id(),
2509 action: Some(language::proto::serialize_code_action(&action)),
2510 };
2511 cx.spawn(|this, mut cx| async move {
2512 let response = client
2513 .request(request)
2514 .await?
2515 .transaction
2516 .ok_or_else(|| anyhow!("missing transaction"))?;
2517 this.update(&mut cx, |this, cx| {
2518 this.deserialize_project_transaction(response, push_to_history, cx)
2519 })
2520 .await
2521 })
2522 } else {
2523 Task::ready(Err(anyhow!("project does not have a remote id")))
2524 }
2525 }
2526
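    /// Applies an LSP workspace edit to the local project: file creations,
    /// renames, and deletions go through the [`Fs`] trait, text edits are
    /// applied to buffers opened via the language server, and all resulting
    /// transactions are collected into a [`ProjectTransaction`].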
2527 async fn deserialize_workspace_edit(
2528 this: ModelHandle<Self>,
2529 edit: lsp::WorkspaceEdit,
2530 push_to_history: bool,
2531 language_name: Arc<str>,
2532 language_server: Arc<LanguageServer>,
2533 cx: &mut AsyncAppContext,
2534 ) -> Result<ProjectTransaction> {
2535 let fs = this.read_with(cx, |this, _| this.fs.clone());
2536 let mut operations = Vec::new();
2537 if let Some(document_changes) = edit.document_changes {
2538 match document_changes {
2539 lsp::DocumentChanges::Edits(edits) => {
2540 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2541 }
2542 lsp::DocumentChanges::Operations(ops) => operations = ops,
2543 }
2544 } else if let Some(changes) = edit.changes {
2545 operations.extend(changes.into_iter().map(|(uri, edits)| {
2546 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2547 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2548 uri,
2549 version: None,
2550 },
2551 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2552 })
2553 }));
2554 }
2555
2556 let mut project_transaction = ProjectTransaction::default();
2557 for operation in operations {
2558 match operation {
2559 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2560 let abs_path = op
2561 .uri
2562 .to_file_path()
2563 .map_err(|_| anyhow!("can't convert URI to path"))?;
2564
2565 if let Some(parent_path) = abs_path.parent() {
2566 fs.create_dir(parent_path).await?;
2567 }
                    // `Path::ends_with` matches whole path components and never
                    // a trailing "/", so inspect the original URI to detect
                    // directory-creation requests.
                    if op.uri.as_str().ends_with('/') {
2569 fs.create_dir(&abs_path).await?;
2570 } else {
2571 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2572 .await?;
2573 }
2574 }
2575 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2576 let source_abs_path = op
2577 .old_uri
2578 .to_file_path()
2579 .map_err(|_| anyhow!("can't convert URI to path"))?;
2580 let target_abs_path = op
2581 .new_uri
2582 .to_file_path()
2583 .map_err(|_| anyhow!("can't convert URI to path"))?;
2584 fs.rename(
2585 &source_abs_path,
2586 &target_abs_path,
2587 op.options.map(Into::into).unwrap_or_default(),
2588 )
2589 .await?;
2590 }
2591 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2592 let abs_path = op
2593 .uri
2594 .to_file_path()
2595 .map_err(|_| anyhow!("can't convert URI to path"))?;
2596 let options = op.options.map(Into::into).unwrap_or_default();
                    // As with file creation above, `Path::ends_with("/")` never
                    // matches; the URI's trailing slash marks a directory.
                    if op.uri.as_str().ends_with('/') {
2598 fs.remove_dir(&abs_path, options).await?;
2599 } else {
2600 fs.remove_file(&abs_path, options).await?;
2601 }
2602 }
2603 lsp::DocumentChangeOperation::Edit(op) => {
2604 let buffer_to_edit = this
2605 .update(cx, |this, cx| {
2606 this.open_local_buffer_via_lsp(
2607 op.text_document.uri,
2608 language_name.clone(),
2609 language_server.clone(),
2610 cx,
2611 )
2612 })
2613 .await?;
2614
2615 let edits = this
2616 .update(cx, |this, cx| {
2617 let edits = op.edits.into_iter().map(|edit| match edit {
2618 lsp::OneOf::Left(edit) => edit,
2619 lsp::OneOf::Right(edit) => edit.text_edit,
2620 });
2621 this.edits_from_lsp(
2622 &buffer_to_edit,
2623 edits,
2624 op.text_document.version,
2625 cx,
2626 )
2627 })
2628 .await?;
2629
2630 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2631 buffer.finalize_last_transaction();
2632 buffer.start_transaction();
2633 for (range, text) in edits {
2634 buffer.edit([range], text, cx);
2635 }
2636 let transaction = if buffer.end_transaction(cx).is_some() {
2637 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2638 if !push_to_history {
2639 buffer.forget_transaction(transaction.id);
2640 }
2641 Some(transaction)
2642 } else {
2643 None
2644 };
2645
2646 transaction
2647 });
2648 if let Some(transaction) = transaction {
2649 project_transaction.0.insert(buffer_to_edit, transaction);
2650 }
2651 }
2652 }
2653 }
2654
2655 Ok(project_transaction)
2656 }
2657
2658 pub fn prepare_rename<T: ToPointUtf16>(
2659 &self,
2660 buffer: ModelHandle<Buffer>,
2661 position: T,
2662 cx: &mut ModelContext<Self>,
2663 ) -> Task<Result<Option<Range<Anchor>>>> {
2664 let position = position.to_point_utf16(buffer.read(cx));
2665 self.request_lsp(buffer, PrepareRename { position }, cx)
2666 }
2667
2668 pub fn perform_rename<T: ToPointUtf16>(
2669 &self,
2670 buffer: ModelHandle<Buffer>,
2671 position: T,
2672 new_name: String,
2673 push_to_history: bool,
2674 cx: &mut ModelContext<Self>,
2675 ) -> Task<Result<ProjectTransaction>> {
2676 let position = position.to_point_utf16(buffer.read(cx));
2677 self.request_lsp(
2678 buffer,
2679 PerformRename {
2680 position,
2681 new_name,
2682 push_to_history,
2683 },
2684 cx,
2685 )
2686 }
2687
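    /// Runs `query` across the project. Locally, worktree paths are scanned by
    /// parallel background workers, matching files are opened as buffers, and
    /// each buffer is searched to produce anchored match ranges; on remote
    /// projects the query is forwarded to the host as a `SearchProject`
    /// request.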
2688 pub fn search(
2689 &self,
2690 query: SearchQuery,
2691 cx: &mut ModelContext<Self>,
2692 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2693 if self.is_local() {
2694 let snapshots = self
2695 .visible_worktrees(cx)
2696 .filter_map(|tree| {
2697 let tree = tree.read(cx).as_local()?;
2698 Some(tree.snapshot())
2699 })
2700 .collect::<Vec<_>>();
2701
2702 let background = cx.background().clone();
2703 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2704 if path_count == 0 {
2705 return Task::ready(Ok(Default::default()));
2706 }
2707 let workers = background.num_cpus().min(path_count);
2708 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2709 cx.background()
2710 .spawn({
2711 let fs = self.fs.clone();
2712 let background = cx.background().clone();
2713 let query = query.clone();
2714 async move {
2715 let fs = &fs;
2716 let query = &query;
2717 let matching_paths_tx = &matching_paths_tx;
2718 let paths_per_worker = (path_count + workers - 1) / workers;
2719 let snapshots = &snapshots;
2720 background
2721 .scoped(|scope| {
2722 for worker_ix in 0..workers {
2723 let worker_start_ix = worker_ix * paths_per_worker;
2724 let worker_end_ix = worker_start_ix + paths_per_worker;
2725 scope.spawn(async move {
2726 let mut snapshot_start_ix = 0;
2727 let mut abs_path = PathBuf::new();
2728 for snapshot in snapshots {
2729 let snapshot_end_ix =
2730 snapshot_start_ix + snapshot.visible_file_count();
2731 if worker_end_ix <= snapshot_start_ix {
2732 break;
2733 } else if worker_start_ix > snapshot_end_ix {
2734 snapshot_start_ix = snapshot_end_ix;
2735 continue;
2736 } else {
2737 let start_in_snapshot = worker_start_ix
2738 .saturating_sub(snapshot_start_ix);
2739 let end_in_snapshot =
2740 cmp::min(worker_end_ix, snapshot_end_ix)
2741 - snapshot_start_ix;
2742
2743 for entry in snapshot
2744 .files(false, start_in_snapshot)
2745 .take(end_in_snapshot - start_in_snapshot)
2746 {
2747 if matching_paths_tx.is_closed() {
2748 break;
2749 }
2750
2751 abs_path.clear();
2752 abs_path.push(&snapshot.abs_path());
2753 abs_path.push(&entry.path);
2754 let matches = if let Some(file) =
2755 fs.open_sync(&abs_path).await.log_err()
2756 {
2757 query.detect(file).unwrap_or(false)
2758 } else {
2759 false
2760 };
2761
2762 if matches {
2763 let project_path =
2764 (snapshot.id(), entry.path.clone());
2765 if matching_paths_tx
2766 .send(project_path)
2767 .await
2768 .is_err()
2769 {
2770 break;
2771 }
2772 }
2773 }
2774
2775 snapshot_start_ix = snapshot_end_ix;
2776 }
2777 }
2778 });
2779 }
2780 })
2781 .await;
2782 }
2783 })
2784 .detach();
2785
2786 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2787 let open_buffers = self
2788 .opened_buffers
2789 .values()
2790 .filter_map(|b| b.upgrade(cx))
2791 .collect::<HashSet<_>>();
2792 cx.spawn(|this, cx| async move {
2793 for buffer in &open_buffers {
2794 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2795 buffers_tx.send((buffer.clone(), snapshot)).await?;
2796 }
2797
2798 let open_buffers = Rc::new(RefCell::new(open_buffers));
2799 while let Some(project_path) = matching_paths_rx.next().await {
2800 if buffers_tx.is_closed() {
2801 break;
2802 }
2803
2804 let this = this.clone();
2805 let open_buffers = open_buffers.clone();
2806 let buffers_tx = buffers_tx.clone();
2807 cx.spawn(|mut cx| async move {
2808 if let Some(buffer) = this
2809 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2810 .await
2811 .log_err()
2812 {
2813 if open_buffers.borrow_mut().insert(buffer.clone()) {
2814 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2815 buffers_tx.send((buffer, snapshot)).await?;
2816 }
2817 }
2818
2819 Ok::<_, anyhow::Error>(())
2820 })
2821 .detach();
2822 }
2823
2824 Ok::<_, anyhow::Error>(())
2825 })
2826 .detach_and_log_err(cx);
2827
2828 let background = cx.background().clone();
2829 cx.background().spawn(async move {
2830 let query = &query;
2831 let mut matched_buffers = Vec::new();
2832 for _ in 0..workers {
2833 matched_buffers.push(HashMap::default());
2834 }
2835 background
2836 .scoped(|scope| {
2837 for worker_matched_buffers in matched_buffers.iter_mut() {
2838 let mut buffers_rx = buffers_rx.clone();
2839 scope.spawn(async move {
2840 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2841 let buffer_matches = query
2842 .search(snapshot.as_rope())
2843 .await
2844 .iter()
2845 .map(|range| {
2846 snapshot.anchor_before(range.start)
2847 ..snapshot.anchor_after(range.end)
2848 })
2849 .collect::<Vec<_>>();
2850 if !buffer_matches.is_empty() {
2851 worker_matched_buffers
2852 .insert(buffer.clone(), buffer_matches);
2853 }
2854 }
2855 });
2856 }
2857 })
2858 .await;
2859 Ok(matched_buffers.into_iter().flatten().collect())
2860 })
2861 } else if let Some(project_id) = self.remote_id() {
2862 let request = self.client.request(query.to_proto(project_id));
2863 cx.spawn(|this, mut cx| async move {
2864 let response = request.await?;
2865 let mut result = HashMap::default();
2866 for location in response.locations {
2867 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2868 let target_buffer = this
2869 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2870 .await?;
2871 let start = location
2872 .start
2873 .and_then(deserialize_anchor)
2874 .ok_or_else(|| anyhow!("missing target start"))?;
2875 let end = location
2876 .end
2877 .and_then(deserialize_anchor)
2878 .ok_or_else(|| anyhow!("missing target end"))?;
2879 result
2880 .entry(target_buffer)
2881 .or_insert(Vec::new())
2882 .push(start..end)
2883 }
2884 Ok(result)
2885 })
2886 } else {
2887 Task::ready(Ok(Default::default()))
2888 }
2889 }
2890
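    /// Dispatches an [`LspCommand`] either to the buffer's local language
    /// server or, for remote projects, to the host over RPC, falling back to a
    /// default response when neither is available.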
2891 fn request_lsp<R: LspCommand>(
2892 &self,
2893 buffer_handle: ModelHandle<Buffer>,
2894 request: R,
2895 cx: &mut ModelContext<Self>,
2896 ) -> Task<Result<R::Response>>
2897 where
2898 <R::LspRequest as lsp::request::Request>::Result: Send,
2899 {
2900 let buffer = buffer_handle.read(cx);
2901 if self.is_local() {
2902 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2903 if let Some((file, language_server)) =
2904 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2905 {
2906 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2907 return cx.spawn(|this, cx| async move {
2908 if !request.check_capabilities(&language_server.capabilities()) {
2909 return Ok(Default::default());
2910 }
2911
2912 let response = language_server
2913 .request::<R::LspRequest>(lsp_params)
2914 .await
2915 .context("lsp request failed")?;
2916 request
2917 .response_from_lsp(response, this, buffer_handle, cx)
2918 .await
2919 });
2920 }
2921 } else if let Some(project_id) = self.remote_id() {
2922 let rpc = self.client.clone();
2923 let message = request.to_proto(project_id, buffer);
2924 return cx.spawn(|this, cx| async move {
2925 let response = rpc.request(message).await?;
2926 request
2927 .response_from_proto(response, this, buffer_handle, cx)
2928 .await
2929 });
2930 }
2931 Task::ready(Ok(Default::default()))
2932 }
2933
2934 pub fn find_or_create_local_worktree(
2935 &mut self,
2936 abs_path: impl AsRef<Path>,
2937 visible: bool,
2938 cx: &mut ModelContext<Self>,
2939 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2940 let abs_path = abs_path.as_ref();
2941 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2942 Task::ready(Ok((tree.clone(), relative_path.into())))
2943 } else {
2944 let worktree = self.create_local_worktree(abs_path, visible, cx);
2945 cx.foreground()
2946 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2947 }
2948 }
2949
2950 pub fn find_local_worktree(
2951 &self,
2952 abs_path: &Path,
2953 cx: &AppContext,
2954 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2955 for tree in self.worktrees(cx) {
2956 if let Some(relative_path) = tree
2957 .read(cx)
2958 .as_local()
2959 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2960 {
2961 return Some((tree.clone(), relative_path.into()));
2962 }
2963 }
2964 None
2965 }
2966
2967 pub fn is_shared(&self) -> bool {
2968 match &self.client_state {
2969 ProjectClientState::Local { is_shared, .. } => *is_shared,
2970 ProjectClientState::Remote { .. } => false,
2971 }
2972 }
2973
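    /// Creates (or reuses an in-flight load of) a local worktree rooted at
    /// `abs_path`, registering or sharing it with the server when the project
    /// already has a remote id.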
2974 fn create_local_worktree(
2975 &mut self,
2976 abs_path: impl AsRef<Path>,
2977 visible: bool,
2978 cx: &mut ModelContext<Self>,
2979 ) -> Task<Result<ModelHandle<Worktree>>> {
2980 let fs = self.fs.clone();
2981 let client = self.client.clone();
2982 let path: Arc<Path> = abs_path.as_ref().into();
2983 let task = self
2984 .loading_local_worktrees
2985 .entry(path.clone())
2986 .or_insert_with(|| {
2987 cx.spawn(|project, mut cx| {
2988 async move {
2989 let worktree =
2990 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2991 .await;
2992 project.update(&mut cx, |project, _| {
2993 project.loading_local_worktrees.remove(&path);
2994 });
2995 let worktree = worktree?;
2996
2997 let (remote_project_id, is_shared) =
2998 project.update(&mut cx, |project, cx| {
2999 project.add_worktree(&worktree, cx);
3000 (project.remote_id(), project.is_shared())
3001 });
3002
3003 if let Some(project_id) = remote_project_id {
3004 if is_shared {
3005 worktree
3006 .update(&mut cx, |worktree, cx| {
3007 worktree.as_local_mut().unwrap().share(project_id, cx)
3008 })
3009 .await?;
3010 } else {
3011 worktree
3012 .update(&mut cx, |worktree, cx| {
3013 worktree.as_local_mut().unwrap().register(project_id, cx)
3014 })
3015 .await?;
3016 }
3017 }
3018
3019 Ok(worktree)
3020 }
                .map_err(Arc::new)
3022 })
3023 .shared()
3024 })
3025 .clone();
3026 cx.foreground().spawn(async move {
3027 match task.await {
3028 Ok(worktree) => Ok(worktree),
3029 Err(err) => Err(anyhow!("{}", err)),
3030 }
3031 })
3032 }
3033
3034 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3035 self.worktrees.retain(|worktree| {
3036 worktree
3037 .upgrade(cx)
3038 .map_or(false, |w| w.read(cx).id() != id)
3039 });
3040 cx.notify();
3041 }
3042
3043 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3044 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3045 if worktree.read(cx).is_local() {
3046 cx.subscribe(&worktree, |this, worktree, _, cx| {
3047 this.update_local_worktree_buffers(worktree, cx);
3048 })
3049 .detach();
3050 }
3051
3052 let push_strong_handle = {
3053 let worktree = worktree.read(cx);
3054 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3055 };
3056 if push_strong_handle {
3057 self.worktrees
3058 .push(WorktreeHandle::Strong(worktree.clone()));
3059 } else {
3060 cx.observe_release(&worktree, |this, _, cx| {
3061 this.worktrees
3062 .retain(|worktree| worktree.upgrade(cx).is_some());
3063 cx.notify();
3064 })
3065 .detach();
3066 self.worktrees
3067 .push(WorktreeHandle::Weak(worktree.downgrade()));
3068 }
3069 cx.notify();
3070 }
3071
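    /// After a local worktree changes, re-resolves the `File` for every open
    /// buffer that belongs to it (by entry id, then by path), notifies remote
    /// collaborators of the updated file metadata, and prunes buffers that
    /// have been dropped.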
3072 fn update_local_worktree_buffers(
3073 &mut self,
3074 worktree_handle: ModelHandle<Worktree>,
3075 cx: &mut ModelContext<Self>,
3076 ) {
3077 let snapshot = worktree_handle.read(cx).snapshot();
3078 let mut buffers_to_delete = Vec::new();
3079 for (buffer_id, buffer) in &self.opened_buffers {
3080 if let Some(buffer) = buffer.upgrade(cx) {
3081 buffer.update(cx, |buffer, cx| {
3082 if let Some(old_file) = File::from_dyn(buffer.file()) {
3083 if old_file.worktree != worktree_handle {
3084 return;
3085 }
3086
3087 let new_file = if let Some(entry) = old_file
3088 .entry_id
3089 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3090 {
3091 File {
3092 is_local: true,
3093 entry_id: Some(entry.id),
3094 mtime: entry.mtime,
3095 path: entry.path.clone(),
3096 worktree: worktree_handle.clone(),
3097 }
3098 } else if let Some(entry) =
3099 snapshot.entry_for_path(old_file.path().as_ref())
3100 {
3101 File {
3102 is_local: true,
3103 entry_id: Some(entry.id),
3104 mtime: entry.mtime,
3105 path: entry.path.clone(),
3106 worktree: worktree_handle.clone(),
3107 }
3108 } else {
3109 File {
3110 is_local: true,
3111 entry_id: None,
3112 path: old_file.path().clone(),
3113 mtime: old_file.mtime(),
3114 worktree: worktree_handle.clone(),
3115 }
3116 };
3117
3118 if let Some(project_id) = self.remote_id() {
3119 self.client
3120 .send(proto::UpdateBufferFile {
3121 project_id,
3122 buffer_id: *buffer_id as u64,
3123 file: Some(new_file.to_proto()),
3124 })
3125 .log_err();
3126 }
3127 buffer.file_updated(Box::new(new_file), cx).detach();
3128 }
3129 });
3130 } else {
3131 buffers_to_delete.push(*buffer_id);
3132 }
3133 }
3134
3135 for buffer_id in buffers_to_delete {
3136 self.opened_buffers.remove(&buffer_id);
3137 }
3138 }
3139
3140 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3141 let new_active_entry = entry.and_then(|project_path| {
3142 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3143 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3144 Some(ProjectEntry {
3145 worktree_id: project_path.worktree_id,
3146 entry_id: entry.id,
3147 })
3148 });
3149 if new_active_entry != self.active_entry {
3150 self.active_entry = new_active_entry;
3151 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3152 }
3153 }
3154
3155 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3156 self.language_servers_with_diagnostics_running > 0
3157 }
3158
3159 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3160 let mut summary = DiagnosticSummary::default();
3161 for (_, path_summary) in self.diagnostic_summaries(cx) {
3162 summary.error_count += path_summary.error_count;
3163 summary.warning_count += path_summary.warning_count;
3164 summary.info_count += path_summary.info_count;
3165 summary.hint_count += path_summary.hint_count;
3166 }
3167 summary
3168 }
3169
3170 pub fn diagnostic_summaries<'a>(
3171 &'a self,
3172 cx: &'a AppContext,
3173 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3174 self.worktrees(cx).flat_map(move |worktree| {
3175 let worktree = worktree.read(cx);
3176 let worktree_id = worktree.id();
3177 worktree
3178 .diagnostic_summaries()
3179 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3180 })
3181 }
3182
3183 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3184 self.language_servers_with_diagnostics_running += 1;
3185 if self.language_servers_with_diagnostics_running == 1 {
3186 cx.emit(Event::DiskBasedDiagnosticsStarted);
3187 }
3188 }
3189
3190 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3191 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3192 self.language_servers_with_diagnostics_running -= 1;
3193 if self.language_servers_with_diagnostics_running == 0 {
3194 cx.emit(Event::DiskBasedDiagnosticsFinished);
3195 }
3196 }
3197
3198 pub fn active_entry(&self) -> Option<ProjectEntry> {
3199 self.active_entry
3200 }
3201
3202 // RPC message handlers
3203
3204 async fn handle_unshare_project(
3205 this: ModelHandle<Self>,
3206 _: TypedEnvelope<proto::UnshareProject>,
3207 _: Arc<Client>,
3208 mut cx: AsyncAppContext,
3209 ) -> Result<()> {
3210 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3211 Ok(())
3212 }
3213
3214 async fn handle_add_collaborator(
3215 this: ModelHandle<Self>,
3216 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3217 _: Arc<Client>,
3218 mut cx: AsyncAppContext,
3219 ) -> Result<()> {
3220 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3221 let collaborator = envelope
3222 .payload
3223 .collaborator
3224 .take()
3225 .ok_or_else(|| anyhow!("empty collaborator"))?;
3226
3227 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3228 this.update(&mut cx, |this, cx| {
3229 this.collaborators
3230 .insert(collaborator.peer_id, collaborator);
3231 cx.notify();
3232 });
3233
3234 Ok(())
3235 }
3236
3237 async fn handle_remove_collaborator(
3238 this: ModelHandle<Self>,
3239 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3240 _: Arc<Client>,
3241 mut cx: AsyncAppContext,
3242 ) -> Result<()> {
3243 this.update(&mut cx, |this, cx| {
3244 let peer_id = PeerId(envelope.payload.peer_id);
3245 let replica_id = this
3246 .collaborators
3247 .remove(&peer_id)
3248 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3249 .replica_id;
3250 for (_, buffer) in &this.opened_buffers {
3251 if let Some(buffer) = buffer.upgrade(cx) {
3252 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3253 }
3254 }
3255 cx.notify();
3256 Ok(())
3257 })
3258 }
3259
3260 async fn handle_register_worktree(
3261 this: ModelHandle<Self>,
3262 envelope: TypedEnvelope<proto::RegisterWorktree>,
3263 client: Arc<Client>,
3264 mut cx: AsyncAppContext,
3265 ) -> Result<()> {
3266 this.update(&mut cx, |this, cx| {
3267 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3268 let replica_id = this.replica_id();
3269 let worktree = proto::Worktree {
3270 id: envelope.payload.worktree_id,
3271 root_name: envelope.payload.root_name,
3272 entries: Default::default(),
3273 diagnostic_summaries: Default::default(),
3274 visible: envelope.payload.visible,
3275 };
3276 let (worktree, load_task) =
3277 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3278 this.add_worktree(&worktree, cx);
3279 load_task.detach();
3280 Ok(())
3281 })
3282 }
3283
3284 async fn handle_unregister_worktree(
3285 this: ModelHandle<Self>,
3286 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3287 _: Arc<Client>,
3288 mut cx: AsyncAppContext,
3289 ) -> Result<()> {
3290 this.update(&mut cx, |this, cx| {
3291 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3292 this.remove_worktree(worktree_id, cx);
3293 Ok(())
3294 })
3295 }
3296
3297 async fn handle_update_worktree(
3298 this: ModelHandle<Self>,
3299 envelope: TypedEnvelope<proto::UpdateWorktree>,
3300 _: Arc<Client>,
3301 mut cx: AsyncAppContext,
3302 ) -> Result<()> {
3303 this.update(&mut cx, |this, cx| {
3304 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3305 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3306 worktree.update(cx, |worktree, _| {
3307 let worktree = worktree.as_remote_mut().unwrap();
3308 worktree.update_from_remote(envelope)
3309 })?;
3310 }
3311 Ok(())
3312 })
3313 }
3314
3315 async fn handle_update_diagnostic_summary(
3316 this: ModelHandle<Self>,
3317 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3318 _: Arc<Client>,
3319 mut cx: AsyncAppContext,
3320 ) -> Result<()> {
3321 this.update(&mut cx, |this, cx| {
3322 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3323 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3324 if let Some(summary) = envelope.payload.summary {
3325 let project_path = ProjectPath {
3326 worktree_id,
3327 path: Path::new(&summary.path).into(),
3328 };
3329 worktree.update(cx, |worktree, _| {
3330 worktree
3331 .as_remote_mut()
3332 .unwrap()
3333 .update_diagnostic_summary(project_path.path.clone(), &summary);
3334 });
3335 cx.emit(Event::DiagnosticsUpdated(project_path));
3336 }
3337 }
3338 Ok(())
3339 })
3340 }
3341
3342 async fn handle_start_language_server(
3343 this: ModelHandle<Self>,
3344 envelope: TypedEnvelope<proto::StartLanguageServer>,
3345 _: Arc<Client>,
3346 mut cx: AsyncAppContext,
3347 ) -> Result<()> {
3348 let server = envelope
3349 .payload
3350 .server
3351 .ok_or_else(|| anyhow!("invalid server"))?;
3352 this.update(&mut cx, |this, cx| {
3353 this.language_server_statuses.insert(
3354 server.id as usize,
3355 LanguageServerStatus {
3356 name: server.name,
3357 pending_work: Default::default(),
3358 pending_diagnostic_updates: 0,
3359 },
3360 );
3361 cx.notify();
3362 });
3363 Ok(())
3364 }
3365
3366 async fn handle_update_language_server(
3367 this: ModelHandle<Self>,
3368 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3369 _: Arc<Client>,
3370 mut cx: AsyncAppContext,
3371 ) -> Result<()> {
3372 let language_server_id = envelope.payload.language_server_id as usize;
3373 match envelope
3374 .payload
3375 .variant
3376 .ok_or_else(|| anyhow!("invalid variant"))?
3377 {
3378 proto::update_language_server::Variant::WorkStart(payload) => {
3379 this.update(&mut cx, |this, cx| {
3380 this.on_lsp_work_start(language_server_id, payload.token, cx);
3381 })
3382 }
3383 proto::update_language_server::Variant::WorkProgress(payload) => {
3384 this.update(&mut cx, |this, cx| {
3385 this.on_lsp_work_progress(
3386 language_server_id,
3387 payload.token,
3388 LanguageServerProgress {
3389 message: payload.message,
3390 percentage: payload.percentage.map(|p| p as usize),
3391 last_update_at: Instant::now(),
3392 },
3393 cx,
3394 );
3395 })
3396 }
3397 proto::update_language_server::Variant::WorkEnd(payload) => {
3398 this.update(&mut cx, |this, cx| {
3399 this.on_lsp_work_end(language_server_id, payload.token, cx);
3400 })
3401 }
3402 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3403 this.update(&mut cx, |this, cx| {
3404 this.disk_based_diagnostics_started(cx);
3405 })
3406 }
3407 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3408 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3409 }
3410 }
3411
3412 Ok(())
3413 }
3414
3415 async fn handle_update_buffer(
3416 this: ModelHandle<Self>,
3417 envelope: TypedEnvelope<proto::UpdateBuffer>,
3418 _: Arc<Client>,
3419 mut cx: AsyncAppContext,
3420 ) -> Result<()> {
3421 this.update(&mut cx, |this, cx| {
3422 let payload = envelope.payload.clone();
3423 let buffer_id = payload.buffer_id;
3424 let ops = payload
3425 .operations
3426 .into_iter()
3427 .map(|op| language::proto::deserialize_operation(op))
3428 .collect::<Result<Vec<_>, _>>()?;
3429 match this.opened_buffers.entry(buffer_id) {
3430 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3431 OpenBuffer::Strong(buffer) => {
3432 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3433 }
3434 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3435 OpenBuffer::Weak(_) => {}
3436 },
3437 hash_map::Entry::Vacant(e) => {
3438 e.insert(OpenBuffer::Loading(ops));
3439 }
3440 }
3441 Ok(())
3442 })
3443 }
3444
3445 async fn handle_update_buffer_file(
3446 this: ModelHandle<Self>,
3447 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3448 _: Arc<Client>,
3449 mut cx: AsyncAppContext,
3450 ) -> Result<()> {
3451 this.update(&mut cx, |this, cx| {
3452 let payload = envelope.payload.clone();
3453 let buffer_id = payload.buffer_id;
3454 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3455 let worktree = this
3456 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3457 .ok_or_else(|| anyhow!("no such worktree"))?;
3458 let file = File::from_proto(file, worktree.clone(), cx)?;
3459 let buffer = this
3460 .opened_buffers
3461 .get_mut(&buffer_id)
3462 .and_then(|b| b.upgrade(cx))
3463 .ok_or_else(|| anyhow!("no such buffer"))?;
3464 buffer.update(cx, |buffer, cx| {
3465 buffer.file_updated(Box::new(file), cx).detach();
3466 });
3467 Ok(())
3468 })
3469 }
3470
3471 async fn handle_save_buffer(
3472 this: ModelHandle<Self>,
3473 envelope: TypedEnvelope<proto::SaveBuffer>,
3474 _: Arc<Client>,
3475 mut cx: AsyncAppContext,
3476 ) -> Result<proto::BufferSaved> {
3477 let buffer_id = envelope.payload.buffer_id;
3478 let requested_version = deserialize_version(envelope.payload.version);
3479
3480 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3481 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3482 let buffer = this
3483 .opened_buffers
3484 .get(&buffer_id)
3485 .map(|buffer| buffer.upgrade(cx).unwrap())
3486 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3487 Ok::<_, anyhow::Error>((project_id, buffer))
3488 })?;
3489 buffer
3490 .update(&mut cx, |buffer, _| {
3491 buffer.wait_for_version(requested_version)
3492 })
3493 .await;
3494
3495 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3496 Ok(proto::BufferSaved {
3497 project_id,
3498 buffer_id,
3499 version: serialize_version(&saved_version),
3500 mtime: Some(mtime.into()),
3501 })
3502 }
3503
3504 async fn handle_format_buffers(
3505 this: ModelHandle<Self>,
3506 envelope: TypedEnvelope<proto::FormatBuffers>,
3507 _: Arc<Client>,
3508 mut cx: AsyncAppContext,
3509 ) -> Result<proto::FormatBuffersResponse> {
3510 let sender_id = envelope.original_sender_id()?;
3511 let format = this.update(&mut cx, |this, cx| {
3512 let mut buffers = HashSet::default();
3513 for buffer_id in &envelope.payload.buffer_ids {
3514 buffers.insert(
3515 this.opened_buffers
3516 .get(buffer_id)
3517 .map(|buffer| buffer.upgrade(cx).unwrap())
3518 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3519 );
3520 }
3521 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3522 })?;
3523
3524 let project_transaction = format.await?;
3525 let project_transaction = this.update(&mut cx, |this, cx| {
3526 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3527 });
3528 Ok(proto::FormatBuffersResponse {
3529 transaction: Some(project_transaction),
3530 })
3531 }
3532
3533 async fn handle_get_completions(
3534 this: ModelHandle<Self>,
3535 envelope: TypedEnvelope<proto::GetCompletions>,
3536 _: Arc<Client>,
3537 mut cx: AsyncAppContext,
3538 ) -> Result<proto::GetCompletionsResponse> {
3539 let position = envelope
3540 .payload
3541 .position
3542 .and_then(language::proto::deserialize_anchor)
3543 .ok_or_else(|| anyhow!("invalid position"))?;
3544 let version = deserialize_version(envelope.payload.version);
3545 let buffer = this.read_with(&cx, |this, cx| {
3546 this.opened_buffers
3547 .get(&envelope.payload.buffer_id)
3548 .map(|buffer| buffer.upgrade(cx).unwrap())
3549 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3550 })?;
3551 buffer
3552 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3553 .await;
3554 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3555 let completions = this
3556 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3557 .await?;
3558
3559 Ok(proto::GetCompletionsResponse {
3560 completions: completions
3561 .iter()
3562 .map(language::proto::serialize_completion)
3563 .collect(),
3564 version: serialize_version(&version),
3565 })
3566 }
3567
3568 async fn handle_apply_additional_edits_for_completion(
3569 this: ModelHandle<Self>,
3570 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3571 _: Arc<Client>,
3572 mut cx: AsyncAppContext,
3573 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3574 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3575 let buffer = this
3576 .opened_buffers
3577 .get(&envelope.payload.buffer_id)
3578 .map(|buffer| buffer.upgrade(cx).unwrap())
3579 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3580 let language = buffer.read(cx).language();
3581 let completion = language::proto::deserialize_completion(
3582 envelope
3583 .payload
3584 .completion
3585 .ok_or_else(|| anyhow!("invalid completion"))?,
3586 language,
3587 )?;
3588 Ok::<_, anyhow::Error>(
3589 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3590 )
3591 })?;
3592
3593 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3594 transaction: apply_additional_edits
3595 .await?
3596 .as_ref()
3597 .map(language::proto::serialize_transaction),
3598 })
3599 }
3600
3601 async fn handle_get_code_actions(
3602 this: ModelHandle<Self>,
3603 envelope: TypedEnvelope<proto::GetCodeActions>,
3604 _: Arc<Client>,
3605 mut cx: AsyncAppContext,
3606 ) -> Result<proto::GetCodeActionsResponse> {
3607 let start = envelope
3608 .payload
3609 .start
3610 .and_then(language::proto::deserialize_anchor)
3611 .ok_or_else(|| anyhow!("invalid start"))?;
3612 let end = envelope
3613 .payload
3614 .end
3615 .and_then(language::proto::deserialize_anchor)
3616 .ok_or_else(|| anyhow!("invalid end"))?;
3617 let buffer = this.update(&mut cx, |this, cx| {
3618 this.opened_buffers
3619 .get(&envelope.payload.buffer_id)
3620 .map(|buffer| buffer.upgrade(cx).unwrap())
3621 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3622 })?;
3623 buffer
3624 .update(&mut cx, |buffer, _| {
3625 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3626 })
3627 .await;
3628
3629 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3630 let code_actions = this.update(&mut cx, |this, cx| {
3631 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3632 })?;
3633
3634 Ok(proto::GetCodeActionsResponse {
3635 actions: code_actions
3636 .await?
3637 .iter()
3638 .map(language::proto::serialize_code_action)
3639 .collect(),
3640 version: serialize_version(&version),
3641 })
3642 }
3643
3644 async fn handle_apply_code_action(
3645 this: ModelHandle<Self>,
3646 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3647 _: Arc<Client>,
3648 mut cx: AsyncAppContext,
3649 ) -> Result<proto::ApplyCodeActionResponse> {
3650 let sender_id = envelope.original_sender_id()?;
3651 let action = language::proto::deserialize_code_action(
3652 envelope
3653 .payload
3654 .action
3655 .ok_or_else(|| anyhow!("invalid action"))?,
3656 )?;
3657 let apply_code_action = this.update(&mut cx, |this, cx| {
3658 let buffer = this
3659 .opened_buffers
3660 .get(&envelope.payload.buffer_id)
3661 .map(|buffer| buffer.upgrade(cx).unwrap())
3662 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3663 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3664 })?;
3665
3666 let project_transaction = apply_code_action.await?;
3667 let project_transaction = this.update(&mut cx, |this, cx| {
3668 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3669 });
3670 Ok(proto::ApplyCodeActionResponse {
3671 transaction: Some(project_transaction),
3672 })
3673 }
3674
3675 async fn handle_lsp_command<T: LspCommand>(
3676 this: ModelHandle<Self>,
3677 envelope: TypedEnvelope<T::ProtoRequest>,
3678 _: Arc<Client>,
3679 mut cx: AsyncAppContext,
3680 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3681 where
3682 <T::LspRequest as lsp::request::Request>::Result: Send,
3683 {
3684 let sender_id = envelope.original_sender_id()?;
3685 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3686 let buffer_handle = this.read_with(&cx, |this, _| {
3687 this.opened_buffers
3688 .get(&buffer_id)
3689 .map(|buffer| buffer.upgrade(&cx).unwrap())
3690 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3691 })?;
3692 let request = T::from_proto(
3693 envelope.payload,
3694 this.clone(),
3695 buffer_handle.clone(),
3696 cx.clone(),
3697 )
3698 .await?;
3699 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3700 let response = this
3701 .update(&mut cx, |this, cx| {
3702 this.request_lsp(buffer_handle, request, cx)
3703 })
3704 .await?;
3705 this.update(&mut cx, |this, cx| {
3706 Ok(T::response_to_proto(
3707 response,
3708 this,
3709 sender_id,
3710 &buffer_version,
3711 cx,
3712 ))
3713 })
3714 }
3715
3716 async fn handle_get_project_symbols(
3717 this: ModelHandle<Self>,
3718 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3719 _: Arc<Client>,
3720 mut cx: AsyncAppContext,
3721 ) -> Result<proto::GetProjectSymbolsResponse> {
3722 let symbols = this
3723 .update(&mut cx, |this, cx| {
3724 this.symbols(&envelope.payload.query, cx)
3725 })
3726 .await?;
3727
3728 Ok(proto::GetProjectSymbolsResponse {
3729 symbols: symbols.iter().map(serialize_symbol).collect(),
3730 })
3731 }
3732
3733 async fn handle_search_project(
3734 this: ModelHandle<Self>,
3735 envelope: TypedEnvelope<proto::SearchProject>,
3736 _: Arc<Client>,
3737 mut cx: AsyncAppContext,
3738 ) -> Result<proto::SearchProjectResponse> {
3739 let peer_id = envelope.original_sender_id()?;
3740 let query = SearchQuery::from_proto(envelope.payload)?;
3741 let result = this
3742 .update(&mut cx, |this, cx| this.search(query, cx))
3743 .await?;
3744
3745 this.update(&mut cx, |this, cx| {
3746 let mut locations = Vec::new();
3747 for (buffer, ranges) in result {
3748 for range in ranges {
3749 let start = serialize_anchor(&range.start);
3750 let end = serialize_anchor(&range.end);
3751 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3752 locations.push(proto::Location {
3753 buffer: Some(buffer),
3754 start: Some(start),
3755 end: Some(end),
3756 });
3757 }
3758 }
3759 Ok(proto::SearchProjectResponse { locations })
3760 })
3761 }
3762
3763 async fn handle_open_buffer_for_symbol(
3764 this: ModelHandle<Self>,
3765 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3766 _: Arc<Client>,
3767 mut cx: AsyncAppContext,
3768 ) -> Result<proto::OpenBufferForSymbolResponse> {
3769 let peer_id = envelope.original_sender_id()?;
3770 let symbol = envelope
3771 .payload
3772 .symbol
3773 .ok_or_else(|| anyhow!("invalid symbol"))?;
3774 let symbol = this.read_with(&cx, |this, _| {
3775 let symbol = this.deserialize_symbol(symbol)?;
3776 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3777 if signature == symbol.signature {
3778 Ok(symbol)
3779 } else {
3780 Err(anyhow!("invalid symbol signature"))
3781 }
3782 })?;
3783 let buffer = this
3784 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3785 .await?;
3786
3787 Ok(proto::OpenBufferForSymbolResponse {
3788 buffer: Some(this.update(&mut cx, |this, cx| {
3789 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3790 })),
3791 })
3792 }
3793
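    // Compute a signature for a symbol by hashing its worktree id and path together
    // with this project's private nonce. Guests must echo the signature back when
    // requesting a buffer for a symbol (see `handle_open_buffer_for_symbol` above),
    // which guards against requests for symbols that were never handed out.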
3794 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3795 let mut hasher = Sha256::new();
3796 hasher.update(worktree_id.to_proto().to_be_bytes());
3797 hasher.update(path.to_string_lossy().as_bytes());
3798 hasher.update(self.nonce.to_be_bytes());
3799 hasher.finalize().as_slice().try_into().unwrap()
3800 }
3801
3802 async fn handle_open_buffer(
3803 this: ModelHandle<Self>,
3804 envelope: TypedEnvelope<proto::OpenBuffer>,
3805 _: Arc<Client>,
3806 mut cx: AsyncAppContext,
3807 ) -> Result<proto::OpenBufferResponse> {
3808 let peer_id = envelope.original_sender_id()?;
3809 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3810 let open_buffer = this.update(&mut cx, |this, cx| {
3811 this.open_buffer(
3812 ProjectPath {
3813 worktree_id,
3814 path: PathBuf::from(envelope.payload.path).into(),
3815 },
3816 cx,
3817 )
3818 });
3819
3820 let buffer = open_buffer.await?;
3821 this.update(&mut cx, |this, cx| {
3822 Ok(proto::OpenBufferResponse {
3823 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3824 })
3825 })
3826 }
3827
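    // Convert a `ProjectTransaction` into its protobuf representation, serializing
    // each affected buffer for the given peer along the way.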
3828 fn serialize_project_transaction_for_peer(
3829 &mut self,
3830 project_transaction: ProjectTransaction,
3831 peer_id: PeerId,
3832 cx: &AppContext,
3833 ) -> proto::ProjectTransaction {
3834 let mut serialized_transaction = proto::ProjectTransaction {
3835 buffers: Default::default(),
3836 transactions: Default::default(),
3837 };
3838 for (buffer, transaction) in project_transaction.0 {
3839 serialized_transaction
3840 .buffers
3841 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3842 serialized_transaction
3843 .transactions
3844 .push(language::proto::serialize_transaction(&transaction));
3845 }
3846 serialized_transaction
3847 }
3848
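    // Rebuild a `ProjectTransaction` received from a peer: deserialize each buffer,
    // wait for the edits referenced by its transaction to arrive, and optionally
    // push the transaction onto the buffer's undo history.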
3849 fn deserialize_project_transaction(
3850 &mut self,
3851 message: proto::ProjectTransaction,
3852 push_to_history: bool,
3853 cx: &mut ModelContext<Self>,
3854 ) -> Task<Result<ProjectTransaction>> {
3855 cx.spawn(|this, mut cx| async move {
3856 let mut project_transaction = ProjectTransaction::default();
3857 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3858 let buffer = this
3859 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3860 .await?;
3861 let transaction = language::proto::deserialize_transaction(transaction)?;
3862 project_transaction.0.insert(buffer, transaction);
3863 }
3864
3865 for (buffer, transaction) in &project_transaction.0 {
3866 buffer
3867 .update(&mut cx, |buffer, _| {
3868 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3869 })
3870 .await;
3871
3872 if push_to_history {
3873 buffer.update(&mut cx, |buffer, _| {
3874 buffer.push_transaction(transaction.clone(), Instant::now());
3875 });
3876 }
3877 }
3878
3879 Ok(project_transaction)
3880 })
3881 }
3882
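    // The first time a buffer is sent to a given peer, include its full state;
    // afterwards, refer to it by id only.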
3883 fn serialize_buffer_for_peer(
3884 &mut self,
3885 buffer: &ModelHandle<Buffer>,
3886 peer_id: PeerId,
3887 cx: &AppContext,
3888 ) -> proto::Buffer {
3889 let buffer_id = buffer.read(cx).remote_id();
3890 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3891 if shared_buffers.insert(buffer_id) {
3892 proto::Buffer {
3893 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3894 }
3895 } else {
3896 proto::Buffer {
3897 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3898 }
3899 }
3900 }
3901
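    // Resolve a buffer received from a peer: either wait for an already-known
    // buffer with the given id to finish opening, or construct a new buffer from
    // the full state included in the message.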
3902 fn deserialize_buffer(
3903 &mut self,
3904 buffer: proto::Buffer,
3905 cx: &mut ModelContext<Self>,
3906 ) -> Task<Result<ModelHandle<Buffer>>> {
3907 let replica_id = self.replica_id();
3908
3909 let opened_buffer_tx = self.opened_buffer.0.clone();
3910 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3911 cx.spawn(|this, mut cx| async move {
3912 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3913 proto::buffer::Variant::Id(id) => {
3914 let buffer = loop {
3915 let buffer = this.read_with(&cx, |this, cx| {
3916 this.opened_buffers
3917 .get(&id)
3918 .and_then(|buffer| buffer.upgrade(cx))
3919 });
3920 if let Some(buffer) = buffer {
3921 break buffer;
3922 }
3923 opened_buffer_rx
3924 .next()
3925 .await
3926 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3927 };
3928 Ok(buffer)
3929 }
3930 proto::buffer::Variant::State(mut buffer) => {
3931 let mut buffer_worktree = None;
3932 let mut buffer_file = None;
3933 if let Some(file) = buffer.file.take() {
3934 this.read_with(&cx, |this, cx| {
3935 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3936 let worktree =
3937 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3938 anyhow!("no worktree found for id {}", file.worktree_id)
3939 })?;
3940 buffer_file =
3941 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3942 as Box<dyn language::File>);
3943 buffer_worktree = Some(worktree);
3944 Ok::<_, anyhow::Error>(())
3945 })?;
3946 }
3947
3948 let buffer = cx.add_model(|cx| {
3949 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3950 });
3951
3952 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3953
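                    // Wake up any tasks waiting for newly-opened buffers (e.g. the
                    // `Variant::Id` case above).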
3954 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3955 Ok(buffer)
3956 }
3957 }
3958 })
3959 }
3960
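    // Convert a protobuf symbol back into a `Symbol`, recomputing its label from
    // the registered language when possible.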
3961 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3962 let language = self
3963 .languages
3964 .get_language(&serialized_symbol.language_name);
3965 let start = serialized_symbol
3966 .start
3967 .ok_or_else(|| anyhow!("invalid start"))?;
3968 let end = serialized_symbol
3969 .end
3970 .ok_or_else(|| anyhow!("invalid end"))?;
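        // The kind is transmitted as the raw LSP `SymbolKind` discriminant.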
3971 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3972 Ok(Symbol {
3973 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3974 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3975 language_name: serialized_symbol.language_name.clone(),
3976 label: language
3977 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3978 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3979 name: serialized_symbol.name,
3980 path: PathBuf::from(serialized_symbol.path),
3981 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3982 kind,
3983 signature: serialized_symbol
3984 .signature
3985 .try_into()
3986 .map_err(|_| anyhow!("invalid signature"))?,
3987 })
3988 }
3989
3990 async fn handle_buffer_saved(
3991 this: ModelHandle<Self>,
3992 envelope: TypedEnvelope<proto::BufferSaved>,
3993 _: Arc<Client>,
3994 mut cx: AsyncAppContext,
3995 ) -> Result<()> {
3996 let version = deserialize_version(envelope.payload.version);
3997 let mtime = envelope
3998 .payload
3999 .mtime
4000 .ok_or_else(|| anyhow!("missing mtime"))?
4001 .into();
4002
4003 this.update(&mut cx, |this, cx| {
4004 let buffer = this
4005 .opened_buffers
4006 .get(&envelope.payload.buffer_id)
4007 .and_then(|buffer| buffer.upgrade(cx));
4008 if let Some(buffer) = buffer {
4009 buffer.update(cx, |buffer, cx| {
4010 buffer.did_save(version, mtime, None, cx);
4011 });
4012 }
4013 Ok(())
4014 })
4015 }
4016
4017 async fn handle_buffer_reloaded(
4018 this: ModelHandle<Self>,
4019 envelope: TypedEnvelope<proto::BufferReloaded>,
4020 _: Arc<Client>,
4021 mut cx: AsyncAppContext,
4022 ) -> Result<()> {
4023 let payload = envelope.payload.clone();
4024 let version = deserialize_version(payload.version);
4025 let mtime = payload
4026 .mtime
4027 .ok_or_else(|| anyhow!("missing mtime"))?
4028 .into();
4029 this.update(&mut cx, |this, cx| {
4030 let buffer = this
4031 .opened_buffers
4032 .get(&payload.buffer_id)
4033 .and_then(|buffer| buffer.upgrade(cx));
4034 if let Some(buffer) = buffer {
4035 buffer.update(cx, |buffer, cx| {
4036 buffer.did_reload(version, mtime, cx);
4037 });
4038 }
4039 Ok(())
4040 })
4041 }
4042
4043 pub fn match_paths<'a>(
4044 &self,
4045 query: &'a str,
4046 include_ignored: bool,
4047 smart_case: bool,
4048 max_results: usize,
4049 cancel_flag: &'a AtomicBool,
4050 cx: &AppContext,
4051 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4052 let worktrees = self
4053 .worktrees(cx)
4054 .filter(|worktree| worktree.read(cx).is_visible())
4055 .collect::<Vec<_>>();
4056 let include_root_name = worktrees.len() > 1;
4057 let candidate_sets = worktrees
4058 .into_iter()
4059 .map(|worktree| CandidateSet {
4060 snapshot: worktree.read(cx).snapshot(),
4061 include_ignored,
4062 include_root_name,
4063 })
4064 .collect::<Vec<_>>();
4065
4066 let background = cx.background().clone();
4067 async move {
4068 fuzzy::match_paths(
4069 candidate_sets.as_slice(),
4070 query,
4071 smart_case,
4072 max_results,
4073 cancel_flag,
4074 background,
4075 )
4076 .await
4077 }
4078 }
4079
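    // Convert a batch of LSP text edits into anchored edits against the buffer,
    // resolving them against the snapshot that corresponds to the document version
    // the language server computed them for.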
4080 fn edits_from_lsp(
4081 &mut self,
4082 buffer: &ModelHandle<Buffer>,
4083 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4084 version: Option<i32>,
4085 cx: &mut ModelContext<Self>,
4086 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4087 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4088 cx.background().spawn(async move {
4089 let snapshot = snapshot?;
4090 let mut lsp_edits = lsp_edits
4091 .into_iter()
4092 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4093 .peekable();
4094
4095 let mut edits = Vec::new();
4096 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4097 // Combine any LSP edits that are adjacent.
4098 //
4099 // Also, combine LSP edits that are separated from each other by only
4100 // a newline. This is important because for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
4102 // are separated by unchanged newline characters.
4103 //
4104 // In order for the diffing logic below to work properly, any edits that
4105 // cancel each other out must be combined into one.
4106 while let Some((next_range, next_text)) = lsp_edits.peek() {
4107 if next_range.start > range.end {
4108 if next_range.start.row > range.end.row + 1
4109 || next_range.start.column > 0
4110 || snapshot.clip_point_utf16(
4111 PointUtf16::new(range.end.row, u32::MAX),
4112 Bias::Left,
4113 ) > range.end
4114 {
4115 break;
4116 }
4117 new_text.push('\n');
4118 }
4119 range.end = next_range.end;
4120 new_text.push_str(&next_text);
4121 lsp_edits.next();
4122 }
4123
4124 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4125 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4126 {
4127 return Err(anyhow!("invalid edits received from language server"));
4128 }
4129
4130 // For multiline edits, perform a diff of the old and new text so that
4131 // we can identify the changes more precisely, preserving the locations
4132 // of any anchors positioned in the unchanged regions.
4133 if range.end.row > range.start.row {
4134 let mut offset = range.start.to_offset(&snapshot);
4135 let old_text = snapshot.text_for_range(range).collect::<String>();
4136
4137 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4138 let mut moved_since_edit = true;
4139 for change in diff.iter_all_changes() {
4140 let tag = change.tag();
4141 let value = change.value();
4142 match tag {
4143 ChangeTag::Equal => {
4144 offset += value.len();
4145 moved_since_edit = true;
4146 }
4147 ChangeTag::Delete => {
4148 let start = snapshot.anchor_after(offset);
4149 let end = snapshot.anchor_before(offset + value.len());
4150 if moved_since_edit {
4151 edits.push((start..end, String::new()));
4152 } else {
4153 edits.last_mut().unwrap().0.end = end;
4154 }
4155 offset += value.len();
4156 moved_since_edit = false;
4157 }
4158 ChangeTag::Insert => {
4159 if moved_since_edit {
4160 let anchor = snapshot.anchor_after(offset);
4161 edits.push((anchor.clone()..anchor, value.to_string()));
4162 } else {
4163 edits.last_mut().unwrap().1.push_str(value);
4164 }
4165 moved_since_edit = false;
4166 }
4167 }
4168 }
4169 } else if range.end == range.start {
4170 let anchor = snapshot.anchor_after(range.start);
4171 edits.push((anchor.clone()..anchor, new_text));
4172 } else {
4173 let edit_start = snapshot.anchor_after(range.start);
4174 let edit_end = snapshot.anchor_before(range.end);
4175 edits.push((edit_start..edit_end, new_text));
4176 }
4177 }
4178
4179 Ok(edits)
4180 })
4181 }
4182
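    // Look up the buffer snapshot that was current at the given LSP document
    // version, pruning snapshots that are too old to be referenced again.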
4183 fn buffer_snapshot_for_lsp_version(
4184 &mut self,
4185 buffer: &ModelHandle<Buffer>,
4186 version: Option<i32>,
4187 cx: &AppContext,
4188 ) -> Result<TextBufferSnapshot> {
4189 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4190
4191 if let Some(version) = version {
4192 let buffer_id = buffer.read(cx).remote_id();
4193 let snapshots = self
4194 .buffer_snapshots
4195 .get_mut(&buffer_id)
4196 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4197 let mut found_snapshot = None;
4198 snapshots.retain(|(snapshot_version, snapshot)| {
4199 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4200 false
4201 } else {
4202 if *snapshot_version == version {
4203 found_snapshot = Some(snapshot.clone());
4204 }
4205 true
4206 }
4207 });
4208
4209 found_snapshot.ok_or_else(|| {
4210 anyhow!(
4211 "snapshot not found for buffer {} at version {}",
4212 buffer_id,
4213 version
4214 )
4215 })
4216 } else {
            Ok(buffer.read(cx).text_snapshot())
4218 }
4219 }
4220
4221 fn language_server_for_buffer(
4222 &self,
4223 buffer: &Buffer,
4224 cx: &AppContext,
4225 ) -> Option<&Arc<LanguageServer>> {
4226 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4227 let worktree_id = file.worktree_id(cx);
4228 self.language_servers.get(&(worktree_id, language.name()))
4229 } else {
4230 None
4231 }
4232 }
4233}
4234
4235impl WorktreeHandle {
4236 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4237 match self {
4238 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4239 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4240 }
4241 }
4242}
4243
4244impl OpenBuffer {
4245 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4246 match self {
4247 OpenBuffer::Strong(handle) => Some(handle.clone()),
4248 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4249 OpenBuffer::Loading(_) => None,
4250 }
4251 }
4252}
4253
4254struct CandidateSet {
4255 snapshot: Snapshot,
4256 include_ignored: bool,
4257 include_root_name: bool,
4258}
4259
4260impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4261 type Candidates = CandidateSetIter<'a>;
4262
4263 fn id(&self) -> usize {
4264 self.snapshot.id().to_usize()
4265 }
4266
4267 fn len(&self) -> usize {
4268 if self.include_ignored {
4269 self.snapshot.file_count()
4270 } else {
4271 self.snapshot.visible_file_count()
4272 }
4273 }
4274
4275 fn prefix(&self) -> Arc<str> {
4276 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4277 self.snapshot.root_name().into()
4278 } else if self.include_root_name {
4279 format!("{}/", self.snapshot.root_name()).into()
4280 } else {
4281 "".into()
4282 }
4283 }
4284
4285 fn candidates(&'a self, start: usize) -> Self::Candidates {
4286 CandidateSetIter {
4287 traversal: self.snapshot.files(self.include_ignored, start),
4288 }
4289 }
4290}
4291
4292struct CandidateSetIter<'a> {
4293 traversal: Traversal<'a>,
4294}
4295
4296impl<'a> Iterator for CandidateSetIter<'a> {
4297 type Item = PathMatchCandidate<'a>;
4298
4299 fn next(&mut self) -> Option<Self::Item> {
4300 self.traversal.next().map(|entry| {
4301 if let EntryKind::File(char_bag) = entry.kind {
4302 PathMatchCandidate {
4303 path: &entry.path,
4304 char_bag,
4305 }
4306 } else {
4307 unreachable!()
4308 }
4309 })
4310 }
4311}
4312
4313impl Entity for Project {
4314 type Event = Event;
4315
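    // When the project is dropped, tell the server that it is no longer being
    // shared (if local) or that this client has left it (if remote).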
4316 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4317 match &self.client_state {
4318 ProjectClientState::Local { remote_id_rx, .. } => {
4319 if let Some(project_id) = *remote_id_rx.borrow() {
4320 self.client
4321 .send(proto::UnregisterProject { project_id })
4322 .log_err();
4323 }
4324 }
4325 ProjectClientState::Remote { remote_id, .. } => {
4326 self.client
4327 .send(proto::LeaveProject {
4328 project_id: *remote_id,
4329 })
4330 .log_err();
4331 }
4332 }
4333 }
4334
4335 fn app_will_quit(
4336 &mut self,
4337 _: &mut MutableAppContext,
4338 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4339 let shutdown_futures = self
4340 .language_servers
4341 .drain()
4342 .filter_map(|(_, server)| server.shutdown())
4343 .collect::<Vec<_>>();
4344 Some(
4345 async move {
4346 futures::future::join_all(shutdown_futures).await;
4347 }
4348 .boxed(),
4349 )
4350 }
4351}
4352
4353impl Collaborator {
4354 fn from_proto(
4355 message: proto::Collaborator,
4356 user_store: &ModelHandle<UserStore>,
4357 cx: &mut AsyncAppContext,
4358 ) -> impl Future<Output = Result<Self>> {
4359 let user = user_store.update(cx, |user_store, cx| {
4360 user_store.fetch_user(message.user_id, cx)
4361 });
4362
4363 async move {
4364 Ok(Self {
4365 peer_id: PeerId(message.peer_id),
4366 user: user.await?,
4367 replica_id: message.replica_id as ReplicaId,
4368 })
4369 }
4370 }
4371}
4372
4373impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4374 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4375 Self {
4376 worktree_id,
4377 path: path.as_ref().into(),
4378 }
4379 }
4380}
4381
4382impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4383 fn from(options: lsp::CreateFileOptions) -> Self {
4384 Self {
4385 overwrite: options.overwrite.unwrap_or(false),
4386 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4387 }
4388 }
4389}
4390
4391impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4392 fn from(options: lsp::RenameFileOptions) -> Self {
4393 Self {
4394 overwrite: options.overwrite.unwrap_or(false),
4395 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4396 }
4397 }
4398}
4399
4400impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4401 fn from(options: lsp::DeleteFileOptions) -> Self {
4402 Self {
4403 recursive: options.recursive.unwrap_or(false),
4404 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4405 }
4406 }
4407}
4408
4409fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4410 proto::Symbol {
4411 source_worktree_id: symbol.source_worktree_id.to_proto(),
4412 worktree_id: symbol.worktree_id.to_proto(),
4413 language_name: symbol.language_name.clone(),
4414 name: symbol.name.clone(),
4415 kind: unsafe { mem::transmute(symbol.kind) },
4416 path: symbol.path.to_string_lossy().to_string(),
4417 start: Some(proto::Point {
4418 row: symbol.range.start.row,
4419 column: symbol.range.start.column,
4420 }),
4421 end: Some(proto::Point {
4422 row: symbol.range.end.row,
4423 column: symbol.range.end.column,
4424 }),
4425 signature: symbol.signature.to_vec(),
4426 }
4427}
4428
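// Compute the path of `path` relative to `base`. For example, relativizing
// "/a/c/d" against "/a/b" yields "../c/d".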
4429fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4430 let mut path_components = path.components();
4431 let mut base_components = base.components();
4432 let mut components: Vec<Component> = Vec::new();
4433 loop {
4434 match (path_components.next(), base_components.next()) {
4435 (None, None) => break,
4436 (Some(a), None) => {
4437 components.push(a);
4438 components.extend(path_components.by_ref());
4439 break;
4440 }
4441 (None, _) => components.push(Component::ParentDir),
4442 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4443 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4444 (Some(a), Some(_)) => {
4445 components.push(Component::ParentDir);
4446 for _ in base_components {
4447 components.push(Component::ParentDir);
4448 }
4449 components.push(a);
4450 components.extend(path_components.by_ref());
4451 break;
4452 }
4453 }
4454 }
4455 components.iter().map(|c| c.as_os_str()).collect()
4456}
4457
4458#[cfg(test)]
4459mod tests {
4460 use super::{Event, *};
4461 use fs::RealFs;
4462 use futures::StreamExt;
4463 use gpui::test::subscribe;
4464 use language::{
4465 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4466 ToPoint,
4467 };
4468 use lsp::Url;
4469 use serde_json::json;
4470 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4471 use unindent::Unindent as _;
4472 use util::test::temp_tree;
4473 use worktree::WorktreeHandle as _;
4474
4475 #[gpui::test]
4476 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4477 let dir = temp_tree(json!({
4478 "root": {
4479 "apple": "",
4480 "banana": {
4481 "carrot": {
4482 "date": "",
4483 "endive": "",
4484 }
4485 },
4486 "fennel": {
4487 "grape": "",
4488 }
4489 }
4490 }));
4491
4492 let root_link_path = dir.path().join("root_link");
4493 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4494 unix::fs::symlink(
4495 &dir.path().join("root/fennel"),
4496 &dir.path().join("root/finnochio"),
4497 )
4498 .unwrap();
4499
4500 let project = Project::test(Arc::new(RealFs), cx);
4501
4502 let (tree, _) = project
4503 .update(cx, |project, cx| {
4504 project.find_or_create_local_worktree(&root_link_path, true, cx)
4505 })
4506 .await
4507 .unwrap();
4508
4509 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4510 .await;
4511 cx.read(|cx| {
4512 let tree = tree.read(cx);
4513 assert_eq!(tree.file_count(), 5);
4514 assert_eq!(
4515 tree.inode_for_path("fennel/grape"),
4516 tree.inode_for_path("finnochio/grape")
4517 );
4518 });
4519
4520 let cancel_flag = Default::default();
4521 let results = project
4522 .read_with(cx, |project, cx| {
4523 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4524 })
4525 .await;
4526 assert_eq!(
4527 results
4528 .into_iter()
4529 .map(|result| result.path)
4530 .collect::<Vec<Arc<Path>>>(),
4531 vec![
4532 PathBuf::from("banana/carrot/date").into(),
4533 PathBuf::from("banana/carrot/endive").into(),
4534 ]
4535 );
4536 }
4537
4538 #[gpui::test]
4539 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4540 cx.foreground().forbid_parking();
4541
4542 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4543 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4544 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4545 completion_provider: Some(lsp::CompletionOptions {
4546 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4547 ..Default::default()
4548 }),
4549 ..Default::default()
4550 });
4551 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4552 completion_provider: Some(lsp::CompletionOptions {
4553 trigger_characters: Some(vec![":".to_string()]),
4554 ..Default::default()
4555 }),
4556 ..Default::default()
4557 });
4558
4559 let rust_language = Arc::new(Language::new(
4560 LanguageConfig {
4561 name: "Rust".into(),
4562 path_suffixes: vec!["rs".to_string()],
4563 language_server: Some(rust_lsp_config),
4564 ..Default::default()
4565 },
4566 Some(tree_sitter_rust::language()),
4567 ));
4568 let json_language = Arc::new(Language::new(
4569 LanguageConfig {
4570 name: "JSON".into(),
4571 path_suffixes: vec!["json".to_string()],
4572 language_server: Some(json_lsp_config),
4573 ..Default::default()
4574 },
4575 None,
4576 ));
4577
4578 let fs = FakeFs::new(cx.background());
4579 fs.insert_tree(
4580 "/the-root",
4581 json!({
4582 "test.rs": "const A: i32 = 1;",
4583 "test2.rs": "",
4584 "Cargo.toml": "a = 1",
4585 "package.json": "{\"a\": 1}",
4586 }),
4587 )
4588 .await;
4589
4590 let project = Project::test(fs, cx);
4591 project.update(cx, |project, _| {
4592 project.languages.add(rust_language);
4593 project.languages.add(json_language);
4594 });
4595
4596 let worktree_id = project
4597 .update(cx, |project, cx| {
4598 project.find_or_create_local_worktree("/the-root", true, cx)
4599 })
4600 .await
4601 .unwrap()
4602 .0
4603 .read_with(cx, |tree, _| tree.id());
4604
4605 // Open a buffer without an associated language server.
4606 let toml_buffer = project
4607 .update(cx, |project, cx| {
4608 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4609 })
4610 .await
4611 .unwrap();
4612
4613 // Open a buffer with an associated language server.
4614 let rust_buffer = project
4615 .update(cx, |project, cx| {
4616 project.open_buffer((worktree_id, "test.rs"), cx)
4617 })
4618 .await
4619 .unwrap();
4620
4621 // A server is started up, and it is notified about Rust files.
4622 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4623 assert_eq!(
4624 fake_rust_server
4625 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4626 .await
4627 .text_document,
4628 lsp::TextDocumentItem {
4629 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4630 version: 0,
4631 text: "const A: i32 = 1;".to_string(),
4632 language_id: Default::default()
4633 }
4634 );
4635
4636 // The buffer is configured based on the language server's capabilities.
4637 rust_buffer.read_with(cx, |buffer, _| {
4638 assert_eq!(
4639 buffer.completion_triggers(),
4640 &[".".to_string(), "::".to_string()]
4641 );
4642 });
4643 toml_buffer.read_with(cx, |buffer, _| {
4644 assert!(buffer.completion_triggers().is_empty());
4645 });
4646
4647 // Edit a buffer. The changes are reported to the language server.
4648 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4649 assert_eq!(
4650 fake_rust_server
4651 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4652 .await
4653 .text_document,
4654 lsp::VersionedTextDocumentIdentifier::new(
4655 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4656 1
4657 )
4658 );
4659
4660 // Open a third buffer with a different associated language server.
4661 let json_buffer = project
4662 .update(cx, |project, cx| {
4663 project.open_buffer((worktree_id, "package.json"), cx)
4664 })
4665 .await
4666 .unwrap();
4667
        // Another language server is started up, and it is notified about the
        // newly-opened buffer that matches its language.
4670 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4671 assert_eq!(
4672 fake_json_server
4673 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4674 .await
4675 .text_document,
4676 lsp::TextDocumentItem {
4677 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4678 version: 0,
4679 text: "{\"a\": 1}".to_string(),
4680 language_id: Default::default()
4681 }
4682 );
4683
4684 // This buffer is configured based on the second language server's
4685 // capabilities.
4686 json_buffer.read_with(cx, |buffer, _| {
4687 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4688 });
4689
4690 // When opening another buffer whose language server is already running,
4691 // it is also configured based on the existing language server's capabilities.
4692 let rust_buffer2 = project
4693 .update(cx, |project, cx| {
4694 project.open_buffer((worktree_id, "test2.rs"), cx)
4695 })
4696 .await
4697 .unwrap();
4698 rust_buffer2.read_with(cx, |buffer, _| {
4699 assert_eq!(
4700 buffer.completion_triggers(),
4701 &[".".to_string(), "::".to_string()]
4702 );
4703 });
4704
4705 // Changes are reported only to servers matching the buffer's language.
4706 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4707 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4708 assert_eq!(
4709 fake_rust_server
4710 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4711 .await
4712 .text_document,
4713 lsp::VersionedTextDocumentIdentifier::new(
4714 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4715 1
4716 )
4717 );
4718
4719 // Save notifications are reported to all servers.
4720 toml_buffer
4721 .update(cx, |buffer, cx| buffer.save(cx))
4722 .await
4723 .unwrap();
4724 assert_eq!(
4725 fake_rust_server
4726 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4727 .await
4728 .text_document,
4729 lsp::TextDocumentIdentifier::new(
4730 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4731 )
4732 );
4733 assert_eq!(
4734 fake_json_server
4735 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4736 .await
4737 .text_document,
4738 lsp::TextDocumentIdentifier::new(
4739 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4740 )
4741 );
4742
4743 // Close notifications are reported only to servers matching the buffer's language.
4744 cx.update(|_| drop(json_buffer));
4745 let close_message = lsp::DidCloseTextDocumentParams {
4746 text_document: lsp::TextDocumentIdentifier::new(
4747 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4748 ),
4749 };
4750 assert_eq!(
4751 fake_json_server
4752 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4753 .await,
4754 close_message,
4755 );
4756 }
4757
4758 #[gpui::test]
4759 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4760 cx.foreground().forbid_parking();
4761
4762 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4763 let progress_token = language_server_config
4764 .disk_based_diagnostics_progress_token
4765 .clone()
4766 .unwrap();
4767
4768 let language = Arc::new(Language::new(
4769 LanguageConfig {
4770 name: "Rust".into(),
4771 path_suffixes: vec!["rs".to_string()],
4772 language_server: Some(language_server_config),
4773 ..Default::default()
4774 },
4775 Some(tree_sitter_rust::language()),
4776 ));
4777
4778 let fs = FakeFs::new(cx.background());
4779 fs.insert_tree(
4780 "/dir",
4781 json!({
4782 "a.rs": "fn a() { A }",
4783 "b.rs": "const y: i32 = 1",
4784 }),
4785 )
4786 .await;
4787
4788 let project = Project::test(fs, cx);
4789 project.update(cx, |project, _| project.languages.add(language));
4790
4791 let (tree, _) = project
4792 .update(cx, |project, cx| {
4793 project.find_or_create_local_worktree("/dir", true, cx)
4794 })
4795 .await
4796 .unwrap();
4797 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4798
4799 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4800 .await;
4801
4802 // Cause worktree to start the fake language server
4803 let _buffer = project
4804 .update(cx, |project, cx| {
4805 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4806 })
4807 .await
4808 .unwrap();
4809
4810 let mut events = subscribe(&project, cx);
4811
4812 let mut fake_server = fake_servers.next().await.unwrap();
4813 fake_server.start_progress(&progress_token).await;
4814 assert_eq!(
4815 events.next().await.unwrap(),
4816 Event::DiskBasedDiagnosticsStarted
4817 );
4818
4819 fake_server.start_progress(&progress_token).await;
4820 fake_server.end_progress(&progress_token).await;
4821 fake_server.start_progress(&progress_token).await;
4822
4823 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4824 lsp::PublishDiagnosticsParams {
4825 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4826 version: None,
4827 diagnostics: vec![lsp::Diagnostic {
4828 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4829 severity: Some(lsp::DiagnosticSeverity::ERROR),
4830 message: "undefined variable 'A'".to_string(),
4831 ..Default::default()
4832 }],
4833 },
4834 );
4835 assert_eq!(
4836 events.next().await.unwrap(),
4837 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4838 );
4839
4840 fake_server.end_progress(&progress_token).await;
4841 fake_server.end_progress(&progress_token).await;
4842 assert_eq!(
4843 events.next().await.unwrap(),
4844 Event::DiskBasedDiagnosticsUpdated
4845 );
4846 assert_eq!(
4847 events.next().await.unwrap(),
4848 Event::DiskBasedDiagnosticsFinished
4849 );
4850
4851 let buffer = project
4852 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4853 .await
4854 .unwrap();
4855
4856 buffer.read_with(cx, |buffer, _| {
4857 let snapshot = buffer.snapshot();
4858 let diagnostics = snapshot
4859 .diagnostics_in_range::<_, Point>(0..buffer.len())
4860 .collect::<Vec<_>>();
4861 assert_eq!(
4862 diagnostics,
4863 &[DiagnosticEntry {
4864 range: Point::new(0, 9)..Point::new(0, 10),
4865 diagnostic: Diagnostic {
4866 severity: lsp::DiagnosticSeverity::ERROR,
4867 message: "undefined variable 'A'".to_string(),
4868 group_id: 0,
4869 is_primary: true,
4870 ..Default::default()
4871 }
4872 }]
4873 )
4874 });
4875 }
4876
4877 #[gpui::test]
4878 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4879 cx.foreground().forbid_parking();
4880
4881 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4882 lsp_config
4883 .disk_based_diagnostic_sources
4884 .insert("disk".to_string());
4885 let language = Arc::new(Language::new(
4886 LanguageConfig {
4887 name: "Rust".into(),
4888 path_suffixes: vec!["rs".to_string()],
4889 language_server: Some(lsp_config),
4890 ..Default::default()
4891 },
4892 Some(tree_sitter_rust::language()),
4893 ));
4894
4895 let text = "
4896 fn a() { A }
4897 fn b() { BB }
4898 fn c() { CCC }
4899 "
4900 .unindent();
4901
4902 let fs = FakeFs::new(cx.background());
4903 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4904
4905 let project = Project::test(fs, cx);
4906 project.update(cx, |project, _| project.languages.add(language));
4907
4908 let worktree_id = project
4909 .update(cx, |project, cx| {
4910 project.find_or_create_local_worktree("/dir", true, cx)
4911 })
4912 .await
4913 .unwrap()
4914 .0
4915 .read_with(cx, |tree, _| tree.id());
4916
4917 let buffer = project
4918 .update(cx, |project, cx| {
4919 project.open_buffer((worktree_id, "a.rs"), cx)
4920 })
4921 .await
4922 .unwrap();
4923
4924 let mut fake_server = fake_servers.next().await.unwrap();
4925 let open_notification = fake_server
4926 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4927 .await;
4928
4929 // Edit the buffer, moving the content down
4930 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4931 let change_notification_1 = fake_server
4932 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4933 .await;
4934 assert!(
4935 change_notification_1.text_document.version > open_notification.text_document.version
4936 );
4937
4938 // Report some diagnostics for the initial version of the buffer
4939 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4940 lsp::PublishDiagnosticsParams {
4941 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4942 version: Some(open_notification.text_document.version),
4943 diagnostics: vec![
4944 lsp::Diagnostic {
4945 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4946 severity: Some(DiagnosticSeverity::ERROR),
4947 message: "undefined variable 'A'".to_string(),
4948 source: Some("disk".to_string()),
4949 ..Default::default()
4950 },
4951 lsp::Diagnostic {
4952 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4953 severity: Some(DiagnosticSeverity::ERROR),
4954 message: "undefined variable 'BB'".to_string(),
4955 source: Some("disk".to_string()),
4956 ..Default::default()
4957 },
4958 lsp::Diagnostic {
4959 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4960 severity: Some(DiagnosticSeverity::ERROR),
4961 source: Some("disk".to_string()),
4962 message: "undefined variable 'CCC'".to_string(),
4963 ..Default::default()
4964 },
4965 ],
4966 },
4967 );
4968
4969 // The diagnostics have moved down since they were created.
4970 buffer.next_notification(cx).await;
4971 buffer.read_with(cx, |buffer, _| {
4972 assert_eq!(
4973 buffer
4974 .snapshot()
4975 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
4976 .collect::<Vec<_>>(),
4977 &[
4978 DiagnosticEntry {
4979 range: Point::new(3, 9)..Point::new(3, 11),
4980 diagnostic: Diagnostic {
4981 severity: DiagnosticSeverity::ERROR,
4982 message: "undefined variable 'BB'".to_string(),
4983 is_disk_based: true,
4984 group_id: 1,
4985 is_primary: true,
4986 ..Default::default()
4987 },
4988 },
4989 DiagnosticEntry {
4990 range: Point::new(4, 9)..Point::new(4, 12),
4991 diagnostic: Diagnostic {
4992 severity: DiagnosticSeverity::ERROR,
4993 message: "undefined variable 'CCC'".to_string(),
4994 is_disk_based: true,
4995 group_id: 2,
4996 is_primary: true,
4997 ..Default::default()
4998 }
4999 }
5000 ]
5001 );
5002 assert_eq!(
5003 chunks_with_diagnostics(buffer, 0..buffer.len()),
5004 [
5005 ("\n\nfn a() { ".to_string(), None),
5006 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5007 (" }\nfn b() { ".to_string(), None),
5008 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5009 (" }\nfn c() { ".to_string(), None),
5010 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5011 (" }\n".to_string(), None),
5012 ]
5013 );
5014 assert_eq!(
5015 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5016 [
5017 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5018 (" }\nfn c() { ".to_string(), None),
5019 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5020 ]
5021 );
5022 });
5023
5024 // Ensure overlapping diagnostics are highlighted correctly.
5025 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5026 lsp::PublishDiagnosticsParams {
5027 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5028 version: Some(open_notification.text_document.version),
5029 diagnostics: vec![
5030 lsp::Diagnostic {
5031 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5032 severity: Some(DiagnosticSeverity::ERROR),
5033 message: "undefined variable 'A'".to_string(),
5034 source: Some("disk".to_string()),
5035 ..Default::default()
5036 },
5037 lsp::Diagnostic {
5038 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5039 severity: Some(DiagnosticSeverity::WARNING),
5040 message: "unreachable statement".to_string(),
5041 source: Some("disk".to_string()),
5042 ..Default::default()
5043 },
5044 ],
5045 },
5046 );
5047
5048 buffer.next_notification(cx).await;
5049 buffer.read_with(cx, |buffer, _| {
5050 assert_eq!(
5051 buffer
5052 .snapshot()
5053 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
5054 .collect::<Vec<_>>(),
5055 &[
5056 DiagnosticEntry {
5057 range: Point::new(2, 9)..Point::new(2, 12),
5058 diagnostic: Diagnostic {
5059 severity: DiagnosticSeverity::WARNING,
5060 message: "unreachable statement".to_string(),
5061 is_disk_based: true,
5062 group_id: 1,
5063 is_primary: true,
5064 ..Default::default()
5065 }
5066 },
5067 DiagnosticEntry {
5068 range: Point::new(2, 9)..Point::new(2, 10),
5069 diagnostic: Diagnostic {
5070 severity: DiagnosticSeverity::ERROR,
5071 message: "undefined variable 'A'".to_string(),
5072 is_disk_based: true,
5073 group_id: 0,
5074 is_primary: true,
5075 ..Default::default()
5076 },
5077 }
5078 ]
5079 );
5080 assert_eq!(
5081 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5082 [
5083 ("fn a() { ".to_string(), None),
5084 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5085 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5086 ("\n".to_string(), None),
5087 ]
5088 );
5089 assert_eq!(
5090 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5091 [
5092 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5093 ("\n".to_string(), None),
5094 ]
5095 );
5096 });
5097
5098 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5099 // changes since the last save.
5100 buffer.update(cx, |buffer, cx| {
5101 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5102 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5103 });
5104 let change_notification_2 =
5105 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5106 assert!(
5107 change_notification_2.await.text_document.version
5108 > change_notification_1.text_document.version
5109 );
5110
5111 // Handle out-of-order diagnostics
5112 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5113 lsp::PublishDiagnosticsParams {
5114 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5115 version: Some(open_notification.text_document.version),
5116 diagnostics: vec![
5117 lsp::Diagnostic {
5118 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5119 severity: Some(DiagnosticSeverity::ERROR),
5120 message: "undefined variable 'BB'".to_string(),
5121 source: Some("disk".to_string()),
5122 ..Default::default()
5123 },
5124 lsp::Diagnostic {
5125 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5126 severity: Some(DiagnosticSeverity::WARNING),
5127 message: "undefined variable 'A'".to_string(),
5128 source: Some("disk".to_string()),
5129 ..Default::default()
5130 },
5131 ],
5132 },
5133 );
5134
5135 buffer.next_notification(cx).await;
5136 buffer.read_with(cx, |buffer, _| {
5137 assert_eq!(
5138 buffer
5139 .snapshot()
5140 .diagnostics_in_range::<_, Point>(0..buffer.len())
5141 .collect::<Vec<_>>(),
5142 &[
5143 DiagnosticEntry {
5144 range: Point::new(2, 21)..Point::new(2, 22),
5145 diagnostic: Diagnostic {
5146 severity: DiagnosticSeverity::WARNING,
5147 message: "undefined variable 'A'".to_string(),
5148 is_disk_based: true,
5149 group_id: 1,
5150 is_primary: true,
5151 ..Default::default()
5152 }
5153 },
5154 DiagnosticEntry {
5155 range: Point::new(3, 9)..Point::new(3, 11),
5156 diagnostic: Diagnostic {
5157 severity: DiagnosticSeverity::ERROR,
5158 message: "undefined variable 'BB'".to_string(),
5159 is_disk_based: true,
5160 group_id: 0,
5161 is_primary: true,
5162 ..Default::default()
5163 },
5164 }
5165 ]
5166 );
5167 });
5168 }
5169
5170 #[gpui::test]
5171 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5172 cx.foreground().forbid_parking();
5173
5174 let text = concat!(
5175 "let one = ;\n", //
5176 "let two = \n",
5177 "let three = 3;\n",
5178 );
5179
5180 let fs = FakeFs::new(cx.background());
5181 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5182
5183 let project = Project::test(fs, cx);
5184 let worktree_id = project
5185 .update(cx, |project, cx| {
5186 project.find_or_create_local_worktree("/dir", true, cx)
5187 })
5188 .await
5189 .unwrap()
5190 .0
5191 .read_with(cx, |tree, _| tree.id());
5192
5193 let buffer = project
5194 .update(cx, |project, cx| {
5195 project.open_buffer((worktree_id, "a.rs"), cx)
5196 })
5197 .await
5198 .unwrap();
5199
5200 project.update(cx, |project, cx| {
5201 project
5202 .update_buffer_diagnostics(
5203 &buffer,
5204 vec![
5205 DiagnosticEntry {
5206 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5207 diagnostic: Diagnostic {
5208 severity: DiagnosticSeverity::ERROR,
5209 message: "syntax error 1".to_string(),
5210 ..Default::default()
5211 },
5212 },
5213 DiagnosticEntry {
5214 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5215 diagnostic: Diagnostic {
5216 severity: DiagnosticSeverity::ERROR,
5217 message: "syntax error 2".to_string(),
5218 ..Default::default()
5219 },
5220 },
5221 ],
5222 None,
5223 cx,
5224 )
5225 .unwrap();
5226 });
5227
5228 // An empty range is extended forward to include the following character.
5229 // At the end of a line, an empty range is extended backward to include
5230 // the preceding character.
5231 buffer.read_with(cx, |buffer, _| {
5232 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5233 assert_eq!(
5234 chunks
5235 .iter()
5236 .map(|(s, d)| (s.as_str(), *d))
5237 .collect::<Vec<_>>(),
5238 &[
5239 ("let one = ", None),
5240 (";", Some(DiagnosticSeverity::ERROR)),
5241 ("\nlet two =", None),
5242 (" ", Some(DiagnosticSeverity::ERROR)),
5243 ("\nlet three = 3;\n", None)
5244 ]
5245 );
5246 });
5247 }
5248
5249 #[gpui::test]
5250 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5251 cx.foreground().forbid_parking();
5252
5253 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5254 let language = Arc::new(Language::new(
5255 LanguageConfig {
5256 name: "Rust".into(),
5257 path_suffixes: vec!["rs".to_string()],
5258 language_server: Some(lsp_config),
5259 ..Default::default()
5260 },
5261 Some(tree_sitter_rust::language()),
5262 ));
5263
5264 let text = "
5265 fn a() {
5266 f1();
5267 }
5268 fn b() {
5269 f2();
5270 }
5271 fn c() {
5272 f3();
5273 }
5274 "
5275 .unindent();
5276
5277 let fs = FakeFs::new(cx.background());
5278 fs.insert_tree(
5279 "/dir",
5280 json!({
5281 "a.rs": text.clone(),
5282 }),
5283 )
5284 .await;
5285
5286 let project = Project::test(fs, cx);
5287 project.update(cx, |project, _| project.languages.add(language));
5288
5289 let worktree_id = project
5290 .update(cx, |project, cx| {
5291 project.find_or_create_local_worktree("/dir", true, cx)
5292 })
5293 .await
5294 .unwrap()
5295 .0
5296 .read_with(cx, |tree, _| tree.id());
5297
5298 let buffer = project
5299 .update(cx, |project, cx| {
5300 project.open_buffer((worktree_id, "a.rs"), cx)
5301 })
5302 .await
5303 .unwrap();
5304
5305 let mut fake_server = fake_servers.next().await.unwrap();
5306 let lsp_document_version = fake_server
5307 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5308 .await
5309 .text_document
5310 .version;
5311
5312 // Simulate editing the buffer after the language server computes some edits.
5313 buffer.update(cx, |buffer, cx| {
5314 buffer.edit(
5315 [Point::new(0, 0)..Point::new(0, 0)],
5316 "// above first function\n",
5317 cx,
5318 );
5319 buffer.edit(
5320 [Point::new(2, 0)..Point::new(2, 0)],
5321 " // inside first function\n",
5322 cx,
5323 );
5324 buffer.edit(
5325 [Point::new(6, 4)..Point::new(6, 4)],
5326 "// inside second function ",
5327 cx,
5328 );
5329
5330 assert_eq!(
5331 buffer.text(),
5332 "
5333 // above first function
5334 fn a() {
5335 // inside first function
5336 f1();
5337 }
5338 fn b() {
5339 // inside second function f2();
5340 }
5341 fn c() {
5342 f3();
5343 }
5344 "
5345 .unindent()
5346 );
5347 });
5348
5349 let edits = project
5350 .update(cx, |project, cx| {
5351 project.edits_from_lsp(
5352 &buffer,
5353 vec![
5354 // replace body of first function
5355 lsp::TextEdit {
5356 range: lsp::Range::new(
5357 lsp::Position::new(0, 0),
5358 lsp::Position::new(3, 0),
5359 ),
5360 new_text: "
5361 fn a() {
5362 f10();
5363 }
5364 "
5365 .unindent(),
5366 },
5367 // edit inside second function
5368 lsp::TextEdit {
5369 range: lsp::Range::new(
5370 lsp::Position::new(4, 6),
5371 lsp::Position::new(4, 6),
5372 ),
5373 new_text: "00".into(),
5374 },
5375 // edit inside third function via two distinct edits
5376 lsp::TextEdit {
5377 range: lsp::Range::new(
5378 lsp::Position::new(7, 5),
5379 lsp::Position::new(7, 5),
5380 ),
5381 new_text: "4000".into(),
5382 },
5383 lsp::TextEdit {
5384 range: lsp::Range::new(
5385 lsp::Position::new(7, 5),
5386 lsp::Position::new(7, 6),
5387 ),
5388 new_text: "".into(),
5389 },
5390 ],
5391 Some(lsp_document_version),
5392 cx,
5393 )
5394 })
5395 .await
5396 .unwrap();
5397
5398 buffer.update(cx, |buffer, cx| {
5399 for (range, new_text) in edits {
5400 buffer.edit([range], new_text, cx);
5401 }
5402 assert_eq!(
5403 buffer.text(),
5404 "
5405 // above first function
5406 fn a() {
5407 // inside first function
5408 f10();
5409 }
5410 fn b() {
5411 // inside second function f200();
5412 }
5413 fn c() {
5414 f4000();
5415 }
5416 "
5417 .unindent()
5418 );
5419 });
5420 }
5421
5422 #[gpui::test]
5423 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5424 cx.foreground().forbid_parking();
5425
5426 let text = "
5427 use a::b;
5428 use a::c;
5429
5430 fn f() {
5431 b();
5432 c();
5433 }
5434 "
5435 .unindent();
5436
5437 let fs = FakeFs::new(cx.background());
5438 fs.insert_tree(
5439 "/dir",
5440 json!({
5441 "a.rs": text.clone(),
5442 }),
5443 )
5444 .await;
5445
5446 let project = Project::test(fs, cx);
5447 let worktree_id = project
5448 .update(cx, |project, cx| {
5449 project.find_or_create_local_worktree("/dir", true, cx)
5450 })
5451 .await
5452 .unwrap()
5453 .0
5454 .read_with(cx, |tree, _| tree.id());
5455
5456 let buffer = project
5457 .update(cx, |project, cx| {
5458 project.open_buffer((worktree_id, "a.rs"), cx)
5459 })
5460 .await
5461 .unwrap();
5462
5463 // Simulate the language server sending us a small edit in the form of a very large diff.
        // rust-analyzer does this when performing a merge-imports code action.
5465 let edits = project
5466 .update(cx, |project, cx| {
5467 project.edits_from_lsp(
5468 &buffer,
5469 [
5470 // Replace the first use statement without editing the semicolon.
5471 lsp::TextEdit {
5472 range: lsp::Range::new(
5473 lsp::Position::new(0, 4),
5474 lsp::Position::new(0, 8),
5475 ),
5476 new_text: "a::{b, c}".into(),
5477 },
5478 // Reinsert the remainder of the file between the semicolon and the final
5479 // newline of the file.
5480 lsp::TextEdit {
5481 range: lsp::Range::new(
5482 lsp::Position::new(0, 9),
5483 lsp::Position::new(0, 9),
5484 ),
5485 new_text: "\n\n".into(),
5486 },
5487 lsp::TextEdit {
5488 range: lsp::Range::new(
5489 lsp::Position::new(0, 9),
5490 lsp::Position::new(0, 9),
5491 ),
5492 new_text: "
5493 fn f() {
5494 b();
5495 c();
5496 }"
5497 .unindent(),
5498 },
5499 // Delete everything after the first newline of the file.
5500 lsp::TextEdit {
5501 range: lsp::Range::new(
5502 lsp::Position::new(1, 0),
5503 lsp::Position::new(7, 0),
5504 ),
5505 new_text: "".into(),
5506 },
5507 ],
5508 None,
5509 cx,
5510 )
5511 })
5512 .await
5513 .unwrap();
5514
5515 buffer.update(cx, |buffer, cx| {
5516 let edits = edits
5517 .into_iter()
5518 .map(|(range, text)| {
5519 (
5520 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5521 text,
5522 )
5523 })
5524 .collect::<Vec<_>>();
5525
5526 assert_eq!(
5527 edits,
5528 [
5529 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5530 (Point::new(1, 0)..Point::new(2, 0), "".into())
5531 ]
5532 );
5533
5534 for (range, new_text) in edits {
5535 buffer.edit([range], new_text, cx);
5536 }
5537 assert_eq!(
5538 buffer.text(),
5539 "
5540 use a::{b, c};
5541
5542 fn f() {
5543 b();
5544 c();
5545 }
5546 "
5547 .unindent()
5548 );
5549 });
5550 }
5551
5552 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5553 buffer: &Buffer,
5554 range: Range<T>,
5555 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5556 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5557 for chunk in buffer.snapshot().chunks(range, true) {
5558 if chunks
5559 .last()
5560 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5561 {
5562 chunks.last_mut().unwrap().0.push_str(chunk.text);
5563 } else {
5564 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5565 }
5566 }
5567 chunks
5568 }
5569
5570 #[gpui::test]
5571 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5572 let dir = temp_tree(json!({
5573 "root": {
5574 "dir1": {},
5575 "dir2": {
5576 "dir3": {}
5577 }
5578 }
5579 }));
5580
5581 let project = Project::test(Arc::new(RealFs), cx);
5582 let (tree, _) = project
5583 .update(cx, |project, cx| {
5584 project.find_or_create_local_worktree(&dir.path(), true, cx)
5585 })
5586 .await
5587 .unwrap();
5588
5589 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5590 .await;
5591
5592 let cancel_flag = Default::default();
5593 let results = project
5594 .read_with(cx, |project, cx| {
5595 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5596 })
5597 .await;
5598
5599 assert!(results.is_empty());
5600 }
5601
5602 #[gpui::test]
5603 async fn test_definition(cx: &mut gpui::TestAppContext) {
5604 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5605 let language = Arc::new(Language::new(
5606 LanguageConfig {
5607 name: "Rust".into(),
5608 path_suffixes: vec!["rs".to_string()],
5609 language_server: Some(language_server_config),
5610 ..Default::default()
5611 },
5612 Some(tree_sitter_rust::language()),
5613 ));
5614
5615 let fs = FakeFs::new(cx.background());
5616 fs.insert_tree(
5617 "/dir",
5618 json!({
5619 "a.rs": "const fn a() { A }",
5620 "b.rs": "const y: i32 = crate::a()",
5621 }),
5622 )
5623 .await;
5624
5625 let project = Project::test(fs, cx);
5626 project.update(cx, |project, _| {
5627 Arc::get_mut(&mut project.languages).unwrap().add(language);
5628 });
5629
5630 let (tree, _) = project
5631 .update(cx, |project, cx| {
5632 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5633 })
5634 .await
5635 .unwrap();
5636 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5637 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5638 .await;
5639
5640 let buffer = project
5641 .update(cx, |project, cx| {
5642 project.open_buffer(
5643 ProjectPath {
5644 worktree_id,
5645 path: Path::new("").into(),
5646 },
5647 cx,
5648 )
5649 })
5650 .await
5651 .unwrap();
5652
5653 let mut fake_server = fake_servers.next().await.unwrap();
5654 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5655 let params = params.text_document_position_params;
5656 assert_eq!(
5657 params.text_document.uri.to_file_path().unwrap(),
5658 Path::new("/dir/b.rs"),
5659 );
5660 assert_eq!(params.position, lsp::Position::new(0, 22));
5661
5662 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5663 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5664 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5665 )))
5666 });
5667
5668 let mut definitions = project
5669 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5670 .await
5671 .unwrap();
5672
5673 assert_eq!(definitions.len(), 1);
5674 let definition = definitions.pop().unwrap();
5675 cx.update(|cx| {
5676 let target_buffer = definition.buffer.read(cx);
5677 assert_eq!(
5678 target_buffer
5679 .file()
5680 .unwrap()
5681 .as_local()
5682 .unwrap()
5683 .abs_path(cx),
5684 Path::new("/dir/a.rs"),
5685 );
5686 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5687 assert_eq!(
5688 list_worktrees(&project, cx),
5689 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5690 );
5691
5692 drop(definition);
5693 });
5694 cx.read(|cx| {
5695 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5696 });
5697
5698 fn list_worktrees<'a>(
5699 project: &'a ModelHandle<Project>,
5700 cx: &'a AppContext,
5701 ) -> Vec<(&'a Path, bool)> {
5702 project
5703 .read(cx)
5704 .worktrees(cx)
5705 .map(|worktree| {
5706 let worktree = worktree.read(cx);
5707 (
5708 worktree.as_local().unwrap().abs_path().as_ref(),
5709 worktree.is_visible(),
5710 )
5711 })
5712 .collect::<Vec<_>>()
5713 }
5714 }
5715
5716 #[gpui::test]
5717 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5718 let fs = FakeFs::new(cx.background());
5719 fs.insert_tree(
5720 "/dir",
5721 json!({
5722 "file1": "the old contents",
5723 }),
5724 )
5725 .await;
5726
5727 let project = Project::test(fs.clone(), cx);
5728 let worktree_id = project
5729 .update(cx, |p, cx| {
5730 p.find_or_create_local_worktree("/dir", true, cx)
5731 })
5732 .await
5733 .unwrap()
5734 .0
5735 .read_with(cx, |tree, _| tree.id());
5736
5737 let buffer = project
5738 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5739 .await
5740 .unwrap();
5741 buffer
5742 .update(cx, |buffer, cx| {
5743 assert_eq!(buffer.text(), "the old contents");
5744 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5745 buffer.save(cx)
5746 })
5747 .await
5748 .unwrap();
5749
5750 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5751 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5752 }
5753
5754 #[gpui::test]
5755 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5756 let fs = FakeFs::new(cx.background());
5757 fs.insert_tree(
5758 "/dir",
5759 json!({
5760 "file1": "the old contents",
5761 }),
5762 )
5763 .await;
5764
5765 let project = Project::test(fs.clone(), cx);
5766 let worktree_id = project
5767 .update(cx, |p, cx| {
5768 p.find_or_create_local_worktree("/dir/file1", true, cx)
5769 })
5770 .await
5771 .unwrap()
5772 .0
5773 .read_with(cx, |tree, _| tree.id());
5774
5775 let buffer = project
5776 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5777 .await
5778 .unwrap();
5779 buffer
5780 .update(cx, |buffer, cx| {
5781 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5782 buffer.save(cx)
5783 })
5784 .await
5785 .unwrap();
5786
5787 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5788 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5789 }
5790
5791 #[gpui::test]
5792 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5793 let fs = FakeFs::new(cx.background());
5794 fs.insert_tree("/dir", json!({})).await;
5795
5796 let project = Project::test(fs.clone(), cx);
5797 let (worktree, _) = project
5798 .update(cx, |project, cx| {
5799 project.find_or_create_local_worktree("/dir", true, cx)
5800 })
5801 .await
5802 .unwrap();
5803 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5804
5805 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5806 buffer.update(cx, |buffer, cx| {
5807 buffer.edit([0..0], "abc", cx);
5808 assert!(buffer.is_dirty());
5809 assert!(!buffer.has_conflict());
5810 });
5811 project
5812 .update(cx, |project, cx| {
5813 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5814 })
5815 .await
5816 .unwrap();
5817 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5818 buffer.read_with(cx, |buffer, cx| {
5819 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5820 assert!(!buffer.is_dirty());
5821 assert!(!buffer.has_conflict());
5822 });
5823
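// Opening the path the buffer was saved to should return the same buffer.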
5824 let opened_buffer = project
5825 .update(cx, |project, cx| {
5826 project.open_buffer((worktree_id, "file1"), cx)
5827 })
5828 .await
5829 .unwrap();
5830 assert_eq!(opened_buffer, buffer);
5831 }
5832
5833 #[gpui::test(retries = 5)]
5834 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5835 let dir = temp_tree(json!({
5836 "a": {
5837 "file1": "",
5838 "file2": "",
5839 "file3": "",
5840 },
5841 "b": {
5842 "c": {
5843 "file4": "",
5844 "file5": "",
5845 }
5846 }
5847 }));
5848
5849 let project = Project::test(Arc::new(RealFs), cx);
5850 let rpc = project.read_with(cx, |p, _| p.client.clone());
5851
5852 let (tree, _) = project
5853 .update(cx, |p, cx| {
5854 p.find_or_create_local_worktree(dir.path(), true, cx)
5855 })
5856 .await
5857 .unwrap();
5858 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5859
5860 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5861 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5862 async move { buffer.await.unwrap() }
5863 };
5864 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5865 tree.read_with(cx, |tree, _| {
5866 tree.entry_for_path(path)
5867 .unwrap_or_else(|| panic!("no entry for path {}", path))
5868 .id
5869 })
5870 };
5871
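// Keep buffers open for files that are about to be renamed, moved, or deleted.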
5872 let buffer2 = buffer_for_path("a/file2", cx).await;
5873 let buffer3 = buffer_for_path("a/file3", cx).await;
5874 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5875 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5876
5877 let file2_id = id_for_path("a/file2", &cx);
5878 let file3_id = id_for_path("a/file3", &cx);
5879 let file4_id = id_for_path("b/c/file4", &cx);
5880
5881 // Wait for the initial scan.
5882 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5883 .await;
5884
5885 // Create a remote copy of this worktree.
5886 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5887 let (remote, load_task) = cx.update(|cx| {
5888 Worktree::remote(
5889 1,
5890 1,
5891 initial_snapshot.to_proto(&Default::default(), true),
5892 rpc.clone(),
5893 cx,
5894 )
5895 });
5896 load_task.await;
5897
5898 cx.read(|cx| {
5899 assert!(!buffer2.read(cx).is_dirty());
5900 assert!(!buffer3.read(cx).is_dirty());
5901 assert!(!buffer4.read(cx).is_dirty());
5902 assert!(!buffer5.read(cx).is_dirty());
5903 });
5904
5905 // Rename and delete files and directories.
5906 tree.flush_fs_events(&cx).await;
5907 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5908 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5909 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5910 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5911 tree.flush_fs_events(&cx).await;
5912
5913 let expected_paths = vec![
5914 "a",
5915 "a/file1",
5916 "a/file2.new",
5917 "b",
5918 "d",
5919 "d/file3",
5920 "d/file4",
5921 ];
5922
5923 cx.read(|app| {
5924 assert_eq!(
5925 tree.read(app)
5926 .paths()
5927 .map(|p| p.to_str().unwrap())
5928 .collect::<Vec<_>>(),
5929 expected_paths
5930 );
5931
5932 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5933 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5934 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5935
5936 assert_eq!(
5937 buffer2.read(app).file().unwrap().path().as_ref(),
5938 Path::new("a/file2.new")
5939 );
5940 assert_eq!(
5941 buffer3.read(app).file().unwrap().path().as_ref(),
5942 Path::new("d/file3")
5943 );
5944 assert_eq!(
5945 buffer4.read(app).file().unwrap().path().as_ref(),
5946 Path::new("d/file4")
5947 );
5948 assert_eq!(
5949 buffer5.read(app).file().unwrap().path().as_ref(),
5950 Path::new("b/c/file5")
5951 );
5952
5953 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5954 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5955 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5956 assert!(buffer5.read(app).file().unwrap().is_deleted());
5957 });
5958
5959 // Update the remote worktree. Check that it becomes consistent with the
5960 // local worktree.
5961 remote.update(cx, |remote, cx| {
5962 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5963 &initial_snapshot,
5964 1,
5965 1,
5966 true,
5967 );
5968 remote
5969 .as_remote_mut()
5970 .unwrap()
5971 .snapshot
5972 .apply_remote_update(update_message)
5973 .unwrap();
5974
5975 assert_eq!(
5976 remote
5977 .paths()
5978 .map(|p| p.to_str().unwrap())
5979 .collect::<Vec<_>>(),
5980 expected_paths
5981 );
5982 });
5983 }
5984
5985 #[gpui::test]
5986 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5987 let fs = FakeFs::new(cx.background());
5988 fs.insert_tree(
5989 "/the-dir",
5990 json!({
5991 "a.txt": "a-contents",
5992 "b.txt": "b-contents",
5993 }),
5994 )
5995 .await;
5996
5997 let project = Project::test(fs.clone(), cx);
5998 let worktree_id = project
5999 .update(cx, |p, cx| {
6000 p.find_or_create_local_worktree("/the-dir", true, cx)
6001 })
6002 .await
6003 .unwrap()
6004 .0
6005 .read_with(cx, |tree, _| tree.id());
6006
6007 // Spawn multiple tasks to open paths, repeating some paths.
6008 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6009 (
6010 p.open_buffer((worktree_id, "a.txt"), cx),
6011 p.open_buffer((worktree_id, "b.txt"), cx),
6012 p.open_buffer((worktree_id, "a.txt"), cx),
6013 )
6014 });
6015
6016 let buffer_a_1 = buffer_a_1.await.unwrap();
6017 let buffer_a_2 = buffer_a_2.await.unwrap();
6018 let buffer_b = buffer_b.await.unwrap();
6019 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6020 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6021
6022 // There is only one buffer per path.
6023 let buffer_a_id = buffer_a_1.id();
6024 assert_eq!(buffer_a_2.id(), buffer_a_id);
6025
6026 // Open the same path again while it is still open.
6027 drop(buffer_a_1);
6028 let buffer_a_3 = project
6029 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6030 .await
6031 .unwrap();
6032
6033 // There's still only one buffer per path.
6034 assert_eq!(buffer_a_3.id(), buffer_a_id);
6035 }
6036
6037 #[gpui::test]
6038 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6039 use std::fs;
6040
6041 let dir = temp_tree(json!({
6042 "file1": "abc",
6043 "file2": "def",
6044 "file3": "ghi",
6045 }));
6046
6047 let project = Project::test(Arc::new(RealFs), cx);
6048 let (worktree, _) = project
6049 .update(cx, |p, cx| {
6050 p.find_or_create_local_worktree(dir.path(), true, cx)
6051 })
6052 .await
6053 .unwrap();
6054 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6055
6056 worktree.flush_fs_events(&cx).await;
6057 worktree
6058 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6059 .await;
6060
6061 let buffer1 = project
6062 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6063 .await
6064 .unwrap();
6065 let events = Rc::new(RefCell::new(Vec::new()));
6066
6067 // initially, the buffer isn't dirty.
6068 buffer1.update(cx, |buffer, cx| {
6069 cx.subscribe(&buffer1, {
6070 let events = events.clone();
6071 move |_, _, event, _| match event {
6072 BufferEvent::Operation(_) => {}
6073 _ => events.borrow_mut().push(event.clone()),
6074 }
6075 })
6076 .detach();
6077
6078 assert!(!buffer.is_dirty());
6079 assert!(events.borrow().is_empty());
6080
6081 buffer.edit(vec![1..2], "", cx);
6082 });
6083
6084 // after the first edit, the buffer is dirty, and emits a dirtied event.
6085 buffer1.update(cx, |buffer, cx| {
6086 assert_eq!(buffer.text(), "ac");
6087 assert!(buffer.is_dirty());
6088 assert_eq!(
6089 *events.borrow(),
6090 &[language::Event::Edited, language::Event::Dirtied]
6091 );
6092 events.borrow_mut().clear();
6093 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6094 });
6095
6096 // after saving, the buffer is not dirty, and emits a saved event.
6097 buffer1.update(cx, |buffer, cx| {
6098 assert!(!buffer.is_dirty());
6099 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6100 events.borrow_mut().clear();
6101
6102 buffer.edit(vec![1..1], "B", cx);
6103 buffer.edit(vec![2..2], "D", cx);
6104 });
6105
6106 // after editing again, the buffer is dirty, and emits another dirtied event.
6107 buffer1.update(cx, |buffer, cx| {
6108 assert_eq!(buffer.text(), "aBDc");
6109 assert!(buffer.is_dirty());
6110 assert_eq!(
6111 *events.borrow(),
6112 &[
6113 language::Event::Edited,
6114 language::Event::Dirtied,
6115 language::Event::Edited,
6116 ],
6117 );
6118 events.borrow_mut().clear();
6119
6120 // TODO - currently, after restoring the buffer to its
6121 // previously-saved state, the buffer is still considered dirty.
6122 buffer.edit([1..3], "", cx);
6123 assert_eq!(buffer.text(), "ac");
6124 assert!(buffer.is_dirty());
6125 });
6126
6127 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6128
6129 // When a file is deleted, the buffer is considered dirty.
6130 let events = Rc::new(RefCell::new(Vec::new()));
6131 let buffer2 = project
6132 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6133 .await
6134 .unwrap();
6135 buffer2.update(cx, |_, cx| {
6136 cx.subscribe(&buffer2, {
6137 let events = events.clone();
6138 move |_, _, event, _| events.borrow_mut().push(event.clone())
6139 })
6140 .detach();
6141 });
6142
6143 fs::remove_file(dir.path().join("file2")).unwrap();
6144 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6145 assert_eq!(
6146 *events.borrow(),
6147 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6148 );
6149
6150 // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6151 let events = Rc::new(RefCell::new(Vec::new()));
6152 let buffer3 = project
6153 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6154 .await
6155 .unwrap();
6156 buffer3.update(cx, |_, cx| {
6157 cx.subscribe(&buffer3, {
6158 let events = events.clone();
6159 move |_, _, event, _| events.borrow_mut().push(event.clone())
6160 })
6161 .detach();
6162 });
6163
6164 worktree.flush_fs_events(&cx).await;
6165 buffer3.update(cx, |buffer, cx| {
6166 buffer.edit(Some(0..0), "x", cx);
6167 });
6168 events.borrow_mut().clear();
6169 fs::remove_file(dir.path().join("file3")).unwrap();
6170 buffer3
6171 .condition(&cx, |_, _| !events.borrow().is_empty())
6172 .await;
6173 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6174 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6175 }
6176
6177 #[gpui::test]
6178 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6179 use std::fs;
6180
6181 let initial_contents = "aaa\nbbbbb\nc\n";
6182 let dir = temp_tree(json!({ "the-file": initial_contents }));
6183
6184 let project = Project::test(Arc::new(RealFs), cx);
6185 let (worktree, _) = project
6186 .update(cx, |p, cx| {
6187 p.find_or_create_local_worktree(dir.path(), true, cx)
6188 })
6189 .await
6190 .unwrap();
6191 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6192
6193 worktree
6194 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6195 .await;
6196
6197 let abs_path = dir.path().join("the-file");
6198 let buffer = project
6199 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6200 .await
6201 .unwrap();
6202
6203 // TODO
6204 // Add a cursor on each row.
6205 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6206 // assert!(!buffer.is_dirty());
6207 // buffer.add_selection_set(
6208 // &(0..3)
6209 // .map(|row| Selection {
6210 // id: row as usize,
6211 // start: Point::new(row, 1),
6212 // end: Point::new(row, 1),
6213 // reversed: false,
6214 // goal: SelectionGoal::None,
6215 // })
6216 // .collect::<Vec<_>>(),
6217 // cx,
6218 // )
6219 // });
6220
6221 // Change the file on disk, adding two new lines of text, and removing
6222 // one line.
6223 buffer.read_with(cx, |buffer, _| {
6224 assert!(!buffer.is_dirty());
6225 assert!(!buffer.has_conflict());
6226 });
6227 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6228 fs::write(&abs_path, new_contents).unwrap();
6229
6230 // Because the buffer was not modified, it is reloaded from disk. Its
6231 // contents are edited according to the diff between the old and new
6232 // file contents.
6233 buffer
6234 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6235 .await;
6236
6237 buffer.update(cx, |buffer, _| {
6238 assert_eq!(buffer.text(), new_contents);
6239 assert!(!buffer.is_dirty());
6240 assert!(!buffer.has_conflict());
6241
6242 // TODO
6243 // let cursor_positions = buffer
6244 // .selection_set(selection_set_id)
6245 // .unwrap()
6246 // .selections::<Point>(&*buffer)
6247 // .map(|selection| {
6248 // assert_eq!(selection.start, selection.end);
6249 // selection.start
6250 // })
6251 // .collect::<Vec<_>>();
6252 // assert_eq!(
6253 // cursor_positions,
6254 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6255 // );
6256 });
6257
6258 // Modify the buffer
6259 buffer.update(cx, |buffer, cx| {
6260 buffer.edit(vec![0..0], " ", cx);
6261 assert!(buffer.is_dirty());
6262 assert!(!buffer.has_conflict());
6263 });
6264
6265 // Change the file on disk again, adding blank lines to the beginning.
6266 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6267
6268 // Because the buffer is modified, it doesn't reload from disk, but is
6269 // marked as having a conflict.
6270 buffer
6271 .condition(&cx, |buffer, _| buffer.has_conflict())
6272 .await;
6273 }
6274
6275 #[gpui::test]
6276 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6277 cx.foreground().forbid_parking();
6278
6279 let fs = FakeFs::new(cx.background());
6280 fs.insert_tree(
6281 "/the-dir",
6282 json!({
6283 "a.rs": "
6284 fn foo(mut v: Vec<usize>) {
6285 for x in &v {
6286 v.push(1);
6287 }
6288 }
6289 "
6290 .unindent(),
6291 }),
6292 )
6293 .await;
6294
6295 let project = Project::test(fs.clone(), cx);
6296 let (worktree, _) = project
6297 .update(cx, |p, cx| {
6298 p.find_or_create_local_worktree("/the-dir", true, cx)
6299 })
6300 .await
6301 .unwrap();
6302 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6303
6304 let buffer = project
6305 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6306 .await
6307 .unwrap();
6308
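// Publish two primary diagnostics plus hint diagnostics that reference them via related information.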
6309 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6310 let message = lsp::PublishDiagnosticsParams {
6311 uri: buffer_uri.clone(),
6312 diagnostics: vec![
6313 lsp::Diagnostic {
6314 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6315 severity: Some(DiagnosticSeverity::WARNING),
6316 message: "error 1".to_string(),
6317 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6318 location: lsp::Location {
6319 uri: buffer_uri.clone(),
6320 range: lsp::Range::new(
6321 lsp::Position::new(1, 8),
6322 lsp::Position::new(1, 9),
6323 ),
6324 },
6325 message: "error 1 hint 1".to_string(),
6326 }]),
6327 ..Default::default()
6328 },
6329 lsp::Diagnostic {
6330 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6331 severity: Some(DiagnosticSeverity::HINT),
6332 message: "error 1 hint 1".to_string(),
6333 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6334 location: lsp::Location {
6335 uri: buffer_uri.clone(),
6336 range: lsp::Range::new(
6337 lsp::Position::new(1, 8),
6338 lsp::Position::new(1, 9),
6339 ),
6340 },
6341 message: "original diagnostic".to_string(),
6342 }]),
6343 ..Default::default()
6344 },
6345 lsp::Diagnostic {
6346 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6347 severity: Some(DiagnosticSeverity::ERROR),
6348 message: "error 2".to_string(),
6349 related_information: Some(vec![
6350 lsp::DiagnosticRelatedInformation {
6351 location: lsp::Location {
6352 uri: buffer_uri.clone(),
6353 range: lsp::Range::new(
6354 lsp::Position::new(1, 13),
6355 lsp::Position::new(1, 15),
6356 ),
6357 },
6358 message: "error 2 hint 1".to_string(),
6359 },
6360 lsp::DiagnosticRelatedInformation {
6361 location: lsp::Location {
6362 uri: buffer_uri.clone(),
6363 range: lsp::Range::new(
6364 lsp::Position::new(1, 13),
6365 lsp::Position::new(1, 15),
6366 ),
6367 },
6368 message: "error 2 hint 2".to_string(),
6369 },
6370 ]),
6371 ..Default::default()
6372 },
6373 lsp::Diagnostic {
6374 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6375 severity: Some(DiagnosticSeverity::HINT),
6376 message: "error 2 hint 1".to_string(),
6377 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6378 location: lsp::Location {
6379 uri: buffer_uri.clone(),
6380 range: lsp::Range::new(
6381 lsp::Position::new(2, 8),
6382 lsp::Position::new(2, 17),
6383 ),
6384 },
6385 message: "original diagnostic".to_string(),
6386 }]),
6387 ..Default::default()
6388 },
6389 lsp::Diagnostic {
6390 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6391 severity: Some(DiagnosticSeverity::HINT),
6392 message: "error 2 hint 2".to_string(),
6393 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6394 location: lsp::Location {
6395 uri: buffer_uri.clone(),
6396 range: lsp::Range::new(
6397 lsp::Position::new(2, 8),
6398 lsp::Position::new(2, 17),
6399 ),
6400 },
6401 message: "original diagnostic".to_string(),
6402 }]),
6403 ..Default::default()
6404 },
6405 ],
6406 version: None,
6407 };
6408
6409 project
6410 .update(cx, |p, cx| {
6411 p.update_diagnostics(message, &Default::default(), cx)
6412 })
6413 .unwrap();
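// Each primary diagnostic is grouped with its hints under a shared group_id, and only the primary entry is marked is_primary.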
6414 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6415
6416 assert_eq!(
6417 buffer
6418 .diagnostics_in_range::<_, Point>(0..buffer.len())
6419 .collect::<Vec<_>>(),
6420 &[
6421 DiagnosticEntry {
6422 range: Point::new(1, 8)..Point::new(1, 9),
6423 diagnostic: Diagnostic {
6424 severity: DiagnosticSeverity::WARNING,
6425 message: "error 1".to_string(),
6426 group_id: 0,
6427 is_primary: true,
6428 ..Default::default()
6429 }
6430 },
6431 DiagnosticEntry {
6432 range: Point::new(1, 8)..Point::new(1, 9),
6433 diagnostic: Diagnostic {
6434 severity: DiagnosticSeverity::HINT,
6435 message: "error 1 hint 1".to_string(),
6436 group_id: 0,
6437 is_primary: false,
6438 ..Default::default()
6439 }
6440 },
6441 DiagnosticEntry {
6442 range: Point::new(1, 13)..Point::new(1, 15),
6443 diagnostic: Diagnostic {
6444 severity: DiagnosticSeverity::HINT,
6445 message: "error 2 hint 1".to_string(),
6446 group_id: 1,
6447 is_primary: false,
6448 ..Default::default()
6449 }
6450 },
6451 DiagnosticEntry {
6452 range: Point::new(1, 13)..Point::new(1, 15),
6453 diagnostic: Diagnostic {
6454 severity: DiagnosticSeverity::HINT,
6455 message: "error 2 hint 2".to_string(),
6456 group_id: 1,
6457 is_primary: false,
6458 ..Default::default()
6459 }
6460 },
6461 DiagnosticEntry {
6462 range: Point::new(2, 8)..Point::new(2, 17),
6463 diagnostic: Diagnostic {
6464 severity: DiagnosticSeverity::ERROR,
6465 message: "error 2".to_string(),
6466 group_id: 1,
6467 is_primary: true,
6468 ..Default::default()
6469 }
6470 }
6471 ]
6472 );
6473
6474 assert_eq!(
6475 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6476 &[
6477 DiagnosticEntry {
6478 range: Point::new(1, 8)..Point::new(1, 9),
6479 diagnostic: Diagnostic {
6480 severity: DiagnosticSeverity::WARNING,
6481 message: "error 1".to_string(),
6482 group_id: 0,
6483 is_primary: true,
6484 ..Default::default()
6485 }
6486 },
6487 DiagnosticEntry {
6488 range: Point::new(1, 8)..Point::new(1, 9),
6489 diagnostic: Diagnostic {
6490 severity: DiagnosticSeverity::HINT,
6491 message: "error 1 hint 1".to_string(),
6492 group_id: 0,
6493 is_primary: false,
6494 ..Default::default()
6495 }
6496 },
6497 ]
6498 );
6499 assert_eq!(
6500 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6501 &[
6502 DiagnosticEntry {
6503 range: Point::new(1, 13)..Point::new(1, 15),
6504 diagnostic: Diagnostic {
6505 severity: DiagnosticSeverity::HINT,
6506 message: "error 2 hint 1".to_string(),
6507 group_id: 1,
6508 is_primary: false,
6509 ..Default::default()
6510 }
6511 },
6512 DiagnosticEntry {
6513 range: Point::new(1, 13)..Point::new(1, 15),
6514 diagnostic: Diagnostic {
6515 severity: DiagnosticSeverity::HINT,
6516 message: "error 2 hint 2".to_string(),
6517 group_id: 1,
6518 is_primary: false,
6519 ..Default::default()
6520 }
6521 },
6522 DiagnosticEntry {
6523 range: Point::new(2, 8)..Point::new(2, 17),
6524 diagnostic: Diagnostic {
6525 severity: DiagnosticSeverity::ERROR,
6526 message: "error 2".to_string(),
6527 group_id: 1,
6528 is_primary: true,
6529 ..Default::default()
6530 }
6531 }
6532 ]
6533 );
6534 }
6535
6536 #[gpui::test]
6537 async fn test_rename(cx: &mut gpui::TestAppContext) {
6538 cx.foreground().forbid_parking();
6539
6540 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6541 let language = Arc::new(Language::new(
6542 LanguageConfig {
6543 name: "Rust".into(),
6544 path_suffixes: vec!["rs".to_string()],
6545 language_server: Some(language_server_config),
6546 ..Default::default()
6547 },
6548 Some(tree_sitter_rust::language()),
6549 ));
6550
6551 let fs = FakeFs::new(cx.background());
6552 fs.insert_tree(
6553 "/dir",
6554 json!({
6555 "one.rs": "const ONE: usize = 1;",
6556 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6557 }),
6558 )
6559 .await;
6560
6561 let project = Project::test(fs.clone(), cx);
6562 project.update(cx, |project, _| {
6563 Arc::get_mut(&mut project.languages).unwrap().add(language);
6564 });
6565
6566 let (tree, _) = project
6567 .update(cx, |project, cx| {
6568 project.find_or_create_local_worktree("/dir", true, cx)
6569 })
6570 .await
6571 .unwrap();
6572 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6573 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6574 .await;
6575
6576 let buffer = project
6577 .update(cx, |project, cx| {
6578 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6579 })
6580 .await
6581 .unwrap();
6582
6583 let mut fake_server = fake_servers.next().await.unwrap();
6584
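// Ask the language server which range can be renamed at the given position.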
6585 let response = project.update(cx, |project, cx| {
6586 project.prepare_rename(buffer.clone(), 7, cx)
6587 });
6588 fake_server
6589 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6590 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6591 assert_eq!(params.position, lsp::Position::new(0, 7));
6592 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6593 lsp::Position::new(0, 6),
6594 lsp::Position::new(0, 9),
6595 )))
6596 })
6597 .next()
6598 .await
6599 .unwrap();
6600 let range = response.await.unwrap().unwrap();
6601 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6602 assert_eq!(range, 6..9);
6603
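// Perform the rename; the resulting workspace edit spans both one.rs and two.rs.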
6604 let response = project.update(cx, |project, cx| {
6605 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6606 });
6607 fake_server
6608 .handle_request::<lsp::request::Rename, _>(|params, _| {
6609 assert_eq!(
6610 params.text_document_position.text_document.uri.as_str(),
6611 "file:///dir/one.rs"
6612 );
6613 assert_eq!(
6614 params.text_document_position.position,
6615 lsp::Position::new(0, 7)
6616 );
6617 assert_eq!(params.new_name, "THREE");
6618 Some(lsp::WorkspaceEdit {
6619 changes: Some(
6620 [
6621 (
6622 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6623 vec![lsp::TextEdit::new(
6624 lsp::Range::new(
6625 lsp::Position::new(0, 6),
6626 lsp::Position::new(0, 9),
6627 ),
6628 "THREE".to_string(),
6629 )],
6630 ),
6631 (
6632 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6633 vec![
6634 lsp::TextEdit::new(
6635 lsp::Range::new(
6636 lsp::Position::new(0, 24),
6637 lsp::Position::new(0, 27),
6638 ),
6639 "THREE".to_string(),
6640 ),
6641 lsp::TextEdit::new(
6642 lsp::Range::new(
6643 lsp::Position::new(0, 35),
6644 lsp::Position::new(0, 38),
6645 ),
6646 "THREE".to_string(),
6647 ),
6648 ],
6649 ),
6650 ]
6651 .into_iter()
6652 .collect(),
6653 ),
6654 ..Default::default()
6655 })
6656 })
6657 .next()
6658 .await
6659 .unwrap();
6660 let mut transaction = response.await.unwrap().0;
6661 assert_eq!(transaction.len(), 2);
6662 assert_eq!(
6663 transaction
6664 .remove_entry(&buffer)
6665 .unwrap()
6666 .0
6667 .read_with(cx, |buffer, _| buffer.text()),
6668 "const THREE: usize = 1;"
6669 );
6670 assert_eq!(
6671 transaction
6672 .into_keys()
6673 .next()
6674 .unwrap()
6675 .read_with(cx, |buffer, _| buffer.text()),
6676 "const TWO: usize = one::THREE + one::THREE;"
6677 );
6678 }
6679
6680 #[gpui::test]
6681 async fn test_search(cx: &mut gpui::TestAppContext) {
6682 let fs = FakeFs::new(cx.background());
6683 fs.insert_tree(
6684 "/dir",
6685 json!({
6686 "one.rs": "const ONE: usize = 1;",
6687 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6688 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6689 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6690 }),
6691 )
6692 .await;
6693 let project = Project::test(fs.clone(), cx);
6694 let (tree, _) = project
6695 .update(cx, |project, cx| {
6696 project.find_or_create_local_worktree("/dir", true, cx)
6697 })
6698 .await
6699 .unwrap();
6700 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6701 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6702 .await;
6703
6704 assert_eq!(
6705 search(&project, SearchQuery::text("TWO", false, true), cx)
6706 .await
6707 .unwrap(),
6708 HashMap::from_iter([
6709 ("two.rs".to_string(), vec![6..9]),
6710 ("three.rs".to_string(), vec![37..40])
6711 ])
6712 );
6713
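// Edit an open buffer without saving; subsequent searches should reflect the unsaved contents.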
6714 let buffer_4 = project
6715 .update(cx, |project, cx| {
6716 project.open_buffer((worktree_id, "four.rs"), cx)
6717 })
6718 .await
6719 .unwrap();
6720 buffer_4.update(cx, |buffer, cx| {
6721 buffer.edit([20..28, 31..43], "two::TWO", cx);
6722 });
6723
6724 assert_eq!(
6725 search(&project, SearchQuery::text("TWO", false, true), cx)
6726 .await
6727 .unwrap(),
6728 HashMap::from_iter([
6729 ("two.rs".to_string(), vec![6..9]),
6730 ("three.rs".to_string(), vec![37..40]),
6731 ("four.rs".to_string(), vec![25..28, 36..39])
6732 ])
6733 );
6734
6735 async fn search(
6736 project: &ModelHandle<Project>,
6737 query: SearchQuery,
6738 cx: &mut gpui::TestAppContext,
6739 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6740 let results = project
6741 .update(cx, |project, cx| project.search(query, cx))
6742 .await?;
6743
6744 Ok(results
6745 .into_iter()
6746 .map(|(buffer, ranges)| {
6747 buffer.read_with(cx, |buffer, _| {
6748 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6749 let ranges = ranges
6750 .into_iter()
6751 .map(|range| range.to_offset(buffer))
6752 .collect::<Vec<_>>();
6753 (path, ranges)
6754 })
6755 })
6756 .collect())
6757 }
6758 }
6759}