1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
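/// An entity that may correspond to an entry (file or directory) in one of the project's worktrees.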
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
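/// The state of a collection of worktrees being edited, either locally or as a guest
/// collaborating over RPC: open buffers, running language servers, and the
/// collaborators currently sharing the project.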
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
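/// How the project holds an open buffer: strongly while the project is shared or
/// remote, weakly otherwise, or as a backlog of operations that arrived before the
/// buffer finished loading.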
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
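/// Like `OpenBuffer`, a worktree is held strongly while the project is shared and may
/// be downgraded to a weak handle when unsharing (see `share` and `unshare`).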
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
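/// Whether this replica hosts a local project or is a guest of a remote one, along
/// with the connection state needed for each role.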
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
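/// A path to a file or directory, relative to the root of a particular worktree.
///
/// For example (hypothetical values), `src/lib.rs` inside a worktree could be addressed as
/// `ProjectPath { worktree_id, path: Arc::from(Path::new("src/lib.rs")) }`.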
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161 pub info_count: usize,
162 pub hint_count: usize,
163}
164
165#[derive(Debug)]
166pub struct Location {
167 pub buffer: ModelHandle<Buffer>,
168 pub range: Range<language::Anchor>,
169}
170
171#[derive(Debug)]
172pub struct DocumentHighlight {
173 pub range: Range<language::Anchor>,
174 pub kind: DocumentHighlightKind,
175}
176
177#[derive(Clone, Debug)]
178pub struct Symbol {
179 pub source_worktree_id: WorktreeId,
180 pub worktree_id: WorktreeId,
181 pub language_server_name: LanguageServerName,
182 pub path: PathBuf,
183 pub label: CodeLabel,
184 pub name: String,
185 pub kind: lsp::SymbolKind,
186 pub range: Range<PointUtf16>,
187 pub signature: [u8; 32],
188}
189
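/// The edits applied across one or more buffers by a single project-level operation
/// (for example an LSP workspace edit), keyed by the buffer they were applied to.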
190#[derive(Default)]
191pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
192
193impl DiagnosticSummary {
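    /// Builds a summary by counting only primary diagnostics, bucketed by severity.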
194 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
195 let mut this = Self {
196 error_count: 0,
197 warning_count: 0,
198 info_count: 0,
199 hint_count: 0,
200 };
201
202 for entry in diagnostics {
203 if entry.diagnostic.is_primary {
204 match entry.diagnostic.severity {
205 DiagnosticSeverity::ERROR => this.error_count += 1,
206 DiagnosticSeverity::WARNING => this.warning_count += 1,
207 DiagnosticSeverity::INFORMATION => this.info_count += 1,
208 DiagnosticSeverity::HINT => this.hint_count += 1,
209 _ => {}
210 }
211 }
212 }
213
214 this
215 }
216
217 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
218 proto::DiagnosticSummary {
219 path: path.to_string_lossy().to_string(),
220 error_count: self.error_count as u32,
221 warning_count: self.warning_count as u32,
222 info_count: self.info_count as u32,
223 hint_count: self.hint_count as u32,
224 }
225 }
226}
227
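/// A project-wide identifier for a worktree entry, allocated from a shared atomic
/// counter and convertible to and from its wire representation.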
228#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
229pub struct ProjectEntryId(usize);
230
231impl ProjectEntryId {
232 pub fn new(counter: &AtomicUsize) -> Self {
233 Self(counter.fetch_add(1, SeqCst))
234 }
235
236 pub fn from_proto(id: u64) -> Self {
237 Self(id as usize)
238 }
239
240 pub fn to_proto(&self) -> u64 {
241 self.0 as u64
242 }
243
244 pub fn to_usize(&self) -> usize {
245 self.0
246 }
247}
248
249impl Project {
250 pub fn init(client: &Arc<Client>) {
251 client.add_model_message_handler(Self::handle_add_collaborator);
252 client.add_model_message_handler(Self::handle_buffer_reloaded);
253 client.add_model_message_handler(Self::handle_buffer_saved);
254 client.add_model_message_handler(Self::handle_start_language_server);
255 client.add_model_message_handler(Self::handle_update_language_server);
256 client.add_model_message_handler(Self::handle_remove_collaborator);
257 client.add_model_message_handler(Self::handle_register_worktree);
258 client.add_model_message_handler(Self::handle_unregister_worktree);
259 client.add_model_message_handler(Self::handle_unshare_project);
260 client.add_model_message_handler(Self::handle_update_buffer_file);
261 client.add_model_message_handler(Self::handle_update_buffer);
262 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
263 client.add_model_message_handler(Self::handle_update_worktree);
264 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
265 client.add_model_request_handler(Self::handle_apply_code_action);
266 client.add_model_request_handler(Self::handle_reload_buffers);
267 client.add_model_request_handler(Self::handle_format_buffers);
268 client.add_model_request_handler(Self::handle_get_code_actions);
269 client.add_model_request_handler(Self::handle_get_completions);
270 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
271 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
272 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
273 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
274 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
275 client.add_model_request_handler(Self::handle_search_project);
276 client.add_model_request_handler(Self::handle_get_project_symbols);
277 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
278 client.add_model_request_handler(Self::handle_open_buffer_by_id);
279 client.add_model_request_handler(Self::handle_open_buffer_by_path);
280 client.add_model_request_handler(Self::handle_save_buffer);
281 }
282
283 pub fn local(
284 client: Arc<Client>,
285 user_store: ModelHandle<UserStore>,
286 languages: Arc<LanguageRegistry>,
287 fs: Arc<dyn Fs>,
288 cx: &mut MutableAppContext,
289 ) -> ModelHandle<Self> {
290 cx.add_model(|cx: &mut ModelContext<Self>| {
291 let (remote_id_tx, remote_id_rx) = watch::channel();
292 let _maintain_remote_id_task = cx.spawn_weak({
293 let rpc = client.clone();
294 move |this, mut cx| {
295 async move {
296 let mut status = rpc.status();
297 while let Some(status) = status.next().await {
298 if let Some(this) = this.upgrade(&cx) {
299 if status.is_connected() {
300 this.update(&mut cx, |this, cx| this.register(cx)).await?;
301 } else {
302 this.update(&mut cx, |this, cx| this.unregister(cx));
303 }
304 }
305 }
306 Ok(())
307 }
308 .log_err()
309 }
310 });
311
312 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
313 Self {
314 worktrees: Default::default(),
315 collaborators: Default::default(),
316 opened_buffers: Default::default(),
317 shared_buffers: Default::default(),
318 loading_buffers: Default::default(),
319 loading_local_worktrees: Default::default(),
320 buffer_snapshots: Default::default(),
321 client_state: ProjectClientState::Local {
322 is_shared: false,
323 remote_id_tx,
324 remote_id_rx,
325 _maintain_remote_id_task,
326 },
327 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
328 subscriptions: Vec::new(),
329 active_entry: None,
330 languages,
331 client,
332 user_store,
333 fs,
334 next_entry_id: Default::default(),
335 language_servers: Default::default(),
336 started_language_servers: Default::default(),
337 language_server_statuses: Default::default(),
338 last_workspace_edits_by_language_server: Default::default(),
339 language_server_settings: Default::default(),
340 next_language_server_id: 0,
341 nonce: StdRng::from_entropy().gen(),
342 }
343 })
344 }
345
346 pub async fn remote(
347 remote_id: u64,
348 client: Arc<Client>,
349 user_store: ModelHandle<UserStore>,
350 languages: Arc<LanguageRegistry>,
351 fs: Arc<dyn Fs>,
352 cx: &mut AsyncAppContext,
353 ) -> Result<ModelHandle<Self>> {
354 client.authenticate_and_connect(true, &cx).await?;
355
356 let response = client
357 .request(proto::JoinProject {
358 project_id: remote_id,
359 })
360 .await?;
361
362 let replica_id = response.replica_id as ReplicaId;
363
364 let mut worktrees = Vec::new();
365 for worktree in response.worktrees {
366 let (worktree, load_task) = cx
367 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
368 worktrees.push(worktree);
369 load_task.detach();
370 }
371
372 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
373 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
374 let mut this = Self {
375 worktrees: Vec::new(),
376 loading_buffers: Default::default(),
377 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
378 shared_buffers: Default::default(),
379 loading_local_worktrees: Default::default(),
380 active_entry: None,
381 collaborators: Default::default(),
382 languages,
383 user_store: user_store.clone(),
384 fs,
385 next_entry_id: Default::default(),
386 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
387 client: client.clone(),
388 client_state: ProjectClientState::Remote {
389 sharing_has_stopped: false,
390 remote_id,
391 replica_id,
392 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
393 async move {
394 let mut status = client.status();
395 let is_connected =
396 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future status change means we were momentarily disconnected.
398 if !is_connected || status.next().await.is_some() {
399 if let Some(this) = this.upgrade(&cx) {
400 this.update(&mut cx, |this, cx| this.project_unshared(cx))
401 }
402 }
403 Ok(())
404 }
405 .log_err()
406 }),
407 },
408 language_servers: Default::default(),
409 started_language_servers: Default::default(),
410 language_server_settings: Default::default(),
411 language_server_statuses: response
412 .language_servers
413 .into_iter()
414 .map(|server| {
415 (
416 server.id as usize,
417 LanguageServerStatus {
418 name: server.name,
419 pending_work: Default::default(),
420 pending_diagnostic_updates: 0,
421 },
422 )
423 })
424 .collect(),
425 last_workspace_edits_by_language_server: Default::default(),
426 next_language_server_id: 0,
427 opened_buffers: Default::default(),
428 buffer_snapshots: Default::default(),
429 nonce: StdRng::from_entropy().gen(),
430 };
431 for worktree in worktrees {
432 this.add_worktree(&worktree, cx);
433 }
434 this
435 });
436
437 let user_ids = response
438 .collaborators
439 .iter()
440 .map(|peer| peer.user_id)
441 .collect();
442 user_store
443 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
444 .await?;
445 let mut collaborators = HashMap::default();
446 for message in response.collaborators {
447 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
448 collaborators.insert(collaborator.peer_id, collaborator);
449 }
450
451 this.update(cx, |this, _| {
452 this.collaborators = collaborators;
453 });
454
455 Ok(this)
456 }
457
458 #[cfg(any(test, feature = "test-support"))]
459 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
460 let languages = Arc::new(LanguageRegistry::test());
461 let http_client = client::test::FakeHttpClient::with_404_response();
462 let client = client::Client::new(http_client.clone());
463 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
464 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
465 }
466
467 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
468 self.opened_buffers
469 .get(&remote_id)
470 .and_then(|buffer| buffer.upgrade(cx))
471 }
472
473 pub fn languages(&self) -> &Arc<LanguageRegistry> {
474 &self.languages
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn check_invariants(&self, cx: &AppContext) {
479 if self.is_local() {
480 let mut worktree_root_paths = HashMap::default();
481 for worktree in self.worktrees(cx) {
482 let worktree = worktree.read(cx);
483 let abs_path = worktree.as_local().unwrap().abs_path().clone();
484 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
485 assert_eq!(
486 prev_worktree_id,
487 None,
488 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
489 abs_path,
490 worktree.id(),
491 prev_worktree_id
492 )
493 }
494 } else {
495 let replica_id = self.replica_id();
496 for buffer in self.opened_buffers.values() {
497 if let Some(buffer) = buffer.upgrade(cx) {
498 let buffer = buffer.read(cx);
499 assert_eq!(
500 buffer.deferred_ops_len(),
501 0,
502 "replica {}, buffer {} has deferred operations",
503 replica_id,
504 buffer.remote_id()
505 );
506 }
507 }
508 }
509 }
510
511 #[cfg(any(test, feature = "test-support"))]
512 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
513 let path = path.into();
514 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
515 self.opened_buffers.iter().any(|(_, buffer)| {
516 if let Some(buffer) = buffer.upgrade(cx) {
517 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
518 if file.worktree == worktree && file.path() == &path.path {
519 return true;
520 }
521 }
522 }
523 false
524 })
525 } else {
526 false
527 }
528 }
529
530 pub fn fs(&self) -> &Arc<dyn Fs> {
531 &self.fs
532 }
533
534 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
535 self.unshare(cx);
536 for worktree in &self.worktrees {
537 if let Some(worktree) = worktree.upgrade(cx) {
538 worktree.update(cx, |worktree, _| {
539 worktree.as_local_mut().unwrap().unregister();
540 });
541 }
542 }
543
544 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
545 *remote_id_tx.borrow_mut() = None;
546 }
547
548 self.subscriptions.clear();
549 }
550
551 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
552 self.unregister(cx);
553
554 let response = self.client.request(proto::RegisterProject {});
555 cx.spawn(|this, mut cx| async move {
556 let remote_id = response.await?.project_id;
557
558 let mut registrations = Vec::new();
559 this.update(&mut cx, |this, cx| {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
561 *remote_id_tx.borrow_mut() = Some(remote_id);
562 }
563
564 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
565
566 this.subscriptions
567 .push(this.client.add_model_for_remote_entity(remote_id, cx));
568
569 for worktree in &this.worktrees {
570 if let Some(worktree) = worktree.upgrade(cx) {
571 registrations.push(worktree.update(cx, |worktree, cx| {
572 let worktree = worktree.as_local_mut().unwrap();
573 worktree.register(remote_id, cx)
574 }));
575 }
576 }
577 });
578
579 futures::future::try_join_all(registrations).await?;
580 Ok(())
581 })
582 }
583
584 pub fn remote_id(&self) -> Option<u64> {
585 match &self.client_state {
586 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
587 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
588 }
589 }
590
591 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
592 let mut id = None;
593 let mut watch = None;
594 match &self.client_state {
595 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
596 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
597 }
598
599 async move {
600 if let Some(id) = id {
601 return id;
602 }
603 let mut watch = watch.unwrap();
604 loop {
605 let id = *watch.borrow();
606 if let Some(id) = id {
607 return id;
608 }
609 watch.next().await;
610 }
611 }
612 }
613
614 pub fn replica_id(&self) -> ReplicaId {
615 match &self.client_state {
616 ProjectClientState::Local { .. } => 0,
617 ProjectClientState::Remote { replica_id, .. } => *replica_id,
618 }
619 }
620
621 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
622 &self.collaborators
623 }
624
625 pub fn worktrees<'a>(
626 &'a self,
627 cx: &'a AppContext,
628 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
629 self.worktrees
630 .iter()
631 .filter_map(move |worktree| worktree.upgrade(cx))
632 }
633
634 pub fn visible_worktrees<'a>(
635 &'a self,
636 cx: &'a AppContext,
637 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
638 self.worktrees.iter().filter_map(|worktree| {
639 worktree.upgrade(cx).and_then(|worktree| {
640 if worktree.read(cx).is_visible() {
641 Some(worktree)
642 } else {
643 None
644 }
645 })
646 })
647 }
648
649 pub fn worktree_for_id(
650 &self,
651 id: WorktreeId,
652 cx: &AppContext,
653 ) -> Option<ModelHandle<Worktree>> {
654 self.worktrees(cx)
655 .find(|worktree| worktree.read(cx).id() == id)
656 }
657
658 pub fn worktree_for_entry(
659 &self,
660 entry_id: ProjectEntryId,
661 cx: &AppContext,
662 ) -> Option<ModelHandle<Worktree>> {
663 self.worktrees(cx)
664 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
665 }
666
667 pub fn worktree_id_for_entry(
668 &self,
669 entry_id: ProjectEntryId,
670 cx: &AppContext,
671 ) -> Option<WorktreeId> {
672 self.worktree_for_entry(entry_id, cx)
673 .map(|worktree| worktree.read(cx).id())
674 }
675
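    /// Shares this local project with collaborators: upgrades weak buffer and worktree
    /// handles to strong ones so they stay alive for guests, then registers the share
    /// with the server and shares each worktree.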
676 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
677 let rpc = self.client.clone();
678 cx.spawn(|this, mut cx| async move {
679 let project_id = this.update(&mut cx, |this, cx| {
680 if let ProjectClientState::Local {
681 is_shared,
682 remote_id_rx,
683 ..
684 } = &mut this.client_state
685 {
686 *is_shared = true;
687
688 for open_buffer in this.opened_buffers.values_mut() {
689 match open_buffer {
690 OpenBuffer::Strong(_) => {}
691 OpenBuffer::Weak(buffer) => {
692 if let Some(buffer) = buffer.upgrade(cx) {
693 *open_buffer = OpenBuffer::Strong(buffer);
694 }
695 }
696 OpenBuffer::Loading(_) => unreachable!(),
697 }
698 }
699
700 for worktree_handle in this.worktrees.iter_mut() {
701 match worktree_handle {
702 WorktreeHandle::Strong(_) => {}
703 WorktreeHandle::Weak(worktree) => {
704 if let Some(worktree) = worktree.upgrade(cx) {
705 *worktree_handle = WorktreeHandle::Strong(worktree);
706 }
707 }
708 }
709 }
710
711 remote_id_rx
712 .borrow()
713 .ok_or_else(|| anyhow!("no project id"))
714 } else {
715 Err(anyhow!("can't share a remote project"))
716 }
717 })?;
718
719 rpc.request(proto::ShareProject { project_id }).await?;
720
721 let mut tasks = Vec::new();
722 this.update(&mut cx, |this, cx| {
723 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
724 worktree.update(cx, |worktree, cx| {
725 let worktree = worktree.as_local_mut().unwrap();
726 tasks.push(worktree.share(project_id, cx));
727 });
728 }
729 });
730 for task in tasks {
731 task.await?;
732 }
733 this.update(&mut cx, |_, cx| cx.notify());
734 Ok(())
735 })
736 }
737
738 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
739 let rpc = self.client.clone();
740
741 if let ProjectClientState::Local {
742 is_shared,
743 remote_id_rx,
744 ..
745 } = &mut self.client_state
746 {
747 if !*is_shared {
748 return;
749 }
750
751 *is_shared = false;
752 self.collaborators.clear();
753 self.shared_buffers.clear();
754 for worktree_handle in self.worktrees.iter_mut() {
755 if let WorktreeHandle::Strong(worktree) = worktree_handle {
756 let is_visible = worktree.update(cx, |worktree, _| {
757 worktree.as_local_mut().unwrap().unshare();
758 worktree.is_visible()
759 });
760 if !is_visible {
761 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
762 }
763 }
764 }
765
766 for open_buffer in self.opened_buffers.values_mut() {
767 match open_buffer {
768 OpenBuffer::Strong(buffer) => {
769 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
770 }
771 _ => {}
772 }
773 }
774
775 if let Some(project_id) = *remote_id_rx.borrow() {
776 rpc.send(proto::UnshareProject { project_id }).log_err();
777 }
778
779 cx.notify();
780 } else {
781 log::error!("attempted to unshare a remote project");
782 }
783 }
784
785 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
786 if let ProjectClientState::Remote {
787 sharing_has_stopped,
788 ..
789 } = &mut self.client_state
790 {
791 *sharing_has_stopped = true;
792 self.collaborators.clear();
793 cx.notify();
794 }
795 }
796
797 pub fn is_read_only(&self) -> bool {
798 match &self.client_state {
799 ProjectClientState::Local { .. } => false,
800 ProjectClientState::Remote {
801 sharing_has_stopped,
802 ..
803 } => *sharing_has_stopped,
804 }
805 }
806
807 pub fn is_local(&self) -> bool {
808 match &self.client_state {
809 ProjectClientState::Local { .. } => true,
810 ProjectClientState::Remote { .. } => false,
811 }
812 }
813
814 pub fn is_remote(&self) -> bool {
815 !self.is_local()
816 }
817
818 pub fn create_buffer(
819 &mut self,
820 text: &str,
821 language: Option<Arc<Language>>,
822 cx: &mut ModelContext<Self>,
823 ) -> Result<ModelHandle<Buffer>> {
824 if self.is_remote() {
825 return Err(anyhow!("creating buffers as a guest is not supported yet"));
826 }
827
828 let buffer = cx.add_model(|cx| {
829 Buffer::new(self.replica_id(), text, cx)
830 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
831 });
832 self.register_buffer(&buffer, cx)?;
833 Ok(buffer)
834 }
835
836 pub fn open_path(
837 &mut self,
838 path: impl Into<ProjectPath>,
839 cx: &mut ModelContext<Self>,
840 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
841 let task = self.open_buffer(path, cx);
842 cx.spawn_weak(|_, cx| async move {
843 let buffer = task.await?;
844 let project_entry_id = buffer
845 .read_with(&cx, |buffer, cx| {
846 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
847 })
848 .ok_or_else(|| anyhow!("no project entry"))?;
849 Ok((project_entry_id, buffer.into()))
850 })
851 }
852
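    /// Opens the buffer at the given project path, deduplicating concurrent requests:
    /// an already-open buffer is returned immediately, and a path that is currently
    /// loading is awaited on the same watch channel rather than loaded twice.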
853 pub fn open_buffer(
854 &mut self,
855 path: impl Into<ProjectPath>,
856 cx: &mut ModelContext<Self>,
857 ) -> Task<Result<ModelHandle<Buffer>>> {
858 let project_path = path.into();
859 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
860 worktree
861 } else {
862 return Task::ready(Err(anyhow!("no such worktree")));
863 };
864
865 // If there is already a buffer for the given path, then return it.
866 let existing_buffer = self.get_open_buffer(&project_path, cx);
867 if let Some(existing_buffer) = existing_buffer {
868 return Task::ready(Ok(existing_buffer));
869 }
870
871 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
872 // If the given path is already being loaded, then wait for that existing
873 // task to complete and return the same buffer.
874 hash_map::Entry::Occupied(e) => e.get().clone(),
875
876 // Otherwise, record the fact that this path is now being loaded.
877 hash_map::Entry::Vacant(entry) => {
878 let (mut tx, rx) = postage::watch::channel();
879 entry.insert(rx.clone());
880
881 let load_buffer = if worktree.read(cx).is_local() {
882 self.open_local_buffer(&project_path.path, &worktree, cx)
883 } else {
884 self.open_remote_buffer(&project_path.path, &worktree, cx)
885 };
886
887 cx.spawn(move |this, mut cx| async move {
888 let load_result = load_buffer.await;
889 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
890 // Record the fact that the buffer is no longer loading.
891 this.loading_buffers.remove(&project_path);
892 let buffer = load_result.map_err(Arc::new)?;
893 Ok(buffer)
894 }));
895 })
896 .detach();
897 rx
898 }
899 };
900
901 cx.foreground().spawn(async move {
902 loop {
903 if let Some(result) = loading_watch.borrow().as_ref() {
904 match result {
905 Ok(buffer) => return Ok(buffer.clone()),
906 Err(error) => return Err(anyhow!("{}", error)),
907 }
908 }
909 loading_watch.next().await;
910 }
911 })
912 }
913
914 fn open_local_buffer(
915 &mut self,
916 path: &Arc<Path>,
917 worktree: &ModelHandle<Worktree>,
918 cx: &mut ModelContext<Self>,
919 ) -> Task<Result<ModelHandle<Buffer>>> {
920 let load_buffer = worktree.update(cx, |worktree, cx| {
921 let worktree = worktree.as_local_mut().unwrap();
922 worktree.load_buffer(path, cx)
923 });
924 cx.spawn(|this, mut cx| async move {
925 let buffer = load_buffer.await?;
926 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
927 Ok(buffer)
928 })
929 }
930
931 fn open_remote_buffer(
932 &mut self,
933 path: &Arc<Path>,
934 worktree: &ModelHandle<Worktree>,
935 cx: &mut ModelContext<Self>,
936 ) -> Task<Result<ModelHandle<Buffer>>> {
937 let rpc = self.client.clone();
938 let project_id = self.remote_id().unwrap();
939 let remote_worktree_id = worktree.read(cx).id();
940 let path = path.clone();
941 let path_string = path.to_string_lossy().to_string();
942 cx.spawn(|this, mut cx| async move {
943 let response = rpc
944 .request(proto::OpenBufferByPath {
945 project_id,
946 worktree_id: remote_worktree_id.to_proto(),
947 path: path_string,
948 })
949 .await?;
950 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
951 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
952 .await
953 })
954 }
955
956 fn open_local_buffer_via_lsp(
957 &mut self,
958 abs_path: lsp::Url,
959 lsp_adapter: Arc<dyn LspAdapter>,
960 lsp_server: Arc<LanguageServer>,
961 cx: &mut ModelContext<Self>,
962 ) -> Task<Result<ModelHandle<Buffer>>> {
963 cx.spawn(|this, mut cx| async move {
964 let abs_path = abs_path
965 .to_file_path()
966 .map_err(|_| anyhow!("can't convert URI to path"))?;
967 let (worktree, relative_path) = if let Some(result) =
968 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
969 {
970 result
971 } else {
972 let worktree = this
973 .update(&mut cx, |this, cx| {
974 this.create_local_worktree(&abs_path, false, cx)
975 })
976 .await?;
977 this.update(&mut cx, |this, cx| {
978 this.language_servers.insert(
979 (worktree.read(cx).id(), lsp_adapter.name()),
980 (lsp_adapter, lsp_server),
981 );
982 });
983 (worktree, PathBuf::new())
984 };
985
986 let project_path = ProjectPath {
987 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
988 path: relative_path.into(),
989 };
990 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
991 .await
992 })
993 }
994
995 pub fn open_buffer_by_id(
996 &mut self,
997 id: u64,
998 cx: &mut ModelContext<Self>,
999 ) -> Task<Result<ModelHandle<Buffer>>> {
1000 if let Some(buffer) = self.buffer_for_id(id, cx) {
1001 Task::ready(Ok(buffer))
1002 } else if self.is_local() {
1003 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1004 } else if let Some(project_id) = self.remote_id() {
1005 let request = self
1006 .client
1007 .request(proto::OpenBufferById { project_id, id });
1008 cx.spawn(|this, mut cx| async move {
1009 let buffer = request
1010 .await?
1011 .buffer
1012 .ok_or_else(|| anyhow!("invalid buffer"))?;
1013 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1014 .await
1015 })
1016 } else {
1017 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1018 }
1019 }
1020
1021 pub fn save_buffer_as(
1022 &mut self,
1023 buffer: ModelHandle<Buffer>,
1024 abs_path: PathBuf,
1025 cx: &mut ModelContext<Project>,
1026 ) -> Task<Result<()>> {
1027 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1028 let old_path =
1029 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1030 cx.spawn(|this, mut cx| async move {
1031 if let Some(old_path) = old_path {
1032 this.update(&mut cx, |this, cx| {
1033 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1034 });
1035 }
1036 let (worktree, path) = worktree_task.await?;
1037 worktree
1038 .update(&mut cx, |worktree, cx| {
1039 worktree
1040 .as_local_mut()
1041 .unwrap()
1042 .save_buffer_as(buffer.clone(), path, cx)
1043 })
1044 .await?;
1045 this.update(&mut cx, |this, cx| {
1046 this.assign_language_to_buffer(&buffer, cx);
1047 this.register_buffer_with_language_server(&buffer, cx);
1048 });
1049 Ok(())
1050 })
1051 }
1052
1053 pub fn get_open_buffer(
1054 &mut self,
1055 path: &ProjectPath,
1056 cx: &mut ModelContext<Self>,
1057 ) -> Option<ModelHandle<Buffer>> {
1058 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1059 self.opened_buffers.values().find_map(|buffer| {
1060 let buffer = buffer.upgrade(cx)?;
1061 let file = File::from_dyn(buffer.read(cx).file())?;
1062 if file.worktree == worktree && file.path() == &path.path {
1063 Some(buffer)
1064 } else {
1065 None
1066 }
1067 })
1068 }
1069
1070 fn register_buffer(
1071 &mut self,
1072 buffer: &ModelHandle<Buffer>,
1073 cx: &mut ModelContext<Self>,
1074 ) -> Result<()> {
1075 let remote_id = buffer.read(cx).remote_id();
1076 let open_buffer = if self.is_remote() || self.is_shared() {
1077 OpenBuffer::Strong(buffer.clone())
1078 } else {
1079 OpenBuffer::Weak(buffer.downgrade())
1080 };
1081
1082 match self.opened_buffers.insert(remote_id, open_buffer) {
1083 None => {}
1084 Some(OpenBuffer::Loading(operations)) => {
1085 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1086 }
1087 Some(OpenBuffer::Weak(existing_handle)) => {
1088 if existing_handle.upgrade(cx).is_some() {
1089 Err(anyhow!(
1090 "already registered buffer with remote id {}",
1091 remote_id
1092 ))?
1093 }
1094 }
1095 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1096 "already registered buffer with remote id {}",
1097 remote_id
1098 ))?,
1099 }
1100 cx.subscribe(buffer, |this, buffer, event, cx| {
1101 this.on_buffer_event(buffer, event, cx);
1102 })
1103 .detach();
1104
1105 self.assign_language_to_buffer(buffer, cx);
1106 self.register_buffer_with_language_server(buffer, cx);
1107 cx.observe_release(buffer, |this, buffer, cx| {
1108 if let Some(file) = File::from_dyn(buffer.file()) {
1109 if file.is_local() {
1110 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1111 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1112 server
1113 .notify::<lsp::notification::DidCloseTextDocument>(
1114 lsp::DidCloseTextDocumentParams {
1115 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1116 },
1117 )
1118 .log_err();
1119 }
1120 }
1121 }
1122 })
1123 .detach();
1124
1125 Ok(())
1126 }
1127
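    /// Introduces a local buffer to its language server: applies any diagnostics the
    /// worktree already has for the path, sends `textDocument/didOpen`, records the
    /// snapshot as version 0 for later incremental updates, and copies the server's
    /// completion trigger characters onto the buffer.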
1128 fn register_buffer_with_language_server(
1129 &mut self,
1130 buffer_handle: &ModelHandle<Buffer>,
1131 cx: &mut ModelContext<Self>,
1132 ) {
1133 let buffer = buffer_handle.read(cx);
1134 let buffer_id = buffer.remote_id();
1135 if let Some(file) = File::from_dyn(buffer.file()) {
1136 if file.is_local() {
1137 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1138 let initial_snapshot = buffer.text_snapshot();
1139 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1140
1141 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1142 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1143 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1144 .log_err();
1145 }
1146 }
1147
1148 if let Some((_, server)) = language_server {
1149 server
1150 .notify::<lsp::notification::DidOpenTextDocument>(
1151 lsp::DidOpenTextDocumentParams {
1152 text_document: lsp::TextDocumentItem::new(
1153 uri,
1154 Default::default(),
1155 0,
1156 initial_snapshot.text(),
1157 ),
                            },
1160 )
1161 .log_err();
1162 buffer_handle.update(cx, |buffer, cx| {
1163 buffer.set_completion_triggers(
1164 server
1165 .capabilities()
1166 .completion_provider
1167 .as_ref()
1168 .and_then(|provider| provider.trigger_characters.clone())
1169 .unwrap_or(Vec::new()),
1170 cx,
1171 )
1172 });
1173 self.buffer_snapshots
1174 .insert(buffer_id, vec![(0, initial_snapshot)]);
1175 }
1176 }
1177 }
1178 }
1179
1180 fn unregister_buffer_from_language_server(
1181 &mut self,
1182 buffer: &ModelHandle<Buffer>,
1183 old_path: PathBuf,
1184 cx: &mut ModelContext<Self>,
1185 ) {
1186 buffer.update(cx, |buffer, cx| {
1187 buffer.update_diagnostics(Default::default(), cx);
1188 self.buffer_snapshots.remove(&buffer.remote_id());
1189 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1190 language_server
1191 .notify::<lsp::notification::DidCloseTextDocument>(
1192 lsp::DidCloseTextDocumentParams {
1193 text_document: lsp::TextDocumentIdentifier::new(
1194 lsp::Url::from_file_path(old_path).unwrap(),
1195 ),
1196 },
1197 )
1198 .log_err();
1199 }
1200 });
1201 }
1202
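    /// Reacts to buffer events: forwards operations to collaborators over RPC, sends
    /// incremental `textDocument/didChange` notifications on edit, and notifies every
    /// language server for the worktree with `textDocument/didSave` on save.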
1203 fn on_buffer_event(
1204 &mut self,
1205 buffer: ModelHandle<Buffer>,
1206 event: &BufferEvent,
1207 cx: &mut ModelContext<Self>,
1208 ) -> Option<()> {
1209 match event {
1210 BufferEvent::Operation(operation) => {
1211 let project_id = self.remote_id()?;
1212 let request = self.client.request(proto::UpdateBuffer {
1213 project_id,
1214 buffer_id: buffer.read(cx).remote_id(),
1215 operations: vec![language::proto::serialize_operation(&operation)],
1216 });
1217 cx.background().spawn(request).detach_and_log_err(cx);
1218 }
1219 BufferEvent::Edited { .. } => {
1220 let (_, language_server) = self
1221 .language_server_for_buffer(buffer.read(cx), cx)?
1222 .clone();
1223 let buffer = buffer.read(cx);
1224 let file = File::from_dyn(buffer.file())?;
1225 let abs_path = file.as_local()?.abs_path(cx);
1226 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1227 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1228 let (version, prev_snapshot) = buffer_snapshots.last()?;
1229 let next_snapshot = buffer.text_snapshot();
1230 let next_version = version + 1;
1231
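                // Express each edit since the last snapshot we sent as an LSP content
                // change: the range starts at the edit's new position and spans the old
                // extent (the coordinates the server sees after the preceding changes),
                // while the replacement text is taken from the new snapshot.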
1232 let content_changes = buffer
1233 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1234 .map(|edit| {
1235 let edit_start = edit.new.start.0;
1236 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1237 let new_text = next_snapshot
1238 .text_for_range(edit.new.start.1..edit.new.end.1)
1239 .collect();
1240 lsp::TextDocumentContentChangeEvent {
1241 range: Some(lsp::Range::new(
1242 point_to_lsp(edit_start),
1243 point_to_lsp(edit_end),
1244 )),
1245 range_length: None,
1246 text: new_text,
1247 }
1248 })
1249 .collect();
1250
1251 buffer_snapshots.push((next_version, next_snapshot));
1252
1253 language_server
1254 .notify::<lsp::notification::DidChangeTextDocument>(
1255 lsp::DidChangeTextDocumentParams {
1256 text_document: lsp::VersionedTextDocumentIdentifier::new(
1257 uri,
1258 next_version,
1259 ),
1260 content_changes,
1261 },
1262 )
1263 .log_err();
1264 }
1265 BufferEvent::Saved => {
1266 let file = File::from_dyn(buffer.read(cx).file())?;
1267 let worktree_id = file.worktree_id(cx);
1268 let abs_path = file.as_local()?.abs_path(cx);
1269 let text_document = lsp::TextDocumentIdentifier {
1270 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1271 };
1272
1273 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1274 server
1275 .notify::<lsp::notification::DidSaveTextDocument>(
1276 lsp::DidSaveTextDocumentParams {
1277 text_document: text_document.clone(),
1278 text: None,
1279 },
1280 )
1281 .log_err();
1282 }
1283 }
1284 _ => {}
1285 }
1286
1287 None
1288 }
1289
1290 fn language_servers_for_worktree(
1291 &self,
1292 worktree_id: WorktreeId,
1293 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1294 self.language_servers.iter().filter_map(
1295 move |((language_server_worktree_id, _), server)| {
1296 if *language_server_worktree_id == worktree_id {
1297 Some(server)
1298 } else {
1299 None
1300 }
1301 },
1302 )
1303 }
1304
1305 fn assign_language_to_buffer(
1306 &mut self,
1307 buffer: &ModelHandle<Buffer>,
1308 cx: &mut ModelContext<Self>,
1309 ) -> Option<()> {
1310 // If the buffer has a language, set it and start the language server if we haven't already.
1311 let full_path = buffer.read(cx).file()?.full_path(cx);
1312 let language = self.languages.select_language(&full_path)?;
1313 buffer.update(cx, |buffer, cx| {
1314 buffer.set_language(Some(language.clone()), cx);
1315 });
1316
1317 let file = File::from_dyn(buffer.read(cx).file())?;
1318 let worktree = file.worktree.read(cx).as_local()?;
1319 let worktree_id = worktree.id();
1320 let worktree_abs_path = worktree.abs_path().clone();
1321 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1322
1323 None
1324 }
1325
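    /// Starts a language server for the given worktree and language if one isn't
    /// already running, wiring up handlers for published diagnostics, workspace
    /// configuration requests, workspace edits, and progress notifications, and then
    /// announcing every matching open buffer to the new server.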
1326 fn start_language_server(
1327 &mut self,
1328 worktree_id: WorktreeId,
1329 worktree_path: Arc<Path>,
1330 language: Arc<Language>,
1331 cx: &mut ModelContext<Self>,
1332 ) {
1333 let adapter = if let Some(adapter) = language.lsp_adapter() {
1334 adapter
1335 } else {
1336 return;
1337 };
1338 let key = (worktree_id, adapter.name());
1339 self.started_language_servers
1340 .entry(key.clone())
1341 .or_insert_with(|| {
1342 let server_id = post_inc(&mut self.next_language_server_id);
1343 let language_server = self.languages.start_language_server(
1344 server_id,
1345 language.clone(),
1346 worktree_path,
1347 self.client.http_client(),
1348 cx,
1349 );
1350 cx.spawn_weak(|this, mut cx| async move {
1351 let language_server = language_server?.await.log_err()?;
1352 let language_server = language_server
1353 .initialize(adapter.initialization_options())
1354 .await
1355 .log_err()?;
1356 let this = this.upgrade(&cx)?;
1357 let disk_based_diagnostics_progress_token =
1358 adapter.disk_based_diagnostics_progress_token();
1359
1360 language_server
1361 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1362 let this = this.downgrade();
1363 let adapter = adapter.clone();
1364 move |params, mut cx| {
1365 if let Some(this) = this.upgrade(&cx) {
1366 this.update(&mut cx, |this, cx| {
1367 this.on_lsp_diagnostics_published(
1368 server_id,
1369 params,
1370 &adapter,
1371 disk_based_diagnostics_progress_token,
1372 cx,
1373 );
1374 });
1375 }
1376 }
1377 })
1378 .detach();
1379
1380 language_server
1381 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1382 let settings = this
1383 .read_with(&cx, |this, _| this.language_server_settings.clone());
1384 move |params, _| {
1385 let settings = settings.lock().clone();
1386 async move {
1387 Ok(params
1388 .items
1389 .into_iter()
1390 .map(|item| {
1391 if let Some(section) = &item.section {
1392 settings
1393 .get(section)
1394 .cloned()
1395 .unwrap_or(serde_json::Value::Null)
1396 } else {
1397 settings.clone()
1398 }
1399 })
1400 .collect())
1401 }
1402 }
1403 })
1404 .detach();
1405
1406 language_server
1407 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1408 let this = this.downgrade();
1409 let adapter = adapter.clone();
1410 let language_server = language_server.clone();
1411 move |params, cx| {
1412 Self::on_lsp_workspace_edit(
1413 this,
1414 params,
1415 server_id,
1416 adapter.clone(),
1417 language_server.clone(),
1418 cx,
1419 )
1420 }
1421 })
1422 .detach();
1423
1424 language_server
1425 .on_notification::<lsp::notification::Progress, _>({
1426 let this = this.downgrade();
1427 move |params, mut cx| {
1428 if let Some(this) = this.upgrade(&cx) {
1429 this.update(&mut cx, |this, cx| {
1430 this.on_lsp_progress(
1431 params,
1432 server_id,
1433 disk_based_diagnostics_progress_token,
1434 cx,
1435 );
1436 });
1437 }
1438 }
1439 })
1440 .detach();
1441
1442 this.update(&mut cx, |this, cx| {
1443 this.language_servers
1444 .insert(key.clone(), (adapter, language_server.clone()));
1445 this.language_server_statuses.insert(
1446 server_id,
1447 LanguageServerStatus {
1448 name: language_server.name().to_string(),
1449 pending_work: Default::default(),
1450 pending_diagnostic_updates: 0,
1451 },
1452 );
1453 language_server
1454 .notify::<lsp::notification::DidChangeConfiguration>(
1455 lsp::DidChangeConfigurationParams {
1456 settings: this.language_server_settings.lock().clone(),
1457 },
1458 )
1459 .ok();
1460
1461 if let Some(project_id) = this.remote_id() {
1462 this.client
1463 .send(proto::StartLanguageServer {
1464 project_id,
1465 server: Some(proto::LanguageServer {
1466 id: server_id as u64,
1467 name: language_server.name().to_string(),
1468 }),
1469 })
1470 .log_err();
1471 }
1472
1473 // Tell the language server about every open buffer in the worktree that matches the language.
1474 for buffer in this.opened_buffers.values() {
1475 if let Some(buffer_handle) = buffer.upgrade(cx) {
1476 let buffer = buffer_handle.read(cx);
1477 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1478 file
1479 } else {
1480 continue;
1481 };
1482 let language = if let Some(language) = buffer.language() {
1483 language
1484 } else {
1485 continue;
1486 };
1487 if file.worktree.read(cx).id() != key.0
1488 || language.lsp_adapter().map(|a| a.name())
1489 != Some(key.1.clone())
1490 {
1491 continue;
1492 }
1493
1494 let file = file.as_local()?;
1495 let versions = this
1496 .buffer_snapshots
1497 .entry(buffer.remote_id())
1498 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1499 let (version, initial_snapshot) = versions.last().unwrap();
1500 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1501 language_server
1502 .notify::<lsp::notification::DidOpenTextDocument>(
1503 lsp::DidOpenTextDocumentParams {
1504 text_document: lsp::TextDocumentItem::new(
1505 uri,
1506 Default::default(),
1507 *version,
1508 initial_snapshot.text(),
1509 ),
1510 },
1511 )
1512 .log_err()?;
1513 buffer_handle.update(cx, |buffer, cx| {
1514 buffer.set_completion_triggers(
1515 language_server
1516 .capabilities()
1517 .completion_provider
1518 .as_ref()
1519 .and_then(|provider| {
1520 provider.trigger_characters.clone()
1521 })
1522 .unwrap_or(Vec::new()),
1523 cx,
1524 )
1525 });
1526 }
1527 }
1528
1529 cx.notify();
1530 Some(())
1531 });
1532
1533 Some(language_server)
1534 })
1535 });
1536 }
1537
1538 pub fn restart_language_servers_for_buffers(
1539 &mut self,
1540 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Option<()> {
1543 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1544 .into_iter()
1545 .filter_map(|buffer| {
1546 let file = File::from_dyn(buffer.read(cx).file())?;
1547 let worktree = file.worktree.read(cx).as_local()?;
1548 let worktree_id = worktree.id();
1549 let worktree_abs_path = worktree.abs_path().clone();
1550 let full_path = file.full_path(cx);
1551 Some((worktree_id, worktree_abs_path, full_path))
1552 })
1553 .collect();
1554 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1555 let language = self.languages.select_language(&full_path)?;
1556 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1557 }
1558
1559 None
1560 }
1561
1562 fn restart_language_server(
1563 &mut self,
1564 worktree_id: WorktreeId,
1565 worktree_path: Arc<Path>,
1566 language: Arc<Language>,
1567 cx: &mut ModelContext<Self>,
1568 ) {
1569 let adapter = if let Some(adapter) = language.lsp_adapter() {
1570 adapter
1571 } else {
1572 return;
1573 };
1574 let key = (worktree_id, adapter.name());
1575 let server_to_shutdown = self.language_servers.remove(&key);
1576 self.started_language_servers.remove(&key);
1577 server_to_shutdown
1578 .as_ref()
1579 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1580 cx.spawn_weak(|this, mut cx| async move {
1581 if let Some(this) = this.upgrade(&cx) {
1582 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1583 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1584 shutdown_task.await;
1585 }
1586 }
1587
1588 this.update(&mut cx, |this, cx| {
1589 this.start_language_server(worktree_id, worktree_path, language, cx);
1590 });
1591 }
1592 })
1593 .detach();
1594 }
1595
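    /// Handles `textDocument/publishDiagnostics`. When the adapter has no dedicated
    /// progress token for disk-based diagnostics, each publish is bracketed by
    /// started/finished events and the corresponding updates are broadcast to
    /// collaborators.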
1596 fn on_lsp_diagnostics_published(
1597 &mut self,
1598 server_id: usize,
1599 mut params: lsp::PublishDiagnosticsParams,
1600 adapter: &Arc<dyn LspAdapter>,
1601 disk_based_diagnostics_progress_token: Option<&str>,
1602 cx: &mut ModelContext<Self>,
1603 ) {
1604 adapter.process_diagnostics(&mut params);
1605 if disk_based_diagnostics_progress_token.is_none() {
1606 self.disk_based_diagnostics_started(cx);
1607 self.broadcast_language_server_update(
1608 server_id,
1609 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1610 proto::LspDiskBasedDiagnosticsUpdating {},
1611 ),
1612 );
1613 }
1614 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1615 .log_err();
1616 if disk_based_diagnostics_progress_token.is_none() {
1617 self.disk_based_diagnostics_finished(cx);
1618 self.broadcast_language_server_update(
1619 server_id,
1620 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1621 proto::LspDiskBasedDiagnosticsUpdated {},
1622 ),
1623 );
1624 }
1625 }
1626
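    /// Handles `$/progress` notifications. Progress on the adapter's disk-based
    /// diagnostics token is tracked as a counter of pending updates; all other tokens
    /// are recorded as pending work and broadcast to collaborators.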
1627 fn on_lsp_progress(
1628 &mut self,
1629 progress: lsp::ProgressParams,
1630 server_id: usize,
1631 disk_based_diagnostics_progress_token: Option<&str>,
1632 cx: &mut ModelContext<Self>,
1633 ) {
1634 let token = match progress.token {
1635 lsp::NumberOrString::String(token) => token,
1636 lsp::NumberOrString::Number(token) => {
1637 log::info!("skipping numeric progress token {}", token);
1638 return;
1639 }
1640 };
1641 let progress = match progress.value {
1642 lsp::ProgressParamsValue::WorkDone(value) => value,
1643 };
1644 let language_server_status =
1645 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1646 status
1647 } else {
1648 return;
1649 };
1650 match progress {
1651 lsp::WorkDoneProgress::Begin(_) => {
1652 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1653 language_server_status.pending_diagnostic_updates += 1;
1654 if language_server_status.pending_diagnostic_updates == 1 {
1655 self.disk_based_diagnostics_started(cx);
1656 self.broadcast_language_server_update(
1657 server_id,
1658 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1659 proto::LspDiskBasedDiagnosticsUpdating {},
1660 ),
1661 );
1662 }
1663 } else {
1664 self.on_lsp_work_start(server_id, token.clone(), cx);
1665 self.broadcast_language_server_update(
1666 server_id,
1667 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1668 token,
1669 }),
1670 );
1671 }
1672 }
1673 lsp::WorkDoneProgress::Report(report) => {
1674 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1675 self.on_lsp_work_progress(
1676 server_id,
1677 token.clone(),
1678 LanguageServerProgress {
1679 message: report.message.clone(),
1680 percentage: report.percentage.map(|p| p as usize),
1681 last_update_at: Instant::now(),
1682 },
1683 cx,
1684 );
1685 self.broadcast_language_server_update(
1686 server_id,
1687 proto::update_language_server::Variant::WorkProgress(
1688 proto::LspWorkProgress {
1689 token,
1690 message: report.message,
1691 percentage: report.percentage.map(|p| p as u32),
1692 },
1693 ),
1694 );
1695 }
1696 }
1697 lsp::WorkDoneProgress::End(_) => {
1698 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1699 language_server_status.pending_diagnostic_updates -= 1;
1700 if language_server_status.pending_diagnostic_updates == 0 {
1701 self.disk_based_diagnostics_finished(cx);
1702 self.broadcast_language_server_update(
1703 server_id,
1704 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1705 proto::LspDiskBasedDiagnosticsUpdated {},
1706 ),
1707 );
1708 }
1709 } else {
1710 self.on_lsp_work_end(server_id, token.clone(), cx);
1711 self.broadcast_language_server_update(
1712 server_id,
1713 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1714 token,
1715 }),
1716 );
1717 }
1718 }
1719 }
1720 }
1721
1722 fn on_lsp_work_start(
1723 &mut self,
1724 language_server_id: usize,
1725 token: String,
1726 cx: &mut ModelContext<Self>,
1727 ) {
1728 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1729 status.pending_work.insert(
1730 token,
1731 LanguageServerProgress {
1732 message: None,
1733 percentage: None,
1734 last_update_at: Instant::now(),
1735 },
1736 );
1737 cx.notify();
1738 }
1739 }
1740
1741 fn on_lsp_work_progress(
1742 &mut self,
1743 language_server_id: usize,
1744 token: String,
1745 progress: LanguageServerProgress,
1746 cx: &mut ModelContext<Self>,
1747 ) {
1748 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1749 status.pending_work.insert(token, progress);
1750 cx.notify();
1751 }
1752 }
1753
1754 fn on_lsp_work_end(
1755 &mut self,
1756 language_server_id: usize,
1757 token: String,
1758 cx: &mut ModelContext<Self>,
1759 ) {
1760 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1761 status.pending_work.remove(&token);
1762 cx.notify();
1763 }
1764 }
1765
1766 async fn on_lsp_workspace_edit(
1767 this: WeakModelHandle<Self>,
1768 params: lsp::ApplyWorkspaceEditParams,
1769 server_id: usize,
1770 adapter: Arc<dyn LspAdapter>,
1771 language_server: Arc<LanguageServer>,
1772 mut cx: AsyncAppContext,
1773 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1774 let this = this
1775 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1777 let transaction = Self::deserialize_workspace_edit(
1778 this.clone(),
1779 params.edit,
1780 true,
1781 adapter.clone(),
1782 language_server.clone(),
1783 &mut cx,
1784 )
1785 .await
1786 .log_err();
1787 this.update(&mut cx, |this, _| {
1788 if let Some(transaction) = transaction {
1789 this.last_workspace_edits_by_language_server
1790 .insert(server_id, transaction);
1791 }
1792 });
1793 Ok(lsp::ApplyWorkspaceEditResponse {
1794 applied: true,
1795 failed_change: None,
1796 failure_reason: None,
1797 })
1798 }
1799
1800 fn broadcast_language_server_update(
1801 &self,
1802 language_server_id: usize,
1803 event: proto::update_language_server::Variant,
1804 ) {
1805 if let Some(project_id) = self.remote_id() {
1806 self.client
1807 .send(proto::UpdateLanguageServer {
1808 project_id,
1809 language_server_id: language_server_id as u64,
1810 variant: Some(event),
1811 })
1812 .log_err();
1813 }
1814 }
1815
1816 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1817 for (_, server) in self.language_servers.values() {
1818 server
1819 .notify::<lsp::notification::DidChangeConfiguration>(
1820 lsp::DidChangeConfigurationParams {
1821 settings: settings.clone(),
1822 },
1823 )
1824 .ok();
1825 }
1826 *self.language_server_settings.lock() = settings;
1827 }
1828
1829 pub fn language_server_statuses(
1830 &self,
1831 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1832 self.language_server_statuses.values()
1833 }
1834
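    /// Converts an LSP diagnostics publish into grouped `DiagnosticEntry` values: each
    /// primary diagnostic gets its own group, related information in the same file is
    /// added to that group as secondary entries, and supporting diagnostics contribute
    /// their severity and "unnecessary" flag back to the matching entries.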
1835 pub fn update_diagnostics(
1836 &mut self,
1837 params: lsp::PublishDiagnosticsParams,
1838 disk_based_sources: &[&str],
1839 cx: &mut ModelContext<Self>,
1840 ) -> Result<()> {
1841 let abs_path = params
1842 .uri
1843 .to_file_path()
1844 .map_err(|_| anyhow!("URI is not a file"))?;
1845 let mut next_group_id = 0;
1846 let mut diagnostics = Vec::default();
1847 let mut primary_diagnostic_group_ids = HashMap::default();
1848 let mut sources_by_group_id = HashMap::default();
1849 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1851 let source = diagnostic.source.as_ref();
1852 let code = diagnostic.code.as_ref().map(|code| match code {
1853 lsp::NumberOrString::Number(code) => code.to_string(),
1854 lsp::NumberOrString::String(code) => code.clone(),
1855 });
1856 let range = range_from_lsp(diagnostic.range);
1857 let is_supporting = diagnostic
1858 .related_information
1859 .as_ref()
1860 .map_or(false, |infos| {
1861 infos.iter().any(|info| {
1862 primary_diagnostic_group_ids.contains_key(&(
1863 source,
1864 code.clone(),
1865 range_from_lsp(info.location.range),
1866 ))
1867 })
1868 });
1869
1870 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1871 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1872 });
1873
1874 if is_supporting {
1875 supporting_diagnostics.insert(
1876 (source, code.clone(), range),
1877 (diagnostic.severity, is_unnecessary),
1878 );
1879 } else {
1880 let group_id = post_inc(&mut next_group_id);
1881 let is_disk_based = source.map_or(false, |source| {
1882 disk_based_sources.contains(&source.as_str())
1883 });
1884
1885 sources_by_group_id.insert(group_id, source);
1886 primary_diagnostic_group_ids
1887 .insert((source, code.clone(), range.clone()), group_id);
1888
1889 diagnostics.push(DiagnosticEntry {
1890 range,
1891 diagnostic: Diagnostic {
1892 code: code.clone(),
1893 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1894 message: diagnostic.message.clone(),
1895 group_id,
1896 is_primary: true,
1897 is_valid: true,
1898 is_disk_based,
1899 is_unnecessary,
1900 },
1901 });
1902 if let Some(infos) = &diagnostic.related_information {
1903 for info in infos {
1904 if info.location.uri == params.uri && !info.message.is_empty() {
1905 let range = range_from_lsp(info.location.range);
1906 diagnostics.push(DiagnosticEntry {
1907 range,
1908 diagnostic: Diagnostic {
1909 code: code.clone(),
1910 severity: DiagnosticSeverity::INFORMATION,
1911 message: info.message.clone(),
1912 group_id,
1913 is_primary: false,
1914 is_valid: true,
1915 is_disk_based,
1916 is_unnecessary: false,
1917 },
1918 });
1919 }
1920 }
1921 }
1922 }
1923 }
1924
1925 for entry in &mut diagnostics {
1926 let diagnostic = &mut entry.diagnostic;
1927 if !diagnostic.is_primary {
1928 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1929 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1930 source,
1931 diagnostic.code.clone(),
1932 entry.range.clone(),
1933 )) {
1934 if let Some(severity) = severity {
1935 diagnostic.severity = severity;
1936 }
1937 diagnostic.is_unnecessary = is_unnecessary;
1938 }
1939 }
1940 }
1941
1942 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1943 Ok(())
1944 }
1945
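    /// Records diagnostics for the file at `abs_path`: updates the open buffer
    /// for that path (if any) and the owning local worktree, then emits
    /// `Event::DiagnosticsUpdated`. Diagnostics for invisible worktrees are ignored.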
1946 pub fn update_diagnostic_entries(
1947 &mut self,
1948 abs_path: PathBuf,
1949 version: Option<i32>,
1950 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1951 cx: &mut ModelContext<Project>,
1952 ) -> Result<(), anyhow::Error> {
1953 let (worktree, relative_path) = self
1954 .find_local_worktree(&abs_path, cx)
1955 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1956 if !worktree.read(cx).is_visible() {
1957 return Ok(());
1958 }
1959
1960 let project_path = ProjectPath {
1961 worktree_id: worktree.read(cx).id(),
1962 path: relative_path.into(),
1963 };
1964 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
1965 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1966 }
1967
1968 worktree.update(cx, |worktree, cx| {
1969 worktree
1970 .as_local_mut()
1971 .ok_or_else(|| anyhow!("not a local worktree"))?
1972 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1973 })?;
1974 cx.emit(Event::DiagnosticsUpdated(project_path));
1975 Ok(())
1976 }
1977
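    /// Stores diagnostics on a buffer, resolving them against the snapshot that
    /// matches the reported LSP document `version`. Disk-based diagnostics are
    /// remapped across unsaved edits, and ranges are clipped to the buffer, with
    /// empty ranges widened to cover one character.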
1978 fn update_buffer_diagnostics(
1979 &mut self,
1980 buffer: &ModelHandle<Buffer>,
1981 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1982 version: Option<i32>,
1983 cx: &mut ModelContext<Self>,
1984 ) -> Result<()> {
1985 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1986 Ordering::Equal
1987 .then_with(|| b.is_primary.cmp(&a.is_primary))
1988 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1989 .then_with(|| a.severity.cmp(&b.severity))
1990 .then_with(|| a.message.cmp(&b.message))
1991 }
1992
1993 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1994
1995 diagnostics.sort_unstable_by(|a, b| {
1996 Ordering::Equal
1997 .then_with(|| a.range.start.cmp(&b.range.start))
1998 .then_with(|| b.range.end.cmp(&a.range.end))
1999 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2000 });
2001
2002 let mut sanitized_diagnostics = Vec::new();
2003 let edits_since_save = Patch::new(
2004 snapshot
2005 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2006 .collect(),
2007 );
2008 for entry in diagnostics {
2009 let start;
2010 let end;
2011 if entry.diagnostic.is_disk_based {
2012 // Some diagnostics are based on files on disk instead of buffers'
2013 // current contents. Adjust these diagnostics' ranges to reflect
2014 // any unsaved edits.
2015 start = edits_since_save.old_to_new(entry.range.start);
2016 end = edits_since_save.old_to_new(entry.range.end);
2017 } else {
2018 start = entry.range.start;
2019 end = entry.range.end;
2020 }
2021
2022 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2023 ..snapshot.clip_point_utf16(end, Bias::Right);
2024
2025 // Expand empty ranges by one character
2026 if range.start == range.end {
2027 range.end.column += 1;
2028 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2029 if range.start == range.end && range.end.column > 0 {
2030 range.start.column -= 1;
2031 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2032 }
2033 }
2034
2035 sanitized_diagnostics.push(DiagnosticEntry {
2036 range,
2037 diagnostic: entry.diagnostic,
2038 });
2039 }
2040 drop(edits_since_save);
2041
2042 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2043 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2044 Ok(())
2045 }
2046
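    /// Reloads the given buffers from disk, skipping buffers that aren't dirty.
    /// Local buffers are reloaded directly; remote buffers are reloaded by the
    /// host via a `ReloadBuffers` request. The resulting transactions are
    /// collected into a `ProjectTransaction` and optionally pushed to each
    /// buffer's undo history.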
2047 pub fn reload_buffers(
2048 &self,
2049 buffers: HashSet<ModelHandle<Buffer>>,
2050 push_to_history: bool,
2051 cx: &mut ModelContext<Self>,
2052 ) -> Task<Result<ProjectTransaction>> {
2053 let mut local_buffers = Vec::new();
2054 let mut remote_buffers = None;
2055 for buffer_handle in buffers {
2056 let buffer = buffer_handle.read(cx);
2057 if buffer.is_dirty() {
2058 if let Some(file) = File::from_dyn(buffer.file()) {
2059 if file.is_local() {
2060 local_buffers.push(buffer_handle);
2061 } else {
2062 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2063 }
2064 }
2065 }
2066 }
2067
2068 let remote_buffers = self.remote_id().zip(remote_buffers);
2069 let client = self.client.clone();
2070
2071 cx.spawn(|this, mut cx| async move {
2072 let mut project_transaction = ProjectTransaction::default();
2073
2074 if let Some((project_id, remote_buffers)) = remote_buffers {
2075 let response = client
2076 .request(proto::ReloadBuffers {
2077 project_id,
2078 buffer_ids: remote_buffers
2079 .iter()
2080 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2081 .collect(),
2082 })
2083 .await?
2084 .transaction
2085 .ok_or_else(|| anyhow!("missing transaction"))?;
2086 project_transaction = this
2087 .update(&mut cx, |this, cx| {
2088 this.deserialize_project_transaction(response, push_to_history, cx)
2089 })
2090 .await?;
2091 }
2092
2093 for buffer in local_buffers {
2094 let transaction = buffer
2095 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2096 .await?;
2097 buffer.update(&mut cx, |buffer, cx| {
2098 if let Some(transaction) = transaction {
2099 if !push_to_history {
2100 buffer.forget_transaction(transaction.id);
2101 }
2102 project_transaction.0.insert(cx.handle(), transaction);
2103 }
2104 });
2105 }
2106
2107 Ok(project_transaction)
2108 })
2109 }
2110
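    /// Formats the given buffers. Local buffers are formatted by their language
    /// server with `textDocument/formatting`, falling back to a whole-buffer
    /// `textDocument/rangeFormatting` when only range formatting is supported;
    /// remote buffers are formatted by the host via a `FormatBuffers` request.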
2111 pub fn format(
2112 &self,
2113 buffers: HashSet<ModelHandle<Buffer>>,
2114 push_to_history: bool,
2115 cx: &mut ModelContext<Project>,
2116 ) -> Task<Result<ProjectTransaction>> {
2117 let mut local_buffers = Vec::new();
2118 let mut remote_buffers = None;
2119 for buffer_handle in buffers {
2120 let buffer = buffer_handle.read(cx);
2121 if let Some(file) = File::from_dyn(buffer.file()) {
2122 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2123 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2124 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2125 }
2126 } else {
2127 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2128 }
2129 } else {
2130 return Task::ready(Ok(Default::default()));
2131 }
2132 }
2133
2134 let remote_buffers = self.remote_id().zip(remote_buffers);
2135 let client = self.client.clone();
2136
2137 cx.spawn(|this, mut cx| async move {
2138 let mut project_transaction = ProjectTransaction::default();
2139
2140 if let Some((project_id, remote_buffers)) = remote_buffers {
2141 let response = client
2142 .request(proto::FormatBuffers {
2143 project_id,
2144 buffer_ids: remote_buffers
2145 .iter()
2146 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2147 .collect(),
2148 })
2149 .await?
2150 .transaction
2151 .ok_or_else(|| anyhow!("missing transaction"))?;
2152 project_transaction = this
2153 .update(&mut cx, |this, cx| {
2154 this.deserialize_project_transaction(response, push_to_history, cx)
2155 })
2156 .await?;
2157 }
2158
2159 for (buffer, buffer_abs_path, language_server) in local_buffers {
2160 let text_document = lsp::TextDocumentIdentifier::new(
2161 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2162 );
2163 let capabilities = &language_server.capabilities();
2164 let tab_size = cx.update(|cx| {
2165 let language_name = buffer.read(cx).language().map(|language| language.name());
2166 cx.global::<Settings>().tab_size(language_name.as_deref())
2167 });
2168 let lsp_edits = if capabilities
2169 .document_formatting_provider
2170 .as_ref()
2171 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2172 {
2173 language_server
2174 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2175 text_document,
2176 options: lsp::FormattingOptions {
2177 tab_size,
2178 insert_spaces: true,
2179 insert_final_newline: Some(true),
2180 ..Default::default()
2181 },
2182 work_done_progress_params: Default::default(),
2183 })
2184 .await?
2185 } else if capabilities
2186 .document_range_formatting_provider
2187 .as_ref()
2188 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2189 {
2190 let buffer_start = lsp::Position::new(0, 0);
2191 let buffer_end =
2192 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2193 language_server
2194 .request::<lsp::request::RangeFormatting>(
2195 lsp::DocumentRangeFormattingParams {
2196 text_document,
2197 range: lsp::Range::new(buffer_start, buffer_end),
2198 options: lsp::FormattingOptions {
2199                                 tab_size,
2200 insert_spaces: true,
2201 insert_final_newline: Some(true),
2202 ..Default::default()
2203 },
2204 work_done_progress_params: Default::default(),
2205 },
2206 )
2207 .await?
2208 } else {
2209 continue;
2210 };
2211
2212 if let Some(lsp_edits) = lsp_edits {
2213 let edits = this
2214 .update(&mut cx, |this, cx| {
2215 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2216 })
2217 .await?;
2218 buffer.update(&mut cx, |buffer, cx| {
2219 buffer.finalize_last_transaction();
2220 buffer.start_transaction();
2221 for (range, text) in edits {
2222 buffer.edit([range], text, cx);
2223 }
2224 if buffer.end_transaction(cx).is_some() {
2225 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2226 if !push_to_history {
2227 buffer.forget_transaction(transaction.id);
2228 }
2229 project_transaction.0.insert(cx.handle(), transaction);
2230 }
2231 });
2232 }
2233 }
2234
2235 Ok(project_transaction)
2236 })
2237 }
2238
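    /// Requests the definition of the symbol at `position` from the buffer's
    /// language server (or from the host, for remote projects).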
2239 pub fn definition<T: ToPointUtf16>(
2240 &self,
2241 buffer: &ModelHandle<Buffer>,
2242 position: T,
2243 cx: &mut ModelContext<Self>,
2244 ) -> Task<Result<Vec<Location>>> {
2245 let position = position.to_point_utf16(buffer.read(cx));
2246 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2247 }
2248
2249 pub fn references<T: ToPointUtf16>(
2250 &self,
2251 buffer: &ModelHandle<Buffer>,
2252 position: T,
2253 cx: &mut ModelContext<Self>,
2254 ) -> Task<Result<Vec<Location>>> {
2255 let position = position.to_point_utf16(buffer.read(cx));
2256 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2257 }
2258
2259 pub fn document_highlights<T: ToPointUtf16>(
2260 &self,
2261 buffer: &ModelHandle<Buffer>,
2262 position: T,
2263 cx: &mut ModelContext<Self>,
2264 ) -> Task<Result<Vec<DocumentHighlight>>> {
2265 let position = position.to_point_utf16(buffer.read(cx));
2266
2267 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2268 }
2269
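    /// Searches for workspace symbols matching `query`. Locally, every running
    /// language server is queried and each result is resolved back to a project
    /// path; remotely, the query is forwarded to the host.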
2270 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2271 if self.is_local() {
2272 let mut requests = Vec::new();
2273 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2274 let worktree_id = *worktree_id;
2275 if let Some(worktree) = self
2276 .worktree_for_id(worktree_id, cx)
2277 .and_then(|worktree| worktree.read(cx).as_local())
2278 {
2279 let lsp_adapter = lsp_adapter.clone();
2280 let worktree_abs_path = worktree.abs_path().clone();
2281 requests.push(
2282 language_server
2283 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2284 query: query.to_string(),
2285 ..Default::default()
2286 })
2287 .log_err()
2288 .map(move |response| {
2289 (
2290 lsp_adapter,
2291 worktree_id,
2292 worktree_abs_path,
2293 response.unwrap_or_default(),
2294 )
2295 }),
2296 );
2297 }
2298 }
2299
2300 cx.spawn_weak(|this, cx| async move {
2301 let responses = futures::future::join_all(requests).await;
2302 let this = if let Some(this) = this.upgrade(&cx) {
2303 this
2304 } else {
2305 return Ok(Default::default());
2306 };
2307 this.read_with(&cx, |this, cx| {
2308 let mut symbols = Vec::new();
2309 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2310 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2311 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2312 let mut worktree_id = source_worktree_id;
2313 let path;
2314 if let Some((worktree, rel_path)) =
2315 this.find_local_worktree(&abs_path, cx)
2316 {
2317 worktree_id = worktree.read(cx).id();
2318 path = rel_path;
2319 } else {
2320 path = relativize_path(&worktree_abs_path, &abs_path);
2321 }
2322
2323 let label = this
2324 .languages
2325 .select_language(&path)
2326 .and_then(|language| {
2327 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2328 })
2329 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2330 let signature = this.symbol_signature(worktree_id, &path);
2331
2332 Some(Symbol {
2333 source_worktree_id,
2334 worktree_id,
2335 language_server_name: adapter.name(),
2336 name: lsp_symbol.name,
2337 kind: lsp_symbol.kind,
2338 label,
2339 path,
2340 range: range_from_lsp(lsp_symbol.location.range),
2341 signature,
2342 })
2343 }));
2344 }
2345 Ok(symbols)
2346 })
2347 })
2348 } else if let Some(project_id) = self.remote_id() {
2349 let request = self.client.request(proto::GetProjectSymbols {
2350 project_id,
2351 query: query.to_string(),
2352 });
2353 cx.spawn_weak(|this, cx| async move {
2354 let response = request.await?;
2355 let mut symbols = Vec::new();
2356 if let Some(this) = this.upgrade(&cx) {
2357 this.read_with(&cx, |this, _| {
2358 symbols.extend(
2359 response
2360 .symbols
2361 .into_iter()
2362 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2363 );
2364 })
2365 }
2366 Ok(symbols)
2367 })
2368 } else {
2369 Task::ready(Ok(Default::default()))
2370 }
2371 }
2372
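    /// Opens the buffer containing the given symbol, using the language server
    /// that produced it locally or an `OpenBufferForSymbol` request remotely.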
2373 pub fn open_buffer_for_symbol(
2374 &mut self,
2375 symbol: &Symbol,
2376 cx: &mut ModelContext<Self>,
2377 ) -> Task<Result<ModelHandle<Buffer>>> {
2378 if self.is_local() {
2379 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2380 symbol.source_worktree_id,
2381 symbol.language_server_name.clone(),
2382 )) {
2383 server.clone()
2384 } else {
2385 return Task::ready(Err(anyhow!(
2386 "language server for worktree and language not found"
2387 )));
2388 };
2389
2390 let worktree_abs_path = if let Some(worktree_abs_path) = self
2391 .worktree_for_id(symbol.worktree_id, cx)
2392 .and_then(|worktree| worktree.read(cx).as_local())
2393 .map(|local_worktree| local_worktree.abs_path())
2394 {
2395 worktree_abs_path
2396 } else {
2397 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2398 };
2399 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2400 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2401 uri
2402 } else {
2403 return Task::ready(Err(anyhow!("invalid symbol path")));
2404 };
2405
2406 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2407 } else if let Some(project_id) = self.remote_id() {
2408 let request = self.client.request(proto::OpenBufferForSymbol {
2409 project_id,
2410 symbol: Some(serialize_symbol(symbol)),
2411 });
2412 cx.spawn(|this, mut cx| async move {
2413 let response = request.await?;
2414 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2415 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2416 .await
2417 })
2418 } else {
2419 Task::ready(Err(anyhow!("project does not have a remote id")))
2420 }
2421 }
2422
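    /// Requests completions at `position`. Local buffers query their language
    /// server directly and convert each LSP completion item into a `Completion`
    /// anchored in the buffer, discarding items whose edit range doesn't fit the
    /// current buffer contents; remote buffers ask the host via `GetCompletions`.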
2423 pub fn completions<T: ToPointUtf16>(
2424 &self,
2425 source_buffer_handle: &ModelHandle<Buffer>,
2426 position: T,
2427 cx: &mut ModelContext<Self>,
2428 ) -> Task<Result<Vec<Completion>>> {
2429 let source_buffer_handle = source_buffer_handle.clone();
2430 let source_buffer = source_buffer_handle.read(cx);
2431 let buffer_id = source_buffer.remote_id();
2432 let language = source_buffer.language().cloned();
2433 let worktree;
2434 let buffer_abs_path;
2435 if let Some(file) = File::from_dyn(source_buffer.file()) {
2436 worktree = file.worktree.clone();
2437 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2438 } else {
2439 return Task::ready(Ok(Default::default()));
2440 };
2441
2442 let position = position.to_point_utf16(source_buffer);
2443 let anchor = source_buffer.anchor_after(position);
2444
2445 if worktree.read(cx).as_local().is_some() {
2446 let buffer_abs_path = buffer_abs_path.unwrap();
2447 let (_, lang_server) =
2448 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2449 server.clone()
2450 } else {
2451 return Task::ready(Ok(Default::default()));
2452 };
2453
2454 cx.spawn(|_, cx| async move {
2455 let completions = lang_server
2456 .request::<lsp::request::Completion>(lsp::CompletionParams {
2457 text_document_position: lsp::TextDocumentPositionParams::new(
2458 lsp::TextDocumentIdentifier::new(
2459 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2460 ),
2461 point_to_lsp(position),
2462 ),
2463 context: Default::default(),
2464 work_done_progress_params: Default::default(),
2465 partial_result_params: Default::default(),
2466 })
2467 .await
2468 .context("lsp completion request failed")?;
2469
2470 let completions = if let Some(completions) = completions {
2471 match completions {
2472 lsp::CompletionResponse::Array(completions) => completions,
2473 lsp::CompletionResponse::List(list) => list.items,
2474 }
2475 } else {
2476 Default::default()
2477 };
2478
2479 source_buffer_handle.read_with(&cx, |this, _| {
2480 Ok(completions
2481 .into_iter()
2482 .filter_map(|lsp_completion| {
2483 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2484 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2485 (range_from_lsp(edit.range), edit.new_text.clone())
2486 }
2487 None => {
2488 let clipped_position =
2489 this.clip_point_utf16(position, Bias::Left);
2490 if position != clipped_position {
2491 log::info!("completion out of expected range");
2492 return None;
2493 }
2494 (
2495 this.common_prefix_at(
2496 clipped_position,
2497 &lsp_completion.label,
2498 ),
2499 lsp_completion.label.clone(),
2500 )
2501 }
2502 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2503 log::info!("unsupported insert/replace completion");
2504 return None;
2505 }
2506 };
2507
2508 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2509 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2510 if clipped_start == old_range.start && clipped_end == old_range.end {
2511 Some(Completion {
2512 old_range: this.anchor_before(old_range.start)
2513 ..this.anchor_after(old_range.end),
2514 new_text,
2515 label: language
2516 .as_ref()
2517 .and_then(|l| l.label_for_completion(&lsp_completion))
2518 .unwrap_or_else(|| {
2519 CodeLabel::plain(
2520 lsp_completion.label.clone(),
2521 lsp_completion.filter_text.as_deref(),
2522 )
2523 }),
2524 lsp_completion,
2525 })
2526 } else {
2527 log::info!("completion out of expected range");
2528 None
2529 }
2530 })
2531 .collect())
2532 })
2533 })
2534 } else if let Some(project_id) = self.remote_id() {
2535 let rpc = self.client.clone();
2536 let message = proto::GetCompletions {
2537 project_id,
2538 buffer_id,
2539 position: Some(language::proto::serialize_anchor(&anchor)),
2540 version: serialize_version(&source_buffer.version()),
2541 };
2542 cx.spawn_weak(|_, mut cx| async move {
2543 let response = rpc.request(message).await?;
2544
2545 source_buffer_handle
2546 .update(&mut cx, |buffer, _| {
2547 buffer.wait_for_version(deserialize_version(response.version))
2548 })
2549 .await;
2550
2551 response
2552 .completions
2553 .into_iter()
2554 .map(|completion| {
2555 language::proto::deserialize_completion(completion, language.as_ref())
2556 })
2557 .collect()
2558 })
2559 } else {
2560 Task::ready(Ok(Default::default()))
2561 }
2562 }
2563
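    /// Resolves the given completion with the language server and applies any
    /// `additionalTextEdits` it carries, returning the resulting transaction.
    /// For remote projects the edits are applied by the host and replayed locally.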
2564 pub fn apply_additional_edits_for_completion(
2565 &self,
2566 buffer_handle: ModelHandle<Buffer>,
2567 completion: Completion,
2568 push_to_history: bool,
2569 cx: &mut ModelContext<Self>,
2570 ) -> Task<Result<Option<Transaction>>> {
2571 let buffer = buffer_handle.read(cx);
2572 let buffer_id = buffer.remote_id();
2573
2574 if self.is_local() {
2575 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2576 {
2577 server.clone()
2578 } else {
2579 return Task::ready(Ok(Default::default()));
2580 };
2581
2582 cx.spawn(|this, mut cx| async move {
2583 let resolved_completion = lang_server
2584 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2585 .await?;
2586 if let Some(edits) = resolved_completion.additional_text_edits {
2587 let edits = this
2588 .update(&mut cx, |this, cx| {
2589 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2590 })
2591 .await?;
2592 buffer_handle.update(&mut cx, |buffer, cx| {
2593 buffer.finalize_last_transaction();
2594 buffer.start_transaction();
2595 for (range, text) in edits {
2596 buffer.edit([range], text, cx);
2597 }
2598 let transaction = if buffer.end_transaction(cx).is_some() {
2599 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2600 if !push_to_history {
2601 buffer.forget_transaction(transaction.id);
2602 }
2603 Some(transaction)
2604 } else {
2605 None
2606 };
2607 Ok(transaction)
2608 })
2609 } else {
2610 Ok(None)
2611 }
2612 })
2613 } else if let Some(project_id) = self.remote_id() {
2614 let client = self.client.clone();
2615 cx.spawn(|_, mut cx| async move {
2616 let response = client
2617 .request(proto::ApplyCompletionAdditionalEdits {
2618 project_id,
2619 buffer_id,
2620 completion: Some(language::proto::serialize_completion(&completion)),
2621 })
2622 .await?;
2623
2624 if let Some(transaction) = response.transaction {
2625 let transaction = language::proto::deserialize_transaction(transaction)?;
2626 buffer_handle
2627 .update(&mut cx, |buffer, _| {
2628 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2629 })
2630 .await;
2631 if push_to_history {
2632 buffer_handle.update(&mut cx, |buffer, _| {
2633 buffer.push_transaction(transaction.clone(), Instant::now());
2634 });
2635 }
2636 Ok(Some(transaction))
2637 } else {
2638 Ok(None)
2639 }
2640 })
2641 } else {
2642 Task::ready(Err(anyhow!("project does not have a remote id")))
2643 }
2644 }
2645
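    /// Requests the code actions available for `range`, restricted to quickfix,
    /// refactor, refactor-extract, and source kinds. Remote projects forward the
    /// request to the host.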
2646 pub fn code_actions<T: Clone + ToOffset>(
2647 &self,
2648 buffer_handle: &ModelHandle<Buffer>,
2649 range: Range<T>,
2650 cx: &mut ModelContext<Self>,
2651 ) -> Task<Result<Vec<CodeAction>>> {
2652 let buffer_handle = buffer_handle.clone();
2653 let buffer = buffer_handle.read(cx);
2654 let snapshot = buffer.snapshot();
2655 let relevant_diagnostics = snapshot
2656 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2657 .map(|entry| entry.to_lsp_diagnostic_stub())
2658 .collect();
2659 let buffer_id = buffer.remote_id();
2660 let worktree;
2661 let buffer_abs_path;
2662 if let Some(file) = File::from_dyn(buffer.file()) {
2663 worktree = file.worktree.clone();
2664 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2665 } else {
2666 return Task::ready(Ok(Default::default()));
2667 };
2668 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2669
2670 if worktree.read(cx).as_local().is_some() {
2671 let buffer_abs_path = buffer_abs_path.unwrap();
2672 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2673 {
2674 server.clone()
2675 } else {
2676 return Task::ready(Ok(Default::default()));
2677 };
2678
2679 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2680 cx.foreground().spawn(async move {
2681                 if lang_server.capabilities().code_action_provider.is_none() {
2682 return Ok(Default::default());
2683 }
2684
2685 Ok(lang_server
2686 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2687 text_document: lsp::TextDocumentIdentifier::new(
2688 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2689 ),
2690 range: lsp_range,
2691 work_done_progress_params: Default::default(),
2692 partial_result_params: Default::default(),
2693 context: lsp::CodeActionContext {
2694 diagnostics: relevant_diagnostics,
2695 only: Some(vec![
2696 lsp::CodeActionKind::QUICKFIX,
2697 lsp::CodeActionKind::REFACTOR,
2698 lsp::CodeActionKind::REFACTOR_EXTRACT,
2699 lsp::CodeActionKind::SOURCE,
2700 ]),
2701 },
2702 })
2703 .await?
2704 .unwrap_or_default()
2705 .into_iter()
2706 .filter_map(|entry| {
2707 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2708 Some(CodeAction {
2709 range: range.clone(),
2710 lsp_action,
2711 })
2712 } else {
2713 None
2714 }
2715 })
2716 .collect())
2717 })
2718 } else if let Some(project_id) = self.remote_id() {
2719 let rpc = self.client.clone();
2720 let version = buffer.version();
2721 cx.spawn_weak(|_, mut cx| async move {
2722 let response = rpc
2723 .request(proto::GetCodeActions {
2724 project_id,
2725 buffer_id,
2726 start: Some(language::proto::serialize_anchor(&range.start)),
2727 end: Some(language::proto::serialize_anchor(&range.end)),
2728 version: serialize_version(&version),
2729 })
2730 .await?;
2731
2732 buffer_handle
2733 .update(&mut cx, |buffer, _| {
2734 buffer.wait_for_version(deserialize_version(response.version))
2735 })
2736 .await;
2737
2738 response
2739 .actions
2740 .into_iter()
2741 .map(language::proto::deserialize_code_action)
2742 .collect()
2743 })
2744 } else {
2745 Task::ready(Ok(Default::default()))
2746 }
2747 }
2748
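    /// Applies a code action: its range is refreshed and the action resolved
    /// (or re-fetched when it can't be resolved), then either its workspace edit
    /// is applied or its command is executed, producing a `ProjectTransaction`.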
2749 pub fn apply_code_action(
2750 &self,
2751 buffer_handle: ModelHandle<Buffer>,
2752 mut action: CodeAction,
2753 push_to_history: bool,
2754 cx: &mut ModelContext<Self>,
2755 ) -> Task<Result<ProjectTransaction>> {
2756 if self.is_local() {
2757 let buffer = buffer_handle.read(cx);
2758 let (lsp_adapter, lang_server) =
2759 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2760 server.clone()
2761 } else {
2762 return Task::ready(Ok(Default::default()));
2763 };
2764 let range = action.range.to_point_utf16(buffer);
2765
2766 cx.spawn(|this, mut cx| async move {
2767 if let Some(lsp_range) = action
2768 .lsp_action
2769 .data
2770 .as_mut()
2771 .and_then(|d| d.get_mut("codeActionParams"))
2772 .and_then(|d| d.get_mut("range"))
2773 {
2774 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2775 action.lsp_action = lang_server
2776 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2777 .await?;
2778 } else {
2779 let actions = this
2780 .update(&mut cx, |this, cx| {
2781 this.code_actions(&buffer_handle, action.range, cx)
2782 })
2783 .await?;
2784 action.lsp_action = actions
2785 .into_iter()
2786 .find(|a| a.lsp_action.title == action.lsp_action.title)
2787 .ok_or_else(|| anyhow!("code action is outdated"))?
2788 .lsp_action;
2789 }
2790
2791 if let Some(edit) = action.lsp_action.edit {
2792 Self::deserialize_workspace_edit(
2793 this,
2794 edit,
2795 push_to_history,
2796 lsp_adapter,
2797 lang_server,
2798 &mut cx,
2799 )
2800 .await
2801 } else if let Some(command) = action.lsp_action.command {
2802 this.update(&mut cx, |this, _| {
2803 this.last_workspace_edits_by_language_server
2804 .remove(&lang_server.server_id());
2805 });
2806 lang_server
2807 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2808 command: command.command,
2809 arguments: command.arguments.unwrap_or_default(),
2810 ..Default::default()
2811 })
2812 .await?;
2813 Ok(this.update(&mut cx, |this, _| {
2814 this.last_workspace_edits_by_language_server
2815 .remove(&lang_server.server_id())
2816 .unwrap_or_default()
2817 }))
2818 } else {
2819 Ok(ProjectTransaction::default())
2820 }
2821 })
2822 } else if let Some(project_id) = self.remote_id() {
2823 let client = self.client.clone();
2824 let request = proto::ApplyCodeAction {
2825 project_id,
2826 buffer_id: buffer_handle.read(cx).remote_id(),
2827 action: Some(language::proto::serialize_code_action(&action)),
2828 };
2829 cx.spawn(|this, mut cx| async move {
2830 let response = client
2831 .request(request)
2832 .await?
2833 .transaction
2834 .ok_or_else(|| anyhow!("missing transaction"))?;
2835 this.update(&mut cx, |this, cx| {
2836 this.deserialize_project_transaction(response, push_to_history, cx)
2837 })
2838 .await
2839 })
2840 } else {
2841 Task::ready(Err(anyhow!("project does not have a remote id")))
2842 }
2843 }
2844
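    /// Applies an `lsp::WorkspaceEdit` to the project: create, rename, and delete
    /// resource operations go through the filesystem, text edits are applied to
    /// the corresponding buffers, and every resulting transaction is collected
    /// into a `ProjectTransaction`.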
2845 async fn deserialize_workspace_edit(
2846 this: ModelHandle<Self>,
2847 edit: lsp::WorkspaceEdit,
2848 push_to_history: bool,
2849 lsp_adapter: Arc<dyn LspAdapter>,
2850 language_server: Arc<LanguageServer>,
2851 cx: &mut AsyncAppContext,
2852 ) -> Result<ProjectTransaction> {
2853 let fs = this.read_with(cx, |this, _| this.fs.clone());
2854 let mut operations = Vec::new();
2855 if let Some(document_changes) = edit.document_changes {
2856 match document_changes {
2857 lsp::DocumentChanges::Edits(edits) => {
2858 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2859 }
2860 lsp::DocumentChanges::Operations(ops) => operations = ops,
2861 }
2862 } else if let Some(changes) = edit.changes {
2863 operations.extend(changes.into_iter().map(|(uri, edits)| {
2864 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2865 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2866 uri,
2867 version: None,
2868 },
2869 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2870 })
2871 }));
2872 }
2873
2874 let mut project_transaction = ProjectTransaction::default();
2875 for operation in operations {
2876 match operation {
2877 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2878 let abs_path = op
2879 .uri
2880 .to_file_path()
2881 .map_err(|_| anyhow!("can't convert URI to path"))?;
2882
2883 if let Some(parent_path) = abs_path.parent() {
2884 fs.create_dir(parent_path).await?;
2885 }
2886 if abs_path.ends_with("/") {
2887 fs.create_dir(&abs_path).await?;
2888 } else {
2889 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2890 .await?;
2891 }
2892 }
2893 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2894 let source_abs_path = op
2895 .old_uri
2896 .to_file_path()
2897 .map_err(|_| anyhow!("can't convert URI to path"))?;
2898 let target_abs_path = op
2899 .new_uri
2900 .to_file_path()
2901 .map_err(|_| anyhow!("can't convert URI to path"))?;
2902 fs.rename(
2903 &source_abs_path,
2904 &target_abs_path,
2905 op.options.map(Into::into).unwrap_or_default(),
2906 )
2907 .await?;
2908 }
2909 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2910 let abs_path = op
2911 .uri
2912 .to_file_path()
2913 .map_err(|_| anyhow!("can't convert URI to path"))?;
2914 let options = op.options.map(Into::into).unwrap_or_default();
2915 if abs_path.ends_with("/") {
2916 fs.remove_dir(&abs_path, options).await?;
2917 } else {
2918 fs.remove_file(&abs_path, options).await?;
2919 }
2920 }
2921 lsp::DocumentChangeOperation::Edit(op) => {
2922 let buffer_to_edit = this
2923 .update(cx, |this, cx| {
2924 this.open_local_buffer_via_lsp(
2925 op.text_document.uri,
2926 lsp_adapter.clone(),
2927 language_server.clone(),
2928 cx,
2929 )
2930 })
2931 .await?;
2932
2933 let edits = this
2934 .update(cx, |this, cx| {
2935 let edits = op.edits.into_iter().map(|edit| match edit {
2936 lsp::OneOf::Left(edit) => edit,
2937 lsp::OneOf::Right(edit) => edit.text_edit,
2938 });
2939 this.edits_from_lsp(
2940 &buffer_to_edit,
2941 edits,
2942 op.text_document.version,
2943 cx,
2944 )
2945 })
2946 .await?;
2947
2948 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2949 buffer.finalize_last_transaction();
2950 buffer.start_transaction();
2951 for (range, text) in edits {
2952 buffer.edit([range], text, cx);
2953 }
2954 let transaction = if buffer.end_transaction(cx).is_some() {
2955 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2956 if !push_to_history {
2957 buffer.forget_transaction(transaction.id);
2958 }
2959 Some(transaction)
2960 } else {
2961 None
2962 };
2963
2964 transaction
2965 });
2966 if let Some(transaction) = transaction {
2967 project_transaction.0.insert(buffer_to_edit, transaction);
2968 }
2969 }
2970 }
2971 }
2972
2973 Ok(project_transaction)
2974 }
2975
2976 pub fn prepare_rename<T: ToPointUtf16>(
2977 &self,
2978 buffer: ModelHandle<Buffer>,
2979 position: T,
2980 cx: &mut ModelContext<Self>,
2981 ) -> Task<Result<Option<Range<Anchor>>>> {
2982 let position = position.to_point_utf16(buffer.read(cx));
2983 self.request_lsp(buffer, PrepareRename { position }, cx)
2984 }
2985
2986 pub fn perform_rename<T: ToPointUtf16>(
2987 &self,
2988 buffer: ModelHandle<Buffer>,
2989 position: T,
2990 new_name: String,
2991 push_to_history: bool,
2992 cx: &mut ModelContext<Self>,
2993 ) -> Task<Result<ProjectTransaction>> {
2994 let position = position.to_point_utf16(buffer.read(cx));
2995 self.request_lsp(
2996 buffer,
2997 PerformRename {
2998 position,
2999 new_name,
3000 push_to_history,
3001 },
3002 cx,
3003 )
3004 }
3005
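    /// Searches the project for `query`. Locally, background workers scan the
    /// visible files of each worktree for candidate paths, matching buffers are
    /// opened, and matches are returned as anchor ranges grouped by buffer;
    /// remote projects send the query to the host.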
3006 pub fn search(
3007 &self,
3008 query: SearchQuery,
3009 cx: &mut ModelContext<Self>,
3010 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3011 if self.is_local() {
3012 let snapshots = self
3013 .visible_worktrees(cx)
3014 .filter_map(|tree| {
3015 let tree = tree.read(cx).as_local()?;
3016 Some(tree.snapshot())
3017 })
3018 .collect::<Vec<_>>();
3019
3020 let background = cx.background().clone();
3021 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3022 if path_count == 0 {
3023 return Task::ready(Ok(Default::default()));
3024 }
3025 let workers = background.num_cpus().min(path_count);
3026 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3027 cx.background()
3028 .spawn({
3029 let fs = self.fs.clone();
3030 let background = cx.background().clone();
3031 let query = query.clone();
3032 async move {
3033 let fs = &fs;
3034 let query = &query;
3035 let matching_paths_tx = &matching_paths_tx;
3036 let paths_per_worker = (path_count + workers - 1) / workers;
3037 let snapshots = &snapshots;
3038 background
3039 .scoped(|scope| {
3040 for worker_ix in 0..workers {
3041 let worker_start_ix = worker_ix * paths_per_worker;
3042 let worker_end_ix = worker_start_ix + paths_per_worker;
3043 scope.spawn(async move {
3044 let mut snapshot_start_ix = 0;
3045 let mut abs_path = PathBuf::new();
3046 for snapshot in snapshots {
3047 let snapshot_end_ix =
3048 snapshot_start_ix + snapshot.visible_file_count();
3049 if worker_end_ix <= snapshot_start_ix {
3050 break;
3051 } else if worker_start_ix > snapshot_end_ix {
3052 snapshot_start_ix = snapshot_end_ix;
3053 continue;
3054 } else {
3055 let start_in_snapshot = worker_start_ix
3056 .saturating_sub(snapshot_start_ix);
3057 let end_in_snapshot =
3058 cmp::min(worker_end_ix, snapshot_end_ix)
3059 - snapshot_start_ix;
3060
3061 for entry in snapshot
3062 .files(false, start_in_snapshot)
3063 .take(end_in_snapshot - start_in_snapshot)
3064 {
3065 if matching_paths_tx.is_closed() {
3066 break;
3067 }
3068
3069 abs_path.clear();
3070 abs_path.push(&snapshot.abs_path());
3071 abs_path.push(&entry.path);
3072 let matches = if let Some(file) =
3073 fs.open_sync(&abs_path).await.log_err()
3074 {
3075 query.detect(file).unwrap_or(false)
3076 } else {
3077 false
3078 };
3079
3080 if matches {
3081 let project_path =
3082 (snapshot.id(), entry.path.clone());
3083 if matching_paths_tx
3084 .send(project_path)
3085 .await
3086 .is_err()
3087 {
3088 break;
3089 }
3090 }
3091 }
3092
3093 snapshot_start_ix = snapshot_end_ix;
3094 }
3095 }
3096 });
3097 }
3098 })
3099 .await;
3100 }
3101 })
3102 .detach();
3103
3104 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3105 let open_buffers = self
3106 .opened_buffers
3107 .values()
3108 .filter_map(|b| b.upgrade(cx))
3109 .collect::<HashSet<_>>();
3110 cx.spawn(|this, cx| async move {
3111 for buffer in &open_buffers {
3112 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3113 buffers_tx.send((buffer.clone(), snapshot)).await?;
3114 }
3115
3116 let open_buffers = Rc::new(RefCell::new(open_buffers));
3117 while let Some(project_path) = matching_paths_rx.next().await {
3118 if buffers_tx.is_closed() {
3119 break;
3120 }
3121
3122 let this = this.clone();
3123 let open_buffers = open_buffers.clone();
3124 let buffers_tx = buffers_tx.clone();
3125 cx.spawn(|mut cx| async move {
3126 if let Some(buffer) = this
3127 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3128 .await
3129 .log_err()
3130 {
3131 if open_buffers.borrow_mut().insert(buffer.clone()) {
3132 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3133 buffers_tx.send((buffer, snapshot)).await?;
3134 }
3135 }
3136
3137 Ok::<_, anyhow::Error>(())
3138 })
3139 .detach();
3140 }
3141
3142 Ok::<_, anyhow::Error>(())
3143 })
3144 .detach_and_log_err(cx);
3145
3146 let background = cx.background().clone();
3147 cx.background().spawn(async move {
3148 let query = &query;
3149 let mut matched_buffers = Vec::new();
3150 for _ in 0..workers {
3151 matched_buffers.push(HashMap::default());
3152 }
3153 background
3154 .scoped(|scope| {
3155 for worker_matched_buffers in matched_buffers.iter_mut() {
3156 let mut buffers_rx = buffers_rx.clone();
3157 scope.spawn(async move {
3158 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3159 let buffer_matches = query
3160 .search(snapshot.as_rope())
3161 .await
3162 .iter()
3163 .map(|range| {
3164 snapshot.anchor_before(range.start)
3165 ..snapshot.anchor_after(range.end)
3166 })
3167 .collect::<Vec<_>>();
3168 if !buffer_matches.is_empty() {
3169 worker_matched_buffers
3170 .insert(buffer.clone(), buffer_matches);
3171 }
3172 }
3173 });
3174 }
3175 })
3176 .await;
3177 Ok(matched_buffers.into_iter().flatten().collect())
3178 })
3179 } else if let Some(project_id) = self.remote_id() {
3180 let request = self.client.request(query.to_proto(project_id));
3181 cx.spawn(|this, mut cx| async move {
3182 let response = request.await?;
3183 let mut result = HashMap::default();
3184 for location in response.locations {
3185 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3186 let target_buffer = this
3187 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3188 .await?;
3189 let start = location
3190 .start
3191 .and_then(deserialize_anchor)
3192 .ok_or_else(|| anyhow!("missing target start"))?;
3193 let end = location
3194 .end
3195 .and_then(deserialize_anchor)
3196 .ok_or_else(|| anyhow!("missing target end"))?;
3197 result
3198 .entry(target_buffer)
3199 .or_insert(Vec::new())
3200 .push(start..end)
3201 }
3202 Ok(result)
3203 })
3204 } else {
3205 Task::ready(Ok(Default::default()))
3206 }
3207 }
3208
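    /// Dispatches an `LspCommand` for the given buffer, either to the local
    /// language server or, for remote projects, to the host over RPC. Returns a
    /// default response when no server is available or the capability is missing.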
3209 fn request_lsp<R: LspCommand>(
3210 &self,
3211 buffer_handle: ModelHandle<Buffer>,
3212 request: R,
3213 cx: &mut ModelContext<Self>,
3214 ) -> Task<Result<R::Response>>
3215 where
3216 <R::LspRequest as lsp::request::Request>::Result: Send,
3217 {
3218 let buffer = buffer_handle.read(cx);
3219 if self.is_local() {
3220 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3221 if let Some((file, (_, language_server))) =
3222 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3223 {
3224 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3225 return cx.spawn(|this, cx| async move {
3226 if !request.check_capabilities(&language_server.capabilities()) {
3227 return Ok(Default::default());
3228 }
3229
3230 let response = language_server
3231 .request::<R::LspRequest>(lsp_params)
3232 .await
3233 .context("lsp request failed")?;
3234 request
3235 .response_from_lsp(response, this, buffer_handle, cx)
3236 .await
3237 });
3238 }
3239 } else if let Some(project_id) = self.remote_id() {
3240 let rpc = self.client.clone();
3241 let message = request.to_proto(project_id, buffer);
3242 return cx.spawn(|this, cx| async move {
3243 let response = rpc.request(message).await?;
3244 request
3245 .response_from_proto(response, this, buffer_handle, cx)
3246 .await
3247 });
3248 }
3249 Task::ready(Ok(Default::default()))
3250 }
3251
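    /// Returns the local worktree containing `abs_path` along with the path
    /// relative to that worktree, creating a new worktree rooted at `abs_path`
    /// if none contains it.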
3252 pub fn find_or_create_local_worktree(
3253 &mut self,
3254 abs_path: impl AsRef<Path>,
3255 visible: bool,
3256 cx: &mut ModelContext<Self>,
3257 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3258 let abs_path = abs_path.as_ref();
3259 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3260 Task::ready(Ok((tree.clone(), relative_path.into())))
3261 } else {
3262 let worktree = self.create_local_worktree(abs_path, visible, cx);
3263 cx.foreground()
3264 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3265 }
3266 }
3267
3268 pub fn find_local_worktree(
3269 &self,
3270 abs_path: &Path,
3271 cx: &AppContext,
3272 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3273 for tree in self.worktrees(cx) {
3274 if let Some(relative_path) = tree
3275 .read(cx)
3276 .as_local()
3277 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3278 {
3279 return Some((tree.clone(), relative_path.into()));
3280 }
3281 }
3282 None
3283 }
3284
3285 pub fn is_shared(&self) -> bool {
3286 match &self.client_state {
3287 ProjectClientState::Local { is_shared, .. } => *is_shared,
3288 ProjectClientState::Remote { .. } => false,
3289 }
3290 }
3291
3292 fn create_local_worktree(
3293 &mut self,
3294 abs_path: impl AsRef<Path>,
3295 visible: bool,
3296 cx: &mut ModelContext<Self>,
3297 ) -> Task<Result<ModelHandle<Worktree>>> {
3298 let fs = self.fs.clone();
3299 let client = self.client.clone();
3300 let next_entry_id = self.next_entry_id.clone();
3301 let path: Arc<Path> = abs_path.as_ref().into();
3302 let task = self
3303 .loading_local_worktrees
3304 .entry(path.clone())
3305 .or_insert_with(|| {
3306 cx.spawn(|project, mut cx| {
3307 async move {
3308 let worktree = Worktree::local(
3309 client.clone(),
3310 path.clone(),
3311 visible,
3312 fs,
3313 next_entry_id,
3314 &mut cx,
3315 )
3316 .await;
3317 project.update(&mut cx, |project, _| {
3318 project.loading_local_worktrees.remove(&path);
3319 });
3320 let worktree = worktree?;
3321
3322 let (remote_project_id, is_shared) =
3323 project.update(&mut cx, |project, cx| {
3324 project.add_worktree(&worktree, cx);
3325 (project.remote_id(), project.is_shared())
3326 });
3327
3328 if let Some(project_id) = remote_project_id {
3329 if is_shared {
3330 worktree
3331 .update(&mut cx, |worktree, cx| {
3332 worktree.as_local_mut().unwrap().share(project_id, cx)
3333 })
3334 .await?;
3335 } else {
3336 worktree
3337 .update(&mut cx, |worktree, cx| {
3338 worktree.as_local_mut().unwrap().register(project_id, cx)
3339 })
3340 .await?;
3341 }
3342 }
3343
3344 Ok(worktree)
3345 }
3346                     .map_err(Arc::new)
3347 })
3348 .shared()
3349 })
3350 .clone();
3351 cx.foreground().spawn(async move {
3352 match task.await {
3353 Ok(worktree) => Ok(worktree),
3354 Err(err) => Err(anyhow!("{}", err)),
3355 }
3356 })
3357 }
3358
3359 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3360 self.worktrees.retain(|worktree| {
3361 worktree
3362 .upgrade(cx)
3363 .map_or(false, |w| w.read(cx).id() != id)
3364 });
3365 cx.notify();
3366 }
3367
3368 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3369 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3370 if worktree.read(cx).is_local() {
3371 cx.subscribe(&worktree, |this, worktree, _, cx| {
3372 this.update_local_worktree_buffers(worktree, cx);
3373 })
3374 .detach();
3375 }
3376
3377 let push_strong_handle = {
3378 let worktree = worktree.read(cx);
3379 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3380 };
3381 if push_strong_handle {
3382 self.worktrees
3383 .push(WorktreeHandle::Strong(worktree.clone()));
3384 } else {
3385 cx.observe_release(&worktree, |this, _, cx| {
3386 this.worktrees
3387 .retain(|worktree| worktree.upgrade(cx).is_some());
3388 cx.notify();
3389 })
3390 .detach();
3391 self.worktrees
3392 .push(WorktreeHandle::Weak(worktree.downgrade()));
3393 }
3394 cx.notify();
3395 }
3396
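    /// Reconciles open buffers with a local worktree after it changes: each
    /// buffer's `File` is refreshed (and broadcast to collaborators when the
    /// project is shared), dropped buffers are forgotten, and renamed buffers
    /// are re-registered with the appropriate language server.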
3397 fn update_local_worktree_buffers(
3398 &mut self,
3399 worktree_handle: ModelHandle<Worktree>,
3400 cx: &mut ModelContext<Self>,
3401 ) {
3402 let snapshot = worktree_handle.read(cx).snapshot();
3403 let mut buffers_to_delete = Vec::new();
3404 let mut renamed_buffers = Vec::new();
3405 for (buffer_id, buffer) in &self.opened_buffers {
3406 if let Some(buffer) = buffer.upgrade(cx) {
3407 buffer.update(cx, |buffer, cx| {
3408 if let Some(old_file) = File::from_dyn(buffer.file()) {
3409 if old_file.worktree != worktree_handle {
3410 return;
3411 }
3412
3413 let new_file = if let Some(entry) = old_file
3414 .entry_id
3415 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3416 {
3417 File {
3418 is_local: true,
3419 entry_id: Some(entry.id),
3420 mtime: entry.mtime,
3421 path: entry.path.clone(),
3422 worktree: worktree_handle.clone(),
3423 }
3424 } else if let Some(entry) =
3425 snapshot.entry_for_path(old_file.path().as_ref())
3426 {
3427 File {
3428 is_local: true,
3429 entry_id: Some(entry.id),
3430 mtime: entry.mtime,
3431 path: entry.path.clone(),
3432 worktree: worktree_handle.clone(),
3433 }
3434 } else {
3435 File {
3436 is_local: true,
3437 entry_id: None,
3438 path: old_file.path().clone(),
3439 mtime: old_file.mtime(),
3440 worktree: worktree_handle.clone(),
3441 }
3442 };
3443
3444 let old_path = old_file.abs_path(cx);
3445 if new_file.abs_path(cx) != old_path {
3446 renamed_buffers.push((cx.handle(), old_path));
3447 }
3448
3449 if let Some(project_id) = self.remote_id() {
3450 self.client
3451 .send(proto::UpdateBufferFile {
3452 project_id,
3453 buffer_id: *buffer_id as u64,
3454 file: Some(new_file.to_proto()),
3455 })
3456 .log_err();
3457 }
3458 buffer.file_updated(Box::new(new_file), cx).detach();
3459 }
3460 });
3461 } else {
3462 buffers_to_delete.push(*buffer_id);
3463 }
3464 }
3465
3466 for buffer_id in buffers_to_delete {
3467 self.opened_buffers.remove(&buffer_id);
3468 }
3469
3470 for (buffer, old_path) in renamed_buffers {
3471 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3472 self.assign_language_to_buffer(&buffer, cx);
3473 self.register_buffer_with_language_server(&buffer, cx);
3474 }
3475 }
3476
3477 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3478 let new_active_entry = entry.and_then(|project_path| {
3479 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3480 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3481 Some(entry.id)
3482 });
3483 if new_active_entry != self.active_entry {
3484 self.active_entry = new_active_entry;
3485 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3486 }
3487 }
3488
3489 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3490 self.language_server_statuses
3491 .values()
3492 .any(|status| status.pending_diagnostic_updates > 0)
3493 }
3494
3495 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3496 let mut summary = DiagnosticSummary::default();
3497 for (_, path_summary) in self.diagnostic_summaries(cx) {
3498 summary.error_count += path_summary.error_count;
3499 summary.warning_count += path_summary.warning_count;
3500 summary.info_count += path_summary.info_count;
3501 summary.hint_count += path_summary.hint_count;
3502 }
3503 summary
3504 }
3505
3506 pub fn diagnostic_summaries<'a>(
3507 &'a self,
3508 cx: &'a AppContext,
3509 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3510 self.worktrees(cx).flat_map(move |worktree| {
3511 let worktree = worktree.read(cx);
3512 let worktree_id = worktree.id();
3513 worktree
3514 .diagnostic_summaries()
3515 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3516 })
3517 }
3518
3519 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3520 if self
3521 .language_server_statuses
3522 .values()
3523 .map(|status| status.pending_diagnostic_updates)
3524 .sum::<isize>()
3525 == 1
3526 {
3527 cx.emit(Event::DiskBasedDiagnosticsStarted);
3528 }
3529 }
3530
3531 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3532 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3533 if self
3534 .language_server_statuses
3535 .values()
3536 .map(|status| status.pending_diagnostic_updates)
3537 .sum::<isize>()
3538 == 0
3539 {
3540 cx.emit(Event::DiskBasedDiagnosticsFinished);
3541 }
3542 }
3543
3544 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3545 self.active_entry
3546 }
3547
3548 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3549 self.worktree_for_id(path.worktree_id, cx)?
3550 .read(cx)
3551 .entry_for_path(&path.path)
3552 .map(|entry| entry.id)
3553 }
3554
3555 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3556 let worktree = self.worktree_for_entry(entry_id, cx)?;
3557 let worktree = worktree.read(cx);
3558 let worktree_id = worktree.id();
3559 let path = worktree.entry_for_id(entry_id)?.path.clone();
3560 Some(ProjectPath { worktree_id, path })
3561 }
3562
3563 // RPC message handlers
3564
3565 async fn handle_unshare_project(
3566 this: ModelHandle<Self>,
3567 _: TypedEnvelope<proto::UnshareProject>,
3568 _: Arc<Client>,
3569 mut cx: AsyncAppContext,
3570 ) -> Result<()> {
3571 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3572 Ok(())
3573 }
3574
3575 async fn handle_add_collaborator(
3576 this: ModelHandle<Self>,
3577 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3578 _: Arc<Client>,
3579 mut cx: AsyncAppContext,
3580 ) -> Result<()> {
3581 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3582 let collaborator = envelope
3583 .payload
3584 .collaborator
3585 .take()
3586 .ok_or_else(|| anyhow!("empty collaborator"))?;
3587
3588 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3589 this.update(&mut cx, |this, cx| {
3590 this.collaborators
3591 .insert(collaborator.peer_id, collaborator);
3592 cx.notify();
3593 });
3594
3595 Ok(())
3596 }
3597
3598 async fn handle_remove_collaborator(
3599 this: ModelHandle<Self>,
3600 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3601 _: Arc<Client>,
3602 mut cx: AsyncAppContext,
3603 ) -> Result<()> {
3604 this.update(&mut cx, |this, cx| {
3605 let peer_id = PeerId(envelope.payload.peer_id);
3606 let replica_id = this
3607 .collaborators
3608 .remove(&peer_id)
3609 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3610 .replica_id;
3611 for (_, buffer) in &this.opened_buffers {
3612 if let Some(buffer) = buffer.upgrade(cx) {
3613 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3614 }
3615 }
3616 cx.emit(Event::CollaboratorLeft(peer_id));
3617 cx.notify();
3618 Ok(())
3619 })
3620 }
3621
3622 async fn handle_register_worktree(
3623 this: ModelHandle<Self>,
3624 envelope: TypedEnvelope<proto::RegisterWorktree>,
3625 client: Arc<Client>,
3626 mut cx: AsyncAppContext,
3627 ) -> Result<()> {
3628 this.update(&mut cx, |this, cx| {
3629 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3630 let replica_id = this.replica_id();
3631 let worktree = proto::Worktree {
3632 id: envelope.payload.worktree_id,
3633 root_name: envelope.payload.root_name,
3634 entries: Default::default(),
3635 diagnostic_summaries: Default::default(),
3636 visible: envelope.payload.visible,
3637 };
3638 let (worktree, load_task) =
3639 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3640 this.add_worktree(&worktree, cx);
3641 load_task.detach();
3642 Ok(())
3643 })
3644 }
3645
3646 async fn handle_unregister_worktree(
3647 this: ModelHandle<Self>,
3648 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3649 _: Arc<Client>,
3650 mut cx: AsyncAppContext,
3651 ) -> Result<()> {
3652 this.update(&mut cx, |this, cx| {
3653 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3654 this.remove_worktree(worktree_id, cx);
3655 Ok(())
3656 })
3657 }
3658
3659 async fn handle_update_worktree(
3660 this: ModelHandle<Self>,
3661 envelope: TypedEnvelope<proto::UpdateWorktree>,
3662 _: Arc<Client>,
3663 mut cx: AsyncAppContext,
3664 ) -> Result<()> {
3665 this.update(&mut cx, |this, cx| {
3666 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3667 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3668 worktree.update(cx, |worktree, _| {
3669 let worktree = worktree.as_remote_mut().unwrap();
3670 worktree.update_from_remote(envelope)
3671 })?;
3672 }
3673 Ok(())
3674 })
3675 }
3676
3677 async fn handle_update_diagnostic_summary(
3678 this: ModelHandle<Self>,
3679 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3680 _: Arc<Client>,
3681 mut cx: AsyncAppContext,
3682 ) -> Result<()> {
3683 this.update(&mut cx, |this, cx| {
3684 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3685 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3686 if let Some(summary) = envelope.payload.summary {
3687 let project_path = ProjectPath {
3688 worktree_id,
3689 path: Path::new(&summary.path).into(),
3690 };
3691 worktree.update(cx, |worktree, _| {
3692 worktree
3693 .as_remote_mut()
3694 .unwrap()
3695 .update_diagnostic_summary(project_path.path.clone(), &summary);
3696 });
3697 cx.emit(Event::DiagnosticsUpdated(project_path));
3698 }
3699 }
3700 Ok(())
3701 })
3702 }
3703
3704 async fn handle_start_language_server(
3705 this: ModelHandle<Self>,
3706 envelope: TypedEnvelope<proto::StartLanguageServer>,
3707 _: Arc<Client>,
3708 mut cx: AsyncAppContext,
3709 ) -> Result<()> {
3710 let server = envelope
3711 .payload
3712 .server
3713 .ok_or_else(|| anyhow!("invalid server"))?;
3714 this.update(&mut cx, |this, cx| {
3715 this.language_server_statuses.insert(
3716 server.id as usize,
3717 LanguageServerStatus {
3718 name: server.name,
3719 pending_work: Default::default(),
3720 pending_diagnostic_updates: 0,
3721 },
3722 );
3723 cx.notify();
3724 });
3725 Ok(())
3726 }
3727
3728 async fn handle_update_language_server(
3729 this: ModelHandle<Self>,
3730 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3731 _: Arc<Client>,
3732 mut cx: AsyncAppContext,
3733 ) -> Result<()> {
3734 let language_server_id = envelope.payload.language_server_id as usize;
3735 match envelope
3736 .payload
3737 .variant
3738 .ok_or_else(|| anyhow!("invalid variant"))?
3739 {
3740 proto::update_language_server::Variant::WorkStart(payload) => {
3741 this.update(&mut cx, |this, cx| {
3742 this.on_lsp_work_start(language_server_id, payload.token, cx);
3743 })
3744 }
3745 proto::update_language_server::Variant::WorkProgress(payload) => {
3746 this.update(&mut cx, |this, cx| {
3747 this.on_lsp_work_progress(
3748 language_server_id,
3749 payload.token,
3750 LanguageServerProgress {
3751 message: payload.message,
3752 percentage: payload.percentage.map(|p| p as usize),
3753 last_update_at: Instant::now(),
3754 },
3755 cx,
3756 );
3757 })
3758 }
3759 proto::update_language_server::Variant::WorkEnd(payload) => {
3760 this.update(&mut cx, |this, cx| {
3761 this.on_lsp_work_end(language_server_id, payload.token, cx);
3762 })
3763 }
3764 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3765 this.update(&mut cx, |this, cx| {
3766 this.disk_based_diagnostics_started(cx);
3767 })
3768 }
3769 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3770 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3771 }
3772 }
3773
3774 Ok(())
3775 }
3776
3777 async fn handle_update_buffer(
3778 this: ModelHandle<Self>,
3779 envelope: TypedEnvelope<proto::UpdateBuffer>,
3780 _: Arc<Client>,
3781 mut cx: AsyncAppContext,
3782 ) -> Result<()> {
3783 this.update(&mut cx, |this, cx| {
3784 let payload = envelope.payload.clone();
3785 let buffer_id = payload.buffer_id;
3786 let ops = payload
3787 .operations
3788 .into_iter()
3789                 .map(language::proto::deserialize_operation)
3790 .collect::<Result<Vec<_>, _>>()?;
3791 match this.opened_buffers.entry(buffer_id) {
3792 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3793 OpenBuffer::Strong(buffer) => {
3794 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3795 }
3796 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3797 OpenBuffer::Weak(_) => {}
3798 },
3799 hash_map::Entry::Vacant(e) => {
3800 e.insert(OpenBuffer::Loading(ops));
3801 }
3802 }
3803 Ok(())
3804 })
3805 }
3806
3807 async fn handle_update_buffer_file(
3808 this: ModelHandle<Self>,
3809 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3810 _: Arc<Client>,
3811 mut cx: AsyncAppContext,
3812 ) -> Result<()> {
3813 this.update(&mut cx, |this, cx| {
3814 let payload = envelope.payload.clone();
3815 let buffer_id = payload.buffer_id;
3816 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3817 let worktree = this
3818 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3819 .ok_or_else(|| anyhow!("no such worktree"))?;
3820 let file = File::from_proto(file, worktree.clone(), cx)?;
3821 let buffer = this
3822 .opened_buffers
3823 .get_mut(&buffer_id)
3824 .and_then(|b| b.upgrade(cx))
3825 .ok_or_else(|| anyhow!("no such buffer"))?;
3826 buffer.update(cx, |buffer, cx| {
3827 buffer.file_updated(Box::new(file), cx).detach();
3828 });
3829 Ok(())
3830 })
3831 }
3832
3833 async fn handle_save_buffer(
3834 this: ModelHandle<Self>,
3835 envelope: TypedEnvelope<proto::SaveBuffer>,
3836 _: Arc<Client>,
3837 mut cx: AsyncAppContext,
3838 ) -> Result<proto::BufferSaved> {
3839 let buffer_id = envelope.payload.buffer_id;
3840 let requested_version = deserialize_version(envelope.payload.version);
3841
3842 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3843 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3844 let buffer = this
3845 .opened_buffers
3846 .get(&buffer_id)
3847 .and_then(|buffer| buffer.upgrade(cx))
3848 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3849 Ok::<_, anyhow::Error>((project_id, buffer))
3850 })?;
3851 buffer
3852 .update(&mut cx, |buffer, _| {
3853 buffer.wait_for_version(requested_version)
3854 })
3855 .await;
3856
3857 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3858 Ok(proto::BufferSaved {
3859 project_id,
3860 buffer_id,
3861 version: serialize_version(&saved_version),
3862 mtime: Some(mtime.into()),
3863 })
3864 }
3865
3866 async fn handle_reload_buffers(
3867 this: ModelHandle<Self>,
3868 envelope: TypedEnvelope<proto::ReloadBuffers>,
3869 _: Arc<Client>,
3870 mut cx: AsyncAppContext,
3871 ) -> Result<proto::ReloadBuffersResponse> {
3872 let sender_id = envelope.original_sender_id()?;
3873 let reload = this.update(&mut cx, |this, cx| {
3874 let mut buffers = HashSet::default();
3875 for buffer_id in &envelope.payload.buffer_ids {
3876 buffers.insert(
3877 this.opened_buffers
3878 .get(buffer_id)
3879 .and_then(|buffer| buffer.upgrade(cx))
3880 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3881 );
3882 }
3883 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3884 })?;
3885
3886 let project_transaction = reload.await?;
3887 let project_transaction = this.update(&mut cx, |this, cx| {
3888 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3889 });
3890 Ok(proto::ReloadBuffersResponse {
3891 transaction: Some(project_transaction),
3892 })
3893 }
3894
3895 async fn handle_format_buffers(
3896 this: ModelHandle<Self>,
3897 envelope: TypedEnvelope<proto::FormatBuffers>,
3898 _: Arc<Client>,
3899 mut cx: AsyncAppContext,
3900 ) -> Result<proto::FormatBuffersResponse> {
3901 let sender_id = envelope.original_sender_id()?;
3902 let format = this.update(&mut cx, |this, cx| {
3903 let mut buffers = HashSet::default();
3904 for buffer_id in &envelope.payload.buffer_ids {
3905 buffers.insert(
3906 this.opened_buffers
3907 .get(buffer_id)
3908 .and_then(|buffer| buffer.upgrade(cx))
3909 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3910 );
3911 }
3912 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3913 })?;
3914
3915 let project_transaction = format.await?;
3916 let project_transaction = this.update(&mut cx, |this, cx| {
3917 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3918 });
3919 Ok(proto::FormatBuffersResponse {
3920 transaction: Some(project_transaction),
3921 })
3922 }
3923
3924 async fn handle_get_completions(
3925 this: ModelHandle<Self>,
3926 envelope: TypedEnvelope<proto::GetCompletions>,
3927 _: Arc<Client>,
3928 mut cx: AsyncAppContext,
3929 ) -> Result<proto::GetCompletionsResponse> {
3930 let position = envelope
3931 .payload
3932 .position
3933 .and_then(language::proto::deserialize_anchor)
3934 .ok_or_else(|| anyhow!("invalid position"))?;
3935 let version = deserialize_version(envelope.payload.version);
3936 let buffer = this.read_with(&cx, |this, cx| {
3937 this.opened_buffers
3938 .get(&envelope.payload.buffer_id)
3939 .and_then(|buffer| buffer.upgrade(cx))
3940 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3941 })?;
3942 buffer
3943 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3944 .await;
3945 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3946 let completions = this
3947 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3948 .await?;
3949
3950 Ok(proto::GetCompletionsResponse {
3951 completions: completions
3952 .iter()
3953 .map(language::proto::serialize_completion)
3954 .collect(),
3955 version: serialize_version(&version),
3956 })
3957 }
3958
3959 async fn handle_apply_additional_edits_for_completion(
3960 this: ModelHandle<Self>,
3961 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3962 _: Arc<Client>,
3963 mut cx: AsyncAppContext,
3964 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3965 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3966 let buffer = this
3967 .opened_buffers
3968 .get(&envelope.payload.buffer_id)
3969 .and_then(|buffer| buffer.upgrade(cx))
3970 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3971 let language = buffer.read(cx).language();
3972 let completion = language::proto::deserialize_completion(
3973 envelope
3974 .payload
3975 .completion
3976 .ok_or_else(|| anyhow!("invalid completion"))?,
3977 language,
3978 )?;
3979 Ok::<_, anyhow::Error>(
3980 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3981 )
3982 })?;
3983
3984 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3985 transaction: apply_additional_edits
3986 .await?
3987 .as_ref()
3988 .map(language::proto::serialize_transaction),
3989 })
3990 }
3991
3992 async fn handle_get_code_actions(
3993 this: ModelHandle<Self>,
3994 envelope: TypedEnvelope<proto::GetCodeActions>,
3995 _: Arc<Client>,
3996 mut cx: AsyncAppContext,
3997 ) -> Result<proto::GetCodeActionsResponse> {
3998 let start = envelope
3999 .payload
4000 .start
4001 .and_then(language::proto::deserialize_anchor)
4002 .ok_or_else(|| anyhow!("invalid start"))?;
4003 let end = envelope
4004 .payload
4005 .end
4006 .and_then(language::proto::deserialize_anchor)
4007 .ok_or_else(|| anyhow!("invalid end"))?;
4008 let buffer = this.update(&mut cx, |this, cx| {
4009 this.opened_buffers
4010 .get(&envelope.payload.buffer_id)
4011 .and_then(|buffer| buffer.upgrade(cx))
4012 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4013 })?;
4014 buffer
4015 .update(&mut cx, |buffer, _| {
4016 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4017 })
4018 .await;
4019
4020 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4021 let code_actions = this.update(&mut cx, |this, cx| {
4022 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4023 })?;
4024
4025 Ok(proto::GetCodeActionsResponse {
4026 actions: code_actions
4027 .await?
4028 .iter()
4029 .map(language::proto::serialize_code_action)
4030 .collect(),
4031 version: serialize_version(&version),
4032 })
4033 }
4034
4035 async fn handle_apply_code_action(
4036 this: ModelHandle<Self>,
4037 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4038 _: Arc<Client>,
4039 mut cx: AsyncAppContext,
4040 ) -> Result<proto::ApplyCodeActionResponse> {
4041 let sender_id = envelope.original_sender_id()?;
4042 let action = language::proto::deserialize_code_action(
4043 envelope
4044 .payload
4045 .action
4046 .ok_or_else(|| anyhow!("invalid action"))?,
4047 )?;
4048 let apply_code_action = this.update(&mut cx, |this, cx| {
4049 let buffer = this
4050 .opened_buffers
4051 .get(&envelope.payload.buffer_id)
4052 .and_then(|buffer| buffer.upgrade(cx))
4053 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4054 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4055 })?;
4056
4057 let project_transaction = apply_code_action.await?;
4058 let project_transaction = this.update(&mut cx, |this, cx| {
4059 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4060 });
4061 Ok(proto::ApplyCodeActionResponse {
4062 transaction: Some(project_transaction),
4063 })
4064 }
4065
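    /// Generic handler for LSP requests proxied from a remote peer: it
    /// deserializes the request, runs it against the local language server, and
    /// serializes the response back for the original sender.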
4066 async fn handle_lsp_command<T: LspCommand>(
4067 this: ModelHandle<Self>,
4068 envelope: TypedEnvelope<T::ProtoRequest>,
4069 _: Arc<Client>,
4070 mut cx: AsyncAppContext,
4071 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4072 where
4073 <T::LspRequest as lsp::request::Request>::Result: Send,
4074 {
4075 let sender_id = envelope.original_sender_id()?;
4076 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4077 let buffer_handle = this.read_with(&cx, |this, _| {
4078 this.opened_buffers
4079 .get(&buffer_id)
4080 .and_then(|buffer| buffer.upgrade(&cx))
4081 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4082 })?;
4083 let request = T::from_proto(
4084 envelope.payload,
4085 this.clone(),
4086 buffer_handle.clone(),
4087 cx.clone(),
4088 )
4089 .await?;
4090 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4091 let response = this
4092 .update(&mut cx, |this, cx| {
4093 this.request_lsp(buffer_handle, request, cx)
4094 })
4095 .await?;
4096 this.update(&mut cx, |this, cx| {
4097 Ok(T::response_to_proto(
4098 response,
4099 this,
4100 sender_id,
4101 &buffer_version,
4102 cx,
4103 ))
4104 })
4105 }
4106
4107 async fn handle_get_project_symbols(
4108 this: ModelHandle<Self>,
4109 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4110 _: Arc<Client>,
4111 mut cx: AsyncAppContext,
4112 ) -> Result<proto::GetProjectSymbolsResponse> {
4113 let symbols = this
4114 .update(&mut cx, |this, cx| {
4115 this.symbols(&envelope.payload.query, cx)
4116 })
4117 .await?;
4118
4119 Ok(proto::GetProjectSymbolsResponse {
4120 symbols: symbols.iter().map(serialize_symbol).collect(),
4121 })
4122 }
4123
4124 async fn handle_search_project(
4125 this: ModelHandle<Self>,
4126 envelope: TypedEnvelope<proto::SearchProject>,
4127 _: Arc<Client>,
4128 mut cx: AsyncAppContext,
4129 ) -> Result<proto::SearchProjectResponse> {
4130 let peer_id = envelope.original_sender_id()?;
4131 let query = SearchQuery::from_proto(envelope.payload)?;
4132 let result = this
4133 .update(&mut cx, |this, cx| this.search(query, cx))
4134 .await?;
4135
4136 this.update(&mut cx, |this, cx| {
4137 let mut locations = Vec::new();
4138 for (buffer, ranges) in result {
4139 for range in ranges {
4140 let start = serialize_anchor(&range.start);
4141 let end = serialize_anchor(&range.end);
4142 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4143 locations.push(proto::Location {
4144 buffer: Some(buffer),
4145 start: Some(start),
4146 end: Some(end),
4147 });
4148 }
4149 }
4150 Ok(proto::SearchProjectResponse { locations })
4151 })
4152 }
4153
4154 async fn handle_open_buffer_for_symbol(
4155 this: ModelHandle<Self>,
4156 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4157 _: Arc<Client>,
4158 mut cx: AsyncAppContext,
4159 ) -> Result<proto::OpenBufferForSymbolResponse> {
4160 let peer_id = envelope.original_sender_id()?;
4161 let symbol = envelope
4162 .payload
4163 .symbol
4164 .ok_or_else(|| anyhow!("invalid symbol"))?;
4165 let symbol = this.read_with(&cx, |this, _| {
4166 let symbol = this.deserialize_symbol(symbol)?;
4167 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4168 if signature == symbol.signature {
4169 Ok(symbol)
4170 } else {
4171 Err(anyhow!("invalid symbol signature"))
4172 }
4173 })?;
4174 let buffer = this
4175 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4176 .await?;
4177
4178 Ok(proto::OpenBufferForSymbolResponse {
4179 buffer: Some(this.update(&mut cx, |this, cx| {
4180 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4181 })),
4182 })
4183 }
4184
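    /// Computes a signature over a symbol's worktree id and path, keyed by this
    /// project's private nonce, so that symbols received back from peers can be
    /// verified in `handle_open_buffer_for_symbol` before opening a buffer.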
4185 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4186 let mut hasher = Sha256::new();
4187 hasher.update(worktree_id.to_proto().to_be_bytes());
4188 hasher.update(path.to_string_lossy().as_bytes());
4189 hasher.update(self.nonce.to_be_bytes());
4190 hasher.finalize().as_slice().try_into().unwrap()
4191 }
4192
4193 async fn handle_open_buffer_by_id(
4194 this: ModelHandle<Self>,
4195 envelope: TypedEnvelope<proto::OpenBufferById>,
4196 _: Arc<Client>,
4197 mut cx: AsyncAppContext,
4198 ) -> Result<proto::OpenBufferResponse> {
4199 let peer_id = envelope.original_sender_id()?;
4200 let buffer = this
4201 .update(&mut cx, |this, cx| {
4202 this.open_buffer_by_id(envelope.payload.id, cx)
4203 })
4204 .await?;
4205 this.update(&mut cx, |this, cx| {
4206 Ok(proto::OpenBufferResponse {
4207 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4208 })
4209 })
4210 }
4211
4212 async fn handle_open_buffer_by_path(
4213 this: ModelHandle<Self>,
4214 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4215 _: Arc<Client>,
4216 mut cx: AsyncAppContext,
4217 ) -> Result<proto::OpenBufferResponse> {
4218 let peer_id = envelope.original_sender_id()?;
4219 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4220 let open_buffer = this.update(&mut cx, |this, cx| {
4221 this.open_buffer(
4222 ProjectPath {
4223 worktree_id,
4224 path: PathBuf::from(envelope.payload.path).into(),
4225 },
4226 cx,
4227 )
4228 });
4229
4230 let buffer = open_buffer.await?;
4231 this.update(&mut cx, |this, cx| {
4232 Ok(proto::OpenBufferResponse {
4233 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4234 })
4235 })
4236 }
4237
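    /// Converts a `ProjectTransaction` into its protobuf representation for a
    /// given peer, serializing each affected buffer alongside its transaction.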
4238 fn serialize_project_transaction_for_peer(
4239 &mut self,
4240 project_transaction: ProjectTransaction,
4241 peer_id: PeerId,
4242 cx: &AppContext,
4243 ) -> proto::ProjectTransaction {
4244 let mut serialized_transaction = proto::ProjectTransaction {
4245 buffers: Default::default(),
4246 transactions: Default::default(),
4247 };
4248 for (buffer, transaction) in project_transaction.0 {
4249 serialized_transaction
4250 .buffers
4251 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4252 serialized_transaction
4253 .transactions
4254 .push(language::proto::serialize_transaction(&transaction));
4255 }
4256 serialized_transaction
4257 }
4258
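    /// Reconstructs a `ProjectTransaction` from its protobuf representation,
    /// waiting for the referenced edits to arrive and optionally pushing each
    /// transaction onto the corresponding buffer's undo history.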
4259 fn deserialize_project_transaction(
4260 &mut self,
4261 message: proto::ProjectTransaction,
4262 push_to_history: bool,
4263 cx: &mut ModelContext<Self>,
4264 ) -> Task<Result<ProjectTransaction>> {
4265 cx.spawn(|this, mut cx| async move {
4266 let mut project_transaction = ProjectTransaction::default();
4267 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4268 let buffer = this
4269 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4270 .await?;
4271 let transaction = language::proto::deserialize_transaction(transaction)?;
4272 project_transaction.0.insert(buffer, transaction);
4273 }
4274
4275 for (buffer, transaction) in &project_transaction.0 {
4276 buffer
4277 .update(&mut cx, |buffer, _| {
4278 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4279 })
4280 .await;
4281
4282 if push_to_history {
4283 buffer.update(&mut cx, |buffer, _| {
4284 buffer.push_transaction(transaction.clone(), Instant::now());
4285 });
4286 }
4287 }
4288
4289 Ok(project_transaction)
4290 })
4291 }
4292
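    /// Serializes a buffer for a peer. The first time a buffer is shared with a
    /// given peer, its full state is sent; afterwards only the buffer id is
    /// sent, since the peer already has the contents.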
4293 fn serialize_buffer_for_peer(
4294 &mut self,
4295 buffer: &ModelHandle<Buffer>,
4296 peer_id: PeerId,
4297 cx: &AppContext,
4298 ) -> proto::Buffer {
4299 let buffer_id = buffer.read(cx).remote_id();
4300 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4301 if shared_buffers.insert(buffer_id) {
4302 proto::Buffer {
4303 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4304 }
4305 } else {
4306 proto::Buffer {
4307 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4308 }
4309 }
4310 }
4311
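    /// Resolves a `proto::Buffer` into a buffer handle. The id variant waits
    /// until the corresponding buffer has been opened locally, while the state
    /// variant constructs and registers a new buffer from the serialized state.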
4312 fn deserialize_buffer(
4313 &mut self,
4314 buffer: proto::Buffer,
4315 cx: &mut ModelContext<Self>,
4316 ) -> Task<Result<ModelHandle<Buffer>>> {
4317 let replica_id = self.replica_id();
4318
4319 let opened_buffer_tx = self.opened_buffer.0.clone();
4320 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4321 cx.spawn(|this, mut cx| async move {
4322 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4323 proto::buffer::Variant::Id(id) => {
4324 let buffer = loop {
4325 let buffer = this.read_with(&cx, |this, cx| {
4326 this.opened_buffers
4327 .get(&id)
4328 .and_then(|buffer| buffer.upgrade(cx))
4329 });
4330 if let Some(buffer) = buffer {
4331 break buffer;
4332 }
4333 opened_buffer_rx
4334 .next()
4335 .await
4336 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4337 };
4338 Ok(buffer)
4339 }
4340 proto::buffer::Variant::State(mut buffer) => {
4341 let mut buffer_worktree = None;
4342 let mut buffer_file = None;
4343 if let Some(file) = buffer.file.take() {
4344 this.read_with(&cx, |this, cx| {
4345 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4346 let worktree =
4347 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4348 anyhow!("no worktree found for id {}", file.worktree_id)
4349 })?;
4350 buffer_file =
4351 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4352 as Box<dyn language::File>);
4353 buffer_worktree = Some(worktree);
4354 Ok::<_, anyhow::Error>(())
4355 })?;
4356 }
4357
4358 let buffer = cx.add_model(|cx| {
4359 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4360 });
4361
4362 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4363
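                    // Wake up any `deserialize_buffer` calls that are waiting for
                    // this buffer id to become available.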
4364 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4365 Ok(buffer)
4366 }
4367 }
4368 })
4369 }
4370
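    /// Builds a `Symbol` from its protobuf representation. The symbol kind is
    /// transmuted directly from the wire value, which assumes the proto encoding
    /// matches `lsp::SymbolKind`'s representation.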
4371 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4372 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4373 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4374 let start = serialized_symbol
4375 .start
4376 .ok_or_else(|| anyhow!("invalid start"))?;
4377 let end = serialized_symbol
4378 .end
4379 .ok_or_else(|| anyhow!("invalid end"))?;
4380 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4381 let path = PathBuf::from(serialized_symbol.path);
4382 let language = self.languages.select_language(&path);
4383 Ok(Symbol {
4384 source_worktree_id,
4385 worktree_id,
4386 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4387 label: language
4388 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4389 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4390 name: serialized_symbol.name,
4391 path,
4392 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4393 kind,
4394 signature: serialized_symbol
4395 .signature
4396 .try_into()
4397 .map_err(|_| anyhow!("invalid signature"))?,
4398 })
4399 }
4400
4401 async fn handle_buffer_saved(
4402 this: ModelHandle<Self>,
4403 envelope: TypedEnvelope<proto::BufferSaved>,
4404 _: Arc<Client>,
4405 mut cx: AsyncAppContext,
4406 ) -> Result<()> {
4407 let version = deserialize_version(envelope.payload.version);
4408 let mtime = envelope
4409 .payload
4410 .mtime
4411 .ok_or_else(|| anyhow!("missing mtime"))?
4412 .into();
4413
4414 this.update(&mut cx, |this, cx| {
4415 let buffer = this
4416 .opened_buffers
4417 .get(&envelope.payload.buffer_id)
4418 .and_then(|buffer| buffer.upgrade(cx));
4419 if let Some(buffer) = buffer {
4420 buffer.update(cx, |buffer, cx| {
4421 buffer.did_save(version, mtime, None, cx);
4422 });
4423 }
4424 Ok(())
4425 })
4426 }
4427
4428 async fn handle_buffer_reloaded(
4429 this: ModelHandle<Self>,
4430 envelope: TypedEnvelope<proto::BufferReloaded>,
4431 _: Arc<Client>,
4432 mut cx: AsyncAppContext,
4433 ) -> Result<()> {
4434 let payload = envelope.payload.clone();
4435 let version = deserialize_version(payload.version);
4436 let mtime = payload
4437 .mtime
4438 .ok_or_else(|| anyhow!("missing mtime"))?
4439 .into();
4440 this.update(&mut cx, |this, cx| {
4441 let buffer = this
4442 .opened_buffers
4443 .get(&payload.buffer_id)
4444 .and_then(|buffer| buffer.upgrade(cx));
4445 if let Some(buffer) = buffer {
4446 buffer.update(cx, |buffer, cx| {
4447 buffer.did_reload(version, mtime, cx);
4448 });
4449 }
4450 Ok(())
4451 })
4452 }
4453
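    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning up to `max_results` matches. Worktree root names are included
    /// in the candidates only when more than one worktree is open.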
4454 pub fn match_paths<'a>(
4455 &self,
4456 query: &'a str,
4457 include_ignored: bool,
4458 smart_case: bool,
4459 max_results: usize,
4460 cancel_flag: &'a AtomicBool,
4461 cx: &AppContext,
4462 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4463 let worktrees = self
4464 .worktrees(cx)
4465 .filter(|worktree| worktree.read(cx).is_visible())
4466 .collect::<Vec<_>>();
4467 let include_root_name = worktrees.len() > 1;
4468 let candidate_sets = worktrees
4469 .into_iter()
4470 .map(|worktree| CandidateSet {
4471 snapshot: worktree.read(cx).snapshot(),
4472 include_ignored,
4473 include_root_name,
4474 })
4475 .collect::<Vec<_>>();
4476
4477 let background = cx.background().clone();
4478 async move {
4479 fuzzy::match_paths(
4480 candidate_sets.as_slice(),
4481 query,
4482 smart_case,
4483 max_results,
4484 cancel_flag,
4485 background,
4486 )
4487 .await
4488 }
4489 }
4490
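    /// Converts a batch of LSP text edits into anchor-based edits against the
    /// buffer snapshot corresponding to `version`, diffing multi-line edits so
    /// that anchors in unchanged regions are preserved.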
4491 fn edits_from_lsp(
4492 &mut self,
4493 buffer: &ModelHandle<Buffer>,
4494 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4495 version: Option<i32>,
4496 cx: &mut ModelContext<Self>,
4497 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4498 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4499 cx.background().spawn(async move {
4500 let snapshot = snapshot?;
4501 let mut lsp_edits = lsp_edits
4502 .into_iter()
4503 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4504 .peekable();
4505
4506 let mut edits = Vec::new();
4507 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4508 // Combine any LSP edits that are adjacent.
4509 //
4510 // Also, combine LSP edits that are separated from each other by only
4511 // a newline. This is important because for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
4513 // are separated by unchanged newline characters.
4514 //
4515 // In order for the diffing logic below to work properly, any edits that
4516 // cancel each other out must be combined into one.
4517 while let Some((next_range, next_text)) = lsp_edits.peek() {
4518 if next_range.start > range.end {
4519 if next_range.start.row > range.end.row + 1
4520 || next_range.start.column > 0
4521 || snapshot.clip_point_utf16(
4522 PointUtf16::new(range.end.row, u32::MAX),
4523 Bias::Left,
4524 ) > range.end
4525 {
4526 break;
4527 }
4528 new_text.push('\n');
4529 }
4530 range.end = next_range.end;
4531 new_text.push_str(&next_text);
4532 lsp_edits.next();
4533 }
4534
4535 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4536 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4537 {
4538 return Err(anyhow!("invalid edits received from language server"));
4539 }
4540
4541 // For multiline edits, perform a diff of the old and new text so that
4542 // we can identify the changes more precisely, preserving the locations
4543 // of any anchors positioned in the unchanged regions.
4544 if range.end.row > range.start.row {
4545 let mut offset = range.start.to_offset(&snapshot);
4546 let old_text = snapshot.text_for_range(range).collect::<String>();
4547
4548 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4549 let mut moved_since_edit = true;
4550 for change in diff.iter_all_changes() {
4551 let tag = change.tag();
4552 let value = change.value();
4553 match tag {
4554 ChangeTag::Equal => {
4555 offset += value.len();
4556 moved_since_edit = true;
4557 }
4558 ChangeTag::Delete => {
4559 let start = snapshot.anchor_after(offset);
4560 let end = snapshot.anchor_before(offset + value.len());
4561 if moved_since_edit {
4562 edits.push((start..end, String::new()));
4563 } else {
4564 edits.last_mut().unwrap().0.end = end;
4565 }
4566 offset += value.len();
4567 moved_since_edit = false;
4568 }
4569 ChangeTag::Insert => {
4570 if moved_since_edit {
4571 let anchor = snapshot.anchor_after(offset);
4572 edits.push((anchor.clone()..anchor, value.to_string()));
4573 } else {
4574 edits.last_mut().unwrap().1.push_str(value);
4575 }
4576 moved_since_edit = false;
4577 }
4578 }
4579 }
4580 } else if range.end == range.start {
4581 let anchor = snapshot.anchor_after(range.start);
4582 edits.push((anchor.clone()..anchor, new_text));
4583 } else {
4584 let edit_start = snapshot.anchor_after(range.start);
4585 let edit_end = snapshot.anchor_before(range.end);
4586 edits.push((edit_start..edit_end, new_text));
4587 }
4588 }
4589
4590 Ok(edits)
4591 })
4592 }
4593
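    /// Returns the buffer snapshot that was current at the given LSP document
    /// version, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
    /// old. When no version is given, the buffer's current snapshot is returned.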
4594 fn buffer_snapshot_for_lsp_version(
4595 &mut self,
4596 buffer: &ModelHandle<Buffer>,
4597 version: Option<i32>,
4598 cx: &AppContext,
4599 ) -> Result<TextBufferSnapshot> {
4600 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4601
4602 if let Some(version) = version {
4603 let buffer_id = buffer.read(cx).remote_id();
4604 let snapshots = self
4605 .buffer_snapshots
4606 .get_mut(&buffer_id)
4607 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4608 let mut found_snapshot = None;
            snapshots.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    return false;
                }
                if *snapshot_version == version {
                    found_snapshot = Some(snapshot.clone());
                }
                true
            });
4619
4620 found_snapshot.ok_or_else(|| {
4621 anyhow!(
4622 "snapshot not found for buffer {} at version {}",
4623 buffer_id,
4624 version
4625 )
4626 })
4627 } else {
            Ok(buffer.read(cx).text_snapshot())
4629 }
4630 }
4631
4632 fn language_server_for_buffer(
4633 &self,
4634 buffer: &Buffer,
4635 cx: &AppContext,
4636 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4637 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4638 let worktree_id = file.worktree_id(cx);
4639 self.language_servers
4640 .get(&(worktree_id, language.lsp_adapter()?.name()))
4641 } else {
4642 None
4643 }
4644 }
4645}
4646
4647impl WorktreeHandle {
4648 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4649 match self {
4650 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4651 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4652 }
4653 }
4654}
4655
4656impl OpenBuffer {
4657 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4658 match self {
4659 OpenBuffer::Strong(handle) => Some(handle.clone()),
4660 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4661 OpenBuffer::Loading(_) => None,
4662 }
4663 }
4664}
4665
4666struct CandidateSet {
4667 snapshot: Snapshot,
4668 include_ignored: bool,
4669 include_root_name: bool,
4670}
4671
4672impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4673 type Candidates = CandidateSetIter<'a>;
4674
4675 fn id(&self) -> usize {
4676 self.snapshot.id().to_usize()
4677 }
4678
4679 fn len(&self) -> usize {
4680 if self.include_ignored {
4681 self.snapshot.file_count()
4682 } else {
4683 self.snapshot.visible_file_count()
4684 }
4685 }
4686
4687 fn prefix(&self) -> Arc<str> {
4688 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4689 self.snapshot.root_name().into()
4690 } else if self.include_root_name {
4691 format!("{}/", self.snapshot.root_name()).into()
4692 } else {
4693 "".into()
4694 }
4695 }
4696
4697 fn candidates(&'a self, start: usize) -> Self::Candidates {
4698 CandidateSetIter {
4699 traversal: self.snapshot.files(self.include_ignored, start),
4700 }
4701 }
4702}
4703
4704struct CandidateSetIter<'a> {
4705 traversal: Traversal<'a>,
4706}
4707
4708impl<'a> Iterator for CandidateSetIter<'a> {
4709 type Item = PathMatchCandidate<'a>;
4710
4711 fn next(&mut self) -> Option<Self::Item> {
4712 self.traversal.next().map(|entry| {
4713 if let EntryKind::File(char_bag) = entry.kind {
4714 PathMatchCandidate {
4715 path: &entry.path,
4716 char_bag,
4717 }
4718 } else {
4719 unreachable!()
4720 }
4721 })
4722 }
4723}
4724
4725impl Entity for Project {
4726 type Event = Event;
4727
4728 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4729 match &self.client_state {
4730 ProjectClientState::Local { remote_id_rx, .. } => {
4731 if let Some(project_id) = *remote_id_rx.borrow() {
4732 self.client
4733 .send(proto::UnregisterProject { project_id })
4734 .log_err();
4735 }
4736 }
4737 ProjectClientState::Remote { remote_id, .. } => {
4738 self.client
4739 .send(proto::LeaveProject {
4740 project_id: *remote_id,
4741 })
4742 .log_err();
4743 }
4744 }
4745 }
4746
4747 fn app_will_quit(
4748 &mut self,
4749 _: &mut MutableAppContext,
4750 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4751 let shutdown_futures = self
4752 .language_servers
4753 .drain()
4754 .filter_map(|(_, (_, server))| server.shutdown())
4755 .collect::<Vec<_>>();
4756 Some(
4757 async move {
4758 futures::future::join_all(shutdown_futures).await;
4759 }
4760 .boxed(),
4761 )
4762 }
4763}
4764
4765impl Collaborator {
4766 fn from_proto(
4767 message: proto::Collaborator,
4768 user_store: &ModelHandle<UserStore>,
4769 cx: &mut AsyncAppContext,
4770 ) -> impl Future<Output = Result<Self>> {
4771 let user = user_store.update(cx, |user_store, cx| {
4772 user_store.fetch_user(message.user_id, cx)
4773 });
4774
4775 async move {
4776 Ok(Self {
4777 peer_id: PeerId(message.peer_id),
4778 user: user.await?,
4779 replica_id: message.replica_id as ReplicaId,
4780 })
4781 }
4782 }
4783}
4784
4785impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4786 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4787 Self {
4788 worktree_id,
4789 path: path.as_ref().into(),
4790 }
4791 }
4792}
4793
4794impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4795 fn from(options: lsp::CreateFileOptions) -> Self {
4796 Self {
4797 overwrite: options.overwrite.unwrap_or(false),
4798 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4799 }
4800 }
4801}
4802
4803impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4804 fn from(options: lsp::RenameFileOptions) -> Self {
4805 Self {
4806 overwrite: options.overwrite.unwrap_or(false),
4807 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4808 }
4809 }
4810}
4811
4812impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4813 fn from(options: lsp::DeleteFileOptions) -> Self {
4814 Self {
4815 recursive: options.recursive.unwrap_or(false),
4816 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4817 }
4818 }
4819}
4820
4821fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4822 proto::Symbol {
4823 source_worktree_id: symbol.source_worktree_id.to_proto(),
4824 worktree_id: symbol.worktree_id.to_proto(),
4825 language_server_name: symbol.language_server_name.0.to_string(),
4826 name: symbol.name.clone(),
4827 kind: unsafe { mem::transmute(symbol.kind) },
4828 path: symbol.path.to_string_lossy().to_string(),
4829 start: Some(proto::Point {
4830 row: symbol.range.start.row,
4831 column: symbol.range.start.column,
4832 }),
4833 end: Some(proto::Point {
4834 row: symbol.range.end.row,
4835 column: symbol.range.end.column,
4836 }),
4837 signature: symbol.signature.to_vec(),
4838 }
4839}
4840
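/// Computes the path to `path` relative to `base` by comparing their
/// components. For example, relativizing `/a/c/d` against the base `/a/b`
/// yields `../c/d`.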
4841fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4842 let mut path_components = path.components();
4843 let mut base_components = base.components();
4844 let mut components: Vec<Component> = Vec::new();
4845 loop {
4846 match (path_components.next(), base_components.next()) {
4847 (None, None) => break,
4848 (Some(a), None) => {
4849 components.push(a);
4850 components.extend(path_components.by_ref());
4851 break;
4852 }
4853 (None, _) => components.push(Component::ParentDir),
4854 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4855 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4856 (Some(a), Some(_)) => {
4857 components.push(Component::ParentDir);
4858 for _ in base_components {
4859 components.push(Component::ParentDir);
4860 }
4861 components.push(a);
4862 components.extend(path_components.by_ref());
4863 break;
4864 }
4865 }
4866 }
4867 components.iter().map(|c| c.as_os_str()).collect()
4868}
4869
4870impl Item for Buffer {
4871 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4872 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4873 }
4874}
4875
4876#[cfg(test)]
4877mod tests {
4878 use super::{Event, *};
4879 use fs::RealFs;
4880 use futures::{future, StreamExt};
4881 use gpui::test::subscribe;
4882 use language::{
4883 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4884 ToPoint,
4885 };
4886 use lsp::Url;
4887 use serde_json::json;
4888 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4889 use unindent::Unindent as _;
4890 use util::{assert_set_eq, test::temp_tree};
4891 use worktree::WorktreeHandle as _;
4892
4893 #[gpui::test]
4894 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4895 let dir = temp_tree(json!({
4896 "root": {
4897 "apple": "",
4898 "banana": {
4899 "carrot": {
4900 "date": "",
4901 "endive": "",
4902 }
4903 },
4904 "fennel": {
4905 "grape": "",
4906 }
4907 }
4908 }));
4909
4910 let root_link_path = dir.path().join("root_link");
4911 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4912 unix::fs::symlink(
4913 &dir.path().join("root/fennel"),
4914 &dir.path().join("root/finnochio"),
4915 )
4916 .unwrap();
4917
4918 let project = Project::test(Arc::new(RealFs), cx);
4919
4920 let (tree, _) = project
4921 .update(cx, |project, cx| {
4922 project.find_or_create_local_worktree(&root_link_path, true, cx)
4923 })
4924 .await
4925 .unwrap();
4926
4927 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4928 .await;
4929 cx.read(|cx| {
4930 let tree = tree.read(cx);
4931 assert_eq!(tree.file_count(), 5);
4932 assert_eq!(
4933 tree.inode_for_path("fennel/grape"),
4934 tree.inode_for_path("finnochio/grape")
4935 );
4936 });
4937
4938 let cancel_flag = Default::default();
4939 let results = project
4940 .read_with(cx, |project, cx| {
4941 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4942 })
4943 .await;
4944 assert_eq!(
4945 results
4946 .into_iter()
4947 .map(|result| result.path)
4948 .collect::<Vec<Arc<Path>>>(),
4949 vec![
4950 PathBuf::from("banana/carrot/date").into(),
4951 PathBuf::from("banana/carrot/endive").into(),
4952 ]
4953 );
4954 }
4955
4956 #[gpui::test]
4957 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4958 cx.foreground().forbid_parking();
4959
4960 let mut rust_language = Language::new(
4961 LanguageConfig {
4962 name: "Rust".into(),
4963 path_suffixes: vec!["rs".to_string()],
4964 ..Default::default()
4965 },
4966 Some(tree_sitter_rust::language()),
4967 );
4968 let mut json_language = Language::new(
4969 LanguageConfig {
4970 name: "JSON".into(),
4971 path_suffixes: vec!["json".to_string()],
4972 ..Default::default()
4973 },
4974 None,
4975 );
4976 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4977 name: "the-rust-language-server",
4978 capabilities: lsp::ServerCapabilities {
4979 completion_provider: Some(lsp::CompletionOptions {
4980 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4981 ..Default::default()
4982 }),
4983 ..Default::default()
4984 },
4985 ..Default::default()
4986 });
4987 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4988 name: "the-json-language-server",
4989 capabilities: lsp::ServerCapabilities {
4990 completion_provider: Some(lsp::CompletionOptions {
4991 trigger_characters: Some(vec![":".to_string()]),
4992 ..Default::default()
4993 }),
4994 ..Default::default()
4995 },
4996 ..Default::default()
4997 });
4998
4999 let fs = FakeFs::new(cx.background());
5000 fs.insert_tree(
5001 "/the-root",
5002 json!({
5003 "test.rs": "const A: i32 = 1;",
5004 "test2.rs": "",
5005 "Cargo.toml": "a = 1",
5006 "package.json": "{\"a\": 1}",
5007 }),
5008 )
5009 .await;
5010
5011 let project = Project::test(fs.clone(), cx);
5012 project.update(cx, |project, _| {
5013 project.languages.add(Arc::new(rust_language));
5014 project.languages.add(Arc::new(json_language));
5015 });
5016
5017 let worktree_id = project
5018 .update(cx, |project, cx| {
5019 project.find_or_create_local_worktree("/the-root", true, cx)
5020 })
5021 .await
5022 .unwrap()
5023 .0
5024 .read_with(cx, |tree, _| tree.id());
5025
5026 // Open a buffer without an associated language server.
5027 let toml_buffer = project
5028 .update(cx, |project, cx| {
5029 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5030 })
5031 .await
5032 .unwrap();
5033
5034 // Open a buffer with an associated language server.
5035 let rust_buffer = project
5036 .update(cx, |project, cx| {
5037 project.open_buffer((worktree_id, "test.rs"), cx)
5038 })
5039 .await
5040 .unwrap();
5041
5042 // A server is started up, and it is notified about Rust files.
5043 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5044 assert_eq!(
5045 fake_rust_server
5046 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5047 .await
5048 .text_document,
5049 lsp::TextDocumentItem {
5050 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5051 version: 0,
5052 text: "const A: i32 = 1;".to_string(),
5053 language_id: Default::default()
5054 }
5055 );
5056
5057 // The buffer is configured based on the language server's capabilities.
5058 rust_buffer.read_with(cx, |buffer, _| {
5059 assert_eq!(
5060 buffer.completion_triggers(),
5061 &[".".to_string(), "::".to_string()]
5062 );
5063 });
5064 toml_buffer.read_with(cx, |buffer, _| {
5065 assert!(buffer.completion_triggers().is_empty());
5066 });
5067
5068 // Edit a buffer. The changes are reported to the language server.
5069 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5070 assert_eq!(
5071 fake_rust_server
5072 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5073 .await
5074 .text_document,
5075 lsp::VersionedTextDocumentIdentifier::new(
5076 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5077 1
5078 )
5079 );
5080
5081 // Open a third buffer with a different associated language server.
5082 let json_buffer = project
5083 .update(cx, |project, cx| {
5084 project.open_buffer((worktree_id, "package.json"), cx)
5085 })
5086 .await
5087 .unwrap();
5088
5089 // A json language server is started up and is only notified about the json buffer.
5090 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5091 assert_eq!(
5092 fake_json_server
5093 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5094 .await
5095 .text_document,
5096 lsp::TextDocumentItem {
5097 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5098 version: 0,
5099 text: "{\"a\": 1}".to_string(),
5100 language_id: Default::default()
5101 }
5102 );
5103
5104 // This buffer is configured based on the second language server's
5105 // capabilities.
5106 json_buffer.read_with(cx, |buffer, _| {
5107 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5108 });
5109
5110 // When opening another buffer whose language server is already running,
5111 // it is also configured based on the existing language server's capabilities.
5112 let rust_buffer2 = project
5113 .update(cx, |project, cx| {
5114 project.open_buffer((worktree_id, "test2.rs"), cx)
5115 })
5116 .await
5117 .unwrap();
5118 rust_buffer2.read_with(cx, |buffer, _| {
5119 assert_eq!(
5120 buffer.completion_triggers(),
5121 &[".".to_string(), "::".to_string()]
5122 );
5123 });
5124
5125 // Changes are reported only to servers matching the buffer's language.
5126 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5127 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5128 assert_eq!(
5129 fake_rust_server
5130 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5131 .await
5132 .text_document,
5133 lsp::VersionedTextDocumentIdentifier::new(
5134 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5135 1
5136 )
5137 );
5138
5139 // Save notifications are reported to all servers.
5140 toml_buffer
5141 .update(cx, |buffer, cx| buffer.save(cx))
5142 .await
5143 .unwrap();
5144 assert_eq!(
5145 fake_rust_server
5146 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5147 .await
5148 .text_document,
5149 lsp::TextDocumentIdentifier::new(
5150 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5151 )
5152 );
5153 assert_eq!(
5154 fake_json_server
5155 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5156 .await
5157 .text_document,
5158 lsp::TextDocumentIdentifier::new(
5159 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5160 )
5161 );
5162
5163 // Renames are reported only to servers matching the buffer's language.
5164 fs.rename(
5165 Path::new("/the-root/test2.rs"),
5166 Path::new("/the-root/test3.rs"),
5167 Default::default(),
5168 )
5169 .await
5170 .unwrap();
5171 assert_eq!(
5172 fake_rust_server
5173 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5174 .await
5175 .text_document,
5176 lsp::TextDocumentIdentifier::new(
5177 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5178 ),
5179 );
5180 assert_eq!(
5181 fake_rust_server
5182 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5183 .await
5184 .text_document,
5185 lsp::TextDocumentItem {
5186 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5187 version: 0,
5188 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5189 language_id: Default::default()
5190 },
5191 );
5192
5193 rust_buffer2.update(cx, |buffer, cx| {
5194 buffer.update_diagnostics(
5195 DiagnosticSet::from_sorted_entries(
5196 vec![DiagnosticEntry {
5197 diagnostic: Default::default(),
5198 range: Anchor::MIN..Anchor::MAX,
5199 }],
5200 &buffer.snapshot(),
5201 ),
5202 cx,
5203 );
5204 assert_eq!(
5205 buffer
5206 .snapshot()
5207 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5208 .count(),
5209 1
5210 );
5211 });
5212
5213 // When the rename changes the extension of the file, the buffer gets closed on the old
5214 // language server and gets opened on the new one.
5215 fs.rename(
5216 Path::new("/the-root/test3.rs"),
5217 Path::new("/the-root/test3.json"),
5218 Default::default(),
5219 )
5220 .await
5221 .unwrap();
5222 assert_eq!(
5223 fake_rust_server
5224 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5225 .await
5226 .text_document,
5227 lsp::TextDocumentIdentifier::new(
5228 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5229 ),
5230 );
5231 assert_eq!(
5232 fake_json_server
5233 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5234 .await
5235 .text_document,
5236 lsp::TextDocumentItem {
5237 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5238 version: 0,
5239 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5240 language_id: Default::default()
5241 },
5242 );
5243 // We clear the diagnostics, since the language has changed.
5244 rust_buffer2.read_with(cx, |buffer, _| {
5245 assert_eq!(
5246 buffer
5247 .snapshot()
5248 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5249 .count(),
5250 0
5251 );
5252 });
5253
5254 // The renamed file's version resets after changing language server.
5255 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5256 assert_eq!(
5257 fake_json_server
5258 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5259 .await
5260 .text_document,
5261 lsp::VersionedTextDocumentIdentifier::new(
5262 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5263 1
5264 )
5265 );
5266
5267 // Restart language servers
5268 project.update(cx, |project, cx| {
5269 project.restart_language_servers_for_buffers(
5270 vec![rust_buffer.clone(), json_buffer.clone()],
5271 cx,
5272 );
5273 });
5274
5275 let mut rust_shutdown_requests = fake_rust_server
5276 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5277 let mut json_shutdown_requests = fake_json_server
5278 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5279 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5280
5281 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5282 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5283
5284 // Ensure rust document is reopened in new rust language server
5285 assert_eq!(
5286 fake_rust_server
5287 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5288 .await
5289 .text_document,
5290 lsp::TextDocumentItem {
5291 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5292 version: 1,
5293 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5294 language_id: Default::default()
5295 }
5296 );
5297
5298 // Ensure json documents are reopened in new json language server
5299 assert_set_eq!(
5300 [
5301 fake_json_server
5302 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5303 .await
5304 .text_document,
5305 fake_json_server
5306 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5307 .await
5308 .text_document,
5309 ],
5310 [
5311 lsp::TextDocumentItem {
5312 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5313 version: 0,
5314 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5315 language_id: Default::default()
5316 },
5317 lsp::TextDocumentItem {
5318 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5319 version: 1,
5320 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5321 language_id: Default::default()
5322 }
5323 ]
5324 );
5325
5326 // Close notifications are reported only to servers matching the buffer's language.
5327 cx.update(|_| drop(json_buffer));
5328 let close_message = lsp::DidCloseTextDocumentParams {
5329 text_document: lsp::TextDocumentIdentifier::new(
5330 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5331 ),
5332 };
5333 assert_eq!(
5334 fake_json_server
5335 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5336 .await,
5337 close_message,
5338 );
5339 }
5340
5341 #[gpui::test]
5342 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5343 cx.foreground().forbid_parking();
5344
5345 let fs = FakeFs::new(cx.background());
5346 fs.insert_tree(
5347 "/dir",
5348 json!({
5349 "a.rs": "let a = 1;",
5350 "b.rs": "let b = 2;"
5351 }),
5352 )
5353 .await;
5354
5355 let project = Project::test(fs, cx);
5356 let worktree_a_id = project
5357 .update(cx, |project, cx| {
5358 project.find_or_create_local_worktree("/dir/a.rs", true, cx)
5359 })
5360 .await
5361 .unwrap()
5362 .0
5363 .read_with(cx, |tree, _| tree.id());
5364 let worktree_b_id = project
5365 .update(cx, |project, cx| {
5366 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5367 })
5368 .await
5369 .unwrap()
5370 .0
5371 .read_with(cx, |tree, _| tree.id());
5372
5373 let buffer_a = project
5374 .update(cx, |project, cx| {
5375 project.open_buffer((worktree_a_id, ""), cx)
5376 })
5377 .await
5378 .unwrap();
5379 let buffer_b = project
5380 .update(cx, |project, cx| {
5381 project.open_buffer((worktree_b_id, ""), cx)
5382 })
5383 .await
5384 .unwrap();
5385
5386 project.update(cx, |project, cx| {
5387 project
5388 .update_diagnostics(
5389 lsp::PublishDiagnosticsParams {
5390 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5391 version: None,
5392 diagnostics: vec![lsp::Diagnostic {
5393 range: lsp::Range::new(
5394 lsp::Position::new(0, 4),
5395 lsp::Position::new(0, 5),
5396 ),
5397 severity: Some(lsp::DiagnosticSeverity::ERROR),
5398 message: "error 1".to_string(),
5399 ..Default::default()
5400 }],
5401 },
5402 &[],
5403 cx,
5404 )
5405 .unwrap();
5406 project
5407 .update_diagnostics(
5408 lsp::PublishDiagnosticsParams {
5409 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5410 version: None,
5411 diagnostics: vec![lsp::Diagnostic {
5412 range: lsp::Range::new(
5413 lsp::Position::new(0, 4),
5414 lsp::Position::new(0, 5),
5415 ),
5416 severity: Some(lsp::DiagnosticSeverity::WARNING),
5417 message: "error 2".to_string(),
5418 ..Default::default()
5419 }],
5420 },
5421 &[],
5422 cx,
5423 )
5424 .unwrap();
5425 });
5426
5427 buffer_a.read_with(cx, |buffer, _| {
5428 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5429 assert_eq!(
5430 chunks
5431 .iter()
5432 .map(|(s, d)| (s.as_str(), *d))
5433 .collect::<Vec<_>>(),
5434 &[
5435 ("let ", None),
5436 ("a", Some(DiagnosticSeverity::ERROR)),
5437 (" = 1;", None),
5438 ]
5439 );
5440 });
5441 buffer_b.read_with(cx, |buffer, _| {
5442 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5443 assert_eq!(
5444 chunks
5445 .iter()
5446 .map(|(s, d)| (s.as_str(), *d))
5447 .collect::<Vec<_>>(),
5448 &[
5449 ("let ", None),
5450 ("b", Some(DiagnosticSeverity::WARNING)),
5451 (" = 2;", None),
5452 ]
5453 );
5454 });
5455 }
5456
5457 #[gpui::test]
5458 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5459 cx.foreground().forbid_parking();
5460
5461 let progress_token = "the-progress-token";
5462 let mut language = Language::new(
5463 LanguageConfig {
5464 name: "Rust".into(),
5465 path_suffixes: vec!["rs".to_string()],
5466 ..Default::default()
5467 },
5468 Some(tree_sitter_rust::language()),
5469 );
5470 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5471 disk_based_diagnostics_progress_token: Some(progress_token),
5472 disk_based_diagnostics_sources: &["disk"],
5473 ..Default::default()
5474 });
5475
5476 let fs = FakeFs::new(cx.background());
5477 fs.insert_tree(
5478 "/dir",
5479 json!({
5480 "a.rs": "fn a() { A }",
5481 "b.rs": "const y: i32 = 1",
5482 }),
5483 )
5484 .await;
5485
5486 let project = Project::test(fs, cx);
5487 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5488
5489 let (tree, _) = project
5490 .update(cx, |project, cx| {
5491 project.find_or_create_local_worktree("/dir", true, cx)
5492 })
5493 .await
5494 .unwrap();
5495 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5496
5497 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5498 .await;
5499
5500 // Cause worktree to start the fake language server
5501 let _buffer = project
5502 .update(cx, |project, cx| {
5503 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5504 })
5505 .await
5506 .unwrap();
5507
5508 let mut events = subscribe(&project, cx);
5509
5510 let mut fake_server = fake_servers.next().await.unwrap();
5511 fake_server.start_progress(progress_token).await;
5512 assert_eq!(
5513 events.next().await.unwrap(),
5514 Event::DiskBasedDiagnosticsStarted
5515 );
5516
5517 fake_server.start_progress(progress_token).await;
5518 fake_server.end_progress(progress_token).await;
5519 fake_server.start_progress(progress_token).await;
5520
5521 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5522 lsp::PublishDiagnosticsParams {
5523 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5524 version: None,
5525 diagnostics: vec![lsp::Diagnostic {
5526 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5527 severity: Some(lsp::DiagnosticSeverity::ERROR),
5528 message: "undefined variable 'A'".to_string(),
5529 ..Default::default()
5530 }],
5531 },
5532 );
5533 assert_eq!(
5534 events.next().await.unwrap(),
5535 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5536 );
5537
5538 fake_server.end_progress(progress_token).await;
5539 fake_server.end_progress(progress_token).await;
5540 assert_eq!(
5541 events.next().await.unwrap(),
5542 Event::DiskBasedDiagnosticsUpdated
5543 );
5544 assert_eq!(
5545 events.next().await.unwrap(),
5546 Event::DiskBasedDiagnosticsFinished
5547 );
5548
5549 let buffer = project
5550 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5551 .await
5552 .unwrap();
5553
5554 buffer.read_with(cx, |buffer, _| {
5555 let snapshot = buffer.snapshot();
5556 let diagnostics = snapshot
5557 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5558 .collect::<Vec<_>>();
5559 assert_eq!(
5560 diagnostics,
5561 &[DiagnosticEntry {
5562 range: Point::new(0, 9)..Point::new(0, 10),
5563 diagnostic: Diagnostic {
5564 severity: lsp::DiagnosticSeverity::ERROR,
5565 message: "undefined variable 'A'".to_string(),
5566 group_id: 0,
5567 is_primary: true,
5568 ..Default::default()
5569 }
5570 }]
5571 )
5572 });
5573 }
5574
5575 #[gpui::test]
5576 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5577 cx.foreground().forbid_parking();
5578
5579 let progress_token = "the-progress-token";
5580 let mut language = Language::new(
5581 LanguageConfig {
5582 path_suffixes: vec!["rs".to_string()],
5583 ..Default::default()
5584 },
5585 None,
5586 );
5587 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5588 disk_based_diagnostics_sources: &["disk"],
5589 disk_based_diagnostics_progress_token: Some(progress_token),
5590 ..Default::default()
5591 });
5592
5593 let fs = FakeFs::new(cx.background());
5594 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5595
5596 let project = Project::test(fs, cx);
5597 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5598
5599 let worktree_id = project
5600 .update(cx, |project, cx| {
5601 project.find_or_create_local_worktree("/dir", true, cx)
5602 })
5603 .await
5604 .unwrap()
5605 .0
5606 .read_with(cx, |tree, _| tree.id());
5607
5608 let buffer = project
5609 .update(cx, |project, cx| {
5610 project.open_buffer((worktree_id, "a.rs"), cx)
5611 })
5612 .await
5613 .unwrap();
5614
5615 // Simulate diagnostics starting to update.
5616 let mut fake_server = fake_servers.next().await.unwrap();
5617 fake_server.start_progress(progress_token).await;
5618
5619 // Restart the server before the diagnostics finish updating.
5620 project.update(cx, |project, cx| {
5621 project.restart_language_servers_for_buffers([buffer], cx);
5622 });
5623 let mut events = subscribe(&project, cx);
5624
5625 // Simulate the newly started server sending more diagnostics.
5626 let mut fake_server = fake_servers.next().await.unwrap();
5627 fake_server.start_progress(progress_token).await;
5628 assert_eq!(
5629 events.next().await.unwrap(),
5630 Event::DiskBasedDiagnosticsStarted
5631 );
5632
5633 // All diagnostics are considered done, despite the old server's diagnostic
5634 // task never completing.
5635 fake_server.end_progress(progress_token).await;
5636 assert_eq!(
5637 events.next().await.unwrap(),
5638 Event::DiskBasedDiagnosticsUpdated
5639 );
5640 assert_eq!(
5641 events.next().await.unwrap(),
5642 Event::DiskBasedDiagnosticsFinished
5643 );
5644 project.read_with(cx, |project, _| {
5645 assert!(!project.is_running_disk_based_diagnostics());
5646 });
5647 }
5648
5649 #[gpui::test]
5650 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5651 cx.foreground().forbid_parking();
5652
5653 let mut language = Language::new(
5654 LanguageConfig {
5655 name: "Rust".into(),
5656 path_suffixes: vec!["rs".to_string()],
5657 ..Default::default()
5658 },
5659 Some(tree_sitter_rust::language()),
5660 );
5661 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5662 disk_based_diagnostics_sources: &["disk"],
5663 ..Default::default()
5664 });
5665
5666 let text = "
5667 fn a() { A }
5668 fn b() { BB }
5669 fn c() { CCC }
5670 "
5671 .unindent();
5672
5673 let fs = FakeFs::new(cx.background());
5674 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5675
5676 let project = Project::test(fs, cx);
5677 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5678
5679 let worktree_id = project
5680 .update(cx, |project, cx| {
5681 project.find_or_create_local_worktree("/dir", true, cx)
5682 })
5683 .await
5684 .unwrap()
5685 .0
5686 .read_with(cx, |tree, _| tree.id());
5687
5688 let buffer = project
5689 .update(cx, |project, cx| {
5690 project.open_buffer((worktree_id, "a.rs"), cx)
5691 })
5692 .await
5693 .unwrap();
5694
5695 let mut fake_server = fake_servers.next().await.unwrap();
5696 let open_notification = fake_server
5697 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5698 .await;
5699
5700 // Edit the buffer, moving the content down
5701 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5702 let change_notification_1 = fake_server
5703 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5704 .await;
5705 assert!(
5706 change_notification_1.text_document.version > open_notification.text_document.version
5707 );
5708
5709 // Report some diagnostics for the initial version of the buffer
5710 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5711 lsp::PublishDiagnosticsParams {
5712 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5713 version: Some(open_notification.text_document.version),
5714 diagnostics: vec![
5715 lsp::Diagnostic {
5716 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5717 severity: Some(DiagnosticSeverity::ERROR),
5718 message: "undefined variable 'A'".to_string(),
5719 source: Some("disk".to_string()),
5720 ..Default::default()
5721 },
5722 lsp::Diagnostic {
5723 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5724 severity: Some(DiagnosticSeverity::ERROR),
5725 message: "undefined variable 'BB'".to_string(),
5726 source: Some("disk".to_string()),
5727 ..Default::default()
5728 },
5729 lsp::Diagnostic {
5730 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5731 severity: Some(DiagnosticSeverity::ERROR),
5732 source: Some("disk".to_string()),
5733 message: "undefined variable 'CCC'".to_string(),
5734 ..Default::default()
5735 },
5736 ],
5737 },
5738 );
5739
5740 // The diagnostics have moved down since they were created.
5741 buffer.next_notification(cx).await;
5742 buffer.read_with(cx, |buffer, _| {
5743 assert_eq!(
5744 buffer
5745 .snapshot()
5746 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5747 .collect::<Vec<_>>(),
5748 &[
5749 DiagnosticEntry {
5750 range: Point::new(3, 9)..Point::new(3, 11),
5751 diagnostic: Diagnostic {
5752 severity: DiagnosticSeverity::ERROR,
5753 message: "undefined variable 'BB'".to_string(),
5754 is_disk_based: true,
5755 group_id: 1,
5756 is_primary: true,
5757 ..Default::default()
5758 },
5759 },
5760 DiagnosticEntry {
5761 range: Point::new(4, 9)..Point::new(4, 12),
5762 diagnostic: Diagnostic {
5763 severity: DiagnosticSeverity::ERROR,
5764 message: "undefined variable 'CCC'".to_string(),
5765 is_disk_based: true,
5766 group_id: 2,
5767 is_primary: true,
5768 ..Default::default()
5769 }
5770 }
5771 ]
5772 );
5773 assert_eq!(
5774 chunks_with_diagnostics(buffer, 0..buffer.len()),
5775 [
5776 ("\n\nfn a() { ".to_string(), None),
5777 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5778 (" }\nfn b() { ".to_string(), None),
5779 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5780 (" }\nfn c() { ".to_string(), None),
5781 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5782 (" }\n".to_string(), None),
5783 ]
5784 );
5785 assert_eq!(
5786 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5787 [
5788 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5789 (" }\nfn c() { ".to_string(), None),
5790 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5791 ]
5792 );
5793 });
5794
5795 // Ensure overlapping diagnostics are highlighted correctly.
5796 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5797 lsp::PublishDiagnosticsParams {
5798 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5799 version: Some(open_notification.text_document.version),
5800 diagnostics: vec![
5801 lsp::Diagnostic {
5802 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5803 severity: Some(DiagnosticSeverity::ERROR),
5804 message: "undefined variable 'A'".to_string(),
5805 source: Some("disk".to_string()),
5806 ..Default::default()
5807 },
5808 lsp::Diagnostic {
5809 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5810 severity: Some(DiagnosticSeverity::WARNING),
5811 message: "unreachable statement".to_string(),
5812 source: Some("disk".to_string()),
5813 ..Default::default()
5814 },
5815 ],
5816 },
5817 );
5818
5819 buffer.next_notification(cx).await;
5820 buffer.read_with(cx, |buffer, _| {
5821 assert_eq!(
5822 buffer
5823 .snapshot()
5824 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5825 .collect::<Vec<_>>(),
5826 &[
5827 DiagnosticEntry {
5828 range: Point::new(2, 9)..Point::new(2, 12),
5829 diagnostic: Diagnostic {
5830 severity: DiagnosticSeverity::WARNING,
5831 message: "unreachable statement".to_string(),
5832 is_disk_based: true,
5833 group_id: 1,
5834 is_primary: true,
5835 ..Default::default()
5836 }
5837 },
5838 DiagnosticEntry {
5839 range: Point::new(2, 9)..Point::new(2, 10),
5840 diagnostic: Diagnostic {
5841 severity: DiagnosticSeverity::ERROR,
5842 message: "undefined variable 'A'".to_string(),
5843 is_disk_based: true,
5844 group_id: 0,
5845 is_primary: true,
5846 ..Default::default()
5847 },
5848 }
5849 ]
5850 );
5851 assert_eq!(
5852 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5853 [
5854 ("fn a() { ".to_string(), None),
5855 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5856 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5857 ("\n".to_string(), None),
5858 ]
5859 );
5860 assert_eq!(
5861 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5862 [
5863 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5864 ("\n".to_string(), None),
5865 ]
5866 );
5867 });
5868
5869 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5870 // changes since the last save.
5871 buffer.update(cx, |buffer, cx| {
5872 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5873 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5874 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5875 });
5876 let change_notification_2 = fake_server
5877 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5878 .await;
5879 assert!(
5880 change_notification_2.text_document.version
5881 > change_notification_1.text_document.version
5882 );
5883
        // Handle diagnostics that are reported out of order relative to their positions in the buffer.
5885 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5886 lsp::PublishDiagnosticsParams {
5887 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5888 version: Some(change_notification_2.text_document.version),
5889 diagnostics: vec![
5890 lsp::Diagnostic {
5891 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5892 severity: Some(DiagnosticSeverity::ERROR),
5893 message: "undefined variable 'BB'".to_string(),
5894 source: Some("disk".to_string()),
5895 ..Default::default()
5896 },
5897 lsp::Diagnostic {
5898 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5899 severity: Some(DiagnosticSeverity::WARNING),
5900 message: "undefined variable 'A'".to_string(),
5901 source: Some("disk".to_string()),
5902 ..Default::default()
5903 },
5904 ],
5905 },
5906 );
5907
5908 buffer.next_notification(cx).await;
5909 buffer.read_with(cx, |buffer, _| {
5910 assert_eq!(
5911 buffer
5912 .snapshot()
5913 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5914 .collect::<Vec<_>>(),
5915 &[
5916 DiagnosticEntry {
5917 range: Point::new(2, 21)..Point::new(2, 22),
5918 diagnostic: Diagnostic {
5919 severity: DiagnosticSeverity::WARNING,
5920 message: "undefined variable 'A'".to_string(),
5921 is_disk_based: true,
5922 group_id: 1,
5923 is_primary: true,
5924 ..Default::default()
5925 }
5926 },
5927 DiagnosticEntry {
5928 range: Point::new(3, 9)..Point::new(3, 14),
5929 diagnostic: Diagnostic {
5930 severity: DiagnosticSeverity::ERROR,
5931 message: "undefined variable 'BB'".to_string(),
5932 is_disk_based: true,
5933 group_id: 0,
5934 is_primary: true,
5935 ..Default::default()
5936 },
5937 }
5938 ]
5939 );
5940 });
5941 }
5942
5943 #[gpui::test]
5944 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5945 cx.foreground().forbid_parking();
5946
5947 let text = concat!(
5948 "let one = ;\n", //
5949 "let two = \n",
5950 "let three = 3;\n",
5951 );
5952
5953 let fs = FakeFs::new(cx.background());
5954 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5955
5956 let project = Project::test(fs, cx);
5957 let worktree_id = project
5958 .update(cx, |project, cx| {
5959 project.find_or_create_local_worktree("/dir", true, cx)
5960 })
5961 .await
5962 .unwrap()
5963 .0
5964 .read_with(cx, |tree, _| tree.id());
5965
5966 let buffer = project
5967 .update(cx, |project, cx| {
5968 project.open_buffer((worktree_id, "a.rs"), cx)
5969 })
5970 .await
5971 .unwrap();
5972
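        // Report two diagnostics with empty ranges: one just before the `;` on the
        // first line, and one at the end of the second line.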
5973 project.update(cx, |project, cx| {
5974 project
5975 .update_buffer_diagnostics(
5976 &buffer,
5977 vec![
5978 DiagnosticEntry {
5979 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5980 diagnostic: Diagnostic {
5981 severity: DiagnosticSeverity::ERROR,
5982 message: "syntax error 1".to_string(),
5983 ..Default::default()
5984 },
5985 },
5986 DiagnosticEntry {
5987 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5988 diagnostic: Diagnostic {
5989 severity: DiagnosticSeverity::ERROR,
5990 message: "syntax error 2".to_string(),
5991 ..Default::default()
5992 },
5993 },
5994 ],
5995 None,
5996 cx,
5997 )
5998 .unwrap();
5999 });
6000
6001 // An empty range is extended forward to include the following character.
6002 // At the end of a line, an empty range is extended backward to include
6003 // the preceding character.
6004 buffer.read_with(cx, |buffer, _| {
6005 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6006 assert_eq!(
6007 chunks
6008 .iter()
6009 .map(|(s, d)| (s.as_str(), *d))
6010 .collect::<Vec<_>>(),
6011 &[
6012 ("let one = ", None),
6013 (";", Some(DiagnosticSeverity::ERROR)),
6014 ("\nlet two =", None),
6015 (" ", Some(DiagnosticSeverity::ERROR)),
6016 ("\nlet three = 3;\n", None)
6017 ]
6018 );
6019 });
6020 }
6021
6022 #[gpui::test]
6023 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6024 cx.foreground().forbid_parking();
6025
6026 let mut language = Language::new(
6027 LanguageConfig {
6028 name: "Rust".into(),
6029 path_suffixes: vec!["rs".to_string()],
6030 ..Default::default()
6031 },
6032 Some(tree_sitter_rust::language()),
6033 );
6034 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6035
6036 let text = "
6037 fn a() {
6038 f1();
6039 }
6040 fn b() {
6041 f2();
6042 }
6043 fn c() {
6044 f3();
6045 }
6046 "
6047 .unindent();
6048
6049 let fs = FakeFs::new(cx.background());
6050 fs.insert_tree(
6051 "/dir",
6052 json!({
6053 "a.rs": text.clone(),
6054 }),
6055 )
6056 .await;
6057
6058 let project = Project::test(fs, cx);
6059 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6060
6061 let worktree_id = project
6062 .update(cx, |project, cx| {
6063 project.find_or_create_local_worktree("/dir", true, cx)
6064 })
6065 .await
6066 .unwrap()
6067 .0
6068 .read_with(cx, |tree, _| tree.id());
6069
6070 let buffer = project
6071 .update(cx, |project, cx| {
6072 project.open_buffer((worktree_id, "a.rs"), cx)
6073 })
6074 .await
6075 .unwrap();
6076
6077 let mut fake_server = fake_servers.next().await.unwrap();
6078 let lsp_document_version = fake_server
6079 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6080 .await
6081 .text_document
6082 .version;
6083
6084 // Simulate editing the buffer after the language server computes some edits.
6085 buffer.update(cx, |buffer, cx| {
6086 buffer.edit(
6087 [Point::new(0, 0)..Point::new(0, 0)],
6088 "// above first function\n",
6089 cx,
6090 );
6091 buffer.edit(
6092 [Point::new(2, 0)..Point::new(2, 0)],
6093 " // inside first function\n",
6094 cx,
6095 );
6096 buffer.edit(
6097 [Point::new(6, 4)..Point::new(6, 4)],
6098 "// inside second function ",
6099 cx,
6100 );
6101
6102 assert_eq!(
6103 buffer.text(),
6104 "
6105 // above first function
6106 fn a() {
6107 // inside first function
6108 f1();
6109 }
6110 fn b() {
6111 // inside second function f2();
6112 }
6113 fn c() {
6114 f3();
6115 }
6116 "
6117 .unindent()
6118 );
6119 });
6120
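        // Interpret LSP edits that were computed against the older document version.
        // They should be adjusted to account for the buffer's more recent edits.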
6121 let edits = project
6122 .update(cx, |project, cx| {
6123 project.edits_from_lsp(
6124 &buffer,
6125 vec![
6126 // replace body of first function
6127 lsp::TextEdit {
6128 range: lsp::Range::new(
6129 lsp::Position::new(0, 0),
6130 lsp::Position::new(3, 0),
6131 ),
6132 new_text: "
6133 fn a() {
6134 f10();
6135 }
6136 "
6137 .unindent(),
6138 },
6139 // edit inside second function
6140 lsp::TextEdit {
6141 range: lsp::Range::new(
6142 lsp::Position::new(4, 6),
6143 lsp::Position::new(4, 6),
6144 ),
6145 new_text: "00".into(),
6146 },
6147 // edit inside third function via two distinct edits
6148 lsp::TextEdit {
6149 range: lsp::Range::new(
6150 lsp::Position::new(7, 5),
6151 lsp::Position::new(7, 5),
6152 ),
6153 new_text: "4000".into(),
6154 },
6155 lsp::TextEdit {
6156 range: lsp::Range::new(
6157 lsp::Position::new(7, 5),
6158 lsp::Position::new(7, 6),
6159 ),
6160 new_text: "".into(),
6161 },
6162 ],
6163 Some(lsp_document_version),
6164 cx,
6165 )
6166 })
6167 .await
6168 .unwrap();
6169
6170 buffer.update(cx, |buffer, cx| {
6171 for (range, new_text) in edits {
6172 buffer.edit([range], new_text, cx);
6173 }
6174 assert_eq!(
6175 buffer.text(),
6176 "
6177 // above first function
6178 fn a() {
6179 // inside first function
6180 f10();
6181 }
6182 fn b() {
6183 // inside second function f200();
6184 }
6185 fn c() {
6186 f4000();
6187 }
6188 "
6189 .unindent()
6190 );
6191 });
6192 }
6193
6194 #[gpui::test]
6195 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6196 cx.foreground().forbid_parking();
6197
6198 let text = "
6199 use a::b;
6200 use a::c;
6201
6202 fn f() {
6203 b();
6204 c();
6205 }
6206 "
6207 .unindent();
6208
6209 let fs = FakeFs::new(cx.background());
6210 fs.insert_tree(
6211 "/dir",
6212 json!({
6213 "a.rs": text.clone(),
6214 }),
6215 )
6216 .await;
6217
6218 let project = Project::test(fs, cx);
6219 let worktree_id = project
6220 .update(cx, |project, cx| {
6221 project.find_or_create_local_worktree("/dir", true, cx)
6222 })
6223 .await
6224 .unwrap()
6225 .0
6226 .read_with(cx, |tree, _| tree.id());
6227
6228 let buffer = project
6229 .update(cx, |project, cx| {
6230 project.open_buffer((worktree_id, "a.rs"), cx)
6231 })
6232 .await
6233 .unwrap();
6234
6235 // Simulate the language server sending us a small edit in the form of a very large diff.
6236 // Rust-analyzer does this when performing a merge-imports code action.
6237 let edits = project
6238 .update(cx, |project, cx| {
6239 project.edits_from_lsp(
6240 &buffer,
6241 [
6242 // Replace the first use statement without editing the semicolon.
6243 lsp::TextEdit {
6244 range: lsp::Range::new(
6245 lsp::Position::new(0, 4),
6246 lsp::Position::new(0, 8),
6247 ),
6248 new_text: "a::{b, c}".into(),
6249 },
6250 // Reinsert the remainder of the file between the semicolon and the final
6251 // newline of the file.
6252 lsp::TextEdit {
6253 range: lsp::Range::new(
6254 lsp::Position::new(0, 9),
6255 lsp::Position::new(0, 9),
6256 ),
6257 new_text: "\n\n".into(),
6258 },
6259 lsp::TextEdit {
6260 range: lsp::Range::new(
6261 lsp::Position::new(0, 9),
6262 lsp::Position::new(0, 9),
6263 ),
6264 new_text: "
6265 fn f() {
6266 b();
6267 c();
6268 }"
6269 .unindent(),
6270 },
6271 // Delete everything after the first newline of the file.
6272 lsp::TextEdit {
6273 range: lsp::Range::new(
6274 lsp::Position::new(1, 0),
6275 lsp::Position::new(7, 0),
6276 ),
6277 new_text: "".into(),
6278 },
6279 ],
6280 None,
6281 cx,
6282 )
6283 })
6284 .await
6285 .unwrap();
6286
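        // The overlapping insertions and deletion are minimized down to two simple
        // edits: rewriting the first import and deleting the now-redundant second line.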
6287 buffer.update(cx, |buffer, cx| {
6288 let edits = edits
6289 .into_iter()
6290 .map(|(range, text)| {
6291 (
6292 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6293 text,
6294 )
6295 })
6296 .collect::<Vec<_>>();
6297
6298 assert_eq!(
6299 edits,
6300 [
6301 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6302 (Point::new(1, 0)..Point::new(2, 0), "".into())
6303 ]
6304 );
6305
6306 for (range, new_text) in edits {
6307 buffer.edit([range], new_text, cx);
6308 }
6309 assert_eq!(
6310 buffer.text(),
6311 "
6312 use a::{b, c};
6313
6314 fn f() {
6315 b();
6316 c();
6317 }
6318 "
6319 .unindent()
6320 );
6321 });
6322 }
6323
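    // Collects the text of `buffer` within `range` into (text, severity) chunks,
    // merging adjacent chunks that share the same diagnostic severity.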
6324 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6325 buffer: &Buffer,
6326 range: Range<T>,
6327 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6328 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6329 for chunk in buffer.snapshot().chunks(range, true) {
6330 if chunks.last().map_or(false, |prev_chunk| {
6331 prev_chunk.1 == chunk.diagnostic_severity
6332 }) {
6333 chunks.last_mut().unwrap().0.push_str(chunk.text);
6334 } else {
6335 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6336 }
6337 }
6338 chunks
6339 }
6340
6341 #[gpui::test]
6342 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6343 let dir = temp_tree(json!({
6344 "root": {
6345 "dir1": {},
6346 "dir2": {
6347 "dir3": {}
6348 }
6349 }
6350 }));
6351
6352 let project = Project::test(Arc::new(RealFs), cx);
6353 let (tree, _) = project
6354 .update(cx, |project, cx| {
                project.find_or_create_local_worktree(dir.path(), true, cx)
6356 })
6357 .await
6358 .unwrap();
6359
6360 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6361 .await;
6362
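        // Path matching only considers files, so a worktree containing nothing but
        // empty directories should produce no matches.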
6363 let cancel_flag = Default::default();
6364 let results = project
6365 .read_with(cx, |project, cx| {
6366 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6367 })
6368 .await;
6369
6370 assert!(results.is_empty());
6371 }
6372
6373 #[gpui::test]
6374 async fn test_definition(cx: &mut gpui::TestAppContext) {
6375 let mut language = Language::new(
6376 LanguageConfig {
6377 name: "Rust".into(),
6378 path_suffixes: vec!["rs".to_string()],
6379 ..Default::default()
6380 },
6381 Some(tree_sitter_rust::language()),
6382 );
6383 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6384
6385 let fs = FakeFs::new(cx.background());
6386 fs.insert_tree(
6387 "/dir",
6388 json!({
6389 "a.rs": "const fn a() { A }",
6390 "b.rs": "const y: i32 = crate::a()",
6391 }),
6392 )
6393 .await;
6394
6395 let project = Project::test(fs, cx);
6396 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6397
6398 let (tree, _) = project
6399 .update(cx, |project, cx| {
6400 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6401 })
6402 .await
6403 .unwrap();
6404 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6405 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6406 .await;
6407
6408 let buffer = project
6409 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6410 .await
6411 .unwrap();
6412
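        // Respond to the go-to-definition request with a location in a file that
        // lies outside of the project's single visible worktree.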
6413 let fake_server = fake_servers.next().await.unwrap();
6414 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6415 let params = params.text_document_position_params;
6416 assert_eq!(
6417 params.text_document.uri.to_file_path().unwrap(),
6418 Path::new("/dir/b.rs"),
6419 );
6420 assert_eq!(params.position, lsp::Position::new(0, 22));
6421
6422 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6423 lsp::Location::new(
6424 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6425 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6426 ),
6427 )))
6428 });
6429
6430 let mut definitions = project
6431 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6432 .await
6433 .unwrap();
6434
6435 assert_eq!(definitions.len(), 1);
6436 let definition = definitions.pop().unwrap();
6437 cx.update(|cx| {
6438 let target_buffer = definition.buffer.read(cx);
6439 assert_eq!(
6440 target_buffer
6441 .file()
6442 .unwrap()
6443 .as_local()
6444 .unwrap()
6445 .abs_path(cx),
6446 Path::new("/dir/a.rs"),
6447 );
6448 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6449 assert_eq!(
6450 list_worktrees(&project, cx),
6451 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6452 );
6453
6454 drop(definition);
6455 });
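        // Dropping the definition releases the buffer it references, which in turn
        // releases the invisible worktree that was created for `/dir/a.rs`.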
6456 cx.read(|cx| {
6457 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6458 });
6459
6460 fn list_worktrees<'a>(
6461 project: &'a ModelHandle<Project>,
6462 cx: &'a AppContext,
6463 ) -> Vec<(&'a Path, bool)> {
6464 project
6465 .read(cx)
6466 .worktrees(cx)
6467 .map(|worktree| {
6468 let worktree = worktree.read(cx);
6469 (
6470 worktree.as_local().unwrap().abs_path().as_ref(),
6471 worktree.is_visible(),
6472 )
6473 })
6474 .collect::<Vec<_>>()
6475 }
6476 }
6477
6478 #[gpui::test(iterations = 10)]
6479 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6480 let mut language = Language::new(
6481 LanguageConfig {
6482 name: "TypeScript".into(),
6483 path_suffixes: vec!["ts".to_string()],
6484 ..Default::default()
6485 },
6486 None,
6487 );
6488 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6489
6490 let fs = FakeFs::new(cx.background());
6491 fs.insert_tree(
6492 "/dir",
6493 json!({
6494 "a.ts": "a",
6495 }),
6496 )
6497 .await;
6498
6499 let project = Project::test(fs, cx);
6500 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6501
6502 let (tree, _) = project
6503 .update(cx, |project, cx| {
6504 project.find_or_create_local_worktree("/dir", true, cx)
6505 })
6506 .await
6507 .unwrap();
6508 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6509 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6510 .await;
6511
6512 let buffer = project
6513 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6514 .await
6515 .unwrap();
6516
6517 let fake_server = fake_language_servers.next().await.unwrap();
6518
        // The language server returns code actions that contain commands but no edits.
6520 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6521 fake_server
6522 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6523 Ok(Some(vec![
6524 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6525 title: "The code action".into(),
6526 command: Some(lsp::Command {
6527 title: "The command".into(),
6528 command: "_the/command".into(),
6529 arguments: Some(vec![json!("the-argument")]),
6530 }),
6531 ..Default::default()
6532 }),
6533 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6534 title: "two".into(),
6535 ..Default::default()
6536 }),
6537 ]))
6538 })
6539 .next()
6540 .await;
6541
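        // Apply the first code action, which carries a command but no edits.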
6542 let action = actions.await.unwrap()[0].clone();
6543 let apply = project.update(cx, |project, cx| {
6544 project.apply_code_action(buffer.clone(), action, true, cx)
6545 });
6546
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the action's command instead.
6549 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6550 |action, _| async move { Ok(action) },
6551 );
6552
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6555 fake_server
6556 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6557 let fake = fake_server.clone();
6558 move |params, _| {
6559 assert_eq!(params.command, "_the/command");
6560 let fake = fake.clone();
6561 async move {
6562 fake.server
6563 .request::<lsp::request::ApplyWorkspaceEdit>(
6564 lsp::ApplyWorkspaceEditParams {
6565 label: None,
6566 edit: lsp::WorkspaceEdit {
6567 changes: Some(
6568 [(
6569 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6570 vec![lsp::TextEdit {
6571 range: lsp::Range::new(
6572 lsp::Position::new(0, 0),
6573 lsp::Position::new(0, 0),
6574 ),
6575 new_text: "X".into(),
6576 }],
6577 )]
6578 .into_iter()
6579 .collect(),
6580 ),
6581 ..Default::default()
6582 },
6583 },
6584 )
6585 .await
6586 .unwrap();
6587 Ok(Some(json!(null)))
6588 }
6589 }
6590 })
6591 .next()
6592 .await;
6593
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
6596 let transaction = apply.await.unwrap();
6597 assert!(transaction.0.contains_key(&buffer));
6598 buffer.update(cx, |buffer, cx| {
6599 assert_eq!(buffer.text(), "Xa");
6600 buffer.undo(cx);
6601 assert_eq!(buffer.text(), "a");
6602 });
6603 }
6604
6605 #[gpui::test]
6606 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6607 let fs = FakeFs::new(cx.background());
6608 fs.insert_tree(
6609 "/dir",
6610 json!({
6611 "file1": "the old contents",
6612 }),
6613 )
6614 .await;
6615
6616 let project = Project::test(fs.clone(), cx);
6617 let worktree_id = project
6618 .update(cx, |p, cx| {
6619 p.find_or_create_local_worktree("/dir", true, cx)
6620 })
6621 .await
6622 .unwrap()
6623 .0
6624 .read_with(cx, |tree, _| tree.id());
6625
6626 let buffer = project
6627 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6628 .await
6629 .unwrap();
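        // Make a large edit to the buffer, save it, and verify that the file on disk
        // matches the buffer's contents.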
6630 buffer
6631 .update(cx, |buffer, cx| {
6632 assert_eq!(buffer.text(), "the old contents");
6633 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6634 buffer.save(cx)
6635 })
6636 .await
6637 .unwrap();
6638
6639 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6640 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6641 }
6642
6643 #[gpui::test]
6644 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6645 let fs = FakeFs::new(cx.background());
6646 fs.insert_tree(
6647 "/dir",
6648 json!({
6649 "file1": "the old contents",
6650 }),
6651 )
6652 .await;
6653
6654 let project = Project::test(fs.clone(), cx);
6655 let worktree_id = project
6656 .update(cx, |p, cx| {
6657 p.find_or_create_local_worktree("/dir/file1", true, cx)
6658 })
6659 .await
6660 .unwrap()
6661 .0
6662 .read_with(cx, |tree, _| tree.id());
6663
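        // The worktree is rooted at the file itself, so the buffer is addressed by an
        // empty relative path within it.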
6664 let buffer = project
6665 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6666 .await
6667 .unwrap();
6668 buffer
6669 .update(cx, |buffer, cx| {
6670 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6671 buffer.save(cx)
6672 })
6673 .await
6674 .unwrap();
6675
6676 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6677 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6678 }
6679
6680 #[gpui::test]
6681 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6682 let fs = FakeFs::new(cx.background());
6683 fs.insert_tree("/dir", json!({})).await;
6684
6685 let project = Project::test(fs.clone(), cx);
6686 let (worktree, _) = project
6687 .update(cx, |project, cx| {
6688 project.find_or_create_local_worktree("/dir", true, cx)
6689 })
6690 .await
6691 .unwrap();
6692 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6693
6694 let buffer = project.update(cx, |project, cx| {
6695 project.create_buffer("", None, cx).unwrap()
6696 });
6697 buffer.update(cx, |buffer, cx| {
6698 buffer.edit([0..0], "abc", cx);
6699 assert!(buffer.is_dirty());
6700 assert!(!buffer.has_conflict());
6701 });
6702 project
6703 .update(cx, |project, cx| {
6704 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6705 })
6706 .await
6707 .unwrap();
6708 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6709 buffer.read_with(cx, |buffer, cx| {
6710 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6711 assert!(!buffer.is_dirty());
6712 assert!(!buffer.has_conflict());
6713 });
6714
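        // Opening the newly saved path returns the same buffer that was just saved,
        // rather than creating a new one.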
6715 let opened_buffer = project
6716 .update(cx, |project, cx| {
6717 project.open_buffer((worktree_id, "file1"), cx)
6718 })
6719 .await
6720 .unwrap();
6721 assert_eq!(opened_buffer, buffer);
6722 }
6723
6724 #[gpui::test(retries = 5)]
6725 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6726 let dir = temp_tree(json!({
6727 "a": {
6728 "file1": "",
6729 "file2": "",
6730 "file3": "",
6731 },
6732 "b": {
6733 "c": {
6734 "file4": "",
6735 "file5": "",
6736 }
6737 }
6738 }));
6739
6740 let project = Project::test(Arc::new(RealFs), cx);
6741 let rpc = project.read_with(cx, |p, _| p.client.clone());
6742
6743 let (tree, _) = project
6744 .update(cx, |p, cx| {
6745 p.find_or_create_local_worktree(dir.path(), true, cx)
6746 })
6747 .await
6748 .unwrap();
6749 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6750
6751 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6752 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6753 async move { buffer.await.unwrap() }
6754 };
6755 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6756 tree.read_with(cx, |tree, _| {
6757 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
6759 .id
6760 })
6761 };
6762
6763 let buffer2 = buffer_for_path("a/file2", cx).await;
6764 let buffer3 = buffer_for_path("a/file3", cx).await;
6765 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6766 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6767
6768 let file2_id = id_for_path("a/file2", &cx);
6769 let file3_id = id_for_path("a/file3", &cx);
6770 let file4_id = id_for_path("b/c/file4", &cx);
6771
6772 // Wait for the initial scan.
6773 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6774 .await;
6775
6776 // Create a remote copy of this worktree.
6777 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6778 let (remote, load_task) = cx.update(|cx| {
6779 Worktree::remote(
6780 1,
6781 1,
6782 initial_snapshot.to_proto(&Default::default(), true),
6783 rpc.clone(),
6784 cx,
6785 )
6786 });
6787 load_task.await;
6788
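        // None of the buffers are dirty, since they have not been edited.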
6789 cx.read(|cx| {
6790 assert!(!buffer2.read(cx).is_dirty());
6791 assert!(!buffer3.read(cx).is_dirty());
6792 assert!(!buffer4.read(cx).is_dirty());
6793 assert!(!buffer5.read(cx).is_dirty());
6794 });
6795
6796 // Rename and delete files and directories.
6797 tree.flush_fs_events(&cx).await;
6798 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6799 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6800 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6801 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6802 tree.flush_fs_events(&cx).await;
6803
6804 let expected_paths = vec![
6805 "a",
6806 "a/file1",
6807 "a/file2.new",
6808 "b",
6809 "d",
6810 "d/file3",
6811 "d/file4",
6812 ];
6813
6814 cx.read(|app| {
6815 assert_eq!(
6816 tree.read(app)
6817 .paths()
6818 .map(|p| p.to_str().unwrap())
6819 .collect::<Vec<_>>(),
6820 expected_paths
6821 );
6822
6823 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6824 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6825 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6826
6827 assert_eq!(
6828 buffer2.read(app).file().unwrap().path().as_ref(),
6829 Path::new("a/file2.new")
6830 );
6831 assert_eq!(
6832 buffer3.read(app).file().unwrap().path().as_ref(),
6833 Path::new("d/file3")
6834 );
6835 assert_eq!(
6836 buffer4.read(app).file().unwrap().path().as_ref(),
6837 Path::new("d/file4")
6838 );
6839 assert_eq!(
6840 buffer5.read(app).file().unwrap().path().as_ref(),
6841 Path::new("b/c/file5")
6842 );
6843
6844 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6845 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6846 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6847 assert!(buffer5.read(app).file().unwrap().is_deleted());
6848 });
6849
6850 // Update the remote worktree. Check that it becomes consistent with the
6851 // local worktree.
6852 remote.update(cx, |remote, cx| {
6853 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6854 &initial_snapshot,
6855 1,
6856 1,
6857 true,
6858 );
6859 remote
6860 .as_remote_mut()
6861 .unwrap()
6862 .snapshot
6863 .apply_remote_update(update_message)
6864 .unwrap();
6865
6866 assert_eq!(
6867 remote
6868 .paths()
6869 .map(|p| p.to_str().unwrap())
6870 .collect::<Vec<_>>(),
6871 expected_paths
6872 );
6873 });
6874 }
6875
6876 #[gpui::test]
6877 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6878 let fs = FakeFs::new(cx.background());
6879 fs.insert_tree(
6880 "/the-dir",
6881 json!({
6882 "a.txt": "a-contents",
6883 "b.txt": "b-contents",
6884 }),
6885 )
6886 .await;
6887
6888 let project = Project::test(fs.clone(), cx);
6889 let worktree_id = project
6890 .update(cx, |p, cx| {
6891 p.find_or_create_local_worktree("/the-dir", true, cx)
6892 })
6893 .await
6894 .unwrap()
6895 .0
6896 .read_with(cx, |tree, _| tree.id());
6897
6898 // Spawn multiple tasks to open paths, repeating some paths.
6899 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6900 (
6901 p.open_buffer((worktree_id, "a.txt"), cx),
6902 p.open_buffer((worktree_id, "b.txt"), cx),
6903 p.open_buffer((worktree_id, "a.txt"), cx),
6904 )
6905 });
6906
6907 let buffer_a_1 = buffer_a_1.await.unwrap();
6908 let buffer_a_2 = buffer_a_2.await.unwrap();
6909 let buffer_b = buffer_b.await.unwrap();
6910 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6911 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6912
6913 // There is only one buffer per path.
6914 let buffer_a_id = buffer_a_1.id();
6915 assert_eq!(buffer_a_2.id(), buffer_a_id);
6916
6917 // Open the same path again while it is still open.
6918 drop(buffer_a_1);
6919 let buffer_a_3 = project
6920 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6921 .await
6922 .unwrap();
6923
6924 // There's still only one buffer per path.
6925 assert_eq!(buffer_a_3.id(), buffer_a_id);
6926 }
6927
6928 #[gpui::test]
6929 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6930 use std::fs;
6931
6932 let dir = temp_tree(json!({
6933 "file1": "abc",
6934 "file2": "def",
6935 "file3": "ghi",
6936 }));
6937
6938 let project = Project::test(Arc::new(RealFs), cx);
6939 let (worktree, _) = project
6940 .update(cx, |p, cx| {
6941 p.find_or_create_local_worktree(dir.path(), true, cx)
6942 })
6943 .await
6944 .unwrap();
6945 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6946
6947 worktree.flush_fs_events(&cx).await;
6948 worktree
6949 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6950 .await;
6951
6952 let buffer1 = project
6953 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6954 .await
6955 .unwrap();
6956 let events = Rc::new(RefCell::new(Vec::new()));
6957
6958 // initially, the buffer isn't dirty.
6959 buffer1.update(cx, |buffer, cx| {
6960 cx.subscribe(&buffer1, {
6961 let events = events.clone();
6962 move |_, _, event, _| match event {
6963 BufferEvent::Operation(_) => {}
6964 _ => events.borrow_mut().push(event.clone()),
6965 }
6966 })
6967 .detach();
6968
6969 assert!(!buffer.is_dirty());
6970 assert!(events.borrow().is_empty());
6971
6972 buffer.edit(vec![1..2], "", cx);
6973 });
6974
6975 // after the first edit, the buffer is dirty, and emits a dirtied event.
6976 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6978 assert!(buffer.is_dirty());
6979 assert_eq!(
6980 *events.borrow(),
6981 &[language::Event::Edited, language::Event::Dirtied]
6982 );
6983 events.borrow_mut().clear();
6984 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6985 });
6986
6987 // after saving, the buffer is not dirty, and emits a saved event.
6988 buffer1.update(cx, |buffer, cx| {
6989 assert!(!buffer.is_dirty());
6990 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6991 events.borrow_mut().clear();
6992
6993 buffer.edit(vec![1..1], "B", cx);
6994 buffer.edit(vec![2..2], "D", cx);
6995 });
6996
6997 // after editing again, the buffer is dirty, and emits another dirty event.
6998 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
7000 assert!(buffer.is_dirty());
7001 assert_eq!(
7002 *events.borrow(),
7003 &[
7004 language::Event::Edited,
7005 language::Event::Dirtied,
7006 language::Event::Edited,
7007 ],
7008 );
7009 events.borrow_mut().clear();
7010
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7013 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
7015 assert!(buffer.is_dirty());
7016 });
7017
7018 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7019
7020 // When a file is deleted, the buffer is considered dirty.
7021 let events = Rc::new(RefCell::new(Vec::new()));
7022 let buffer2 = project
7023 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
7024 .await
7025 .unwrap();
7026 buffer2.update(cx, |_, cx| {
7027 cx.subscribe(&buffer2, {
7028 let events = events.clone();
7029 move |_, _, event, _| events.borrow_mut().push(event.clone())
7030 })
7031 .detach();
7032 });
7033
7034 fs::remove_file(dir.path().join("file2")).unwrap();
7035 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7036 assert_eq!(
7037 *events.borrow(),
7038 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7039 );
7040
7041 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7042 let events = Rc::new(RefCell::new(Vec::new()));
7043 let buffer3 = project
7044 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
7045 .await
7046 .unwrap();
7047 buffer3.update(cx, |_, cx| {
7048 cx.subscribe(&buffer3, {
7049 let events = events.clone();
7050 move |_, _, event, _| events.borrow_mut().push(event.clone())
7051 })
7052 .detach();
7053 });
7054
7055 worktree.flush_fs_events(&cx).await;
7056 buffer3.update(cx, |buffer, cx| {
7057 buffer.edit(Some(0..0), "x", cx);
7058 });
7059 events.borrow_mut().clear();
7060 fs::remove_file(dir.path().join("file3")).unwrap();
7061 buffer3
7062 .condition(&cx, |_, _| !events.borrow().is_empty())
7063 .await;
7064 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7065 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7066 }
7067
7068 #[gpui::test]
7069 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7070 use std::fs;
7071
7072 let initial_contents = "aaa\nbbbbb\nc\n";
7073 let dir = temp_tree(json!({ "the-file": initial_contents }));
7074
7075 let project = Project::test(Arc::new(RealFs), cx);
7076 let (worktree, _) = project
7077 .update(cx, |p, cx| {
7078 p.find_or_create_local_worktree(dir.path(), true, cx)
7079 })
7080 .await
7081 .unwrap();
7082 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7083
7084 worktree
7085 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
7086 .await;
7087
7088 let abs_path = dir.path().join("the-file");
7089 let buffer = project
7090 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
7091 .await
7092 .unwrap();
7093
7094 // TODO
7095 // Add a cursor on each row.
7096 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
7097 // assert!(!buffer.is_dirty());
7098 // buffer.add_selection_set(
7099 // &(0..3)
7100 // .map(|row| Selection {
7101 // id: row as usize,
7102 // start: Point::new(row, 1),
7103 // end: Point::new(row, 1),
7104 // reversed: false,
7105 // goal: SelectionGoal::None,
7106 // })
7107 // .collect::<Vec<_>>(),
7108 // cx,
7109 // )
7110 // });
7111
7112 // Change the file on disk, adding two new lines of text, and removing
7113 // one line.
7114 buffer.read_with(cx, |buffer, _| {
7115 assert!(!buffer.is_dirty());
7116 assert!(!buffer.has_conflict());
7117 });
7118 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7119 fs::write(&abs_path, new_contents).unwrap();
7120
7121 // Because the buffer was not modified, it is reloaded from disk. Its
7122 // contents are edited according to the diff between the old and new
7123 // file contents.
7124 buffer
7125 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7126 .await;
7127
7128 buffer.update(cx, |buffer, _| {
7129 assert_eq!(buffer.text(), new_contents);
7130 assert!(!buffer.is_dirty());
7131 assert!(!buffer.has_conflict());
7132
7133 // TODO
7134 // let cursor_positions = buffer
7135 // .selection_set(selection_set_id)
7136 // .unwrap()
7137 // .selections::<Point>(&*buffer)
7138 // .map(|selection| {
7139 // assert_eq!(selection.start, selection.end);
7140 // selection.start
7141 // })
7142 // .collect::<Vec<_>>();
7143 // assert_eq!(
7144 // cursor_positions,
7145 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7146 // );
7147 });
7148
7149 // Modify the buffer
7150 buffer.update(cx, |buffer, cx| {
7151 buffer.edit(vec![0..0], " ", cx);
7152 assert!(buffer.is_dirty());
7153 assert!(!buffer.has_conflict());
7154 });
7155
7156 // Change the file on disk again, adding blank lines to the beginning.
7157 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7158
7159 // Because the buffer is modified, it doesn't reload from disk, but is
7160 // marked as having a conflict.
7161 buffer
7162 .condition(&cx, |buffer, _| buffer.has_conflict())
7163 .await;
7164 }
7165
7166 #[gpui::test]
7167 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7168 cx.foreground().forbid_parking();
7169
7170 let fs = FakeFs::new(cx.background());
7171 fs.insert_tree(
7172 "/the-dir",
7173 json!({
7174 "a.rs": "
7175 fn foo(mut v: Vec<usize>) {
7176 for x in &v {
7177 v.push(1);
7178 }
7179 }
7180 "
7181 .unindent(),
7182 }),
7183 )
7184 .await;
7185
7186 let project = Project::test(fs.clone(), cx);
7187 let (worktree, _) = project
7188 .update(cx, |p, cx| {
7189 p.find_or_create_local_worktree("/the-dir", true, cx)
7190 })
7191 .await
7192 .unwrap();
7193 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7194
7195 let buffer = project
7196 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7197 .await
7198 .unwrap();
7199
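        // Publish diagnostics whose `relatedInformation` entries cross-reference each
        // other, so that supplementary hints are grouped with their primary diagnostics.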
        let buffer_uri = lsp::Url::from_file_path("/the-dir/a.rs").unwrap();
7201 let message = lsp::PublishDiagnosticsParams {
7202 uri: buffer_uri.clone(),
7203 diagnostics: vec![
7204 lsp::Diagnostic {
7205 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7206 severity: Some(DiagnosticSeverity::WARNING),
7207 message: "error 1".to_string(),
7208 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7209 location: lsp::Location {
7210 uri: buffer_uri.clone(),
7211 range: lsp::Range::new(
7212 lsp::Position::new(1, 8),
7213 lsp::Position::new(1, 9),
7214 ),
7215 },
7216 message: "error 1 hint 1".to_string(),
7217 }]),
7218 ..Default::default()
7219 },
7220 lsp::Diagnostic {
7221 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7222 severity: Some(DiagnosticSeverity::HINT),
7223 message: "error 1 hint 1".to_string(),
7224 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7225 location: lsp::Location {
7226 uri: buffer_uri.clone(),
7227 range: lsp::Range::new(
7228 lsp::Position::new(1, 8),
7229 lsp::Position::new(1, 9),
7230 ),
7231 },
7232 message: "original diagnostic".to_string(),
7233 }]),
7234 ..Default::default()
7235 },
7236 lsp::Diagnostic {
7237 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7238 severity: Some(DiagnosticSeverity::ERROR),
7239 message: "error 2".to_string(),
7240 related_information: Some(vec![
7241 lsp::DiagnosticRelatedInformation {
7242 location: lsp::Location {
7243 uri: buffer_uri.clone(),
7244 range: lsp::Range::new(
7245 lsp::Position::new(1, 13),
7246 lsp::Position::new(1, 15),
7247 ),
7248 },
7249 message: "error 2 hint 1".to_string(),
7250 },
7251 lsp::DiagnosticRelatedInformation {
7252 location: lsp::Location {
7253 uri: buffer_uri.clone(),
7254 range: lsp::Range::new(
7255 lsp::Position::new(1, 13),
7256 lsp::Position::new(1, 15),
7257 ),
7258 },
7259 message: "error 2 hint 2".to_string(),
7260 },
7261 ]),
7262 ..Default::default()
7263 },
7264 lsp::Diagnostic {
7265 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7266 severity: Some(DiagnosticSeverity::HINT),
7267 message: "error 2 hint 1".to_string(),
7268 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7269 location: lsp::Location {
7270 uri: buffer_uri.clone(),
7271 range: lsp::Range::new(
7272 lsp::Position::new(2, 8),
7273 lsp::Position::new(2, 17),
7274 ),
7275 },
7276 message: "original diagnostic".to_string(),
7277 }]),
7278 ..Default::default()
7279 },
7280 lsp::Diagnostic {
7281 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7282 severity: Some(DiagnosticSeverity::HINT),
7283 message: "error 2 hint 2".to_string(),
7284 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7285 location: lsp::Location {
7286 uri: buffer_uri.clone(),
7287 range: lsp::Range::new(
7288 lsp::Position::new(2, 8),
7289 lsp::Position::new(2, 17),
7290 ),
7291 },
7292 message: "original diagnostic".to_string(),
7293 }]),
7294 ..Default::default()
7295 },
7296 ],
7297 version: None,
7298 };
7299
7300 project
7301 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7302 .unwrap();
7303 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7304
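        // Diagnostics connected by related information share a group: `error 1` and its
        // hint form group 0, while `error 2` and its hints form group 1.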
7305 assert_eq!(
7306 buffer
7307 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7308 .collect::<Vec<_>>(),
7309 &[
7310 DiagnosticEntry {
7311 range: Point::new(1, 8)..Point::new(1, 9),
7312 diagnostic: Diagnostic {
7313 severity: DiagnosticSeverity::WARNING,
7314 message: "error 1".to_string(),
7315 group_id: 0,
7316 is_primary: true,
7317 ..Default::default()
7318 }
7319 },
7320 DiagnosticEntry {
7321 range: Point::new(1, 8)..Point::new(1, 9),
7322 diagnostic: Diagnostic {
7323 severity: DiagnosticSeverity::HINT,
7324 message: "error 1 hint 1".to_string(),
7325 group_id: 0,
7326 is_primary: false,
7327 ..Default::default()
7328 }
7329 },
7330 DiagnosticEntry {
7331 range: Point::new(1, 13)..Point::new(1, 15),
7332 diagnostic: Diagnostic {
7333 severity: DiagnosticSeverity::HINT,
7334 message: "error 2 hint 1".to_string(),
7335 group_id: 1,
7336 is_primary: false,
7337 ..Default::default()
7338 }
7339 },
7340 DiagnosticEntry {
7341 range: Point::new(1, 13)..Point::new(1, 15),
7342 diagnostic: Diagnostic {
7343 severity: DiagnosticSeverity::HINT,
7344 message: "error 2 hint 2".to_string(),
7345 group_id: 1,
7346 is_primary: false,
7347 ..Default::default()
7348 }
7349 },
7350 DiagnosticEntry {
7351 range: Point::new(2, 8)..Point::new(2, 17),
7352 diagnostic: Diagnostic {
7353 severity: DiagnosticSeverity::ERROR,
7354 message: "error 2".to_string(),
7355 group_id: 1,
7356 is_primary: true,
7357 ..Default::default()
7358 }
7359 }
7360 ]
7361 );
7362
7363 assert_eq!(
7364 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7365 &[
7366 DiagnosticEntry {
7367 range: Point::new(1, 8)..Point::new(1, 9),
7368 diagnostic: Diagnostic {
7369 severity: DiagnosticSeverity::WARNING,
7370 message: "error 1".to_string(),
7371 group_id: 0,
7372 is_primary: true,
7373 ..Default::default()
7374 }
7375 },
7376 DiagnosticEntry {
7377 range: Point::new(1, 8)..Point::new(1, 9),
7378 diagnostic: Diagnostic {
7379 severity: DiagnosticSeverity::HINT,
7380 message: "error 1 hint 1".to_string(),
7381 group_id: 0,
7382 is_primary: false,
7383 ..Default::default()
7384 }
7385 },
7386 ]
7387 );
7388 assert_eq!(
7389 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7390 &[
7391 DiagnosticEntry {
7392 range: Point::new(1, 13)..Point::new(1, 15),
7393 diagnostic: Diagnostic {
7394 severity: DiagnosticSeverity::HINT,
7395 message: "error 2 hint 1".to_string(),
7396 group_id: 1,
7397 is_primary: false,
7398 ..Default::default()
7399 }
7400 },
7401 DiagnosticEntry {
7402 range: Point::new(1, 13)..Point::new(1, 15),
7403 diagnostic: Diagnostic {
7404 severity: DiagnosticSeverity::HINT,
7405 message: "error 2 hint 2".to_string(),
7406 group_id: 1,
7407 is_primary: false,
7408 ..Default::default()
7409 }
7410 },
7411 DiagnosticEntry {
7412 range: Point::new(2, 8)..Point::new(2, 17),
7413 diagnostic: Diagnostic {
7414 severity: DiagnosticSeverity::ERROR,
7415 message: "error 2".to_string(),
7416 group_id: 1,
7417 is_primary: true,
7418 ..Default::default()
7419 }
7420 }
7421 ]
7422 );
7423 }
7424
7425 #[gpui::test]
7426 async fn test_rename(cx: &mut gpui::TestAppContext) {
7427 cx.foreground().forbid_parking();
7428
7429 let mut language = Language::new(
7430 LanguageConfig {
7431 name: "Rust".into(),
7432 path_suffixes: vec!["rs".to_string()],
7433 ..Default::default()
7434 },
7435 Some(tree_sitter_rust::language()),
7436 );
7437 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7438
7439 let fs = FakeFs::new(cx.background());
7440 fs.insert_tree(
7441 "/dir",
7442 json!({
7443 "one.rs": "const ONE: usize = 1;",
7444 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7445 }),
7446 )
7447 .await;
7448
7449 let project = Project::test(fs.clone(), cx);
7450 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7451
7452 let (tree, _) = project
7453 .update(cx, |project, cx| {
7454 project.find_or_create_local_worktree("/dir", true, cx)
7455 })
7456 .await
7457 .unwrap();
7458 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7459 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7460 .await;
7461
7462 let buffer = project
7463 .update(cx, |project, cx| {
7464 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7465 })
7466 .await
7467 .unwrap();
7468
7469 let fake_server = fake_servers.next().await.unwrap();
7470
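        // Prepare a rename at the position of `ONE`; the server responds with the
        // range of the symbol to be renamed.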
7471 let response = project.update(cx, |project, cx| {
7472 project.prepare_rename(buffer.clone(), 7, cx)
7473 });
7474 fake_server
7475 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7476 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7477 assert_eq!(params.position, lsp::Position::new(0, 7));
7478 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7479 lsp::Position::new(0, 6),
7480 lsp::Position::new(0, 9),
7481 ))))
7482 })
7483 .next()
7484 .await
7485 .unwrap();
7486 let range = response.await.unwrap().unwrap();
7487 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7488 assert_eq!(range, 6..9);
7489
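        // Perform the rename. The server's workspace edit spans both files, so the
        // resulting project transaction contains two edited buffers.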
7490 let response = project.update(cx, |project, cx| {
7491 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7492 });
7493 fake_server
7494 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7495 assert_eq!(
7496 params.text_document_position.text_document.uri.as_str(),
7497 "file:///dir/one.rs"
7498 );
7499 assert_eq!(
7500 params.text_document_position.position,
7501 lsp::Position::new(0, 7)
7502 );
7503 assert_eq!(params.new_name, "THREE");
7504 Ok(Some(lsp::WorkspaceEdit {
7505 changes: Some(
7506 [
7507 (
7508 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7509 vec![lsp::TextEdit::new(
7510 lsp::Range::new(
7511 lsp::Position::new(0, 6),
7512 lsp::Position::new(0, 9),
7513 ),
7514 "THREE".to_string(),
7515 )],
7516 ),
7517 (
7518 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7519 vec![
7520 lsp::TextEdit::new(
7521 lsp::Range::new(
7522 lsp::Position::new(0, 24),
7523 lsp::Position::new(0, 27),
7524 ),
7525 "THREE".to_string(),
7526 ),
7527 lsp::TextEdit::new(
7528 lsp::Range::new(
7529 lsp::Position::new(0, 35),
7530 lsp::Position::new(0, 38),
7531 ),
7532 "THREE".to_string(),
7533 ),
7534 ],
7535 ),
7536 ]
7537 .into_iter()
7538 .collect(),
7539 ),
7540 ..Default::default()
7541 }))
7542 })
7543 .next()
7544 .await
7545 .unwrap();
7546 let mut transaction = response.await.unwrap().0;
7547 assert_eq!(transaction.len(), 2);
7548 assert_eq!(
7549 transaction
7550 .remove_entry(&buffer)
7551 .unwrap()
7552 .0
7553 .read_with(cx, |buffer, _| buffer.text()),
7554 "const THREE: usize = 1;"
7555 );
7556 assert_eq!(
7557 transaction
7558 .into_keys()
7559 .next()
7560 .unwrap()
7561 .read_with(cx, |buffer, _| buffer.text()),
7562 "const TWO: usize = one::THREE + one::THREE;"
7563 );
7564 }
7565
7566 #[gpui::test]
7567 async fn test_search(cx: &mut gpui::TestAppContext) {
7568 let fs = FakeFs::new(cx.background());
7569 fs.insert_tree(
7570 "/dir",
7571 json!({
7572 "one.rs": "const ONE: usize = 1;",
7573 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7574 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7575 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7576 }),
7577 )
7578 .await;
7579 let project = Project::test(fs.clone(), cx);
7580 let (tree, _) = project
7581 .update(cx, |project, cx| {
7582 project.find_or_create_local_worktree("/dir", true, cx)
7583 })
7584 .await
7585 .unwrap();
7586 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7587 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7588 .await;
7589
7590 assert_eq!(
7591 search(&project, SearchQuery::text("TWO", false, true), cx)
7592 .await
7593 .unwrap(),
7594 HashMap::from_iter([
7595 ("two.rs".to_string(), vec![6..9]),
7596 ("three.rs".to_string(), vec![37..40])
7597 ])
7598 );
7599
7600 let buffer_4 = project
7601 .update(cx, |project, cx| {
7602 project.open_buffer((worktree_id, "four.rs"), cx)
7603 })
7604 .await
7605 .unwrap();
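        // Edit an open buffer so that its contents differ from what is on disk; the
        // search results should reflect the unsaved, in-memory text.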
7606 buffer_4.update(cx, |buffer, cx| {
7607 buffer.edit([20..28, 31..43], "two::TWO", cx);
7608 });
7609
7610 assert_eq!(
7611 search(&project, SearchQuery::text("TWO", false, true), cx)
7612 .await
7613 .unwrap(),
7614 HashMap::from_iter([
7615 ("two.rs".to_string(), vec![6..9]),
7616 ("three.rs".to_string(), vec![37..40]),
7617 ("four.rs".to_string(), vec![25..28, 36..39])
7618 ])
7619 );
7620
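        // Runs a project-wide search and flattens the results into a map from file
        // path to matching offset ranges.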
7621 async fn search(
7622 project: &ModelHandle<Project>,
7623 query: SearchQuery,
7624 cx: &mut gpui::TestAppContext,
7625 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7626 let results = project
7627 .update(cx, |project, cx| project.search(query, cx))
7628 .await?;
7629
7630 Ok(results
7631 .into_iter()
7632 .map(|(buffer, ranges)| {
7633 buffer.read_with(cx, |buffer, _| {
7634 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7635 let ranges = ranges
7636 .into_iter()
7637 .map(|range| range.to_offset(buffer))
7638 .collect::<Vec<_>>();
7639 (path, ranges)
7640 })
7641 })
7642 .collect())
7643 }
7644 }
7645}