1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
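/// An entity that can report the project entry it corresponds to, if any.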
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
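/// A collection of worktrees, open buffers, language servers, and
/// collaborators. A project is either local, operating directly on the file
/// system through `Fs`, or remote, mirroring a project shared by another peer
/// over the client connection.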
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
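/// The project's handle to an open buffer. Buffers are held strongly while the
/// project is shared or remote and weakly otherwise, and `Loading` collects
/// operations that arrive for a buffer before it has finished opening so they
/// can be applied once it is registered.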
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
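/// A strong or weak handle to one of the project's worktrees. Sharing the
/// project upgrades all handles to strong ones; when sharing stops, handles to
/// worktrees that are not visible are downgraded again so those worktrees can
/// be dropped once unused.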
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
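/// Distinguishes a local project (the authoritative copy, which may be shared
/// and is registered with the server to obtain a remote id) from a remote
/// project (a guest replica of a project shared by another peer).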
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
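/// Events emitted by a `Project` to notify observers of changes to its state.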
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
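/// A path to a file or directory, relative to the root of a particular worktree.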
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
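/// A count of the primary error and warning diagnostics associated with a path.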
157#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 };
197
198 for entry in diagnostics {
199 if entry.diagnostic.is_primary {
200 match entry.diagnostic.severity {
201 DiagnosticSeverity::ERROR => this.error_count += 1,
202 DiagnosticSeverity::WARNING => this.warning_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn is_empty(&self) -> bool {
212 self.error_count == 0 && self.warning_count == 0
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 }
221 }
222}
223
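/// An identifier for an entry within one of the project's worktrees, allocated
/// from a shared atomic counter so that ids are unique across worktrees.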
224#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
225pub struct ProjectEntryId(usize);
226
227impl ProjectEntryId {
228 pub fn new(counter: &AtomicUsize) -> Self {
229 Self(counter.fetch_add(1, SeqCst))
230 }
231
232 pub fn from_proto(id: u64) -> Self {
233 Self(id as usize)
234 }
235
236 pub fn to_proto(&self) -> u64 {
237 self.0 as u64
238 }
239
240 pub fn to_usize(&self) -> usize {
241 self.0
242 }
243}
244
245impl Project {
246 pub fn init(client: &Arc<Client>) {
247 client.add_model_message_handler(Self::handle_add_collaborator);
248 client.add_model_message_handler(Self::handle_buffer_reloaded);
249 client.add_model_message_handler(Self::handle_buffer_saved);
250 client.add_model_message_handler(Self::handle_start_language_server);
251 client.add_model_message_handler(Self::handle_update_language_server);
252 client.add_model_message_handler(Self::handle_remove_collaborator);
253 client.add_model_message_handler(Self::handle_register_worktree);
254 client.add_model_message_handler(Self::handle_unregister_worktree);
255 client.add_model_message_handler(Self::handle_unshare_project);
256 client.add_model_message_handler(Self::handle_update_buffer_file);
257 client.add_model_message_handler(Self::handle_update_buffer);
258 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
259 client.add_model_message_handler(Self::handle_update_worktree);
260 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
261 client.add_model_request_handler(Self::handle_apply_code_action);
262 client.add_model_request_handler(Self::handle_reload_buffers);
263 client.add_model_request_handler(Self::handle_format_buffers);
264 client.add_model_request_handler(Self::handle_get_code_actions);
265 client.add_model_request_handler(Self::handle_get_completions);
266 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
269 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
271 client.add_model_request_handler(Self::handle_search_project);
272 client.add_model_request_handler(Self::handle_get_project_symbols);
273 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
274 client.add_model_request_handler(Self::handle_open_buffer_by_id);
275 client.add_model_request_handler(Self::handle_open_buffer_by_path);
276 client.add_model_request_handler(Self::handle_save_buffer);
277 }
278
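    /// Creates a new, unshared project that operates directly on the local
    /// file system and registers itself with the server whenever the client
    /// connects.
    ///
    /// A minimal construction sketch (mirroring the `test` helper below and
    /// assuming a client, user store, language registry, and `Fs`
    /// implementation are already in hand):
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```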
279 pub fn local(
280 client: Arc<Client>,
281 user_store: ModelHandle<UserStore>,
282 languages: Arc<LanguageRegistry>,
283 fs: Arc<dyn Fs>,
284 cx: &mut MutableAppContext,
285 ) -> ModelHandle<Self> {
286 cx.add_model(|cx: &mut ModelContext<Self>| {
287 let (remote_id_tx, remote_id_rx) = watch::channel();
288 let _maintain_remote_id_task = cx.spawn_weak({
289 let rpc = client.clone();
290 move |this, mut cx| {
291 async move {
292 let mut status = rpc.status();
293 while let Some(status) = status.next().await {
294 if let Some(this) = this.upgrade(&cx) {
295 if status.is_connected() {
296 this.update(&mut cx, |this, cx| this.register(cx)).await?;
297 } else {
298 this.update(&mut cx, |this, cx| this.unregister(cx));
299 }
300 }
301 }
302 Ok(())
303 }
304 .log_err()
305 }
306 });
307
308 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
309 Self {
310 worktrees: Default::default(),
311 collaborators: Default::default(),
312 opened_buffers: Default::default(),
313 shared_buffers: Default::default(),
314 loading_buffers: Default::default(),
315 loading_local_worktrees: Default::default(),
316 buffer_snapshots: Default::default(),
317 client_state: ProjectClientState::Local {
318 is_shared: false,
319 remote_id_tx,
320 remote_id_rx,
321 _maintain_remote_id_task,
322 },
323 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
324 subscriptions: Vec::new(),
325 active_entry: None,
326 languages,
327 client,
328 user_store,
329 fs,
330 next_entry_id: Default::default(),
331 language_servers: Default::default(),
332 started_language_servers: Default::default(),
333 language_server_statuses: Default::default(),
334 last_workspace_edits_by_language_server: Default::default(),
335 language_server_settings: Default::default(),
336 next_language_server_id: 0,
337 nonce: StdRng::from_entropy().gen(),
338 }
339 })
340 }
341
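    /// Joins a project that another peer is sharing, identified by its remote
    /// id: connects the client, requests `JoinProject`, builds remote replicas
    /// of the host's worktrees, and loads the collaborators reported in the
    /// response.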
342 pub async fn remote(
343 remote_id: u64,
344 client: Arc<Client>,
345 user_store: ModelHandle<UserStore>,
346 languages: Arc<LanguageRegistry>,
347 fs: Arc<dyn Fs>,
348 cx: &mut AsyncAppContext,
349 ) -> Result<ModelHandle<Self>> {
350 client.authenticate_and_connect(true, &cx).await?;
351
352 let response = client
353 .request(proto::JoinProject {
354 project_id: remote_id,
355 })
356 .await?;
357
358 let replica_id = response.replica_id as ReplicaId;
359
360 let mut worktrees = Vec::new();
361 for worktree in response.worktrees {
362 let (worktree, load_task) = cx
363 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
364 worktrees.push(worktree);
365 load_task.detach();
366 }
367
368 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
369 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
370 let mut this = Self {
371 worktrees: Vec::new(),
372 loading_buffers: Default::default(),
373 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
374 shared_buffers: Default::default(),
375 loading_local_worktrees: Default::default(),
376 active_entry: None,
377 collaborators: Default::default(),
378 languages,
379 user_store: user_store.clone(),
380 fs,
381 next_entry_id: Default::default(),
382 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
383 client: client.clone(),
384 client_state: ProjectClientState::Remote {
385 sharing_has_stopped: false,
386 remote_id,
387 replica_id,
388 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
389 async move {
390 let mut status = client.status();
391 let is_connected =
392 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change in the status means we were momentarily disconnected.
394 if !is_connected || status.next().await.is_some() {
395 if let Some(this) = this.upgrade(&cx) {
396 this.update(&mut cx, |this, cx| this.project_unshared(cx))
397 }
398 }
399 Ok(())
400 }
401 .log_err()
402 }),
403 },
404 language_servers: Default::default(),
405 started_language_servers: Default::default(),
406 language_server_settings: Default::default(),
407 language_server_statuses: response
408 .language_servers
409 .into_iter()
410 .map(|server| {
411 (
412 server.id as usize,
413 LanguageServerStatus {
414 name: server.name,
415 pending_work: Default::default(),
416 pending_diagnostic_updates: 0,
417 },
418 )
419 })
420 .collect(),
421 last_workspace_edits_by_language_server: Default::default(),
422 next_language_server_id: 0,
423 opened_buffers: Default::default(),
424 buffer_snapshots: Default::default(),
425 nonce: StdRng::from_entropy().gen(),
426 };
427 for worktree in worktrees {
428 this.add_worktree(&worktree, cx);
429 }
430 this
431 });
432
433 let user_ids = response
434 .collaborators
435 .iter()
436 .map(|peer| peer.user_id)
437 .collect();
438 user_store
439 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
440 .await?;
441 let mut collaborators = HashMap::default();
442 for message in response.collaborators {
443 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
444 collaborators.insert(collaborator.peer_id, collaborator);
445 }
446
447 this.update(cx, |this, _| {
448 this.collaborators = collaborators;
449 });
450
451 Ok(this)
452 }
453
454 #[cfg(any(test, feature = "test-support"))]
455 pub async fn test(
456 fs: Arc<dyn Fs>,
457 root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
458 cx: &mut gpui::TestAppContext,
459 ) -> ModelHandle<Project> {
460 let languages = Arc::new(LanguageRegistry::test());
461 let http_client = client::test::FakeHttpClient::with_404_response();
462 let client = client::Client::new(http_client.clone());
463 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
464 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
465 for path in root_paths {
466 let (tree, _) = project
467 .update(cx, |project, cx| {
468 project.find_or_create_local_worktree(path, true, cx)
469 })
470 .await
471 .unwrap();
472 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
473 .await;
474 }
475 project
476 }
477
478 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
479 self.opened_buffers
480 .get(&remote_id)
481 .and_then(|buffer| buffer.upgrade(cx))
482 }
483
484 pub fn languages(&self) -> &Arc<LanguageRegistry> {
485 &self.languages
486 }
487
488 #[cfg(any(test, feature = "test-support"))]
489 pub fn check_invariants(&self, cx: &AppContext) {
490 if self.is_local() {
491 let mut worktree_root_paths = HashMap::default();
492 for worktree in self.worktrees(cx) {
493 let worktree = worktree.read(cx);
494 let abs_path = worktree.as_local().unwrap().abs_path().clone();
495 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
496 assert_eq!(
497 prev_worktree_id,
498 None,
499 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
500 abs_path,
501 worktree.id(),
502 prev_worktree_id
503 )
504 }
505 } else {
506 let replica_id = self.replica_id();
507 for buffer in self.opened_buffers.values() {
508 if let Some(buffer) = buffer.upgrade(cx) {
509 let buffer = buffer.read(cx);
510 assert_eq!(
511 buffer.deferred_ops_len(),
512 0,
513 "replica {}, buffer {} has deferred operations",
514 replica_id,
515 buffer.remote_id()
516 );
517 }
518 }
519 }
520 }
521
522 #[cfg(any(test, feature = "test-support"))]
523 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
524 let path = path.into();
525 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
526 self.opened_buffers.iter().any(|(_, buffer)| {
527 if let Some(buffer) = buffer.upgrade(cx) {
528 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
529 if file.worktree == worktree && file.path() == &path.path {
530 return true;
531 }
532 }
533 }
534 false
535 })
536 } else {
537 false
538 }
539 }
540
541 pub fn fs(&self) -> &Arc<dyn Fs> {
542 &self.fs
543 }
544
545 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
546 self.unshare(cx);
547 for worktree in &self.worktrees {
548 if let Some(worktree) = worktree.upgrade(cx) {
549 worktree.update(cx, |worktree, _| {
550 worktree.as_local_mut().unwrap().unregister();
551 });
552 }
553 }
554
555 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
556 *remote_id_tx.borrow_mut() = None;
557 }
558
559 self.subscriptions.clear();
560 }
561
562 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
563 self.unregister(cx);
564
565 let response = self.client.request(proto::RegisterProject {});
566 cx.spawn(|this, mut cx| async move {
567 let remote_id = response.await?.project_id;
568
569 let mut registrations = Vec::new();
570 this.update(&mut cx, |this, cx| {
571 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
572 *remote_id_tx.borrow_mut() = Some(remote_id);
573 }
574
575 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
576
577 this.subscriptions
578 .push(this.client.add_model_for_remote_entity(remote_id, cx));
579
580 for worktree in &this.worktrees {
581 if let Some(worktree) = worktree.upgrade(cx) {
582 registrations.push(worktree.update(cx, |worktree, cx| {
583 let worktree = worktree.as_local_mut().unwrap();
584 worktree.register(remote_id, cx)
585 }));
586 }
587 }
588 });
589
590 futures::future::try_join_all(registrations).await?;
591 Ok(())
592 })
593 }
594
595 pub fn remote_id(&self) -> Option<u64> {
596 match &self.client_state {
597 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
598 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
599 }
600 }
601
602 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
603 let mut id = None;
604 let mut watch = None;
605 match &self.client_state {
606 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
607 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
608 }
609
610 async move {
611 if let Some(id) = id {
612 return id;
613 }
614 let mut watch = watch.unwrap();
615 loop {
616 let id = *watch.borrow();
617 if let Some(id) = id {
618 return id;
619 }
620 watch.next().await;
621 }
622 }
623 }
624
625 pub fn replica_id(&self) -> ReplicaId {
626 match &self.client_state {
627 ProjectClientState::Local { .. } => 0,
628 ProjectClientState::Remote { replica_id, .. } => *replica_id,
629 }
630 }
631
632 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
633 &self.collaborators
634 }
635
636 pub fn worktrees<'a>(
637 &'a self,
638 cx: &'a AppContext,
639 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
640 self.worktrees
641 .iter()
642 .filter_map(move |worktree| worktree.upgrade(cx))
643 }
644
645 pub fn visible_worktrees<'a>(
646 &'a self,
647 cx: &'a AppContext,
648 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
649 self.worktrees.iter().filter_map(|worktree| {
650 worktree.upgrade(cx).and_then(|worktree| {
651 if worktree.read(cx).is_visible() {
652 Some(worktree)
653 } else {
654 None
655 }
656 })
657 })
658 }
659
660 pub fn worktree_for_id(
661 &self,
662 id: WorktreeId,
663 cx: &AppContext,
664 ) -> Option<ModelHandle<Worktree>> {
665 self.worktrees(cx)
666 .find(|worktree| worktree.read(cx).id() == id)
667 }
668
669 pub fn worktree_for_entry(
670 &self,
671 entry_id: ProjectEntryId,
672 cx: &AppContext,
673 ) -> Option<ModelHandle<Worktree>> {
674 self.worktrees(cx)
675 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
676 }
677
678 pub fn worktree_id_for_entry(
679 &self,
680 entry_id: ProjectEntryId,
681 cx: &AppContext,
682 ) -> Option<WorktreeId> {
683 self.worktree_for_entry(entry_id, cx)
684 .map(|worktree| worktree.read(cx).id())
685 }
686
687 pub fn can_share(&self, cx: &AppContext) -> bool {
688 self.is_local() && self.visible_worktrees(cx).next().is_some()
689 }
690
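    /// Shares this local project with collaborators: every open buffer and
    /// worktree handle is upgraded to a strong handle so shared state stays
    /// alive, the server is told via `ShareProject`, and each worktree is then
    /// shared in turn.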
691 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
692 let rpc = self.client.clone();
693 cx.spawn(|this, mut cx| async move {
694 let project_id = this.update(&mut cx, |this, cx| {
695 if let ProjectClientState::Local {
696 is_shared,
697 remote_id_rx,
698 ..
699 } = &mut this.client_state
700 {
701 *is_shared = true;
702
703 for open_buffer in this.opened_buffers.values_mut() {
704 match open_buffer {
705 OpenBuffer::Strong(_) => {}
706 OpenBuffer::Weak(buffer) => {
707 if let Some(buffer) = buffer.upgrade(cx) {
708 *open_buffer = OpenBuffer::Strong(buffer);
709 }
710 }
711 OpenBuffer::Loading(_) => unreachable!(),
712 }
713 }
714
715 for worktree_handle in this.worktrees.iter_mut() {
716 match worktree_handle {
717 WorktreeHandle::Strong(_) => {}
718 WorktreeHandle::Weak(worktree) => {
719 if let Some(worktree) = worktree.upgrade(cx) {
720 *worktree_handle = WorktreeHandle::Strong(worktree);
721 }
722 }
723 }
724 }
725
726 remote_id_rx
727 .borrow()
728 .ok_or_else(|| anyhow!("no project id"))
729 } else {
730 Err(anyhow!("can't share a remote project"))
731 }
732 })?;
733
734 rpc.request(proto::ShareProject { project_id }).await?;
735
736 let mut tasks = Vec::new();
737 this.update(&mut cx, |this, cx| {
738 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
739 worktree.update(cx, |worktree, cx| {
740 let worktree = worktree.as_local_mut().unwrap();
741 tasks.push(worktree.share(project_id, cx));
742 });
743 }
744 });
745 for task in tasks {
746 task.await?;
747 }
748 this.update(&mut cx, |_, cx| cx.notify());
749 Ok(())
750 })
751 }
752
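    /// Stops sharing this local project: collaborators and shared buffers are
    /// cleared, invisible worktrees and open buffers are downgraded to weak
    /// handles, and the server is notified with `UnshareProject`.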
753 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
754 let rpc = self.client.clone();
755
756 if let ProjectClientState::Local {
757 is_shared,
758 remote_id_rx,
759 ..
760 } = &mut self.client_state
761 {
762 if !*is_shared {
763 return;
764 }
765
766 *is_shared = false;
767 self.collaborators.clear();
768 self.shared_buffers.clear();
769 for worktree_handle in self.worktrees.iter_mut() {
770 if let WorktreeHandle::Strong(worktree) = worktree_handle {
771 let is_visible = worktree.update(cx, |worktree, _| {
772 worktree.as_local_mut().unwrap().unshare();
773 worktree.is_visible()
774 });
775 if !is_visible {
776 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
777 }
778 }
779 }
780
781 for open_buffer in self.opened_buffers.values_mut() {
782 match open_buffer {
783 OpenBuffer::Strong(buffer) => {
784 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
785 }
786 _ => {}
787 }
788 }
789
790 if let Some(project_id) = *remote_id_rx.borrow() {
791 rpc.send(proto::UnshareProject { project_id }).log_err();
792 }
793
794 cx.notify();
795 } else {
796 log::error!("attempted to unshare a remote project");
797 }
798 }
799
800 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
801 if let ProjectClientState::Remote {
802 sharing_has_stopped,
803 ..
804 } = &mut self.client_state
805 {
806 *sharing_has_stopped = true;
807 self.collaborators.clear();
808 cx.notify();
809 }
810 }
811
812 pub fn is_read_only(&self) -> bool {
813 match &self.client_state {
814 ProjectClientState::Local { .. } => false,
815 ProjectClientState::Remote {
816 sharing_has_stopped,
817 ..
818 } => *sharing_has_stopped,
819 }
820 }
821
822 pub fn is_local(&self) -> bool {
823 match &self.client_state {
824 ProjectClientState::Local { .. } => true,
825 ProjectClientState::Remote { .. } => false,
826 }
827 }
828
829 pub fn is_remote(&self) -> bool {
830 !self.is_local()
831 }
832
833 pub fn create_buffer(
834 &mut self,
835 text: &str,
836 language: Option<Arc<Language>>,
837 cx: &mut ModelContext<Self>,
838 ) -> Result<ModelHandle<Buffer>> {
839 if self.is_remote() {
840 return Err(anyhow!("creating buffers as a guest is not supported yet"));
841 }
842
843 let buffer = cx.add_model(|cx| {
844 Buffer::new(self.replica_id(), text, cx)
845 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
846 });
847 self.register_buffer(&buffer, cx)?;
848 Ok(buffer)
849 }
850
851 pub fn open_path(
852 &mut self,
853 path: impl Into<ProjectPath>,
854 cx: &mut ModelContext<Self>,
855 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
856 let task = self.open_buffer(path, cx);
857 cx.spawn_weak(|_, cx| async move {
858 let buffer = task.await?;
859 let project_entry_id = buffer
860 .read_with(&cx, |buffer, cx| {
861 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
862 })
863 .ok_or_else(|| anyhow!("no project entry"))?;
864 Ok((project_entry_id, buffer.into()))
865 })
866 }
867
868 pub fn open_local_buffer(
869 &mut self,
870 abs_path: impl AsRef<Path>,
871 cx: &mut ModelContext<Self>,
872 ) -> Task<Result<ModelHandle<Buffer>>> {
873 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
874 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
875 } else {
876 Task::ready(Err(anyhow!("no such path")))
877 }
878 }
879
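    /// Opens the buffer at the given project path, reusing an already open
    /// buffer when possible and joining any in-flight load of the same path so
    /// that each path is loaded at most once.
    ///
    /// A usage sketch (assuming an async context holding a
    /// `ModelHandle<Project>`; a `(WorktreeId, path)` tuple converts into a
    /// `ProjectPath`, as in `open_local_buffer` above):
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer((worktree_id, "src/lib.rs"), cx))
    ///     .await?;
    /// ```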
880 pub fn open_buffer(
881 &mut self,
882 path: impl Into<ProjectPath>,
883 cx: &mut ModelContext<Self>,
884 ) -> Task<Result<ModelHandle<Buffer>>> {
885 let project_path = path.into();
886 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
887 worktree
888 } else {
889 return Task::ready(Err(anyhow!("no such worktree")));
890 };
891
892 // If there is already a buffer for the given path, then return it.
893 let existing_buffer = self.get_open_buffer(&project_path, cx);
894 if let Some(existing_buffer) = existing_buffer {
895 return Task::ready(Ok(existing_buffer));
896 }
897
898 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
899 // If the given path is already being loaded, then wait for that existing
900 // task to complete and return the same buffer.
901 hash_map::Entry::Occupied(e) => e.get().clone(),
902
903 // Otherwise, record the fact that this path is now being loaded.
904 hash_map::Entry::Vacant(entry) => {
905 let (mut tx, rx) = postage::watch::channel();
906 entry.insert(rx.clone());
907
908 let load_buffer = if worktree.read(cx).is_local() {
909 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
910 } else {
911 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
912 };
913
914 cx.spawn(move |this, mut cx| async move {
915 let load_result = load_buffer.await;
916 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
917 // Record the fact that the buffer is no longer loading.
918 this.loading_buffers.remove(&project_path);
919 let buffer = load_result.map_err(Arc::new)?;
920 Ok(buffer)
921 }));
922 })
923 .detach();
924 rx
925 }
926 };
927
928 cx.foreground().spawn(async move {
929 loop {
930 if let Some(result) = loading_watch.borrow().as_ref() {
931 match result {
932 Ok(buffer) => return Ok(buffer.clone()),
933 Err(error) => return Err(anyhow!("{}", error)),
934 }
935 }
936 loading_watch.next().await;
937 }
938 })
939 }
940
941 fn open_local_buffer_internal(
942 &mut self,
943 path: &Arc<Path>,
944 worktree: &ModelHandle<Worktree>,
945 cx: &mut ModelContext<Self>,
946 ) -> Task<Result<ModelHandle<Buffer>>> {
947 let load_buffer = worktree.update(cx, |worktree, cx| {
948 let worktree = worktree.as_local_mut().unwrap();
949 worktree.load_buffer(path, cx)
950 });
951 cx.spawn(|this, mut cx| async move {
952 let buffer = load_buffer.await?;
953 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
954 Ok(buffer)
955 })
956 }
957
958 fn open_remote_buffer_internal(
959 &mut self,
960 path: &Arc<Path>,
961 worktree: &ModelHandle<Worktree>,
962 cx: &mut ModelContext<Self>,
963 ) -> Task<Result<ModelHandle<Buffer>>> {
964 let rpc = self.client.clone();
965 let project_id = self.remote_id().unwrap();
966 let remote_worktree_id = worktree.read(cx).id();
967 let path = path.clone();
968 let path_string = path.to_string_lossy().to_string();
969 cx.spawn(|this, mut cx| async move {
970 let response = rpc
971 .request(proto::OpenBufferByPath {
972 project_id,
973 worktree_id: remote_worktree_id.to_proto(),
974 path: path_string,
975 })
976 .await?;
977 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
978 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
979 .await
980 })
981 }
982
983 fn open_local_buffer_via_lsp(
984 &mut self,
985 abs_path: lsp::Url,
986 lsp_adapter: Arc<dyn LspAdapter>,
987 lsp_server: Arc<LanguageServer>,
988 cx: &mut ModelContext<Self>,
989 ) -> Task<Result<ModelHandle<Buffer>>> {
990 cx.spawn(|this, mut cx| async move {
991 let abs_path = abs_path
992 .to_file_path()
993 .map_err(|_| anyhow!("can't convert URI to path"))?;
994 let (worktree, relative_path) = if let Some(result) =
995 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
996 {
997 result
998 } else {
999 let worktree = this
1000 .update(&mut cx, |this, cx| {
1001 this.create_local_worktree(&abs_path, false, cx)
1002 })
1003 .await?;
1004 this.update(&mut cx, |this, cx| {
1005 this.language_servers.insert(
1006 (worktree.read(cx).id(), lsp_adapter.name()),
1007 (lsp_adapter, lsp_server),
1008 );
1009 });
1010 (worktree, PathBuf::new())
1011 };
1012
1013 let project_path = ProjectPath {
1014 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1015 path: relative_path.into(),
1016 };
1017 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1018 .await
1019 })
1020 }
1021
1022 pub fn open_buffer_by_id(
1023 &mut self,
1024 id: u64,
1025 cx: &mut ModelContext<Self>,
1026 ) -> Task<Result<ModelHandle<Buffer>>> {
1027 if let Some(buffer) = self.buffer_for_id(id, cx) {
1028 Task::ready(Ok(buffer))
1029 } else if self.is_local() {
1030 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1031 } else if let Some(project_id) = self.remote_id() {
1032 let request = self
1033 .client
1034 .request(proto::OpenBufferById { project_id, id });
1035 cx.spawn(|this, mut cx| async move {
1036 let buffer = request
1037 .await?
1038 .buffer
1039 .ok_or_else(|| anyhow!("invalid buffer"))?;
1040 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1041 .await
1042 })
1043 } else {
1044 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1045 }
1046 }
1047
1048 pub fn save_buffer_as(
1049 &mut self,
1050 buffer: ModelHandle<Buffer>,
1051 abs_path: PathBuf,
1052 cx: &mut ModelContext<Project>,
1053 ) -> Task<Result<()>> {
1054 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1055 let old_path =
1056 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1057 cx.spawn(|this, mut cx| async move {
1058 if let Some(old_path) = old_path {
1059 this.update(&mut cx, |this, cx| {
1060 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1061 });
1062 }
1063 let (worktree, path) = worktree_task.await?;
1064 worktree
1065 .update(&mut cx, |worktree, cx| {
1066 worktree
1067 .as_local_mut()
1068 .unwrap()
1069 .save_buffer_as(buffer.clone(), path, cx)
1070 })
1071 .await?;
1072 this.update(&mut cx, |this, cx| {
1073 this.assign_language_to_buffer(&buffer, cx);
1074 this.register_buffer_with_language_server(&buffer, cx);
1075 });
1076 Ok(())
1077 })
1078 }
1079
1080 pub fn get_open_buffer(
1081 &mut self,
1082 path: &ProjectPath,
1083 cx: &mut ModelContext<Self>,
1084 ) -> Option<ModelHandle<Buffer>> {
1085 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1086 self.opened_buffers.values().find_map(|buffer| {
1087 let buffer = buffer.upgrade(cx)?;
1088 let file = File::from_dyn(buffer.read(cx).file())?;
1089 if file.worktree == worktree && file.path() == &path.path {
1090 Some(buffer)
1091 } else {
1092 None
1093 }
1094 })
1095 }
1096
1097 fn register_buffer(
1098 &mut self,
1099 buffer: &ModelHandle<Buffer>,
1100 cx: &mut ModelContext<Self>,
1101 ) -> Result<()> {
1102 let remote_id = buffer.read(cx).remote_id();
1103 let open_buffer = if self.is_remote() || self.is_shared() {
1104 OpenBuffer::Strong(buffer.clone())
1105 } else {
1106 OpenBuffer::Weak(buffer.downgrade())
1107 };
1108
1109 match self.opened_buffers.insert(remote_id, open_buffer) {
1110 None => {}
1111 Some(OpenBuffer::Loading(operations)) => {
1112 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1113 }
1114 Some(OpenBuffer::Weak(existing_handle)) => {
1115 if existing_handle.upgrade(cx).is_some() {
1116 Err(anyhow!(
1117 "already registered buffer with remote id {}",
1118 remote_id
1119 ))?
1120 }
1121 }
1122 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1123 "already registered buffer with remote id {}",
1124 remote_id
1125 ))?,
1126 }
1127 cx.subscribe(buffer, |this, buffer, event, cx| {
1128 this.on_buffer_event(buffer, event, cx);
1129 })
1130 .detach();
1131
1132 self.assign_language_to_buffer(buffer, cx);
1133 self.register_buffer_with_language_server(buffer, cx);
1134 cx.observe_release(buffer, |this, buffer, cx| {
1135 if let Some(file) = File::from_dyn(buffer.file()) {
1136 if file.is_local() {
1137 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1138 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1139 server
1140 .notify::<lsp::notification::DidCloseTextDocument>(
1141 lsp::DidCloseTextDocumentParams {
1142 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1143 },
1144 )
1145 .log_err();
1146 }
1147 }
1148 }
1149 })
1150 .detach();
1151
1152 Ok(())
1153 }
1154
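    /// Announces a newly registered local buffer to its language server: any
    /// previously stored diagnostics for the path are applied, a
    /// `textDocument/didOpen` notification is sent, the server's completion
    /// trigger characters are installed on the buffer, and the initial
    /// snapshot is recorded for later incremental `didChange` updates.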
1155 fn register_buffer_with_language_server(
1156 &mut self,
1157 buffer_handle: &ModelHandle<Buffer>,
1158 cx: &mut ModelContext<Self>,
1159 ) {
1160 let buffer = buffer_handle.read(cx);
1161 let buffer_id = buffer.remote_id();
1162 if let Some(file) = File::from_dyn(buffer.file()) {
1163 if file.is_local() {
1164 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1165 let initial_snapshot = buffer.text_snapshot();
1166
1167 let mut language_server = None;
1168 let mut language_id = None;
1169 if let Some(language) = buffer.language() {
1170 let worktree_id = file.worktree_id(cx);
1171 if let Some(adapter) = language.lsp_adapter() {
1172 language_id = adapter.id_for_language(language.name().as_ref());
1173 language_server = self
1174 .language_servers
1175 .get(&(worktree_id, adapter.name()))
1176 .cloned();
1177 }
1178 }
1179
1180 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1181 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1182 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1183 .log_err();
1184 }
1185 }
1186
1187 if let Some((_, server)) = language_server {
1188 server
1189 .notify::<lsp::notification::DidOpenTextDocument>(
1190 lsp::DidOpenTextDocumentParams {
1191 text_document: lsp::TextDocumentItem::new(
1192 uri,
1193 language_id.unwrap_or_default(),
1194 0,
1195 initial_snapshot.text(),
1196 ),
                            },
                        )
1200 .log_err();
1201 buffer_handle.update(cx, |buffer, cx| {
1202 buffer.set_completion_triggers(
1203 server
1204 .capabilities()
1205 .completion_provider
1206 .as_ref()
1207 .and_then(|provider| provider.trigger_characters.clone())
1208 .unwrap_or(Vec::new()),
1209 cx,
1210 )
1211 });
1212 self.buffer_snapshots
1213 .insert(buffer_id, vec![(0, initial_snapshot)]);
1214 }
1215 }
1216 }
1217 }
1218
1219 fn unregister_buffer_from_language_server(
1220 &mut self,
1221 buffer: &ModelHandle<Buffer>,
1222 old_path: PathBuf,
1223 cx: &mut ModelContext<Self>,
1224 ) {
1225 buffer.update(cx, |buffer, cx| {
1226 buffer.update_diagnostics(Default::default(), cx);
1227 self.buffer_snapshots.remove(&buffer.remote_id());
1228 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1229 language_server
1230 .notify::<lsp::notification::DidCloseTextDocument>(
1231 lsp::DidCloseTextDocumentParams {
1232 text_document: lsp::TextDocumentIdentifier::new(
1233 lsp::Url::from_file_path(old_path).unwrap(),
1234 ),
1235 },
1236 )
1237 .log_err();
1238 }
1239 });
1240 }
1241
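    /// Handles buffer events: operations are forwarded to collaborators via
    /// `UpdateBuffer`, edits become incremental `textDocument/didChange`
    /// notifications based on the last snapshot sent to the language server,
    /// and saves are broadcast to the worktree's language servers as
    /// `textDocument/didSave`.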
1242 fn on_buffer_event(
1243 &mut self,
1244 buffer: ModelHandle<Buffer>,
1245 event: &BufferEvent,
1246 cx: &mut ModelContext<Self>,
1247 ) -> Option<()> {
1248 match event {
1249 BufferEvent::Operation(operation) => {
1250 let project_id = self.remote_id()?;
1251 let request = self.client.request(proto::UpdateBuffer {
1252 project_id,
1253 buffer_id: buffer.read(cx).remote_id(),
1254 operations: vec![language::proto::serialize_operation(&operation)],
1255 });
1256 cx.background().spawn(request).detach_and_log_err(cx);
1257 }
1258 BufferEvent::Edited { .. } => {
1259 let (_, language_server) = self
1260 .language_server_for_buffer(buffer.read(cx), cx)?
1261 .clone();
1262 let buffer = buffer.read(cx);
1263 let file = File::from_dyn(buffer.file())?;
1264 let abs_path = file.as_local()?.abs_path(cx);
1265 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1266 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1267 let (version, prev_snapshot) = buffer_snapshots.last()?;
1268 let next_snapshot = buffer.text_snapshot();
1269 let next_version = version + 1;
1270
1271 let content_changes = buffer
1272 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1273 .map(|edit| {
1274 let edit_start = edit.new.start.0;
1275 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1276 let new_text = next_snapshot
1277 .text_for_range(edit.new.start.1..edit.new.end.1)
1278 .collect();
1279 lsp::TextDocumentContentChangeEvent {
1280 range: Some(lsp::Range::new(
1281 point_to_lsp(edit_start),
1282 point_to_lsp(edit_end),
1283 )),
1284 range_length: None,
1285 text: new_text,
1286 }
1287 })
1288 .collect();
1289
1290 buffer_snapshots.push((next_version, next_snapshot));
1291
1292 language_server
1293 .notify::<lsp::notification::DidChangeTextDocument>(
1294 lsp::DidChangeTextDocumentParams {
1295 text_document: lsp::VersionedTextDocumentIdentifier::new(
1296 uri,
1297 next_version,
1298 ),
1299 content_changes,
1300 },
1301 )
1302 .log_err();
1303 }
1304 BufferEvent::Saved => {
1305 let file = File::from_dyn(buffer.read(cx).file())?;
1306 let worktree_id = file.worktree_id(cx);
1307 let abs_path = file.as_local()?.abs_path(cx);
1308 let text_document = lsp::TextDocumentIdentifier {
1309 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1310 };
1311
1312 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1313 server
1314 .notify::<lsp::notification::DidSaveTextDocument>(
1315 lsp::DidSaveTextDocumentParams {
1316 text_document: text_document.clone(),
1317 text: None,
1318 },
1319 )
1320 .log_err();
1321 }
1322 }
1323 _ => {}
1324 }
1325
1326 None
1327 }
1328
1329 fn language_servers_for_worktree(
1330 &self,
1331 worktree_id: WorktreeId,
1332 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1333 self.language_servers.iter().filter_map(
1334 move |((language_server_worktree_id, _), server)| {
1335 if *language_server_worktree_id == worktree_id {
1336 Some(server)
1337 } else {
1338 None
1339 }
1340 },
1341 )
1342 }
1343
1344 fn assign_language_to_buffer(
1345 &mut self,
1346 buffer: &ModelHandle<Buffer>,
1347 cx: &mut ModelContext<Self>,
1348 ) -> Option<()> {
1349 // If the buffer has a language, set it and start the language server if we haven't already.
1350 let full_path = buffer.read(cx).file()?.full_path(cx);
1351 let language = self.languages.select_language(&full_path)?;
1352 buffer.update(cx, |buffer, cx| {
1353 buffer.set_language(Some(language.clone()), cx);
1354 });
1355
1356 let file = File::from_dyn(buffer.read(cx).file())?;
1357 let worktree = file.worktree.read(cx).as_local()?;
1358 let worktree_id = worktree.id();
1359 let worktree_abs_path = worktree.abs_path().clone();
1360 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1361
1362 None
1363 }
1364
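    /// Starts the language server for the given language in the given worktree
    /// if one has not already been started for that (worktree, adapter) pair.
    /// Once initialized, the server's diagnostics, progress, configuration,
    /// and workspace-edit handlers are wired up, its status is broadcast to
    /// collaborators, and every matching open buffer is announced to it.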
1365 fn start_language_server(
1366 &mut self,
1367 worktree_id: WorktreeId,
1368 worktree_path: Arc<Path>,
1369 language: Arc<Language>,
1370 cx: &mut ModelContext<Self>,
1371 ) {
1372 let adapter = if let Some(adapter) = language.lsp_adapter() {
1373 adapter
1374 } else {
1375 return;
1376 };
1377 let key = (worktree_id, adapter.name());
1378 self.started_language_servers
1379 .entry(key.clone())
1380 .or_insert_with(|| {
1381 let server_id = post_inc(&mut self.next_language_server_id);
1382 let language_server = self.languages.start_language_server(
1383 server_id,
1384 language.clone(),
1385 worktree_path,
1386 self.client.http_client(),
1387 cx,
1388 );
1389 cx.spawn_weak(|this, mut cx| async move {
1390 let language_server = language_server?.await.log_err()?;
1391 let language_server = language_server
1392 .initialize(adapter.initialization_options())
1393 .await
1394 .log_err()?;
1395 let this = this.upgrade(&cx)?;
1396 let disk_based_diagnostics_progress_token =
1397 adapter.disk_based_diagnostics_progress_token();
1398
1399 language_server
1400 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1401 let this = this.downgrade();
1402 let adapter = adapter.clone();
1403 move |params, mut cx| {
1404 if let Some(this) = this.upgrade(&cx) {
1405 this.update(&mut cx, |this, cx| {
1406 this.on_lsp_diagnostics_published(
1407 server_id,
1408 params,
1409 &adapter,
1410 disk_based_diagnostics_progress_token,
1411 cx,
1412 );
1413 });
1414 }
1415 }
1416 })
1417 .detach();
1418
1419 language_server
1420 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1421 let settings = this
1422 .read_with(&cx, |this, _| this.language_server_settings.clone());
1423 move |params, _| {
1424 let settings = settings.lock().clone();
1425 async move {
1426 Ok(params
1427 .items
1428 .into_iter()
1429 .map(|item| {
1430 if let Some(section) = &item.section {
1431 settings
1432 .get(section)
1433 .cloned()
1434 .unwrap_or(serde_json::Value::Null)
1435 } else {
1436 settings.clone()
1437 }
1438 })
1439 .collect())
1440 }
1441 }
1442 })
1443 .detach();
1444
1445 language_server
1446 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1447 let this = this.downgrade();
1448 let adapter = adapter.clone();
1449 let language_server = language_server.clone();
1450 move |params, cx| {
1451 Self::on_lsp_workspace_edit(
1452 this,
1453 params,
1454 server_id,
1455 adapter.clone(),
1456 language_server.clone(),
1457 cx,
1458 )
1459 }
1460 })
1461 .detach();
1462
1463 language_server
1464 .on_notification::<lsp::notification::Progress, _>({
1465 let this = this.downgrade();
1466 move |params, mut cx| {
1467 if let Some(this) = this.upgrade(&cx) {
1468 this.update(&mut cx, |this, cx| {
1469 this.on_lsp_progress(
1470 params,
1471 server_id,
1472 disk_based_diagnostics_progress_token,
1473 cx,
1474 );
1475 });
1476 }
1477 }
1478 })
1479 .detach();
1480
1481 this.update(&mut cx, |this, cx| {
1482 this.language_servers
1483 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1484 this.language_server_statuses.insert(
1485 server_id,
1486 LanguageServerStatus {
1487 name: language_server.name().to_string(),
1488 pending_work: Default::default(),
1489 pending_diagnostic_updates: 0,
1490 },
1491 );
1492 language_server
1493 .notify::<lsp::notification::DidChangeConfiguration>(
1494 lsp::DidChangeConfigurationParams {
1495 settings: this.language_server_settings.lock().clone(),
1496 },
1497 )
1498 .ok();
1499
1500 if let Some(project_id) = this.remote_id() {
1501 this.client
1502 .send(proto::StartLanguageServer {
1503 project_id,
1504 server: Some(proto::LanguageServer {
1505 id: server_id as u64,
1506 name: language_server.name().to_string(),
1507 }),
1508 })
1509 .log_err();
1510 }
1511
1512 // Tell the language server about every open buffer in the worktree that matches the language.
1513 for buffer in this.opened_buffers.values() {
1514 if let Some(buffer_handle) = buffer.upgrade(cx) {
1515 let buffer = buffer_handle.read(cx);
1516 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1517 file
1518 } else {
1519 continue;
1520 };
1521 let language = if let Some(language) = buffer.language() {
1522 language
1523 } else {
1524 continue;
1525 };
1526 if file.worktree.read(cx).id() != key.0
1527 || language.lsp_adapter().map(|a| a.name())
1528 != Some(key.1.clone())
1529 {
1530 continue;
1531 }
1532
1533 let file = file.as_local()?;
1534 let versions = this
1535 .buffer_snapshots
1536 .entry(buffer.remote_id())
1537 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1538 let (version, initial_snapshot) = versions.last().unwrap();
1539 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1540 let language_id = adapter.id_for_language(language.name().as_ref());
1541 language_server
1542 .notify::<lsp::notification::DidOpenTextDocument>(
1543 lsp::DidOpenTextDocumentParams {
1544 text_document: lsp::TextDocumentItem::new(
1545 uri,
1546 language_id.unwrap_or_default(),
1547 *version,
1548 initial_snapshot.text(),
1549 ),
1550 },
1551 )
1552 .log_err()?;
1553 buffer_handle.update(cx, |buffer, cx| {
1554 buffer.set_completion_triggers(
1555 language_server
1556 .capabilities()
1557 .completion_provider
1558 .as_ref()
1559 .and_then(|provider| {
1560 provider.trigger_characters.clone()
1561 })
1562 .unwrap_or(Vec::new()),
1563 cx,
1564 )
1565 });
1566 }
1567 }
1568
1569 cx.notify();
1570 Some(())
1571 });
1572
1573 Some(language_server)
1574 })
1575 });
1576 }
1577
1578 pub fn restart_language_servers_for_buffers(
1579 &mut self,
1580 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1581 cx: &mut ModelContext<Self>,
1582 ) -> Option<()> {
1583 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1584 .into_iter()
1585 .filter_map(|buffer| {
1586 let file = File::from_dyn(buffer.read(cx).file())?;
1587 let worktree = file.worktree.read(cx).as_local()?;
1588 let worktree_id = worktree.id();
1589 let worktree_abs_path = worktree.abs_path().clone();
1590 let full_path = file.full_path(cx);
1591 Some((worktree_id, worktree_abs_path, full_path))
1592 })
1593 .collect();
1594 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1595 let language = self.languages.select_language(&full_path)?;
1596 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1597 }
1598
1599 None
1600 }
1601
1602 fn restart_language_server(
1603 &mut self,
1604 worktree_id: WorktreeId,
1605 worktree_path: Arc<Path>,
1606 language: Arc<Language>,
1607 cx: &mut ModelContext<Self>,
1608 ) {
1609 let adapter = if let Some(adapter) = language.lsp_adapter() {
1610 adapter
1611 } else {
1612 return;
1613 };
1614 let key = (worktree_id, adapter.name());
1615 let server_to_shutdown = self.language_servers.remove(&key);
1616 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1620 cx.spawn_weak(|this, mut cx| async move {
1621 if let Some(this) = this.upgrade(&cx) {
1622 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1623 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1624 shutdown_task.await;
1625 }
1626 }
1627
1628 this.update(&mut cx, |this, cx| {
1629 this.start_language_server(worktree_id, worktree_path, language, cx);
1630 });
1631 }
1632 })
1633 .detach();
1634 }
1635
1636 fn on_lsp_diagnostics_published(
1637 &mut self,
1638 server_id: usize,
1639 mut params: lsp::PublishDiagnosticsParams,
1640 adapter: &Arc<dyn LspAdapter>,
1641 disk_based_diagnostics_progress_token: Option<&str>,
1642 cx: &mut ModelContext<Self>,
1643 ) {
1644 adapter.process_diagnostics(&mut params);
1645 if disk_based_diagnostics_progress_token.is_none() {
1646 self.disk_based_diagnostics_started(cx);
1647 self.broadcast_language_server_update(
1648 server_id,
1649 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1650 proto::LspDiskBasedDiagnosticsUpdating {},
1651 ),
1652 );
1653 }
1654 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1655 .log_err();
1656 if disk_based_diagnostics_progress_token.is_none() {
1657 self.disk_based_diagnostics_finished(cx);
1658 self.broadcast_language_server_update(
1659 server_id,
1660 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1661 proto::LspDiskBasedDiagnosticsUpdated {},
1662 ),
1663 );
1664 }
1665 }
1666
1667 fn on_lsp_progress(
1668 &mut self,
1669 progress: lsp::ProgressParams,
1670 server_id: usize,
1671 disk_based_diagnostics_progress_token: Option<&str>,
1672 cx: &mut ModelContext<Self>,
1673 ) {
1674 let token = match progress.token {
1675 lsp::NumberOrString::String(token) => token,
1676 lsp::NumberOrString::Number(token) => {
1677 log::info!("skipping numeric progress token {}", token);
1678 return;
1679 }
1680 };
1681 let progress = match progress.value {
1682 lsp::ProgressParamsValue::WorkDone(value) => value,
1683 };
1684 let language_server_status =
1685 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1686 status
1687 } else {
1688 return;
1689 };
1690 match progress {
1691 lsp::WorkDoneProgress::Begin(_) => {
1692 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1693 language_server_status.pending_diagnostic_updates += 1;
1694 if language_server_status.pending_diagnostic_updates == 1 {
1695 self.disk_based_diagnostics_started(cx);
1696 self.broadcast_language_server_update(
1697 server_id,
1698 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1699 proto::LspDiskBasedDiagnosticsUpdating {},
1700 ),
1701 );
1702 }
1703 } else {
1704 self.on_lsp_work_start(server_id, token.clone(), cx);
1705 self.broadcast_language_server_update(
1706 server_id,
1707 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1708 token,
1709 }),
1710 );
1711 }
1712 }
1713 lsp::WorkDoneProgress::Report(report) => {
1714 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1715 self.on_lsp_work_progress(
1716 server_id,
1717 token.clone(),
1718 LanguageServerProgress {
1719 message: report.message.clone(),
1720 percentage: report.percentage.map(|p| p as usize),
1721 last_update_at: Instant::now(),
1722 },
1723 cx,
1724 );
1725 self.broadcast_language_server_update(
1726 server_id,
1727 proto::update_language_server::Variant::WorkProgress(
1728 proto::LspWorkProgress {
1729 token,
1730 message: report.message,
1731 percentage: report.percentage.map(|p| p as u32),
1732 },
1733 ),
1734 );
1735 }
1736 }
1737 lsp::WorkDoneProgress::End(_) => {
1738 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1739 language_server_status.pending_diagnostic_updates -= 1;
1740 if language_server_status.pending_diagnostic_updates == 0 {
1741 self.disk_based_diagnostics_finished(cx);
1742 self.broadcast_language_server_update(
1743 server_id,
1744 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1745 proto::LspDiskBasedDiagnosticsUpdated {},
1746 ),
1747 );
1748 }
1749 } else {
1750 self.on_lsp_work_end(server_id, token.clone(), cx);
1751 self.broadcast_language_server_update(
1752 server_id,
1753 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1754 token,
1755 }),
1756 );
1757 }
1758 }
1759 }
1760 }
1761
1762 fn on_lsp_work_start(
1763 &mut self,
1764 language_server_id: usize,
1765 token: String,
1766 cx: &mut ModelContext<Self>,
1767 ) {
1768 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1769 status.pending_work.insert(
1770 token,
1771 LanguageServerProgress {
1772 message: None,
1773 percentage: None,
1774 last_update_at: Instant::now(),
1775 },
1776 );
1777 cx.notify();
1778 }
1779 }
1780
1781 fn on_lsp_work_progress(
1782 &mut self,
1783 language_server_id: usize,
1784 token: String,
1785 progress: LanguageServerProgress,
1786 cx: &mut ModelContext<Self>,
1787 ) {
1788 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1789 status.pending_work.insert(token, progress);
1790 cx.notify();
1791 }
1792 }
1793
1794 fn on_lsp_work_end(
1795 &mut self,
1796 language_server_id: usize,
1797 token: String,
1798 cx: &mut ModelContext<Self>,
1799 ) {
1800 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1801 status.pending_work.remove(&token);
1802 cx.notify();
1803 }
1804 }
1805
1806 async fn on_lsp_workspace_edit(
1807 this: WeakModelHandle<Self>,
1808 params: lsp::ApplyWorkspaceEditParams,
1809 server_id: usize,
1810 adapter: Arc<dyn LspAdapter>,
1811 language_server: Arc<LanguageServer>,
1812 mut cx: AsyncAppContext,
1813 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1814 let this = this
1815 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1817 let transaction = Self::deserialize_workspace_edit(
1818 this.clone(),
1819 params.edit,
1820 true,
1821 adapter.clone(),
1822 language_server.clone(),
1823 &mut cx,
1824 )
1825 .await
1826 .log_err();
1827 this.update(&mut cx, |this, _| {
1828 if let Some(transaction) = transaction {
1829 this.last_workspace_edits_by_language_server
1830 .insert(server_id, transaction);
1831 }
1832 });
1833 Ok(lsp::ApplyWorkspaceEditResponse {
1834 applied: true,
1835 failed_change: None,
1836 failure_reason: None,
1837 })
1838 }
1839
1840 fn broadcast_language_server_update(
1841 &self,
1842 language_server_id: usize,
1843 event: proto::update_language_server::Variant,
1844 ) {
1845 if let Some(project_id) = self.remote_id() {
1846 self.client
1847 .send(proto::UpdateLanguageServer {
1848 project_id,
1849 language_server_id: language_server_id as u64,
1850 variant: Some(event),
1851 })
1852 .log_err();
1853 }
1854 }
1855
1856 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1857 for (_, server) in self.language_servers.values() {
1858 server
1859 .notify::<lsp::notification::DidChangeConfiguration>(
1860 lsp::DidChangeConfigurationParams {
1861 settings: settings.clone(),
1862 },
1863 )
1864 .ok();
1865 }
1866 *self.language_server_settings.lock() = settings;
1867 }
1868
1869 pub fn language_server_statuses(
1870 &self,
1871 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1872 self.language_server_statuses.values()
1873 }
1874
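    /// Converts an LSP `publishDiagnostics` notification into grouped
    /// `DiagnosticEntry` values, folding related-information entries into
    /// their primary diagnostic's group as supporting diagnostics, and stores
    /// the result on the corresponding worktree and any open buffer for that
    /// path.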
1875 pub fn update_diagnostics(
1876 &mut self,
1877 params: lsp::PublishDiagnosticsParams,
1878 disk_based_sources: &[&str],
1879 cx: &mut ModelContext<Self>,
1880 ) -> Result<()> {
1881 let abs_path = params
1882 .uri
1883 .to_file_path()
1884 .map_err(|_| anyhow!("URI is not a file"))?;
1885 let mut next_group_id = 0;
1886 let mut diagnostics = Vec::default();
1887 let mut primary_diagnostic_group_ids = HashMap::default();
1888 let mut sources_by_group_id = HashMap::default();
1889 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1891 let source = diagnostic.source.as_ref();
1892 let code = diagnostic.code.as_ref().map(|code| match code {
1893 lsp::NumberOrString::Number(code) => code.to_string(),
1894 lsp::NumberOrString::String(code) => code.clone(),
1895 });
1896 let range = range_from_lsp(diagnostic.range);
1897 let is_supporting = diagnostic
1898 .related_information
1899 .as_ref()
1900 .map_or(false, |infos| {
1901 infos.iter().any(|info| {
1902 primary_diagnostic_group_ids.contains_key(&(
1903 source,
1904 code.clone(),
1905 range_from_lsp(info.location.range),
1906 ))
1907 })
1908 });
1909
1910 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1911 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1912 });
1913
1914 if is_supporting {
1915 supporting_diagnostics.insert(
1916 (source, code.clone(), range),
1917 (diagnostic.severity, is_unnecessary),
1918 );
1919 } else {
1920 let group_id = post_inc(&mut next_group_id);
1921 let is_disk_based = source.map_or(false, |source| {
1922 disk_based_sources.contains(&source.as_str())
1923 });
1924
1925 sources_by_group_id.insert(group_id, source);
1926 primary_diagnostic_group_ids
1927 .insert((source, code.clone(), range.clone()), group_id);
1928
1929 diagnostics.push(DiagnosticEntry {
1930 range,
1931 diagnostic: Diagnostic {
1932 code: code.clone(),
1933 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1934 message: diagnostic.message.clone(),
1935 group_id,
1936 is_primary: true,
1937 is_valid: true,
1938 is_disk_based,
1939 is_unnecessary,
1940 },
1941 });
1942 if let Some(infos) = &diagnostic.related_information {
1943 for info in infos {
1944 if info.location.uri == params.uri && !info.message.is_empty() {
1945 let range = range_from_lsp(info.location.range);
1946 diagnostics.push(DiagnosticEntry {
1947 range,
1948 diagnostic: Diagnostic {
1949 code: code.clone(),
1950 severity: DiagnosticSeverity::INFORMATION,
1951 message: info.message.clone(),
1952 group_id,
1953 is_primary: false,
1954 is_valid: true,
1955 is_disk_based,
1956 is_unnecessary: false,
1957 },
1958 });
1959 }
1960 }
1961 }
1962 }
1963 }
1964
1965 for entry in &mut diagnostics {
1966 let diagnostic = &mut entry.diagnostic;
1967 if !diagnostic.is_primary {
1968 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1969 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1970 source,
1971 diagnostic.code.clone(),
1972 entry.range.clone(),
1973 )) {
1974 if let Some(severity) = severity {
1975 diagnostic.severity = severity;
1976 }
1977 diagnostic.is_unnecessary = is_unnecessary;
1978 }
1979 }
1980 }
1981
1982 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1983 Ok(())
1984 }
1985
1986 pub fn update_diagnostic_entries(
1987 &mut self,
1988 abs_path: PathBuf,
1989 version: Option<i32>,
1990 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1991 cx: &mut ModelContext<Project>,
1992 ) -> Result<(), anyhow::Error> {
1993 let (worktree, relative_path) = self
1994 .find_local_worktree(&abs_path, cx)
1995 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1996 if !worktree.read(cx).is_visible() {
1997 return Ok(());
1998 }
1999
2000 let project_path = ProjectPath {
2001 worktree_id: worktree.read(cx).id(),
2002 path: relative_path.into(),
2003 };
2004 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2005 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2006 }
2007
2008 let updated = worktree.update(cx, |worktree, cx| {
2009 worktree
2010 .as_local_mut()
2011 .ok_or_else(|| anyhow!("not a local worktree"))?
2012 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2013 })?;
2014 if updated {
2015 cx.emit(Event::DiagnosticsUpdated(project_path));
2016 }
2017 Ok(())
2018 }
2019
2020 fn update_buffer_diagnostics(
2021 &mut self,
2022 buffer: &ModelHandle<Buffer>,
2023 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2024 version: Option<i32>,
2025 cx: &mut ModelContext<Self>,
2026 ) -> Result<()> {
2027 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2028 Ordering::Equal
2029 .then_with(|| b.is_primary.cmp(&a.is_primary))
2030 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2031 .then_with(|| a.severity.cmp(&b.severity))
2032 .then_with(|| a.message.cmp(&b.message))
2033 }
2034
2035 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2036
2037 diagnostics.sort_unstable_by(|a, b| {
2038 Ordering::Equal
2039 .then_with(|| a.range.start.cmp(&b.range.start))
2040 .then_with(|| b.range.end.cmp(&a.range.end))
2041 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2042 });
2043
2044 let mut sanitized_diagnostics = Vec::new();
2045 let edits_since_save = Patch::new(
2046 snapshot
2047 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2048 .collect(),
2049 );
2050 for entry in diagnostics {
2051 let start;
2052 let end;
2053 if entry.diagnostic.is_disk_based {
2054 // Some diagnostics are based on files on disk instead of buffers'
2055 // current contents. Adjust these diagnostics' ranges to reflect
2056 // any unsaved edits.
2057 start = edits_since_save.old_to_new(entry.range.start);
2058 end = edits_since_save.old_to_new(entry.range.end);
2059 } else {
2060 start = entry.range.start;
2061 end = entry.range.end;
2062 }
2063
2064 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2065 ..snapshot.clip_point_utf16(end, Bias::Right);
2066
2067 // Expand empty ranges by one character
2068 if range.start == range.end {
2069 range.end.column += 1;
2070 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2071 if range.start == range.end && range.end.column > 0 {
2072 range.start.column -= 1;
2073 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2074 }
2075 }
2076
2077 sanitized_diagnostics.push(DiagnosticEntry {
2078 range,
2079 diagnostic: entry.diagnostic,
2080 });
2081 }
2082 drop(edits_since_save);
2083
2084 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2085 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2086 Ok(())
2087 }
2088
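    /// Reloads dirty buffers from disk, handling local buffers directly and forwarding
    /// remote buffers to the host; returns the combined project transaction.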
2089 pub fn reload_buffers(
2090 &self,
2091 buffers: HashSet<ModelHandle<Buffer>>,
2092 push_to_history: bool,
2093 cx: &mut ModelContext<Self>,
2094 ) -> Task<Result<ProjectTransaction>> {
2095 let mut local_buffers = Vec::new();
2096 let mut remote_buffers = None;
2097 for buffer_handle in buffers {
2098 let buffer = buffer_handle.read(cx);
2099 if buffer.is_dirty() {
2100 if let Some(file) = File::from_dyn(buffer.file()) {
2101 if file.is_local() {
2102 local_buffers.push(buffer_handle);
2103 } else {
2104 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2105 }
2106 }
2107 }
2108 }
2109
2110 let remote_buffers = self.remote_id().zip(remote_buffers);
2111 let client = self.client.clone();
2112
2113 cx.spawn(|this, mut cx| async move {
2114 let mut project_transaction = ProjectTransaction::default();
2115
2116 if let Some((project_id, remote_buffers)) = remote_buffers {
2117 let response = client
2118 .request(proto::ReloadBuffers {
2119 project_id,
2120 buffer_ids: remote_buffers
2121 .iter()
2122 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2123 .collect(),
2124 })
2125 .await?
2126 .transaction
2127 .ok_or_else(|| anyhow!("missing transaction"))?;
2128 project_transaction = this
2129 .update(&mut cx, |this, cx| {
2130 this.deserialize_project_transaction(response, push_to_history, cx)
2131 })
2132 .await?;
2133 }
2134
2135 for buffer in local_buffers {
2136 let transaction = buffer
2137 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2138 .await?;
2139 buffer.update(&mut cx, |buffer, cx| {
2140 if let Some(transaction) = transaction {
2141 if !push_to_history {
2142 buffer.forget_transaction(transaction.id);
2143 }
2144 project_transaction.0.insert(cx.handle(), transaction);
2145 }
2146 });
2147 }
2148
2149 Ok(project_transaction)
2150 })
2151 }
2152
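    /// Formats the given buffers via their language servers, preferring whole-document
    /// formatting and falling back to range formatting; remote buffers are formatted by the host.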
2153 pub fn format(
2154 &self,
2155 buffers: HashSet<ModelHandle<Buffer>>,
2156 push_to_history: bool,
2157 cx: &mut ModelContext<Project>,
2158 ) -> Task<Result<ProjectTransaction>> {
2159 let mut local_buffers = Vec::new();
2160 let mut remote_buffers = None;
2161 for buffer_handle in buffers {
2162 let buffer = buffer_handle.read(cx);
2163 if let Some(file) = File::from_dyn(buffer.file()) {
2164 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2165 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2166 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2167 }
2168 } else {
2169 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2170 }
2171 } else {
2172 return Task::ready(Ok(Default::default()));
2173 }
2174 }
2175
2176 let remote_buffers = self.remote_id().zip(remote_buffers);
2177 let client = self.client.clone();
2178
2179 cx.spawn(|this, mut cx| async move {
2180 let mut project_transaction = ProjectTransaction::default();
2181
2182 if let Some((project_id, remote_buffers)) = remote_buffers {
2183 let response = client
2184 .request(proto::FormatBuffers {
2185 project_id,
2186 buffer_ids: remote_buffers
2187 .iter()
2188 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2189 .collect(),
2190 })
2191 .await?
2192 .transaction
2193 .ok_or_else(|| anyhow!("missing transaction"))?;
2194 project_transaction = this
2195 .update(&mut cx, |this, cx| {
2196 this.deserialize_project_transaction(response, push_to_history, cx)
2197 })
2198 .await?;
2199 }
2200
2201 for (buffer, buffer_abs_path, language_server) in local_buffers {
2202 let text_document = lsp::TextDocumentIdentifier::new(
2203 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2204 );
2205 let capabilities = &language_server.capabilities();
2206 let tab_size = cx.update(|cx| {
2207 let language_name = buffer.read(cx).language().map(|language| language.name());
2208 cx.global::<Settings>().tab_size(language_name.as_deref())
2209 });
2210 let lsp_edits = if capabilities
2211 .document_formatting_provider
2212 .as_ref()
2213 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2214 {
2215 language_server
2216 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2217 text_document,
2218 options: lsp::FormattingOptions {
2219 tab_size,
2220 insert_spaces: true,
2221 insert_final_newline: Some(true),
2222 ..Default::default()
2223 },
2224 work_done_progress_params: Default::default(),
2225 })
2226 .await?
2227 } else if capabilities
2228 .document_range_formatting_provider
2229 .as_ref()
2230 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2231 {
2232 let buffer_start = lsp::Position::new(0, 0);
2233 let buffer_end =
2234 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2235 language_server
2236 .request::<lsp::request::RangeFormatting>(
2237 lsp::DocumentRangeFormattingParams {
2238 text_document,
2239 range: lsp::Range::new(buffer_start, buffer_end),
2240 options: lsp::FormattingOptions {
2241                                     tab_size,
2242 insert_spaces: true,
2243 insert_final_newline: Some(true),
2244 ..Default::default()
2245 },
2246 work_done_progress_params: Default::default(),
2247 },
2248 )
2249 .await?
2250 } else {
2251 continue;
2252 };
2253
2254 if let Some(lsp_edits) = lsp_edits {
2255 let edits = this
2256 .update(&mut cx, |this, cx| {
2257 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2258 })
2259 .await?;
2260 buffer.update(&mut cx, |buffer, cx| {
2261 buffer.finalize_last_transaction();
2262 buffer.start_transaction();
2263 for (range, text) in edits {
2264 buffer.edit([(range, text)], cx);
2265 }
2266 if buffer.end_transaction(cx).is_some() {
2267 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2268 if !push_to_history {
2269 buffer.forget_transaction(transaction.id);
2270 }
2271 project_transaction.0.insert(cx.handle(), transaction);
2272 }
2273 });
2274 }
2275 }
2276
2277 Ok(project_transaction)
2278 })
2279 }
2280
2281 pub fn definition<T: ToPointUtf16>(
2282 &self,
2283 buffer: &ModelHandle<Buffer>,
2284 position: T,
2285 cx: &mut ModelContext<Self>,
2286 ) -> Task<Result<Vec<Location>>> {
2287 let position = position.to_point_utf16(buffer.read(cx));
2288 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2289 }
2290
2291 pub fn references<T: ToPointUtf16>(
2292 &self,
2293 buffer: &ModelHandle<Buffer>,
2294 position: T,
2295 cx: &mut ModelContext<Self>,
2296 ) -> Task<Result<Vec<Location>>> {
2297 let position = position.to_point_utf16(buffer.read(cx));
2298 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2299 }
2300
2301 pub fn document_highlights<T: ToPointUtf16>(
2302 &self,
2303 buffer: &ModelHandle<Buffer>,
2304 position: T,
2305 cx: &mut ModelContext<Self>,
2306 ) -> Task<Result<Vec<DocumentHighlight>>> {
2307 let position = position.to_point_utf16(buffer.read(cx));
2308
2309 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2310 }
2311
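    /// Queries workspace symbols from every running language server, or from the host when
    /// the project is remote, and resolves them to project paths.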
2312 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2313 if self.is_local() {
2314 let mut requests = Vec::new();
2315 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2316 let worktree_id = *worktree_id;
2317 if let Some(worktree) = self
2318 .worktree_for_id(worktree_id, cx)
2319 .and_then(|worktree| worktree.read(cx).as_local())
2320 {
2321 let lsp_adapter = lsp_adapter.clone();
2322 let worktree_abs_path = worktree.abs_path().clone();
2323 requests.push(
2324 language_server
2325 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2326 query: query.to_string(),
2327 ..Default::default()
2328 })
2329 .log_err()
2330 .map(move |response| {
2331 (
2332 lsp_adapter,
2333 worktree_id,
2334 worktree_abs_path,
2335 response.unwrap_or_default(),
2336 )
2337 }),
2338 );
2339 }
2340 }
2341
2342 cx.spawn_weak(|this, cx| async move {
2343 let responses = futures::future::join_all(requests).await;
2344 let this = if let Some(this) = this.upgrade(&cx) {
2345 this
2346 } else {
2347 return Ok(Default::default());
2348 };
2349 this.read_with(&cx, |this, cx| {
2350 let mut symbols = Vec::new();
2351 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2352 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2353 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2354 let mut worktree_id = source_worktree_id;
2355 let path;
2356 if let Some((worktree, rel_path)) =
2357 this.find_local_worktree(&abs_path, cx)
2358 {
2359 worktree_id = worktree.read(cx).id();
2360 path = rel_path;
2361 } else {
2362 path = relativize_path(&worktree_abs_path, &abs_path);
2363 }
2364
2365 let label = this
2366 .languages
2367 .select_language(&path)
2368 .and_then(|language| {
2369 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2370 })
2371 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2372 let signature = this.symbol_signature(worktree_id, &path);
2373
2374 Some(Symbol {
2375 source_worktree_id,
2376 worktree_id,
2377 language_server_name: adapter.name(),
2378 name: lsp_symbol.name,
2379 kind: lsp_symbol.kind,
2380 label,
2381 path,
2382 range: range_from_lsp(lsp_symbol.location.range),
2383 signature,
2384 })
2385 }));
2386 }
2387 Ok(symbols)
2388 })
2389 })
2390 } else if let Some(project_id) = self.remote_id() {
2391 let request = self.client.request(proto::GetProjectSymbols {
2392 project_id,
2393 query: query.to_string(),
2394 });
2395 cx.spawn_weak(|this, cx| async move {
2396 let response = request.await?;
2397 let mut symbols = Vec::new();
2398 if let Some(this) = this.upgrade(&cx) {
2399 this.read_with(&cx, |this, _| {
2400 symbols.extend(
2401 response
2402 .symbols
2403 .into_iter()
2404 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2405 );
2406 })
2407 }
2408 Ok(symbols)
2409 })
2410 } else {
2411 Task::ready(Ok(Default::default()))
2412 }
2413 }
2414
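    /// Opens the buffer containing the given symbol, using the symbol's language server
    /// locally or requesting the buffer from the host for remote projects.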
2415 pub fn open_buffer_for_symbol(
2416 &mut self,
2417 symbol: &Symbol,
2418 cx: &mut ModelContext<Self>,
2419 ) -> Task<Result<ModelHandle<Buffer>>> {
2420 if self.is_local() {
2421 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2422 symbol.source_worktree_id,
2423 symbol.language_server_name.clone(),
2424 )) {
2425 server.clone()
2426 } else {
2427 return Task::ready(Err(anyhow!(
2428 "language server for worktree and language not found"
2429 )));
2430 };
2431
2432 let worktree_abs_path = if let Some(worktree_abs_path) = self
2433 .worktree_for_id(symbol.worktree_id, cx)
2434 .and_then(|worktree| worktree.read(cx).as_local())
2435 .map(|local_worktree| local_worktree.abs_path())
2436 {
2437 worktree_abs_path
2438 } else {
2439 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2440 };
2441 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2442 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2443 uri
2444 } else {
2445 return Task::ready(Err(anyhow!("invalid symbol path")));
2446 };
2447
2448 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2449 } else if let Some(project_id) = self.remote_id() {
2450 let request = self.client.request(proto::OpenBufferForSymbol {
2451 project_id,
2452 symbol: Some(serialize_symbol(symbol)),
2453 });
2454 cx.spawn(|this, mut cx| async move {
2455 let response = request.await?;
2456 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2457 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2458 .await
2459 })
2460 } else {
2461 Task::ready(Err(anyhow!("project does not have a remote id")))
2462 }
2463 }
2464
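    /// Requests completions at the given position from the buffer's language server, or from
    /// the host for remote projects, converting text edits into anchored ranges.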
2465 pub fn completions<T: ToPointUtf16>(
2466 &self,
2467 source_buffer_handle: &ModelHandle<Buffer>,
2468 position: T,
2469 cx: &mut ModelContext<Self>,
2470 ) -> Task<Result<Vec<Completion>>> {
2471 let source_buffer_handle = source_buffer_handle.clone();
2472 let source_buffer = source_buffer_handle.read(cx);
2473 let buffer_id = source_buffer.remote_id();
2474 let language = source_buffer.language().cloned();
2475 let worktree;
2476 let buffer_abs_path;
2477 if let Some(file) = File::from_dyn(source_buffer.file()) {
2478 worktree = file.worktree.clone();
2479 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2480 } else {
2481 return Task::ready(Ok(Default::default()));
2482 };
2483
2484 let position = position.to_point_utf16(source_buffer);
2485 let anchor = source_buffer.anchor_after(position);
2486
2487 if worktree.read(cx).as_local().is_some() {
2488 let buffer_abs_path = buffer_abs_path.unwrap();
2489 let (_, lang_server) =
2490 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2491 server.clone()
2492 } else {
2493 return Task::ready(Ok(Default::default()));
2494 };
2495
2496 cx.spawn(|_, cx| async move {
2497 let completions = lang_server
2498 .request::<lsp::request::Completion>(lsp::CompletionParams {
2499 text_document_position: lsp::TextDocumentPositionParams::new(
2500 lsp::TextDocumentIdentifier::new(
2501 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2502 ),
2503 point_to_lsp(position),
2504 ),
2505 context: Default::default(),
2506 work_done_progress_params: Default::default(),
2507 partial_result_params: Default::default(),
2508 })
2509 .await
2510 .context("lsp completion request failed")?;
2511
2512 let completions = if let Some(completions) = completions {
2513 match completions {
2514 lsp::CompletionResponse::Array(completions) => completions,
2515 lsp::CompletionResponse::List(list) => list.items,
2516 }
2517 } else {
2518 Default::default()
2519 };
2520
2521 source_buffer_handle.read_with(&cx, |this, _| {
2522 let snapshot = this.snapshot();
2523 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2524 let mut range_for_token = None;
2525 Ok(completions
2526 .into_iter()
2527 .filter_map(|lsp_completion| {
2528 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2529 // If the language server provides a range to overwrite, then
2530 // check that the range is valid.
2531 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2532 let range = range_from_lsp(edit.range);
2533 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2534 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2535 if start != range.start || end != range.end {
2536 log::info!("completion out of expected range");
2537 return None;
2538 }
2539 (
2540 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2541 edit.new_text.clone(),
2542 )
2543 }
2544 // If the language server does not provide a range, then infer
2545 // the range based on the syntax tree.
2546 None => {
2547 if position != clipped_position {
2548 log::info!("completion out of expected range");
2549 return None;
2550 }
2551 let Range { start, end } = range_for_token
2552 .get_or_insert_with(|| {
2553 let offset = position.to_offset(&snapshot);
2554 snapshot
2555 .range_for_word_token_at(offset)
2556 .unwrap_or_else(|| offset..offset)
2557 })
2558 .clone();
2559 let text = lsp_completion
2560 .insert_text
2561 .as_ref()
2562 .unwrap_or(&lsp_completion.label)
2563 .clone();
2564 (
2565 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2566 text.clone(),
2567 )
2568 }
2569 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2570 log::info!("unsupported insert/replace completion");
2571 return None;
2572 }
2573 };
2574
2575 Some(Completion {
2576 old_range,
2577 new_text,
2578 label: language
2579 .as_ref()
2580 .and_then(|l| l.label_for_completion(&lsp_completion))
2581 .unwrap_or_else(|| {
2582 CodeLabel::plain(
2583 lsp_completion.label.clone(),
2584 lsp_completion.filter_text.as_deref(),
2585 )
2586 }),
2587 lsp_completion,
2588 })
2589 })
2590 .collect())
2591 })
2592 })
2593 } else if let Some(project_id) = self.remote_id() {
2594 let rpc = self.client.clone();
2595 let message = proto::GetCompletions {
2596 project_id,
2597 buffer_id,
2598 position: Some(language::proto::serialize_anchor(&anchor)),
2599 version: serialize_version(&source_buffer.version()),
2600 };
2601 cx.spawn_weak(|_, mut cx| async move {
2602 let response = rpc.request(message).await?;
2603
2604 source_buffer_handle
2605 .update(&mut cx, |buffer, _| {
2606 buffer.wait_for_version(deserialize_version(response.version))
2607 })
2608 .await;
2609
2610 response
2611 .completions
2612 .into_iter()
2613 .map(|completion| {
2614 language::proto::deserialize_completion(completion, language.as_ref())
2615 })
2616 .collect()
2617 })
2618 } else {
2619 Task::ready(Ok(Default::default()))
2620 }
2621 }
2622
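    /// Resolves a completion item and applies any additional text edits it carries,
    /// returning the resulting transaction, if one was created.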
2623 pub fn apply_additional_edits_for_completion(
2624 &self,
2625 buffer_handle: ModelHandle<Buffer>,
2626 completion: Completion,
2627 push_to_history: bool,
2628 cx: &mut ModelContext<Self>,
2629 ) -> Task<Result<Option<Transaction>>> {
2630 let buffer = buffer_handle.read(cx);
2631 let buffer_id = buffer.remote_id();
2632
2633 if self.is_local() {
2634 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2635 {
2636 server.clone()
2637 } else {
2638 return Task::ready(Ok(Default::default()));
2639 };
2640
2641 cx.spawn(|this, mut cx| async move {
2642 let resolved_completion = lang_server
2643 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2644 .await?;
2645 if let Some(edits) = resolved_completion.additional_text_edits {
2646 let edits = this
2647 .update(&mut cx, |this, cx| {
2648 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2649 })
2650 .await?;
2651 buffer_handle.update(&mut cx, |buffer, cx| {
2652 buffer.finalize_last_transaction();
2653 buffer.start_transaction();
2654 for (range, text) in edits {
2655 buffer.edit([(range, text)], cx);
2656 }
2657 let transaction = if buffer.end_transaction(cx).is_some() {
2658 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2659 if !push_to_history {
2660 buffer.forget_transaction(transaction.id);
2661 }
2662 Some(transaction)
2663 } else {
2664 None
2665 };
2666 Ok(transaction)
2667 })
2668 } else {
2669 Ok(None)
2670 }
2671 })
2672 } else if let Some(project_id) = self.remote_id() {
2673 let client = self.client.clone();
2674 cx.spawn(|_, mut cx| async move {
2675 let response = client
2676 .request(proto::ApplyCompletionAdditionalEdits {
2677 project_id,
2678 buffer_id,
2679 completion: Some(language::proto::serialize_completion(&completion)),
2680 })
2681 .await?;
2682
2683 if let Some(transaction) = response.transaction {
2684 let transaction = language::proto::deserialize_transaction(transaction)?;
2685 buffer_handle
2686 .update(&mut cx, |buffer, _| {
2687 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2688 })
2689 .await;
2690 if push_to_history {
2691 buffer_handle.update(&mut cx, |buffer, _| {
2692 buffer.push_transaction(transaction.clone(), Instant::now());
2693 });
2694 }
2695 Ok(Some(transaction))
2696 } else {
2697 Ok(None)
2698 }
2699 })
2700 } else {
2701 Task::ready(Err(anyhow!("project does not have a remote id")))
2702 }
2703 }
2704
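    /// Requests code actions for the given range, passing along the diagnostics that
    /// overlap it as context.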
2705 pub fn code_actions<T: Clone + ToOffset>(
2706 &self,
2707 buffer_handle: &ModelHandle<Buffer>,
2708 range: Range<T>,
2709 cx: &mut ModelContext<Self>,
2710 ) -> Task<Result<Vec<CodeAction>>> {
2711 let buffer_handle = buffer_handle.clone();
2712 let buffer = buffer_handle.read(cx);
2713 let snapshot = buffer.snapshot();
2714 let relevant_diagnostics = snapshot
2715 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2716 .map(|entry| entry.to_lsp_diagnostic_stub())
2717 .collect();
2718 let buffer_id = buffer.remote_id();
2719 let worktree;
2720 let buffer_abs_path;
2721 if let Some(file) = File::from_dyn(buffer.file()) {
2722 worktree = file.worktree.clone();
2723 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2724 } else {
2725 return Task::ready(Ok(Default::default()));
2726 };
2727 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2728
2729 if worktree.read(cx).as_local().is_some() {
2730 let buffer_abs_path = buffer_abs_path.unwrap();
2731 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2732 {
2733 server.clone()
2734 } else {
2735 return Task::ready(Ok(Default::default()));
2736 };
2737
2738 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2739 cx.foreground().spawn(async move {
2740                 if lang_server.capabilities().code_action_provider.is_none() {
2741 return Ok(Default::default());
2742 }
2743
2744 Ok(lang_server
2745 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2746 text_document: lsp::TextDocumentIdentifier::new(
2747 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2748 ),
2749 range: lsp_range,
2750 work_done_progress_params: Default::default(),
2751 partial_result_params: Default::default(),
2752 context: lsp::CodeActionContext {
2753 diagnostics: relevant_diagnostics,
2754 only: Some(vec![
2755 lsp::CodeActionKind::QUICKFIX,
2756 lsp::CodeActionKind::REFACTOR,
2757 lsp::CodeActionKind::REFACTOR_EXTRACT,
2758 lsp::CodeActionKind::SOURCE,
2759 ]),
2760 },
2761 })
2762 .await?
2763 .unwrap_or_default()
2764 .into_iter()
2765 .filter_map(|entry| {
2766 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2767 Some(CodeAction {
2768 range: range.clone(),
2769 lsp_action,
2770 })
2771 } else {
2772 None
2773 }
2774 })
2775 .collect())
2776 })
2777 } else if let Some(project_id) = self.remote_id() {
2778 let rpc = self.client.clone();
2779 let version = buffer.version();
2780 cx.spawn_weak(|_, mut cx| async move {
2781 let response = rpc
2782 .request(proto::GetCodeActions {
2783 project_id,
2784 buffer_id,
2785 start: Some(language::proto::serialize_anchor(&range.start)),
2786 end: Some(language::proto::serialize_anchor(&range.end)),
2787 version: serialize_version(&version),
2788 })
2789 .await?;
2790
2791 buffer_handle
2792 .update(&mut cx, |buffer, _| {
2793 buffer.wait_for_version(deserialize_version(response.version))
2794 })
2795 .await;
2796
2797 response
2798 .actions
2799 .into_iter()
2800 .map(language::proto::deserialize_code_action)
2801 .collect()
2802 })
2803 } else {
2804 Task::ready(Ok(Default::default()))
2805 }
2806 }
2807
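    /// Applies a code action, resolving it with the language server when necessary and then
    /// executing either its workspace edit or its command.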
2808 pub fn apply_code_action(
2809 &self,
2810 buffer_handle: ModelHandle<Buffer>,
2811 mut action: CodeAction,
2812 push_to_history: bool,
2813 cx: &mut ModelContext<Self>,
2814 ) -> Task<Result<ProjectTransaction>> {
2815 if self.is_local() {
2816 let buffer = buffer_handle.read(cx);
2817 let (lsp_adapter, lang_server) =
2818 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2819 server.clone()
2820 } else {
2821 return Task::ready(Ok(Default::default()));
2822 };
2823 let range = action.range.to_point_utf16(buffer);
2824
2825 cx.spawn(|this, mut cx| async move {
2826 if let Some(lsp_range) = action
2827 .lsp_action
2828 .data
2829 .as_mut()
2830 .and_then(|d| d.get_mut("codeActionParams"))
2831 .and_then(|d| d.get_mut("range"))
2832 {
2833 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2834 action.lsp_action = lang_server
2835 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2836 .await?;
2837 } else {
2838 let actions = this
2839 .update(&mut cx, |this, cx| {
2840 this.code_actions(&buffer_handle, action.range, cx)
2841 })
2842 .await?;
2843 action.lsp_action = actions
2844 .into_iter()
2845 .find(|a| a.lsp_action.title == action.lsp_action.title)
2846 .ok_or_else(|| anyhow!("code action is outdated"))?
2847 .lsp_action;
2848 }
2849
2850 if let Some(edit) = action.lsp_action.edit {
2851 Self::deserialize_workspace_edit(
2852 this,
2853 edit,
2854 push_to_history,
2855 lsp_adapter,
2856 lang_server,
2857 &mut cx,
2858 )
2859 .await
2860 } else if let Some(command) = action.lsp_action.command {
2861 this.update(&mut cx, |this, _| {
2862 this.last_workspace_edits_by_language_server
2863 .remove(&lang_server.server_id());
2864 });
2865 lang_server
2866 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2867 command: command.command,
2868 arguments: command.arguments.unwrap_or_default(),
2869 ..Default::default()
2870 })
2871 .await?;
2872 Ok(this.update(&mut cx, |this, _| {
2873 this.last_workspace_edits_by_language_server
2874 .remove(&lang_server.server_id())
2875 .unwrap_or_default()
2876 }))
2877 } else {
2878 Ok(ProjectTransaction::default())
2879 }
2880 })
2881 } else if let Some(project_id) = self.remote_id() {
2882 let client = self.client.clone();
2883 let request = proto::ApplyCodeAction {
2884 project_id,
2885 buffer_id: buffer_handle.read(cx).remote_id(),
2886 action: Some(language::proto::serialize_code_action(&action)),
2887 };
2888 cx.spawn(|this, mut cx| async move {
2889 let response = client
2890 .request(request)
2891 .await?
2892 .transaction
2893 .ok_or_else(|| anyhow!("missing transaction"))?;
2894 this.update(&mut cx, |this, cx| {
2895 this.deserialize_project_transaction(response, push_to_history, cx)
2896 })
2897 .await
2898 })
2899 } else {
2900 Task::ready(Err(anyhow!("project does not have a remote id")))
2901 }
2902 }
2903
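    /// Converts an LSP workspace edit into buffer transactions, performing any file create,
    /// rename, and delete operations it contains along the way.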
2904 async fn deserialize_workspace_edit(
2905 this: ModelHandle<Self>,
2906 edit: lsp::WorkspaceEdit,
2907 push_to_history: bool,
2908 lsp_adapter: Arc<dyn LspAdapter>,
2909 language_server: Arc<LanguageServer>,
2910 cx: &mut AsyncAppContext,
2911 ) -> Result<ProjectTransaction> {
2912 let fs = this.read_with(cx, |this, _| this.fs.clone());
2913 let mut operations = Vec::new();
2914 if let Some(document_changes) = edit.document_changes {
2915 match document_changes {
2916 lsp::DocumentChanges::Edits(edits) => {
2917 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2918 }
2919 lsp::DocumentChanges::Operations(ops) => operations = ops,
2920 }
2921 } else if let Some(changes) = edit.changes {
2922 operations.extend(changes.into_iter().map(|(uri, edits)| {
2923 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2924 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2925 uri,
2926 version: None,
2927 },
2928 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2929 })
2930 }));
2931 }
2932
2933 let mut project_transaction = ProjectTransaction::default();
2934 for operation in operations {
2935 match operation {
2936 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2937 let abs_path = op
2938 .uri
2939 .to_file_path()
2940 .map_err(|_| anyhow!("can't convert URI to path"))?;
2941
2942 if let Some(parent_path) = abs_path.parent() {
2943 fs.create_dir(parent_path).await?;
2944 }
2945 if abs_path.ends_with("/") {
2946 fs.create_dir(&abs_path).await?;
2947 } else {
2948 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2949 .await?;
2950 }
2951 }
2952 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2953 let source_abs_path = op
2954 .old_uri
2955 .to_file_path()
2956 .map_err(|_| anyhow!("can't convert URI to path"))?;
2957 let target_abs_path = op
2958 .new_uri
2959 .to_file_path()
2960 .map_err(|_| anyhow!("can't convert URI to path"))?;
2961 fs.rename(
2962 &source_abs_path,
2963 &target_abs_path,
2964 op.options.map(Into::into).unwrap_or_default(),
2965 )
2966 .await?;
2967 }
2968 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2969 let abs_path = op
2970 .uri
2971 .to_file_path()
2972 .map_err(|_| anyhow!("can't convert URI to path"))?;
2973 let options = op.options.map(Into::into).unwrap_or_default();
2974 if abs_path.ends_with("/") {
2975 fs.remove_dir(&abs_path, options).await?;
2976 } else {
2977 fs.remove_file(&abs_path, options).await?;
2978 }
2979 }
2980 lsp::DocumentChangeOperation::Edit(op) => {
2981 let buffer_to_edit = this
2982 .update(cx, |this, cx| {
2983 this.open_local_buffer_via_lsp(
2984 op.text_document.uri,
2985 lsp_adapter.clone(),
2986 language_server.clone(),
2987 cx,
2988 )
2989 })
2990 .await?;
2991
2992 let edits = this
2993 .update(cx, |this, cx| {
2994 let edits = op.edits.into_iter().map(|edit| match edit {
2995 lsp::OneOf::Left(edit) => edit,
2996 lsp::OneOf::Right(edit) => edit.text_edit,
2997 });
2998 this.edits_from_lsp(
2999 &buffer_to_edit,
3000 edits,
3001 op.text_document.version,
3002 cx,
3003 )
3004 })
3005 .await?;
3006
3007 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3008 buffer.finalize_last_transaction();
3009 buffer.start_transaction();
3010 for (range, text) in edits {
3011 buffer.edit([(range, text)], cx);
3012 }
3013 let transaction = if buffer.end_transaction(cx).is_some() {
3014 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3015 if !push_to_history {
3016 buffer.forget_transaction(transaction.id);
3017 }
3018 Some(transaction)
3019 } else {
3020 None
3021 };
3022
3023 transaction
3024 });
3025 if let Some(transaction) = transaction {
3026 project_transaction.0.insert(buffer_to_edit, transaction);
3027 }
3028 }
3029 }
3030 }
3031
3032 Ok(project_transaction)
3033 }
3034
3035 pub fn prepare_rename<T: ToPointUtf16>(
3036 &self,
3037 buffer: ModelHandle<Buffer>,
3038 position: T,
3039 cx: &mut ModelContext<Self>,
3040 ) -> Task<Result<Option<Range<Anchor>>>> {
3041 let position = position.to_point_utf16(buffer.read(cx));
3042 self.request_lsp(buffer, PrepareRename { position }, cx)
3043 }
3044
3045 pub fn perform_rename<T: ToPointUtf16>(
3046 &self,
3047 buffer: ModelHandle<Buffer>,
3048 position: T,
3049 new_name: String,
3050 push_to_history: bool,
3051 cx: &mut ModelContext<Self>,
3052 ) -> Task<Result<ProjectTransaction>> {
3053 let position = position.to_point_utf16(buffer.read(cx));
3054 self.request_lsp(
3055 buffer,
3056 PerformRename {
3057 position,
3058 new_name,
3059 push_to_history,
3060 },
3061 cx,
3062 )
3063 }
3064
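    /// Searches the project for the given query, scanning visible worktrees on background
    /// threads locally or forwarding the query to the host for remote projects.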
3065 pub fn search(
3066 &self,
3067 query: SearchQuery,
3068 cx: &mut ModelContext<Self>,
3069 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3070 if self.is_local() {
3071 let snapshots = self
3072 .visible_worktrees(cx)
3073 .filter_map(|tree| {
3074 let tree = tree.read(cx).as_local()?;
3075 Some(tree.snapshot())
3076 })
3077 .collect::<Vec<_>>();
3078
3079 let background = cx.background().clone();
3080 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3081 if path_count == 0 {
3082 return Task::ready(Ok(Default::default()));
3083 }
3084 let workers = background.num_cpus().min(path_count);
3085 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3086 cx.background()
3087 .spawn({
3088 let fs = self.fs.clone();
3089 let background = cx.background().clone();
3090 let query = query.clone();
3091 async move {
3092 let fs = &fs;
3093 let query = &query;
3094 let matching_paths_tx = &matching_paths_tx;
3095 let paths_per_worker = (path_count + workers - 1) / workers;
3096 let snapshots = &snapshots;
3097 background
3098 .scoped(|scope| {
3099 for worker_ix in 0..workers {
3100 let worker_start_ix = worker_ix * paths_per_worker;
3101 let worker_end_ix = worker_start_ix + paths_per_worker;
3102 scope.spawn(async move {
3103 let mut snapshot_start_ix = 0;
3104 let mut abs_path = PathBuf::new();
3105 for snapshot in snapshots {
3106 let snapshot_end_ix =
3107 snapshot_start_ix + snapshot.visible_file_count();
3108 if worker_end_ix <= snapshot_start_ix {
3109 break;
3110 } else if worker_start_ix > snapshot_end_ix {
3111 snapshot_start_ix = snapshot_end_ix;
3112 continue;
3113 } else {
3114 let start_in_snapshot = worker_start_ix
3115 .saturating_sub(snapshot_start_ix);
3116 let end_in_snapshot =
3117 cmp::min(worker_end_ix, snapshot_end_ix)
3118 - snapshot_start_ix;
3119
3120 for entry in snapshot
3121 .files(false, start_in_snapshot)
3122 .take(end_in_snapshot - start_in_snapshot)
3123 {
3124 if matching_paths_tx.is_closed() {
3125 break;
3126 }
3127
3128 abs_path.clear();
3129 abs_path.push(&snapshot.abs_path());
3130 abs_path.push(&entry.path);
3131 let matches = if let Some(file) =
3132 fs.open_sync(&abs_path).await.log_err()
3133 {
3134 query.detect(file).unwrap_or(false)
3135 } else {
3136 false
3137 };
3138
3139 if matches {
3140 let project_path =
3141 (snapshot.id(), entry.path.clone());
3142 if matching_paths_tx
3143 .send(project_path)
3144 .await
3145 .is_err()
3146 {
3147 break;
3148 }
3149 }
3150 }
3151
3152 snapshot_start_ix = snapshot_end_ix;
3153 }
3154 }
3155 });
3156 }
3157 })
3158 .await;
3159 }
3160 })
3161 .detach();
3162
3163 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3164 let open_buffers = self
3165 .opened_buffers
3166 .values()
3167 .filter_map(|b| b.upgrade(cx))
3168 .collect::<HashSet<_>>();
3169 cx.spawn(|this, cx| async move {
3170 for buffer in &open_buffers {
3171 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3172 buffers_tx.send((buffer.clone(), snapshot)).await?;
3173 }
3174
3175 let open_buffers = Rc::new(RefCell::new(open_buffers));
3176 while let Some(project_path) = matching_paths_rx.next().await {
3177 if buffers_tx.is_closed() {
3178 break;
3179 }
3180
3181 let this = this.clone();
3182 let open_buffers = open_buffers.clone();
3183 let buffers_tx = buffers_tx.clone();
3184 cx.spawn(|mut cx| async move {
3185 if let Some(buffer) = this
3186 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3187 .await
3188 .log_err()
3189 {
3190 if open_buffers.borrow_mut().insert(buffer.clone()) {
3191 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3192 buffers_tx.send((buffer, snapshot)).await?;
3193 }
3194 }
3195
3196 Ok::<_, anyhow::Error>(())
3197 })
3198 .detach();
3199 }
3200
3201 Ok::<_, anyhow::Error>(())
3202 })
3203 .detach_and_log_err(cx);
3204
3205 let background = cx.background().clone();
3206 cx.background().spawn(async move {
3207 let query = &query;
3208 let mut matched_buffers = Vec::new();
3209 for _ in 0..workers {
3210 matched_buffers.push(HashMap::default());
3211 }
3212 background
3213 .scoped(|scope| {
3214 for worker_matched_buffers in matched_buffers.iter_mut() {
3215 let mut buffers_rx = buffers_rx.clone();
3216 scope.spawn(async move {
3217 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3218 let buffer_matches = query
3219 .search(snapshot.as_rope())
3220 .await
3221 .iter()
3222 .map(|range| {
3223 snapshot.anchor_before(range.start)
3224 ..snapshot.anchor_after(range.end)
3225 })
3226 .collect::<Vec<_>>();
3227 if !buffer_matches.is_empty() {
3228 worker_matched_buffers
3229 .insert(buffer.clone(), buffer_matches);
3230 }
3231 }
3232 });
3233 }
3234 })
3235 .await;
3236 Ok(matched_buffers.into_iter().flatten().collect())
3237 })
3238 } else if let Some(project_id) = self.remote_id() {
3239 let request = self.client.request(query.to_proto(project_id));
3240 cx.spawn(|this, mut cx| async move {
3241 let response = request.await?;
3242 let mut result = HashMap::default();
3243 for location in response.locations {
3244 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3245 let target_buffer = this
3246 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3247 .await?;
3248 let start = location
3249 .start
3250 .and_then(deserialize_anchor)
3251 .ok_or_else(|| anyhow!("missing target start"))?;
3252 let end = location
3253 .end
3254 .and_then(deserialize_anchor)
3255 .ok_or_else(|| anyhow!("missing target end"))?;
3256 result
3257 .entry(target_buffer)
3258 .or_insert(Vec::new())
3259 .push(start..end)
3260 }
3261 Ok(result)
3262 })
3263 } else {
3264 Task::ready(Ok(Default::default()))
3265 }
3266 }
3267
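    /// Issues a typed LSP request for the given buffer, or proxies it over RPC when the
    /// project is remote.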
3268 fn request_lsp<R: LspCommand>(
3269 &self,
3270 buffer_handle: ModelHandle<Buffer>,
3271 request: R,
3272 cx: &mut ModelContext<Self>,
3273 ) -> Task<Result<R::Response>>
3274 where
3275 <R::LspRequest as lsp::request::Request>::Result: Send,
3276 {
3277 let buffer = buffer_handle.read(cx);
3278 if self.is_local() {
3279 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3280 if let Some((file, (_, language_server))) =
3281 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3282 {
3283 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3284 return cx.spawn(|this, cx| async move {
3285 if !request.check_capabilities(&language_server.capabilities()) {
3286 return Ok(Default::default());
3287 }
3288
3289 let response = language_server
3290 .request::<R::LspRequest>(lsp_params)
3291 .await
3292 .context("lsp request failed")?;
3293 request
3294 .response_from_lsp(response, this, buffer_handle, cx)
3295 .await
3296 });
3297 }
3298 } else if let Some(project_id) = self.remote_id() {
3299 let rpc = self.client.clone();
3300 let message = request.to_proto(project_id, buffer);
3301 return cx.spawn(|this, cx| async move {
3302 let response = rpc.request(message).await?;
3303 request
3304 .response_from_proto(response, this, buffer_handle, cx)
3305 .await
3306 });
3307 }
3308 Task::ready(Ok(Default::default()))
3309 }
3310
3311 pub fn find_or_create_local_worktree(
3312 &mut self,
3313 abs_path: impl AsRef<Path>,
3314 visible: bool,
3315 cx: &mut ModelContext<Self>,
3316 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3317 let abs_path = abs_path.as_ref();
3318 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3319 Task::ready(Ok((tree.clone(), relative_path.into())))
3320 } else {
3321 let worktree = self.create_local_worktree(abs_path, visible, cx);
3322 cx.foreground()
3323 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3324 }
3325 }
3326
3327 pub fn find_local_worktree(
3328 &self,
3329 abs_path: &Path,
3330 cx: &AppContext,
3331 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3332 for tree in self.worktrees(cx) {
3333 if let Some(relative_path) = tree
3334 .read(cx)
3335 .as_local()
3336 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3337 {
3338 return Some((tree.clone(), relative_path.into()));
3339 }
3340 }
3341 None
3342 }
3343
3344 pub fn is_shared(&self) -> bool {
3345 match &self.client_state {
3346 ProjectClientState::Local { is_shared, .. } => *is_shared,
3347 ProjectClientState::Remote { .. } => false,
3348 }
3349 }
3350
3351 fn create_local_worktree(
3352 &mut self,
3353 abs_path: impl AsRef<Path>,
3354 visible: bool,
3355 cx: &mut ModelContext<Self>,
3356 ) -> Task<Result<ModelHandle<Worktree>>> {
3357 let fs = self.fs.clone();
3358 let client = self.client.clone();
3359 let next_entry_id = self.next_entry_id.clone();
3360 let path: Arc<Path> = abs_path.as_ref().into();
3361 let task = self
3362 .loading_local_worktrees
3363 .entry(path.clone())
3364 .or_insert_with(|| {
3365 cx.spawn(|project, mut cx| {
3366 async move {
3367 let worktree = Worktree::local(
3368 client.clone(),
3369 path.clone(),
3370 visible,
3371 fs,
3372 next_entry_id,
3373 &mut cx,
3374 )
3375 .await;
3376 project.update(&mut cx, |project, _| {
3377 project.loading_local_worktrees.remove(&path);
3378 });
3379 let worktree = worktree?;
3380
3381 let (remote_project_id, is_shared) =
3382 project.update(&mut cx, |project, cx| {
3383 project.add_worktree(&worktree, cx);
3384 (project.remote_id(), project.is_shared())
3385 });
3386
3387 if let Some(project_id) = remote_project_id {
3388 if is_shared {
3389 worktree
3390 .update(&mut cx, |worktree, cx| {
3391 worktree.as_local_mut().unwrap().share(project_id, cx)
3392 })
3393 .await?;
3394 } else {
3395 worktree
3396 .update(&mut cx, |worktree, cx| {
3397 worktree.as_local_mut().unwrap().register(project_id, cx)
3398 })
3399 .await?;
3400 }
3401 }
3402
3403 Ok(worktree)
3404 }
3405                     .map_err(Arc::new)
3406 })
3407 .shared()
3408 })
3409 .clone();
3410 cx.foreground().spawn(async move {
3411 match task.await {
3412 Ok(worktree) => Ok(worktree),
3413 Err(err) => Err(anyhow!("{}", err)),
3414 }
3415 })
3416 }
3417
3418 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3419 self.worktrees.retain(|worktree| {
3420 worktree
3421 .upgrade(cx)
3422 .map_or(false, |w| w.read(cx).id() != id)
3423 });
3424 cx.notify();
3425 }
3426
3427 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3428 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3429 if worktree.read(cx).is_local() {
3430 cx.subscribe(&worktree, |this, worktree, _, cx| {
3431 this.update_local_worktree_buffers(worktree, cx);
3432 })
3433 .detach();
3434 }
3435
3436 let push_strong_handle = {
3437 let worktree = worktree.read(cx);
3438 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3439 };
3440 if push_strong_handle {
3441 self.worktrees
3442 .push(WorktreeHandle::Strong(worktree.clone()));
3443 } else {
3444 cx.observe_release(&worktree, |this, _, cx| {
3445 this.worktrees
3446 .retain(|worktree| worktree.upgrade(cx).is_some());
3447 cx.notify();
3448 })
3449 .detach();
3450 self.worktrees
3451 .push(WorktreeHandle::Weak(worktree.downgrade()));
3452 }
3453 cx.notify();
3454 }
3455
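    /// Reconciles open buffers with a local worktree's latest snapshot, updating each
    /// buffer's file and re-registering buffers whose paths have changed.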
3456 fn update_local_worktree_buffers(
3457 &mut self,
3458 worktree_handle: ModelHandle<Worktree>,
3459 cx: &mut ModelContext<Self>,
3460 ) {
3461 let snapshot = worktree_handle.read(cx).snapshot();
3462 let mut buffers_to_delete = Vec::new();
3463 let mut renamed_buffers = Vec::new();
3464 for (buffer_id, buffer) in &self.opened_buffers {
3465 if let Some(buffer) = buffer.upgrade(cx) {
3466 buffer.update(cx, |buffer, cx| {
3467 if let Some(old_file) = File::from_dyn(buffer.file()) {
3468 if old_file.worktree != worktree_handle {
3469 return;
3470 }
3471
3472 let new_file = if let Some(entry) = old_file
3473 .entry_id
3474 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3475 {
3476 File {
3477 is_local: true,
3478 entry_id: Some(entry.id),
3479 mtime: entry.mtime,
3480 path: entry.path.clone(),
3481 worktree: worktree_handle.clone(),
3482 }
3483 } else if let Some(entry) =
3484 snapshot.entry_for_path(old_file.path().as_ref())
3485 {
3486 File {
3487 is_local: true,
3488 entry_id: Some(entry.id),
3489 mtime: entry.mtime,
3490 path: entry.path.clone(),
3491 worktree: worktree_handle.clone(),
3492 }
3493 } else {
3494 File {
3495 is_local: true,
3496 entry_id: None,
3497 path: old_file.path().clone(),
3498 mtime: old_file.mtime(),
3499 worktree: worktree_handle.clone(),
3500 }
3501 };
3502
3503 let old_path = old_file.abs_path(cx);
3504 if new_file.abs_path(cx) != old_path {
3505 renamed_buffers.push((cx.handle(), old_path));
3506 }
3507
3508 if let Some(project_id) = self.remote_id() {
3509 self.client
3510 .send(proto::UpdateBufferFile {
3511 project_id,
3512 buffer_id: *buffer_id as u64,
3513 file: Some(new_file.to_proto()),
3514 })
3515 .log_err();
3516 }
3517 buffer.file_updated(Box::new(new_file), cx).detach();
3518 }
3519 });
3520 } else {
3521 buffers_to_delete.push(*buffer_id);
3522 }
3523 }
3524
3525 for buffer_id in buffers_to_delete {
3526 self.opened_buffers.remove(&buffer_id);
3527 }
3528
3529 for (buffer, old_path) in renamed_buffers {
3530 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3531 self.assign_language_to_buffer(&buffer, cx);
3532 self.register_buffer_with_language_server(&buffer, cx);
3533 }
3534 }
3535
3536 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3537 let new_active_entry = entry.and_then(|project_path| {
3538 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3539 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3540 Some(entry.id)
3541 });
3542 if new_active_entry != self.active_entry {
3543 self.active_entry = new_active_entry;
3544 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3545 }
3546 }
3547
3548 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3549 self.language_server_statuses
3550 .values()
3551 .any(|status| status.pending_diagnostic_updates > 0)
3552 }
3553
3554 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3555 let mut summary = DiagnosticSummary::default();
3556 for (_, path_summary) in self.diagnostic_summaries(cx) {
3557 summary.error_count += path_summary.error_count;
3558 summary.warning_count += path_summary.warning_count;
3559 }
3560 summary
3561 }
3562
3563 pub fn diagnostic_summaries<'a>(
3564 &'a self,
3565 cx: &'a AppContext,
3566 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3567 self.worktrees(cx).flat_map(move |worktree| {
3568 let worktree = worktree.read(cx);
3569 let worktree_id = worktree.id();
3570 worktree
3571 .diagnostic_summaries()
3572 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3573 })
3574 }
3575
3576 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3577 if self
3578 .language_server_statuses
3579 .values()
3580 .map(|status| status.pending_diagnostic_updates)
3581 .sum::<isize>()
3582 == 1
3583 {
3584 cx.emit(Event::DiskBasedDiagnosticsStarted);
3585 }
3586 }
3587
3588 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3589 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3590 if self
3591 .language_server_statuses
3592 .values()
3593 .map(|status| status.pending_diagnostic_updates)
3594 .sum::<isize>()
3595 == 0
3596 {
3597 cx.emit(Event::DiskBasedDiagnosticsFinished);
3598 }
3599 }
3600
3601 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3602 self.active_entry
3603 }
3604
3605 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3606 self.worktree_for_id(path.worktree_id, cx)?
3607 .read(cx)
3608 .entry_for_path(&path.path)
3609 .map(|entry| entry.id)
3610 }
3611
3612 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3613 let worktree = self.worktree_for_entry(entry_id, cx)?;
3614 let worktree = worktree.read(cx);
3615 let worktree_id = worktree.id();
3616 let path = worktree.entry_for_id(entry_id)?.path.clone();
3617 Some(ProjectPath { worktree_id, path })
3618 }
3619
3620 // RPC message handlers
3621
3622 async fn handle_unshare_project(
3623 this: ModelHandle<Self>,
3624 _: TypedEnvelope<proto::UnshareProject>,
3625 _: Arc<Client>,
3626 mut cx: AsyncAppContext,
3627 ) -> Result<()> {
3628 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3629 Ok(())
3630 }
3631
3632 async fn handle_add_collaborator(
3633 this: ModelHandle<Self>,
3634 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3635 _: Arc<Client>,
3636 mut cx: AsyncAppContext,
3637 ) -> Result<()> {
3638 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3639 let collaborator = envelope
3640 .payload
3641 .collaborator
3642 .take()
3643 .ok_or_else(|| anyhow!("empty collaborator"))?;
3644
3645 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3646 this.update(&mut cx, |this, cx| {
3647 this.collaborators
3648 .insert(collaborator.peer_id, collaborator);
3649 cx.notify();
3650 });
3651
3652 Ok(())
3653 }
3654
3655 async fn handle_remove_collaborator(
3656 this: ModelHandle<Self>,
3657 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3658 _: Arc<Client>,
3659 mut cx: AsyncAppContext,
3660 ) -> Result<()> {
3661 this.update(&mut cx, |this, cx| {
3662 let peer_id = PeerId(envelope.payload.peer_id);
3663 let replica_id = this
3664 .collaborators
3665 .remove(&peer_id)
3666 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3667 .replica_id;
3668 for (_, buffer) in &this.opened_buffers {
3669 if let Some(buffer) = buffer.upgrade(cx) {
3670 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3671 }
3672 }
3673 cx.emit(Event::CollaboratorLeft(peer_id));
3674 cx.notify();
3675 Ok(())
3676 })
3677 }
3678
3679 async fn handle_register_worktree(
3680 this: ModelHandle<Self>,
3681 envelope: TypedEnvelope<proto::RegisterWorktree>,
3682 client: Arc<Client>,
3683 mut cx: AsyncAppContext,
3684 ) -> Result<()> {
3685 this.update(&mut cx, |this, cx| {
3686 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3687 let replica_id = this.replica_id();
3688 let worktree = proto::Worktree {
3689 id: envelope.payload.worktree_id,
3690 root_name: envelope.payload.root_name,
3691 entries: Default::default(),
3692 diagnostic_summaries: Default::default(),
3693 visible: envelope.payload.visible,
3694 };
3695 let (worktree, load_task) =
3696 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3697 this.add_worktree(&worktree, cx);
3698 load_task.detach();
3699 Ok(())
3700 })
3701 }
3702
3703 async fn handle_unregister_worktree(
3704 this: ModelHandle<Self>,
3705 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3706 _: Arc<Client>,
3707 mut cx: AsyncAppContext,
3708 ) -> Result<()> {
3709 this.update(&mut cx, |this, cx| {
3710 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3711 this.remove_worktree(worktree_id, cx);
3712 Ok(())
3713 })
3714 }
3715
3716 async fn handle_update_worktree(
3717 this: ModelHandle<Self>,
3718 envelope: TypedEnvelope<proto::UpdateWorktree>,
3719 _: Arc<Client>,
3720 mut cx: AsyncAppContext,
3721 ) -> Result<()> {
3722 this.update(&mut cx, |this, cx| {
3723 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3724 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3725 worktree.update(cx, |worktree, _| {
3726 let worktree = worktree.as_remote_mut().unwrap();
3727 worktree.update_from_remote(envelope)
3728 })?;
3729 }
3730 Ok(())
3731 })
3732 }
3733
3734 async fn handle_update_diagnostic_summary(
3735 this: ModelHandle<Self>,
3736 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3737 _: Arc<Client>,
3738 mut cx: AsyncAppContext,
3739 ) -> Result<()> {
3740 this.update(&mut cx, |this, cx| {
3741 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3742 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3743 if let Some(summary) = envelope.payload.summary {
3744 let project_path = ProjectPath {
3745 worktree_id,
3746 path: Path::new(&summary.path).into(),
3747 };
3748 worktree.update(cx, |worktree, _| {
3749 worktree
3750 .as_remote_mut()
3751 .unwrap()
3752 .update_diagnostic_summary(project_path.path.clone(), &summary);
3753 });
3754 cx.emit(Event::DiagnosticsUpdated(project_path));
3755 }
3756 }
3757 Ok(())
3758 })
3759 }
3760
3761 async fn handle_start_language_server(
3762 this: ModelHandle<Self>,
3763 envelope: TypedEnvelope<proto::StartLanguageServer>,
3764 _: Arc<Client>,
3765 mut cx: AsyncAppContext,
3766 ) -> Result<()> {
3767 let server = envelope
3768 .payload
3769 .server
3770 .ok_or_else(|| anyhow!("invalid server"))?;
3771 this.update(&mut cx, |this, cx| {
3772 this.language_server_statuses.insert(
3773 server.id as usize,
3774 LanguageServerStatus {
3775 name: server.name,
3776 pending_work: Default::default(),
3777 pending_diagnostic_updates: 0,
3778 },
3779 );
3780 cx.notify();
3781 });
3782 Ok(())
3783 }
3784
3785 async fn handle_update_language_server(
3786 this: ModelHandle<Self>,
3787 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3788 _: Arc<Client>,
3789 mut cx: AsyncAppContext,
3790 ) -> Result<()> {
3791 let language_server_id = envelope.payload.language_server_id as usize;
3792 match envelope
3793 .payload
3794 .variant
3795 .ok_or_else(|| anyhow!("invalid variant"))?
3796 {
3797 proto::update_language_server::Variant::WorkStart(payload) => {
3798 this.update(&mut cx, |this, cx| {
3799 this.on_lsp_work_start(language_server_id, payload.token, cx);
3800 })
3801 }
3802 proto::update_language_server::Variant::WorkProgress(payload) => {
3803 this.update(&mut cx, |this, cx| {
3804 this.on_lsp_work_progress(
3805 language_server_id,
3806 payload.token,
3807 LanguageServerProgress {
3808 message: payload.message,
3809 percentage: payload.percentage.map(|p| p as usize),
3810 last_update_at: Instant::now(),
3811 },
3812 cx,
3813 );
3814 })
3815 }
3816 proto::update_language_server::Variant::WorkEnd(payload) => {
3817 this.update(&mut cx, |this, cx| {
3818 this.on_lsp_work_end(language_server_id, payload.token, cx);
3819 })
3820 }
3821 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3822 this.update(&mut cx, |this, cx| {
3823 this.disk_based_diagnostics_started(cx);
3824 })
3825 }
3826 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3827 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3828 }
3829 }
3830
3831 Ok(())
3832 }
3833
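    /// Applies remote buffer operations. If the buffer is not open yet, the
    /// operations are queued in `OpenBuffer::Loading` and applied once it is.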
3834 async fn handle_update_buffer(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::UpdateBuffer>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<()> {
3840 this.update(&mut cx, |this, cx| {
3841 let payload = envelope.payload.clone();
3842 let buffer_id = payload.buffer_id;
3843 let ops = payload
3844 .operations
3845 .into_iter()
                .map(language::proto::deserialize_operation)
3847 .collect::<Result<Vec<_>, _>>()?;
3848 match this.opened_buffers.entry(buffer_id) {
3849 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3850 OpenBuffer::Strong(buffer) => {
3851 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3852 }
3853 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3854 OpenBuffer::Weak(_) => {}
3855 },
3856 hash_map::Entry::Vacant(e) => {
3857 e.insert(OpenBuffer::Loading(ops));
3858 }
3859 }
3860 Ok(())
3861 })
3862 }
3863
3864 async fn handle_update_buffer_file(
3865 this: ModelHandle<Self>,
3866 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3867 _: Arc<Client>,
3868 mut cx: AsyncAppContext,
3869 ) -> Result<()> {
3870 this.update(&mut cx, |this, cx| {
3871 let payload = envelope.payload.clone();
3872 let buffer_id = payload.buffer_id;
3873 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3874 let worktree = this
3875 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3876 .ok_or_else(|| anyhow!("no such worktree"))?;
3877 let file = File::from_proto(file, worktree.clone(), cx)?;
3878 let buffer = this
3879 .opened_buffers
3880 .get_mut(&buffer_id)
3881 .and_then(|b| b.upgrade(cx))
3882 .ok_or_else(|| anyhow!("no such buffer"))?;
3883 buffer.update(cx, |buffer, cx| {
3884 buffer.file_updated(Box::new(file), cx).detach();
3885 });
3886 Ok(())
3887 })
3888 }
3889
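    /// Saves a buffer on behalf of a remote peer: waits for the buffer to
    /// reach the requested version, writes it to disk, and replies with the
    /// saved version and mtime.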
3890 async fn handle_save_buffer(
3891 this: ModelHandle<Self>,
3892 envelope: TypedEnvelope<proto::SaveBuffer>,
3893 _: Arc<Client>,
3894 mut cx: AsyncAppContext,
3895 ) -> Result<proto::BufferSaved> {
3896 let buffer_id = envelope.payload.buffer_id;
3897 let requested_version = deserialize_version(envelope.payload.version);
3898
3899 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3900 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3901 let buffer = this
3902 .opened_buffers
3903 .get(&buffer_id)
3904 .and_then(|buffer| buffer.upgrade(cx))
3905 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3906 Ok::<_, anyhow::Error>((project_id, buffer))
3907 })?;
3908 buffer
3909 .update(&mut cx, |buffer, _| {
3910 buffer.wait_for_version(requested_version)
3911 })
3912 .await;
3913
3914 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3915 Ok(proto::BufferSaved {
3916 project_id,
3917 buffer_id,
3918 version: serialize_version(&saved_version),
3919 mtime: Some(mtime.into()),
3920 })
3921 }
3922
3923 async fn handle_reload_buffers(
3924 this: ModelHandle<Self>,
3925 envelope: TypedEnvelope<proto::ReloadBuffers>,
3926 _: Arc<Client>,
3927 mut cx: AsyncAppContext,
3928 ) -> Result<proto::ReloadBuffersResponse> {
3929 let sender_id = envelope.original_sender_id()?;
3930 let reload = this.update(&mut cx, |this, cx| {
3931 let mut buffers = HashSet::default();
3932 for buffer_id in &envelope.payload.buffer_ids {
3933 buffers.insert(
3934 this.opened_buffers
3935 .get(buffer_id)
3936 .and_then(|buffer| buffer.upgrade(cx))
3937 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3938 );
3939 }
3940 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3941 })?;
3942
3943 let project_transaction = reload.await?;
3944 let project_transaction = this.update(&mut cx, |this, cx| {
3945 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3946 });
3947 Ok(proto::ReloadBuffersResponse {
3948 transaction: Some(project_transaction),
3949 })
3950 }
3951
3952 async fn handle_format_buffers(
3953 this: ModelHandle<Self>,
3954 envelope: TypedEnvelope<proto::FormatBuffers>,
3955 _: Arc<Client>,
3956 mut cx: AsyncAppContext,
3957 ) -> Result<proto::FormatBuffersResponse> {
3958 let sender_id = envelope.original_sender_id()?;
3959 let format = this.update(&mut cx, |this, cx| {
3960 let mut buffers = HashSet::default();
3961 for buffer_id in &envelope.payload.buffer_ids {
3962 buffers.insert(
3963 this.opened_buffers
3964 .get(buffer_id)
3965 .and_then(|buffer| buffer.upgrade(cx))
3966 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3967 );
3968 }
3969 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3970 })?;
3971
3972 let project_transaction = format.await?;
3973 let project_transaction = this.update(&mut cx, |this, cx| {
3974 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3975 });
3976 Ok(proto::FormatBuffersResponse {
3977 transaction: Some(project_transaction),
3978 })
3979 }
3980
3981 async fn handle_get_completions(
3982 this: ModelHandle<Self>,
3983 envelope: TypedEnvelope<proto::GetCompletions>,
3984 _: Arc<Client>,
3985 mut cx: AsyncAppContext,
3986 ) -> Result<proto::GetCompletionsResponse> {
3987 let position = envelope
3988 .payload
3989 .position
3990 .and_then(language::proto::deserialize_anchor)
3991 .ok_or_else(|| anyhow!("invalid position"))?;
3992 let version = deserialize_version(envelope.payload.version);
3993 let buffer = this.read_with(&cx, |this, cx| {
3994 this.opened_buffers
3995 .get(&envelope.payload.buffer_id)
3996 .and_then(|buffer| buffer.upgrade(cx))
3997 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3998 })?;
3999 buffer
4000 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4001 .await;
4002 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4003 let completions = this
4004 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4005 .await?;
4006
4007 Ok(proto::GetCompletionsResponse {
4008 completions: completions
4009 .iter()
4010 .map(language::proto::serialize_completion)
4011 .collect(),
4012 version: serialize_version(&version),
4013 })
4014 }
4015
4016 async fn handle_apply_additional_edits_for_completion(
4017 this: ModelHandle<Self>,
4018 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4019 _: Arc<Client>,
4020 mut cx: AsyncAppContext,
4021 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4022 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4023 let buffer = this
4024 .opened_buffers
4025 .get(&envelope.payload.buffer_id)
4026 .and_then(|buffer| buffer.upgrade(cx))
4027 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4028 let language = buffer.read(cx).language();
4029 let completion = language::proto::deserialize_completion(
4030 envelope
4031 .payload
4032 .completion
4033 .ok_or_else(|| anyhow!("invalid completion"))?,
4034 language,
4035 )?;
4036 Ok::<_, anyhow::Error>(
4037 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4038 )
4039 })?;
4040
4041 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4042 transaction: apply_additional_edits
4043 .await?
4044 .as_ref()
4045 .map(language::proto::serialize_transaction),
4046 })
4047 }
4048
4049 async fn handle_get_code_actions(
4050 this: ModelHandle<Self>,
4051 envelope: TypedEnvelope<proto::GetCodeActions>,
4052 _: Arc<Client>,
4053 mut cx: AsyncAppContext,
4054 ) -> Result<proto::GetCodeActionsResponse> {
4055 let start = envelope
4056 .payload
4057 .start
4058 .and_then(language::proto::deserialize_anchor)
4059 .ok_or_else(|| anyhow!("invalid start"))?;
4060 let end = envelope
4061 .payload
4062 .end
4063 .and_then(language::proto::deserialize_anchor)
4064 .ok_or_else(|| anyhow!("invalid end"))?;
4065 let buffer = this.update(&mut cx, |this, cx| {
4066 this.opened_buffers
4067 .get(&envelope.payload.buffer_id)
4068 .and_then(|buffer| buffer.upgrade(cx))
4069 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4070 })?;
4071 buffer
4072 .update(&mut cx, |buffer, _| {
4073 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4074 })
4075 .await;
4076
4077 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4078 let code_actions = this.update(&mut cx, |this, cx| {
4079 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4080 })?;
4081
4082 Ok(proto::GetCodeActionsResponse {
4083 actions: code_actions
4084 .await?
4085 .iter()
4086 .map(language::proto::serialize_code_action)
4087 .collect(),
4088 version: serialize_version(&version),
4089 })
4090 }
4091
4092 async fn handle_apply_code_action(
4093 this: ModelHandle<Self>,
4094 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4095 _: Arc<Client>,
4096 mut cx: AsyncAppContext,
4097 ) -> Result<proto::ApplyCodeActionResponse> {
4098 let sender_id = envelope.original_sender_id()?;
4099 let action = language::proto::deserialize_code_action(
4100 envelope
4101 .payload
4102 .action
4103 .ok_or_else(|| anyhow!("invalid action"))?,
4104 )?;
4105 let apply_code_action = this.update(&mut cx, |this, cx| {
4106 let buffer = this
4107 .opened_buffers
4108 .get(&envelope.payload.buffer_id)
4109 .and_then(|buffer| buffer.upgrade(cx))
4110 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4111 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4112 })?;
4113
4114 let project_transaction = apply_code_action.await?;
4115 let project_transaction = this.update(&mut cx, |this, cx| {
4116 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4117 });
4118 Ok(proto::ApplyCodeActionResponse {
4119 transaction: Some(project_transaction),
4120 })
4121 }
4122
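    /// Generic handler for buffer-scoped LSP requests issued by remote peers.
    /// Resolves the buffer, deserializes the request, forwards it to the
    /// language server via `request_lsp`, and serializes the response for the
    /// requesting peer.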
4123 async fn handle_lsp_command<T: LspCommand>(
4124 this: ModelHandle<Self>,
4125 envelope: TypedEnvelope<T::ProtoRequest>,
4126 _: Arc<Client>,
4127 mut cx: AsyncAppContext,
4128 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4129 where
4130 <T::LspRequest as lsp::request::Request>::Result: Send,
4131 {
4132 let sender_id = envelope.original_sender_id()?;
4133 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4134 let buffer_handle = this.read_with(&cx, |this, _| {
4135 this.opened_buffers
4136 .get(&buffer_id)
4137 .and_then(|buffer| buffer.upgrade(&cx))
4138 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4139 })?;
4140 let request = T::from_proto(
4141 envelope.payload,
4142 this.clone(),
4143 buffer_handle.clone(),
4144 cx.clone(),
4145 )
4146 .await?;
4147 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4148 let response = this
4149 .update(&mut cx, |this, cx| {
4150 this.request_lsp(buffer_handle, request, cx)
4151 })
4152 .await?;
4153 this.update(&mut cx, |this, cx| {
4154 Ok(T::response_to_proto(
4155 response,
4156 this,
4157 sender_id,
4158 &buffer_version,
4159 cx,
4160 ))
4161 })
4162 }
4163
4164 async fn handle_get_project_symbols(
4165 this: ModelHandle<Self>,
4166 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4167 _: Arc<Client>,
4168 mut cx: AsyncAppContext,
4169 ) -> Result<proto::GetProjectSymbolsResponse> {
4170 let symbols = this
4171 .update(&mut cx, |this, cx| {
4172 this.symbols(&envelope.payload.query, cx)
4173 })
4174 .await?;
4175
4176 Ok(proto::GetProjectSymbolsResponse {
4177 symbols: symbols.iter().map(serialize_symbol).collect(),
4178 })
4179 }
4180
4181 async fn handle_search_project(
4182 this: ModelHandle<Self>,
4183 envelope: TypedEnvelope<proto::SearchProject>,
4184 _: Arc<Client>,
4185 mut cx: AsyncAppContext,
4186 ) -> Result<proto::SearchProjectResponse> {
4187 let peer_id = envelope.original_sender_id()?;
4188 let query = SearchQuery::from_proto(envelope.payload)?;
4189 let result = this
4190 .update(&mut cx, |this, cx| this.search(query, cx))
4191 .await?;
4192
4193 this.update(&mut cx, |this, cx| {
4194 let mut locations = Vec::new();
4195 for (buffer, ranges) in result {
4196 for range in ranges {
4197 let start = serialize_anchor(&range.start);
4198 let end = serialize_anchor(&range.end);
4199 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4200 locations.push(proto::Location {
4201 buffer: Some(buffer),
4202 start: Some(start),
4203 end: Some(end),
4204 });
4205 }
4206 }
4207 Ok(proto::SearchProjectResponse { locations })
4208 })
4209 }
4210
4211 async fn handle_open_buffer_for_symbol(
4212 this: ModelHandle<Self>,
4213 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4214 _: Arc<Client>,
4215 mut cx: AsyncAppContext,
4216 ) -> Result<proto::OpenBufferForSymbolResponse> {
4217 let peer_id = envelope.original_sender_id()?;
4218 let symbol = envelope
4219 .payload
4220 .symbol
4221 .ok_or_else(|| anyhow!("invalid symbol"))?;
4222 let symbol = this.read_with(&cx, |this, _| {
4223 let symbol = this.deserialize_symbol(symbol)?;
4224 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4225 if signature == symbol.signature {
4226 Ok(symbol)
4227 } else {
4228 Err(anyhow!("invalid symbol signature"))
4229 }
4230 })?;
4231 let buffer = this
4232 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4233 .await?;
4234
4235 Ok(proto::OpenBufferForSymbolResponse {
4236 buffer: Some(this.update(&mut cx, |this, cx| {
4237 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4238 })),
4239 })
4240 }
4241
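    /// Computes a SHA-256 digest over the worktree id, the symbol's path, and
    /// this project's nonce. Symbols handed out to peers carry this signature,
    /// and `handle_open_buffer_for_symbol` rejects symbols whose signature
    /// doesn't match, so peers can't fabricate symbols for arbitrary paths.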
4242 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4243 let mut hasher = Sha256::new();
4244 hasher.update(worktree_id.to_proto().to_be_bytes());
4245 hasher.update(path.to_string_lossy().as_bytes());
4246 hasher.update(self.nonce.to_be_bytes());
4247 hasher.finalize().as_slice().try_into().unwrap()
4248 }
4249
4250 async fn handle_open_buffer_by_id(
4251 this: ModelHandle<Self>,
4252 envelope: TypedEnvelope<proto::OpenBufferById>,
4253 _: Arc<Client>,
4254 mut cx: AsyncAppContext,
4255 ) -> Result<proto::OpenBufferResponse> {
4256 let peer_id = envelope.original_sender_id()?;
4257 let buffer = this
4258 .update(&mut cx, |this, cx| {
4259 this.open_buffer_by_id(envelope.payload.id, cx)
4260 })
4261 .await?;
4262 this.update(&mut cx, |this, cx| {
4263 Ok(proto::OpenBufferResponse {
4264 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4265 })
4266 })
4267 }
4268
4269 async fn handle_open_buffer_by_path(
4270 this: ModelHandle<Self>,
4271 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4272 _: Arc<Client>,
4273 mut cx: AsyncAppContext,
4274 ) -> Result<proto::OpenBufferResponse> {
4275 let peer_id = envelope.original_sender_id()?;
4276 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4277 let open_buffer = this.update(&mut cx, |this, cx| {
4278 this.open_buffer(
4279 ProjectPath {
4280 worktree_id,
4281 path: PathBuf::from(envelope.payload.path).into(),
4282 },
4283 cx,
4284 )
4285 });
4286
4287 let buffer = open_buffer.await?;
4288 this.update(&mut cx, |this, cx| {
4289 Ok(proto::OpenBufferResponse {
4290 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4291 })
4292 })
4293 }
4294
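    /// Converts a `ProjectTransaction` into its protobuf representation,
    /// serializing each involved buffer for the given peer.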
4295 fn serialize_project_transaction_for_peer(
4296 &mut self,
4297 project_transaction: ProjectTransaction,
4298 peer_id: PeerId,
4299 cx: &AppContext,
4300 ) -> proto::ProjectTransaction {
4301 let mut serialized_transaction = proto::ProjectTransaction {
4302 buffers: Default::default(),
4303 transactions: Default::default(),
4304 };
4305 for (buffer, transaction) in project_transaction.0 {
4306 serialized_transaction
4307 .buffers
4308 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4309 serialized_transaction
4310 .transactions
4311 .push(language::proto::serialize_transaction(&transaction));
4312 }
4313 serialized_transaction
4314 }
4315
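    /// Reconstructs a `ProjectTransaction` from its protobuf representation,
    /// waiting for each buffer to receive the edits the transaction refers to
    /// and optionally pushing the transactions onto the buffers' undo
    /// histories.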
4316 fn deserialize_project_transaction(
4317 &mut self,
4318 message: proto::ProjectTransaction,
4319 push_to_history: bool,
4320 cx: &mut ModelContext<Self>,
4321 ) -> Task<Result<ProjectTransaction>> {
4322 cx.spawn(|this, mut cx| async move {
4323 let mut project_transaction = ProjectTransaction::default();
4324 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4325 let buffer = this
4326 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4327 .await?;
4328 let transaction = language::proto::deserialize_transaction(transaction)?;
4329 project_transaction.0.insert(buffer, transaction);
4330 }
4331
4332 for (buffer, transaction) in &project_transaction.0 {
4333 buffer
4334 .update(&mut cx, |buffer, _| {
4335 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4336 })
4337 .await;
4338
4339 if push_to_history {
4340 buffer.update(&mut cx, |buffer, _| {
4341 buffer.push_transaction(transaction.clone(), Instant::now());
4342 });
4343 }
4344 }
4345
4346 Ok(project_transaction)
4347 })
4348 }
4349
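    /// Serializes a buffer for transmission to a peer. The full buffer state
    /// is sent only the first time a given buffer is shared with that peer;
    /// afterwards, only the buffer id is sent.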
4350 fn serialize_buffer_for_peer(
4351 &mut self,
4352 buffer: &ModelHandle<Buffer>,
4353 peer_id: PeerId,
4354 cx: &AppContext,
4355 ) -> proto::Buffer {
4356 let buffer_id = buffer.read(cx).remote_id();
4357 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4358 if shared_buffers.insert(buffer_id) {
4359 proto::Buffer {
4360 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4361 }
4362 } else {
4363 proto::Buffer {
4364 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4365 }
4366 }
4367 }
4368
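    /// Resolves a protobuf buffer into a local buffer handle. An `Id` variant
    /// waits until a buffer with that id has been opened locally, while a
    /// `State` variant constructs the buffer (and its `File`), registers it,
    /// and notifies any other tasks waiting for newly opened buffers.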
4369 fn deserialize_buffer(
4370 &mut self,
4371 buffer: proto::Buffer,
4372 cx: &mut ModelContext<Self>,
4373 ) -> Task<Result<ModelHandle<Buffer>>> {
4374 let replica_id = self.replica_id();
4375
4376 let opened_buffer_tx = self.opened_buffer.0.clone();
4377 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4378 cx.spawn(|this, mut cx| async move {
4379 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4380 proto::buffer::Variant::Id(id) => {
4381 let buffer = loop {
4382 let buffer = this.read_with(&cx, |this, cx| {
4383 this.opened_buffers
4384 .get(&id)
4385 .and_then(|buffer| buffer.upgrade(cx))
4386 });
4387 if let Some(buffer) = buffer {
4388 break buffer;
4389 }
4390 opened_buffer_rx
4391 .next()
4392 .await
4393 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4394 };
4395 Ok(buffer)
4396 }
4397 proto::buffer::Variant::State(mut buffer) => {
4398 let mut buffer_worktree = None;
4399 let mut buffer_file = None;
4400 if let Some(file) = buffer.file.take() {
4401 this.read_with(&cx, |this, cx| {
4402 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4403 let worktree =
4404 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4405 anyhow!("no worktree found for id {}", file.worktree_id)
4406 })?;
4407 buffer_file =
4408 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4409 as Box<dyn language::File>);
4410 buffer_worktree = Some(worktree);
4411 Ok::<_, anyhow::Error>(())
4412 })?;
4413 }
4414
4415 let buffer = cx.add_model(|cx| {
4416 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4417 });
4418
4419 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4420
4421 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4422 Ok(buffer)
4423 }
4424 }
4425 })
4426 }
4427
4428 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4429 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4430 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4431 let start = serialized_symbol
4432 .start
4433 .ok_or_else(|| anyhow!("invalid start"))?;
4434 let end = serialized_symbol
4435 .end
4436 .ok_or_else(|| anyhow!("invalid end"))?;
4437 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4438 let path = PathBuf::from(serialized_symbol.path);
4439 let language = self.languages.select_language(&path);
4440 Ok(Symbol {
4441 source_worktree_id,
4442 worktree_id,
4443 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4444 label: language
4445 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4446 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4447 name: serialized_symbol.name,
4448 path,
4449 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4450 kind,
4451 signature: serialized_symbol
4452 .signature
4453 .try_into()
4454 .map_err(|_| anyhow!("invalid signature"))?,
4455 })
4456 }
4457
4458 async fn handle_buffer_saved(
4459 this: ModelHandle<Self>,
4460 envelope: TypedEnvelope<proto::BufferSaved>,
4461 _: Arc<Client>,
4462 mut cx: AsyncAppContext,
4463 ) -> Result<()> {
4464 let version = deserialize_version(envelope.payload.version);
4465 let mtime = envelope
4466 .payload
4467 .mtime
4468 .ok_or_else(|| anyhow!("missing mtime"))?
4469 .into();
4470
4471 this.update(&mut cx, |this, cx| {
4472 let buffer = this
4473 .opened_buffers
4474 .get(&envelope.payload.buffer_id)
4475 .and_then(|buffer| buffer.upgrade(cx));
4476 if let Some(buffer) = buffer {
4477 buffer.update(cx, |buffer, cx| {
4478 buffer.did_save(version, mtime, None, cx);
4479 });
4480 }
4481 Ok(())
4482 })
4483 }
4484
4485 async fn handle_buffer_reloaded(
4486 this: ModelHandle<Self>,
4487 envelope: TypedEnvelope<proto::BufferReloaded>,
4488 _: Arc<Client>,
4489 mut cx: AsyncAppContext,
4490 ) -> Result<()> {
4491 let payload = envelope.payload.clone();
4492 let version = deserialize_version(payload.version);
4493 let mtime = payload
4494 .mtime
4495 .ok_or_else(|| anyhow!("missing mtime"))?
4496 .into();
4497 this.update(&mut cx, |this, cx| {
4498 let buffer = this
4499 .opened_buffers
4500 .get(&payload.buffer_id)
4501 .and_then(|buffer| buffer.upgrade(cx));
4502 if let Some(buffer) = buffer {
4503 buffer.update(cx, |buffer, cx| {
4504 buffer.did_reload(version, mtime, cx);
4505 });
4506 }
4507 Ok(())
4508 })
4509 }
4510
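    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning up to `max_results` matches. The matching itself runs on the
    /// background executor.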
4511 pub fn match_paths<'a>(
4512 &self,
4513 query: &'a str,
4514 include_ignored: bool,
4515 smart_case: bool,
4516 max_results: usize,
4517 cancel_flag: &'a AtomicBool,
4518 cx: &AppContext,
4519 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4520 let worktrees = self
4521 .worktrees(cx)
4522 .filter(|worktree| worktree.read(cx).is_visible())
4523 .collect::<Vec<_>>();
4524 let include_root_name = worktrees.len() > 1;
4525 let candidate_sets = worktrees
4526 .into_iter()
4527 .map(|worktree| CandidateSet {
4528 snapshot: worktree.read(cx).snapshot(),
4529 include_ignored,
4530 include_root_name,
4531 })
4532 .collect::<Vec<_>>();
4533
4534 let background = cx.background().clone();
4535 async move {
4536 fuzzy::match_paths(
4537 candidate_sets.as_slice(),
4538 query,
4539 smart_case,
4540 max_results,
4541 cancel_flag,
4542 background,
4543 )
4544 .await
4545 }
4546 }
4547
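    /// Converts a sequence of LSP text edits, expressed against the buffer
    /// version the language server last saw, into anchor ranges in the current
    /// buffer. Adjacent and newline-separated edits are merged, and multi-line
    /// replacements are diffed so that unchanged regions (and any anchors
    /// within them) are preserved.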
4548 fn edits_from_lsp(
4549 &mut self,
4550 buffer: &ModelHandle<Buffer>,
4551 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4552 version: Option<i32>,
4553 cx: &mut ModelContext<Self>,
4554 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4555 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4556 cx.background().spawn(async move {
4557 let snapshot = snapshot?;
4558 let mut lsp_edits = lsp_edits
4559 .into_iter()
4560 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4561 .peekable();
4562
4563 let mut edits = Vec::new();
4564 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4565 // Combine any LSP edits that are adjacent.
4566 //
4567 // Also, combine LSP edits that are separated from each other by only
4568 // a newline. This is important because for some code actions,
4569 // Rust-analyzer rewrites the entire buffer via a series of edits that
4570 // are separated by unchanged newline characters.
4571 //
4572 // In order for the diffing logic below to work properly, any edits that
4573 // cancel each other out must be combined into one.
4574 while let Some((next_range, next_text)) = lsp_edits.peek() {
4575 if next_range.start > range.end {
4576 if next_range.start.row > range.end.row + 1
4577 || next_range.start.column > 0
4578 || snapshot.clip_point_utf16(
4579 PointUtf16::new(range.end.row, u32::MAX),
4580 Bias::Left,
4581 ) > range.end
4582 {
4583 break;
4584 }
4585 new_text.push('\n');
4586 }
4587 range.end = next_range.end;
4588 new_text.push_str(&next_text);
4589 lsp_edits.next();
4590 }
4591
4592 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4593 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4594 {
4595 return Err(anyhow!("invalid edits received from language server"));
4596 }
4597
4598 // For multiline edits, perform a diff of the old and new text so that
4599 // we can identify the changes more precisely, preserving the locations
4600 // of any anchors positioned in the unchanged regions.
4601 if range.end.row > range.start.row {
4602 let mut offset = range.start.to_offset(&snapshot);
4603 let old_text = snapshot.text_for_range(range).collect::<String>();
4604
4605 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4606 let mut moved_since_edit = true;
4607 for change in diff.iter_all_changes() {
4608 let tag = change.tag();
4609 let value = change.value();
4610 match tag {
4611 ChangeTag::Equal => {
4612 offset += value.len();
4613 moved_since_edit = true;
4614 }
4615 ChangeTag::Delete => {
4616 let start = snapshot.anchor_after(offset);
4617 let end = snapshot.anchor_before(offset + value.len());
4618 if moved_since_edit {
4619 edits.push((start..end, String::new()));
4620 } else {
4621 edits.last_mut().unwrap().0.end = end;
4622 }
4623 offset += value.len();
4624 moved_since_edit = false;
4625 }
4626 ChangeTag::Insert => {
4627 if moved_since_edit {
4628 let anchor = snapshot.anchor_after(offset);
4629 edits.push((anchor.clone()..anchor, value.to_string()));
4630 } else {
4631 edits.last_mut().unwrap().1.push_str(value);
4632 }
4633 moved_since_edit = false;
4634 }
4635 }
4636 }
4637 } else if range.end == range.start {
4638 let anchor = snapshot.anchor_after(range.start);
4639 edits.push((anchor.clone()..anchor, new_text));
4640 } else {
4641 let edit_start = snapshot.anchor_after(range.start);
4642 let edit_end = snapshot.anchor_before(range.end);
4643 edits.push((edit_start..edit_end, new_text));
4644 }
4645 }
4646
4647 Ok(edits)
4648 })
4649 }
4650
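    /// Returns the snapshot of the buffer corresponding to the version that a
    /// language server referenced, pruning snapshots that are more than
    /// `OLD_VERSIONS_TO_RETAIN` versions old. When no version is given, the
    /// buffer's current snapshot is returned.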
4651 fn buffer_snapshot_for_lsp_version(
4652 &mut self,
4653 buffer: &ModelHandle<Buffer>,
4654 version: Option<i32>,
4655 cx: &AppContext,
4656 ) -> Result<TextBufferSnapshot> {
4657 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4658
4659 if let Some(version) = version {
4660 let buffer_id = buffer.read(cx).remote_id();
4661 let snapshots = self
4662 .buffer_snapshots
4663 .get_mut(&buffer_id)
4664 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4665 let mut found_snapshot = None;
4666 snapshots.retain(|(snapshot_version, snapshot)| {
4667 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4668 false
4669 } else {
4670 if *snapshot_version == version {
4671 found_snapshot = Some(snapshot.clone());
4672 }
4673 true
4674 }
4675 });
4676
4677 found_snapshot.ok_or_else(|| {
4678 anyhow!(
4679 "snapshot not found for buffer {} at version {}",
4680 buffer_id,
4681 version
4682 )
4683 })
4684 } else {
            Ok(buffer.read(cx).text_snapshot())
4686 }
4687 }
4688
4689 fn language_server_for_buffer(
4690 &self,
4691 buffer: &Buffer,
4692 cx: &AppContext,
4693 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4694 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4695 let worktree_id = file.worktree_id(cx);
4696 self.language_servers
4697 .get(&(worktree_id, language.lsp_adapter()?.name()))
4698 } else {
4699 None
4700 }
4701 }
4702}
4703
4704impl WorktreeHandle {
4705 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4706 match self {
4707 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4708 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4709 }
4710 }
4711}
4712
4713impl OpenBuffer {
4714 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4715 match self {
4716 OpenBuffer::Strong(handle) => Some(handle.clone()),
4717 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4718 OpenBuffer::Loading(_) => None,
4719 }
4720 }
4721}
4722
4723struct CandidateSet {
4724 snapshot: Snapshot,
4725 include_ignored: bool,
4726 include_root_name: bool,
4727}
4728
4729impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4730 type Candidates = CandidateSetIter<'a>;
4731
4732 fn id(&self) -> usize {
4733 self.snapshot.id().to_usize()
4734 }
4735
4736 fn len(&self) -> usize {
4737 if self.include_ignored {
4738 self.snapshot.file_count()
4739 } else {
4740 self.snapshot.visible_file_count()
4741 }
4742 }
4743
4744 fn prefix(&self) -> Arc<str> {
4745 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4746 self.snapshot.root_name().into()
4747 } else if self.include_root_name {
4748 format!("{}/", self.snapshot.root_name()).into()
4749 } else {
4750 "".into()
4751 }
4752 }
4753
4754 fn candidates(&'a self, start: usize) -> Self::Candidates {
4755 CandidateSetIter {
4756 traversal: self.snapshot.files(self.include_ignored, start),
4757 }
4758 }
4759}
4760
4761struct CandidateSetIter<'a> {
4762 traversal: Traversal<'a>,
4763}
4764
4765impl<'a> Iterator for CandidateSetIter<'a> {
4766 type Item = PathMatchCandidate<'a>;
4767
4768 fn next(&mut self) -> Option<Self::Item> {
4769 self.traversal.next().map(|entry| {
4770 if let EntryKind::File(char_bag) = entry.kind {
4771 PathMatchCandidate {
4772 path: &entry.path,
4773 char_bag,
4774 }
4775 } else {
4776 unreachable!()
4777 }
4778 })
4779 }
4780}
4781
4782impl Entity for Project {
4783 type Event = Event;
4784
4785 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4786 match &self.client_state {
4787 ProjectClientState::Local { remote_id_rx, .. } => {
4788 if let Some(project_id) = *remote_id_rx.borrow() {
4789 self.client
4790 .send(proto::UnregisterProject { project_id })
4791 .log_err();
4792 }
4793 }
4794 ProjectClientState::Remote { remote_id, .. } => {
4795 self.client
4796 .send(proto::LeaveProject {
4797 project_id: *remote_id,
4798 })
4799 .log_err();
4800 }
4801 }
4802 }
4803
4804 fn app_will_quit(
4805 &mut self,
4806 _: &mut MutableAppContext,
4807 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4808 let shutdown_futures = self
4809 .language_servers
4810 .drain()
4811 .filter_map(|(_, (_, server))| server.shutdown())
4812 .collect::<Vec<_>>();
4813 Some(
4814 async move {
4815 futures::future::join_all(shutdown_futures).await;
4816 }
4817 .boxed(),
4818 )
4819 }
4820}
4821
4822impl Collaborator {
4823 fn from_proto(
4824 message: proto::Collaborator,
4825 user_store: &ModelHandle<UserStore>,
4826 cx: &mut AsyncAppContext,
4827 ) -> impl Future<Output = Result<Self>> {
4828 let user = user_store.update(cx, |user_store, cx| {
4829 user_store.fetch_user(message.user_id, cx)
4830 });
4831
4832 async move {
4833 Ok(Self {
4834 peer_id: PeerId(message.peer_id),
4835 user: user.await?,
4836 replica_id: message.replica_id as ReplicaId,
4837 })
4838 }
4839 }
4840}
4841
4842impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4843 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4844 Self {
4845 worktree_id,
4846 path: path.as_ref().into(),
4847 }
4848 }
4849}
4850
4851impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4852 fn from(options: lsp::CreateFileOptions) -> Self {
4853 Self {
4854 overwrite: options.overwrite.unwrap_or(false),
4855 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4856 }
4857 }
4858}
4859
4860impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4861 fn from(options: lsp::RenameFileOptions) -> Self {
4862 Self {
4863 overwrite: options.overwrite.unwrap_or(false),
4864 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4865 }
4866 }
4867}
4868
4869impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4870 fn from(options: lsp::DeleteFileOptions) -> Self {
4871 Self {
4872 recursive: options.recursive.unwrap_or(false),
4873 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4874 }
4875 }
4876}
4877
4878fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4879 proto::Symbol {
4880 source_worktree_id: symbol.source_worktree_id.to_proto(),
4881 worktree_id: symbol.worktree_id.to_proto(),
4882 language_server_name: symbol.language_server_name.0.to_string(),
4883 name: symbol.name.clone(),
4884 kind: unsafe { mem::transmute(symbol.kind) },
4885 path: symbol.path.to_string_lossy().to_string(),
4886 start: Some(proto::Point {
4887 row: symbol.range.start.row,
4888 column: symbol.range.start.column,
4889 }),
4890 end: Some(proto::Point {
4891 row: symbol.range.end.row,
4892 column: symbol.range.end.column,
4893 }),
4894 signature: symbol.signature.to_vec(),
4895 }
4896}
4897
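/// Computes the path to `path` relative to `base`, e.g. relativizing
/// `/a/b/c.rs` against `/a/d` yields `../b/c.rs`.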
4898fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4899 let mut path_components = path.components();
4900 let mut base_components = base.components();
4901 let mut components: Vec<Component> = Vec::new();
4902 loop {
4903 match (path_components.next(), base_components.next()) {
4904 (None, None) => break,
4905 (Some(a), None) => {
4906 components.push(a);
4907 components.extend(path_components.by_ref());
4908 break;
4909 }
4910 (None, _) => components.push(Component::ParentDir),
4911 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4912 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4913 (Some(a), Some(_)) => {
4914 components.push(Component::ParentDir);
4915 for _ in base_components {
4916 components.push(Component::ParentDir);
4917 }
4918 components.push(a);
4919 components.extend(path_components.by_ref());
4920 break;
4921 }
4922 }
4923 }
4924 components.iter().map(|c| c.as_os_str()).collect()
4925}
4926
4927impl Item for Buffer {
4928 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4929 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4930 }
4931}
4932
4933#[cfg(test)]
4934mod tests {
4935 use crate::worktree::WorktreeHandle;
4936
4937 use super::{Event, *};
4938 use fs::RealFs;
4939 use futures::{future, StreamExt};
4940 use gpui::test::subscribe;
4941 use language::{
4942 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
4943 OffsetRangeExt, Point, ToPoint,
4944 };
4945 use lsp::Url;
4946 use serde_json::json;
4947 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
4948 use unindent::Unindent as _;
4949 use util::{assert_set_eq, test::temp_tree};
4950
4951 #[gpui::test]
4952 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4953 let dir = temp_tree(json!({
4954 "root": {
4955 "apple": "",
4956 "banana": {
4957 "carrot": {
4958 "date": "",
4959 "endive": "",
4960 }
4961 },
4962 "fennel": {
4963 "grape": "",
4964 }
4965 }
4966 }));
4967
4968 let root_link_path = dir.path().join("root_link");
4969 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4970 unix::fs::symlink(
4971 &dir.path().join("root/fennel"),
4972 &dir.path().join("root/finnochio"),
4973 )
4974 .unwrap();
4975
4976 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
4977
4978 project.read_with(cx, |project, cx| {
4979 let tree = project.worktrees(cx).next().unwrap().read(cx);
4980 assert_eq!(tree.file_count(), 5);
4981 assert_eq!(
4982 tree.inode_for_path("fennel/grape"),
4983 tree.inode_for_path("finnochio/grape")
4984 );
4985 });
4986
4987 let cancel_flag = Default::default();
4988 let results = project
4989 .read_with(cx, |project, cx| {
4990 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4991 })
4992 .await;
4993 assert_eq!(
4994 results
4995 .into_iter()
4996 .map(|result| result.path)
4997 .collect::<Vec<Arc<Path>>>(),
4998 vec![
4999 PathBuf::from("banana/carrot/date").into(),
5000 PathBuf::from("banana/carrot/endive").into(),
5001 ]
5002 );
5003 }
5004
5005 #[gpui::test]
5006 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5007 cx.foreground().forbid_parking();
5008
5009 let mut rust_language = Language::new(
5010 LanguageConfig {
5011 name: "Rust".into(),
5012 path_suffixes: vec!["rs".to_string()],
5013 ..Default::default()
5014 },
5015 Some(tree_sitter_rust::language()),
5016 );
5017 let mut json_language = Language::new(
5018 LanguageConfig {
5019 name: "JSON".into(),
5020 path_suffixes: vec!["json".to_string()],
5021 ..Default::default()
5022 },
5023 None,
5024 );
5025 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5026 name: "the-rust-language-server",
5027 capabilities: lsp::ServerCapabilities {
5028 completion_provider: Some(lsp::CompletionOptions {
5029 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5030 ..Default::default()
5031 }),
5032 ..Default::default()
5033 },
5034 ..Default::default()
5035 });
5036 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5037 name: "the-json-language-server",
5038 capabilities: lsp::ServerCapabilities {
5039 completion_provider: Some(lsp::CompletionOptions {
5040 trigger_characters: Some(vec![":".to_string()]),
5041 ..Default::default()
5042 }),
5043 ..Default::default()
5044 },
5045 ..Default::default()
5046 });
5047
5048 let fs = FakeFs::new(cx.background());
5049 fs.insert_tree(
5050 "/the-root",
5051 json!({
5052 "test.rs": "const A: i32 = 1;",
5053 "test2.rs": "",
5054 "Cargo.toml": "a = 1",
5055 "package.json": "{\"a\": 1}",
5056 }),
5057 )
5058 .await;
5059
5060 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5061 project.update(cx, |project, _| {
5062 project.languages.add(Arc::new(rust_language));
5063 project.languages.add(Arc::new(json_language));
5064 });
5065
5066 // Open a buffer without an associated language server.
5067 let toml_buffer = project
5068 .update(cx, |project, cx| {
5069 project.open_local_buffer("/the-root/Cargo.toml", cx)
5070 })
5071 .await
5072 .unwrap();
5073
5074 // Open a buffer with an associated language server.
5075 let rust_buffer = project
5076 .update(cx, |project, cx| {
5077 project.open_local_buffer("/the-root/test.rs", cx)
5078 })
5079 .await
5080 .unwrap();
5081
5082 // A server is started up, and it is notified about Rust files.
5083 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5084 assert_eq!(
5085 fake_rust_server
5086 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5087 .await
5088 .text_document,
5089 lsp::TextDocumentItem {
5090 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5091 version: 0,
5092 text: "const A: i32 = 1;".to_string(),
5093 language_id: Default::default()
5094 }
5095 );
5096
5097 // The buffer is configured based on the language server's capabilities.
5098 rust_buffer.read_with(cx, |buffer, _| {
5099 assert_eq!(
5100 buffer.completion_triggers(),
5101 &[".".to_string(), "::".to_string()]
5102 );
5103 });
5104 toml_buffer.read_with(cx, |buffer, _| {
5105 assert!(buffer.completion_triggers().is_empty());
5106 });
5107
5108 // Edit a buffer. The changes are reported to the language server.
5109 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5110 assert_eq!(
5111 fake_rust_server
5112 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5113 .await
5114 .text_document,
5115 lsp::VersionedTextDocumentIdentifier::new(
5116 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5117 1
5118 )
5119 );
5120
5121 // Open a third buffer with a different associated language server.
5122 let json_buffer = project
5123 .update(cx, |project, cx| {
5124 project.open_local_buffer("/the-root/package.json", cx)
5125 })
5126 .await
5127 .unwrap();
5128
5129 // A json language server is started up and is only notified about the json buffer.
5130 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5131 assert_eq!(
5132 fake_json_server
5133 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5134 .await
5135 .text_document,
5136 lsp::TextDocumentItem {
5137 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5138 version: 0,
5139 text: "{\"a\": 1}".to_string(),
5140 language_id: Default::default()
5141 }
5142 );
5143
5144 // This buffer is configured based on the second language server's
5145 // capabilities.
5146 json_buffer.read_with(cx, |buffer, _| {
5147 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5148 });
5149
5150 // When opening another buffer whose language server is already running,
5151 // it is also configured based on the existing language server's capabilities.
5152 let rust_buffer2 = project
5153 .update(cx, |project, cx| {
5154 project.open_local_buffer("/the-root/test2.rs", cx)
5155 })
5156 .await
5157 .unwrap();
5158 rust_buffer2.read_with(cx, |buffer, _| {
5159 assert_eq!(
5160 buffer.completion_triggers(),
5161 &[".".to_string(), "::".to_string()]
5162 );
5163 });
5164
5165 // Changes are reported only to servers matching the buffer's language.
5166 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5167 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5168 assert_eq!(
5169 fake_rust_server
5170 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5171 .await
5172 .text_document,
5173 lsp::VersionedTextDocumentIdentifier::new(
5174 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5175 1
5176 )
5177 );
5178
5179 // Save notifications are reported to all servers.
5180 toml_buffer
5181 .update(cx, |buffer, cx| buffer.save(cx))
5182 .await
5183 .unwrap();
5184 assert_eq!(
5185 fake_rust_server
5186 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5187 .await
5188 .text_document,
5189 lsp::TextDocumentIdentifier::new(
5190 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5191 )
5192 );
5193 assert_eq!(
5194 fake_json_server
5195 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5196 .await
5197 .text_document,
5198 lsp::TextDocumentIdentifier::new(
5199 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5200 )
5201 );
5202
5203 // Renames are reported only to servers matching the buffer's language.
5204 fs.rename(
5205 Path::new("/the-root/test2.rs"),
5206 Path::new("/the-root/test3.rs"),
5207 Default::default(),
5208 )
5209 .await
5210 .unwrap();
5211 assert_eq!(
5212 fake_rust_server
5213 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5214 .await
5215 .text_document,
5216 lsp::TextDocumentIdentifier::new(
5217 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5218 ),
5219 );
5220 assert_eq!(
5221 fake_rust_server
5222 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5223 .await
5224 .text_document,
5225 lsp::TextDocumentItem {
5226 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5227 version: 0,
5228 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5229 language_id: Default::default()
5230 },
5231 );
5232
5233 rust_buffer2.update(cx, |buffer, cx| {
5234 buffer.update_diagnostics(
5235 DiagnosticSet::from_sorted_entries(
5236 vec![DiagnosticEntry {
5237 diagnostic: Default::default(),
5238 range: Anchor::MIN..Anchor::MAX,
5239 }],
5240 &buffer.snapshot(),
5241 ),
5242 cx,
5243 );
5244 assert_eq!(
5245 buffer
5246 .snapshot()
5247 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5248 .count(),
5249 1
5250 );
5251 });
5252
5253 // When the rename changes the extension of the file, the buffer gets closed on the old
5254 // language server and gets opened on the new one.
5255 fs.rename(
5256 Path::new("/the-root/test3.rs"),
5257 Path::new("/the-root/test3.json"),
5258 Default::default(),
5259 )
5260 .await
5261 .unwrap();
5262 assert_eq!(
5263 fake_rust_server
5264 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5265 .await
5266 .text_document,
5267 lsp::TextDocumentIdentifier::new(
5268 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5269 ),
5270 );
5271 assert_eq!(
5272 fake_json_server
5273 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5274 .await
5275 .text_document,
5276 lsp::TextDocumentItem {
5277 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5278 version: 0,
5279 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5280 language_id: Default::default()
5281 },
5282 );
5283 // We clear the diagnostics, since the language has changed.
5284 rust_buffer2.read_with(cx, |buffer, _| {
5285 assert_eq!(
5286 buffer
5287 .snapshot()
5288 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5289 .count(),
5290 0
5291 );
5292 });
5293
5294 // The renamed file's version resets after changing language server.
5295 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5296 assert_eq!(
5297 fake_json_server
5298 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5299 .await
5300 .text_document,
5301 lsp::VersionedTextDocumentIdentifier::new(
5302 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5303 1
5304 )
5305 );
5306
5307 // Restart language servers
5308 project.update(cx, |project, cx| {
5309 project.restart_language_servers_for_buffers(
5310 vec![rust_buffer.clone(), json_buffer.clone()],
5311 cx,
5312 );
5313 });
5314
5315 let mut rust_shutdown_requests = fake_rust_server
5316 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5317 let mut json_shutdown_requests = fake_json_server
5318 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5319 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5320
5321 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5322 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5323
5324 // Ensure rust document is reopened in new rust language server
5325 assert_eq!(
5326 fake_rust_server
5327 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5328 .await
5329 .text_document,
5330 lsp::TextDocumentItem {
5331 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5332 version: 1,
5333 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5334 language_id: Default::default()
5335 }
5336 );
5337
5338 // Ensure json documents are reopened in new json language server
5339 assert_set_eq!(
5340 [
5341 fake_json_server
5342 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5343 .await
5344 .text_document,
5345 fake_json_server
5346 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5347 .await
5348 .text_document,
5349 ],
5350 [
5351 lsp::TextDocumentItem {
5352 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5353 version: 0,
5354 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5355 language_id: Default::default()
5356 },
5357 lsp::TextDocumentItem {
5358 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5359 version: 1,
5360 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5361 language_id: Default::default()
5362 }
5363 ]
5364 );
5365
5366 // Close notifications are reported only to servers matching the buffer's language.
5367 cx.update(|_| drop(json_buffer));
5368 let close_message = lsp::DidCloseTextDocumentParams {
5369 text_document: lsp::TextDocumentIdentifier::new(
5370 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5371 ),
5372 };
5373 assert_eq!(
5374 fake_json_server
5375 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5376 .await,
5377 close_message,
5378 );
5379 }
5380
5381 #[gpui::test]
5382 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5383 cx.foreground().forbid_parking();
5384
5385 let fs = FakeFs::new(cx.background());
5386 fs.insert_tree(
5387 "/dir",
5388 json!({
5389 "a.rs": "let a = 1;",
5390 "b.rs": "let b = 2;"
5391 }),
5392 )
5393 .await;
5394
5395 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5396
5397 let buffer_a = project
5398 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5399 .await
5400 .unwrap();
5401 let buffer_b = project
5402 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5403 .await
5404 .unwrap();
5405
5406 project.update(cx, |project, cx| {
5407 project
5408 .update_diagnostics(
5409 lsp::PublishDiagnosticsParams {
5410 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5411 version: None,
5412 diagnostics: vec![lsp::Diagnostic {
5413 range: lsp::Range::new(
5414 lsp::Position::new(0, 4),
5415 lsp::Position::new(0, 5),
5416 ),
5417 severity: Some(lsp::DiagnosticSeverity::ERROR),
5418 message: "error 1".to_string(),
5419 ..Default::default()
5420 }],
5421 },
5422 &[],
5423 cx,
5424 )
5425 .unwrap();
5426 project
5427 .update_diagnostics(
5428 lsp::PublishDiagnosticsParams {
5429 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5430 version: None,
5431 diagnostics: vec![lsp::Diagnostic {
5432 range: lsp::Range::new(
5433 lsp::Position::new(0, 4),
5434 lsp::Position::new(0, 5),
5435 ),
5436 severity: Some(lsp::DiagnosticSeverity::WARNING),
5437 message: "error 2".to_string(),
5438 ..Default::default()
5439 }],
5440 },
5441 &[],
5442 cx,
5443 )
5444 .unwrap();
5445 });
5446
5447 buffer_a.read_with(cx, |buffer, _| {
5448 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5449 assert_eq!(
5450 chunks
5451 .iter()
5452 .map(|(s, d)| (s.as_str(), *d))
5453 .collect::<Vec<_>>(),
5454 &[
5455 ("let ", None),
5456 ("a", Some(DiagnosticSeverity::ERROR)),
5457 (" = 1;", None),
5458 ]
5459 );
5460 });
5461 buffer_b.read_with(cx, |buffer, _| {
5462 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5463 assert_eq!(
5464 chunks
5465 .iter()
5466 .map(|(s, d)| (s.as_str(), *d))
5467 .collect::<Vec<_>>(),
5468 &[
5469 ("let ", None),
5470 ("b", Some(DiagnosticSeverity::WARNING)),
5471 (" = 2;", None),
5472 ]
5473 );
5474 });
5475 }
5476
5477 #[gpui::test]
5478 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5479 cx.foreground().forbid_parking();
5480
5481 let progress_token = "the-progress-token";
5482 let mut language = Language::new(
5483 LanguageConfig {
5484 name: "Rust".into(),
5485 path_suffixes: vec!["rs".to_string()],
5486 ..Default::default()
5487 },
5488 Some(tree_sitter_rust::language()),
5489 );
5490 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5491 disk_based_diagnostics_progress_token: Some(progress_token),
5492 disk_based_diagnostics_sources: &["disk"],
5493 ..Default::default()
5494 });
5495
5496 let fs = FakeFs::new(cx.background());
5497 fs.insert_tree(
5498 "/dir",
5499 json!({
5500 "a.rs": "fn a() { A }",
5501 "b.rs": "const y: i32 = 1",
5502 }),
5503 )
5504 .await;
5505
5506 let project = Project::test(fs, ["/dir"], cx).await;
5507 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5508 let worktree_id =
5509 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5510
5511 // Cause worktree to start the fake language server
5512 let _buffer = project
5513 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5514 .await
5515 .unwrap();
5516
5517 let mut events = subscribe(&project, cx);
5518
5519 let mut fake_server = fake_servers.next().await.unwrap();
5520 fake_server.start_progress(progress_token).await;
5521 assert_eq!(
5522 events.next().await.unwrap(),
5523 Event::DiskBasedDiagnosticsStarted
5524 );
5525
5526 fake_server.start_progress(progress_token).await;
5527 fake_server.end_progress(progress_token).await;
5528 fake_server.start_progress(progress_token).await;
5529
5530 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5531 lsp::PublishDiagnosticsParams {
5532 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5533 version: None,
5534 diagnostics: vec![lsp::Diagnostic {
5535 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5536 severity: Some(lsp::DiagnosticSeverity::ERROR),
5537 message: "undefined variable 'A'".to_string(),
5538 ..Default::default()
5539 }],
5540 },
5541 );
5542 assert_eq!(
5543 events.next().await.unwrap(),
5544 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5545 );
5546
5547 fake_server.end_progress(progress_token).await;
5548 fake_server.end_progress(progress_token).await;
5549 assert_eq!(
5550 events.next().await.unwrap(),
5551 Event::DiskBasedDiagnosticsUpdated
5552 );
5553 assert_eq!(
5554 events.next().await.unwrap(),
5555 Event::DiskBasedDiagnosticsFinished
5556 );
5557
5558 let buffer = project
5559 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5560 .await
5561 .unwrap();
5562
5563 buffer.read_with(cx, |buffer, _| {
5564 let snapshot = buffer.snapshot();
5565 let diagnostics = snapshot
5566 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5567 .collect::<Vec<_>>();
5568 assert_eq!(
5569 diagnostics,
5570 &[DiagnosticEntry {
5571 range: Point::new(0, 9)..Point::new(0, 10),
5572 diagnostic: Diagnostic {
5573 severity: lsp::DiagnosticSeverity::ERROR,
5574 message: "undefined variable 'A'".to_string(),
5575 group_id: 0,
5576 is_primary: true,
5577 ..Default::default()
5578 }
5579 }]
5580 )
5581 });
5582
5583 // Ensure publishing empty diagnostics twice only results in one update event.
5584 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5585 lsp::PublishDiagnosticsParams {
5586 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5587 version: None,
5588 diagnostics: Default::default(),
5589 },
5590 );
5591 assert_eq!(
5592 events.next().await.unwrap(),
5593 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5594 );
5595
5596 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5597 lsp::PublishDiagnosticsParams {
5598 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5599 version: None,
5600 diagnostics: Default::default(),
5601 },
5602 );
5603 cx.foreground().run_until_parked();
5604 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5605 }
5606
5607 #[gpui::test]
5608 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5609 cx.foreground().forbid_parking();
5610
5611 let progress_token = "the-progress-token";
5612 let mut language = Language::new(
5613 LanguageConfig {
5614 path_suffixes: vec!["rs".to_string()],
5615 ..Default::default()
5616 },
5617 None,
5618 );
5619 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5620 disk_based_diagnostics_sources: &["disk"],
5621 disk_based_diagnostics_progress_token: Some(progress_token),
5622 ..Default::default()
5623 });
5624
5625 let fs = FakeFs::new(cx.background());
5626 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5627
5628 let project = Project::test(fs, ["/dir"], cx).await;
5629 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5630
5631 let buffer = project
5632 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5633 .await
5634 .unwrap();
5635
5636 // Simulate diagnostics starting to update.
5637 let mut fake_server = fake_servers.next().await.unwrap();
5638 fake_server.start_progress(progress_token).await;
5639
5640 // Restart the server before the diagnostics finish updating.
5641 project.update(cx, |project, cx| {
5642 project.restart_language_servers_for_buffers([buffer], cx);
5643 });
5644 let mut events = subscribe(&project, cx);
5645
5646 // Simulate the newly started server sending more diagnostics.
5647 let mut fake_server = fake_servers.next().await.unwrap();
5648 fake_server.start_progress(progress_token).await;
5649 assert_eq!(
5650 events.next().await.unwrap(),
5651 Event::DiskBasedDiagnosticsStarted
5652 );
5653
5654 // All diagnostics are considered done, despite the old server's diagnostic
5655 // task never completing.
5656 fake_server.end_progress(progress_token).await;
5657 assert_eq!(
5658 events.next().await.unwrap(),
5659 Event::DiskBasedDiagnosticsUpdated
5660 );
5661 assert_eq!(
5662 events.next().await.unwrap(),
5663 Event::DiskBasedDiagnosticsFinished
5664 );
5665 project.read_with(cx, |project, _| {
5666 assert!(!project.is_running_disk_based_diagnostics());
5667 });
5668 }
5669
5670 #[gpui::test]
5671 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5672 cx.foreground().forbid_parking();
5673
5674 let mut language = Language::new(
5675 LanguageConfig {
5676 name: "Rust".into(),
5677 path_suffixes: vec!["rs".to_string()],
5678 ..Default::default()
5679 },
5680 Some(tree_sitter_rust::language()),
5681 );
5682 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5683 disk_based_diagnostics_sources: &["disk"],
5684 ..Default::default()
5685 });
5686
5687 let text = "
5688 fn a() { A }
5689 fn b() { BB }
5690 fn c() { CCC }
5691 "
5692 .unindent();
5693
5694 let fs = FakeFs::new(cx.background());
5695 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5696
5697 let project = Project::test(fs, ["/dir"], cx).await;
5698 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5699
5700 let buffer = project
5701 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5702 .await
5703 .unwrap();
5704
5705 let mut fake_server = fake_servers.next().await.unwrap();
5706 let open_notification = fake_server
5707 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5708 .await;
5709
5710 // Edit the buffer, moving the content down
5711 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
5712 let change_notification_1 = fake_server
5713 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5714 .await;
5715 assert!(
5716 change_notification_1.text_document.version > open_notification.text_document.version
5717 );
5718
5719 // Report some diagnostics for the initial version of the buffer
5720 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5721 lsp::PublishDiagnosticsParams {
5722 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5723 version: Some(open_notification.text_document.version),
5724 diagnostics: vec![
5725 lsp::Diagnostic {
5726 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5727 severity: Some(DiagnosticSeverity::ERROR),
5728 message: "undefined variable 'A'".to_string(),
5729 source: Some("disk".to_string()),
5730 ..Default::default()
5731 },
5732 lsp::Diagnostic {
5733 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5734 severity: Some(DiagnosticSeverity::ERROR),
5735 message: "undefined variable 'BB'".to_string(),
5736 source: Some("disk".to_string()),
5737 ..Default::default()
5738 },
5739 lsp::Diagnostic {
5740 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5741 severity: Some(DiagnosticSeverity::ERROR),
5742 source: Some("disk".to_string()),
5743 message: "undefined variable 'CCC'".to_string(),
5744 ..Default::default()
5745 },
5746 ],
5747 },
5748 );
5749
5750 // The diagnostics have moved down since they were created.
5751 buffer.next_notification(cx).await;
5752 buffer.read_with(cx, |buffer, _| {
5753 assert_eq!(
5754 buffer
5755 .snapshot()
5756 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5757 .collect::<Vec<_>>(),
5758 &[
5759 DiagnosticEntry {
5760 range: Point::new(3, 9)..Point::new(3, 11),
5761 diagnostic: Diagnostic {
5762 severity: DiagnosticSeverity::ERROR,
5763 message: "undefined variable 'BB'".to_string(),
5764 is_disk_based: true,
5765 group_id: 1,
5766 is_primary: true,
5767 ..Default::default()
5768 },
5769 },
5770 DiagnosticEntry {
5771 range: Point::new(4, 9)..Point::new(4, 12),
5772 diagnostic: Diagnostic {
5773 severity: DiagnosticSeverity::ERROR,
5774 message: "undefined variable 'CCC'".to_string(),
5775 is_disk_based: true,
5776 group_id: 2,
5777 is_primary: true,
5778 ..Default::default()
5779 }
5780 }
5781 ]
5782 );
5783 assert_eq!(
5784 chunks_with_diagnostics(buffer, 0..buffer.len()),
5785 [
5786 ("\n\nfn a() { ".to_string(), None),
5787 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5788 (" }\nfn b() { ".to_string(), None),
5789 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5790 (" }\nfn c() { ".to_string(), None),
5791 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5792 (" }\n".to_string(), None),
5793 ]
5794 );
5795 assert_eq!(
5796 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5797 [
5798 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5799 (" }\nfn c() { ".to_string(), None),
5800 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5801 ]
5802 );
5803 });
5804
5805 // Ensure overlapping diagnostics are highlighted correctly.
5806 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5807 lsp::PublishDiagnosticsParams {
5808 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5809 version: Some(open_notification.text_document.version),
5810 diagnostics: vec![
5811 lsp::Diagnostic {
5812 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5813 severity: Some(DiagnosticSeverity::ERROR),
5814 message: "undefined variable 'A'".to_string(),
5815 source: Some("disk".to_string()),
5816 ..Default::default()
5817 },
5818 lsp::Diagnostic {
5819 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5820 severity: Some(DiagnosticSeverity::WARNING),
5821 message: "unreachable statement".to_string(),
5822 source: Some("disk".to_string()),
5823 ..Default::default()
5824 },
5825 ],
5826 },
5827 );
5828
5829 buffer.next_notification(cx).await;
5830 buffer.read_with(cx, |buffer, _| {
5831 assert_eq!(
5832 buffer
5833 .snapshot()
5834 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5835 .collect::<Vec<_>>(),
5836 &[
5837 DiagnosticEntry {
5838 range: Point::new(2, 9)..Point::new(2, 12),
5839 diagnostic: Diagnostic {
5840 severity: DiagnosticSeverity::WARNING,
5841 message: "unreachable statement".to_string(),
5842 is_disk_based: true,
5843 group_id: 1,
5844 is_primary: true,
5845 ..Default::default()
5846 }
5847 },
5848 DiagnosticEntry {
5849 range: Point::new(2, 9)..Point::new(2, 10),
5850 diagnostic: Diagnostic {
5851 severity: DiagnosticSeverity::ERROR,
5852 message: "undefined variable 'A'".to_string(),
5853 is_disk_based: true,
5854 group_id: 0,
5855 is_primary: true,
5856 ..Default::default()
5857 },
5858 }
5859 ]
5860 );
5861 assert_eq!(
5862 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5863 [
5864 ("fn a() { ".to_string(), None),
5865 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5866 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5867 ("\n".to_string(), None),
5868 ]
5869 );
5870 assert_eq!(
5871 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5872 [
5873 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5874 ("\n".to_string(), None),
5875 ]
5876 );
5877 });
5878
5879 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5880 // changes since the last save.
5881 buffer.update(cx, |buffer, cx| {
5882 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
5883 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
5884 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
5885 });
5886 let change_notification_2 = fake_server
5887 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5888 .await;
5889 assert!(
5890 change_notification_2.text_document.version
5891 > change_notification_1.text_document.version
5892 );
5893
5894 // Handle out-of-order diagnostics
5895 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5896 lsp::PublishDiagnosticsParams {
5897 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5898 version: Some(change_notification_2.text_document.version),
5899 diagnostics: vec![
5900 lsp::Diagnostic {
5901 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5902 severity: Some(DiagnosticSeverity::ERROR),
5903 message: "undefined variable 'BB'".to_string(),
5904 source: Some("disk".to_string()),
5905 ..Default::default()
5906 },
5907 lsp::Diagnostic {
5908 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5909 severity: Some(DiagnosticSeverity::WARNING),
5910 message: "undefined variable 'A'".to_string(),
5911 source: Some("disk".to_string()),
5912 ..Default::default()
5913 },
5914 ],
5915 },
5916 );
5917
5918 buffer.next_notification(cx).await;
5919 buffer.read_with(cx, |buffer, _| {
5920 assert_eq!(
5921 buffer
5922 .snapshot()
5923 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5924 .collect::<Vec<_>>(),
5925 &[
5926 DiagnosticEntry {
5927 range: Point::new(2, 21)..Point::new(2, 22),
5928 diagnostic: Diagnostic {
5929 severity: DiagnosticSeverity::WARNING,
5930 message: "undefined variable 'A'".to_string(),
5931 is_disk_based: true,
5932 group_id: 1,
5933 is_primary: true,
5934 ..Default::default()
5935 }
5936 },
5937 DiagnosticEntry {
5938 range: Point::new(3, 9)..Point::new(3, 14),
5939 diagnostic: Diagnostic {
5940 severity: DiagnosticSeverity::ERROR,
5941 message: "undefined variable 'BB'".to_string(),
5942 is_disk_based: true,
5943 group_id: 0,
5944 is_primary: true,
5945 ..Default::default()
5946 },
5947 }
5948 ]
5949 );
5950 });
5951 }
5952
5953 #[gpui::test]
5954 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5955 cx.foreground().forbid_parking();
5956
5957 let text = concat!(
5958 "let one = ;\n", //
5959 "let two = \n",
5960 "let three = 3;\n",
5961 );
5962
5963 let fs = FakeFs::new(cx.background());
5964 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5965
5966 let project = Project::test(fs, ["/dir"], cx).await;
5967 let buffer = project
5968 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5969 .await
5970 .unwrap();
5971
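        // Install the diagnostics directly on the buffer via `update_buffer_diagnostics`,
        // bypassing a language server entirely.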
5972 project.update(cx, |project, cx| {
5973 project
5974 .update_buffer_diagnostics(
5975 &buffer,
5976 vec![
5977 DiagnosticEntry {
5978 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5979 diagnostic: Diagnostic {
5980 severity: DiagnosticSeverity::ERROR,
5981 message: "syntax error 1".to_string(),
5982 ..Default::default()
5983 },
5984 },
5985 DiagnosticEntry {
5986 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5987 diagnostic: Diagnostic {
5988 severity: DiagnosticSeverity::ERROR,
5989 message: "syntax error 2".to_string(),
5990 ..Default::default()
5991 },
5992 },
5993 ],
5994 None,
5995 cx,
5996 )
5997 .unwrap();
5998 });
5999
6000 // An empty range is extended forward to include the following character.
6001 // At the end of a line, an empty range is extended backward to include
6002 // the preceding character.
6003 buffer.read_with(cx, |buffer, _| {
6004 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6005 assert_eq!(
6006 chunks
6007 .iter()
6008 .map(|(s, d)| (s.as_str(), *d))
6009 .collect::<Vec<_>>(),
6010 &[
6011 ("let one = ", None),
6012 (";", Some(DiagnosticSeverity::ERROR)),
6013 ("\nlet two =", None),
6014 (" ", Some(DiagnosticSeverity::ERROR)),
6015 ("\nlet three = 3;\n", None)
6016 ]
6017 );
6018 });
6019 }
6020
6021 #[gpui::test]
6022 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6023 cx.foreground().forbid_parking();
6024
6025 let mut language = Language::new(
6026 LanguageConfig {
6027 name: "Rust".into(),
6028 path_suffixes: vec!["rs".to_string()],
6029 ..Default::default()
6030 },
6031 Some(tree_sitter_rust::language()),
6032 );
6033 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6034
6035 let text = "
6036 fn a() {
6037 f1();
6038 }
6039 fn b() {
6040 f2();
6041 }
6042 fn c() {
6043 f3();
6044 }
6045 "
6046 .unindent();
6047
6048 let fs = FakeFs::new(cx.background());
6049 fs.insert_tree(
6050 "/dir",
6051 json!({
6052 "a.rs": text.clone(),
6053 }),
6054 )
6055 .await;
6056
6057 let project = Project::test(fs, ["/dir"], cx).await;
6058 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6059 let buffer = project
6060 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6061 .await
6062 .unwrap();
6063
6064 let mut fake_server = fake_servers.next().await.unwrap();
6065 let lsp_document_version = fake_server
6066 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6067 .await
6068 .text_document
6069 .version;
6070
6071 // Simulate editing the buffer after the language server computes some edits.
6072 buffer.update(cx, |buffer, cx| {
6073 buffer.edit(
6074 [(
6075 Point::new(0, 0)..Point::new(0, 0),
6076 "// above first function\n",
6077 )],
6078 cx,
6079 );
6080 buffer.edit(
6081 [(
6082 Point::new(2, 0)..Point::new(2, 0),
6083 " // inside first function\n",
6084 )],
6085 cx,
6086 );
6087 buffer.edit(
6088 [(
6089 Point::new(6, 4)..Point::new(6, 4),
6090 "// inside second function ",
6091 )],
6092 cx,
6093 );
6094
6095 assert_eq!(
6096 buffer.text(),
6097 "
6098 // above first function
6099 fn a() {
6100 // inside first function
6101 f1();
6102 }
6103 fn b() {
6104 // inside second function f2();
6105 }
6106 fn c() {
6107 f3();
6108 }
6109 "
6110 .unindent()
6111 );
6112 });
6113
6114 let edits = project
6115 .update(cx, |project, cx| {
6116 project.edits_from_lsp(
6117 &buffer,
6118 vec![
6119 // replace body of first function
6120 lsp::TextEdit {
6121 range: lsp::Range::new(
6122 lsp::Position::new(0, 0),
6123 lsp::Position::new(3, 0),
6124 ),
6125 new_text: "
6126 fn a() {
6127 f10();
6128 }
6129 "
6130 .unindent(),
6131 },
6132 // edit inside second function
6133 lsp::TextEdit {
6134 range: lsp::Range::new(
6135 lsp::Position::new(4, 6),
6136 lsp::Position::new(4, 6),
6137 ),
6138 new_text: "00".into(),
6139 },
6140 // edit inside third function via two distinct edits
6141 lsp::TextEdit {
6142 range: lsp::Range::new(
6143 lsp::Position::new(7, 5),
6144 lsp::Position::new(7, 5),
6145 ),
6146 new_text: "4000".into(),
6147 },
6148 lsp::TextEdit {
6149 range: lsp::Range::new(
6150 lsp::Position::new(7, 5),
6151 lsp::Position::new(7, 6),
6152 ),
6153 new_text: "".into(),
6154 },
6155 ],
6156 Some(lsp_document_version),
6157 cx,
6158 )
6159 })
6160 .await
6161 .unwrap();
6162
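        // Apply the returned edits. Although they were expressed against an older
        // document version, they land in the right places in the edited buffer.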
6163 buffer.update(cx, |buffer, cx| {
6164 for (range, new_text) in edits {
6165 buffer.edit([(range, new_text)], cx);
6166 }
6167 assert_eq!(
6168 buffer.text(),
6169 "
6170 // above first function
6171 fn a() {
6172 // inside first function
6173 f10();
6174 }
6175 fn b() {
6176 // inside second function f200();
6177 }
6178 fn c() {
6179 f4000();
6180 }
6181 "
6182 .unindent()
6183 );
6184 });
6185 }
6186
6187 #[gpui::test]
6188 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6189 cx.foreground().forbid_parking();
6190
6191 let text = "
6192 use a::b;
6193 use a::c;
6194
6195 fn f() {
6196 b();
6197 c();
6198 }
6199 "
6200 .unindent();
6201
6202 let fs = FakeFs::new(cx.background());
6203 fs.insert_tree(
6204 "/dir",
6205 json!({
6206 "a.rs": text.clone(),
6207 }),
6208 )
6209 .await;
6210
6211 let project = Project::test(fs, ["/dir"], cx).await;
6212 let buffer = project
6213 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6214 .await
6215 .unwrap();
6216
6217 // Simulate the language server sending us a small edit in the form of a very large diff.
6218 // Rust-analyzer does this when performing a merge-imports code action.
6219 let edits = project
6220 .update(cx, |project, cx| {
6221 project.edits_from_lsp(
6222 &buffer,
6223 [
6224 // Replace the first use statement without editing the semicolon.
6225 lsp::TextEdit {
6226 range: lsp::Range::new(
6227 lsp::Position::new(0, 4),
6228 lsp::Position::new(0, 8),
6229 ),
6230 new_text: "a::{b, c}".into(),
6231 },
6232 // Reinsert the remainder of the file between the semicolon and the final
6233 // newline of the file.
6234 lsp::TextEdit {
6235 range: lsp::Range::new(
6236 lsp::Position::new(0, 9),
6237 lsp::Position::new(0, 9),
6238 ),
6239 new_text: "\n\n".into(),
6240 },
6241 lsp::TextEdit {
6242 range: lsp::Range::new(
6243 lsp::Position::new(0, 9),
6244 lsp::Position::new(0, 9),
6245 ),
6246 new_text: "
6247 fn f() {
6248 b();
6249 c();
6250 }"
6251 .unindent(),
6252 },
6253 // Delete everything after the first newline of the file.
6254 lsp::TextEdit {
6255 range: lsp::Range::new(
6256 lsp::Position::new(1, 0),
6257 lsp::Position::new(7, 0),
6258 ),
6259 new_text: "".into(),
6260 },
6261 ],
6262 None,
6263 cx,
6264 )
6265 })
6266 .await
6267 .unwrap();
6268
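        // The four LSP edits are condensed into two minimal buffer edits.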
6269 buffer.update(cx, |buffer, cx| {
6270 let edits = edits
6271 .into_iter()
6272 .map(|(range, text)| {
6273 (
6274 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6275 text,
6276 )
6277 })
6278 .collect::<Vec<_>>();
6279
6280 assert_eq!(
6281 edits,
6282 [
6283 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6284 (Point::new(1, 0)..Point::new(2, 0), "".into())
6285 ]
6286 );
6287
6288 for (range, new_text) in edits {
6289 buffer.edit([(range, new_text)], cx);
6290 }
6291 assert_eq!(
6292 buffer.text(),
6293 "
6294 use a::{b, c};
6295
6296 fn f() {
6297 b();
6298 c();
6299 }
6300 "
6301 .unindent()
6302 );
6303 });
6304 }
6305
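    // Collects the buffer's chunks in `range`, merging adjacent chunks that share
    // the same diagnostic severity.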
6306 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6307 buffer: &Buffer,
6308 range: Range<T>,
6309 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6310 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6311 for chunk in buffer.snapshot().chunks(range, true) {
6312 if chunks.last().map_or(false, |prev_chunk| {
6313 prev_chunk.1 == chunk.diagnostic_severity
6314 }) {
6315 chunks.last_mut().unwrap().0.push_str(chunk.text);
6316 } else {
6317 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6318 }
6319 }
6320 chunks
6321 }
6322
6323 #[gpui::test]
6324 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6325 let dir = temp_tree(json!({
6326 "root": {
6327 "dir1": {},
6328 "dir2": {
6329 "dir3": {}
6330 }
6331 }
6332 }));
6333
6334 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6335 let cancel_flag = Default::default();
6336 let results = project
6337 .read_with(cx, |project, cx| {
6338 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6339 })
6340 .await;
6341
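        // The worktree contains only directories, so there are no file paths to match.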
6342 assert!(results.is_empty());
6343 }
6344
6345 #[gpui::test]
6346 async fn test_definition(cx: &mut gpui::TestAppContext) {
6347 let mut language = Language::new(
6348 LanguageConfig {
6349 name: "Rust".into(),
6350 path_suffixes: vec!["rs".to_string()],
6351 ..Default::default()
6352 },
6353 Some(tree_sitter_rust::language()),
6354 );
6355 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6356
6357 let fs = FakeFs::new(cx.background());
6358 fs.insert_tree(
6359 "/dir",
6360 json!({
6361 "a.rs": "const fn a() { A }",
6362 "b.rs": "const y: i32 = crate::a()",
6363 }),
6364 )
6365 .await;
6366
6367 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6368 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6369
6370 let buffer = project
6371 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6372 .await
6373 .unwrap();
6374
6375 let fake_server = fake_servers.next().await.unwrap();
6376 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6377 let params = params.text_document_position_params;
6378 assert_eq!(
6379 params.text_document.uri.to_file_path().unwrap(),
6380 Path::new("/dir/b.rs"),
6381 );
6382 assert_eq!(params.position, lsp::Position::new(0, 22));
6383
6384 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6385 lsp::Location::new(
6386 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6387 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6388 ),
6389 )))
6390 });
6391
6392 let mut definitions = project
6393 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6394 .await
6395 .unwrap();
6396
6397 assert_eq!(definitions.len(), 1);
6398 let definition = definitions.pop().unwrap();
6399 cx.update(|cx| {
6400 let target_buffer = definition.buffer.read(cx);
6401 assert_eq!(
6402 target_buffer
6403 .file()
6404 .unwrap()
6405 .as_local()
6406 .unwrap()
6407 .abs_path(cx),
6408 Path::new("/dir/a.rs"),
6409 );
6410 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
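            // The definition's target file lives outside the project, so it is added
            // as an invisible worktree.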
6411 assert_eq!(
6412 list_worktrees(&project, cx),
6413 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6414 );
6415
6416 drop(definition);
6417 });
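        // Once the definition is dropped, the invisible worktree is released.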
6418 cx.read(|cx| {
6419 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6420 });
6421
6422 fn list_worktrees<'a>(
6423 project: &'a ModelHandle<Project>,
6424 cx: &'a AppContext,
6425 ) -> Vec<(&'a Path, bool)> {
6426 project
6427 .read(cx)
6428 .worktrees(cx)
6429 .map(|worktree| {
6430 let worktree = worktree.read(cx);
6431 (
6432 worktree.as_local().unwrap().abs_path().as_ref(),
6433 worktree.is_visible(),
6434 )
6435 })
6436 .collect::<Vec<_>>()
6437 }
6438 }
6439
6440 #[gpui::test]
6441 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6442 let mut language = Language::new(
6443 LanguageConfig {
6444 name: "TypeScript".into(),
6445 path_suffixes: vec!["ts".to_string()],
6446 ..Default::default()
6447 },
6448 Some(tree_sitter_typescript::language_typescript()),
6449 );
6450 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6451
6452 let fs = FakeFs::new(cx.background());
6453 fs.insert_tree(
6454 "/dir",
6455 json!({
6456 "a.ts": "",
6457 }),
6458 )
6459 .await;
6460
6461 let project = Project::test(fs, ["/dir"], cx).await;
6462 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6463 let buffer = project
6464 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6465 .await
6466 .unwrap();
6467
6468 let fake_server = fake_language_servers.next().await.unwrap();
6469
6470 let text = "let a = b.fqn";
6471 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6472 let completions = project.update(cx, |project, cx| {
6473 project.completions(&buffer, text.len(), cx)
6474 });
6475
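        // The server's completion has no text edit, so the replacement range must be
        // inferred from the partial word preceding the cursor.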
6476 fake_server
6477 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6478 Ok(Some(lsp::CompletionResponse::Array(vec![
6479 lsp::CompletionItem {
6480 label: "fullyQualifiedName?".into(),
6481 insert_text: Some("fullyQualifiedName".into()),
6482 ..Default::default()
6483 },
6484 ])))
6485 })
6486 .next()
6487 .await;
6488 let completions = completions.await.unwrap();
6489 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6490 assert_eq!(completions.len(), 1);
6491 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6492 assert_eq!(
6493 completions[0].old_range.to_offset(&snapshot),
6494 text.len() - 3..text.len()
6495 );
6496 }
6497
6498 #[gpui::test(iterations = 10)]
6499 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6500 let mut language = Language::new(
6501 LanguageConfig {
6502 name: "TypeScript".into(),
6503 path_suffixes: vec!["ts".to_string()],
6504 ..Default::default()
6505 },
6506 None,
6507 );
6508 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6509
6510 let fs = FakeFs::new(cx.background());
6511 fs.insert_tree(
6512 "/dir",
6513 json!({
6514 "a.ts": "a",
6515 }),
6516 )
6517 .await;
6518
6519 let project = Project::test(fs, ["/dir"], cx).await;
6520 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6521 let buffer = project
6522 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6523 .await
6524 .unwrap();
6525
6526 let fake_server = fake_language_servers.next().await.unwrap();
6527
6528        // The language server returns code actions that contain commands rather than edits.
6529 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6530 fake_server
6531 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6532 Ok(Some(vec![
6533 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6534 title: "The code action".into(),
6535 command: Some(lsp::Command {
6536 title: "The command".into(),
6537 command: "_the/command".into(),
6538 arguments: Some(vec![json!("the-argument")]),
6539 }),
6540 ..Default::default()
6541 }),
6542 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6543 title: "two".into(),
6544 ..Default::default()
6545 }),
6546 ]))
6547 })
6548 .next()
6549 .await;
6550
6551 let action = actions.await.unwrap()[0].clone();
6552 let apply = project.update(cx, |project, cx| {
6553 project.apply_code_action(buffer.clone(), action, true, cx)
6554 });
6555
6556        // Resolving the code action does not populate its edits. In the absence of
6557 // edits, we must execute the given command.
6558 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6559 |action, _| async move { Ok(action) },
6560 );
6561
6562 // While executing the command, the language server sends the editor
6563        // a `workspace/applyEdit` request.
6564 fake_server
6565 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6566 let fake = fake_server.clone();
6567 move |params, _| {
6568 assert_eq!(params.command, "_the/command");
6569 let fake = fake.clone();
6570 async move {
6571 fake.server
6572 .request::<lsp::request::ApplyWorkspaceEdit>(
6573 lsp::ApplyWorkspaceEditParams {
6574 label: None,
6575 edit: lsp::WorkspaceEdit {
6576 changes: Some(
6577 [(
6578 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6579 vec![lsp::TextEdit {
6580 range: lsp::Range::new(
6581 lsp::Position::new(0, 0),
6582 lsp::Position::new(0, 0),
6583 ),
6584 new_text: "X".into(),
6585 }],
6586 )]
6587 .into_iter()
6588 .collect(),
6589 ),
6590 ..Default::default()
6591 },
6592 },
6593 )
6594 .await
6595 .unwrap();
6596 Ok(Some(json!(null)))
6597 }
6598 }
6599 })
6600 .next()
6601 .await;
6602
6603 // Applying the code action returns a project transaction containing the edits
6604        // sent by the language server in its `workspace/applyEdit` request.
6605 let transaction = apply.await.unwrap();
6606 assert!(transaction.0.contains_key(&buffer));
6607 buffer.update(cx, |buffer, cx| {
6608 assert_eq!(buffer.text(), "Xa");
6609 buffer.undo(cx);
6610 assert_eq!(buffer.text(), "a");
6611 });
6612 }
6613
6614 #[gpui::test]
6615 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6616 let fs = FakeFs::new(cx.background());
6617 fs.insert_tree(
6618 "/dir",
6619 json!({
6620 "file1": "the old contents",
6621 }),
6622 )
6623 .await;
6624
6625 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6626 let buffer = project
6627 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6628 .await
6629 .unwrap();
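        // Make a large edit, then save; the file on disk should contain the buffer's new text.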
6630 buffer
6631 .update(cx, |buffer, cx| {
6632 assert_eq!(buffer.text(), "the old contents");
6633 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6634 buffer.save(cx)
6635 })
6636 .await
6637 .unwrap();
6638
6639 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6640 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6641 }
6642
6643 #[gpui::test]
6644 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6645 let fs = FakeFs::new(cx.background());
6646 fs.insert_tree(
6647 "/dir",
6648 json!({
6649 "file1": "the old contents",
6650 }),
6651 )
6652 .await;
6653
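        // Open the project with a single file as its root, so the worktree is rooted at
        // the file rather than at a directory.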
6654 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6655 let buffer = project
6656 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6657 .await
6658 .unwrap();
6659 buffer
6660 .update(cx, |buffer, cx| {
6661 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6662 buffer.save(cx)
6663 })
6664 .await
6665 .unwrap();
6666
6667 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6668 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6669 }
6670
6671 #[gpui::test]
6672 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6673 let fs = FakeFs::new(cx.background());
6674 fs.insert_tree("/dir", json!({})).await;
6675
6676 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6677 let buffer = project.update(cx, |project, cx| {
6678 project.create_buffer("", None, cx).unwrap()
6679 });
6680 buffer.update(cx, |buffer, cx| {
6681 buffer.edit([(0..0, "abc")], cx);
6682 assert!(buffer.is_dirty());
6683 assert!(!buffer.has_conflict());
6684 });
6685 project
6686 .update(cx, |project, cx| {
6687 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6688 })
6689 .await
6690 .unwrap();
6691 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6692 buffer.read_with(cx, |buffer, cx| {
6693 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6694 assert!(!buffer.is_dirty());
6695 assert!(!buffer.has_conflict());
6696 });
6697
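        // Opening the buffer's new path returns the same buffer that was just saved.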
6698 let opened_buffer = project
6699 .update(cx, |project, cx| {
6700 project.open_local_buffer("/dir/file1", cx)
6701 })
6702 .await
6703 .unwrap();
6704 assert_eq!(opened_buffer, buffer);
6705 }
6706
6707 #[gpui::test(retries = 5)]
6708 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6709 let dir = temp_tree(json!({
6710 "a": {
6711 "file1": "",
6712 "file2": "",
6713 "file3": "",
6714 },
6715 "b": {
6716 "c": {
6717 "file4": "",
6718 "file5": "",
6719 }
6720 }
6721 }));
6722
6723 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6724 let rpc = project.read_with(cx, |p, _| p.client.clone());
6725
6726 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6727 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6728 async move { buffer.await.unwrap() }
6729 };
6730 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6731 project.read_with(cx, |project, cx| {
6732 let tree = project.worktrees(cx).next().unwrap();
6733 tree.read(cx)
6734 .entry_for_path(path)
6735 .expect(&format!("no entry for path {}", path))
6736 .id
6737 })
6738 };
6739
6740 let buffer2 = buffer_for_path("a/file2", cx).await;
6741 let buffer3 = buffer_for_path("a/file3", cx).await;
6742 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6743 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6744
6745 let file2_id = id_for_path("a/file2", &cx);
6746 let file3_id = id_for_path("a/file3", &cx);
6747 let file4_id = id_for_path("b/c/file4", &cx);
6748
6749 // Create a remote copy of this worktree.
6750 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6751 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6752 let (remote, load_task) = cx.update(|cx| {
6753 Worktree::remote(
6754 1,
6755 1,
6756 initial_snapshot.to_proto(&Default::default(), true),
6757 rpc.clone(),
6758 cx,
6759 )
6760 });
6761        // Wait for the remote worktree to finish loading its initial snapshot.
6762 load_task.await;
6763
6764 cx.read(|cx| {
6765 assert!(!buffer2.read(cx).is_dirty());
6766 assert!(!buffer3.read(cx).is_dirty());
6767 assert!(!buffer4.read(cx).is_dirty());
6768 assert!(!buffer5.read(cx).is_dirty());
6769 });
6770
6771 // Rename and delete files and directories.
6772 tree.flush_fs_events(&cx).await;
6773 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6774 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6775 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6776 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6777 tree.flush_fs_events(&cx).await;
6778
6779 let expected_paths = vec![
6780 "a",
6781 "a/file1",
6782 "a/file2.new",
6783 "b",
6784 "d",
6785 "d/file3",
6786 "d/file4",
6787 ];
6788
6789 cx.read(|app| {
6790 assert_eq!(
6791 tree.read(app)
6792 .paths()
6793 .map(|p| p.to_str().unwrap())
6794 .collect::<Vec<_>>(),
6795 expected_paths
6796 );
6797
6798 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6799 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6800 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6801
6802 assert_eq!(
6803 buffer2.read(app).file().unwrap().path().as_ref(),
6804 Path::new("a/file2.new")
6805 );
6806 assert_eq!(
6807 buffer3.read(app).file().unwrap().path().as_ref(),
6808 Path::new("d/file3")
6809 );
6810 assert_eq!(
6811 buffer4.read(app).file().unwrap().path().as_ref(),
6812 Path::new("d/file4")
6813 );
6814 assert_eq!(
6815 buffer5.read(app).file().unwrap().path().as_ref(),
6816 Path::new("b/c/file5")
6817 );
6818
6819 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6820 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6821 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6822 assert!(buffer5.read(app).file().unwrap().is_deleted());
6823 });
6824
6825 // Update the remote worktree. Check that it becomes consistent with the
6826 // local worktree.
6827 remote.update(cx, |remote, cx| {
6828 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6829 &initial_snapshot,
6830 1,
6831 1,
6832 true,
6833 );
6834 remote
6835 .as_remote_mut()
6836 .unwrap()
6837 .snapshot
6838 .apply_remote_update(update_message)
6839 .unwrap();
6840
6841 assert_eq!(
6842 remote
6843 .paths()
6844 .map(|p| p.to_str().unwrap())
6845 .collect::<Vec<_>>(),
6846 expected_paths
6847 );
6848 });
6849 }
6850
6851 #[gpui::test]
6852 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6853 let fs = FakeFs::new(cx.background());
6854 fs.insert_tree(
6855 "/dir",
6856 json!({
6857 "a.txt": "a-contents",
6858 "b.txt": "b-contents",
6859 }),
6860 )
6861 .await;
6862
6863 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6864
6865 // Spawn multiple tasks to open paths, repeating some paths.
6866 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6867 (
6868 p.open_local_buffer("/dir/a.txt", cx),
6869 p.open_local_buffer("/dir/b.txt", cx),
6870 p.open_local_buffer("/dir/a.txt", cx),
6871 )
6872 });
6873
6874 let buffer_a_1 = buffer_a_1.await.unwrap();
6875 let buffer_a_2 = buffer_a_2.await.unwrap();
6876 let buffer_b = buffer_b.await.unwrap();
6877 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6878 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6879
6880 // There is only one buffer per path.
6881 let buffer_a_id = buffer_a_1.id();
6882 assert_eq!(buffer_a_2.id(), buffer_a_id);
6883
6884 // Open the same path again while it is still open.
6885 drop(buffer_a_1);
6886 let buffer_a_3 = project
6887 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
6888 .await
6889 .unwrap();
6890
6891 // There's still only one buffer per path.
6892 assert_eq!(buffer_a_3.id(), buffer_a_id);
6893 }
6894
6895 #[gpui::test]
6896 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6897 let fs = FakeFs::new(cx.background());
6898 fs.insert_tree(
6899 "/dir",
6900 json!({
6901 "file1": "abc",
6902 "file2": "def",
6903 "file3": "ghi",
6904 }),
6905 )
6906 .await;
6907
6908 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6909
6910 let buffer1 = project
6911 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6912 .await
6913 .unwrap();
6914 let events = Rc::new(RefCell::new(Vec::new()));
6915
6916 // initially, the buffer isn't dirty.
6917 buffer1.update(cx, |buffer, cx| {
6918 cx.subscribe(&buffer1, {
6919 let events = events.clone();
6920 move |_, _, event, _| match event {
6921 BufferEvent::Operation(_) => {}
6922 _ => events.borrow_mut().push(event.clone()),
6923 }
6924 })
6925 .detach();
6926
6927 assert!(!buffer.is_dirty());
6928 assert!(events.borrow().is_empty());
6929
6930 buffer.edit([(1..2, "")], cx);
6931 });
6932
6933 // after the first edit, the buffer is dirty, and emits a dirtied event.
6934 buffer1.update(cx, |buffer, cx| {
6935            assert_eq!(buffer.text(), "ac");
6936 assert!(buffer.is_dirty());
6937 assert_eq!(
6938 *events.borrow(),
6939 &[language::Event::Edited, language::Event::Dirtied]
6940 );
6941 events.borrow_mut().clear();
6942 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6943 });
6944
6945 // after saving, the buffer is not dirty, and emits a saved event.
6946 buffer1.update(cx, |buffer, cx| {
6947 assert!(!buffer.is_dirty());
6948 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6949 events.borrow_mut().clear();
6950
6951 buffer.edit([(1..1, "B")], cx);
6952 buffer.edit([(2..2, "D")], cx);
6953 });
6954
6955 // after editing again, the buffer is dirty, and emits another dirty event.
6956 buffer1.update(cx, |buffer, cx| {
6957            assert_eq!(buffer.text(), "aBDc");
6958 assert!(buffer.is_dirty());
6959 assert_eq!(
6960 *events.borrow(),
6961 &[
6962 language::Event::Edited,
6963 language::Event::Dirtied,
6964 language::Event::Edited,
6965 ],
6966 );
6967 events.borrow_mut().clear();
6968
6969 // TODO - currently, after restoring the buffer to its
6970            // previously-saved state, the buffer is still considered dirty.
6971            buffer.edit([(1..3, "")], cx);
6972            assert_eq!(buffer.text(), "ac");
6973 assert!(buffer.is_dirty());
6974 });
6975
6976 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6977
6978 // When a file is deleted, the buffer is considered dirty.
6979 let events = Rc::new(RefCell::new(Vec::new()));
6980 let buffer2 = project
6981 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
6982 .await
6983 .unwrap();
6984 buffer2.update(cx, |_, cx| {
6985 cx.subscribe(&buffer2, {
6986 let events = events.clone();
6987 move |_, _, event, _| events.borrow_mut().push(event.clone())
6988 })
6989 .detach();
6990 });
6991
6992 fs.remove_file("/dir/file2".as_ref(), Default::default())
6993 .await
6994 .unwrap();
6995 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6996 assert_eq!(
6997 *events.borrow(),
6998 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6999 );
7000
7001 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7002 let events = Rc::new(RefCell::new(Vec::new()));
7003 let buffer3 = project
7004 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7005 .await
7006 .unwrap();
7007 buffer3.update(cx, |_, cx| {
7008 cx.subscribe(&buffer3, {
7009 let events = events.clone();
7010 move |_, _, event, _| events.borrow_mut().push(event.clone())
7011 })
7012 .detach();
7013 });
7014
7015 buffer3.update(cx, |buffer, cx| {
7016 buffer.edit([(0..0, "x")], cx);
7017 });
7018 events.borrow_mut().clear();
7019 fs.remove_file("/dir/file3".as_ref(), Default::default())
7020 .await
7021 .unwrap();
7022 buffer3
7023 .condition(&cx, |_, _| !events.borrow().is_empty())
7024 .await;
7025 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7026 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7027 }
7028
7029 #[gpui::test]
7030 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7031 let initial_contents = "aaa\nbbbbb\nc\n";
7032 let fs = FakeFs::new(cx.background());
7033 fs.insert_tree(
7034 "/dir",
7035 json!({
7036 "the-file": initial_contents,
7037 }),
7038 )
7039 .await;
7040 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7041 let buffer = project
7042 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7043 .await
7044 .unwrap();
7045
7046 let anchors = (0..3)
7047 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7048 .collect::<Vec<_>>();
7049
7050 // Change the file on disk, adding two new lines of text, and removing
7051 // one line.
7052 buffer.read_with(cx, |buffer, _| {
7053 assert!(!buffer.is_dirty());
7054 assert!(!buffer.has_conflict());
7055 });
7056 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7057 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7058 .await
7059 .unwrap();
7060
7061 // Because the buffer was not modified, it is reloaded from disk. Its
7062 // contents are edited according to the diff between the old and new
7063 // file contents.
7064 buffer
7065 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7066 .await;
7067
7068 buffer.update(cx, |buffer, _| {
7069 assert_eq!(buffer.text(), new_contents);
7070 assert!(!buffer.is_dirty());
7071 assert!(!buffer.has_conflict());
7072
7073 let anchor_positions = anchors
7074 .iter()
7075 .map(|anchor| anchor.to_point(&*buffer))
7076 .collect::<Vec<_>>();
7077 assert_eq!(
7078 anchor_positions,
7079 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7080 );
7081 });
7082
7083 // Modify the buffer
7084 buffer.update(cx, |buffer, cx| {
7085 buffer.edit([(0..0, " ")], cx);
7086 assert!(buffer.is_dirty());
7087 assert!(!buffer.has_conflict());
7088 });
7089
7090 // Change the file on disk again, adding blank lines to the beginning.
7091 fs.save(
7092 "/dir/the-file".as_ref(),
7093 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7094 )
7095 .await
7096 .unwrap();
7097
7098 // Because the buffer is modified, it doesn't reload from disk, but is
7099 // marked as having a conflict.
7100 buffer
7101 .condition(&cx, |buffer, _| buffer.has_conflict())
7102 .await;
7103 }
7104
7105 #[gpui::test]
7106 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7107 cx.foreground().forbid_parking();
7108
7109 let fs = FakeFs::new(cx.background());
7110 fs.insert_tree(
7111 "/the-dir",
7112 json!({
7113 "a.rs": "
7114 fn foo(mut v: Vec<usize>) {
7115 for x in &v {
7116 v.push(1);
7117 }
7118 }
7119 "
7120 .unindent(),
7121 }),
7122 )
7123 .await;
7124
7125 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7126 let buffer = project
7127 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7128 .await
7129 .unwrap();
7130
7131 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7132 let message = lsp::PublishDiagnosticsParams {
7133 uri: buffer_uri.clone(),
7134 diagnostics: vec![
7135 lsp::Diagnostic {
7136 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7137 severity: Some(DiagnosticSeverity::WARNING),
7138 message: "error 1".to_string(),
7139 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7140 location: lsp::Location {
7141 uri: buffer_uri.clone(),
7142 range: lsp::Range::new(
7143 lsp::Position::new(1, 8),
7144 lsp::Position::new(1, 9),
7145 ),
7146 },
7147 message: "error 1 hint 1".to_string(),
7148 }]),
7149 ..Default::default()
7150 },
7151 lsp::Diagnostic {
7152 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7153 severity: Some(DiagnosticSeverity::HINT),
7154 message: "error 1 hint 1".to_string(),
7155 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7156 location: lsp::Location {
7157 uri: buffer_uri.clone(),
7158 range: lsp::Range::new(
7159 lsp::Position::new(1, 8),
7160 lsp::Position::new(1, 9),
7161 ),
7162 },
7163 message: "original diagnostic".to_string(),
7164 }]),
7165 ..Default::default()
7166 },
7167 lsp::Diagnostic {
7168 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7169 severity: Some(DiagnosticSeverity::ERROR),
7170 message: "error 2".to_string(),
7171 related_information: Some(vec![
7172 lsp::DiagnosticRelatedInformation {
7173 location: lsp::Location {
7174 uri: buffer_uri.clone(),
7175 range: lsp::Range::new(
7176 lsp::Position::new(1, 13),
7177 lsp::Position::new(1, 15),
7178 ),
7179 },
7180 message: "error 2 hint 1".to_string(),
7181 },
7182 lsp::DiagnosticRelatedInformation {
7183 location: lsp::Location {
7184 uri: buffer_uri.clone(),
7185 range: lsp::Range::new(
7186 lsp::Position::new(1, 13),
7187 lsp::Position::new(1, 15),
7188 ),
7189 },
7190 message: "error 2 hint 2".to_string(),
7191 },
7192 ]),
7193 ..Default::default()
7194 },
7195 lsp::Diagnostic {
7196 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7197 severity: Some(DiagnosticSeverity::HINT),
7198 message: "error 2 hint 1".to_string(),
7199 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7200 location: lsp::Location {
7201 uri: buffer_uri.clone(),
7202 range: lsp::Range::new(
7203 lsp::Position::new(2, 8),
7204 lsp::Position::new(2, 17),
7205 ),
7206 },
7207 message: "original diagnostic".to_string(),
7208 }]),
7209 ..Default::default()
7210 },
7211 lsp::Diagnostic {
7212 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7213 severity: Some(DiagnosticSeverity::HINT),
7214 message: "error 2 hint 2".to_string(),
7215 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7216 location: lsp::Location {
7217 uri: buffer_uri.clone(),
7218 range: lsp::Range::new(
7219 lsp::Position::new(2, 8),
7220 lsp::Position::new(2, 17),
7221 ),
7222 },
7223 message: "original diagnostic".to_string(),
7224 }]),
7225 ..Default::default()
7226 },
7227 ],
7228 version: None,
7229 };
7230
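        // Diagnostics that reference each other via `relatedInformation` are grouped:
        // each primary error shares a group id with its hints.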
7231 project
7232 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7233 .unwrap();
7234 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7235
7236 assert_eq!(
7237 buffer
7238 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7239 .collect::<Vec<_>>(),
7240 &[
7241 DiagnosticEntry {
7242 range: Point::new(1, 8)..Point::new(1, 9),
7243 diagnostic: Diagnostic {
7244 severity: DiagnosticSeverity::WARNING,
7245 message: "error 1".to_string(),
7246 group_id: 0,
7247 is_primary: true,
7248 ..Default::default()
7249 }
7250 },
7251 DiagnosticEntry {
7252 range: Point::new(1, 8)..Point::new(1, 9),
7253 diagnostic: Diagnostic {
7254 severity: DiagnosticSeverity::HINT,
7255 message: "error 1 hint 1".to_string(),
7256 group_id: 0,
7257 is_primary: false,
7258 ..Default::default()
7259 }
7260 },
7261 DiagnosticEntry {
7262 range: Point::new(1, 13)..Point::new(1, 15),
7263 diagnostic: Diagnostic {
7264 severity: DiagnosticSeverity::HINT,
7265 message: "error 2 hint 1".to_string(),
7266 group_id: 1,
7267 is_primary: false,
7268 ..Default::default()
7269 }
7270 },
7271 DiagnosticEntry {
7272 range: Point::new(1, 13)..Point::new(1, 15),
7273 diagnostic: Diagnostic {
7274 severity: DiagnosticSeverity::HINT,
7275 message: "error 2 hint 2".to_string(),
7276 group_id: 1,
7277 is_primary: false,
7278 ..Default::default()
7279 }
7280 },
7281 DiagnosticEntry {
7282 range: Point::new(2, 8)..Point::new(2, 17),
7283 diagnostic: Diagnostic {
7284 severity: DiagnosticSeverity::ERROR,
7285 message: "error 2".to_string(),
7286 group_id: 1,
7287 is_primary: true,
7288 ..Default::default()
7289 }
7290 }
7291 ]
7292 );
7293
7294 assert_eq!(
7295 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7296 &[
7297 DiagnosticEntry {
7298 range: Point::new(1, 8)..Point::new(1, 9),
7299 diagnostic: Diagnostic {
7300 severity: DiagnosticSeverity::WARNING,
7301 message: "error 1".to_string(),
7302 group_id: 0,
7303 is_primary: true,
7304 ..Default::default()
7305 }
7306 },
7307 DiagnosticEntry {
7308 range: Point::new(1, 8)..Point::new(1, 9),
7309 diagnostic: Diagnostic {
7310 severity: DiagnosticSeverity::HINT,
7311 message: "error 1 hint 1".to_string(),
7312 group_id: 0,
7313 is_primary: false,
7314 ..Default::default()
7315 }
7316 },
7317 ]
7318 );
7319 assert_eq!(
7320 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7321 &[
7322 DiagnosticEntry {
7323 range: Point::new(1, 13)..Point::new(1, 15),
7324 diagnostic: Diagnostic {
7325 severity: DiagnosticSeverity::HINT,
7326 message: "error 2 hint 1".to_string(),
7327 group_id: 1,
7328 is_primary: false,
7329 ..Default::default()
7330 }
7331 },
7332 DiagnosticEntry {
7333 range: Point::new(1, 13)..Point::new(1, 15),
7334 diagnostic: Diagnostic {
7335 severity: DiagnosticSeverity::HINT,
7336 message: "error 2 hint 2".to_string(),
7337 group_id: 1,
7338 is_primary: false,
7339 ..Default::default()
7340 }
7341 },
7342 DiagnosticEntry {
7343 range: Point::new(2, 8)..Point::new(2, 17),
7344 diagnostic: Diagnostic {
7345 severity: DiagnosticSeverity::ERROR,
7346 message: "error 2".to_string(),
7347 group_id: 1,
7348 is_primary: true,
7349 ..Default::default()
7350 }
7351 }
7352 ]
7353 );
7354 }
7355
7356 #[gpui::test]
7357 async fn test_rename(cx: &mut gpui::TestAppContext) {
7358 cx.foreground().forbid_parking();
7359
7360 let mut language = Language::new(
7361 LanguageConfig {
7362 name: "Rust".into(),
7363 path_suffixes: vec!["rs".to_string()],
7364 ..Default::default()
7365 },
7366 Some(tree_sitter_rust::language()),
7367 );
7368 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7369 capabilities: lsp::ServerCapabilities {
7370 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7371 prepare_provider: Some(true),
7372 work_done_progress_options: Default::default(),
7373 })),
7374 ..Default::default()
7375 },
7376 ..Default::default()
7377 });
7378
7379 let fs = FakeFs::new(cx.background());
7380 fs.insert_tree(
7381 "/dir",
7382 json!({
7383 "one.rs": "const ONE: usize = 1;",
7384 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7385 }),
7386 )
7387 .await;
7388
7389 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7390 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7391 let buffer = project
7392 .update(cx, |project, cx| {
7393 project.open_local_buffer("/dir/one.rs", cx)
7394 })
7395 .await
7396 .unwrap();
7397
7398 let fake_server = fake_servers.next().await.unwrap();
7399
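        // Prepare the rename. The server reports that the symbol at offset 7 occupies
        // the range 6..9 ("ONE").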
7400 let response = project.update(cx, |project, cx| {
7401 project.prepare_rename(buffer.clone(), 7, cx)
7402 });
7403 fake_server
7404 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7405 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7406 assert_eq!(params.position, lsp::Position::new(0, 7));
7407 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7408 lsp::Position::new(0, 6),
7409 lsp::Position::new(0, 9),
7410 ))))
7411 })
7412 .next()
7413 .await
7414 .unwrap();
7415 let range = response.await.unwrap().unwrap();
7416 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7417 assert_eq!(range, 6..9);
7418
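        // Perform the rename. The server's workspace edit touches both files, and the
        // resulting project transaction contains a buffer for each.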
7419 let response = project.update(cx, |project, cx| {
7420 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7421 });
7422 fake_server
7423 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7424 assert_eq!(
7425 params.text_document_position.text_document.uri.as_str(),
7426 "file:///dir/one.rs"
7427 );
7428 assert_eq!(
7429 params.text_document_position.position,
7430 lsp::Position::new(0, 7)
7431 );
7432 assert_eq!(params.new_name, "THREE");
7433 Ok(Some(lsp::WorkspaceEdit {
7434 changes: Some(
7435 [
7436 (
7437 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7438 vec![lsp::TextEdit::new(
7439 lsp::Range::new(
7440 lsp::Position::new(0, 6),
7441 lsp::Position::new(0, 9),
7442 ),
7443 "THREE".to_string(),
7444 )],
7445 ),
7446 (
7447 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7448 vec![
7449 lsp::TextEdit::new(
7450 lsp::Range::new(
7451 lsp::Position::new(0, 24),
7452 lsp::Position::new(0, 27),
7453 ),
7454 "THREE".to_string(),
7455 ),
7456 lsp::TextEdit::new(
7457 lsp::Range::new(
7458 lsp::Position::new(0, 35),
7459 lsp::Position::new(0, 38),
7460 ),
7461 "THREE".to_string(),
7462 ),
7463 ],
7464 ),
7465 ]
7466 .into_iter()
7467 .collect(),
7468 ),
7469 ..Default::default()
7470 }))
7471 })
7472 .next()
7473 .await
7474 .unwrap();
7475 let mut transaction = response.await.unwrap().0;
7476 assert_eq!(transaction.len(), 2);
7477 assert_eq!(
7478 transaction
7479 .remove_entry(&buffer)
7480 .unwrap()
7481 .0
7482 .read_with(cx, |buffer, _| buffer.text()),
7483 "const THREE: usize = 1;"
7484 );
7485 assert_eq!(
7486 transaction
7487 .into_keys()
7488 .next()
7489 .unwrap()
7490 .read_with(cx, |buffer, _| buffer.text()),
7491 "const TWO: usize = one::THREE + one::THREE;"
7492 );
7493 }
7494
7495 #[gpui::test]
7496 async fn test_search(cx: &mut gpui::TestAppContext) {
7497 let fs = FakeFs::new(cx.background());
7498 fs.insert_tree(
7499 "/dir",
7500 json!({
7501 "one.rs": "const ONE: usize = 1;",
7502 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7503 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7504 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7505 }),
7506 )
7507 .await;
7508 let project = Project::test(fs.clone(), ["/dir"], cx).await;
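        // Searching matches the contents of files on disk.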
7509 assert_eq!(
7510 search(&project, SearchQuery::text("TWO", false, true), cx)
7511 .await
7512 .unwrap(),
7513 HashMap::from_iter([
7514 ("two.rs".to_string(), vec![6..9]),
7515 ("three.rs".to_string(), vec![37..40])
7516 ])
7517 );
7518
7519 let buffer_4 = project
7520 .update(cx, |project, cx| {
7521 project.open_local_buffer("/dir/four.rs", cx)
7522 })
7523 .await
7524 .unwrap();
7525 buffer_4.update(cx, |buffer, cx| {
7526 let text = "two::TWO";
7527 buffer.edit([(20..28, text), (31..43, text)], cx);
7528 });
7529
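        // Unsaved edits in open buffers are reflected in the search results.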
7530 assert_eq!(
7531 search(&project, SearchQuery::text("TWO", false, true), cx)
7532 .await
7533 .unwrap(),
7534 HashMap::from_iter([
7535 ("two.rs".to_string(), vec![6..9]),
7536 ("three.rs".to_string(), vec![37..40]),
7537 ("four.rs".to_string(), vec![25..28, 36..39])
7538 ])
7539 );
7540
7541 async fn search(
7542 project: &ModelHandle<Project>,
7543 query: SearchQuery,
7544 cx: &mut gpui::TestAppContext,
7545 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7546 let results = project
7547 .update(cx, |project, cx| project.search(query, cx))
7548 .await?;
7549
7550 Ok(results
7551 .into_iter()
7552 .map(|(buffer, ranges)| {
7553 buffer.read_with(cx, |buffer, _| {
7554 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7555 let ranges = ranges
7556 .into_iter()
7557 .map(|range| range.to_offset(buffer))
7558 .collect::<Vec<_>>();
7559 (path, ranges)
7560 })
7561 })
7562 .collect())
7563 }
7564 }
7565}