pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

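/// State for a single project: its worktrees, open buffers, language servers,
/// and collaboration state. A project is either local (backed by the file
/// system) or remote (joined over RPC), as tracked by `client_state`.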
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}

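/// An open buffer entry. Buffers are held strongly while the project is shared
/// or remote and weakly otherwise; `Loading` accumulates operations that arrive
/// before the buffer itself has finished opening.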
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

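/// Whether this project is the local, authoritative copy or a remote replica
/// joined from another peer, along with the state each mode needs.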
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
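    /// Registers the RPC message and request handlers that let this model
    /// respond to collaboration traffic from remote peers.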
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

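    /// Creates a local project. A background task watches the client's
    /// connection status and registers or unregisters the project with the
    /// server as the connection comes and goes.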
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

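    /// Joins a project shared by another peer: authenticates, requests to
    /// join, builds remote worktrees from the response, and populates the
    /// collaborator list.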
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(_) => Err(anyhow!("rejected"))?,
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshared(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }

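    /// Registers the project with the server and then registers each of its
    /// local worktrees under the returned remote project id.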
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

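    /// Shares a registered local project with collaborators: upgrades weak
    /// buffer and worktree handles to strong ones so they stay alive for
    /// guests, then shares each worktree under the project's remote id.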
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    share.await?;
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }

    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

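    /// Opens the buffer at the given project path, reusing an already-open
    /// buffer when there is one and deduplicating concurrent loads of the same
    /// path through the `loading_buffers` map.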
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

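    /// Notifies the appropriate language server that a local buffer was
    /// opened, seeds the buffer with any diagnostics the worktree already has
    /// for its path, and records the snapshot used as document version 0.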
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_servers
                            .get(&(worktree_id, adapter.name()))
                            .cloned();
                    }
                }

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }

    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }

    fn language_servers_for_worktree(
        &self,
        worktree_id: WorktreeId,
    ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
        self.language_servers.iter().filter_map(
            move |((language_server_worktree_id, _), server)| {
                if *language_server_worktree_id == worktree_id {
                    Some(server)
                } else {
                    None
                }
            },
        )
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }

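    /// Starts (at most once per worktree and adapter) the language server for
    /// a language, wires up its notification and request handlers, and opens
    /// every already-open buffer in the worktree that matches the language.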
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }

    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }

    fn restart_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        let server_to_shutdown = self.language_servers.remove(&key);
        self.started_language_servers.remove(&key);
        server_to_shutdown
            .as_ref()
            .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
        cx.spawn_weak(|this, mut cx| async move {
            if let Some(this) = this.upgrade(&cx) {
                if let Some((_, server_to_shutdown)) = server_to_shutdown {
                    if let Some(shutdown_task) = server_to_shutdown.shutdown() {
                        shutdown_task.await;
                    }
                }

                this.update(&mut cx, |this, cx| {
                    this.start_language_server(worktree_id, worktree_path, language, cx);
                });
            }
        })
        .detach();
    }

    fn on_lsp_diagnostics_published(
        &mut self,
        server_id: usize,
        mut params: lsp::PublishDiagnosticsParams,
        adapter: &Arc<dyn LspAdapter>,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        adapter.process_diagnostics(&mut params);
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_started(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                    proto::LspDiskBasedDiagnosticsUpdating {},
                ),
            );
        }
        self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
            .log_err();
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_finished(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                    proto::LspDiskBasedDiagnosticsUpdated {},
                ),
            );
        }
    }

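    /// Translates `$/progress` notifications into project state. Progress on
    /// the adapter's disk-based diagnostics token toggles the disk-based
    /// diagnostic events; all other tokens update per-server pending work.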
1807 fn on_lsp_progress(
1808 &mut self,
1809 progress: lsp::ProgressParams,
1810 server_id: usize,
1811 disk_based_diagnostics_progress_token: Option<&str>,
1812 cx: &mut ModelContext<Self>,
1813 ) {
1814 let token = match progress.token {
1815 lsp::NumberOrString::String(token) => token,
1816 lsp::NumberOrString::Number(token) => {
1817 log::info!("skipping numeric progress token {}", token);
1818 return;
1819 }
1820 };
1821 let progress = match progress.value {
1822 lsp::ProgressParamsValue::WorkDone(value) => value,
1823 };
1824 let language_server_status =
1825 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1826 status
1827 } else {
1828 return;
1829 };
1830 match progress {
1831 lsp::WorkDoneProgress::Begin(_) => {
1832 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1833 language_server_status.pending_diagnostic_updates += 1;
1834 if language_server_status.pending_diagnostic_updates == 1 {
1835 self.disk_based_diagnostics_started(cx);
1836 self.broadcast_language_server_update(
1837 server_id,
1838 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1839 proto::LspDiskBasedDiagnosticsUpdating {},
1840 ),
1841 );
1842 }
1843 } else {
1844 self.on_lsp_work_start(server_id, token.clone(), cx);
1845 self.broadcast_language_server_update(
1846 server_id,
1847 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1848 token,
1849 }),
1850 );
1851 }
1852 }
1853 lsp::WorkDoneProgress::Report(report) => {
1854 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1855 self.on_lsp_work_progress(
1856 server_id,
1857 token.clone(),
1858 LanguageServerProgress {
1859 message: report.message.clone(),
1860 percentage: report.percentage.map(|p| p as usize),
1861 last_update_at: Instant::now(),
1862 },
1863 cx,
1864 );
1865 self.broadcast_language_server_update(
1866 server_id,
1867 proto::update_language_server::Variant::WorkProgress(
1868 proto::LspWorkProgress {
1869 token,
1870 message: report.message,
1871 percentage: report.percentage.map(|p| p as u32),
1872 },
1873 ),
1874 );
1875 }
1876 }
1877 lsp::WorkDoneProgress::End(_) => {
1878 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1879 language_server_status.pending_diagnostic_updates -= 1;
1880 if language_server_status.pending_diagnostic_updates == 0 {
1881 self.disk_based_diagnostics_finished(cx);
1882 self.broadcast_language_server_update(
1883 server_id,
1884 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1885 proto::LspDiskBasedDiagnosticsUpdated {},
1886 ),
1887 );
1888 }
1889 } else {
1890 self.on_lsp_work_end(server_id, token.clone(), cx);
1891 self.broadcast_language_server_update(
1892 server_id,
1893 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1894 token,
1895 }),
1896 );
1897 }
1898 }
1899 }
1900 }
1901
1902 fn on_lsp_work_start(
1903 &mut self,
1904 language_server_id: usize,
1905 token: String,
1906 cx: &mut ModelContext<Self>,
1907 ) {
1908 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1909 status.pending_work.insert(
1910 token,
1911 LanguageServerProgress {
1912 message: None,
1913 percentage: None,
1914 last_update_at: Instant::now(),
1915 },
1916 );
1917 cx.notify();
1918 }
1919 }
1920
1921 fn on_lsp_work_progress(
1922 &mut self,
1923 language_server_id: usize,
1924 token: String,
1925 progress: LanguageServerProgress,
1926 cx: &mut ModelContext<Self>,
1927 ) {
1928 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1929 status.pending_work.insert(token, progress);
1930 cx.notify();
1931 }
1932 }
1933
1934 fn on_lsp_work_end(
1935 &mut self,
1936 language_server_id: usize,
1937 token: String,
1938 cx: &mut ModelContext<Self>,
1939 ) {
1940 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1941 status.pending_work.remove(&token);
1942 cx.notify();
1943 }
1944 }
1945
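    /// Handles a server-initiated `workspace/applyEdit` request: applies the edit locally,
    /// remembers the resulting `ProjectTransaction` keyed by server id (so a pending
    /// code-action command can return it), and reports the edit as applied.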
1946 async fn on_lsp_workspace_edit(
1947 this: WeakModelHandle<Self>,
1948 params: lsp::ApplyWorkspaceEditParams,
1949 server_id: usize,
1950 adapter: Arc<dyn LspAdapter>,
1951 language_server: Arc<LanguageServer>,
1952 mut cx: AsyncAppContext,
1953 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1954 let this = this
1955 .upgrade(&cx)
1956            .ok_or_else(|| anyhow!("project closed"))?;
1957 let transaction = Self::deserialize_workspace_edit(
1958 this.clone(),
1959 params.edit,
1960 true,
1961 adapter.clone(),
1962 language_server.clone(),
1963 &mut cx,
1964 )
1965 .await
1966 .log_err();
1967 this.update(&mut cx, |this, _| {
1968 if let Some(transaction) = transaction {
1969 this.last_workspace_edits_by_language_server
1970 .insert(server_id, transaction);
1971 }
1972 });
1973 Ok(lsp::ApplyWorkspaceEditResponse {
1974 applied: true,
1975 failed_change: None,
1976 failure_reason: None,
1977 })
1978 }
1979
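    /// Forwards a language-server status change over RPC when the project has a remote id, so
    /// collaborators can mirror the server's state. Send failures are logged, not propagated.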
1980 fn broadcast_language_server_update(
1981 &self,
1982 language_server_id: usize,
1983 event: proto::update_language_server::Variant,
1984 ) {
1985 if let Some(project_id) = self.remote_id() {
1986 self.client
1987 .send(proto::UpdateLanguageServer {
1988 project_id,
1989 language_server_id: language_server_id as u64,
1990 variant: Some(event),
1991 })
1992 .log_err();
1993 }
1994 }
1995
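    /// Pushes new settings to every running language server via
    /// `workspace/didChangeConfiguration` and stores them for servers started later.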
1996 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1997 for (_, server) in self.language_servers.values() {
1998 server
1999 .notify::<lsp::notification::DidChangeConfiguration>(
2000 lsp::DidChangeConfigurationParams {
2001 settings: settings.clone(),
2002 },
2003 )
2004 .ok();
2005 }
2006 *self.language_server_settings.lock() = settings;
2007 }
2008
2009 pub fn language_server_statuses(
2010 &self,
2011 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2012 self.language_server_statuses.values()
2013 }
2014
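    /// Converts a `textDocument/publishDiagnostics` payload into diagnostic entries.
    ///
    /// Each top-level diagnostic becomes the primary entry of a new group, and its related
    /// information becomes secondary entries in the same group. Diagnostics whose related
    /// information points back at an existing primary are treated as supporting diagnostics:
    /// they only contribute severity and "unnecessary" flags to the entries they refer to.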
2015 pub fn update_diagnostics(
2016 &mut self,
2017 params: lsp::PublishDiagnosticsParams,
2018 disk_based_sources: &[&str],
2019 cx: &mut ModelContext<Self>,
2020 ) -> Result<()> {
2021 let abs_path = params
2022 .uri
2023 .to_file_path()
2024 .map_err(|_| anyhow!("URI is not a file"))?;
2025 let mut next_group_id = 0;
2026 let mut diagnostics = Vec::default();
2027 let mut primary_diagnostic_group_ids = HashMap::default();
2028 let mut sources_by_group_id = HashMap::default();
2029 let mut supporting_diagnostics = HashMap::default();
2030        for diagnostic in &params.diagnostics {
2031 let source = diagnostic.source.as_ref();
2032 let code = diagnostic.code.as_ref().map(|code| match code {
2033 lsp::NumberOrString::Number(code) => code.to_string(),
2034 lsp::NumberOrString::String(code) => code.clone(),
2035 });
2036 let range = range_from_lsp(diagnostic.range);
2037 let is_supporting = diagnostic
2038 .related_information
2039 .as_ref()
2040 .map_or(false, |infos| {
2041 infos.iter().any(|info| {
2042 primary_diagnostic_group_ids.contains_key(&(
2043 source,
2044 code.clone(),
2045 range_from_lsp(info.location.range),
2046 ))
2047 })
2048 });
2049
2050 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2051 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2052 });
2053
2054 if is_supporting {
2055 supporting_diagnostics.insert(
2056 (source, code.clone(), range),
2057 (diagnostic.severity, is_unnecessary),
2058 );
2059 } else {
2060 let group_id = post_inc(&mut next_group_id);
2061 let is_disk_based = source.map_or(false, |source| {
2062 disk_based_sources.contains(&source.as_str())
2063 });
2064
2065 sources_by_group_id.insert(group_id, source);
2066 primary_diagnostic_group_ids
2067 .insert((source, code.clone(), range.clone()), group_id);
2068
2069 diagnostics.push(DiagnosticEntry {
2070 range,
2071 diagnostic: Diagnostic {
2072 code: code.clone(),
2073 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2074 message: diagnostic.message.clone(),
2075 group_id,
2076 is_primary: true,
2077 is_valid: true,
2078 is_disk_based,
2079 is_unnecessary,
2080 },
2081 });
2082 if let Some(infos) = &diagnostic.related_information {
2083 for info in infos {
2084 if info.location.uri == params.uri && !info.message.is_empty() {
2085 let range = range_from_lsp(info.location.range);
2086 diagnostics.push(DiagnosticEntry {
2087 range,
2088 diagnostic: Diagnostic {
2089 code: code.clone(),
2090 severity: DiagnosticSeverity::INFORMATION,
2091 message: info.message.clone(),
2092 group_id,
2093 is_primary: false,
2094 is_valid: true,
2095 is_disk_based,
2096 is_unnecessary: false,
2097 },
2098 });
2099 }
2100 }
2101 }
2102 }
2103 }
2104
2105 for entry in &mut diagnostics {
2106 let diagnostic = &mut entry.diagnostic;
2107 if !diagnostic.is_primary {
2108 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2109 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2110 source,
2111 diagnostic.code.clone(),
2112 entry.range.clone(),
2113 )) {
2114 if let Some(severity) = severity {
2115 diagnostic.severity = severity;
2116 }
2117 diagnostic.is_unnecessary = is_unnecessary;
2118 }
2119 }
2120 }
2121
2122 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2123 Ok(())
2124 }
2125
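    /// Records diagnostics for a path: updates the open buffer for that path (if any) and the
    /// owning worktree's summaries, emitting `Event::DiagnosticsUpdated` when anything changed.
    /// Diagnostics for non-visible worktrees are ignored.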
2126 pub fn update_diagnostic_entries(
2127 &mut self,
2128 abs_path: PathBuf,
2129 version: Option<i32>,
2130 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2131 cx: &mut ModelContext<Project>,
2132 ) -> Result<(), anyhow::Error> {
2133 let (worktree, relative_path) = self
2134 .find_local_worktree(&abs_path, cx)
2135 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2136 if !worktree.read(cx).is_visible() {
2137 return Ok(());
2138 }
2139
2140 let project_path = ProjectPath {
2141 worktree_id: worktree.read(cx).id(),
2142 path: relative_path.into(),
2143 };
2144 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2145 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2146 }
2147
2148 let updated = worktree.update(cx, |worktree, cx| {
2149 worktree
2150 .as_local_mut()
2151 .ok_or_else(|| anyhow!("not a local worktree"))?
2152 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2153 })?;
2154 if updated {
2155 cx.emit(Event::DiagnosticsUpdated(project_path));
2156 }
2157 Ok(())
2158 }
2159
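    /// Installs a new set of diagnostics on a buffer.
    ///
    /// Entries are sorted, disk-based diagnostics have their ranges remapped through any edits
    /// made since the buffer was last saved, and all ranges are clipped to the buffer (empty
    /// ranges are widened by one character so they remain visible).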
2160 fn update_buffer_diagnostics(
2161 &mut self,
2162 buffer: &ModelHandle<Buffer>,
2163 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2164 version: Option<i32>,
2165 cx: &mut ModelContext<Self>,
2166 ) -> Result<()> {
2167 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2168 Ordering::Equal
2169 .then_with(|| b.is_primary.cmp(&a.is_primary))
2170 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2171 .then_with(|| a.severity.cmp(&b.severity))
2172 .then_with(|| a.message.cmp(&b.message))
2173 }
2174
2175 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2176
2177 diagnostics.sort_unstable_by(|a, b| {
2178 Ordering::Equal
2179 .then_with(|| a.range.start.cmp(&b.range.start))
2180 .then_with(|| b.range.end.cmp(&a.range.end))
2181 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2182 });
2183
2184 let mut sanitized_diagnostics = Vec::new();
2185 let edits_since_save = Patch::new(
2186 snapshot
2187 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2188 .collect(),
2189 );
2190 for entry in diagnostics {
2191 let start;
2192 let end;
2193 if entry.diagnostic.is_disk_based {
2194 // Some diagnostics are based on files on disk instead of buffers'
2195 // current contents. Adjust these diagnostics' ranges to reflect
2196 // any unsaved edits.
2197 start = edits_since_save.old_to_new(entry.range.start);
2198 end = edits_since_save.old_to_new(entry.range.end);
2199 } else {
2200 start = entry.range.start;
2201 end = entry.range.end;
2202 }
2203
2204 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2205 ..snapshot.clip_point_utf16(end, Bias::Right);
2206
2207 // Expand empty ranges by one character
2208 if range.start == range.end {
2209 range.end.column += 1;
2210 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2211 if range.start == range.end && range.end.column > 0 {
2212 range.start.column -= 1;
2213 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2214 }
2215 }
2216
2217 sanitized_diagnostics.push(DiagnosticEntry {
2218 range,
2219 diagnostic: entry.diagnostic,
2220 });
2221 }
2222 drop(edits_since_save);
2223
2224 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2225 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2226 Ok(())
2227 }
2228
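    /// Reloads the given buffers from disk, skipping buffers that aren't dirty. Remote buffers
    /// are reloaded via RPC on the host. Returns a `ProjectTransaction` describing the resulting
    /// edits; when `push_to_history` is false the edits are kept out of each buffer's undo
    /// history.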
2229 pub fn reload_buffers(
2230 &self,
2231 buffers: HashSet<ModelHandle<Buffer>>,
2232 push_to_history: bool,
2233 cx: &mut ModelContext<Self>,
2234 ) -> Task<Result<ProjectTransaction>> {
2235 let mut local_buffers = Vec::new();
2236 let mut remote_buffers = None;
2237 for buffer_handle in buffers {
2238 let buffer = buffer_handle.read(cx);
2239 if buffer.is_dirty() {
2240 if let Some(file) = File::from_dyn(buffer.file()) {
2241 if file.is_local() {
2242 local_buffers.push(buffer_handle);
2243 } else {
2244 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2245 }
2246 }
2247 }
2248 }
2249
2250 let remote_buffers = self.remote_id().zip(remote_buffers);
2251 let client = self.client.clone();
2252
2253 cx.spawn(|this, mut cx| async move {
2254 let mut project_transaction = ProjectTransaction::default();
2255
2256 if let Some((project_id, remote_buffers)) = remote_buffers {
2257 let response = client
2258 .request(proto::ReloadBuffers {
2259 project_id,
2260 buffer_ids: remote_buffers
2261 .iter()
2262 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2263 .collect(),
2264 })
2265 .await?
2266 .transaction
2267 .ok_or_else(|| anyhow!("missing transaction"))?;
2268 project_transaction = this
2269 .update(&mut cx, |this, cx| {
2270 this.deserialize_project_transaction(response, push_to_history, cx)
2271 })
2272 .await?;
2273 }
2274
2275 for buffer in local_buffers {
2276 let transaction = buffer
2277 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2278 .await?;
2279 buffer.update(&mut cx, |buffer, cx| {
2280 if let Some(transaction) = transaction {
2281 if !push_to_history {
2282 buffer.forget_transaction(transaction.id);
2283 }
2284 project_transaction.0.insert(cx.handle(), transaction);
2285 }
2286 });
2287 }
2288
2289 Ok(project_transaction)
2290 })
2291 }
2292
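    /// Formats the given buffers, returning the edits as a `ProjectTransaction`.
    ///
    /// Remote buffers are formatted by the host via RPC. Local buffers use their language
    /// server's `textDocument/formatting` support, falling back to range formatting over the
    /// whole buffer when only that capability is advertised. Local buffers without a language
    /// server are skipped, and the request resolves to an empty transaction if any buffer has
    /// no associated file.
    ///
    /// Illustrative sketch only (not a compiled doctest); assumes an existing `project` handle,
    /// a set of `buffers`, and a gpui context `cx`:
    ///
    /// ```ignore
    /// let format = project.update(cx, |project, cx| {
    ///     // `false`: keep the formatting edits out of the buffers' undo history.
    ///     project.format(buffers, false, cx)
    /// });
    /// ```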
2293 pub fn format(
2294 &self,
2295 buffers: HashSet<ModelHandle<Buffer>>,
2296 push_to_history: bool,
2297 cx: &mut ModelContext<Project>,
2298 ) -> Task<Result<ProjectTransaction>> {
2299 let mut local_buffers = Vec::new();
2300 let mut remote_buffers = None;
2301 for buffer_handle in buffers {
2302 let buffer = buffer_handle.read(cx);
2303 if let Some(file) = File::from_dyn(buffer.file()) {
2304 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2305 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2306 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2307 }
2308 } else {
2309 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2310 }
2311 } else {
2312 return Task::ready(Ok(Default::default()));
2313 }
2314 }
2315
2316 let remote_buffers = self.remote_id().zip(remote_buffers);
2317 let client = self.client.clone();
2318
2319 cx.spawn(|this, mut cx| async move {
2320 let mut project_transaction = ProjectTransaction::default();
2321
2322 if let Some((project_id, remote_buffers)) = remote_buffers {
2323 let response = client
2324 .request(proto::FormatBuffers {
2325 project_id,
2326 buffer_ids: remote_buffers
2327 .iter()
2328 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2329 .collect(),
2330 })
2331 .await?
2332 .transaction
2333 .ok_or_else(|| anyhow!("missing transaction"))?;
2334 project_transaction = this
2335 .update(&mut cx, |this, cx| {
2336 this.deserialize_project_transaction(response, push_to_history, cx)
2337 })
2338 .await?;
2339 }
2340
2341 for (buffer, buffer_abs_path, language_server) in local_buffers {
2342 let text_document = lsp::TextDocumentIdentifier::new(
2343 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2344 );
2345 let capabilities = &language_server.capabilities();
2346 let tab_size = cx.update(|cx| {
2347 let language_name = buffer.read(cx).language().map(|language| language.name());
2348 cx.global::<Settings>().tab_size(language_name.as_deref())
2349 });
2350 let lsp_edits = if capabilities
2351 .document_formatting_provider
2352 .as_ref()
2353 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2354 {
2355 language_server
2356 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2357 text_document,
2358 options: lsp::FormattingOptions {
2359 tab_size,
2360 insert_spaces: true,
2361 insert_final_newline: Some(true),
2362 ..Default::default()
2363 },
2364 work_done_progress_params: Default::default(),
2365 })
2366 .await?
2367 } else if capabilities
2368 .document_range_formatting_provider
2369 .as_ref()
2370 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2371 {
2372 let buffer_start = lsp::Position::new(0, 0);
2373 let buffer_end =
2374 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2375 language_server
2376 .request::<lsp::request::RangeFormatting>(
2377 lsp::DocumentRangeFormattingParams {
2378 text_document,
2379 range: lsp::Range::new(buffer_start, buffer_end),
2380 options: lsp::FormattingOptions {
2381                                    tab_size,
2382 insert_spaces: true,
2383 insert_final_newline: Some(true),
2384 ..Default::default()
2385 },
2386 work_done_progress_params: Default::default(),
2387 },
2388 )
2389 .await?
2390 } else {
2391 continue;
2392 };
2393
2394 if let Some(lsp_edits) = lsp_edits {
2395 let edits = this
2396 .update(&mut cx, |this, cx| {
2397 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2398 })
2399 .await?;
2400 buffer.update(&mut cx, |buffer, cx| {
2401 buffer.finalize_last_transaction();
2402 buffer.start_transaction();
2403 for (range, text) in edits {
2404 buffer.edit([(range, text)], cx);
2405 }
2406 if buffer.end_transaction(cx).is_some() {
2407 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2408 if !push_to_history {
2409 buffer.forget_transaction(transaction.id);
2410 }
2411 project_transaction.0.insert(cx.handle(), transaction);
2412 }
2413 });
2414 }
2415 }
2416
2417 Ok(project_transaction)
2418 })
2419 }
2420
2421 pub fn definition<T: ToPointUtf16>(
2422 &self,
2423 buffer: &ModelHandle<Buffer>,
2424 position: T,
2425 cx: &mut ModelContext<Self>,
2426 ) -> Task<Result<Vec<Location>>> {
2427 let position = position.to_point_utf16(buffer.read(cx));
2428 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2429 }
2430
2431 pub fn references<T: ToPointUtf16>(
2432 &self,
2433 buffer: &ModelHandle<Buffer>,
2434 position: T,
2435 cx: &mut ModelContext<Self>,
2436 ) -> Task<Result<Vec<Location>>> {
2437 let position = position.to_point_utf16(buffer.read(cx));
2438 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2439 }
2440
2441 pub fn document_highlights<T: ToPointUtf16>(
2442 &self,
2443 buffer: &ModelHandle<Buffer>,
2444 position: T,
2445 cx: &mut ModelContext<Self>,
2446 ) -> Task<Result<Vec<DocumentHighlight>>> {
2447 let position = position.to_point_utf16(buffer.read(cx));
2448
2449 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2450 }
2451
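    /// Performs a project-wide symbol search.
    ///
    /// Locally, a `workspace/symbol` request is sent to every running language server and the
    /// results are mapped back into project paths; symbols outside any worktree keep a path
    /// relative to the searching worktree's root. Remote projects forward the query to the host.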
2452 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2453 if self.is_local() {
2454 let mut requests = Vec::new();
2455 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2456 let worktree_id = *worktree_id;
2457 if let Some(worktree) = self
2458 .worktree_for_id(worktree_id, cx)
2459 .and_then(|worktree| worktree.read(cx).as_local())
2460 {
2461 let lsp_adapter = lsp_adapter.clone();
2462 let worktree_abs_path = worktree.abs_path().clone();
2463 requests.push(
2464 language_server
2465 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2466 query: query.to_string(),
2467 ..Default::default()
2468 })
2469 .log_err()
2470 .map(move |response| {
2471 (
2472 lsp_adapter,
2473 worktree_id,
2474 worktree_abs_path,
2475 response.unwrap_or_default(),
2476 )
2477 }),
2478 );
2479 }
2480 }
2481
2482 cx.spawn_weak(|this, cx| async move {
2483 let responses = futures::future::join_all(requests).await;
2484 let this = if let Some(this) = this.upgrade(&cx) {
2485 this
2486 } else {
2487 return Ok(Default::default());
2488 };
2489 this.read_with(&cx, |this, cx| {
2490 let mut symbols = Vec::new();
2491 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2492 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2493 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2494 let mut worktree_id = source_worktree_id;
2495 let path;
2496 if let Some((worktree, rel_path)) =
2497 this.find_local_worktree(&abs_path, cx)
2498 {
2499 worktree_id = worktree.read(cx).id();
2500 path = rel_path;
2501 } else {
2502 path = relativize_path(&worktree_abs_path, &abs_path);
2503 }
2504
2505 let label = this
2506 .languages
2507 .select_language(&path)
2508 .and_then(|language| {
2509 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2510 })
2511 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2512 let signature = this.symbol_signature(worktree_id, &path);
2513
2514 Some(Symbol {
2515 source_worktree_id,
2516 worktree_id,
2517 language_server_name: adapter.name(),
2518 name: lsp_symbol.name,
2519 kind: lsp_symbol.kind,
2520 label,
2521 path,
2522 range: range_from_lsp(lsp_symbol.location.range),
2523 signature,
2524 })
2525 }));
2526 }
2527 Ok(symbols)
2528 })
2529 })
2530 } else if let Some(project_id) = self.remote_id() {
2531 let request = self.client.request(proto::GetProjectSymbols {
2532 project_id,
2533 query: query.to_string(),
2534 });
2535 cx.spawn_weak(|this, cx| async move {
2536 let response = request.await?;
2537 let mut symbols = Vec::new();
2538 if let Some(this) = this.upgrade(&cx) {
2539 this.read_with(&cx, |this, _| {
2540 symbols.extend(
2541 response
2542 .symbols
2543 .into_iter()
2544 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2545 );
2546 })
2547 }
2548 Ok(symbols)
2549 })
2550 } else {
2551 Task::ready(Ok(Default::default()))
2552 }
2553 }
2554
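    /// Opens the buffer containing a symbol returned by [`Self::symbols`], either through the
    /// language server that produced the symbol or, for remote projects, via RPC.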
2555 pub fn open_buffer_for_symbol(
2556 &mut self,
2557 symbol: &Symbol,
2558 cx: &mut ModelContext<Self>,
2559 ) -> Task<Result<ModelHandle<Buffer>>> {
2560 if self.is_local() {
2561 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2562 symbol.source_worktree_id,
2563 symbol.language_server_name.clone(),
2564 )) {
2565 server.clone()
2566 } else {
2567 return Task::ready(Err(anyhow!(
2568 "language server for worktree and language not found"
2569 )));
2570 };
2571
2572 let worktree_abs_path = if let Some(worktree_abs_path) = self
2573 .worktree_for_id(symbol.worktree_id, cx)
2574 .and_then(|worktree| worktree.read(cx).as_local())
2575 .map(|local_worktree| local_worktree.abs_path())
2576 {
2577 worktree_abs_path
2578 } else {
2579 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2580 };
2581 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2582 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2583 uri
2584 } else {
2585 return Task::ready(Err(anyhow!("invalid symbol path")));
2586 };
2587
2588 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2589 } else if let Some(project_id) = self.remote_id() {
2590 let request = self.client.request(proto::OpenBufferForSymbol {
2591 project_id,
2592 symbol: Some(serialize_symbol(symbol)),
2593 });
2594 cx.spawn(|this, mut cx| async move {
2595 let response = request.await?;
2596 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2597 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2598 .await
2599 })
2600 } else {
2601 Task::ready(Err(anyhow!("project does not have a remote id")))
2602 }
2603 }
2604
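    /// Requests completions at the given position.
    ///
    /// For local buffers, a `textDocument/completion` request is issued and each item is
    /// anchored in the buffer: items that carry an edit range are validated against the
    /// snapshot, items without one reuse the word range under the cursor, and insert/replace
    /// edits are skipped. For remote buffers the request is forwarded to the host, and the
    /// buffer waits to catch up to the host's version before the items are deserialized.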
2605 pub fn completions<T: ToPointUtf16>(
2606 &self,
2607 source_buffer_handle: &ModelHandle<Buffer>,
2608 position: T,
2609 cx: &mut ModelContext<Self>,
2610 ) -> Task<Result<Vec<Completion>>> {
2611 let source_buffer_handle = source_buffer_handle.clone();
2612 let source_buffer = source_buffer_handle.read(cx);
2613 let buffer_id = source_buffer.remote_id();
2614 let language = source_buffer.language().cloned();
2615 let worktree;
2616 let buffer_abs_path;
2617 if let Some(file) = File::from_dyn(source_buffer.file()) {
2618 worktree = file.worktree.clone();
2619 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2620 } else {
2621 return Task::ready(Ok(Default::default()));
2622 };
2623
2624 let position = position.to_point_utf16(source_buffer);
2625 let anchor = source_buffer.anchor_after(position);
2626
2627 if worktree.read(cx).as_local().is_some() {
2628 let buffer_abs_path = buffer_abs_path.unwrap();
2629 let (_, lang_server) =
2630 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2631 server.clone()
2632 } else {
2633 return Task::ready(Ok(Default::default()));
2634 };
2635
2636 cx.spawn(|_, cx| async move {
2637 let completions = lang_server
2638 .request::<lsp::request::Completion>(lsp::CompletionParams {
2639 text_document_position: lsp::TextDocumentPositionParams::new(
2640 lsp::TextDocumentIdentifier::new(
2641 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2642 ),
2643 point_to_lsp(position),
2644 ),
2645 context: Default::default(),
2646 work_done_progress_params: Default::default(),
2647 partial_result_params: Default::default(),
2648 })
2649 .await
2650 .context("lsp completion request failed")?;
2651
2652 let completions = if let Some(completions) = completions {
2653 match completions {
2654 lsp::CompletionResponse::Array(completions) => completions,
2655 lsp::CompletionResponse::List(list) => list.items,
2656 }
2657 } else {
2658 Default::default()
2659 };
2660
2661 source_buffer_handle.read_with(&cx, |this, _| {
2662 let snapshot = this.snapshot();
2663 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2664 let mut range_for_token = None;
2665 Ok(completions
2666 .into_iter()
2667 .filter_map(|lsp_completion| {
2668 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2669 // If the language server provides a range to overwrite, then
2670 // check that the range is valid.
2671 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2672 let range = range_from_lsp(edit.range);
2673 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2674 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2675 if start != range.start || end != range.end {
2676 log::info!("completion out of expected range");
2677 return None;
2678 }
2679 (
2680 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2681 edit.new_text.clone(),
2682 )
2683 }
2684 // If the language server does not provide a range, then infer
2685 // the range based on the syntax tree.
2686 None => {
2687 if position != clipped_position {
2688 log::info!("completion out of expected range");
2689 return None;
2690 }
2691 let Range { start, end } = range_for_token
2692 .get_or_insert_with(|| {
2693 let offset = position.to_offset(&snapshot);
2694 snapshot
2695 .range_for_word_token_at(offset)
2696 .unwrap_or_else(|| offset..offset)
2697 })
2698 .clone();
2699 let text = lsp_completion
2700 .insert_text
2701 .as_ref()
2702 .unwrap_or(&lsp_completion.label)
2703 .clone();
2704 (
2705 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2706                                        text,
2707 )
2708 }
2709 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2710 log::info!("unsupported insert/replace completion");
2711 return None;
2712 }
2713 };
2714
2715 Some(Completion {
2716 old_range,
2717 new_text,
2718 label: language
2719 .as_ref()
2720 .and_then(|l| l.label_for_completion(&lsp_completion))
2721 .unwrap_or_else(|| {
2722 CodeLabel::plain(
2723 lsp_completion.label.clone(),
2724 lsp_completion.filter_text.as_deref(),
2725 )
2726 }),
2727 lsp_completion,
2728 })
2729 })
2730 .collect())
2731 })
2732 })
2733 } else if let Some(project_id) = self.remote_id() {
2734 let rpc = self.client.clone();
2735 let message = proto::GetCompletions {
2736 project_id,
2737 buffer_id,
2738 position: Some(language::proto::serialize_anchor(&anchor)),
2739 version: serialize_version(&source_buffer.version()),
2740 };
2741 cx.spawn_weak(|_, mut cx| async move {
2742 let response = rpc.request(message).await?;
2743
2744 source_buffer_handle
2745 .update(&mut cx, |buffer, _| {
2746 buffer.wait_for_version(deserialize_version(response.version))
2747 })
2748 .await;
2749
2750 response
2751 .completions
2752 .into_iter()
2753 .map(|completion| {
2754 language::proto::deserialize_completion(completion, language.as_ref())
2755 })
2756 .collect()
2757 })
2758 } else {
2759 Task::ready(Ok(Default::default()))
2760 }
2761 }
2762
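    /// Resolves a confirmed completion (`completionItem/resolve`) and applies any additional
    /// text edits it carries (such as auto-imports) in a single transaction, optionally pushed
    /// to the buffer's undo history. Remote projects delegate this to the host via RPC.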
2763 pub fn apply_additional_edits_for_completion(
2764 &self,
2765 buffer_handle: ModelHandle<Buffer>,
2766 completion: Completion,
2767 push_to_history: bool,
2768 cx: &mut ModelContext<Self>,
2769 ) -> Task<Result<Option<Transaction>>> {
2770 let buffer = buffer_handle.read(cx);
2771 let buffer_id = buffer.remote_id();
2772
2773 if self.is_local() {
2774 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2775 {
2776 server.clone()
2777 } else {
2778 return Task::ready(Ok(Default::default()));
2779 };
2780
2781 cx.spawn(|this, mut cx| async move {
2782 let resolved_completion = lang_server
2783 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2784 .await?;
2785 if let Some(edits) = resolved_completion.additional_text_edits {
2786 let edits = this
2787 .update(&mut cx, |this, cx| {
2788 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2789 })
2790 .await?;
2791 buffer_handle.update(&mut cx, |buffer, cx| {
2792 buffer.finalize_last_transaction();
2793 buffer.start_transaction();
2794 for (range, text) in edits {
2795 buffer.edit([(range, text)], cx);
2796 }
2797 let transaction = if buffer.end_transaction(cx).is_some() {
2798 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2799 if !push_to_history {
2800 buffer.forget_transaction(transaction.id);
2801 }
2802 Some(transaction)
2803 } else {
2804 None
2805 };
2806 Ok(transaction)
2807 })
2808 } else {
2809 Ok(None)
2810 }
2811 })
2812 } else if let Some(project_id) = self.remote_id() {
2813 let client = self.client.clone();
2814 cx.spawn(|_, mut cx| async move {
2815 let response = client
2816 .request(proto::ApplyCompletionAdditionalEdits {
2817 project_id,
2818 buffer_id,
2819 completion: Some(language::proto::serialize_completion(&completion)),
2820 })
2821 .await?;
2822
2823 if let Some(transaction) = response.transaction {
2824 let transaction = language::proto::deserialize_transaction(transaction)?;
2825 buffer_handle
2826 .update(&mut cx, |buffer, _| {
2827 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2828 })
2829 .await;
2830 if push_to_history {
2831 buffer_handle.update(&mut cx, |buffer, _| {
2832 buffer.push_transaction(transaction.clone(), Instant::now());
2833 });
2834 }
2835 Ok(Some(transaction))
2836 } else {
2837 Ok(None)
2838 }
2839 })
2840 } else {
2841 Task::ready(Err(anyhow!("project does not have a remote id")))
2842 }
2843 }
2844
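    /// Requests code actions for the given range, forwarding the diagnostics that overlap it as
    /// context. Local buffers query their language server directly (returning nothing if the
    /// server doesn't advertise code-action support); remote buffers go through the host.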
2845 pub fn code_actions<T: Clone + ToOffset>(
2846 &self,
2847 buffer_handle: &ModelHandle<Buffer>,
2848 range: Range<T>,
2849 cx: &mut ModelContext<Self>,
2850 ) -> Task<Result<Vec<CodeAction>>> {
2851 let buffer_handle = buffer_handle.clone();
2852 let buffer = buffer_handle.read(cx);
2853 let snapshot = buffer.snapshot();
2854 let relevant_diagnostics = snapshot
2855 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2856 .map(|entry| entry.to_lsp_diagnostic_stub())
2857 .collect();
2858 let buffer_id = buffer.remote_id();
2859 let worktree;
2860 let buffer_abs_path;
2861 if let Some(file) = File::from_dyn(buffer.file()) {
2862 worktree = file.worktree.clone();
2863 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2864 } else {
2865 return Task::ready(Ok(Default::default()));
2866 };
2867 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2868
2869 if worktree.read(cx).as_local().is_some() {
2870 let buffer_abs_path = buffer_abs_path.unwrap();
2871 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2872 {
2873 server.clone()
2874 } else {
2875 return Task::ready(Ok(Default::default()));
2876 };
2877
2878 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2879 cx.foreground().spawn(async move {
2880                if lang_server.capabilities().code_action_provider.is_none() {
2881 return Ok(Default::default());
2882 }
2883
2884 Ok(lang_server
2885 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2886 text_document: lsp::TextDocumentIdentifier::new(
2887 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2888 ),
2889 range: lsp_range,
2890 work_done_progress_params: Default::default(),
2891 partial_result_params: Default::default(),
2892 context: lsp::CodeActionContext {
2893 diagnostics: relevant_diagnostics,
2894 only: Some(vec![
2895 lsp::CodeActionKind::QUICKFIX,
2896 lsp::CodeActionKind::REFACTOR,
2897 lsp::CodeActionKind::REFACTOR_EXTRACT,
2898 lsp::CodeActionKind::SOURCE,
2899 ]),
2900 },
2901 })
2902 .await?
2903 .unwrap_or_default()
2904 .into_iter()
2905 .filter_map(|entry| {
2906 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2907 Some(CodeAction {
2908 range: range.clone(),
2909 lsp_action,
2910 })
2911 } else {
2912 None
2913 }
2914 })
2915 .collect())
2916 })
2917 } else if let Some(project_id) = self.remote_id() {
2918 let rpc = self.client.clone();
2919 let version = buffer.version();
2920 cx.spawn_weak(|_, mut cx| async move {
2921 let response = rpc
2922 .request(proto::GetCodeActions {
2923 project_id,
2924 buffer_id,
2925 start: Some(language::proto::serialize_anchor(&range.start)),
2926 end: Some(language::proto::serialize_anchor(&range.end)),
2927 version: serialize_version(&version),
2928 })
2929 .await?;
2930
2931 buffer_handle
2932 .update(&mut cx, |buffer, _| {
2933 buffer.wait_for_version(deserialize_version(response.version))
2934 })
2935 .await;
2936
2937 response
2938 .actions
2939 .into_iter()
2940 .map(language::proto::deserialize_code_action)
2941 .collect()
2942 })
2943 } else {
2944 Task::ready(Ok(Default::default()))
2945 }
2946 }
2947
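    /// Applies a code action.
    ///
    /// The action is first refreshed: actions that support lazy resolution are resolved via
    /// `codeAction/resolve`, others are re-requested and matched by title. The refreshed action
    /// either carries a workspace edit, which is applied directly, or a command, which is
    /// executed on the server and whose resulting workspace edits are collected.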
2948 pub fn apply_code_action(
2949 &self,
2950 buffer_handle: ModelHandle<Buffer>,
2951 mut action: CodeAction,
2952 push_to_history: bool,
2953 cx: &mut ModelContext<Self>,
2954 ) -> Task<Result<ProjectTransaction>> {
2955 if self.is_local() {
2956 let buffer = buffer_handle.read(cx);
2957 let (lsp_adapter, lang_server) =
2958 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2959 server.clone()
2960 } else {
2961 return Task::ready(Ok(Default::default()));
2962 };
2963 let range = action.range.to_point_utf16(buffer);
2964
2965 cx.spawn(|this, mut cx| async move {
2966 if let Some(lsp_range) = action
2967 .lsp_action
2968 .data
2969 .as_mut()
2970 .and_then(|d| d.get_mut("codeActionParams"))
2971 .and_then(|d| d.get_mut("range"))
2972 {
2973 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2974 action.lsp_action = lang_server
2975 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2976 .await?;
2977 } else {
2978 let actions = this
2979 .update(&mut cx, |this, cx| {
2980 this.code_actions(&buffer_handle, action.range, cx)
2981 })
2982 .await?;
2983 action.lsp_action = actions
2984 .into_iter()
2985 .find(|a| a.lsp_action.title == action.lsp_action.title)
2986 .ok_or_else(|| anyhow!("code action is outdated"))?
2987 .lsp_action;
2988 }
2989
2990 if let Some(edit) = action.lsp_action.edit {
2991 Self::deserialize_workspace_edit(
2992 this,
2993 edit,
2994 push_to_history,
2995 lsp_adapter,
2996 lang_server,
2997 &mut cx,
2998 )
2999 .await
3000 } else if let Some(command) = action.lsp_action.command {
3001 this.update(&mut cx, |this, _| {
3002 this.last_workspace_edits_by_language_server
3003 .remove(&lang_server.server_id());
3004 });
3005 lang_server
3006 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3007 command: command.command,
3008 arguments: command.arguments.unwrap_or_default(),
3009 ..Default::default()
3010 })
3011 .await?;
3012 Ok(this.update(&mut cx, |this, _| {
3013 this.last_workspace_edits_by_language_server
3014 .remove(&lang_server.server_id())
3015 .unwrap_or_default()
3016 }))
3017 } else {
3018 Ok(ProjectTransaction::default())
3019 }
3020 })
3021 } else if let Some(project_id) = self.remote_id() {
3022 let client = self.client.clone();
3023 let request = proto::ApplyCodeAction {
3024 project_id,
3025 buffer_id: buffer_handle.read(cx).remote_id(),
3026 action: Some(language::proto::serialize_code_action(&action)),
3027 };
3028 cx.spawn(|this, mut cx| async move {
3029 let response = client
3030 .request(request)
3031 .await?
3032 .transaction
3033 .ok_or_else(|| anyhow!("missing transaction"))?;
3034 this.update(&mut cx, |this, cx| {
3035 this.deserialize_project_transaction(response, push_to_history, cx)
3036 })
3037 .await
3038 })
3039 } else {
3040 Task::ready(Err(anyhow!("project does not have a remote id")))
3041 }
3042 }
3043
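    /// Applies an LSP `WorkspaceEdit` to the project, returning the changes as a
    /// `ProjectTransaction`. Resource operations (create, rename, delete) are performed through
    /// the project's `Fs`, and text edits are applied to buffers opened via the originating
    /// language server.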
3044 async fn deserialize_workspace_edit(
3045 this: ModelHandle<Self>,
3046 edit: lsp::WorkspaceEdit,
3047 push_to_history: bool,
3048 lsp_adapter: Arc<dyn LspAdapter>,
3049 language_server: Arc<LanguageServer>,
3050 cx: &mut AsyncAppContext,
3051 ) -> Result<ProjectTransaction> {
3052 let fs = this.read_with(cx, |this, _| this.fs.clone());
3053 let mut operations = Vec::new();
3054 if let Some(document_changes) = edit.document_changes {
3055 match document_changes {
3056 lsp::DocumentChanges::Edits(edits) => {
3057 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3058 }
3059 lsp::DocumentChanges::Operations(ops) => operations = ops,
3060 }
3061 } else if let Some(changes) = edit.changes {
3062 operations.extend(changes.into_iter().map(|(uri, edits)| {
3063 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3064 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3065 uri,
3066 version: None,
3067 },
3068 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3069 })
3070 }));
3071 }
3072
3073 let mut project_transaction = ProjectTransaction::default();
3074 for operation in operations {
3075 match operation {
3076 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3077 let abs_path = op
3078 .uri
3079 .to_file_path()
3080 .map_err(|_| anyhow!("can't convert URI to path"))?;
3081
3082 if let Some(parent_path) = abs_path.parent() {
3083 fs.create_dir(parent_path).await?;
3084 }
3085                    if op.uri.path().ends_with('/') {
3086 fs.create_dir(&abs_path).await?;
3087 } else {
3088 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3089 .await?;
3090 }
3091 }
3092 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3093 let source_abs_path = op
3094 .old_uri
3095 .to_file_path()
3096 .map_err(|_| anyhow!("can't convert URI to path"))?;
3097 let target_abs_path = op
3098 .new_uri
3099 .to_file_path()
3100 .map_err(|_| anyhow!("can't convert URI to path"))?;
3101 fs.rename(
3102 &source_abs_path,
3103 &target_abs_path,
3104 op.options.map(Into::into).unwrap_or_default(),
3105 )
3106 .await?;
3107 }
3108 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3109 let abs_path = op
3110 .uri
3111 .to_file_path()
3112 .map_err(|_| anyhow!("can't convert URI to path"))?;
3113 let options = op.options.map(Into::into).unwrap_or_default();
3114                    if op.uri.path().ends_with('/') {
3115 fs.remove_dir(&abs_path, options).await?;
3116 } else {
3117 fs.remove_file(&abs_path, options).await?;
3118 }
3119 }
3120 lsp::DocumentChangeOperation::Edit(op) => {
3121 let buffer_to_edit = this
3122 .update(cx, |this, cx| {
3123 this.open_local_buffer_via_lsp(
3124 op.text_document.uri,
3125 lsp_adapter.clone(),
3126 language_server.clone(),
3127 cx,
3128 )
3129 })
3130 .await?;
3131
3132 let edits = this
3133 .update(cx, |this, cx| {
3134 let edits = op.edits.into_iter().map(|edit| match edit {
3135 lsp::OneOf::Left(edit) => edit,
3136 lsp::OneOf::Right(edit) => edit.text_edit,
3137 });
3138 this.edits_from_lsp(
3139 &buffer_to_edit,
3140 edits,
3141 op.text_document.version,
3142 cx,
3143 )
3144 })
3145 .await?;
3146
3147 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3148 buffer.finalize_last_transaction();
3149 buffer.start_transaction();
3150 for (range, text) in edits {
3151 buffer.edit([(range, text)], cx);
3152 }
3153 let transaction = if buffer.end_transaction(cx).is_some() {
3154 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3155 if !push_to_history {
3156 buffer.forget_transaction(transaction.id);
3157 }
3158 Some(transaction)
3159 } else {
3160 None
3161 };
3162
3163 transaction
3164 });
3165 if let Some(transaction) = transaction {
3166 project_transaction.0.insert(buffer_to_edit, transaction);
3167 }
3168 }
3169 }
3170 }
3171
3172 Ok(project_transaction)
3173 }
3174
3175 pub fn prepare_rename<T: ToPointUtf16>(
3176 &self,
3177 buffer: ModelHandle<Buffer>,
3178 position: T,
3179 cx: &mut ModelContext<Self>,
3180 ) -> Task<Result<Option<Range<Anchor>>>> {
3181 let position = position.to_point_utf16(buffer.read(cx));
3182 self.request_lsp(buffer, PrepareRename { position }, cx)
3183 }
3184
3185 pub fn perform_rename<T: ToPointUtf16>(
3186 &self,
3187 buffer: ModelHandle<Buffer>,
3188 position: T,
3189 new_name: String,
3190 push_to_history: bool,
3191 cx: &mut ModelContext<Self>,
3192 ) -> Task<Result<ProjectTransaction>> {
3193 let position = position.to_point_utf16(buffer.read(cx));
3194 self.request_lsp(
3195 buffer,
3196 PerformRename {
3197 position,
3198 new_name,
3199 push_to_history,
3200 },
3201 cx,
3202 )
3203 }
3204
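    /// Performs a project-wide text search, returning the matching ranges grouped by buffer.
    ///
    /// Locally, background workers scan the visible files of every worktree for candidate
    /// paths, candidate buffers are opened (already-open buffers are searched as well), and the
    /// matching ranges are collected per buffer. Remote projects forward the query to the host.
    ///
    /// Illustrative sketch only (not a compiled doctest); assumes a previously constructed
    /// `query: SearchQuery`, a `project` handle, and a gpui context `cx`:
    ///
    /// ```ignore
    /// let matches = project.update(cx, |project, cx| project.search(query, cx));
    /// // `matches` resolves to the matching anchor ranges, keyed by buffer handle.
    /// ```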
3205 pub fn search(
3206 &self,
3207 query: SearchQuery,
3208 cx: &mut ModelContext<Self>,
3209 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3210 if self.is_local() {
3211 let snapshots = self
3212 .visible_worktrees(cx)
3213 .filter_map(|tree| {
3214 let tree = tree.read(cx).as_local()?;
3215 Some(tree.snapshot())
3216 })
3217 .collect::<Vec<_>>();
3218
3219 let background = cx.background().clone();
3220 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3221 if path_count == 0 {
3222 return Task::ready(Ok(Default::default()));
3223 }
3224 let workers = background.num_cpus().min(path_count);
3225 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3226 cx.background()
3227 .spawn({
3228 let fs = self.fs.clone();
3229 let background = cx.background().clone();
3230 let query = query.clone();
3231 async move {
3232 let fs = &fs;
3233 let query = &query;
3234 let matching_paths_tx = &matching_paths_tx;
3235 let paths_per_worker = (path_count + workers - 1) / workers;
3236 let snapshots = &snapshots;
3237 background
3238 .scoped(|scope| {
3239 for worker_ix in 0..workers {
3240 let worker_start_ix = worker_ix * paths_per_worker;
3241 let worker_end_ix = worker_start_ix + paths_per_worker;
3242 scope.spawn(async move {
3243 let mut snapshot_start_ix = 0;
3244 let mut abs_path = PathBuf::new();
3245 for snapshot in snapshots {
3246 let snapshot_end_ix =
3247 snapshot_start_ix + snapshot.visible_file_count();
3248 if worker_end_ix <= snapshot_start_ix {
3249 break;
3250 } else if worker_start_ix > snapshot_end_ix {
3251 snapshot_start_ix = snapshot_end_ix;
3252 continue;
3253 } else {
3254 let start_in_snapshot = worker_start_ix
3255 .saturating_sub(snapshot_start_ix);
3256 let end_in_snapshot =
3257 cmp::min(worker_end_ix, snapshot_end_ix)
3258 - snapshot_start_ix;
3259
3260 for entry in snapshot
3261 .files(false, start_in_snapshot)
3262 .take(end_in_snapshot - start_in_snapshot)
3263 {
3264 if matching_paths_tx.is_closed() {
3265 break;
3266 }
3267
3268 abs_path.clear();
3269 abs_path.push(&snapshot.abs_path());
3270 abs_path.push(&entry.path);
3271 let matches = if let Some(file) =
3272 fs.open_sync(&abs_path).await.log_err()
3273 {
3274 query.detect(file).unwrap_or(false)
3275 } else {
3276 false
3277 };
3278
3279 if matches {
3280 let project_path =
3281 (snapshot.id(), entry.path.clone());
3282 if matching_paths_tx
3283 .send(project_path)
3284 .await
3285 .is_err()
3286 {
3287 break;
3288 }
3289 }
3290 }
3291
3292 snapshot_start_ix = snapshot_end_ix;
3293 }
3294 }
3295 });
3296 }
3297 })
3298 .await;
3299 }
3300 })
3301 .detach();
3302
3303 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3304 let open_buffers = self
3305 .opened_buffers
3306 .values()
3307 .filter_map(|b| b.upgrade(cx))
3308 .collect::<HashSet<_>>();
3309 cx.spawn(|this, cx| async move {
3310 for buffer in &open_buffers {
3311 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3312 buffers_tx.send((buffer.clone(), snapshot)).await?;
3313 }
3314
3315 let open_buffers = Rc::new(RefCell::new(open_buffers));
3316 while let Some(project_path) = matching_paths_rx.next().await {
3317 if buffers_tx.is_closed() {
3318 break;
3319 }
3320
3321 let this = this.clone();
3322 let open_buffers = open_buffers.clone();
3323 let buffers_tx = buffers_tx.clone();
3324 cx.spawn(|mut cx| async move {
3325 if let Some(buffer) = this
3326 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3327 .await
3328 .log_err()
3329 {
3330 if open_buffers.borrow_mut().insert(buffer.clone()) {
3331 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3332 buffers_tx.send((buffer, snapshot)).await?;
3333 }
3334 }
3335
3336 Ok::<_, anyhow::Error>(())
3337 })
3338 .detach();
3339 }
3340
3341 Ok::<_, anyhow::Error>(())
3342 })
3343 .detach_and_log_err(cx);
3344
3345 let background = cx.background().clone();
3346 cx.background().spawn(async move {
3347 let query = &query;
3348 let mut matched_buffers = Vec::new();
3349 for _ in 0..workers {
3350 matched_buffers.push(HashMap::default());
3351 }
3352 background
3353 .scoped(|scope| {
3354 for worker_matched_buffers in matched_buffers.iter_mut() {
3355 let mut buffers_rx = buffers_rx.clone();
3356 scope.spawn(async move {
3357 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3358 let buffer_matches = query
3359 .search(snapshot.as_rope())
3360 .await
3361 .iter()
3362 .map(|range| {
3363 snapshot.anchor_before(range.start)
3364 ..snapshot.anchor_after(range.end)
3365 })
3366 .collect::<Vec<_>>();
3367 if !buffer_matches.is_empty() {
3368 worker_matched_buffers
3369 .insert(buffer.clone(), buffer_matches);
3370 }
3371 }
3372 });
3373 }
3374 })
3375 .await;
3376 Ok(matched_buffers.into_iter().flatten().collect())
3377 })
3378 } else if let Some(project_id) = self.remote_id() {
3379 let request = self.client.request(query.to_proto(project_id));
3380 cx.spawn(|this, mut cx| async move {
3381 let response = request.await?;
3382 let mut result = HashMap::default();
3383 for location in response.locations {
3384 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3385 let target_buffer = this
3386 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3387 .await?;
3388 let start = location
3389 .start
3390 .and_then(deserialize_anchor)
3391 .ok_or_else(|| anyhow!("missing target start"))?;
3392 let end = location
3393 .end
3394 .and_then(deserialize_anchor)
3395 .ok_or_else(|| anyhow!("missing target end"))?;
3396 result
3397 .entry(target_buffer)
3398 .or_insert(Vec::new())
3399 .push(start..end)
3400 }
3401 Ok(result)
3402 })
3403 } else {
3404 Task::ready(Ok(Default::default()))
3405 }
3406 }
3407
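    /// Dispatches an `LspCommand` for a buffer: locally it is translated to an LSP request and
    /// sent to the buffer's language server (after checking the server's capabilities); for
    /// remote projects it is forwarded to the host as the command's proto message. If neither
    /// applies, a default response is returned.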
3408 fn request_lsp<R: LspCommand>(
3409 &self,
3410 buffer_handle: ModelHandle<Buffer>,
3411 request: R,
3412 cx: &mut ModelContext<Self>,
3413 ) -> Task<Result<R::Response>>
3414 where
3415 <R::LspRequest as lsp::request::Request>::Result: Send,
3416 {
3417 let buffer = buffer_handle.read(cx);
3418 if self.is_local() {
3419 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3420 if let Some((file, (_, language_server))) =
3421 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3422 {
3423 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3424 return cx.spawn(|this, cx| async move {
3425 if !request.check_capabilities(&language_server.capabilities()) {
3426 return Ok(Default::default());
3427 }
3428
3429 let response = language_server
3430 .request::<R::LspRequest>(lsp_params)
3431 .await
3432 .context("lsp request failed")?;
3433 request
3434 .response_from_lsp(response, this, buffer_handle, cx)
3435 .await
3436 });
3437 }
3438 } else if let Some(project_id) = self.remote_id() {
3439 let rpc = self.client.clone();
3440 let message = request.to_proto(project_id, buffer);
3441 return cx.spawn(|this, cx| async move {
3442 let response = rpc.request(message).await?;
3443 request
3444 .response_from_proto(response, this, buffer_handle, cx)
3445 .await
3446 });
3447 }
3448 Task::ready(Ok(Default::default()))
3449 }
3450
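    /// Returns the local worktree containing `abs_path` along with the path relative to its
    /// root, creating a new worktree rooted at `abs_path` if none exists yet.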
3451 pub fn find_or_create_local_worktree(
3452 &mut self,
3453 abs_path: impl AsRef<Path>,
3454 visible: bool,
3455 cx: &mut ModelContext<Self>,
3456 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3457 let abs_path = abs_path.as_ref();
3458 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3459 Task::ready(Ok((tree.clone(), relative_path.into())))
3460 } else {
3461 let worktree = self.create_local_worktree(abs_path, visible, cx);
3462 cx.foreground()
3463 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3464 }
3465 }
3466
3467 pub fn find_local_worktree(
3468 &self,
3469 abs_path: &Path,
3470 cx: &AppContext,
3471 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3472 for tree in self.worktrees(cx) {
3473 if let Some(relative_path) = tree
3474 .read(cx)
3475 .as_local()
3476 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3477 {
3478 return Some((tree.clone(), relative_path.into()));
3479 }
3480 }
3481 None
3482 }
3483
3484 pub fn is_shared(&self) -> bool {
3485 match &self.client_state {
3486 ProjectClientState::Local { is_shared, .. } => *is_shared,
3487 ProjectClientState::Remote { .. } => false,
3488 }
3489 }
3490
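    /// Creates a local worktree for a path, deduplicating concurrent requests for the same path
    /// through `loading_local_worktrees`. Once loaded, the worktree is added to the project and,
    /// if the project has a remote id, shared or registered with the server depending on whether
    /// the project is currently shared.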
3491 fn create_local_worktree(
3492 &mut self,
3493 abs_path: impl AsRef<Path>,
3494 visible: bool,
3495 cx: &mut ModelContext<Self>,
3496 ) -> Task<Result<ModelHandle<Worktree>>> {
3497 let fs = self.fs.clone();
3498 let client = self.client.clone();
3499 let next_entry_id = self.next_entry_id.clone();
3500 let path: Arc<Path> = abs_path.as_ref().into();
3501 let task = self
3502 .loading_local_worktrees
3503 .entry(path.clone())
3504 .or_insert_with(|| {
3505 cx.spawn(|project, mut cx| {
3506 async move {
3507 let worktree = Worktree::local(
3508 client.clone(),
3509 path.clone(),
3510 visible,
3511 fs,
3512 next_entry_id,
3513 &mut cx,
3514 )
3515 .await;
3516 project.update(&mut cx, |project, _| {
3517 project.loading_local_worktrees.remove(&path);
3518 });
3519 let worktree = worktree?;
3520
3521 let remote_project_id = project.update(&mut cx, |project, cx| {
3522 project.add_worktree(&worktree, cx);
3523 project.remote_id()
3524 });
3525
3526 if let Some(project_id) = remote_project_id {
3527                            // Because sharing is async, the project may have been unshared by the time the worktree
3528                            // finishes loading; if so, register the worktree with the server instead of sharing it.
3529 loop {
3530 if project.read_with(&cx, |project, _| project.is_shared()) {
3531 if worktree
3532 .update(&mut cx, |worktree, cx| {
3533 worktree.as_local_mut().unwrap().share(project_id, cx)
3534 })
3535 .await
3536 .is_ok()
3537 {
3538 break;
3539 }
3540 } else {
3541 worktree
3542 .update(&mut cx, |worktree, cx| {
3543 worktree
3544 .as_local_mut()
3545 .unwrap()
3546 .register(project_id, cx)
3547 })
3548 .await?;
3549 break;
3550 }
3551 }
3552 }
3553
3554 Ok(worktree)
3555 }
3556                    .map_err(Arc::new)
3557 })
3558 .shared()
3559 })
3560 .clone();
3561 cx.foreground().spawn(async move {
3562 match task.await {
3563 Ok(worktree) => Ok(worktree),
3564 Err(err) => Err(anyhow!("{}", err)),
3565 }
3566 })
3567 }
3568
3569 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3570 self.worktrees.retain(|worktree| {
3571 worktree
3572 .upgrade(cx)
3573 .map_or(false, |w| w.read(cx).id() != id)
3574 });
3575 cx.notify();
3576 }
3577
3578 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3579 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3580 if worktree.read(cx).is_local() {
3581 cx.subscribe(&worktree, |this, worktree, _, cx| {
3582 this.update_local_worktree_buffers(worktree, cx);
3583 })
3584 .detach();
3585 }
3586
3587 let push_strong_handle = {
3588 let worktree = worktree.read(cx);
3589 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3590 };
3591 if push_strong_handle {
3592 self.worktrees
3593 .push(WorktreeHandle::Strong(worktree.clone()));
3594 } else {
3595 cx.observe_release(&worktree, |this, _, cx| {
3596 this.worktrees
3597 .retain(|worktree| worktree.upgrade(cx).is_some());
3598 cx.notify();
3599 })
3600 .detach();
3601 self.worktrees
3602 .push(WorktreeHandle::Weak(worktree.downgrade()));
3603 }
3604 cx.notify();
3605 }
3606
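    /// Reconciles open buffers with a local worktree after its snapshot changes: each buffer's
    /// `File` is refreshed (entry id, path, mtime), collaborators are notified of the new file
    /// state, dropped buffers are forgotten, and renamed buffers are re-registered with their
    /// language servers under the new path.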
3607 fn update_local_worktree_buffers(
3608 &mut self,
3609 worktree_handle: ModelHandle<Worktree>,
3610 cx: &mut ModelContext<Self>,
3611 ) {
3612 let snapshot = worktree_handle.read(cx).snapshot();
3613 let mut buffers_to_delete = Vec::new();
3614 let mut renamed_buffers = Vec::new();
3615 for (buffer_id, buffer) in &self.opened_buffers {
3616 if let Some(buffer) = buffer.upgrade(cx) {
3617 buffer.update(cx, |buffer, cx| {
3618 if let Some(old_file) = File::from_dyn(buffer.file()) {
3619 if old_file.worktree != worktree_handle {
3620 return;
3621 }
3622
3623 let new_file = if let Some(entry) = old_file
3624 .entry_id
3625 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3626 {
3627 File {
3628 is_local: true,
3629 entry_id: Some(entry.id),
3630 mtime: entry.mtime,
3631 path: entry.path.clone(),
3632 worktree: worktree_handle.clone(),
3633 }
3634 } else if let Some(entry) =
3635 snapshot.entry_for_path(old_file.path().as_ref())
3636 {
3637 File {
3638 is_local: true,
3639 entry_id: Some(entry.id),
3640 mtime: entry.mtime,
3641 path: entry.path.clone(),
3642 worktree: worktree_handle.clone(),
3643 }
3644 } else {
3645 File {
3646 is_local: true,
3647 entry_id: None,
3648 path: old_file.path().clone(),
3649 mtime: old_file.mtime(),
3650 worktree: worktree_handle.clone(),
3651 }
3652 };
3653
3654 let old_path = old_file.abs_path(cx);
3655 if new_file.abs_path(cx) != old_path {
3656 renamed_buffers.push((cx.handle(), old_path));
3657 }
3658
3659 if let Some(project_id) = self.remote_id() {
3660 self.client
3661 .send(proto::UpdateBufferFile {
3662 project_id,
3663 buffer_id: *buffer_id as u64,
3664 file: Some(new_file.to_proto()),
3665 })
3666 .log_err();
3667 }
3668 buffer.file_updated(Box::new(new_file), cx).detach();
3669 }
3670 });
3671 } else {
3672 buffers_to_delete.push(*buffer_id);
3673 }
3674 }
3675
3676 for buffer_id in buffers_to_delete {
3677 self.opened_buffers.remove(&buffer_id);
3678 }
3679
3680 for (buffer, old_path) in renamed_buffers {
3681 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3682 self.assign_language_to_buffer(&buffer, cx);
3683 self.register_buffer_with_language_server(&buffer, cx);
3684 }
3685 }
3686
3687 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3688 let new_active_entry = entry.and_then(|project_path| {
3689 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3690 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3691 Some(entry.id)
3692 });
3693 if new_active_entry != self.active_entry {
3694 self.active_entry = new_active_entry;
3695 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3696 }
3697 }
3698
3699 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3700 self.language_server_statuses
3701 .values()
3702 .any(|status| status.pending_diagnostic_updates > 0)
3703 }
3704
3705 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3706 let mut summary = DiagnosticSummary::default();
3707 for (_, path_summary) in self.diagnostic_summaries(cx) {
3708 summary.error_count += path_summary.error_count;
3709 summary.warning_count += path_summary.warning_count;
3710 }
3711 summary
3712 }
3713
3714 pub fn diagnostic_summaries<'a>(
3715 &'a self,
3716 cx: &'a AppContext,
3717 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3718 self.worktrees(cx).flat_map(move |worktree| {
3719 let worktree = worktree.read(cx);
3720 let worktree_id = worktree.id();
3721 worktree
3722 .diagnostic_summaries()
3723 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3724 })
3725 }
3726
3727 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3728 if self
3729 .language_server_statuses
3730 .values()
3731 .map(|status| status.pending_diagnostic_updates)
3732 .sum::<isize>()
3733 == 1
3734 {
3735 cx.emit(Event::DiskBasedDiagnosticsStarted);
3736 }
3737 }
3738
3739 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3740 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3741 if self
3742 .language_server_statuses
3743 .values()
3744 .map(|status| status.pending_diagnostic_updates)
3745 .sum::<isize>()
3746 == 0
3747 {
3748 cx.emit(Event::DiskBasedDiagnosticsFinished);
3749 }
3750 }
3751
3752 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3753 self.active_entry
3754 }
3755
3756 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3757 self.worktree_for_id(path.worktree_id, cx)?
3758 .read(cx)
3759 .entry_for_path(&path.path)
3760 .map(|entry| entry.id)
3761 }
3762
3763 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3764 let worktree = self.worktree_for_entry(entry_id, cx)?;
3765 let worktree = worktree.read(cx);
3766 let worktree_id = worktree.id();
3767 let path = worktree.entry_for_id(entry_id)?.path.clone();
3768 Some(ProjectPath { worktree_id, path })
3769 }
3770
3771 // RPC message handlers
3772
3773 async fn handle_request_join_project(
3774 this: ModelHandle<Self>,
3775 message: TypedEnvelope<proto::RequestJoinProject>,
3776 _: Arc<Client>,
3777 mut cx: AsyncAppContext,
3778 ) -> Result<()> {
3779 let user_id = message.payload.requester_id;
3780 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3781 let user = user_store
3782 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3783 .await?;
3784 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3785 Ok(())
3786 }
3787
3788 async fn handle_unregister_project(
3789 this: ModelHandle<Self>,
3790 _: TypedEnvelope<proto::UnregisterProject>,
3791 _: Arc<Client>,
3792 mut cx: AsyncAppContext,
3793 ) -> Result<()> {
3794 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3795 Ok(())
3796 }
3797
3798 async fn handle_project_unshared(
3799 this: ModelHandle<Self>,
3800 _: TypedEnvelope<proto::ProjectUnshared>,
3801 _: Arc<Client>,
3802 mut cx: AsyncAppContext,
3803 ) -> Result<()> {
3804 this.update(&mut cx, |this, cx| this.unshared(cx));
3805 Ok(())
3806 }
3807
3808 async fn handle_add_collaborator(
3809 this: ModelHandle<Self>,
3810 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3811 _: Arc<Client>,
3812 mut cx: AsyncAppContext,
3813 ) -> Result<()> {
3814 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3815 let collaborator = envelope
3816 .payload
3817 .collaborator
3818 .take()
3819 .ok_or_else(|| anyhow!("empty collaborator"))?;
3820
3821 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3822 this.update(&mut cx, |this, cx| {
3823 this.collaborators
3824 .insert(collaborator.peer_id, collaborator);
3825 cx.notify();
3826 });
3827
3828 Ok(())
3829 }
3830
3831 async fn handle_remove_collaborator(
3832 this: ModelHandle<Self>,
3833 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3834 _: Arc<Client>,
3835 mut cx: AsyncAppContext,
3836 ) -> Result<()> {
3837 this.update(&mut cx, |this, cx| {
3838 let peer_id = PeerId(envelope.payload.peer_id);
3839 let replica_id = this
3840 .collaborators
3841 .remove(&peer_id)
3842 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3843 .replica_id;
3844 for (_, buffer) in &this.opened_buffers {
3845 if let Some(buffer) = buffer.upgrade(cx) {
3846 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3847 }
3848 }
3849
3850 cx.emit(Event::CollaboratorLeft(peer_id));
3851 cx.notify();
3852 Ok(())
3853 })
3854 }
3855
3856 async fn handle_register_worktree(
3857 this: ModelHandle<Self>,
3858 envelope: TypedEnvelope<proto::RegisterWorktree>,
3859 client: Arc<Client>,
3860 mut cx: AsyncAppContext,
3861 ) -> Result<()> {
3862 this.update(&mut cx, |this, cx| {
3863 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3864 let replica_id = this.replica_id();
3865 let worktree = proto::Worktree {
3866 id: envelope.payload.worktree_id,
3867 root_name: envelope.payload.root_name,
3868 entries: Default::default(),
3869 diagnostic_summaries: Default::default(),
3870 visible: envelope.payload.visible,
3871 scan_id: 0,
3872 };
3873 let (worktree, load_task) =
3874 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3875 this.add_worktree(&worktree, cx);
3876 load_task.detach();
3877 Ok(())
3878 })
3879 }
3880
3881 async fn handle_unregister_worktree(
3882 this: ModelHandle<Self>,
3883 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3884 _: Arc<Client>,
3885 mut cx: AsyncAppContext,
3886 ) -> Result<()> {
3887 this.update(&mut cx, |this, cx| {
3888 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3889 this.remove_worktree(worktree_id, cx);
3890 Ok(())
3891 })
3892 }
3893
3894 async fn handle_update_worktree(
3895 this: ModelHandle<Self>,
3896 envelope: TypedEnvelope<proto::UpdateWorktree>,
3897 _: Arc<Client>,
3898 mut cx: AsyncAppContext,
3899 ) -> Result<()> {
3900 this.update(&mut cx, |this, cx| {
3901 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3902 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3903 worktree.update(cx, |worktree, _| {
3904 let worktree = worktree.as_remote_mut().unwrap();
3905 worktree.update_from_remote(envelope)
3906 })?;
3907 }
3908 Ok(())
3909 })
3910 }
3911
3912 async fn handle_create_project_entry(
3913 this: ModelHandle<Self>,
3914 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3915 _: Arc<Client>,
3916 mut cx: AsyncAppContext,
3917 ) -> Result<proto::ProjectEntryResponse> {
3918 let worktree = this.update(&mut cx, |this, cx| {
3919 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3920 this.worktree_for_id(worktree_id, cx)
3921 .ok_or_else(|| anyhow!("worktree not found"))
3922 })?;
3923 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3924 let entry = worktree
3925 .update(&mut cx, |worktree, cx| {
3926 let worktree = worktree.as_local_mut().unwrap();
3927 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3928 worktree.create_entry(path, envelope.payload.is_directory, cx)
3929 })
3930 .await?;
3931 Ok(proto::ProjectEntryResponse {
3932 entry: Some((&entry).into()),
3933 worktree_scan_id: worktree_scan_id as u64,
3934 })
3935 }
3936
3937 async fn handle_rename_project_entry(
3938 this: ModelHandle<Self>,
3939 envelope: TypedEnvelope<proto::RenameProjectEntry>,
3940 _: Arc<Client>,
3941 mut cx: AsyncAppContext,
3942 ) -> Result<proto::ProjectEntryResponse> {
3943 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3944 let worktree = this.read_with(&cx, |this, cx| {
3945 this.worktree_for_entry(entry_id, cx)
3946 .ok_or_else(|| anyhow!("worktree not found"))
3947 })?;
3948 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3949 let entry = worktree
3950 .update(&mut cx, |worktree, cx| {
3951 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
3952 worktree
3953 .as_local_mut()
3954 .unwrap()
3955 .rename_entry(entry_id, new_path, cx)
3956 .ok_or_else(|| anyhow!("invalid entry"))
3957 })?
3958 .await?;
3959 Ok(proto::ProjectEntryResponse {
3960 entry: Some((&entry).into()),
3961 worktree_scan_id: worktree_scan_id as u64,
3962 })
3963 }
3964
3965 async fn handle_delete_project_entry(
3966 this: ModelHandle<Self>,
3967 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
3968 _: Arc<Client>,
3969 mut cx: AsyncAppContext,
3970 ) -> Result<proto::ProjectEntryResponse> {
3971 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3972 let worktree = this.read_with(&cx, |this, cx| {
3973 this.worktree_for_entry(entry_id, cx)
3974 .ok_or_else(|| anyhow!("worktree not found"))
3975 })?;
3976 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3977 worktree
3978 .update(&mut cx, |worktree, cx| {
3979 worktree
3980 .as_local_mut()
3981 .unwrap()
3982 .delete_entry(entry_id, cx)
3983 .ok_or_else(|| anyhow!("invalid entry"))
3984 })?
3985 .await?;
3986 Ok(proto::ProjectEntryResponse {
3987 entry: None,
3988 worktree_scan_id: worktree_scan_id as u64,
3989 })
3990 }
3991
3992 async fn handle_update_diagnostic_summary(
3993 this: ModelHandle<Self>,
3994 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3995 _: Arc<Client>,
3996 mut cx: AsyncAppContext,
3997 ) -> Result<()> {
3998 this.update(&mut cx, |this, cx| {
3999 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4000 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4001 if let Some(summary) = envelope.payload.summary {
4002 let project_path = ProjectPath {
4003 worktree_id,
4004 path: Path::new(&summary.path).into(),
4005 };
4006 worktree.update(cx, |worktree, _| {
4007 worktree
4008 .as_remote_mut()
4009 .unwrap()
4010 .update_diagnostic_summary(project_path.path.clone(), &summary);
4011 });
4012 cx.emit(Event::DiagnosticsUpdated(project_path));
4013 }
4014 }
4015 Ok(())
4016 })
4017 }
4018
4019 async fn handle_start_language_server(
4020 this: ModelHandle<Self>,
4021 envelope: TypedEnvelope<proto::StartLanguageServer>,
4022 _: Arc<Client>,
4023 mut cx: AsyncAppContext,
4024 ) -> Result<()> {
4025 let server = envelope
4026 .payload
4027 .server
4028 .ok_or_else(|| anyhow!("invalid server"))?;
4029 this.update(&mut cx, |this, cx| {
4030 this.language_server_statuses.insert(
4031 server.id as usize,
4032 LanguageServerStatus {
4033 name: server.name,
4034 pending_work: Default::default(),
4035 pending_diagnostic_updates: 0,
4036 },
4037 );
4038 cx.notify();
4039 });
4040 Ok(())
4041 }
4042
4043 async fn handle_update_language_server(
4044 this: ModelHandle<Self>,
4045 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4046 _: Arc<Client>,
4047 mut cx: AsyncAppContext,
4048 ) -> Result<()> {
4049 let language_server_id = envelope.payload.language_server_id as usize;
4050 match envelope
4051 .payload
4052 .variant
4053 .ok_or_else(|| anyhow!("invalid variant"))?
4054 {
4055 proto::update_language_server::Variant::WorkStart(payload) => {
4056 this.update(&mut cx, |this, cx| {
4057 this.on_lsp_work_start(language_server_id, payload.token, cx);
4058 })
4059 }
4060 proto::update_language_server::Variant::WorkProgress(payload) => {
4061 this.update(&mut cx, |this, cx| {
4062 this.on_lsp_work_progress(
4063 language_server_id,
4064 payload.token,
4065 LanguageServerProgress {
4066 message: payload.message,
4067 percentage: payload.percentage.map(|p| p as usize),
4068 last_update_at: Instant::now(),
4069 },
4070 cx,
4071 );
4072 })
4073 }
4074 proto::update_language_server::Variant::WorkEnd(payload) => {
4075 this.update(&mut cx, |this, cx| {
4076 this.on_lsp_work_end(language_server_id, payload.token, cx);
4077 })
4078 }
4079 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4080 this.update(&mut cx, |this, cx| {
4081 this.disk_based_diagnostics_started(cx);
4082 })
4083 }
4084 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4085 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4086 }
4087 }
4088
4089 Ok(())
4090 }
4091
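    // Applies buffer operations received from a peer. Operations for an open
    // buffer are applied immediately, operations for a buffer that is still
    // loading are queued on its `OpenBuffer::Loading` entry, and operations for
    // a completely unknown buffer create a new `Loading` entry. The last case is
    // only expected on remote (guest) projects, hence the assertion below.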
4092 async fn handle_update_buffer(
4093 this: ModelHandle<Self>,
4094 envelope: TypedEnvelope<proto::UpdateBuffer>,
4095 _: Arc<Client>,
4096 mut cx: AsyncAppContext,
4097 ) -> Result<()> {
4098 this.update(&mut cx, |this, cx| {
4099 let payload = envelope.payload.clone();
4100 let buffer_id = payload.buffer_id;
4101 let ops = payload
4102 .operations
4103 .into_iter()
4104 .map(language::proto::deserialize_operation)
4105 .collect::<Result<Vec<_>, _>>()?;
4106 let is_remote = this.is_remote();
4107 match this.opened_buffers.entry(buffer_id) {
4108 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4109 OpenBuffer::Strong(buffer) => {
4110 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4111 }
4112 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4113 OpenBuffer::Weak(_) => {}
4114 },
4115 hash_map::Entry::Vacant(e) => {
4116 assert!(
4117 is_remote,
4118 "received buffer update from {:?}",
4119 envelope.original_sender_id
4120 );
4121 e.insert(OpenBuffer::Loading(ops));
4122 }
4123 }
4124 Ok(())
4125 })
4126 }
4127
4128 async fn handle_update_buffer_file(
4129 this: ModelHandle<Self>,
4130 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4131 _: Arc<Client>,
4132 mut cx: AsyncAppContext,
4133 ) -> Result<()> {
4134 this.update(&mut cx, |this, cx| {
4135 let payload = envelope.payload.clone();
4136 let buffer_id = payload.buffer_id;
4137 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4138 let worktree = this
4139 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4140 .ok_or_else(|| anyhow!("no such worktree"))?;
4141 let file = File::from_proto(file, worktree.clone(), cx)?;
4142 let buffer = this
4143 .opened_buffers
4144 .get_mut(&buffer_id)
4145 .and_then(|b| b.upgrade(cx))
4146 .ok_or_else(|| anyhow!("no such buffer"))?;
4147 buffer.update(cx, |buffer, cx| {
4148 buffer.file_updated(Box::new(file), cx).detach();
4149 });
4150 Ok(())
4151 })
4152 }
4153
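    // Saves a buffer on behalf of a peer: waits until the local buffer has
    // caught up to the version the peer requested, saves it, and replies with
    // the resulting version and file mtime.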
4154 async fn handle_save_buffer(
4155 this: ModelHandle<Self>,
4156 envelope: TypedEnvelope<proto::SaveBuffer>,
4157 _: Arc<Client>,
4158 mut cx: AsyncAppContext,
4159 ) -> Result<proto::BufferSaved> {
4160 let buffer_id = envelope.payload.buffer_id;
4161 let requested_version = deserialize_version(envelope.payload.version);
4162
4163 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4164 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4165 let buffer = this
4166 .opened_buffers
4167 .get(&buffer_id)
4168 .and_then(|buffer| buffer.upgrade(cx))
4169 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4170 Ok::<_, anyhow::Error>((project_id, buffer))
4171 })?;
4172 buffer
4173 .update(&mut cx, |buffer, _| {
4174 buffer.wait_for_version(requested_version)
4175 })
4176 .await;
4177
4178 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4179 Ok(proto::BufferSaved {
4180 project_id,
4181 buffer_id,
4182 version: serialize_version(&saved_version),
4183 mtime: Some(mtime.into()),
4184 })
4185 }
4186
4187 async fn handle_reload_buffers(
4188 this: ModelHandle<Self>,
4189 envelope: TypedEnvelope<proto::ReloadBuffers>,
4190 _: Arc<Client>,
4191 mut cx: AsyncAppContext,
4192 ) -> Result<proto::ReloadBuffersResponse> {
4193 let sender_id = envelope.original_sender_id()?;
4194 let reload = this.update(&mut cx, |this, cx| {
4195 let mut buffers = HashSet::default();
4196 for buffer_id in &envelope.payload.buffer_ids {
4197 buffers.insert(
4198 this.opened_buffers
4199 .get(buffer_id)
4200 .and_then(|buffer| buffer.upgrade(cx))
4201 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4202 );
4203 }
4204 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4205 })?;
4206
4207 let project_transaction = reload.await?;
4208 let project_transaction = this.update(&mut cx, |this, cx| {
4209 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4210 });
4211 Ok(proto::ReloadBuffersResponse {
4212 transaction: Some(project_transaction),
4213 })
4214 }
4215
4216 async fn handle_format_buffers(
4217 this: ModelHandle<Self>,
4218 envelope: TypedEnvelope<proto::FormatBuffers>,
4219 _: Arc<Client>,
4220 mut cx: AsyncAppContext,
4221 ) -> Result<proto::FormatBuffersResponse> {
4222 let sender_id = envelope.original_sender_id()?;
4223 let format = this.update(&mut cx, |this, cx| {
4224 let mut buffers = HashSet::default();
4225 for buffer_id in &envelope.payload.buffer_ids {
4226 buffers.insert(
4227 this.opened_buffers
4228 .get(buffer_id)
4229 .and_then(|buffer| buffer.upgrade(cx))
4230 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4231 );
4232 }
4233 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4234 })?;
4235
4236 let project_transaction = format.await?;
4237 let project_transaction = this.update(&mut cx, |this, cx| {
4238 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4239 });
4240 Ok(proto::FormatBuffersResponse {
4241 transaction: Some(project_transaction),
4242 })
4243 }
4244
4245 async fn handle_get_completions(
4246 this: ModelHandle<Self>,
4247 envelope: TypedEnvelope<proto::GetCompletions>,
4248 _: Arc<Client>,
4249 mut cx: AsyncAppContext,
4250 ) -> Result<proto::GetCompletionsResponse> {
4251 let position = envelope
4252 .payload
4253 .position
4254 .and_then(language::proto::deserialize_anchor)
4255 .ok_or_else(|| anyhow!("invalid position"))?;
4256 let version = deserialize_version(envelope.payload.version);
4257 let buffer = this.read_with(&cx, |this, cx| {
4258 this.opened_buffers
4259 .get(&envelope.payload.buffer_id)
4260 .and_then(|buffer| buffer.upgrade(cx))
4261 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4262 })?;
4263 buffer
4264 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4265 .await;
4266 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4267 let completions = this
4268 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4269 .await?;
4270
4271 Ok(proto::GetCompletionsResponse {
4272 completions: completions
4273 .iter()
4274 .map(language::proto::serialize_completion)
4275 .collect(),
4276 version: serialize_version(&version),
4277 })
4278 }
4279
4280 async fn handle_apply_additional_edits_for_completion(
4281 this: ModelHandle<Self>,
4282 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4283 _: Arc<Client>,
4284 mut cx: AsyncAppContext,
4285 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4286 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4287 let buffer = this
4288 .opened_buffers
4289 .get(&envelope.payload.buffer_id)
4290 .and_then(|buffer| buffer.upgrade(cx))
4291 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4292 let language = buffer.read(cx).language();
4293 let completion = language::proto::deserialize_completion(
4294 envelope
4295 .payload
4296 .completion
4297 .ok_or_else(|| anyhow!("invalid completion"))?,
4298 language,
4299 )?;
4300 Ok::<_, anyhow::Error>(
4301 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4302 )
4303 })?;
4304
4305 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4306 transaction: apply_additional_edits
4307 .await?
4308 .as_ref()
4309 .map(language::proto::serialize_transaction),
4310 })
4311 }
4312
4313 async fn handle_get_code_actions(
4314 this: ModelHandle<Self>,
4315 envelope: TypedEnvelope<proto::GetCodeActions>,
4316 _: Arc<Client>,
4317 mut cx: AsyncAppContext,
4318 ) -> Result<proto::GetCodeActionsResponse> {
4319 let start = envelope
4320 .payload
4321 .start
4322 .and_then(language::proto::deserialize_anchor)
4323 .ok_or_else(|| anyhow!("invalid start"))?;
4324 let end = envelope
4325 .payload
4326 .end
4327 .and_then(language::proto::deserialize_anchor)
4328 .ok_or_else(|| anyhow!("invalid end"))?;
4329 let buffer = this.update(&mut cx, |this, cx| {
4330 this.opened_buffers
4331 .get(&envelope.payload.buffer_id)
4332 .and_then(|buffer| buffer.upgrade(cx))
4333 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4334 })?;
4335 buffer
4336 .update(&mut cx, |buffer, _| {
4337 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4338 })
4339 .await;
4340
4341 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4342 let code_actions = this.update(&mut cx, |this, cx| {
4343 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4344 })?;
4345
4346 Ok(proto::GetCodeActionsResponse {
4347 actions: code_actions
4348 .await?
4349 .iter()
4350 .map(language::proto::serialize_code_action)
4351 .collect(),
4352 version: serialize_version(&version),
4353 })
4354 }
4355
4356 async fn handle_apply_code_action(
4357 this: ModelHandle<Self>,
4358 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4359 _: Arc<Client>,
4360 mut cx: AsyncAppContext,
4361 ) -> Result<proto::ApplyCodeActionResponse> {
4362 let sender_id = envelope.original_sender_id()?;
4363 let action = language::proto::deserialize_code_action(
4364 envelope
4365 .payload
4366 .action
4367 .ok_or_else(|| anyhow!("invalid action"))?,
4368 )?;
4369 let apply_code_action = this.update(&mut cx, |this, cx| {
4370 let buffer = this
4371 .opened_buffers
4372 .get(&envelope.payload.buffer_id)
4373 .and_then(|buffer| buffer.upgrade(cx))
4374 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4375 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4376 })?;
4377
4378 let project_transaction = apply_code_action.await?;
4379 let project_transaction = this.update(&mut cx, |this, cx| {
4380 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4381 });
4382 Ok(proto::ApplyCodeActionResponse {
4383 transaction: Some(project_transaction),
4384 })
4385 }
4386
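    // Generic handler for the LSP-backed requests defined in the `lsp_command`
    // module. It resolves the target buffer, deserializes the request via the
    // `LspCommand` implementation, forwards it to the appropriate language
    // server through `request_lsp`, and serializes the response for the
    // requesting peer together with the buffer version it was computed against.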
4387 async fn handle_lsp_command<T: LspCommand>(
4388 this: ModelHandle<Self>,
4389 envelope: TypedEnvelope<T::ProtoRequest>,
4390 _: Arc<Client>,
4391 mut cx: AsyncAppContext,
4392 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4393 where
4394 <T::LspRequest as lsp::request::Request>::Result: Send,
4395 {
4396 let sender_id = envelope.original_sender_id()?;
4397 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4398 let buffer_handle = this.read_with(&cx, |this, _| {
4399 this.opened_buffers
4400 .get(&buffer_id)
4401 .and_then(|buffer| buffer.upgrade(&cx))
4402 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4403 })?;
4404 let request = T::from_proto(
4405 envelope.payload,
4406 this.clone(),
4407 buffer_handle.clone(),
4408 cx.clone(),
4409 )
4410 .await?;
4411 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4412 let response = this
4413 .update(&mut cx, |this, cx| {
4414 this.request_lsp(buffer_handle, request, cx)
4415 })
4416 .await?;
4417 this.update(&mut cx, |this, cx| {
4418 Ok(T::response_to_proto(
4419 response,
4420 this,
4421 sender_id,
4422 &buffer_version,
4423 cx,
4424 ))
4425 })
4426 }
4427
4428 async fn handle_get_project_symbols(
4429 this: ModelHandle<Self>,
4430 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4431 _: Arc<Client>,
4432 mut cx: AsyncAppContext,
4433 ) -> Result<proto::GetProjectSymbolsResponse> {
4434 let symbols = this
4435 .update(&mut cx, |this, cx| {
4436 this.symbols(&envelope.payload.query, cx)
4437 })
4438 .await?;
4439
4440 Ok(proto::GetProjectSymbolsResponse {
4441 symbols: symbols.iter().map(serialize_symbol).collect(),
4442 })
4443 }
4444
4445 async fn handle_search_project(
4446 this: ModelHandle<Self>,
4447 envelope: TypedEnvelope<proto::SearchProject>,
4448 _: Arc<Client>,
4449 mut cx: AsyncAppContext,
4450 ) -> Result<proto::SearchProjectResponse> {
4451 let peer_id = envelope.original_sender_id()?;
4452 let query = SearchQuery::from_proto(envelope.payload)?;
4453 let result = this
4454 .update(&mut cx, |this, cx| this.search(query, cx))
4455 .await?;
4456
4457 this.update(&mut cx, |this, cx| {
4458 let mut locations = Vec::new();
4459 for (buffer, ranges) in result {
4460 for range in ranges {
4461 let start = serialize_anchor(&range.start);
4462 let end = serialize_anchor(&range.end);
4463 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4464 locations.push(proto::Location {
4465 buffer: Some(buffer),
4466 start: Some(start),
4467 end: Some(end),
4468 });
4469 }
4470 }
4471 Ok(proto::SearchProjectResponse { locations })
4472 })
4473 }
4474
4475 async fn handle_open_buffer_for_symbol(
4476 this: ModelHandle<Self>,
4477 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4478 _: Arc<Client>,
4479 mut cx: AsyncAppContext,
4480 ) -> Result<proto::OpenBufferForSymbolResponse> {
4481 let peer_id = envelope.original_sender_id()?;
4482 let symbol = envelope
4483 .payload
4484 .symbol
4485 .ok_or_else(|| anyhow!("invalid symbol"))?;
4486 let symbol = this.read_with(&cx, |this, _| {
4487 let symbol = this.deserialize_symbol(symbol)?;
4488 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4489 if signature == symbol.signature {
4490 Ok(symbol)
4491 } else {
4492 Err(anyhow!("invalid symbol signature"))
4493 }
4494 })?;
4495 let buffer = this
4496 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4497 .await?;
4498
4499 Ok(proto::OpenBufferForSymbolResponse {
4500 buffer: Some(this.update(&mut cx, |this, cx| {
4501 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4502 })),
4503 })
4504 }
4505
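    // Computes a SHA-256 signature over the worktree id, the symbol path, and a
    // per-project nonce. Peers send this signature back when requesting a buffer
    // for a symbol, and `handle_open_buffer_for_symbol` rejects any symbol whose
    // signature does not match, so remote peers cannot fabricate arbitrary paths.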
4506 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4507 let mut hasher = Sha256::new();
4508 hasher.update(worktree_id.to_proto().to_be_bytes());
4509 hasher.update(path.to_string_lossy().as_bytes());
4510 hasher.update(self.nonce.to_be_bytes());
4511 hasher.finalize().as_slice().try_into().unwrap()
4512 }
4513
4514 async fn handle_open_buffer_by_id(
4515 this: ModelHandle<Self>,
4516 envelope: TypedEnvelope<proto::OpenBufferById>,
4517 _: Arc<Client>,
4518 mut cx: AsyncAppContext,
4519 ) -> Result<proto::OpenBufferResponse> {
4520 let peer_id = envelope.original_sender_id()?;
4521 let buffer = this
4522 .update(&mut cx, |this, cx| {
4523 this.open_buffer_by_id(envelope.payload.id, cx)
4524 })
4525 .await?;
4526 this.update(&mut cx, |this, cx| {
4527 Ok(proto::OpenBufferResponse {
4528 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4529 })
4530 })
4531 }
4532
4533 async fn handle_open_buffer_by_path(
4534 this: ModelHandle<Self>,
4535 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4536 _: Arc<Client>,
4537 mut cx: AsyncAppContext,
4538 ) -> Result<proto::OpenBufferResponse> {
4539 let peer_id = envelope.original_sender_id()?;
4540 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4541 let open_buffer = this.update(&mut cx, |this, cx| {
4542 this.open_buffer(
4543 ProjectPath {
4544 worktree_id,
4545 path: PathBuf::from(envelope.payload.path).into(),
4546 },
4547 cx,
4548 )
4549 });
4550
4551 let buffer = open_buffer.await?;
4552 this.update(&mut cx, |this, cx| {
4553 Ok(proto::OpenBufferResponse {
4554 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4555 })
4556 })
4557 }
4558
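    // Converts a `ProjectTransaction` into its wire representation for a single
    // peer. Buffers and transactions are pushed in matching order so the
    // receiving side can zip them back together in
    // `deserialize_project_transaction`.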
4559 fn serialize_project_transaction_for_peer(
4560 &mut self,
4561 project_transaction: ProjectTransaction,
4562 peer_id: PeerId,
4563 cx: &AppContext,
4564 ) -> proto::ProjectTransaction {
4565 let mut serialized_transaction = proto::ProjectTransaction {
4566 buffers: Default::default(),
4567 transactions: Default::default(),
4568 };
4569 for (buffer, transaction) in project_transaction.0 {
4570 serialized_transaction
4571 .buffers
4572 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4573 serialized_transaction
4574 .transactions
4575 .push(language::proto::serialize_transaction(&transaction));
4576 }
4577 serialized_transaction
4578 }
4579
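    // Reassembles a `ProjectTransaction` received from a peer: each buffer is
    // resolved (or created) via `deserialize_buffer`, the transaction's edits are
    // awaited so they are guaranteed to have been applied locally, and the
    // transaction is optionally pushed onto the buffer's undo history.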
4580 fn deserialize_project_transaction(
4581 &mut self,
4582 message: proto::ProjectTransaction,
4583 push_to_history: bool,
4584 cx: &mut ModelContext<Self>,
4585 ) -> Task<Result<ProjectTransaction>> {
4586 cx.spawn(|this, mut cx| async move {
4587 let mut project_transaction = ProjectTransaction::default();
4588 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4589 let buffer = this
4590 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4591 .await?;
4592 let transaction = language::proto::deserialize_transaction(transaction)?;
4593 project_transaction.0.insert(buffer, transaction);
4594 }
4595
4596 for (buffer, transaction) in &project_transaction.0 {
4597 buffer
4598 .update(&mut cx, |buffer, _| {
4599 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4600 })
4601 .await;
4602
4603 if push_to_history {
4604 buffer.update(&mut cx, |buffer, _| {
4605 buffer.push_transaction(transaction.clone(), Instant::now());
4606 });
4607 }
4608 }
4609
4610 Ok(project_transaction)
4611 })
4612 }
4613
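    // The first time a buffer is sent to a given peer, its full state is
    // serialized; afterwards only its id is sent, on the assumption that the
    // peer retains the state it already received.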
4614 fn serialize_buffer_for_peer(
4615 &mut self,
4616 buffer: &ModelHandle<Buffer>,
4617 peer_id: PeerId,
4618 cx: &AppContext,
4619 ) -> proto::Buffer {
4620 let buffer_id = buffer.read(cx).remote_id();
4621 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4622 if shared_buffers.insert(buffer_id) {
4623 proto::Buffer {
4624 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4625 }
4626 } else {
4627 proto::Buffer {
4628 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4629 }
4630 }
4631 }
4632
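    // Resolves a `proto::Buffer` into a live buffer handle. A bare id is looked
    // up in `opened_buffers`, waiting on the `opened_buffer` channel until the
    // corresponding state arrives if necessary. A full state payload constructs
    // a new buffer (restoring its file association when a matching worktree
    // exists) and registers it with the project.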
4633 fn deserialize_buffer(
4634 &mut self,
4635 buffer: proto::Buffer,
4636 cx: &mut ModelContext<Self>,
4637 ) -> Task<Result<ModelHandle<Buffer>>> {
4638 let replica_id = self.replica_id();
4639
4640 let opened_buffer_tx = self.opened_buffer.0.clone();
4641 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4642 cx.spawn(|this, mut cx| async move {
4643 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4644 proto::buffer::Variant::Id(id) => {
4645 let buffer = loop {
4646 let buffer = this.read_with(&cx, |this, cx| {
4647 this.opened_buffers
4648 .get(&id)
4649 .and_then(|buffer| buffer.upgrade(cx))
4650 });
4651 if let Some(buffer) = buffer {
4652 break buffer;
4653 }
4654 opened_buffer_rx
4655 .next()
4656 .await
4657 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4658 };
4659 Ok(buffer)
4660 }
4661 proto::buffer::Variant::State(mut buffer) => {
4662 let mut buffer_worktree = None;
4663 let mut buffer_file = None;
4664 if let Some(file) = buffer.file.take() {
4665 this.read_with(&cx, |this, cx| {
4666 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4667 let worktree =
4668 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4669 anyhow!("no worktree found for id {}", file.worktree_id)
4670 })?;
4671 buffer_file =
4672 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4673 as Box<dyn language::File>);
4674 buffer_worktree = Some(worktree);
4675 Ok::<_, anyhow::Error>(())
4676 })?;
4677 }
4678
4679 let buffer = cx.add_model(|cx| {
4680 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4681 });
4682
4683 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4684
4685 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4686 Ok(buffer)
4687 }
4688 }
4689 })
4690 }
4691
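    // Reconstructs a `Symbol` from its wire form. Note that the symbol kind is
    // taken straight from the raw proto value via `mem::transmute`, which assumes
    // the sender only ever transmits valid kind values.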
4692 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4693 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4694 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4695 let start = serialized_symbol
4696 .start
4697 .ok_or_else(|| anyhow!("invalid start"))?;
4698 let end = serialized_symbol
4699 .end
4700 .ok_or_else(|| anyhow!("invalid end"))?;
4701 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4702 let path = PathBuf::from(serialized_symbol.path);
4703 let language = self.languages.select_language(&path);
4704 Ok(Symbol {
4705 source_worktree_id,
4706 worktree_id,
4707 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4708 label: language
4709 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4710 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4711 name: serialized_symbol.name,
4712 path,
4713 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4714 kind,
4715 signature: serialized_symbol
4716 .signature
4717 .try_into()
4718 .map_err(|_| anyhow!("invalid signature"))?,
4719 })
4720 }
4721
4722 async fn handle_buffer_saved(
4723 this: ModelHandle<Self>,
4724 envelope: TypedEnvelope<proto::BufferSaved>,
4725 _: Arc<Client>,
4726 mut cx: AsyncAppContext,
4727 ) -> Result<()> {
4728 let version = deserialize_version(envelope.payload.version);
4729 let mtime = envelope
4730 .payload
4731 .mtime
4732 .ok_or_else(|| anyhow!("missing mtime"))?
4733 .into();
4734
4735 this.update(&mut cx, |this, cx| {
4736 let buffer = this
4737 .opened_buffers
4738 .get(&envelope.payload.buffer_id)
4739 .and_then(|buffer| buffer.upgrade(cx));
4740 if let Some(buffer) = buffer {
4741 buffer.update(cx, |buffer, cx| {
4742 buffer.did_save(version, mtime, None, cx);
4743 });
4744 }
4745 Ok(())
4746 })
4747 }
4748
4749 async fn handle_buffer_reloaded(
4750 this: ModelHandle<Self>,
4751 envelope: TypedEnvelope<proto::BufferReloaded>,
4752 _: Arc<Client>,
4753 mut cx: AsyncAppContext,
4754 ) -> Result<()> {
4755 let payload = envelope.payload.clone();
4756 let version = deserialize_version(payload.version);
4757 let mtime = payload
4758 .mtime
4759 .ok_or_else(|| anyhow!("missing mtime"))?
4760 .into();
4761 this.update(&mut cx, |this, cx| {
4762 let buffer = this
4763 .opened_buffers
4764 .get(&payload.buffer_id)
4765 .and_then(|buffer| buffer.upgrade(cx));
4766 if let Some(buffer) = buffer {
4767 buffer.update(cx, |buffer, cx| {
4768 buffer.did_reload(version, mtime, cx);
4769 });
4770 }
4771 Ok(())
4772 })
4773 }
4774
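    // Fuzzy-matches `query` against the paths of all visible worktrees. Root
    // names are included in the match prefix only when more than one worktree is
    // visible, and the matching itself runs on the background executor so it can
    // be cancelled through `cancel_flag`. A minimal usage sketch, mirroring the
    // pattern used in the tests below:
    //
    //     let matches = project
    //         .read_with(cx, |project, cx| {
    //             project.match_paths("bna", false, false, 10, &cancel_flag, cx)
    //         })
    //         .await;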
4775 pub fn match_paths<'a>(
4776 &self,
4777 query: &'a str,
4778 include_ignored: bool,
4779 smart_case: bool,
4780 max_results: usize,
4781 cancel_flag: &'a AtomicBool,
4782 cx: &AppContext,
4783 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4784 let worktrees = self
4785 .worktrees(cx)
4786 .filter(|worktree| worktree.read(cx).is_visible())
4787 .collect::<Vec<_>>();
4788 let include_root_name = worktrees.len() > 1;
4789 let candidate_sets = worktrees
4790 .into_iter()
4791 .map(|worktree| CandidateSet {
4792 snapshot: worktree.read(cx).snapshot(),
4793 include_ignored,
4794 include_root_name,
4795 })
4796 .collect::<Vec<_>>();
4797
4798 let background = cx.background().clone();
4799 async move {
4800 fuzzy::match_paths(
4801 candidate_sets.as_slice(),
4802 query,
4803 smart_case,
4804 max_results,
4805 cancel_flag,
4806 background,
4807 )
4808 .await
4809 }
4810 }
4811
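    // Translates a batch of LSP `TextEdit`s, expressed against a particular
    // document version, into anchored edits on the buffer. Adjacent or
    // newline-separated edits are coalesced first, and multi-line replacements
    // are diffed against the existing text so that unchanged regions (and any
    // anchors inside them) are preserved.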
4812 fn edits_from_lsp(
4813 &mut self,
4814 buffer: &ModelHandle<Buffer>,
4815 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4816 version: Option<i32>,
4817 cx: &mut ModelContext<Self>,
4818 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4819 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4820 cx.background().spawn(async move {
4821 let snapshot = snapshot?;
4822 let mut lsp_edits = lsp_edits
4823 .into_iter()
4824 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4825 .peekable();
4826
4827 let mut edits = Vec::new();
4828 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4829 // Combine any LSP edits that are adjacent.
4830 //
4831 // Also, combine LSP edits that are separated from each other by only
4832 // a newline. This is important because for some code actions,
4833 // rust-analyzer rewrites the entire buffer via a series of edits that
4834 // are separated by unchanged newline characters.
4835 //
4836 // In order for the diffing logic below to work properly, any edits that
4837 // cancel each other out must be combined into one.
4838 while let Some((next_range, next_text)) = lsp_edits.peek() {
4839 if next_range.start > range.end {
4840 if next_range.start.row > range.end.row + 1
4841 || next_range.start.column > 0
4842 || snapshot.clip_point_utf16(
4843 PointUtf16::new(range.end.row, u32::MAX),
4844 Bias::Left,
4845 ) > range.end
4846 {
4847 break;
4848 }
4849 new_text.push('\n');
4850 }
4851 range.end = next_range.end;
4852 new_text.push_str(&next_text);
4853 lsp_edits.next();
4854 }
4855
4856 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4857 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4858 {
4859 return Err(anyhow!("invalid edits received from language server"));
4860 }
4861
4862 // For multiline edits, perform a diff of the old and new text so that
4863 // we can identify the changes more precisely, preserving the locations
4864 // of any anchors positioned in the unchanged regions.
4865 if range.end.row > range.start.row {
4866 let mut offset = range.start.to_offset(&snapshot);
4867 let old_text = snapshot.text_for_range(range).collect::<String>();
4868
4869 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4870 let mut moved_since_edit = true;
4871 for change in diff.iter_all_changes() {
4872 let tag = change.tag();
4873 let value = change.value();
4874 match tag {
4875 ChangeTag::Equal => {
4876 offset += value.len();
4877 moved_since_edit = true;
4878 }
4879 ChangeTag::Delete => {
4880 let start = snapshot.anchor_after(offset);
4881 let end = snapshot.anchor_before(offset + value.len());
4882 if moved_since_edit {
4883 edits.push((start..end, String::new()));
4884 } else {
4885 edits.last_mut().unwrap().0.end = end;
4886 }
4887 offset += value.len();
4888 moved_since_edit = false;
4889 }
4890 ChangeTag::Insert => {
4891 if moved_since_edit {
4892 let anchor = snapshot.anchor_after(offset);
4893 edits.push((anchor.clone()..anchor, value.to_string()));
4894 } else {
4895 edits.last_mut().unwrap().1.push_str(value);
4896 }
4897 moved_since_edit = false;
4898 }
4899 }
4900 }
4901 } else if range.end == range.start {
4902 let anchor = snapshot.anchor_after(range.start);
4903 edits.push((anchor.clone()..anchor, new_text));
4904 } else {
4905 let edit_start = snapshot.anchor_after(range.start);
4906 let edit_end = snapshot.anchor_before(range.end);
4907 edits.push((edit_start..edit_end, new_text));
4908 }
4909 }
4910
4911 Ok(edits)
4912 })
4913 }
4914
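    // Returns the buffer snapshot corresponding to the given LSP document
    // version, or the current text snapshot when no version is specified. While
    // searching, snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
    // the requested one are pruned.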
4915 fn buffer_snapshot_for_lsp_version(
4916 &mut self,
4917 buffer: &ModelHandle<Buffer>,
4918 version: Option<i32>,
4919 cx: &AppContext,
4920 ) -> Result<TextBufferSnapshot> {
4921 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4922
4923 if let Some(version) = version {
4924 let buffer_id = buffer.read(cx).remote_id();
4925 let snapshots = self
4926 .buffer_snapshots
4927 .get_mut(&buffer_id)
4928 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4929 let mut found_snapshot = None;
4930 snapshots.retain(|(snapshot_version, snapshot)| {
4931 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4932 false
4933 } else {
4934 if *snapshot_version == version {
4935 found_snapshot = Some(snapshot.clone());
4936 }
4937 true
4938 }
4939 });
4940
4941 found_snapshot.ok_or_else(|| {
4942 anyhow!(
4943 "snapshot not found for buffer {} at version {}",
4944 buffer_id,
4945 version
4946 )
4947 })
4948 } else {
4949 Ok((buffer.read(cx)).text_snapshot())
4950 }
4951 }
4952
4953 fn language_server_for_buffer(
4954 &self,
4955 buffer: &Buffer,
4956 cx: &AppContext,
4957 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4958 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4959 let worktree_id = file.worktree_id(cx);
4960 self.language_servers
4961 .get(&(worktree_id, language.lsp_adapter()?.name()))
4962 } else {
4963 None
4964 }
4965 }
4966}
4967
4968impl WorktreeHandle {
4969 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4970 match self {
4971 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4972 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4973 }
4974 }
4975}
4976
4977impl OpenBuffer {
4978 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4979 match self {
4980 OpenBuffer::Strong(handle) => Some(handle.clone()),
4981 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4982 OpenBuffer::Loading(_) => None,
4983 }
4984 }
4985}
4986
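// Adapter exposing a worktree snapshot to the fuzzy matcher as a set of
// path-match candidates, optionally including ignored files and prefixing
// candidates with the worktree's root name.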
4987struct CandidateSet {
4988 snapshot: Snapshot,
4989 include_ignored: bool,
4990 include_root_name: bool,
4991}
4992
4993impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4994 type Candidates = CandidateSetIter<'a>;
4995
4996 fn id(&self) -> usize {
4997 self.snapshot.id().to_usize()
4998 }
4999
5000 fn len(&self) -> usize {
5001 if self.include_ignored {
5002 self.snapshot.file_count()
5003 } else {
5004 self.snapshot.visible_file_count()
5005 }
5006 }
5007
5008 fn prefix(&self) -> Arc<str> {
5009 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5010 self.snapshot.root_name().into()
5011 } else if self.include_root_name {
5012 format!("{}/", self.snapshot.root_name()).into()
5013 } else {
5014 "".into()
5015 }
5016 }
5017
5018 fn candidates(&'a self, start: usize) -> Self::Candidates {
5019 CandidateSetIter {
5020 traversal: self.snapshot.files(self.include_ignored, start),
5021 }
5022 }
5023}
5024
5025struct CandidateSetIter<'a> {
5026 traversal: Traversal<'a>,
5027}
5028
5029impl<'a> Iterator for CandidateSetIter<'a> {
5030 type Item = PathMatchCandidate<'a>;
5031
5032 fn next(&mut self) -> Option<Self::Item> {
5033 self.traversal.next().map(|entry| {
5034 if let EntryKind::File(char_bag) = entry.kind {
5035 PathMatchCandidate {
5036 path: &entry.path,
5037 char_bag,
5038 }
5039 } else {
5040 unreachable!()
5041 }
5042 })
5043 }
5044}
5045
5046impl Entity for Project {
5047 type Event = Event;
5048
5049 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5050 match &self.client_state {
5051 ProjectClientState::Local { remote_id_rx, .. } => {
5052 if let Some(project_id) = *remote_id_rx.borrow() {
5053 self.client
5054 .send(proto::UnregisterProject { project_id })
5055 .log_err();
5056 }
5057 }
5058 ProjectClientState::Remote { remote_id, .. } => {
5059 self.client
5060 .send(proto::LeaveProject {
5061 project_id: *remote_id,
5062 })
5063 .log_err();
5064 }
5065 }
5066 }
5067
5068 fn app_will_quit(
5069 &mut self,
5070 _: &mut MutableAppContext,
5071 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5072 let shutdown_futures = self
5073 .language_servers
5074 .drain()
5075 .filter_map(|(_, (_, server))| server.shutdown())
5076 .collect::<Vec<_>>();
5077 Some(
5078 async move {
5079 futures::future::join_all(shutdown_futures).await;
5080 }
5081 .boxed(),
5082 )
5083 }
5084}
5085
5086impl Collaborator {
5087 fn from_proto(
5088 message: proto::Collaborator,
5089 user_store: &ModelHandle<UserStore>,
5090 cx: &mut AsyncAppContext,
5091 ) -> impl Future<Output = Result<Self>> {
5092 let user = user_store.update(cx, |user_store, cx| {
5093 user_store.fetch_user(message.user_id, cx)
5094 });
5095
5096 async move {
5097 Ok(Self {
5098 peer_id: PeerId(message.peer_id),
5099 user: user.await?,
5100 replica_id: message.replica_id as ReplicaId,
5101 })
5102 }
5103 }
5104}
5105
5106impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5107 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5108 Self {
5109 worktree_id,
5110 path: path.as_ref().into(),
5111 }
5112 }
5113}
5114
5115impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5116 fn from(options: lsp::CreateFileOptions) -> Self {
5117 Self {
5118 overwrite: options.overwrite.unwrap_or(false),
5119 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5120 }
5121 }
5122}
5123
5124impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5125 fn from(options: lsp::RenameFileOptions) -> Self {
5126 Self {
5127 overwrite: options.overwrite.unwrap_or(false),
5128 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5129 }
5130 }
5131}
5132
5133impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5134 fn from(options: lsp::DeleteFileOptions) -> Self {
5135 Self {
5136 recursive: options.recursive.unwrap_or(false),
5137 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5138 }
5139 }
5140}
5141
5142fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5143 proto::Symbol {
5144 source_worktree_id: symbol.source_worktree_id.to_proto(),
5145 worktree_id: symbol.worktree_id.to_proto(),
5146 language_server_name: symbol.language_server_name.0.to_string(),
5147 name: symbol.name.clone(),
5148 kind: unsafe { mem::transmute(symbol.kind) },
5149 path: symbol.path.to_string_lossy().to_string(),
5150 start: Some(proto::Point {
5151 row: symbol.range.start.row,
5152 column: symbol.range.start.column,
5153 }),
5154 end: Some(proto::Point {
5155 row: symbol.range.end.row,
5156 column: symbol.range.end.column,
5157 }),
5158 signature: symbol.signature.to_vec(),
5159 }
5160}
5161
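// Computes `path` relative to `base` by walking both component lists in
// lockstep. For example (illustrative), relativize_path(Path::new("/a/b"),
// Path::new("/a/c/d")) yields "../c/d".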
5162fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5163 let mut path_components = path.components();
5164 let mut base_components = base.components();
5165 let mut components: Vec<Component> = Vec::new();
5166 loop {
5167 match (path_components.next(), base_components.next()) {
5168 (None, None) => break,
5169 (Some(a), None) => {
5170 components.push(a);
5171 components.extend(path_components.by_ref());
5172 break;
5173 }
5174 (None, _) => components.push(Component::ParentDir),
5175 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5176 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5177 (Some(a), Some(_)) => {
5178 components.push(Component::ParentDir);
5179 for _ in base_components {
5180 components.push(Component::ParentDir);
5181 }
5182 components.push(a);
5183 components.extend(path_components.by_ref());
5184 break;
5185 }
5186 }
5187 }
5188 components.iter().map(|c| c.as_os_str()).collect()
5189}
5190
5191impl Item for Buffer {
5192 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5193 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5194 }
5195}
5196
5197#[cfg(test)]
5198mod tests {
5199 use crate::worktree::WorktreeHandle;
5200
5201 use super::{Event, *};
5202 use fs::RealFs;
5203 use futures::{future, StreamExt};
5204 use gpui::test::subscribe;
5205 use language::{
5206 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5207 OffsetRangeExt, Point, ToPoint,
5208 };
5209 use lsp::Url;
5210 use serde_json::json;
5211 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5212 use unindent::Unindent as _;
5213 use util::{assert_set_eq, test::temp_tree};
5214
5215 #[gpui::test]
5216 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5217 let dir = temp_tree(json!({
5218 "root": {
5219 "apple": "",
5220 "banana": {
5221 "carrot": {
5222 "date": "",
5223 "endive": "",
5224 }
5225 },
5226 "fennel": {
5227 "grape": "",
5228 }
5229 }
5230 }));
5231
5232 let root_link_path = dir.path().join("root_link");
5233 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5234 unix::fs::symlink(
5235 &dir.path().join("root/fennel"),
5236 &dir.path().join("root/finnochio"),
5237 )
5238 .unwrap();
5239
5240 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
5241
5242 project.read_with(cx, |project, cx| {
5243 let tree = project.worktrees(cx).next().unwrap().read(cx);
5244 assert_eq!(tree.file_count(), 5);
5245 assert_eq!(
5246 tree.inode_for_path("fennel/grape"),
5247 tree.inode_for_path("finnochio/grape")
5248 );
5249 });
5250
5251 let cancel_flag = Default::default();
5252 let results = project
5253 .read_with(cx, |project, cx| {
5254 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5255 })
5256 .await;
5257 assert_eq!(
5258 results
5259 .into_iter()
5260 .map(|result| result.path)
5261 .collect::<Vec<Arc<Path>>>(),
5262 vec![
5263 PathBuf::from("banana/carrot/date").into(),
5264 PathBuf::from("banana/carrot/endive").into(),
5265 ]
5266 );
5267 }
5268
5269 #[gpui::test]
5270 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5271 cx.foreground().forbid_parking();
5272
5273 let mut rust_language = Language::new(
5274 LanguageConfig {
5275 name: "Rust".into(),
5276 path_suffixes: vec!["rs".to_string()],
5277 ..Default::default()
5278 },
5279 Some(tree_sitter_rust::language()),
5280 );
5281 let mut json_language = Language::new(
5282 LanguageConfig {
5283 name: "JSON".into(),
5284 path_suffixes: vec!["json".to_string()],
5285 ..Default::default()
5286 },
5287 None,
5288 );
5289 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5290 name: "the-rust-language-server",
5291 capabilities: lsp::ServerCapabilities {
5292 completion_provider: Some(lsp::CompletionOptions {
5293 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5294 ..Default::default()
5295 }),
5296 ..Default::default()
5297 },
5298 ..Default::default()
5299 });
5300 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5301 name: "the-json-language-server",
5302 capabilities: lsp::ServerCapabilities {
5303 completion_provider: Some(lsp::CompletionOptions {
5304 trigger_characters: Some(vec![":".to_string()]),
5305 ..Default::default()
5306 }),
5307 ..Default::default()
5308 },
5309 ..Default::default()
5310 });
5311
5312 let fs = FakeFs::new(cx.background());
5313 fs.insert_tree(
5314 "/the-root",
5315 json!({
5316 "test.rs": "const A: i32 = 1;",
5317 "test2.rs": "",
5318 "Cargo.toml": "a = 1",
5319 "package.json": "{\"a\": 1}",
5320 }),
5321 )
5322 .await;
5323
5324 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5325 project.update(cx, |project, _| {
5326 project.languages.add(Arc::new(rust_language));
5327 project.languages.add(Arc::new(json_language));
5328 });
5329
5330 // Open a buffer without an associated language server.
5331 let toml_buffer = project
5332 .update(cx, |project, cx| {
5333 project.open_local_buffer("/the-root/Cargo.toml", cx)
5334 })
5335 .await
5336 .unwrap();
5337
5338 // Open a buffer with an associated language server.
5339 let rust_buffer = project
5340 .update(cx, |project, cx| {
5341 project.open_local_buffer("/the-root/test.rs", cx)
5342 })
5343 .await
5344 .unwrap();
5345
5346 // A server is started up, and it is notified about Rust files.
5347 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5348 assert_eq!(
5349 fake_rust_server
5350 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5351 .await
5352 .text_document,
5353 lsp::TextDocumentItem {
5354 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5355 version: 0,
5356 text: "const A: i32 = 1;".to_string(),
5357 language_id: Default::default()
5358 }
5359 );
5360
5361 // The buffer is configured based on the language server's capabilities.
5362 rust_buffer.read_with(cx, |buffer, _| {
5363 assert_eq!(
5364 buffer.completion_triggers(),
5365 &[".".to_string(), "::".to_string()]
5366 );
5367 });
5368 toml_buffer.read_with(cx, |buffer, _| {
5369 assert!(buffer.completion_triggers().is_empty());
5370 });
5371
5372 // Edit a buffer. The changes are reported to the language server.
5373 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5374 assert_eq!(
5375 fake_rust_server
5376 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5377 .await
5378 .text_document,
5379 lsp::VersionedTextDocumentIdentifier::new(
5380 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5381 1
5382 )
5383 );
5384
5385 // Open a third buffer with a different associated language server.
5386 let json_buffer = project
5387 .update(cx, |project, cx| {
5388 project.open_local_buffer("/the-root/package.json", cx)
5389 })
5390 .await
5391 .unwrap();
5392
5393 // A JSON language server is started up and is notified only about the JSON buffer.
5394 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5395 assert_eq!(
5396 fake_json_server
5397 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5398 .await
5399 .text_document,
5400 lsp::TextDocumentItem {
5401 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5402 version: 0,
5403 text: "{\"a\": 1}".to_string(),
5404 language_id: Default::default()
5405 }
5406 );
5407
5408 // This buffer is configured based on the second language server's
5409 // capabilities.
5410 json_buffer.read_with(cx, |buffer, _| {
5411 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5412 });
5413
5414 // When opening another buffer whose language server is already running,
5415 // it is also configured based on the existing language server's capabilities.
5416 let rust_buffer2 = project
5417 .update(cx, |project, cx| {
5418 project.open_local_buffer("/the-root/test2.rs", cx)
5419 })
5420 .await
5421 .unwrap();
5422 rust_buffer2.read_with(cx, |buffer, _| {
5423 assert_eq!(
5424 buffer.completion_triggers(),
5425 &[".".to_string(), "::".to_string()]
5426 );
5427 });
5428
5429 // Changes are reported only to servers matching the buffer's language.
5430 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5431 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5432 assert_eq!(
5433 fake_rust_server
5434 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5435 .await
5436 .text_document,
5437 lsp::VersionedTextDocumentIdentifier::new(
5438 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5439 1
5440 )
5441 );
5442
5443 // Save notifications are reported to all servers.
5444 toml_buffer
5445 .update(cx, |buffer, cx| buffer.save(cx))
5446 .await
5447 .unwrap();
5448 assert_eq!(
5449 fake_rust_server
5450 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5451 .await
5452 .text_document,
5453 lsp::TextDocumentIdentifier::new(
5454 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5455 )
5456 );
5457 assert_eq!(
5458 fake_json_server
5459 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5460 .await
5461 .text_document,
5462 lsp::TextDocumentIdentifier::new(
5463 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5464 )
5465 );
5466
5467 // Renames are reported only to servers matching the buffer's language.
5468 fs.rename(
5469 Path::new("/the-root/test2.rs"),
5470 Path::new("/the-root/test3.rs"),
5471 Default::default(),
5472 )
5473 .await
5474 .unwrap();
5475 assert_eq!(
5476 fake_rust_server
5477 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5478 .await
5479 .text_document,
5480 lsp::TextDocumentIdentifier::new(
5481 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5482 ),
5483 );
5484 assert_eq!(
5485 fake_rust_server
5486 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5487 .await
5488 .text_document,
5489 lsp::TextDocumentItem {
5490 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5491 version: 0,
5492 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5493 language_id: Default::default()
5494 },
5495 );
5496
5497 rust_buffer2.update(cx, |buffer, cx| {
5498 buffer.update_diagnostics(
5499 DiagnosticSet::from_sorted_entries(
5500 vec![DiagnosticEntry {
5501 diagnostic: Default::default(),
5502 range: Anchor::MIN..Anchor::MAX,
5503 }],
5504 &buffer.snapshot(),
5505 ),
5506 cx,
5507 );
5508 assert_eq!(
5509 buffer
5510 .snapshot()
5511 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5512 .count(),
5513 1
5514 );
5515 });
5516
5517 // When the rename changes the extension of the file, the buffer gets closed on the old
5518 // language server and gets opened on the new one.
5519 fs.rename(
5520 Path::new("/the-root/test3.rs"),
5521 Path::new("/the-root/test3.json"),
5522 Default::default(),
5523 )
5524 .await
5525 .unwrap();
5526 assert_eq!(
5527 fake_rust_server
5528 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5529 .await
5530 .text_document,
5531 lsp::TextDocumentIdentifier::new(
5532 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5533 ),
5534 );
5535 assert_eq!(
5536 fake_json_server
5537 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5538 .await
5539 .text_document,
5540 lsp::TextDocumentItem {
5541 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5542 version: 0,
5543 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5544 language_id: Default::default()
5545 },
5546 );
5547
5548 // The diagnostics are cleared, since the buffer's language has changed.
5549 rust_buffer2.read_with(cx, |buffer, _| {
5550 assert_eq!(
5551 buffer
5552 .snapshot()
5553 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5554 .count(),
5555 0
5556 );
5557 });
5558
5559 // The renamed file's version resets after changing the language server.
5560 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5561 assert_eq!(
5562 fake_json_server
5563 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5564 .await
5565 .text_document,
5566 lsp::VersionedTextDocumentIdentifier::new(
5567 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5568 1
5569 )
5570 );
5571
5572 // Restart language servers
5573 project.update(cx, |project, cx| {
5574 project.restart_language_servers_for_buffers(
5575 vec![rust_buffer.clone(), json_buffer.clone()],
5576 cx,
5577 );
5578 });
5579
5580 let mut rust_shutdown_requests = fake_rust_server
5581 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5582 let mut json_shutdown_requests = fake_json_server
5583 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5584 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5585
5586 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5587 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5588
5589 // Ensure the Rust document is reopened in the new Rust language server
5590 assert_eq!(
5591 fake_rust_server
5592 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5593 .await
5594 .text_document,
5595 lsp::TextDocumentItem {
5596 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5597 version: 1,
5598 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5599 language_id: Default::default()
5600 }
5601 );
5602
5603 // Ensure the JSON documents are reopened in the new JSON language server
5604 assert_set_eq!(
5605 [
5606 fake_json_server
5607 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5608 .await
5609 .text_document,
5610 fake_json_server
5611 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5612 .await
5613 .text_document,
5614 ],
5615 [
5616 lsp::TextDocumentItem {
5617 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5618 version: 0,
5619 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5620 language_id: Default::default()
5621 },
5622 lsp::TextDocumentItem {
5623 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5624 version: 1,
5625 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5626 language_id: Default::default()
5627 }
5628 ]
5629 );
5630
5631 // Close notifications are reported only to servers matching the buffer's language.
5632 cx.update(|_| drop(json_buffer));
5633 let close_message = lsp::DidCloseTextDocumentParams {
5634 text_document: lsp::TextDocumentIdentifier::new(
5635 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5636 ),
5637 };
5638 assert_eq!(
5639 fake_json_server
5640 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5641 .await,
5642 close_message,
5643 );
5644 }
5645
5646 #[gpui::test]
5647 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5648 cx.foreground().forbid_parking();
5649
5650 let fs = FakeFs::new(cx.background());
5651 fs.insert_tree(
5652 "/dir",
5653 json!({
5654 "a.rs": "let a = 1;",
5655 "b.rs": "let b = 2;"
5656 }),
5657 )
5658 .await;
5659
5660 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5661
5662 let buffer_a = project
5663 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5664 .await
5665 .unwrap();
5666 let buffer_b = project
5667 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5668 .await
5669 .unwrap();
5670
5671 project.update(cx, |project, cx| {
5672 project
5673 .update_diagnostics(
5674 lsp::PublishDiagnosticsParams {
5675 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5676 version: None,
5677 diagnostics: vec![lsp::Diagnostic {
5678 range: lsp::Range::new(
5679 lsp::Position::new(0, 4),
5680 lsp::Position::new(0, 5),
5681 ),
5682 severity: Some(lsp::DiagnosticSeverity::ERROR),
5683 message: "error 1".to_string(),
5684 ..Default::default()
5685 }],
5686 },
5687 &[],
5688 cx,
5689 )
5690 .unwrap();
5691 project
5692 .update_diagnostics(
5693 lsp::PublishDiagnosticsParams {
5694 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5695 version: None,
5696 diagnostics: vec![lsp::Diagnostic {
5697 range: lsp::Range::new(
5698 lsp::Position::new(0, 4),
5699 lsp::Position::new(0, 5),
5700 ),
5701 severity: Some(lsp::DiagnosticSeverity::WARNING),
5702 message: "error 2".to_string(),
5703 ..Default::default()
5704 }],
5705 },
5706 &[],
5707 cx,
5708 )
5709 .unwrap();
5710 });
5711
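        // Each buffer should contain only the diagnostics reported for its own file.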
5712 buffer_a.read_with(cx, |buffer, _| {
5713 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5714 assert_eq!(
5715 chunks
5716 .iter()
5717 .map(|(s, d)| (s.as_str(), *d))
5718 .collect::<Vec<_>>(),
5719 &[
5720 ("let ", None),
5721 ("a", Some(DiagnosticSeverity::ERROR)),
5722 (" = 1;", None),
5723 ]
5724 );
5725 });
5726 buffer_b.read_with(cx, |buffer, _| {
5727 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5728 assert_eq!(
5729 chunks
5730 .iter()
5731 .map(|(s, d)| (s.as_str(), *d))
5732 .collect::<Vec<_>>(),
5733 &[
5734 ("let ", None),
5735 ("b", Some(DiagnosticSeverity::WARNING)),
5736 (" = 2;", None),
5737 ]
5738 );
5739 });
5740 }
5741
5742 #[gpui::test]
5743 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5744 cx.foreground().forbid_parking();
5745
5746 let progress_token = "the-progress-token";
5747 let mut language = Language::new(
5748 LanguageConfig {
5749 name: "Rust".into(),
5750 path_suffixes: vec!["rs".to_string()],
5751 ..Default::default()
5752 },
5753 Some(tree_sitter_rust::language()),
5754 );
5755 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5756 disk_based_diagnostics_progress_token: Some(progress_token),
5757 disk_based_diagnostics_sources: &["disk"],
5758 ..Default::default()
5759 });
5760
5761 let fs = FakeFs::new(cx.background());
5762 fs.insert_tree(
5763 "/dir",
5764 json!({
5765 "a.rs": "fn a() { A }",
5766 "b.rs": "const y: i32 = 1",
5767 }),
5768 )
5769 .await;
5770
5771 let project = Project::test(fs, ["/dir"], cx).await;
5772 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5773 let worktree_id =
5774 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5775
5776 // Cause worktree to start the fake language server
5777 let _buffer = project
5778 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5779 .await
5780 .unwrap();
5781
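        // Start listening for project events before the fake server reports any progress.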
5782 let mut events = subscribe(&project, cx);
5783
5784 let mut fake_server = fake_servers.next().await.unwrap();
5785 fake_server.start_progress(progress_token).await;
5786 assert_eq!(
5787 events.next().await.unwrap(),
5788 Event::DiskBasedDiagnosticsStarted
5789 );
5790
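        // Simulate overlapping progress for the same token; the disk-based diagnostics
        // should not be reported as finished until every start has been matched by an end.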
5791 fake_server.start_progress(progress_token).await;
5792 fake_server.end_progress(progress_token).await;
5793 fake_server.start_progress(progress_token).await;
5794
5795 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5796 lsp::PublishDiagnosticsParams {
5797 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5798 version: None,
5799 diagnostics: vec![lsp::Diagnostic {
5800 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5801 severity: Some(lsp::DiagnosticSeverity::ERROR),
5802 message: "undefined variable 'A'".to_string(),
5803 ..Default::default()
5804 }],
5805 },
5806 );
5807 assert_eq!(
5808 events.next().await.unwrap(),
5809 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5810 );
5811
5812 fake_server.end_progress(progress_token).await;
5813 fake_server.end_progress(progress_token).await;
5814 assert_eq!(
5815 events.next().await.unwrap(),
5816 Event::DiskBasedDiagnosticsUpdated
5817 );
5818 assert_eq!(
5819 events.next().await.unwrap(),
5820 Event::DiskBasedDiagnosticsFinished
5821 );
5822
5823 let buffer = project
5824 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5825 .await
5826 .unwrap();
5827
5828 buffer.read_with(cx, |buffer, _| {
5829 let snapshot = buffer.snapshot();
5830 let diagnostics = snapshot
5831 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5832 .collect::<Vec<_>>();
5833 assert_eq!(
5834 diagnostics,
5835 &[DiagnosticEntry {
5836 range: Point::new(0, 9)..Point::new(0, 10),
5837 diagnostic: Diagnostic {
5838 severity: lsp::DiagnosticSeverity::ERROR,
5839 message: "undefined variable 'A'".to_string(),
5840 group_id: 0,
5841 is_primary: true,
5842 ..Default::default()
5843 }
5844 }]
5845 )
5846 });
5847
5848 // Ensure publishing empty diagnostics twice only results in one update event.
5849 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5850 lsp::PublishDiagnosticsParams {
5851 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5852 version: None,
5853 diagnostics: Default::default(),
5854 },
5855 );
5856 assert_eq!(
5857 events.next().await.unwrap(),
5858 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5859 );
5860
5861 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5862 lsp::PublishDiagnosticsParams {
5863 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5864 version: None,
5865 diagnostics: Default::default(),
5866 },
5867 );
5868 cx.foreground().run_until_parked();
5869 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5870 }
5871
5872 #[gpui::test]
5873 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5874 cx.foreground().forbid_parking();
5875
5876 let progress_token = "the-progress-token";
5877 let mut language = Language::new(
5878 LanguageConfig {
5879 path_suffixes: vec!["rs".to_string()],
5880 ..Default::default()
5881 },
5882 None,
5883 );
5884 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5885 disk_based_diagnostics_sources: &["disk"],
5886 disk_based_diagnostics_progress_token: Some(progress_token),
5887 ..Default::default()
5888 });
5889
5890 let fs = FakeFs::new(cx.background());
5891 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5892
5893 let project = Project::test(fs, ["/dir"], cx).await;
5894 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5895
5896 let buffer = project
5897 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5898 .await
5899 .unwrap();
5900
5901 // Simulate diagnostics starting to update.
5902 let mut fake_server = fake_servers.next().await.unwrap();
5903 fake_server.start_progress(progress_token).await;
5904
5905 // Restart the server before the diagnostics finish updating.
5906 project.update(cx, |project, cx| {
5907 project.restart_language_servers_for_buffers([buffer], cx);
5908 });
5909 let mut events = subscribe(&project, cx);
5910
5911 // Simulate the newly started server sending more diagnostics.
5912 let mut fake_server = fake_servers.next().await.unwrap();
5913 fake_server.start_progress(progress_token).await;
5914 assert_eq!(
5915 events.next().await.unwrap(),
5916 Event::DiskBasedDiagnosticsStarted
5917 );
5918
5919 // All diagnostics are considered done, despite the old server's diagnostic
5920 // task never completing.
5921 fake_server.end_progress(progress_token).await;
5922 assert_eq!(
5923 events.next().await.unwrap(),
5924 Event::DiskBasedDiagnosticsUpdated
5925 );
5926 assert_eq!(
5927 events.next().await.unwrap(),
5928 Event::DiskBasedDiagnosticsFinished
5929 );
5930 project.read_with(cx, |project, _| {
5931 assert!(!project.is_running_disk_based_diagnostics());
5932 });
5933 }
5934
5935 #[gpui::test]
5936 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5937 cx.foreground().forbid_parking();
5938
5939 let mut language = Language::new(
5940 LanguageConfig {
5941 name: "Rust".into(),
5942 path_suffixes: vec!["rs".to_string()],
5943 ..Default::default()
5944 },
5945 Some(tree_sitter_rust::language()),
5946 );
5947 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5948 disk_based_diagnostics_sources: &["disk"],
5949 ..Default::default()
5950 });
5951
5952 let text = "
5953 fn a() { A }
5954 fn b() { BB }
5955 fn c() { CCC }
5956 "
5957 .unindent();
5958
5959 let fs = FakeFs::new(cx.background());
5960 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5961
5962 let project = Project::test(fs, ["/dir"], cx).await;
5963 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5964
5965 let buffer = project
5966 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5967 .await
5968 .unwrap();
5969
5970 let mut fake_server = fake_servers.next().await.unwrap();
5971 let open_notification = fake_server
5972 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5973 .await;
5974
5975 // Edit the buffer, moving the content down
5976 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
5977 let change_notification_1 = fake_server
5978 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5979 .await;
5980 assert!(
5981 change_notification_1.text_document.version > open_notification.text_document.version
5982 );
5983
5984 // Report some diagnostics for the initial version of the buffer
5985 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5986 lsp::PublishDiagnosticsParams {
5987 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5988 version: Some(open_notification.text_document.version),
5989 diagnostics: vec![
5990 lsp::Diagnostic {
5991 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5992 severity: Some(DiagnosticSeverity::ERROR),
5993 message: "undefined variable 'A'".to_string(),
5994 source: Some("disk".to_string()),
5995 ..Default::default()
5996 },
5997 lsp::Diagnostic {
5998 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5999 severity: Some(DiagnosticSeverity::ERROR),
6000 message: "undefined variable 'BB'".to_string(),
6001 source: Some("disk".to_string()),
6002 ..Default::default()
6003 },
6004 lsp::Diagnostic {
6005 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6006 severity: Some(DiagnosticSeverity::ERROR),
6007 source: Some("disk".to_string()),
6008 message: "undefined variable 'CCC'".to_string(),
6009 ..Default::default()
6010 },
6011 ],
6012 },
6013 );
6014
6015 // The diagnostics have moved down since they were created.
6016 buffer.next_notification(cx).await;
6017 buffer.read_with(cx, |buffer, _| {
6018 assert_eq!(
6019 buffer
6020 .snapshot()
6021 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6022 .collect::<Vec<_>>(),
6023 &[
6024 DiagnosticEntry {
6025 range: Point::new(3, 9)..Point::new(3, 11),
6026 diagnostic: Diagnostic {
6027 severity: DiagnosticSeverity::ERROR,
6028 message: "undefined variable 'BB'".to_string(),
6029 is_disk_based: true,
6030 group_id: 1,
6031 is_primary: true,
6032 ..Default::default()
6033 },
6034 },
6035 DiagnosticEntry {
6036 range: Point::new(4, 9)..Point::new(4, 12),
6037 diagnostic: Diagnostic {
6038 severity: DiagnosticSeverity::ERROR,
6039 message: "undefined variable 'CCC'".to_string(),
6040 is_disk_based: true,
6041 group_id: 2,
6042 is_primary: true,
6043 ..Default::default()
6044 }
6045 }
6046 ]
6047 );
6048 assert_eq!(
6049 chunks_with_diagnostics(buffer, 0..buffer.len()),
6050 [
6051 ("\n\nfn a() { ".to_string(), None),
6052 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6053 (" }\nfn b() { ".to_string(), None),
6054 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6055 (" }\nfn c() { ".to_string(), None),
6056 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6057 (" }\n".to_string(), None),
6058 ]
6059 );
6060 assert_eq!(
6061 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6062 [
6063 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6064 (" }\nfn c() { ".to_string(), None),
6065 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6066 ]
6067 );
6068 });
6069
6070 // Ensure overlapping diagnostics are highlighted correctly.
6071 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6072 lsp::PublishDiagnosticsParams {
6073 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6074 version: Some(open_notification.text_document.version),
6075 diagnostics: vec![
6076 lsp::Diagnostic {
6077 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6078 severity: Some(DiagnosticSeverity::ERROR),
6079 message: "undefined variable 'A'".to_string(),
6080 source: Some("disk".to_string()),
6081 ..Default::default()
6082 },
6083 lsp::Diagnostic {
6084 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6085 severity: Some(DiagnosticSeverity::WARNING),
6086 message: "unreachable statement".to_string(),
6087 source: Some("disk".to_string()),
6088 ..Default::default()
6089 },
6090 ],
6091 },
6092 );
6093
6094 buffer.next_notification(cx).await;
6095 buffer.read_with(cx, |buffer, _| {
6096 assert_eq!(
6097 buffer
6098 .snapshot()
6099 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6100 .collect::<Vec<_>>(),
6101 &[
6102 DiagnosticEntry {
6103 range: Point::new(2, 9)..Point::new(2, 12),
6104 diagnostic: Diagnostic {
6105 severity: DiagnosticSeverity::WARNING,
6106 message: "unreachable statement".to_string(),
6107 is_disk_based: true,
6108 group_id: 1,
6109 is_primary: true,
6110 ..Default::default()
6111 }
6112 },
6113 DiagnosticEntry {
6114 range: Point::new(2, 9)..Point::new(2, 10),
6115 diagnostic: Diagnostic {
6116 severity: DiagnosticSeverity::ERROR,
6117 message: "undefined variable 'A'".to_string(),
6118 is_disk_based: true,
6119 group_id: 0,
6120 is_primary: true,
6121 ..Default::default()
6122 },
6123 }
6124 ]
6125 );
6126 assert_eq!(
6127 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6128 [
6129 ("fn a() { ".to_string(), None),
6130 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6131 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6132 ("\n".to_string(), None),
6133 ]
6134 );
6135 assert_eq!(
6136 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6137 [
6138 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6139 ("\n".to_string(), None),
6140 ]
6141 );
6142 });
6143
6144 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6145 // changes since the last save.
6146 buffer.update(cx, |buffer, cx| {
6147 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6148 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6149 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6150 });
6151 let change_notification_2 = fake_server
6152 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6153 .await;
6154 assert!(
6155 change_notification_2.text_document.version
6156 > change_notification_1.text_document.version
6157 );
6158
6159 // Handle out-of-order diagnostics
6160 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6161 lsp::PublishDiagnosticsParams {
6162 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6163 version: Some(change_notification_2.text_document.version),
6164 diagnostics: vec![
6165 lsp::Diagnostic {
6166 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6167 severity: Some(DiagnosticSeverity::ERROR),
6168 message: "undefined variable 'BB'".to_string(),
6169 source: Some("disk".to_string()),
6170 ..Default::default()
6171 },
6172 lsp::Diagnostic {
6173 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6174 severity: Some(DiagnosticSeverity::WARNING),
6175 message: "undefined variable 'A'".to_string(),
6176 source: Some("disk".to_string()),
6177 ..Default::default()
6178 },
6179 ],
6180 },
6181 );
6182
6183 buffer.next_notification(cx).await;
6184 buffer.read_with(cx, |buffer, _| {
6185 assert_eq!(
6186 buffer
6187 .snapshot()
6188 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6189 .collect::<Vec<_>>(),
6190 &[
6191 DiagnosticEntry {
6192 range: Point::new(2, 21)..Point::new(2, 22),
6193 diagnostic: Diagnostic {
6194 severity: DiagnosticSeverity::WARNING,
6195 message: "undefined variable 'A'".to_string(),
6196 is_disk_based: true,
6197 group_id: 1,
6198 is_primary: true,
6199 ..Default::default()
6200 }
6201 },
6202 DiagnosticEntry {
6203 range: Point::new(3, 9)..Point::new(3, 14),
6204 diagnostic: Diagnostic {
6205 severity: DiagnosticSeverity::ERROR,
6206 message: "undefined variable 'BB'".to_string(),
6207 is_disk_based: true,
6208 group_id: 0,
6209 is_primary: true,
6210 ..Default::default()
6211 },
6212 }
6213 ]
6214 );
6215 });
6216 }
6217
6218 #[gpui::test]
6219 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6220 cx.foreground().forbid_parking();
6221
6222 let text = concat!(
6223 "let one = ;\n", //
6224 "let two = \n",
6225 "let three = 3;\n",
6226 );
6227
6228 let fs = FakeFs::new(cx.background());
6229 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6230
6231 let project = Project::test(fs, ["/dir"], cx).await;
6232 let buffer = project
6233 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6234 .await
6235 .unwrap();
6236
6237 project.update(cx, |project, cx| {
6238 project
6239 .update_buffer_diagnostics(
6240 &buffer,
6241 vec![
6242 DiagnosticEntry {
6243 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6244 diagnostic: Diagnostic {
6245 severity: DiagnosticSeverity::ERROR,
6246 message: "syntax error 1".to_string(),
6247 ..Default::default()
6248 },
6249 },
6250 DiagnosticEntry {
6251 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6252 diagnostic: Diagnostic {
6253 severity: DiagnosticSeverity::ERROR,
6254 message: "syntax error 2".to_string(),
6255 ..Default::default()
6256 },
6257 },
6258 ],
6259 None,
6260 cx,
6261 )
6262 .unwrap();
6263 });
6264
6265 // An empty range is extended forward to include the following character.
6266 // At the end of a line, an empty range is extended backward to include
6267 // the preceding character.
6268 buffer.read_with(cx, |buffer, _| {
6269 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6270 assert_eq!(
6271 chunks
6272 .iter()
6273 .map(|(s, d)| (s.as_str(), *d))
6274 .collect::<Vec<_>>(),
6275 &[
6276 ("let one = ", None),
6277 (";", Some(DiagnosticSeverity::ERROR)),
6278 ("\nlet two =", None),
6279 (" ", Some(DiagnosticSeverity::ERROR)),
6280 ("\nlet three = 3;\n", None)
6281 ]
6282 );
6283 });
6284 }
6285
6286 #[gpui::test]
6287 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6288 cx.foreground().forbid_parking();
6289
6290 let mut language = Language::new(
6291 LanguageConfig {
6292 name: "Rust".into(),
6293 path_suffixes: vec!["rs".to_string()],
6294 ..Default::default()
6295 },
6296 Some(tree_sitter_rust::language()),
6297 );
6298 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6299
6300 let text = "
6301 fn a() {
6302 f1();
6303 }
6304 fn b() {
6305 f2();
6306 }
6307 fn c() {
6308 f3();
6309 }
6310 "
6311 .unindent();
6312
6313 let fs = FakeFs::new(cx.background());
6314 fs.insert_tree(
6315 "/dir",
6316 json!({
6317 "a.rs": text.clone(),
6318 }),
6319 )
6320 .await;
6321
6322 let project = Project::test(fs, ["/dir"], cx).await;
6323 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6324 let buffer = project
6325 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6326 .await
6327 .unwrap();
6328
6329 let mut fake_server = fake_servers.next().await.unwrap();
6330 let lsp_document_version = fake_server
6331 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6332 .await
6333 .text_document
6334 .version;
6335
6336 // Simulate editing the buffer after the language server computes some edits.
6337 buffer.update(cx, |buffer, cx| {
6338 buffer.edit(
6339 [(
6340 Point::new(0, 0)..Point::new(0, 0),
6341 "// above first function\n",
6342 )],
6343 cx,
6344 );
6345 buffer.edit(
6346 [(
6347 Point::new(2, 0)..Point::new(2, 0),
6348 " // inside first function\n",
6349 )],
6350 cx,
6351 );
6352 buffer.edit(
6353 [(
6354 Point::new(6, 4)..Point::new(6, 4),
6355 "// inside second function ",
6356 )],
6357 cx,
6358 );
6359
6360 assert_eq!(
6361 buffer.text(),
6362 "
6363 // above first function
6364 fn a() {
6365 // inside first function
6366 f1();
6367 }
6368 fn b() {
6369 // inside second function f2();
6370 }
6371 fn c() {
6372 f3();
6373 }
6374 "
6375 .unindent()
6376 );
6377 });
6378
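        // Convert the LSP edits, which were computed against the document version captured
        // above, into edits on the current buffer contents.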
6379 let edits = project
6380 .update(cx, |project, cx| {
6381 project.edits_from_lsp(
6382 &buffer,
6383 vec![
6384 // replace body of first function
6385 lsp::TextEdit {
6386 range: lsp::Range::new(
6387 lsp::Position::new(0, 0),
6388 lsp::Position::new(3, 0),
6389 ),
6390 new_text: "
6391 fn a() {
6392 f10();
6393 }
6394 "
6395 .unindent(),
6396 },
6397 // edit inside second function
6398 lsp::TextEdit {
6399 range: lsp::Range::new(
6400 lsp::Position::new(4, 6),
6401 lsp::Position::new(4, 6),
6402 ),
6403 new_text: "00".into(),
6404 },
6405 // edit inside third function via two distinct edits
6406 lsp::TextEdit {
6407 range: lsp::Range::new(
6408 lsp::Position::new(7, 5),
6409 lsp::Position::new(7, 5),
6410 ),
6411 new_text: "4000".into(),
6412 },
6413 lsp::TextEdit {
6414 range: lsp::Range::new(
6415 lsp::Position::new(7, 5),
6416 lsp::Position::new(7, 6),
6417 ),
6418 new_text: "".into(),
6419 },
6420 ],
6421 Some(lsp_document_version),
6422 cx,
6423 )
6424 })
6425 .await
6426 .unwrap();
6427
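        // Applying the converted edits should preserve the comments that were inserted after
        // the server's version of the document.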
6428 buffer.update(cx, |buffer, cx| {
6429 for (range, new_text) in edits {
6430 buffer.edit([(range, new_text)], cx);
6431 }
6432 assert_eq!(
6433 buffer.text(),
6434 "
6435 // above first function
6436 fn a() {
6437 // inside first function
6438 f10();
6439 }
6440 fn b() {
6441 // inside second function f200();
6442 }
6443 fn c() {
6444 f4000();
6445 }
6446 "
6447 .unindent()
6448 );
6449 });
6450 }
6451
6452 #[gpui::test]
6453 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6454 cx.foreground().forbid_parking();
6455
6456 let text = "
6457 use a::b;
6458 use a::c;
6459
6460 fn f() {
6461 b();
6462 c();
6463 }
6464 "
6465 .unindent();
6466
6467 let fs = FakeFs::new(cx.background());
6468 fs.insert_tree(
6469 "/dir",
6470 json!({
6471 "a.rs": text.clone(),
6472 }),
6473 )
6474 .await;
6475
6476 let project = Project::test(fs, ["/dir"], cx).await;
6477 let buffer = project
6478 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6479 .await
6480 .unwrap();
6481
6482 // Simulate the language server sending us a small edit in the form of a very large diff.
6483 // Rust-analyzer does this when performing a merge-imports code action.
6484 let edits = project
6485 .update(cx, |project, cx| {
6486 project.edits_from_lsp(
6487 &buffer,
6488 [
6489 // Replace the first use statement without editing the semicolon.
6490 lsp::TextEdit {
6491 range: lsp::Range::new(
6492 lsp::Position::new(0, 4),
6493 lsp::Position::new(0, 8),
6494 ),
6495 new_text: "a::{b, c}".into(),
6496 },
6497 // Reinsert the remainder of the file between the semicolon and the final
6498 // newline of the file.
6499 lsp::TextEdit {
6500 range: lsp::Range::new(
6501 lsp::Position::new(0, 9),
6502 lsp::Position::new(0, 9),
6503 ),
6504 new_text: "\n\n".into(),
6505 },
6506 lsp::TextEdit {
6507 range: lsp::Range::new(
6508 lsp::Position::new(0, 9),
6509 lsp::Position::new(0, 9),
6510 ),
6511 new_text: "
6512 fn f() {
6513 b();
6514 c();
6515 }"
6516 .unindent(),
6517 },
6518 // Delete everything after the first newline of the file.
6519 lsp::TextEdit {
6520 range: lsp::Range::new(
6521 lsp::Position::new(1, 0),
6522 lsp::Position::new(7, 0),
6523 ),
6524 new_text: "".into(),
6525 },
6526 ],
6527 None,
6528 cx,
6529 )
6530 })
6531 .await
6532 .unwrap();
6533
6534 buffer.update(cx, |buffer, cx| {
6535 let edits = edits
6536 .into_iter()
6537 .map(|(range, text)| {
6538 (
6539 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6540 text,
6541 )
6542 })
6543 .collect::<Vec<_>>();
6544
6545 assert_eq!(
6546 edits,
6547 [
6548 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6549 (Point::new(1, 0)..Point::new(2, 0), "".into())
6550 ]
6551 );
6552
6553 for (range, new_text) in edits {
6554 buffer.edit([(range, new_text)], cx);
6555 }
6556 assert_eq!(
6557 buffer.text(),
6558 "
6559 use a::{b, c};
6560
6561 fn f() {
6562 b();
6563 c();
6564 }
6565 "
6566 .unindent()
6567 );
6568 });
6569 }
6570
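    // Returns the text of `range` as (text, diagnostic severity) chunks, coalescing
    // adjacent chunks that share the same severity.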
6571 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6572 buffer: &Buffer,
6573 range: Range<T>,
6574 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6575 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6576 for chunk in buffer.snapshot().chunks(range, true) {
6577 if chunks.last().map_or(false, |prev_chunk| {
6578 prev_chunk.1 == chunk.diagnostic_severity
6579 }) {
6580 chunks.last_mut().unwrap().0.push_str(chunk.text);
6581 } else {
6582 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6583 }
6584 }
6585 chunks
6586 }
6587
6588 #[gpui::test]
6589 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6590 let dir = temp_tree(json!({
6591 "root": {
6592 "dir1": {},
6593 "dir2": {
6594 "dir3": {}
6595 }
6596 }
6597 }));
6598
6599 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6600 let cancel_flag = Default::default();
6601 let results = project
6602 .read_with(cx, |project, cx| {
6603 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6604 })
6605 .await;
6606
6607 assert!(results.is_empty());
6608 }
6609
6610 #[gpui::test(iterations = 10)]
6611 async fn test_definition(cx: &mut gpui::TestAppContext) {
6612 let mut language = Language::new(
6613 LanguageConfig {
6614 name: "Rust".into(),
6615 path_suffixes: vec!["rs".to_string()],
6616 ..Default::default()
6617 },
6618 Some(tree_sitter_rust::language()),
6619 );
6620 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6621
6622 let fs = FakeFs::new(cx.background());
6623 fs.insert_tree(
6624 "/dir",
6625 json!({
6626 "a.rs": "const fn a() { A }",
6627 "b.rs": "const y: i32 = crate::a()",
6628 }),
6629 )
6630 .await;
6631
6632 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6633 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6634
6635 let buffer = project
6636 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6637 .await
6638 .unwrap();
6639
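        // Respond to the definition request with a location in `a.rs`, which is outside
        // the project's only visible worktree.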
6640 let fake_server = fake_servers.next().await.unwrap();
6641 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6642 let params = params.text_document_position_params;
6643 assert_eq!(
6644 params.text_document.uri.to_file_path().unwrap(),
6645 Path::new("/dir/b.rs"),
6646 );
6647 assert_eq!(params.position, lsp::Position::new(0, 22));
6648
6649 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6650 lsp::Location::new(
6651 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6652 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6653 ),
6654 )))
6655 });
6656
6657 let mut definitions = project
6658 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6659 .await
6660 .unwrap();
6661
6662 assert_eq!(definitions.len(), 1);
6663 let definition = definitions.pop().unwrap();
6664 cx.update(|cx| {
6665 let target_buffer = definition.buffer.read(cx);
6666 assert_eq!(
6667 target_buffer
6668 .file()
6669 .unwrap()
6670 .as_local()
6671 .unwrap()
6672 .abs_path(cx),
6673 Path::new("/dir/a.rs"),
6674 );
6675 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6676 assert_eq!(
6677 list_worktrees(&project, cx),
6678 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6679 );
6680
6681 drop(definition);
6682 });
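        // Dropping the definition should release the extra (invisible) worktree that was
        // created for the target file.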
6683 cx.read(|cx| {
6684 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6685 });
6686
6687 fn list_worktrees<'a>(
6688 project: &'a ModelHandle<Project>,
6689 cx: &'a AppContext,
6690 ) -> Vec<(&'a Path, bool)> {
6691 project
6692 .read(cx)
6693 .worktrees(cx)
6694 .map(|worktree| {
6695 let worktree = worktree.read(cx);
6696 (
6697 worktree.as_local().unwrap().abs_path().as_ref(),
6698 worktree.is_visible(),
6699 )
6700 })
6701 .collect::<Vec<_>>()
6702 }
6703 }
6704
6705 #[gpui::test]
6706 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6707 let mut language = Language::new(
6708 LanguageConfig {
6709 name: "TypeScript".into(),
6710 path_suffixes: vec!["ts".to_string()],
6711 ..Default::default()
6712 },
6713 Some(tree_sitter_typescript::language_typescript()),
6714 );
6715 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6716
6717 let fs = FakeFs::new(cx.background());
6718 fs.insert_tree(
6719 "/dir",
6720 json!({
6721 "a.ts": "",
6722 }),
6723 )
6724 .await;
6725
6726 let project = Project::test(fs, ["/dir"], cx).await;
6727 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6728 let buffer = project
6729 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6730 .await
6731 .unwrap();
6732
6733 let fake_server = fake_language_servers.next().await.unwrap();
6734
6735 let text = "let a = b.fqn";
6736 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6737 let completions = project.update(cx, |project, cx| {
6738 project.completions(&buffer, text.len(), cx)
6739 });
6740
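        // Respond with a completion item that has no edit range; the replaced range should
        // be inferred from the partial word before the cursor.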
6741 fake_server
6742 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6743 Ok(Some(lsp::CompletionResponse::Array(vec![
6744 lsp::CompletionItem {
6745 label: "fullyQualifiedName?".into(),
6746 insert_text: Some("fullyQualifiedName".into()),
6747 ..Default::default()
6748 },
6749 ])))
6750 })
6751 .next()
6752 .await;
6753 let completions = completions.await.unwrap();
6754 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6755 assert_eq!(completions.len(), 1);
6756 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6757 assert_eq!(
6758 completions[0].old_range.to_offset(&snapshot),
6759 text.len() - 3..text.len()
6760 );
6761 }
6762
6763 #[gpui::test(iterations = 10)]
6764 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6765 let mut language = Language::new(
6766 LanguageConfig {
6767 name: "TypeScript".into(),
6768 path_suffixes: vec!["ts".to_string()],
6769 ..Default::default()
6770 },
6771 None,
6772 );
6773 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6774
6775 let fs = FakeFs::new(cx.background());
6776 fs.insert_tree(
6777 "/dir",
6778 json!({
6779 "a.ts": "a",
6780 }),
6781 )
6782 .await;
6783
6784 let project = Project::test(fs, ["/dir"], cx).await;
6785 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6786 let buffer = project
6787 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6788 .await
6789 .unwrap();
6790
6791 let fake_server = fake_language_servers.next().await.unwrap();
6792
6793        // The language server returns code actions that contain commands, but no edits.
6794 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6795 fake_server
6796 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6797 Ok(Some(vec![
6798 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6799 title: "The code action".into(),
6800 command: Some(lsp::Command {
6801 title: "The command".into(),
6802 command: "_the/command".into(),
6803 arguments: Some(vec![json!("the-argument")]),
6804 }),
6805 ..Default::default()
6806 }),
6807 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6808 title: "two".into(),
6809 ..Default::default()
6810 }),
6811 ]))
6812 })
6813 .next()
6814 .await;
6815
6816 let action = actions.await.unwrap()[0].clone();
6817 let apply = project.update(cx, |project, cx| {
6818 project.apply_code_action(buffer.clone(), action, true, cx)
6819 });
6820
6821        // Resolving the code action does not populate its edits. In the absence
6822        // of edits, we must execute the given command.
6823 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6824 |action, _| async move { Ok(action) },
6825 );
6826
6827 // While executing the command, the language server sends the editor
6828 // a `workspaceEdit` request.
6829 fake_server
6830 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6831 let fake = fake_server.clone();
6832 move |params, _| {
6833 assert_eq!(params.command, "_the/command");
6834 let fake = fake.clone();
6835 async move {
6836 fake.server
6837 .request::<lsp::request::ApplyWorkspaceEdit>(
6838 lsp::ApplyWorkspaceEditParams {
6839 label: None,
6840 edit: lsp::WorkspaceEdit {
6841 changes: Some(
6842 [(
6843 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6844 vec![lsp::TextEdit {
6845 range: lsp::Range::new(
6846 lsp::Position::new(0, 0),
6847 lsp::Position::new(0, 0),
6848 ),
6849 new_text: "X".into(),
6850 }],
6851 )]
6852 .into_iter()
6853 .collect(),
6854 ),
6855 ..Default::default()
6856 },
6857 },
6858 )
6859 .await
6860 .unwrap();
6861 Ok(Some(json!(null)))
6862 }
6863 }
6864 })
6865 .next()
6866 .await;
6867
6868 // Applying the code action returns a project transaction containing the edits
6869 // sent by the language server in its `workspaceEdit` request.
6870 let transaction = apply.await.unwrap();
6871 assert!(transaction.0.contains_key(&buffer));
6872 buffer.update(cx, |buffer, cx| {
6873 assert_eq!(buffer.text(), "Xa");
6874 buffer.undo(cx);
6875 assert_eq!(buffer.text(), "a");
6876 });
6877 }
6878
6879 #[gpui::test]
6880 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6881 let fs = FakeFs::new(cx.background());
6882 fs.insert_tree(
6883 "/dir",
6884 json!({
6885 "file1": "the old contents",
6886 }),
6887 )
6888 .await;
6889
6890 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6891 let buffer = project
6892 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6893 .await
6894 .unwrap();
6895 buffer
6896 .update(cx, |buffer, cx| {
6897 assert_eq!(buffer.text(), "the old contents");
6898 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6899 buffer.save(cx)
6900 })
6901 .await
6902 .unwrap();
6903
6904 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6905 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6906 }
6907
6908 #[gpui::test]
6909 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6910 let fs = FakeFs::new(cx.background());
6911 fs.insert_tree(
6912 "/dir",
6913 json!({
6914 "file1": "the old contents",
6915 }),
6916 )
6917 .await;
6918
6919 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6920 let buffer = project
6921 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6922 .await
6923 .unwrap();
6924 buffer
6925 .update(cx, |buffer, cx| {
6926 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6927 buffer.save(cx)
6928 })
6929 .await
6930 .unwrap();
6931
6932 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6933 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6934 }
6935
6936 #[gpui::test]
6937 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6938 let fs = FakeFs::new(cx.background());
6939 fs.insert_tree("/dir", json!({})).await;
6940
6941 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6942 let buffer = project.update(cx, |project, cx| {
6943 project.create_buffer("", None, cx).unwrap()
6944 });
6945 buffer.update(cx, |buffer, cx| {
6946 buffer.edit([(0..0, "abc")], cx);
6947 assert!(buffer.is_dirty());
6948 assert!(!buffer.has_conflict());
6949 });
6950 project
6951 .update(cx, |project, cx| {
6952 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6953 })
6954 .await
6955 .unwrap();
6956 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6957 buffer.read_with(cx, |buffer, cx| {
6958 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6959 assert!(!buffer.is_dirty());
6960 assert!(!buffer.has_conflict());
6961 });
6962
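        // Opening the path that the buffer was saved to should return the same buffer.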
6963 let opened_buffer = project
6964 .update(cx, |project, cx| {
6965 project.open_local_buffer("/dir/file1", cx)
6966 })
6967 .await
6968 .unwrap();
6969 assert_eq!(opened_buffer, buffer);
6970 }
6971
6972 #[gpui::test(retries = 5)]
6973 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6974 let dir = temp_tree(json!({
6975 "a": {
6976 "file1": "",
6977 "file2": "",
6978 "file3": "",
6979 },
6980 "b": {
6981 "c": {
6982 "file4": "",
6983 "file5": "",
6984 }
6985 }
6986 }));
6987
6988 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6989 let rpc = project.read_with(cx, |p, _| p.client.clone());
6990
6991 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6992 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6993 async move { buffer.await.unwrap() }
6994 };
6995 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6996 project.read_with(cx, |project, cx| {
6997 let tree = project.worktrees(cx).next().unwrap();
6998 tree.read(cx)
6999 .entry_for_path(path)
7000 .expect(&format!("no entry for path {}", path))
7001 .id
7002 })
7003 };
7004
7005 let buffer2 = buffer_for_path("a/file2", cx).await;
7006 let buffer3 = buffer_for_path("a/file3", cx).await;
7007 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7008 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7009
7010 let file2_id = id_for_path("a/file2", &cx);
7011 let file3_id = id_for_path("a/file3", &cx);
7012 let file4_id = id_for_path("b/c/file4", &cx);
7013
7014 // Create a remote copy of this worktree.
7015 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7016 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7017 let (remote, load_task) = cx.update(|cx| {
7018 Worktree::remote(
7019 1,
7020 1,
7021 initial_snapshot.to_proto(&Default::default(), true),
7022 rpc.clone(),
7023 cx,
7024 )
7025 });
7027 load_task.await;
7028
7029 cx.read(|cx| {
7030 assert!(!buffer2.read(cx).is_dirty());
7031 assert!(!buffer3.read(cx).is_dirty());
7032 assert!(!buffer4.read(cx).is_dirty());
7033 assert!(!buffer5.read(cx).is_dirty());
7034 });
7035
7036 // Rename and delete files and directories.
7037 tree.flush_fs_events(&cx).await;
7038 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7039 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7040 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7041 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7042 tree.flush_fs_events(&cx).await;
7043
7044 let expected_paths = vec![
7045 "a",
7046 "a/file1",
7047 "a/file2.new",
7048 "b",
7049 "d",
7050 "d/file3",
7051 "d/file4",
7052 ];
7053
7054 cx.read(|app| {
7055 assert_eq!(
7056 tree.read(app)
7057 .paths()
7058 .map(|p| p.to_str().unwrap())
7059 .collect::<Vec<_>>(),
7060 expected_paths
7061 );
7062
7063 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7064 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7065 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7066
7067 assert_eq!(
7068 buffer2.read(app).file().unwrap().path().as_ref(),
7069 Path::new("a/file2.new")
7070 );
7071 assert_eq!(
7072 buffer3.read(app).file().unwrap().path().as_ref(),
7073 Path::new("d/file3")
7074 );
7075 assert_eq!(
7076 buffer4.read(app).file().unwrap().path().as_ref(),
7077 Path::new("d/file4")
7078 );
7079 assert_eq!(
7080 buffer5.read(app).file().unwrap().path().as_ref(),
7081 Path::new("b/c/file5")
7082 );
7083
7084 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7085 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7086 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7087 assert!(buffer5.read(app).file().unwrap().is_deleted());
7088 });
7089
7090 // Update the remote worktree. Check that it becomes consistent with the
7091 // local worktree.
7092 remote.update(cx, |remote, cx| {
7093 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7094 &initial_snapshot,
7095 1,
7096 1,
7097 true,
7098 );
7099 remote
7100 .as_remote_mut()
7101 .unwrap()
7102 .snapshot
7103 .apply_remote_update(update_message)
7104 .unwrap();
7105
7106 assert_eq!(
7107 remote
7108 .paths()
7109 .map(|p| p.to_str().unwrap())
7110 .collect::<Vec<_>>(),
7111 expected_paths
7112 );
7113 });
7114 }
7115
7116 #[gpui::test]
7117 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7118 let fs = FakeFs::new(cx.background());
7119 fs.insert_tree(
7120 "/dir",
7121 json!({
7122 "a.txt": "a-contents",
7123 "b.txt": "b-contents",
7124 }),
7125 )
7126 .await;
7127
7128 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7129
7130 // Spawn multiple tasks to open paths, repeating some paths.
7131 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7132 (
7133 p.open_local_buffer("/dir/a.txt", cx),
7134 p.open_local_buffer("/dir/b.txt", cx),
7135 p.open_local_buffer("/dir/a.txt", cx),
7136 )
7137 });
7138
7139 let buffer_a_1 = buffer_a_1.await.unwrap();
7140 let buffer_a_2 = buffer_a_2.await.unwrap();
7141 let buffer_b = buffer_b.await.unwrap();
7142 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7143 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7144
7145 // There is only one buffer per path.
7146 let buffer_a_id = buffer_a_1.id();
7147 assert_eq!(buffer_a_2.id(), buffer_a_id);
7148
7149 // Open the same path again while it is still open.
7150 drop(buffer_a_1);
7151 let buffer_a_3 = project
7152 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7153 .await
7154 .unwrap();
7155
7156 // There's still only one buffer per path.
7157 assert_eq!(buffer_a_3.id(), buffer_a_id);
7158 }
7159
7160 #[gpui::test]
7161 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7162 let fs = FakeFs::new(cx.background());
7163 fs.insert_tree(
7164 "/dir",
7165 json!({
7166 "file1": "abc",
7167 "file2": "def",
7168 "file3": "ghi",
7169 }),
7170 )
7171 .await;
7172
7173 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7174
7175 let buffer1 = project
7176 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7177 .await
7178 .unwrap();
7179 let events = Rc::new(RefCell::new(Vec::new()));
7180
7181 // initially, the buffer isn't dirty.
7182 buffer1.update(cx, |buffer, cx| {
7183 cx.subscribe(&buffer1, {
7184 let events = events.clone();
7185 move |_, _, event, _| match event {
7186 BufferEvent::Operation(_) => {}
7187 _ => events.borrow_mut().push(event.clone()),
7188 }
7189 })
7190 .detach();
7191
7192 assert!(!buffer.is_dirty());
7193 assert!(events.borrow().is_empty());
7194
7195 buffer.edit([(1..2, "")], cx);
7196 });
7197
7198 // after the first edit, the buffer is dirty, and emits a dirtied event.
7199 buffer1.update(cx, |buffer, cx| {
7200 assert!(buffer.text() == "ac");
7201 assert!(buffer.is_dirty());
7202 assert_eq!(
7203 *events.borrow(),
7204 &[language::Event::Edited, language::Event::Dirtied]
7205 );
7206 events.borrow_mut().clear();
7207 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7208 });
7209
7210 // after saving, the buffer is not dirty, and emits a saved event.
7211 buffer1.update(cx, |buffer, cx| {
7212 assert!(!buffer.is_dirty());
7213 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7214 events.borrow_mut().clear();
7215
7216 buffer.edit([(1..1, "B")], cx);
7217 buffer.edit([(2..2, "D")], cx);
7218 });
7219
7220        // after editing again, the buffer is dirty, and emits another dirtied event.
7221 buffer1.update(cx, |buffer, cx| {
7222 assert!(buffer.text() == "aBDc");
7223 assert!(buffer.is_dirty());
7224 assert_eq!(
7225 *events.borrow(),
7226 &[
7227 language::Event::Edited,
7228 language::Event::Dirtied,
7229 language::Event::Edited,
7230 ],
7231 );
7232 events.borrow_mut().clear();
7233
7234 // TODO - currently, after restoring the buffer to its
7235            // previously-saved state, the buffer is still considered dirty.
7236 buffer.edit([(1..3, "")], cx);
7237 assert!(buffer.text() == "ac");
7238 assert!(buffer.is_dirty());
7239 });
7240
7241 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7242
7243 // When a file is deleted, the buffer is considered dirty.
7244 let events = Rc::new(RefCell::new(Vec::new()));
7245 let buffer2 = project
7246 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7247 .await
7248 .unwrap();
7249 buffer2.update(cx, |_, cx| {
7250 cx.subscribe(&buffer2, {
7251 let events = events.clone();
7252 move |_, _, event, _| events.borrow_mut().push(event.clone())
7253 })
7254 .detach();
7255 });
7256
7257 fs.remove_file("/dir/file2".as_ref(), Default::default())
7258 .await
7259 .unwrap();
7260 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7261 assert_eq!(
7262 *events.borrow(),
7263 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7264 );
7265
7266 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7267 let events = Rc::new(RefCell::new(Vec::new()));
7268 let buffer3 = project
7269 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7270 .await
7271 .unwrap();
7272 buffer3.update(cx, |_, cx| {
7273 cx.subscribe(&buffer3, {
7274 let events = events.clone();
7275 move |_, _, event, _| events.borrow_mut().push(event.clone())
7276 })
7277 .detach();
7278 });
7279
7280 buffer3.update(cx, |buffer, cx| {
7281 buffer.edit([(0..0, "x")], cx);
7282 });
7283 events.borrow_mut().clear();
7284 fs.remove_file("/dir/file3".as_ref(), Default::default())
7285 .await
7286 .unwrap();
7287 buffer3
7288 .condition(&cx, |_, _| !events.borrow().is_empty())
7289 .await;
7290 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7291 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7292 }
7293
7294 #[gpui::test]
7295 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7296 let initial_contents = "aaa\nbbbbb\nc\n";
7297 let fs = FakeFs::new(cx.background());
7298 fs.insert_tree(
7299 "/dir",
7300 json!({
7301 "the-file": initial_contents,
7302 }),
7303 )
7304 .await;
7305 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7306 let buffer = project
7307 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7308 .await
7309 .unwrap();
7310
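        // Create some anchors so we can verify their positions after the file is reloaded from disk.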
7311 let anchors = (0..3)
7312 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7313 .collect::<Vec<_>>();
7314
7315 // Change the file on disk, adding two new lines of text, and removing
7316 // one line.
7317 buffer.read_with(cx, |buffer, _| {
7318 assert!(!buffer.is_dirty());
7319 assert!(!buffer.has_conflict());
7320 });
7321 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7322 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7323 .await
7324 .unwrap();
7325
7326 // Because the buffer was not modified, it is reloaded from disk. Its
7327 // contents are edited according to the diff between the old and new
7328 // file contents.
7329 buffer
7330 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7331 .await;
7332
7333 buffer.update(cx, |buffer, _| {
7334 assert_eq!(buffer.text(), new_contents);
7335 assert!(!buffer.is_dirty());
7336 assert!(!buffer.has_conflict());
7337
7338 let anchor_positions = anchors
7339 .iter()
7340 .map(|anchor| anchor.to_point(&*buffer))
7341 .collect::<Vec<_>>();
7342 assert_eq!(
7343 anchor_positions,
7344 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7345 );
7346 });
7347
7348 // Modify the buffer
7349 buffer.update(cx, |buffer, cx| {
7350 buffer.edit([(0..0, " ")], cx);
7351 assert!(buffer.is_dirty());
7352 assert!(!buffer.has_conflict());
7353 });
7354
7355 // Change the file on disk again, adding blank lines to the beginning.
7356 fs.save(
7357 "/dir/the-file".as_ref(),
7358 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7359 )
7360 .await
7361 .unwrap();
7362
7363 // Because the buffer is modified, it doesn't reload from disk, but is
7364 // marked as having a conflict.
7365 buffer
7366 .condition(&cx, |buffer, _| buffer.has_conflict())
7367 .await;
7368 }
7369
7370 #[gpui::test]
7371 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7372 cx.foreground().forbid_parking();
7373
7374 let fs = FakeFs::new(cx.background());
7375 fs.insert_tree(
7376 "/the-dir",
7377 json!({
7378 "a.rs": "
7379 fn foo(mut v: Vec<usize>) {
7380 for x in &v {
7381 v.push(1);
7382 }
7383 }
7384 "
7385 .unindent(),
7386 }),
7387 )
7388 .await;
7389
7390 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7391 let buffer = project
7392 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7393 .await
7394 .unwrap();
7395
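        // Publish diagnostics in which hint entries reference their primary diagnostics via
        // related information, so that they should end up in the same group.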
7396 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7397 let message = lsp::PublishDiagnosticsParams {
7398 uri: buffer_uri.clone(),
7399 diagnostics: vec![
7400 lsp::Diagnostic {
7401 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7402 severity: Some(DiagnosticSeverity::WARNING),
7403 message: "error 1".to_string(),
7404 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7405 location: lsp::Location {
7406 uri: buffer_uri.clone(),
7407 range: lsp::Range::new(
7408 lsp::Position::new(1, 8),
7409 lsp::Position::new(1, 9),
7410 ),
7411 },
7412 message: "error 1 hint 1".to_string(),
7413 }]),
7414 ..Default::default()
7415 },
7416 lsp::Diagnostic {
7417 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7418 severity: Some(DiagnosticSeverity::HINT),
7419 message: "error 1 hint 1".to_string(),
7420 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7421 location: lsp::Location {
7422 uri: buffer_uri.clone(),
7423 range: lsp::Range::new(
7424 lsp::Position::new(1, 8),
7425 lsp::Position::new(1, 9),
7426 ),
7427 },
7428 message: "original diagnostic".to_string(),
7429 }]),
7430 ..Default::default()
7431 },
7432 lsp::Diagnostic {
7433 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7434 severity: Some(DiagnosticSeverity::ERROR),
7435 message: "error 2".to_string(),
7436 related_information: Some(vec![
7437 lsp::DiagnosticRelatedInformation {
7438 location: lsp::Location {
7439 uri: buffer_uri.clone(),
7440 range: lsp::Range::new(
7441 lsp::Position::new(1, 13),
7442 lsp::Position::new(1, 15),
7443 ),
7444 },
7445 message: "error 2 hint 1".to_string(),
7446 },
7447 lsp::DiagnosticRelatedInformation {
7448 location: lsp::Location {
7449 uri: buffer_uri.clone(),
7450 range: lsp::Range::new(
7451 lsp::Position::new(1, 13),
7452 lsp::Position::new(1, 15),
7453 ),
7454 },
7455 message: "error 2 hint 2".to_string(),
7456 },
7457 ]),
7458 ..Default::default()
7459 },
7460 lsp::Diagnostic {
7461 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7462 severity: Some(DiagnosticSeverity::HINT),
7463 message: "error 2 hint 1".to_string(),
7464 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7465 location: lsp::Location {
7466 uri: buffer_uri.clone(),
7467 range: lsp::Range::new(
7468 lsp::Position::new(2, 8),
7469 lsp::Position::new(2, 17),
7470 ),
7471 },
7472 message: "original diagnostic".to_string(),
7473 }]),
7474 ..Default::default()
7475 },
7476 lsp::Diagnostic {
7477 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7478 severity: Some(DiagnosticSeverity::HINT),
7479 message: "error 2 hint 2".to_string(),
7480 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7481 location: lsp::Location {
7482 uri: buffer_uri.clone(),
7483 range: lsp::Range::new(
7484 lsp::Position::new(2, 8),
7485 lsp::Position::new(2, 17),
7486 ),
7487 },
7488 message: "original diagnostic".to_string(),
7489 }]),
7490 ..Default::default()
7491 },
7492 ],
7493 version: None,
7494 };
7495
7496 project
7497 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7498 .unwrap();
7499 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7500
7501 assert_eq!(
7502 buffer
7503 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7504 .collect::<Vec<_>>(),
7505 &[
7506 DiagnosticEntry {
7507 range: Point::new(1, 8)..Point::new(1, 9),
7508 diagnostic: Diagnostic {
7509 severity: DiagnosticSeverity::WARNING,
7510 message: "error 1".to_string(),
7511 group_id: 0,
7512 is_primary: true,
7513 ..Default::default()
7514 }
7515 },
7516 DiagnosticEntry {
7517 range: Point::new(1, 8)..Point::new(1, 9),
7518 diagnostic: Diagnostic {
7519 severity: DiagnosticSeverity::HINT,
7520 message: "error 1 hint 1".to_string(),
7521 group_id: 0,
7522 is_primary: false,
7523 ..Default::default()
7524 }
7525 },
7526 DiagnosticEntry {
7527 range: Point::new(1, 13)..Point::new(1, 15),
7528 diagnostic: Diagnostic {
7529 severity: DiagnosticSeverity::HINT,
7530 message: "error 2 hint 1".to_string(),
7531 group_id: 1,
7532 is_primary: false,
7533 ..Default::default()
7534 }
7535 },
7536 DiagnosticEntry {
7537 range: Point::new(1, 13)..Point::new(1, 15),
7538 diagnostic: Diagnostic {
7539 severity: DiagnosticSeverity::HINT,
7540 message: "error 2 hint 2".to_string(),
7541 group_id: 1,
7542 is_primary: false,
7543 ..Default::default()
7544 }
7545 },
7546 DiagnosticEntry {
7547 range: Point::new(2, 8)..Point::new(2, 17),
7548 diagnostic: Diagnostic {
7549 severity: DiagnosticSeverity::ERROR,
7550 message: "error 2".to_string(),
7551 group_id: 1,
7552 is_primary: true,
7553 ..Default::default()
7554 }
7555 }
7556 ]
7557 );
7558
7559 assert_eq!(
7560 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7561 &[
7562 DiagnosticEntry {
7563 range: Point::new(1, 8)..Point::new(1, 9),
7564 diagnostic: Diagnostic {
7565 severity: DiagnosticSeverity::WARNING,
7566 message: "error 1".to_string(),
7567 group_id: 0,
7568 is_primary: true,
7569 ..Default::default()
7570 }
7571 },
7572 DiagnosticEntry {
7573 range: Point::new(1, 8)..Point::new(1, 9),
7574 diagnostic: Diagnostic {
7575 severity: DiagnosticSeverity::HINT,
7576 message: "error 1 hint 1".to_string(),
7577 group_id: 0,
7578 is_primary: false,
7579 ..Default::default()
7580 }
7581 },
7582 ]
7583 );
7584 assert_eq!(
7585 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7586 &[
7587 DiagnosticEntry {
7588 range: Point::new(1, 13)..Point::new(1, 15),
7589 diagnostic: Diagnostic {
7590 severity: DiagnosticSeverity::HINT,
7591 message: "error 2 hint 1".to_string(),
7592 group_id: 1,
7593 is_primary: false,
7594 ..Default::default()
7595 }
7596 },
7597 DiagnosticEntry {
7598 range: Point::new(1, 13)..Point::new(1, 15),
7599 diagnostic: Diagnostic {
7600 severity: DiagnosticSeverity::HINT,
7601 message: "error 2 hint 2".to_string(),
7602 group_id: 1,
7603 is_primary: false,
7604 ..Default::default()
7605 }
7606 },
7607 DiagnosticEntry {
7608 range: Point::new(2, 8)..Point::new(2, 17),
7609 diagnostic: Diagnostic {
7610 severity: DiagnosticSeverity::ERROR,
7611 message: "error 2".to_string(),
7612 group_id: 1,
7613 is_primary: true,
7614 ..Default::default()
7615 }
7616 }
7617 ]
7618 );
7619 }
7620
7621 #[gpui::test]
7622 async fn test_rename(cx: &mut gpui::TestAppContext) {
7623 cx.foreground().forbid_parking();
7624
7625 let mut language = Language::new(
7626 LanguageConfig {
7627 name: "Rust".into(),
7628 path_suffixes: vec!["rs".to_string()],
7629 ..Default::default()
7630 },
7631 Some(tree_sitter_rust::language()),
7632 );
7633 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7634 capabilities: lsp::ServerCapabilities {
7635 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7636 prepare_provider: Some(true),
7637 work_done_progress_options: Default::default(),
7638 })),
7639 ..Default::default()
7640 },
7641 ..Default::default()
7642 });
7643
7644 let fs = FakeFs::new(cx.background());
7645 fs.insert_tree(
7646 "/dir",
7647 json!({
7648 "one.rs": "const ONE: usize = 1;",
7649 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7650 }),
7651 )
7652 .await;
7653
7654 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7655 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7656 let buffer = project
7657 .update(cx, |project, cx| {
7658 project.open_local_buffer("/dir/one.rs", cx)
7659 })
7660 .await
7661 .unwrap();
7662
7663 let fake_server = fake_servers.next().await.unwrap();
7664
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

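        // Performing the rename applies the server's workspace edit, which in this
        // case renames `ONE` in both one.rs and two.rs.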
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
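        // The returned ProjectTransaction should contain one entry per buffer that
        // the workspace edit touched, with the edits already applied.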
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

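    // Project-wide search should cover files on disk as well as open buffers
    // whose contents have diverged from disk.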
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

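        // Edit four.rs in memory (without saving) so that it now contains two
        // occurrences of `TWO`.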
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

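        // The same query should now also report the matches in the unsaved
        // contents of four.rs.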
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

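        // Runs a project search and flattens the results into a map from file path
        // to the offset ranges of each match, keeping the assertions above terse.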
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}