1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 user_store: ModelHandle<UserStore>,
77 fs: Arc<dyn Fs>,
78 client_state: ProjectClientState,
79 collaborators: HashMap<PeerId, Collaborator>,
80 subscriptions: Vec<client::Subscription>,
81 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
82 shared_buffers: HashMap<PeerId, HashSet<u64>>,
83 loading_buffers: HashMap<
84 ProjectPath,
85 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
86 >,
87 loading_local_worktrees:
88 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
89 opened_buffers: HashMap<u64, OpenBuffer>,
90 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
91 nonce: u128,
92}
93
94#[derive(Error, Debug)]
95pub enum JoinProjectError {
96 #[error("host declined join request")]
97 HostDeclined,
98 #[error("host closed the project")]
99 HostClosedProject,
100 #[error("host went offline")]
101 HostWentOffline,
102 #[error("{0}")]
103 Other(#[from] anyhow::Error),
104}
105
106enum OpenBuffer {
107 Strong(ModelHandle<Buffer>),
108 Weak(WeakModelHandle<Buffer>),
109 Loading(Vec<Operation>),
110}
111
112enum WorktreeHandle {
113 Strong(ModelHandle<Worktree>),
114 Weak(WeakModelHandle<Worktree>),
115}
116
117enum ProjectClientState {
118 Local {
119 is_shared: bool,
120 remote_id_tx: watch::Sender<Option<u64>>,
121 remote_id_rx: watch::Receiver<Option<u64>>,
122 _maintain_remote_id_task: Task<Option<()>>,
123 },
124 Remote {
125 sharing_has_stopped: bool,
126 remote_id: u64,
127 replica_id: ReplicaId,
128 _detect_unshare_task: Task<Option<()>>,
129 },
130}
131
132#[derive(Clone, Debug)]
133pub struct Collaborator {
134 pub user: Arc<User>,
135 pub peer_id: PeerId,
136 pub replica_id: ReplicaId,
137}
138
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub enum Event {
141 ActiveEntryChanged(Option<ProjectEntryId>),
142 WorktreeAdded,
143 WorktreeRemoved(WorktreeId),
144 DiskBasedDiagnosticsStarted,
145 DiskBasedDiagnosticsUpdated,
146 DiskBasedDiagnosticsFinished,
147 DiagnosticsUpdated(ProjectPath),
148 RemoteIdChanged(Option<u64>),
149 CollaboratorLeft(PeerId),
150 ContactRequestedJoin(Arc<User>),
151 ContactCancelledJoinRequest(Arc<User>),
152}
153
154#[derive(Serialize)]
155pub struct LanguageServerStatus {
156 pub name: String,
157 pub pending_work: BTreeMap<String, LanguageServerProgress>,
158 pub pending_diagnostic_updates: isize,
159}
160
161#[derive(Clone, Debug, Serialize)]
162pub struct LanguageServerProgress {
163 pub message: Option<String>,
164 pub percentage: Option<usize>,
165 #[serde(skip_serializing)]
166 pub last_update_at: Instant,
167}
168
169#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
170pub struct ProjectPath {
171 pub worktree_id: WorktreeId,
172 pub path: Arc<Path>,
173}
174
175#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
176pub struct DiagnosticSummary {
177 pub error_count: usize,
178 pub warning_count: usize,
179}
180
181#[derive(Debug)]
182pub struct Location {
183 pub buffer: ModelHandle<Buffer>,
184 pub range: Range<language::Anchor>,
185}
186
187#[derive(Debug)]
188pub struct DocumentHighlight {
189 pub range: Range<language::Anchor>,
190 pub kind: DocumentHighlightKind,
191}
192
193#[derive(Clone, Debug)]
194pub struct Symbol {
195 pub source_worktree_id: WorktreeId,
196 pub worktree_id: WorktreeId,
197 pub language_server_name: LanguageServerName,
198 pub path: PathBuf,
199 pub label: CodeLabel,
200 pub name: String,
201 pub kind: lsp::SymbolKind,
202 pub range: Range<PointUtf16>,
203 pub signature: [u8; 32],
204}
205
206#[derive(Default)]
207pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
208
209impl DiagnosticSummary {
210 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
211 let mut this = Self {
212 error_count: 0,
213 warning_count: 0,
214 };
215
216 for entry in diagnostics {
217 if entry.diagnostic.is_primary {
218 match entry.diagnostic.severity {
219 DiagnosticSeverity::ERROR => this.error_count += 1,
220 DiagnosticSeverity::WARNING => this.warning_count += 1,
221 _ => {}
222 }
223 }
224 }
225
226 this
227 }
228
229 pub fn is_empty(&self) -> bool {
230 self.error_count == 0 && self.warning_count == 0
231 }
232
233 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
234 proto::DiagnosticSummary {
235 path: path.to_string_lossy().to_string(),
236 error_count: self.error_count as u32,
237 warning_count: self.warning_count as u32,
238 }
239 }
240}
241
242#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
243pub struct ProjectEntryId(usize);
244
245impl ProjectEntryId {
246 pub const MAX: Self = Self(usize::MAX);
247
248 pub fn new(counter: &AtomicUsize) -> Self {
249 Self(counter.fetch_add(1, SeqCst))
250 }
251
252 pub fn from_proto(id: u64) -> Self {
253 Self(id as usize)
254 }
255
256 pub fn to_proto(&self) -> u64 {
257 self.0 as u64
258 }
259
260 pub fn to_usize(&self) -> usize {
261 self.0
262 }
263}
264
265impl Project {
266 pub fn init(client: &Arc<Client>) {
267 client.add_model_message_handler(Self::handle_request_join_project);
268 client.add_model_message_handler(Self::handle_add_collaborator);
269 client.add_model_message_handler(Self::handle_buffer_reloaded);
270 client.add_model_message_handler(Self::handle_buffer_saved);
271 client.add_model_message_handler(Self::handle_start_language_server);
272 client.add_model_message_handler(Self::handle_update_language_server);
273 client.add_model_message_handler(Self::handle_remove_collaborator);
274 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
275 client.add_model_message_handler(Self::handle_register_worktree);
276 client.add_model_message_handler(Self::handle_unregister_worktree);
277 client.add_model_message_handler(Self::handle_unregister_project);
278 client.add_model_message_handler(Self::handle_project_unshared);
279 client.add_model_message_handler(Self::handle_update_buffer_file);
280 client.add_model_message_handler(Self::handle_update_buffer);
281 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
282 client.add_model_message_handler(Self::handle_update_worktree);
283 client.add_model_request_handler(Self::handle_create_project_entry);
284 client.add_model_request_handler(Self::handle_rename_project_entry);
285 client.add_model_request_handler(Self::handle_copy_project_entry);
286 client.add_model_request_handler(Self::handle_delete_project_entry);
287 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
288 client.add_model_request_handler(Self::handle_apply_code_action);
289 client.add_model_request_handler(Self::handle_reload_buffers);
290 client.add_model_request_handler(Self::handle_format_buffers);
291 client.add_model_request_handler(Self::handle_get_code_actions);
292 client.add_model_request_handler(Self::handle_get_completions);
293 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
294 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
295 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
296 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
297 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
298 client.add_model_request_handler(Self::handle_search_project);
299 client.add_model_request_handler(Self::handle_get_project_symbols);
300 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
301 client.add_model_request_handler(Self::handle_open_buffer_by_id);
302 client.add_model_request_handler(Self::handle_open_buffer_by_path);
303 client.add_model_request_handler(Self::handle_save_buffer);
304 }
305
306 pub fn local(
307 client: Arc<Client>,
308 user_store: ModelHandle<UserStore>,
309 languages: Arc<LanguageRegistry>,
310 fs: Arc<dyn Fs>,
311 cx: &mut MutableAppContext,
312 ) -> ModelHandle<Self> {
313 cx.add_model(|cx: &mut ModelContext<Self>| {
314 let (remote_id_tx, remote_id_rx) = watch::channel();
315 let _maintain_remote_id_task = cx.spawn_weak({
316 let rpc = client.clone();
317 move |this, mut cx| {
318 async move {
319 let mut status = rpc.status();
320 while let Some(status) = status.next().await {
321 if let Some(this) = this.upgrade(&cx) {
322 if status.is_connected() {
323 this.update(&mut cx, |this, cx| this.register(cx)).await?;
324 } else {
325 this.update(&mut cx, |this, cx| this.unregister(cx));
326 }
327 }
328 }
329 Ok(())
330 }
331 .log_err()
332 }
333 });
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 Self {
337 worktrees: Default::default(),
338 collaborators: Default::default(),
339 opened_buffers: Default::default(),
340 shared_buffers: Default::default(),
341 loading_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 buffer_snapshots: Default::default(),
344 client_state: ProjectClientState::Local {
345 is_shared: false,
346 remote_id_tx,
347 remote_id_rx,
348 _maintain_remote_id_task,
349 },
350 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
351 subscriptions: Vec::new(),
352 active_entry: None,
353 languages,
354 client,
355 user_store,
356 fs,
357 next_entry_id: Default::default(),
358 language_servers: Default::default(),
359 started_language_servers: Default::default(),
360 language_server_statuses: Default::default(),
361 last_workspace_edits_by_language_server: Default::default(),
362 language_server_settings: Default::default(),
363 next_language_server_id: 0,
364 nonce: StdRng::from_entropy().gen(),
365 }
366 })
367 }
368
369 pub async fn remote(
370 remote_id: u64,
371 client: Arc<Client>,
372 user_store: ModelHandle<UserStore>,
373 languages: Arc<LanguageRegistry>,
374 fs: Arc<dyn Fs>,
375 cx: &mut AsyncAppContext,
376 ) -> Result<ModelHandle<Self>, JoinProjectError> {
377 client.authenticate_and_connect(true, &cx).await?;
378
379 let response = client
380 .request(proto::JoinProject {
381 project_id: remote_id,
382 })
383 .await?;
384
385 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
386 proto::join_project_response::Variant::Accept(response) => response,
387 proto::join_project_response::Variant::Decline(decline) => {
388 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
389 Some(proto::join_project_response::decline::Reason::Declined) => {
390 Err(JoinProjectError::HostDeclined)?
391 }
392 Some(proto::join_project_response::decline::Reason::Closed) => {
393 Err(JoinProjectError::HostClosedProject)?
394 }
395 Some(proto::join_project_response::decline::Reason::WentOffline) => {
396 Err(JoinProjectError::HostWentOffline)?
397 }
398 None => Err(anyhow!("missing decline reason"))?,
399 }
400 }
401 };
402
403 let replica_id = response.replica_id as ReplicaId;
404
405 let mut worktrees = Vec::new();
406 for worktree in response.worktrees {
407 let (worktree, load_task) = cx
408 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
409 worktrees.push(worktree);
410 load_task.detach();
411 }
412
413 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
414 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
415 let mut this = Self {
416 worktrees: Vec::new(),
417 loading_buffers: Default::default(),
418 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
419 shared_buffers: Default::default(),
420 loading_local_worktrees: Default::default(),
421 active_entry: None,
422 collaborators: Default::default(),
423 languages,
424 user_store: user_store.clone(),
425 fs,
426 next_entry_id: Default::default(),
427 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
428 client: client.clone(),
429 client_state: ProjectClientState::Remote {
430 sharing_has_stopped: false,
431 remote_id,
432 replica_id,
433 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
434 async move {
435 let mut status = client.status();
436 let is_connected =
437 status.next().await.map_or(false, |s| s.is_connected());
438 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
439 if !is_connected || status.next().await.is_some() {
440 if let Some(this) = this.upgrade(&cx) {
441 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
442 }
443 }
444 Ok(())
445 }
446 .log_err()
447 }),
448 },
449 language_servers: Default::default(),
450 started_language_servers: Default::default(),
451 language_server_settings: Default::default(),
452 language_server_statuses: response
453 .language_servers
454 .into_iter()
455 .map(|server| {
456 (
457 server.id as usize,
458 LanguageServerStatus {
459 name: server.name,
460 pending_work: Default::default(),
461 pending_diagnostic_updates: 0,
462 },
463 )
464 })
465 .collect(),
466 last_workspace_edits_by_language_server: Default::default(),
467 next_language_server_id: 0,
468 opened_buffers: Default::default(),
469 buffer_snapshots: Default::default(),
470 nonce: StdRng::from_entropy().gen(),
471 };
472 for worktree in worktrees {
473 this.add_worktree(&worktree, cx);
474 }
475 this
476 });
477
478 let user_ids = response
479 .collaborators
480 .iter()
481 .map(|peer| peer.user_id)
482 .collect();
483 user_store
484 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
485 .await?;
486 let mut collaborators = HashMap::default();
487 for message in response.collaborators {
488 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
489 collaborators.insert(collaborator.peer_id, collaborator);
490 }
491
492 this.update(cx, |this, _| {
493 this.collaborators = collaborators;
494 });
495
496 Ok(this)
497 }
498
499 #[cfg(any(test, feature = "test-support"))]
500 pub async fn test(
501 fs: Arc<dyn Fs>,
502 root_paths: impl IntoIterator<Item = &Path>,
503 cx: &mut gpui::TestAppContext,
504 ) -> ModelHandle<Project> {
505 let languages = Arc::new(LanguageRegistry::test());
506 let http_client = client::test::FakeHttpClient::with_404_response();
507 let client = client::Client::new(http_client.clone());
508 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
509 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
510 for path in root_paths {
511 let (tree, _) = project
512 .update(cx, |project, cx| {
513 project.find_or_create_local_worktree(path, true, cx)
514 })
515 .await
516 .unwrap();
517 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
518 .await;
519 }
520 project
521 }
522
523 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
524 self.opened_buffers
525 .get(&remote_id)
526 .and_then(|buffer| buffer.upgrade(cx))
527 }
528
529 pub fn languages(&self) -> &Arc<LanguageRegistry> {
530 &self.languages
531 }
532
533 pub fn client(&self) -> Arc<Client> {
534 self.client.clone()
535 }
536
537 pub fn user_store(&self) -> ModelHandle<UserStore> {
538 self.user_store.clone()
539 }
540
541 #[cfg(any(test, feature = "test-support"))]
542 pub fn check_invariants(&self, cx: &AppContext) {
543 if self.is_local() {
544 let mut worktree_root_paths = HashMap::default();
545 for worktree in self.worktrees(cx) {
546 let worktree = worktree.read(cx);
547 let abs_path = worktree.as_local().unwrap().abs_path().clone();
548 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
549 assert_eq!(
550 prev_worktree_id,
551 None,
552 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
553 abs_path,
554 worktree.id(),
555 prev_worktree_id
556 )
557 }
558 } else {
559 let replica_id = self.replica_id();
560 for buffer in self.opened_buffers.values() {
561 if let Some(buffer) = buffer.upgrade(cx) {
562 let buffer = buffer.read(cx);
563 assert_eq!(
564 buffer.deferred_ops_len(),
565 0,
566 "replica {}, buffer {} has deferred operations",
567 replica_id,
568 buffer.remote_id()
569 );
570 }
571 }
572 }
573 }
574
575 #[cfg(any(test, feature = "test-support"))]
576 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
577 let path = path.into();
578 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
579 self.opened_buffers.iter().any(|(_, buffer)| {
580 if let Some(buffer) = buffer.upgrade(cx) {
581 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
582 if file.worktree == worktree && file.path() == &path.path {
583 return true;
584 }
585 }
586 }
587 false
588 })
589 } else {
590 false
591 }
592 }
593
594 pub fn fs(&self) -> &Arc<dyn Fs> {
595 &self.fs
596 }
597
598 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
599 self.unshared(cx);
600 for worktree in &self.worktrees {
601 if let Some(worktree) = worktree.upgrade(cx) {
602 worktree.update(cx, |worktree, _| {
603 worktree.as_local_mut().unwrap().unregister();
604 });
605 }
606 }
607
608 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
609 *remote_id_tx.borrow_mut() = None;
610 }
611
612 self.subscriptions.clear();
613 }
614
615 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
616 self.unregister(cx);
617
618 let response = self.client.request(proto::RegisterProject {});
619 cx.spawn(|this, mut cx| async move {
620 let remote_id = response.await?.project_id;
621
622 let mut registrations = Vec::new();
623 this.update(&mut cx, |this, cx| {
624 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
625 *remote_id_tx.borrow_mut() = Some(remote_id);
626 }
627
628 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
629
630 this.subscriptions
631 .push(this.client.add_model_for_remote_entity(remote_id, cx));
632
633 for worktree in &this.worktrees {
634 if let Some(worktree) = worktree.upgrade(cx) {
635 registrations.push(worktree.update(cx, |worktree, cx| {
636 let worktree = worktree.as_local_mut().unwrap();
637 worktree.register(remote_id, cx)
638 }));
639 }
640 }
641 });
642
643 futures::future::try_join_all(registrations).await?;
644 Ok(())
645 })
646 }
647
648 pub fn remote_id(&self) -> Option<u64> {
649 match &self.client_state {
650 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
651 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
652 }
653 }
654
655 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
656 let mut id = None;
657 let mut watch = None;
658 match &self.client_state {
659 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
660 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
661 }
662
663 async move {
664 if let Some(id) = id {
665 return id;
666 }
667 let mut watch = watch.unwrap();
668 loop {
669 let id = *watch.borrow();
670 if let Some(id) = id {
671 return id;
672 }
673 watch.next().await;
674 }
675 }
676 }
677
678 pub fn shared_remote_id(&self) -> Option<u64> {
679 match &self.client_state {
680 ProjectClientState::Local {
681 remote_id_rx,
682 is_shared,
683 ..
684 } => {
685 if *is_shared {
686 *remote_id_rx.borrow()
687 } else {
688 None
689 }
690 }
691 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
692 }
693 }
694
695 pub fn replica_id(&self) -> ReplicaId {
696 match &self.client_state {
697 ProjectClientState::Local { .. } => 0,
698 ProjectClientState::Remote { replica_id, .. } => *replica_id,
699 }
700 }
701
702 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
703 &self.collaborators
704 }
705
706 pub fn worktrees<'a>(
707 &'a self,
708 cx: &'a AppContext,
709 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
710 self.worktrees
711 .iter()
712 .filter_map(move |worktree| worktree.upgrade(cx))
713 }
714
715 pub fn visible_worktrees<'a>(
716 &'a self,
717 cx: &'a AppContext,
718 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
719 self.worktrees.iter().filter_map(|worktree| {
720 worktree.upgrade(cx).and_then(|worktree| {
721 if worktree.read(cx).is_visible() {
722 Some(worktree)
723 } else {
724 None
725 }
726 })
727 })
728 }
729
730 pub fn worktree_for_id(
731 &self,
732 id: WorktreeId,
733 cx: &AppContext,
734 ) -> Option<ModelHandle<Worktree>> {
735 self.worktrees(cx)
736 .find(|worktree| worktree.read(cx).id() == id)
737 }
738
739 pub fn worktree_for_entry(
740 &self,
741 entry_id: ProjectEntryId,
742 cx: &AppContext,
743 ) -> Option<ModelHandle<Worktree>> {
744 self.worktrees(cx)
745 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
746 }
747
748 pub fn worktree_id_for_entry(
749 &self,
750 entry_id: ProjectEntryId,
751 cx: &AppContext,
752 ) -> Option<WorktreeId> {
753 self.worktree_for_entry(entry_id, cx)
754 .map(|worktree| worktree.read(cx).id())
755 }
756
757 pub fn create_entry(
758 &mut self,
759 project_path: impl Into<ProjectPath>,
760 is_directory: bool,
761 cx: &mut ModelContext<Self>,
762 ) -> Option<Task<Result<Entry>>> {
763 let project_path = project_path.into();
764 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
765 if self.is_local() {
766 Some(worktree.update(cx, |worktree, cx| {
767 worktree
768 .as_local_mut()
769 .unwrap()
770 .create_entry(project_path.path, is_directory, cx)
771 }))
772 } else {
773 let client = self.client.clone();
774 let project_id = self.remote_id().unwrap();
775 Some(cx.spawn_weak(|_, mut cx| async move {
776 let response = client
777 .request(proto::CreateProjectEntry {
778 worktree_id: project_path.worktree_id.to_proto(),
779 project_id,
780 path: project_path.path.as_os_str().as_bytes().to_vec(),
781 is_directory,
782 })
783 .await?;
784 let entry = response
785 .entry
786 .ok_or_else(|| anyhow!("missing entry in response"))?;
787 worktree
788 .update(&mut cx, |worktree, cx| {
789 worktree.as_remote().unwrap().insert_entry(
790 entry,
791 response.worktree_scan_id as usize,
792 cx,
793 )
794 })
795 .await
796 }))
797 }
798 }
799
800 pub fn copy_entry(
801 &mut self,
802 entry_id: ProjectEntryId,
803 new_path: impl Into<Arc<Path>>,
804 cx: &mut ModelContext<Self>,
805 ) -> Option<Task<Result<Entry>>> {
806 let worktree = self.worktree_for_entry(entry_id, cx)?;
807 let new_path = new_path.into();
808 if self.is_local() {
809 worktree.update(cx, |worktree, cx| {
810 worktree
811 .as_local_mut()
812 .unwrap()
813 .copy_entry(entry_id, new_path, cx)
814 })
815 } else {
816 let client = self.client.clone();
817 let project_id = self.remote_id().unwrap();
818
819 Some(cx.spawn_weak(|_, mut cx| async move {
820 let response = client
821 .request(proto::CopyProjectEntry {
822 project_id,
823 entry_id: entry_id.to_proto(),
824 new_path: new_path.as_os_str().as_bytes().to_vec(),
825 })
826 .await?;
827 let entry = response
828 .entry
829 .ok_or_else(|| anyhow!("missing entry in response"))?;
830 worktree
831 .update(&mut cx, |worktree, cx| {
832 worktree.as_remote().unwrap().insert_entry(
833 entry,
834 response.worktree_scan_id as usize,
835 cx,
836 )
837 })
838 .await
839 }))
840 }
841 }
842
843 pub fn rename_entry(
844 &mut self,
845 entry_id: ProjectEntryId,
846 new_path: impl Into<Arc<Path>>,
847 cx: &mut ModelContext<Self>,
848 ) -> Option<Task<Result<Entry>>> {
849 let worktree = self.worktree_for_entry(entry_id, cx)?;
850 let new_path = new_path.into();
851 if self.is_local() {
852 worktree.update(cx, |worktree, cx| {
853 worktree
854 .as_local_mut()
855 .unwrap()
856 .rename_entry(entry_id, new_path, cx)
857 })
858 } else {
859 let client = self.client.clone();
860 let project_id = self.remote_id().unwrap();
861
862 Some(cx.spawn_weak(|_, mut cx| async move {
863 let response = client
864 .request(proto::RenameProjectEntry {
865 project_id,
866 entry_id: entry_id.to_proto(),
867 new_path: new_path.as_os_str().as_bytes().to_vec(),
868 })
869 .await?;
870 let entry = response
871 .entry
872 .ok_or_else(|| anyhow!("missing entry in response"))?;
873 worktree
874 .update(&mut cx, |worktree, cx| {
875 worktree.as_remote().unwrap().insert_entry(
876 entry,
877 response.worktree_scan_id as usize,
878 cx,
879 )
880 })
881 .await
882 }))
883 }
884 }
885
886 pub fn delete_entry(
887 &mut self,
888 entry_id: ProjectEntryId,
889 cx: &mut ModelContext<Self>,
890 ) -> Option<Task<Result<()>>> {
891 let worktree = self.worktree_for_entry(entry_id, cx)?;
892 if self.is_local() {
893 worktree.update(cx, |worktree, cx| {
894 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
895 })
896 } else {
897 let client = self.client.clone();
898 let project_id = self.remote_id().unwrap();
899 Some(cx.spawn_weak(|_, mut cx| async move {
900 let response = client
901 .request(proto::DeleteProjectEntry {
902 project_id,
903 entry_id: entry_id.to_proto(),
904 })
905 .await?;
906 worktree
907 .update(&mut cx, move |worktree, cx| {
908 worktree.as_remote().unwrap().delete_entry(
909 entry_id,
910 response.worktree_scan_id as usize,
911 cx,
912 )
913 })
914 .await
915 }))
916 }
917 }
918
919 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
920 let project_id;
921 if let ProjectClientState::Local {
922 remote_id_rx,
923 is_shared,
924 ..
925 } = &mut self.client_state
926 {
927 if *is_shared {
928 return Task::ready(Ok(()));
929 }
930 *is_shared = true;
931 if let Some(id) = *remote_id_rx.borrow() {
932 project_id = id;
933 } else {
934 return Task::ready(Err(anyhow!("project hasn't been registered")));
935 }
936 } else {
937 return Task::ready(Err(anyhow!("can't share a remote project")));
938 };
939
940 for open_buffer in self.opened_buffers.values_mut() {
941 match open_buffer {
942 OpenBuffer::Strong(_) => {}
943 OpenBuffer::Weak(buffer) => {
944 if let Some(buffer) = buffer.upgrade(cx) {
945 *open_buffer = OpenBuffer::Strong(buffer);
946 }
947 }
948 OpenBuffer::Loading(_) => unreachable!(),
949 }
950 }
951
952 for worktree_handle in self.worktrees.iter_mut() {
953 match worktree_handle {
954 WorktreeHandle::Strong(_) => {}
955 WorktreeHandle::Weak(worktree) => {
956 if let Some(worktree) = worktree.upgrade(cx) {
957 *worktree_handle = WorktreeHandle::Strong(worktree);
958 }
959 }
960 }
961 }
962
963 let mut tasks = Vec::new();
964 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
965 worktree.update(cx, |worktree, cx| {
966 let worktree = worktree.as_local_mut().unwrap();
967 tasks.push(worktree.share(project_id, cx));
968 });
969 }
970
971 cx.spawn(|this, mut cx| async move {
972 for task in tasks {
973 task.await?;
974 }
975 this.update(&mut cx, |_, cx| cx.notify());
976 Ok(())
977 })
978 }
979
980 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
981 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
982 if !*is_shared {
983 return;
984 }
985
986 *is_shared = false;
987 self.collaborators.clear();
988 self.shared_buffers.clear();
989 for worktree_handle in self.worktrees.iter_mut() {
990 if let WorktreeHandle::Strong(worktree) = worktree_handle {
991 let is_visible = worktree.update(cx, |worktree, _| {
992 worktree.as_local_mut().unwrap().unshare();
993 worktree.is_visible()
994 });
995 if !is_visible {
996 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
997 }
998 }
999 }
1000
1001 for open_buffer in self.opened_buffers.values_mut() {
1002 match open_buffer {
1003 OpenBuffer::Strong(buffer) => {
1004 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1005 }
1006 _ => {}
1007 }
1008 }
1009
1010 cx.notify();
1011 } else {
1012 log::error!("attempted to unshare a remote project");
1013 }
1014 }
1015
1016 pub fn respond_to_join_request(
1017 &mut self,
1018 requester_id: u64,
1019 allow: bool,
1020 cx: &mut ModelContext<Self>,
1021 ) {
1022 if let Some(project_id) = self.remote_id() {
1023 let share = self.share(cx);
1024 let client = self.client.clone();
1025 cx.foreground()
1026 .spawn(async move {
1027 share.await?;
1028 client.send(proto::RespondToJoinProjectRequest {
1029 requester_id,
1030 project_id,
1031 allow,
1032 })
1033 })
1034 .detach_and_log_err(cx);
1035 }
1036 }
1037
1038 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1039 if let ProjectClientState::Remote {
1040 sharing_has_stopped,
1041 ..
1042 } = &mut self.client_state
1043 {
1044 *sharing_has_stopped = true;
1045 self.collaborators.clear();
1046 cx.notify();
1047 }
1048 }
1049
1050 pub fn is_read_only(&self) -> bool {
1051 match &self.client_state {
1052 ProjectClientState::Local { .. } => false,
1053 ProjectClientState::Remote {
1054 sharing_has_stopped,
1055 ..
1056 } => *sharing_has_stopped,
1057 }
1058 }
1059
1060 pub fn is_local(&self) -> bool {
1061 match &self.client_state {
1062 ProjectClientState::Local { .. } => true,
1063 ProjectClientState::Remote { .. } => false,
1064 }
1065 }
1066
1067 pub fn is_remote(&self) -> bool {
1068 !self.is_local()
1069 }
1070
1071 pub fn create_buffer(
1072 &mut self,
1073 text: &str,
1074 language: Option<Arc<Language>>,
1075 cx: &mut ModelContext<Self>,
1076 ) -> Result<ModelHandle<Buffer>> {
1077 if self.is_remote() {
1078 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1079 }
1080
1081 let buffer = cx.add_model(|cx| {
1082 Buffer::new(self.replica_id(), text, cx)
1083 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1084 });
1085 self.register_buffer(&buffer, cx)?;
1086 Ok(buffer)
1087 }
1088
1089 pub fn open_path(
1090 &mut self,
1091 path: impl Into<ProjectPath>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1094 let task = self.open_buffer(path, cx);
1095 cx.spawn_weak(|_, cx| async move {
1096 let buffer = task.await?;
1097 let project_entry_id = buffer
1098 .read_with(&cx, |buffer, cx| {
1099 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1100 })
1101 .ok_or_else(|| anyhow!("no project entry"))?;
1102 Ok((project_entry_id, buffer.into()))
1103 })
1104 }
1105
1106 pub fn open_local_buffer(
1107 &mut self,
1108 abs_path: impl AsRef<Path>,
1109 cx: &mut ModelContext<Self>,
1110 ) -> Task<Result<ModelHandle<Buffer>>> {
1111 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1112 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1113 } else {
1114 Task::ready(Err(anyhow!("no such path")))
1115 }
1116 }
1117
1118 pub fn open_buffer(
1119 &mut self,
1120 path: impl Into<ProjectPath>,
1121 cx: &mut ModelContext<Self>,
1122 ) -> Task<Result<ModelHandle<Buffer>>> {
1123 let project_path = path.into();
1124 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1125 worktree
1126 } else {
1127 return Task::ready(Err(anyhow!("no such worktree")));
1128 };
1129
1130 // If there is already a buffer for the given path, then return it.
1131 let existing_buffer = self.get_open_buffer(&project_path, cx);
1132 if let Some(existing_buffer) = existing_buffer {
1133 return Task::ready(Ok(existing_buffer));
1134 }
1135
1136 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1137 // If the given path is already being loaded, then wait for that existing
1138 // task to complete and return the same buffer.
1139 hash_map::Entry::Occupied(e) => e.get().clone(),
1140
1141 // Otherwise, record the fact that this path is now being loaded.
1142 hash_map::Entry::Vacant(entry) => {
1143 let (mut tx, rx) = postage::watch::channel();
1144 entry.insert(rx.clone());
1145
1146 let load_buffer = if worktree.read(cx).is_local() {
1147 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1148 } else {
1149 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1150 };
1151
1152 cx.spawn(move |this, mut cx| async move {
1153 let load_result = load_buffer.await;
1154 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1155 // Record the fact that the buffer is no longer loading.
1156 this.loading_buffers.remove(&project_path);
1157 let buffer = load_result.map_err(Arc::new)?;
1158 Ok(buffer)
1159 }));
1160 })
1161 .detach();
1162 rx
1163 }
1164 };
1165
1166 cx.foreground().spawn(async move {
1167 loop {
1168 if let Some(result) = loading_watch.borrow().as_ref() {
1169 match result {
1170 Ok(buffer) => return Ok(buffer.clone()),
1171 Err(error) => return Err(anyhow!("{}", error)),
1172 }
1173 }
1174 loading_watch.next().await;
1175 }
1176 })
1177 }
1178
1179 fn open_local_buffer_internal(
1180 &mut self,
1181 path: &Arc<Path>,
1182 worktree: &ModelHandle<Worktree>,
1183 cx: &mut ModelContext<Self>,
1184 ) -> Task<Result<ModelHandle<Buffer>>> {
1185 let load_buffer = worktree.update(cx, |worktree, cx| {
1186 let worktree = worktree.as_local_mut().unwrap();
1187 worktree.load_buffer(path, cx)
1188 });
1189 cx.spawn(|this, mut cx| async move {
1190 let buffer = load_buffer.await?;
1191 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1192 Ok(buffer)
1193 })
1194 }
1195
1196 fn open_remote_buffer_internal(
1197 &mut self,
1198 path: &Arc<Path>,
1199 worktree: &ModelHandle<Worktree>,
1200 cx: &mut ModelContext<Self>,
1201 ) -> Task<Result<ModelHandle<Buffer>>> {
1202 let rpc = self.client.clone();
1203 let project_id = self.remote_id().unwrap();
1204 let remote_worktree_id = worktree.read(cx).id();
1205 let path = path.clone();
1206 let path_string = path.to_string_lossy().to_string();
1207 cx.spawn(|this, mut cx| async move {
1208 let response = rpc
1209 .request(proto::OpenBufferByPath {
1210 project_id,
1211 worktree_id: remote_worktree_id.to_proto(),
1212 path: path_string,
1213 })
1214 .await?;
1215 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1216 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1217 .await
1218 })
1219 }
1220
1221 fn open_local_buffer_via_lsp(
1222 &mut self,
1223 abs_path: lsp::Url,
1224 lsp_adapter: Arc<dyn LspAdapter>,
1225 lsp_server: Arc<LanguageServer>,
1226 cx: &mut ModelContext<Self>,
1227 ) -> Task<Result<ModelHandle<Buffer>>> {
1228 cx.spawn(|this, mut cx| async move {
1229 let abs_path = abs_path
1230 .to_file_path()
1231 .map_err(|_| anyhow!("can't convert URI to path"))?;
1232 let (worktree, relative_path) = if let Some(result) =
1233 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1234 {
1235 result
1236 } else {
1237 let worktree = this
1238 .update(&mut cx, |this, cx| {
1239 this.create_local_worktree(&abs_path, false, cx)
1240 })
1241 .await?;
1242 this.update(&mut cx, |this, cx| {
1243 this.language_servers.insert(
1244 (worktree.read(cx).id(), lsp_adapter.name()),
1245 (lsp_adapter, lsp_server),
1246 );
1247 });
1248 (worktree, PathBuf::new())
1249 };
1250
1251 let project_path = ProjectPath {
1252 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1253 path: relative_path.into(),
1254 };
1255 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1256 .await
1257 })
1258 }
1259
1260 pub fn open_buffer_by_id(
1261 &mut self,
1262 id: u64,
1263 cx: &mut ModelContext<Self>,
1264 ) -> Task<Result<ModelHandle<Buffer>>> {
1265 if let Some(buffer) = self.buffer_for_id(id, cx) {
1266 Task::ready(Ok(buffer))
1267 } else if self.is_local() {
1268 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1269 } else if let Some(project_id) = self.remote_id() {
1270 let request = self
1271 .client
1272 .request(proto::OpenBufferById { project_id, id });
1273 cx.spawn(|this, mut cx| async move {
1274 let buffer = request
1275 .await?
1276 .buffer
1277 .ok_or_else(|| anyhow!("invalid buffer"))?;
1278 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1279 .await
1280 })
1281 } else {
1282 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1283 }
1284 }
1285
1286 pub fn save_buffer_as(
1287 &mut self,
1288 buffer: ModelHandle<Buffer>,
1289 abs_path: PathBuf,
1290 cx: &mut ModelContext<Project>,
1291 ) -> Task<Result<()>> {
1292 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1293 let old_path =
1294 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1295 cx.spawn(|this, mut cx| async move {
1296 if let Some(old_path) = old_path {
1297 this.update(&mut cx, |this, cx| {
1298 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1299 });
1300 }
1301 let (worktree, path) = worktree_task.await?;
1302 worktree
1303 .update(&mut cx, |worktree, cx| {
1304 worktree
1305 .as_local_mut()
1306 .unwrap()
1307 .save_buffer_as(buffer.clone(), path, cx)
1308 })
1309 .await?;
1310 this.update(&mut cx, |this, cx| {
1311 this.assign_language_to_buffer(&buffer, cx);
1312 this.register_buffer_with_language_server(&buffer, cx);
1313 });
1314 Ok(())
1315 })
1316 }
1317
1318 pub fn get_open_buffer(
1319 &mut self,
1320 path: &ProjectPath,
1321 cx: &mut ModelContext<Self>,
1322 ) -> Option<ModelHandle<Buffer>> {
1323 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1324 self.opened_buffers.values().find_map(|buffer| {
1325 let buffer = buffer.upgrade(cx)?;
1326 let file = File::from_dyn(buffer.read(cx).file())?;
1327 if file.worktree == worktree && file.path() == &path.path {
1328 Some(buffer)
1329 } else {
1330 None
1331 }
1332 })
1333 }
1334
1335 fn register_buffer(
1336 &mut self,
1337 buffer: &ModelHandle<Buffer>,
1338 cx: &mut ModelContext<Self>,
1339 ) -> Result<()> {
1340 let remote_id = buffer.read(cx).remote_id();
1341 let open_buffer = if self.is_remote() || self.is_shared() {
1342 OpenBuffer::Strong(buffer.clone())
1343 } else {
1344 OpenBuffer::Weak(buffer.downgrade())
1345 };
1346
1347 match self.opened_buffers.insert(remote_id, open_buffer) {
1348 None => {}
1349 Some(OpenBuffer::Loading(operations)) => {
1350 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1351 }
1352 Some(OpenBuffer::Weak(existing_handle)) => {
1353 if existing_handle.upgrade(cx).is_some() {
1354 Err(anyhow!(
1355 "already registered buffer with remote id {}",
1356 remote_id
1357 ))?
1358 }
1359 }
1360 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1361 "already registered buffer with remote id {}",
1362 remote_id
1363 ))?,
1364 }
1365 cx.subscribe(buffer, |this, buffer, event, cx| {
1366 this.on_buffer_event(buffer, event, cx);
1367 })
1368 .detach();
1369
1370 self.assign_language_to_buffer(buffer, cx);
1371 self.register_buffer_with_language_server(buffer, cx);
1372 cx.observe_release(buffer, |this, buffer, cx| {
1373 if let Some(file) = File::from_dyn(buffer.file()) {
1374 if file.is_local() {
1375 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1376 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1377 server
1378 .notify::<lsp::notification::DidCloseTextDocument>(
1379 lsp::DidCloseTextDocumentParams {
1380 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1381 },
1382 )
1383 .log_err();
1384 }
1385 }
1386 }
1387 })
1388 .detach();
1389
1390 Ok(())
1391 }
1392
1393 fn register_buffer_with_language_server(
1394 &mut self,
1395 buffer_handle: &ModelHandle<Buffer>,
1396 cx: &mut ModelContext<Self>,
1397 ) {
1398 let buffer = buffer_handle.read(cx);
1399 let buffer_id = buffer.remote_id();
1400 if let Some(file) = File::from_dyn(buffer.file()) {
1401 if file.is_local() {
1402 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1403 let initial_snapshot = buffer.text_snapshot();
1404
1405 let mut language_server = None;
1406 let mut language_id = None;
1407 if let Some(language) = buffer.language() {
1408 let worktree_id = file.worktree_id(cx);
1409 if let Some(adapter) = language.lsp_adapter() {
1410 language_id = adapter.id_for_language(language.name().as_ref());
1411 language_server = self
1412 .language_servers
1413 .get(&(worktree_id, adapter.name()))
1414 .cloned();
1415 }
1416 }
1417
1418 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1419 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1420 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1421 .log_err();
1422 }
1423 }
1424
1425 if let Some((_, server)) = language_server {
1426 server
1427 .notify::<lsp::notification::DidOpenTextDocument>(
1428 lsp::DidOpenTextDocumentParams {
1429 text_document: lsp::TextDocumentItem::new(
1430 uri,
1431 language_id.unwrap_or_default(),
1432 0,
1433 initial_snapshot.text(),
1434 ),
1435 }
1436 .clone(),
1437 )
1438 .log_err();
1439 buffer_handle.update(cx, |buffer, cx| {
1440 buffer.set_completion_triggers(
1441 server
1442 .capabilities()
1443 .completion_provider
1444 .as_ref()
1445 .and_then(|provider| provider.trigger_characters.clone())
1446 .unwrap_or(Vec::new()),
1447 cx,
1448 )
1449 });
1450 self.buffer_snapshots
1451 .insert(buffer_id, vec![(0, initial_snapshot)]);
1452 }
1453 }
1454 }
1455 }
1456
1457 fn unregister_buffer_from_language_server(
1458 &mut self,
1459 buffer: &ModelHandle<Buffer>,
1460 old_path: PathBuf,
1461 cx: &mut ModelContext<Self>,
1462 ) {
1463 buffer.update(cx, |buffer, cx| {
1464 buffer.update_diagnostics(Default::default(), cx);
1465 self.buffer_snapshots.remove(&buffer.remote_id());
1466 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1467 language_server
1468 .notify::<lsp::notification::DidCloseTextDocument>(
1469 lsp::DidCloseTextDocumentParams {
1470 text_document: lsp::TextDocumentIdentifier::new(
1471 lsp::Url::from_file_path(old_path).unwrap(),
1472 ),
1473 },
1474 )
1475 .log_err();
1476 }
1477 });
1478 }
1479
1480 fn on_buffer_event(
1481 &mut self,
1482 buffer: ModelHandle<Buffer>,
1483 event: &BufferEvent,
1484 cx: &mut ModelContext<Self>,
1485 ) -> Option<()> {
1486 match event {
1487 BufferEvent::Operation(operation) => {
1488 if let Some(project_id) = self.shared_remote_id() {
1489 let request = self.client.request(proto::UpdateBuffer {
1490 project_id,
1491 buffer_id: buffer.read(cx).remote_id(),
1492 operations: vec![language::proto::serialize_operation(&operation)],
1493 });
1494 cx.background().spawn(request).detach_and_log_err(cx);
1495 }
1496 }
1497 BufferEvent::Edited { .. } => {
1498 let (_, language_server) = self
1499 .language_server_for_buffer(buffer.read(cx), cx)?
1500 .clone();
1501 let buffer = buffer.read(cx);
1502 let file = File::from_dyn(buffer.file())?;
1503 let abs_path = file.as_local()?.abs_path(cx);
1504 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1505 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1506 let (version, prev_snapshot) = buffer_snapshots.last()?;
1507 let next_snapshot = buffer.text_snapshot();
1508 let next_version = version + 1;
1509
1510 let content_changes = buffer
1511 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1512 .map(|edit| {
1513 let edit_start = edit.new.start.0;
1514 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1515 let new_text = next_snapshot
1516 .text_for_range(edit.new.start.1..edit.new.end.1)
1517 .collect();
1518 lsp::TextDocumentContentChangeEvent {
1519 range: Some(lsp::Range::new(
1520 point_to_lsp(edit_start),
1521 point_to_lsp(edit_end),
1522 )),
1523 range_length: None,
1524 text: new_text,
1525 }
1526 })
1527 .collect();
1528
1529 buffer_snapshots.push((next_version, next_snapshot));
1530
1531 language_server
1532 .notify::<lsp::notification::DidChangeTextDocument>(
1533 lsp::DidChangeTextDocumentParams {
1534 text_document: lsp::VersionedTextDocumentIdentifier::new(
1535 uri,
1536 next_version,
1537 ),
1538 content_changes,
1539 },
1540 )
1541 .log_err();
1542 }
1543 BufferEvent::Saved => {
1544 let file = File::from_dyn(buffer.read(cx).file())?;
1545 let worktree_id = file.worktree_id(cx);
1546 let abs_path = file.as_local()?.abs_path(cx);
1547 let text_document = lsp::TextDocumentIdentifier {
1548 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1549 };
1550
1551 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1552 server
1553 .notify::<lsp::notification::DidSaveTextDocument>(
1554 lsp::DidSaveTextDocumentParams {
1555 text_document: text_document.clone(),
1556 text: None,
1557 },
1558 )
1559 .log_err();
1560 }
1561 }
1562 _ => {}
1563 }
1564
1565 None
1566 }
1567
1568 fn language_servers_for_worktree(
1569 &self,
1570 worktree_id: WorktreeId,
1571 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1572 self.language_servers.iter().filter_map(
1573 move |((language_server_worktree_id, _), server)| {
1574 if *language_server_worktree_id == worktree_id {
1575 Some(server)
1576 } else {
1577 None
1578 }
1579 },
1580 )
1581 }
1582
1583 fn assign_language_to_buffer(
1584 &mut self,
1585 buffer: &ModelHandle<Buffer>,
1586 cx: &mut ModelContext<Self>,
1587 ) -> Option<()> {
1588 // If the buffer has a language, set it and start the language server if we haven't already.
1589 let full_path = buffer.read(cx).file()?.full_path(cx);
1590 let language = self.languages.select_language(&full_path)?;
1591 buffer.update(cx, |buffer, cx| {
1592 buffer.set_language(Some(language.clone()), cx);
1593 });
1594
1595 let file = File::from_dyn(buffer.read(cx).file())?;
1596 let worktree = file.worktree.read(cx).as_local()?;
1597 let worktree_id = worktree.id();
1598 let worktree_abs_path = worktree.abs_path().clone();
1599 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1600
1601 None
1602 }
1603
1604 fn start_language_server(
1605 &mut self,
1606 worktree_id: WorktreeId,
1607 worktree_path: Arc<Path>,
1608 language: Arc<Language>,
1609 cx: &mut ModelContext<Self>,
1610 ) {
1611 let adapter = if let Some(adapter) = language.lsp_adapter() {
1612 adapter
1613 } else {
1614 return;
1615 };
1616 let key = (worktree_id, adapter.name());
1617 self.started_language_servers
1618 .entry(key.clone())
1619 .or_insert_with(|| {
1620 let server_id = post_inc(&mut self.next_language_server_id);
1621 let language_server = self.languages.start_language_server(
1622 server_id,
1623 language.clone(),
1624 worktree_path,
1625 self.client.http_client(),
1626 cx,
1627 );
1628 cx.spawn_weak(|this, mut cx| async move {
1629 let language_server = language_server?.await.log_err()?;
1630 let language_server = language_server
1631 .initialize(adapter.initialization_options())
1632 .await
1633 .log_err()?;
1634 let this = this.upgrade(&cx)?;
1635 let disk_based_diagnostics_progress_token =
1636 adapter.disk_based_diagnostics_progress_token();
1637
1638 language_server
1639 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1640 let this = this.downgrade();
1641 let adapter = adapter.clone();
1642 move |params, mut cx| {
1643 if let Some(this) = this.upgrade(&cx) {
1644 this.update(&mut cx, |this, cx| {
1645 this.on_lsp_diagnostics_published(
1646 server_id,
1647 params,
1648 &adapter,
1649 disk_based_diagnostics_progress_token,
1650 cx,
1651 );
1652 });
1653 }
1654 }
1655 })
1656 .detach();
1657
1658 language_server
1659 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1660 let settings = this
1661 .read_with(&cx, |this, _| this.language_server_settings.clone());
1662 move |params, _| {
1663 let settings = settings.lock().clone();
1664 async move {
1665 Ok(params
1666 .items
1667 .into_iter()
1668 .map(|item| {
1669 if let Some(section) = &item.section {
1670 settings
1671 .get(section)
1672 .cloned()
1673 .unwrap_or(serde_json::Value::Null)
1674 } else {
1675 settings.clone()
1676 }
1677 })
1678 .collect())
1679 }
1680 }
1681 })
1682 .detach();
1683
1684 language_server
1685 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1686 let this = this.downgrade();
1687 let adapter = adapter.clone();
1688 let language_server = language_server.clone();
1689 move |params, cx| {
1690 Self::on_lsp_workspace_edit(
1691 this,
1692 params,
1693 server_id,
1694 adapter.clone(),
1695 language_server.clone(),
1696 cx,
1697 )
1698 }
1699 })
1700 .detach();
1701
1702 language_server
1703 .on_notification::<lsp::notification::Progress, _>({
1704 let this = this.downgrade();
1705 move |params, mut cx| {
1706 if let Some(this) = this.upgrade(&cx) {
1707 this.update(&mut cx, |this, cx| {
1708 this.on_lsp_progress(
1709 params,
1710 server_id,
1711 disk_based_diagnostics_progress_token,
1712 cx,
1713 );
1714 });
1715 }
1716 }
1717 })
1718 .detach();
1719
1720 this.update(&mut cx, |this, cx| {
1721 this.language_servers
1722 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1723 this.language_server_statuses.insert(
1724 server_id,
1725 LanguageServerStatus {
1726 name: language_server.name().to_string(),
1727 pending_work: Default::default(),
1728 pending_diagnostic_updates: 0,
1729 },
1730 );
1731 language_server
1732 .notify::<lsp::notification::DidChangeConfiguration>(
1733 lsp::DidChangeConfigurationParams {
1734 settings: this.language_server_settings.lock().clone(),
1735 },
1736 )
1737 .ok();
1738
1739 if let Some(project_id) = this.shared_remote_id() {
1740 this.client
1741 .send(proto::StartLanguageServer {
1742 project_id,
1743 server: Some(proto::LanguageServer {
1744 id: server_id as u64,
1745 name: language_server.name().to_string(),
1746 }),
1747 })
1748 .log_err();
1749 }
1750
1751 // Tell the language server about every open buffer in the worktree that matches the language.
1752 for buffer in this.opened_buffers.values() {
1753 if let Some(buffer_handle) = buffer.upgrade(cx) {
1754 let buffer = buffer_handle.read(cx);
1755 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1756 file
1757 } else {
1758 continue;
1759 };
1760 let language = if let Some(language) = buffer.language() {
1761 language
1762 } else {
1763 continue;
1764 };
1765 if file.worktree.read(cx).id() != key.0
1766 || language.lsp_adapter().map(|a| a.name())
1767 != Some(key.1.clone())
1768 {
1769 continue;
1770 }
1771
1772 let file = file.as_local()?;
1773 let versions = this
1774 .buffer_snapshots
1775 .entry(buffer.remote_id())
1776 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1777 let (version, initial_snapshot) = versions.last().unwrap();
1778 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1779 let language_id = adapter.id_for_language(language.name().as_ref());
1780 language_server
1781 .notify::<lsp::notification::DidOpenTextDocument>(
1782 lsp::DidOpenTextDocumentParams {
1783 text_document: lsp::TextDocumentItem::new(
1784 uri,
1785 language_id.unwrap_or_default(),
1786 *version,
1787 initial_snapshot.text(),
1788 ),
1789 },
1790 )
1791 .log_err()?;
1792 buffer_handle.update(cx, |buffer, cx| {
1793 buffer.set_completion_triggers(
1794 language_server
1795 .capabilities()
1796 .completion_provider
1797 .as_ref()
1798 .and_then(|provider| {
1799 provider.trigger_characters.clone()
1800 })
1801 .unwrap_or(Vec::new()),
1802 cx,
1803 )
1804 });
1805 }
1806 }
1807
1808 cx.notify();
1809 Some(())
1810 });
1811
1812 Some(language_server)
1813 })
1814 });
1815 }
1816
1817 pub fn restart_language_servers_for_buffers(
1818 &mut self,
1819 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1820 cx: &mut ModelContext<Self>,
1821 ) -> Option<()> {
1822 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1823 .into_iter()
1824 .filter_map(|buffer| {
1825 let file = File::from_dyn(buffer.read(cx).file())?;
1826 let worktree = file.worktree.read(cx).as_local()?;
1827 let worktree_id = worktree.id();
1828 let worktree_abs_path = worktree.abs_path().clone();
1829 let full_path = file.full_path(cx);
1830 Some((worktree_id, worktree_abs_path, full_path))
1831 })
1832 .collect();
1833 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1834 let language = self.languages.select_language(&full_path)?;
1835 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1836 }
1837
1838 None
1839 }
1840
1841 fn restart_language_server(
1842 &mut self,
1843 worktree_id: WorktreeId,
1844 worktree_path: Arc<Path>,
1845 language: Arc<Language>,
1846 cx: &mut ModelContext<Self>,
1847 ) {
1848 let adapter = if let Some(adapter) = language.lsp_adapter() {
1849 adapter
1850 } else {
1851 return;
1852 };
1853 let key = (worktree_id, adapter.name());
1854 let server_to_shutdown = self.language_servers.remove(&key);
1855 self.started_language_servers.remove(&key);
1856 server_to_shutdown
1857 .as_ref()
1858 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1859 cx.spawn_weak(|this, mut cx| async move {
1860 if let Some(this) = this.upgrade(&cx) {
1861 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1862 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1863 shutdown_task.await;
1864 }
1865 }
1866
1867 this.update(&mut cx, |this, cx| {
1868 this.start_language_server(worktree_id, worktree_path, language, cx);
1869 });
1870 }
1871 })
1872 .detach();
1873 }
1874
1875 fn on_lsp_diagnostics_published(
1876 &mut self,
1877 server_id: usize,
1878 mut params: lsp::PublishDiagnosticsParams,
1879 adapter: &Arc<dyn LspAdapter>,
1880 disk_based_diagnostics_progress_token: Option<&str>,
1881 cx: &mut ModelContext<Self>,
1882 ) {
1883 adapter.process_diagnostics(&mut params);
1884 if disk_based_diagnostics_progress_token.is_none() {
1885 self.disk_based_diagnostics_started(cx);
1886 self.broadcast_language_server_update(
1887 server_id,
1888 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1889 proto::LspDiskBasedDiagnosticsUpdating {},
1890 ),
1891 );
1892 }
1893 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1894 .log_err();
1895 if disk_based_diagnostics_progress_token.is_none() {
1896 self.disk_based_diagnostics_finished(cx);
1897 self.broadcast_language_server_update(
1898 server_id,
1899 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1900 proto::LspDiskBasedDiagnosticsUpdated {},
1901 ),
1902 );
1903 }
1904 }
1905
1906 fn on_lsp_progress(
1907 &mut self,
1908 progress: lsp::ProgressParams,
1909 server_id: usize,
1910 disk_based_diagnostics_progress_token: Option<&str>,
1911 cx: &mut ModelContext<Self>,
1912 ) {
1913 let token = match progress.token {
1914 lsp::NumberOrString::String(token) => token,
1915 lsp::NumberOrString::Number(token) => {
1916 log::info!("skipping numeric progress token {}", token);
1917 return;
1918 }
1919 };
1920 let progress = match progress.value {
1921 lsp::ProgressParamsValue::WorkDone(value) => value,
1922 };
1923 let language_server_status =
1924 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1925 status
1926 } else {
1927 return;
1928 };
1929 match progress {
1930 lsp::WorkDoneProgress::Begin(_) => {
1931 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1932 language_server_status.pending_diagnostic_updates += 1;
1933 if language_server_status.pending_diagnostic_updates == 1 {
1934 self.disk_based_diagnostics_started(cx);
1935 self.broadcast_language_server_update(
1936 server_id,
1937 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1938 proto::LspDiskBasedDiagnosticsUpdating {},
1939 ),
1940 );
1941 }
1942 } else {
1943 self.on_lsp_work_start(server_id, token.clone(), cx);
1944 self.broadcast_language_server_update(
1945 server_id,
1946 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1947 token,
1948 }),
1949 );
1950 }
1951 }
1952 lsp::WorkDoneProgress::Report(report) => {
1953 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1954 self.on_lsp_work_progress(
1955 server_id,
1956 token.clone(),
1957 LanguageServerProgress {
1958 message: report.message.clone(),
1959 percentage: report.percentage.map(|p| p as usize),
1960 last_update_at: Instant::now(),
1961 },
1962 cx,
1963 );
1964 self.broadcast_language_server_update(
1965 server_id,
1966 proto::update_language_server::Variant::WorkProgress(
1967 proto::LspWorkProgress {
1968 token,
1969 message: report.message,
1970 percentage: report.percentage.map(|p| p as u32),
1971 },
1972 ),
1973 );
1974 }
1975 }
1976 lsp::WorkDoneProgress::End(_) => {
1977 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1978 language_server_status.pending_diagnostic_updates -= 1;
1979 if language_server_status.pending_diagnostic_updates == 0 {
1980 self.disk_based_diagnostics_finished(cx);
1981 self.broadcast_language_server_update(
1982 server_id,
1983 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1984 proto::LspDiskBasedDiagnosticsUpdated {},
1985 ),
1986 );
1987 }
1988 } else {
1989 self.on_lsp_work_end(server_id, token.clone(), cx);
1990 self.broadcast_language_server_update(
1991 server_id,
1992 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1993 token,
1994 }),
1995 );
1996 }
1997 }
1998 }
1999 }
2000
2001 fn on_lsp_work_start(
2002 &mut self,
2003 language_server_id: usize,
2004 token: String,
2005 cx: &mut ModelContext<Self>,
2006 ) {
2007 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2008 status.pending_work.insert(
2009 token,
2010 LanguageServerProgress {
2011 message: None,
2012 percentage: None,
2013 last_update_at: Instant::now(),
2014 },
2015 );
2016 cx.notify();
2017 }
2018 }
2019
2020 fn on_lsp_work_progress(
2021 &mut self,
2022 language_server_id: usize,
2023 token: String,
2024 progress: LanguageServerProgress,
2025 cx: &mut ModelContext<Self>,
2026 ) {
2027 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2028 status.pending_work.insert(token, progress);
2029 cx.notify();
2030 }
2031 }
2032
2033 fn on_lsp_work_end(
2034 &mut self,
2035 language_server_id: usize,
2036 token: String,
2037 cx: &mut ModelContext<Self>,
2038 ) {
2039 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2040 status.pending_work.remove(&token);
2041 cx.notify();
2042 }
2043 }
2044
2045 async fn on_lsp_workspace_edit(
2046 this: WeakModelHandle<Self>,
2047 params: lsp::ApplyWorkspaceEditParams,
2048 server_id: usize,
2049 adapter: Arc<dyn LspAdapter>,
2050 language_server: Arc<LanguageServer>,
2051 mut cx: AsyncAppContext,
2052 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2053 let this = this
2054 .upgrade(&cx)
2055 .ok_or_else(|| anyhow!("project project closed"))?;
2056 let transaction = Self::deserialize_workspace_edit(
2057 this.clone(),
2058 params.edit,
2059 true,
2060 adapter.clone(),
2061 language_server.clone(),
2062 &mut cx,
2063 )
2064 .await
2065 .log_err();
2066 this.update(&mut cx, |this, _| {
2067 if let Some(transaction) = transaction {
2068 this.last_workspace_edits_by_language_server
2069 .insert(server_id, transaction);
2070 }
2071 });
2072 Ok(lsp::ApplyWorkspaceEditResponse {
2073 applied: true,
2074 failed_change: None,
2075 failure_reason: None,
2076 })
2077 }
2078
2079 fn broadcast_language_server_update(
2080 &self,
2081 language_server_id: usize,
2082 event: proto::update_language_server::Variant,
2083 ) {
2084 if let Some(project_id) = self.shared_remote_id() {
2085 self.client
2086 .send(proto::UpdateLanguageServer {
2087 project_id,
2088 language_server_id: language_server_id as u64,
2089 variant: Some(event),
2090 })
2091 .log_err();
2092 }
2093 }
2094
2095 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2096 for (_, server) in self.language_servers.values() {
2097 server
2098 .notify::<lsp::notification::DidChangeConfiguration>(
2099 lsp::DidChangeConfigurationParams {
2100 settings: settings.clone(),
2101 },
2102 )
2103 .ok();
2104 }
2105 *self.language_server_settings.lock() = settings;
2106 }
2107
2108 pub fn language_server_statuses(
2109 &self,
2110 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2111 self.language_server_statuses.values()
2112 }
2113
2114 pub fn update_diagnostics(
2115 &mut self,
2116 params: lsp::PublishDiagnosticsParams,
2117 disk_based_sources: &[&str],
2118 cx: &mut ModelContext<Self>,
2119 ) -> Result<()> {
2120 let abs_path = params
2121 .uri
2122 .to_file_path()
2123 .map_err(|_| anyhow!("URI is not a file"))?;
2124 let mut next_group_id = 0;
2125 let mut diagnostics = Vec::default();
2126 let mut primary_diagnostic_group_ids = HashMap::default();
2127 let mut sources_by_group_id = HashMap::default();
2128 let mut supporting_diagnostics = HashMap::default();
2129 for diagnostic in ¶ms.diagnostics {
2130 let source = diagnostic.source.as_ref();
2131 let code = diagnostic.code.as_ref().map(|code| match code {
2132 lsp::NumberOrString::Number(code) => code.to_string(),
2133 lsp::NumberOrString::String(code) => code.clone(),
2134 });
2135 let range = range_from_lsp(diagnostic.range);
2136 let is_supporting = diagnostic
2137 .related_information
2138 .as_ref()
2139 .map_or(false, |infos| {
2140 infos.iter().any(|info| {
2141 primary_diagnostic_group_ids.contains_key(&(
2142 source,
2143 code.clone(),
2144 range_from_lsp(info.location.range),
2145 ))
2146 })
2147 });
2148
2149 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2150 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2151 });
2152
2153 if is_supporting {
2154 supporting_diagnostics.insert(
2155 (source, code.clone(), range),
2156 (diagnostic.severity, is_unnecessary),
2157 );
2158 } else {
2159 let group_id = post_inc(&mut next_group_id);
2160 let is_disk_based = source.map_or(false, |source| {
2161 disk_based_sources.contains(&source.as_str())
2162 });
2163
2164 sources_by_group_id.insert(group_id, source);
2165 primary_diagnostic_group_ids
2166 .insert((source, code.clone(), range.clone()), group_id);
2167
2168 diagnostics.push(DiagnosticEntry {
2169 range,
2170 diagnostic: Diagnostic {
2171 code: code.clone(),
2172 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2173 message: diagnostic.message.clone(),
2174 group_id,
2175 is_primary: true,
2176 is_valid: true,
2177 is_disk_based,
2178 is_unnecessary,
2179 },
2180 });
2181 if let Some(infos) = &diagnostic.related_information {
2182 for info in infos {
2183 if info.location.uri == params.uri && !info.message.is_empty() {
2184 let range = range_from_lsp(info.location.range);
2185 diagnostics.push(DiagnosticEntry {
2186 range,
2187 diagnostic: Diagnostic {
2188 code: code.clone(),
2189 severity: DiagnosticSeverity::INFORMATION,
2190 message: info.message.clone(),
2191 group_id,
2192 is_primary: false,
2193 is_valid: true,
2194 is_disk_based,
2195 is_unnecessary: false,
2196 },
2197 });
2198 }
2199 }
2200 }
2201 }
2202 }
2203
2204 for entry in &mut diagnostics {
2205 let diagnostic = &mut entry.diagnostic;
2206 if !diagnostic.is_primary {
2207 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2208 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2209 source,
2210 diagnostic.code.clone(),
2211 entry.range.clone(),
2212 )) {
2213 if let Some(severity) = severity {
2214 diagnostic.severity = severity;
2215 }
2216 diagnostic.is_unnecessary = is_unnecessary;
2217 }
2218 }
2219 }
2220
2221 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2222 Ok(())
2223 }
2224
2225 pub fn update_diagnostic_entries(
2226 &mut self,
2227 abs_path: PathBuf,
2228 version: Option<i32>,
2229 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2230 cx: &mut ModelContext<Project>,
2231 ) -> Result<(), anyhow::Error> {
2232 let (worktree, relative_path) = self
2233 .find_local_worktree(&abs_path, cx)
2234 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2235 if !worktree.read(cx).is_visible() {
2236 return Ok(());
2237 }
2238
2239 let project_path = ProjectPath {
2240 worktree_id: worktree.read(cx).id(),
2241 path: relative_path.into(),
2242 };
2243 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2244 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2245 }
2246
2247 let updated = worktree.update(cx, |worktree, cx| {
2248 worktree
2249 .as_local_mut()
2250 .ok_or_else(|| anyhow!("not a local worktree"))?
2251 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2252 })?;
2253 if updated {
2254 cx.emit(Event::DiagnosticsUpdated(project_path));
2255 }
2256 Ok(())
2257 }
2258
2259 fn update_buffer_diagnostics(
2260 &mut self,
2261 buffer: &ModelHandle<Buffer>,
2262 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2263 version: Option<i32>,
2264 cx: &mut ModelContext<Self>,
2265 ) -> Result<()> {
2266 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2267 Ordering::Equal
2268 .then_with(|| b.is_primary.cmp(&a.is_primary))
2269 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2270 .then_with(|| a.severity.cmp(&b.severity))
2271 .then_with(|| a.message.cmp(&b.message))
2272 }
2273
2274 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2275
2276 diagnostics.sort_unstable_by(|a, b| {
2277 Ordering::Equal
2278 .then_with(|| a.range.start.cmp(&b.range.start))
2279 .then_with(|| b.range.end.cmp(&a.range.end))
2280 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2281 });
2282
2283 let mut sanitized_diagnostics = Vec::new();
2284 let edits_since_save = Patch::new(
2285 snapshot
2286 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2287 .collect(),
2288 );
2289 for entry in diagnostics {
2290 let start;
2291 let end;
2292 if entry.diagnostic.is_disk_based {
2293 // Some diagnostics are based on files on disk instead of buffers'
2294 // current contents. Adjust these diagnostics' ranges to reflect
2295 // any unsaved edits.
2296 start = edits_since_save.old_to_new(entry.range.start);
2297 end = edits_since_save.old_to_new(entry.range.end);
2298 } else {
2299 start = entry.range.start;
2300 end = entry.range.end;
2301 }
2302
2303 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2304 ..snapshot.clip_point_utf16(end, Bias::Right);
2305
2306 // Expand empty ranges by one character
2307 if range.start == range.end {
2308 range.end.column += 1;
2309 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2310 if range.start == range.end && range.end.column > 0 {
2311 range.start.column -= 1;
2312 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2313 }
2314 }
2315
2316 sanitized_diagnostics.push(DiagnosticEntry {
2317 range,
2318 diagnostic: entry.diagnostic,
2319 });
2320 }
2321 drop(edits_since_save);
2322
2323 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2324 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2325 Ok(())
2326 }
2327
2328 pub fn reload_buffers(
2329 &self,
2330 buffers: HashSet<ModelHandle<Buffer>>,
2331 push_to_history: bool,
2332 cx: &mut ModelContext<Self>,
2333 ) -> Task<Result<ProjectTransaction>> {
2334 let mut local_buffers = Vec::new();
2335 let mut remote_buffers = None;
2336 for buffer_handle in buffers {
2337 let buffer = buffer_handle.read(cx);
2338 if buffer.is_dirty() {
2339 if let Some(file) = File::from_dyn(buffer.file()) {
2340 if file.is_local() {
2341 local_buffers.push(buffer_handle);
2342 } else {
2343 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2344 }
2345 }
2346 }
2347 }
2348
2349 let remote_buffers = self.remote_id().zip(remote_buffers);
2350 let client = self.client.clone();
2351
2352 cx.spawn(|this, mut cx| async move {
2353 let mut project_transaction = ProjectTransaction::default();
2354
2355 if let Some((project_id, remote_buffers)) = remote_buffers {
2356 let response = client
2357 .request(proto::ReloadBuffers {
2358 project_id,
2359 buffer_ids: remote_buffers
2360 .iter()
2361 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2362 .collect(),
2363 })
2364 .await?
2365 .transaction
2366 .ok_or_else(|| anyhow!("missing transaction"))?;
2367 project_transaction = this
2368 .update(&mut cx, |this, cx| {
2369 this.deserialize_project_transaction(response, push_to_history, cx)
2370 })
2371 .await?;
2372 }
2373
2374 for buffer in local_buffers {
2375 let transaction = buffer
2376 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2377 .await?;
2378 buffer.update(&mut cx, |buffer, cx| {
2379 if let Some(transaction) = transaction {
2380 if !push_to_history {
2381 buffer.forget_transaction(transaction.id);
2382 }
2383 project_transaction.0.insert(cx.handle(), transaction);
2384 }
2385 });
2386 }
2387
2388 Ok(project_transaction)
2389 })
2390 }
2391
2392 pub fn format(
2393 &self,
2394 buffers: HashSet<ModelHandle<Buffer>>,
2395 push_to_history: bool,
2396 cx: &mut ModelContext<Project>,
2397 ) -> Task<Result<ProjectTransaction>> {
2398 let mut local_buffers = Vec::new();
2399 let mut remote_buffers = None;
2400 for buffer_handle in buffers {
2401 let buffer = buffer_handle.read(cx);
2402 if let Some(file) = File::from_dyn(buffer.file()) {
2403 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2404 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2405 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2406 }
2407 } else {
2408 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2409 }
2410 } else {
2411 return Task::ready(Ok(Default::default()));
2412 }
2413 }
2414
2415 let remote_buffers = self.remote_id().zip(remote_buffers);
2416 let client = self.client.clone();
2417
2418 cx.spawn(|this, mut cx| async move {
2419 let mut project_transaction = ProjectTransaction::default();
2420
2421 if let Some((project_id, remote_buffers)) = remote_buffers {
2422 let response = client
2423 .request(proto::FormatBuffers {
2424 project_id,
2425 buffer_ids: remote_buffers
2426 .iter()
2427 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2428 .collect(),
2429 })
2430 .await?
2431 .transaction
2432 .ok_or_else(|| anyhow!("missing transaction"))?;
2433 project_transaction = this
2434 .update(&mut cx, |this, cx| {
2435 this.deserialize_project_transaction(response, push_to_history, cx)
2436 })
2437 .await?;
2438 }
2439
2440 for (buffer, buffer_abs_path, language_server) in local_buffers {
2441 let text_document = lsp::TextDocumentIdentifier::new(
2442 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2443 );
2444 let capabilities = &language_server.capabilities();
2445 let tab_size = cx.update(|cx| {
2446 let language_name = buffer.read(cx).language().map(|language| language.name());
2447 cx.global::<Settings>().tab_size(language_name.as_deref())
2448 });
2449 let lsp_edits = if capabilities
2450 .document_formatting_provider
2451 .as_ref()
2452 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2453 {
2454 language_server
2455 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2456 text_document,
2457 options: lsp::FormattingOptions {
2458 tab_size,
2459 insert_spaces: true,
2460 insert_final_newline: Some(true),
2461 ..Default::default()
2462 },
2463 work_done_progress_params: Default::default(),
2464 })
2465 .await?
2466 } else if capabilities
2467 .document_range_formatting_provider
2468 .as_ref()
2469 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2470 {
2471 let buffer_start = lsp::Position::new(0, 0);
2472 let buffer_end =
2473 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2474 language_server
2475 .request::<lsp::request::RangeFormatting>(
2476 lsp::DocumentRangeFormattingParams {
2477 text_document,
2478 range: lsp::Range::new(buffer_start, buffer_end),
2479 options: lsp::FormattingOptions {
2480 tab_size: 4,
2481 insert_spaces: true,
2482 insert_final_newline: Some(true),
2483 ..Default::default()
2484 },
2485 work_done_progress_params: Default::default(),
2486 },
2487 )
2488 .await?
2489 } else {
2490 continue;
2491 };
2492
2493 if let Some(lsp_edits) = lsp_edits {
2494 let edits = this
2495 .update(&mut cx, |this, cx| {
2496 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2497 })
2498 .await?;
2499 buffer.update(&mut cx, |buffer, cx| {
2500 buffer.finalize_last_transaction();
2501 buffer.start_transaction();
2502 for (range, text) in edits {
2503 buffer.edit([(range, text)], cx);
2504 }
2505 if buffer.end_transaction(cx).is_some() {
2506 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2507 if !push_to_history {
2508 buffer.forget_transaction(transaction.id);
2509 }
2510 project_transaction.0.insert(cx.handle(), transaction);
2511 }
2512 });
2513 }
2514 }
2515
2516 Ok(project_transaction)
2517 })
2518 }
2519
2520 pub fn definition<T: ToPointUtf16>(
2521 &self,
2522 buffer: &ModelHandle<Buffer>,
2523 position: T,
2524 cx: &mut ModelContext<Self>,
2525 ) -> Task<Result<Vec<Location>>> {
2526 let position = position.to_point_utf16(buffer.read(cx));
2527 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2528 }
2529
2530 pub fn references<T: ToPointUtf16>(
2531 &self,
2532 buffer: &ModelHandle<Buffer>,
2533 position: T,
2534 cx: &mut ModelContext<Self>,
2535 ) -> Task<Result<Vec<Location>>> {
2536 let position = position.to_point_utf16(buffer.read(cx));
2537 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2538 }
2539
2540 pub fn document_highlights<T: ToPointUtf16>(
2541 &self,
2542 buffer: &ModelHandle<Buffer>,
2543 position: T,
2544 cx: &mut ModelContext<Self>,
2545 ) -> Task<Result<Vec<DocumentHighlight>>> {
2546 let position = position.to_point_utf16(buffer.read(cx));
2547
2548 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2549 }
2550
2551 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2552 if self.is_local() {
2553 let mut requests = Vec::new();
2554 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2555 let worktree_id = *worktree_id;
2556 if let Some(worktree) = self
2557 .worktree_for_id(worktree_id, cx)
2558 .and_then(|worktree| worktree.read(cx).as_local())
2559 {
2560 let lsp_adapter = lsp_adapter.clone();
2561 let worktree_abs_path = worktree.abs_path().clone();
2562 requests.push(
2563 language_server
2564 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2565 query: query.to_string(),
2566 ..Default::default()
2567 })
2568 .log_err()
2569 .map(move |response| {
2570 (
2571 lsp_adapter,
2572 worktree_id,
2573 worktree_abs_path,
2574 response.unwrap_or_default(),
2575 )
2576 }),
2577 );
2578 }
2579 }
2580
2581 cx.spawn_weak(|this, cx| async move {
2582 let responses = futures::future::join_all(requests).await;
2583 let this = if let Some(this) = this.upgrade(&cx) {
2584 this
2585 } else {
2586 return Ok(Default::default());
2587 };
2588 this.read_with(&cx, |this, cx| {
2589 let mut symbols = Vec::new();
2590 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2591 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2592 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2593 let mut worktree_id = source_worktree_id;
2594 let path;
2595 if let Some((worktree, rel_path)) =
2596 this.find_local_worktree(&abs_path, cx)
2597 {
2598 worktree_id = worktree.read(cx).id();
2599 path = rel_path;
2600 } else {
2601 path = relativize_path(&worktree_abs_path, &abs_path);
2602 }
2603
2604 let label = this
2605 .languages
2606 .select_language(&path)
2607 .and_then(|language| {
2608 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2609 })
2610 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2611 let signature = this.symbol_signature(worktree_id, &path);
2612
2613 Some(Symbol {
2614 source_worktree_id,
2615 worktree_id,
2616 language_server_name: adapter.name(),
2617 name: lsp_symbol.name,
2618 kind: lsp_symbol.kind,
2619 label,
2620 path,
2621 range: range_from_lsp(lsp_symbol.location.range),
2622 signature,
2623 })
2624 }));
2625 }
2626 Ok(symbols)
2627 })
2628 })
2629 } else if let Some(project_id) = self.remote_id() {
2630 let request = self.client.request(proto::GetProjectSymbols {
2631 project_id,
2632 query: query.to_string(),
2633 });
2634 cx.spawn_weak(|this, cx| async move {
2635 let response = request.await?;
2636 let mut symbols = Vec::new();
2637 if let Some(this) = this.upgrade(&cx) {
2638 this.read_with(&cx, |this, _| {
2639 symbols.extend(
2640 response
2641 .symbols
2642 .into_iter()
2643 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2644 );
2645 })
2646 }
2647 Ok(symbols)
2648 })
2649 } else {
2650 Task::ready(Ok(Default::default()))
2651 }
2652 }
2653
2654 pub fn open_buffer_for_symbol(
2655 &mut self,
2656 symbol: &Symbol,
2657 cx: &mut ModelContext<Self>,
2658 ) -> Task<Result<ModelHandle<Buffer>>> {
2659 if self.is_local() {
2660 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2661 symbol.source_worktree_id,
2662 symbol.language_server_name.clone(),
2663 )) {
2664 server.clone()
2665 } else {
2666 return Task::ready(Err(anyhow!(
2667 "language server for worktree and language not found"
2668 )));
2669 };
2670
2671 let worktree_abs_path = if let Some(worktree_abs_path) = self
2672 .worktree_for_id(symbol.worktree_id, cx)
2673 .and_then(|worktree| worktree.read(cx).as_local())
2674 .map(|local_worktree| local_worktree.abs_path())
2675 {
2676 worktree_abs_path
2677 } else {
2678 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2679 };
2680 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2681 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2682 uri
2683 } else {
2684 return Task::ready(Err(anyhow!("invalid symbol path")));
2685 };
2686
2687 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2688 } else if let Some(project_id) = self.remote_id() {
2689 let request = self.client.request(proto::OpenBufferForSymbol {
2690 project_id,
2691 symbol: Some(serialize_symbol(symbol)),
2692 });
2693 cx.spawn(|this, mut cx| async move {
2694 let response = request.await?;
2695 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2696 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2697 .await
2698 })
2699 } else {
2700 Task::ready(Err(anyhow!("project does not have a remote id")))
2701 }
2702 }
2703
2704 pub fn completions<T: ToPointUtf16>(
2705 &self,
2706 source_buffer_handle: &ModelHandle<Buffer>,
2707 position: T,
2708 cx: &mut ModelContext<Self>,
2709 ) -> Task<Result<Vec<Completion>>> {
2710 let source_buffer_handle = source_buffer_handle.clone();
2711 let source_buffer = source_buffer_handle.read(cx);
2712 let buffer_id = source_buffer.remote_id();
2713 let language = source_buffer.language().cloned();
2714 let worktree;
2715 let buffer_abs_path;
2716 if let Some(file) = File::from_dyn(source_buffer.file()) {
2717 worktree = file.worktree.clone();
2718 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2719 } else {
2720 return Task::ready(Ok(Default::default()));
2721 };
2722
2723 let position = position.to_point_utf16(source_buffer);
2724 let anchor = source_buffer.anchor_after(position);
2725
2726 if worktree.read(cx).as_local().is_some() {
2727 let buffer_abs_path = buffer_abs_path.unwrap();
2728 let (_, lang_server) =
2729 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2730 server.clone()
2731 } else {
2732 return Task::ready(Ok(Default::default()));
2733 };
2734
2735 cx.spawn(|_, cx| async move {
2736 let completions = lang_server
2737 .request::<lsp::request::Completion>(lsp::CompletionParams {
2738 text_document_position: lsp::TextDocumentPositionParams::new(
2739 lsp::TextDocumentIdentifier::new(
2740 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2741 ),
2742 point_to_lsp(position),
2743 ),
2744 context: Default::default(),
2745 work_done_progress_params: Default::default(),
2746 partial_result_params: Default::default(),
2747 })
2748 .await
2749 .context("lsp completion request failed")?;
2750
2751 let completions = if let Some(completions) = completions {
2752 match completions {
2753 lsp::CompletionResponse::Array(completions) => completions,
2754 lsp::CompletionResponse::List(list) => list.items,
2755 }
2756 } else {
2757 Default::default()
2758 };
2759
2760 source_buffer_handle.read_with(&cx, |this, _| {
2761 let snapshot = this.snapshot();
2762 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2763 let mut range_for_token = None;
2764 Ok(completions
2765 .into_iter()
2766 .filter_map(|lsp_completion| {
2767 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2768 // If the language server provides a range to overwrite, then
2769 // check that the range is valid.
2770 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2771 let range = range_from_lsp(edit.range);
2772 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2773 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2774 if start != range.start || end != range.end {
2775 log::info!("completion out of expected range");
2776 return None;
2777 }
2778 (
2779 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2780 edit.new_text.clone(),
2781 )
2782 }
2783 // If the language server does not provide a range, then infer
2784 // the range based on the syntax tree.
2785 None => {
2786 if position != clipped_position {
2787 log::info!("completion out of expected range");
2788 return None;
2789 }
2790 let Range { start, end } = range_for_token
2791 .get_or_insert_with(|| {
2792 let offset = position.to_offset(&snapshot);
2793 snapshot
2794 .range_for_word_token_at(offset)
2795 .unwrap_or_else(|| offset..offset)
2796 })
2797 .clone();
2798 let text = lsp_completion
2799 .insert_text
2800 .as_ref()
2801 .unwrap_or(&lsp_completion.label)
2802 .clone();
2803 (
2804 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2805 text.clone(),
2806 )
2807 }
2808 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2809 log::info!("unsupported insert/replace completion");
2810 return None;
2811 }
2812 };
2813
2814 Some(Completion {
2815 old_range,
2816 new_text,
2817 label: language
2818 .as_ref()
2819 .and_then(|l| l.label_for_completion(&lsp_completion))
2820 .unwrap_or_else(|| {
2821 CodeLabel::plain(
2822 lsp_completion.label.clone(),
2823 lsp_completion.filter_text.as_deref(),
2824 )
2825 }),
2826 lsp_completion,
2827 })
2828 })
2829 .collect())
2830 })
2831 })
2832 } else if let Some(project_id) = self.remote_id() {
2833 let rpc = self.client.clone();
2834 let message = proto::GetCompletions {
2835 project_id,
2836 buffer_id,
2837 position: Some(language::proto::serialize_anchor(&anchor)),
2838 version: serialize_version(&source_buffer.version()),
2839 };
2840 cx.spawn_weak(|_, mut cx| async move {
2841 let response = rpc.request(message).await?;
2842
2843 source_buffer_handle
2844 .update(&mut cx, |buffer, _| {
2845 buffer.wait_for_version(deserialize_version(response.version))
2846 })
2847 .await;
2848
2849 response
2850 .completions
2851 .into_iter()
2852 .map(|completion| {
2853 language::proto::deserialize_completion(completion, language.as_ref())
2854 })
2855 .collect()
2856 })
2857 } else {
2858 Task::ready(Ok(Default::default()))
2859 }
2860 }
2861
2862 pub fn apply_additional_edits_for_completion(
2863 &self,
2864 buffer_handle: ModelHandle<Buffer>,
2865 completion: Completion,
2866 push_to_history: bool,
2867 cx: &mut ModelContext<Self>,
2868 ) -> Task<Result<Option<Transaction>>> {
2869 let buffer = buffer_handle.read(cx);
2870 let buffer_id = buffer.remote_id();
2871
2872 if self.is_local() {
2873 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2874 {
2875 server.clone()
2876 } else {
2877 return Task::ready(Ok(Default::default()));
2878 };
2879
2880 cx.spawn(|this, mut cx| async move {
2881 let resolved_completion = lang_server
2882 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2883 .await?;
2884 if let Some(edits) = resolved_completion.additional_text_edits {
2885 let edits = this
2886 .update(&mut cx, |this, cx| {
2887 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2888 })
2889 .await?;
2890 buffer_handle.update(&mut cx, |buffer, cx| {
2891 buffer.finalize_last_transaction();
2892 buffer.start_transaction();
2893 for (range, text) in edits {
2894 buffer.edit([(range, text)], cx);
2895 }
2896 let transaction = if buffer.end_transaction(cx).is_some() {
2897 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2898 if !push_to_history {
2899 buffer.forget_transaction(transaction.id);
2900 }
2901 Some(transaction)
2902 } else {
2903 None
2904 };
2905 Ok(transaction)
2906 })
2907 } else {
2908 Ok(None)
2909 }
2910 })
2911 } else if let Some(project_id) = self.remote_id() {
2912 let client = self.client.clone();
2913 cx.spawn(|_, mut cx| async move {
2914 let response = client
2915 .request(proto::ApplyCompletionAdditionalEdits {
2916 project_id,
2917 buffer_id,
2918 completion: Some(language::proto::serialize_completion(&completion)),
2919 })
2920 .await?;
2921
2922 if let Some(transaction) = response.transaction {
2923 let transaction = language::proto::deserialize_transaction(transaction)?;
2924 buffer_handle
2925 .update(&mut cx, |buffer, _| {
2926 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2927 })
2928 .await;
2929 if push_to_history {
2930 buffer_handle.update(&mut cx, |buffer, _| {
2931 buffer.push_transaction(transaction.clone(), Instant::now());
2932 });
2933 }
2934 Ok(Some(transaction))
2935 } else {
2936 Ok(None)
2937 }
2938 })
2939 } else {
2940 Task::ready(Err(anyhow!("project does not have a remote id")))
2941 }
2942 }
2943
2944 pub fn code_actions<T: Clone + ToOffset>(
2945 &self,
2946 buffer_handle: &ModelHandle<Buffer>,
2947 range: Range<T>,
2948 cx: &mut ModelContext<Self>,
2949 ) -> Task<Result<Vec<CodeAction>>> {
2950 let buffer_handle = buffer_handle.clone();
2951 let buffer = buffer_handle.read(cx);
2952 let snapshot = buffer.snapshot();
2953 let relevant_diagnostics = snapshot
2954 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2955 .map(|entry| entry.to_lsp_diagnostic_stub())
2956 .collect();
2957 let buffer_id = buffer.remote_id();
2958 let worktree;
2959 let buffer_abs_path;
2960 if let Some(file) = File::from_dyn(buffer.file()) {
2961 worktree = file.worktree.clone();
2962 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2963 } else {
2964 return Task::ready(Ok(Default::default()));
2965 };
2966 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2967
2968 if worktree.read(cx).as_local().is_some() {
2969 let buffer_abs_path = buffer_abs_path.unwrap();
2970 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2971 {
2972 server.clone()
2973 } else {
2974 return Task::ready(Ok(Default::default()));
2975 };
2976
2977 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2978 cx.foreground().spawn(async move {
2979 if !lang_server.capabilities().code_action_provider.is_some() {
2980 return Ok(Default::default());
2981 }
2982
2983 Ok(lang_server
2984 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2985 text_document: lsp::TextDocumentIdentifier::new(
2986 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2987 ),
2988 range: lsp_range,
2989 work_done_progress_params: Default::default(),
2990 partial_result_params: Default::default(),
2991 context: lsp::CodeActionContext {
2992 diagnostics: relevant_diagnostics,
2993 only: Some(vec![
2994 lsp::CodeActionKind::QUICKFIX,
2995 lsp::CodeActionKind::REFACTOR,
2996 lsp::CodeActionKind::REFACTOR_EXTRACT,
2997 lsp::CodeActionKind::SOURCE,
2998 ]),
2999 },
3000 })
3001 .await?
3002 .unwrap_or_default()
3003 .into_iter()
3004 .filter_map(|entry| {
3005 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3006 Some(CodeAction {
3007 range: range.clone(),
3008 lsp_action,
3009 })
3010 } else {
3011 None
3012 }
3013 })
3014 .collect())
3015 })
3016 } else if let Some(project_id) = self.remote_id() {
3017 let rpc = self.client.clone();
3018 let version = buffer.version();
3019 cx.spawn_weak(|_, mut cx| async move {
3020 let response = rpc
3021 .request(proto::GetCodeActions {
3022 project_id,
3023 buffer_id,
3024 start: Some(language::proto::serialize_anchor(&range.start)),
3025 end: Some(language::proto::serialize_anchor(&range.end)),
3026 version: serialize_version(&version),
3027 })
3028 .await?;
3029
3030 buffer_handle
3031 .update(&mut cx, |buffer, _| {
3032 buffer.wait_for_version(deserialize_version(response.version))
3033 })
3034 .await;
3035
3036 response
3037 .actions
3038 .into_iter()
3039 .map(language::proto::deserialize_code_action)
3040 .collect()
3041 })
3042 } else {
3043 Task::ready(Ok(Default::default()))
3044 }
3045 }
3046
3047 pub fn apply_code_action(
3048 &self,
3049 buffer_handle: ModelHandle<Buffer>,
3050 mut action: CodeAction,
3051 push_to_history: bool,
3052 cx: &mut ModelContext<Self>,
3053 ) -> Task<Result<ProjectTransaction>> {
3054 if self.is_local() {
3055 let buffer = buffer_handle.read(cx);
3056 let (lsp_adapter, lang_server) =
3057 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3058 server.clone()
3059 } else {
3060 return Task::ready(Ok(Default::default()));
3061 };
3062 let range = action.range.to_point_utf16(buffer);
3063
3064 cx.spawn(|this, mut cx| async move {
3065 if let Some(lsp_range) = action
3066 .lsp_action
3067 .data
3068 .as_mut()
3069 .and_then(|d| d.get_mut("codeActionParams"))
3070 .and_then(|d| d.get_mut("range"))
3071 {
3072 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3073 action.lsp_action = lang_server
3074 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3075 .await?;
3076 } else {
3077 let actions = this
3078 .update(&mut cx, |this, cx| {
3079 this.code_actions(&buffer_handle, action.range, cx)
3080 })
3081 .await?;
3082 action.lsp_action = actions
3083 .into_iter()
3084 .find(|a| a.lsp_action.title == action.lsp_action.title)
3085 .ok_or_else(|| anyhow!("code action is outdated"))?
3086 .lsp_action;
3087 }
3088
3089 if let Some(edit) = action.lsp_action.edit {
3090 Self::deserialize_workspace_edit(
3091 this,
3092 edit,
3093 push_to_history,
3094 lsp_adapter,
3095 lang_server,
3096 &mut cx,
3097 )
3098 .await
3099 } else if let Some(command) = action.lsp_action.command {
3100 this.update(&mut cx, |this, _| {
3101 this.last_workspace_edits_by_language_server
3102 .remove(&lang_server.server_id());
3103 });
3104 lang_server
3105 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3106 command: command.command,
3107 arguments: command.arguments.unwrap_or_default(),
3108 ..Default::default()
3109 })
3110 .await?;
3111 Ok(this.update(&mut cx, |this, _| {
3112 this.last_workspace_edits_by_language_server
3113 .remove(&lang_server.server_id())
3114 .unwrap_or_default()
3115 }))
3116 } else {
3117 Ok(ProjectTransaction::default())
3118 }
3119 })
3120 } else if let Some(project_id) = self.remote_id() {
3121 let client = self.client.clone();
3122 let request = proto::ApplyCodeAction {
3123 project_id,
3124 buffer_id: buffer_handle.read(cx).remote_id(),
3125 action: Some(language::proto::serialize_code_action(&action)),
3126 };
3127 cx.spawn(|this, mut cx| async move {
3128 let response = client
3129 .request(request)
3130 .await?
3131 .transaction
3132 .ok_or_else(|| anyhow!("missing transaction"))?;
3133 this.update(&mut cx, |this, cx| {
3134 this.deserialize_project_transaction(response, push_to_history, cx)
3135 })
3136 .await
3137 })
3138 } else {
3139 Task::ready(Err(anyhow!("project does not have a remote id")))
3140 }
3141 }
3142
3143 async fn deserialize_workspace_edit(
3144 this: ModelHandle<Self>,
3145 edit: lsp::WorkspaceEdit,
3146 push_to_history: bool,
3147 lsp_adapter: Arc<dyn LspAdapter>,
3148 language_server: Arc<LanguageServer>,
3149 cx: &mut AsyncAppContext,
3150 ) -> Result<ProjectTransaction> {
3151 let fs = this.read_with(cx, |this, _| this.fs.clone());
3152 let mut operations = Vec::new();
3153 if let Some(document_changes) = edit.document_changes {
3154 match document_changes {
3155 lsp::DocumentChanges::Edits(edits) => {
3156 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3157 }
3158 lsp::DocumentChanges::Operations(ops) => operations = ops,
3159 }
3160 } else if let Some(changes) = edit.changes {
3161 operations.extend(changes.into_iter().map(|(uri, edits)| {
3162 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3163 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3164 uri,
3165 version: None,
3166 },
3167 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3168 })
3169 }));
3170 }
3171
3172 let mut project_transaction = ProjectTransaction::default();
3173 for operation in operations {
3174 match operation {
3175 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3176 let abs_path = op
3177 .uri
3178 .to_file_path()
3179 .map_err(|_| anyhow!("can't convert URI to path"))?;
3180
3181 if let Some(parent_path) = abs_path.parent() {
3182 fs.create_dir(parent_path).await?;
3183 }
3184 if abs_path.ends_with("/") {
3185 fs.create_dir(&abs_path).await?;
3186 } else {
3187 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3188 .await?;
3189 }
3190 }
3191 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3192 let source_abs_path = op
3193 .old_uri
3194 .to_file_path()
3195 .map_err(|_| anyhow!("can't convert URI to path"))?;
3196 let target_abs_path = op
3197 .new_uri
3198 .to_file_path()
3199 .map_err(|_| anyhow!("can't convert URI to path"))?;
3200 fs.rename(
3201 &source_abs_path,
3202 &target_abs_path,
3203 op.options.map(Into::into).unwrap_or_default(),
3204 )
3205 .await?;
3206 }
3207 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3208 let abs_path = op
3209 .uri
3210 .to_file_path()
3211 .map_err(|_| anyhow!("can't convert URI to path"))?;
3212 let options = op.options.map(Into::into).unwrap_or_default();
3213 if abs_path.ends_with("/") {
3214 fs.remove_dir(&abs_path, options).await?;
3215 } else {
3216 fs.remove_file(&abs_path, options).await?;
3217 }
3218 }
3219 lsp::DocumentChangeOperation::Edit(op) => {
3220 let buffer_to_edit = this
3221 .update(cx, |this, cx| {
3222 this.open_local_buffer_via_lsp(
3223 op.text_document.uri,
3224 lsp_adapter.clone(),
3225 language_server.clone(),
3226 cx,
3227 )
3228 })
3229 .await?;
3230
3231 let edits = this
3232 .update(cx, |this, cx| {
3233 let edits = op.edits.into_iter().map(|edit| match edit {
3234 lsp::OneOf::Left(edit) => edit,
3235 lsp::OneOf::Right(edit) => edit.text_edit,
3236 });
3237 this.edits_from_lsp(
3238 &buffer_to_edit,
3239 edits,
3240 op.text_document.version,
3241 cx,
3242 )
3243 })
3244 .await?;
3245
3246 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3247 buffer.finalize_last_transaction();
3248 buffer.start_transaction();
3249 for (range, text) in edits {
3250 buffer.edit([(range, text)], cx);
3251 }
3252 let transaction = if buffer.end_transaction(cx).is_some() {
3253 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3254 if !push_to_history {
3255 buffer.forget_transaction(transaction.id);
3256 }
3257 Some(transaction)
3258 } else {
3259 None
3260 };
3261
3262 transaction
3263 });
3264 if let Some(transaction) = transaction {
3265 project_transaction.0.insert(buffer_to_edit, transaction);
3266 }
3267 }
3268 }
3269 }
3270
3271 Ok(project_transaction)
3272 }
3273
3274 pub fn prepare_rename<T: ToPointUtf16>(
3275 &self,
3276 buffer: ModelHandle<Buffer>,
3277 position: T,
3278 cx: &mut ModelContext<Self>,
3279 ) -> Task<Result<Option<Range<Anchor>>>> {
3280 let position = position.to_point_utf16(buffer.read(cx));
3281 self.request_lsp(buffer, PrepareRename { position }, cx)
3282 }
3283
3284 pub fn perform_rename<T: ToPointUtf16>(
3285 &self,
3286 buffer: ModelHandle<Buffer>,
3287 position: T,
3288 new_name: String,
3289 push_to_history: bool,
3290 cx: &mut ModelContext<Self>,
3291 ) -> Task<Result<ProjectTransaction>> {
3292 let position = position.to_point_utf16(buffer.read(cx));
3293 self.request_lsp(
3294 buffer,
3295 PerformRename {
3296 position,
3297 new_name,
3298 push_to_history,
3299 },
3300 cx,
3301 )
3302 }
3303
3304 pub fn search(
3305 &self,
3306 query: SearchQuery,
3307 cx: &mut ModelContext<Self>,
3308 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3309 if self.is_local() {
3310 let snapshots = self
3311 .visible_worktrees(cx)
3312 .filter_map(|tree| {
3313 let tree = tree.read(cx).as_local()?;
3314 Some(tree.snapshot())
3315 })
3316 .collect::<Vec<_>>();
3317
3318 let background = cx.background().clone();
3319 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3320 if path_count == 0 {
3321 return Task::ready(Ok(Default::default()));
3322 }
3323 let workers = background.num_cpus().min(path_count);
3324 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3325 cx.background()
3326 .spawn({
3327 let fs = self.fs.clone();
3328 let background = cx.background().clone();
3329 let query = query.clone();
3330 async move {
3331 let fs = &fs;
3332 let query = &query;
3333 let matching_paths_tx = &matching_paths_tx;
3334 let paths_per_worker = (path_count + workers - 1) / workers;
3335 let snapshots = &snapshots;
3336 background
3337 .scoped(|scope| {
3338 for worker_ix in 0..workers {
3339 let worker_start_ix = worker_ix * paths_per_worker;
3340 let worker_end_ix = worker_start_ix + paths_per_worker;
3341 scope.spawn(async move {
3342 let mut snapshot_start_ix = 0;
3343 let mut abs_path = PathBuf::new();
3344 for snapshot in snapshots {
3345 let snapshot_end_ix =
3346 snapshot_start_ix + snapshot.visible_file_count();
3347 if worker_end_ix <= snapshot_start_ix {
3348 break;
3349 } else if worker_start_ix > snapshot_end_ix {
3350 snapshot_start_ix = snapshot_end_ix;
3351 continue;
3352 } else {
3353 let start_in_snapshot = worker_start_ix
3354 .saturating_sub(snapshot_start_ix);
3355 let end_in_snapshot =
3356 cmp::min(worker_end_ix, snapshot_end_ix)
3357 - snapshot_start_ix;
3358
3359 for entry in snapshot
3360 .files(false, start_in_snapshot)
3361 .take(end_in_snapshot - start_in_snapshot)
3362 {
3363 if matching_paths_tx.is_closed() {
3364 break;
3365 }
3366
3367 abs_path.clear();
3368 abs_path.push(&snapshot.abs_path());
3369 abs_path.push(&entry.path);
3370 let matches = if let Some(file) =
3371 fs.open_sync(&abs_path).await.log_err()
3372 {
3373 query.detect(file).unwrap_or(false)
3374 } else {
3375 false
3376 };
3377
3378 if matches {
3379 let project_path =
3380 (snapshot.id(), entry.path.clone());
3381 if matching_paths_tx
3382 .send(project_path)
3383 .await
3384 .is_err()
3385 {
3386 break;
3387 }
3388 }
3389 }
3390
3391 snapshot_start_ix = snapshot_end_ix;
3392 }
3393 }
3394 });
3395 }
3396 })
3397 .await;
3398 }
3399 })
3400 .detach();
3401
3402 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3403 let open_buffers = self
3404 .opened_buffers
3405 .values()
3406 .filter_map(|b| b.upgrade(cx))
3407 .collect::<HashSet<_>>();
3408 cx.spawn(|this, cx| async move {
3409 for buffer in &open_buffers {
3410 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3411 buffers_tx.send((buffer.clone(), snapshot)).await?;
3412 }
3413
3414 let open_buffers = Rc::new(RefCell::new(open_buffers));
3415 while let Some(project_path) = matching_paths_rx.next().await {
3416 if buffers_tx.is_closed() {
3417 break;
3418 }
3419
3420 let this = this.clone();
3421 let open_buffers = open_buffers.clone();
3422 let buffers_tx = buffers_tx.clone();
3423 cx.spawn(|mut cx| async move {
3424 if let Some(buffer) = this
3425 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3426 .await
3427 .log_err()
3428 {
3429 if open_buffers.borrow_mut().insert(buffer.clone()) {
3430 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3431 buffers_tx.send((buffer, snapshot)).await?;
3432 }
3433 }
3434
3435 Ok::<_, anyhow::Error>(())
3436 })
3437 .detach();
3438 }
3439
3440 Ok::<_, anyhow::Error>(())
3441 })
3442 .detach_and_log_err(cx);
3443
3444 let background = cx.background().clone();
3445 cx.background().spawn(async move {
3446 let query = &query;
3447 let mut matched_buffers = Vec::new();
3448 for _ in 0..workers {
3449 matched_buffers.push(HashMap::default());
3450 }
3451 background
3452 .scoped(|scope| {
3453 for worker_matched_buffers in matched_buffers.iter_mut() {
3454 let mut buffers_rx = buffers_rx.clone();
3455 scope.spawn(async move {
3456 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3457 let buffer_matches = query
3458 .search(snapshot.as_rope())
3459 .await
3460 .iter()
3461 .map(|range| {
3462 snapshot.anchor_before(range.start)
3463 ..snapshot.anchor_after(range.end)
3464 })
3465 .collect::<Vec<_>>();
3466 if !buffer_matches.is_empty() {
3467 worker_matched_buffers
3468 .insert(buffer.clone(), buffer_matches);
3469 }
3470 }
3471 });
3472 }
3473 })
3474 .await;
3475 Ok(matched_buffers.into_iter().flatten().collect())
3476 })
3477 } else if let Some(project_id) = self.remote_id() {
3478 let request = self.client.request(query.to_proto(project_id));
3479 cx.spawn(|this, mut cx| async move {
3480 let response = request.await?;
3481 let mut result = HashMap::default();
3482 for location in response.locations {
3483 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3484 let target_buffer = this
3485 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3486 .await?;
3487 let start = location
3488 .start
3489 .and_then(deserialize_anchor)
3490 .ok_or_else(|| anyhow!("missing target start"))?;
3491 let end = location
3492 .end
3493 .and_then(deserialize_anchor)
3494 .ok_or_else(|| anyhow!("missing target end"))?;
3495 result
3496 .entry(target_buffer)
3497 .or_insert(Vec::new())
3498 .push(start..end)
3499 }
3500 Ok(result)
3501 })
3502 } else {
3503 Task::ready(Ok(Default::default()))
3504 }
3505 }
3506
3507 fn request_lsp<R: LspCommand>(
3508 &self,
3509 buffer_handle: ModelHandle<Buffer>,
3510 request: R,
3511 cx: &mut ModelContext<Self>,
3512 ) -> Task<Result<R::Response>>
3513 where
3514 <R::LspRequest as lsp::request::Request>::Result: Send,
3515 {
3516 let buffer = buffer_handle.read(cx);
3517 if self.is_local() {
3518 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3519 if let Some((file, (_, language_server))) =
3520 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3521 {
3522 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3523 return cx.spawn(|this, cx| async move {
3524 if !request.check_capabilities(&language_server.capabilities()) {
3525 return Ok(Default::default());
3526 }
3527
3528 let response = language_server
3529 .request::<R::LspRequest>(lsp_params)
3530 .await
3531 .context("lsp request failed")?;
3532 request
3533 .response_from_lsp(response, this, buffer_handle, cx)
3534 .await
3535 });
3536 }
3537 } else if let Some(project_id) = self.remote_id() {
3538 let rpc = self.client.clone();
3539 let message = request.to_proto(project_id, buffer);
3540 return cx.spawn(|this, cx| async move {
3541 let response = rpc.request(message).await?;
3542 request
3543 .response_from_proto(response, this, buffer_handle, cx)
3544 .await
3545 });
3546 }
3547 Task::ready(Ok(Default::default()))
3548 }
3549
3550 pub fn find_or_create_local_worktree(
3551 &mut self,
3552 abs_path: impl AsRef<Path>,
3553 visible: bool,
3554 cx: &mut ModelContext<Self>,
3555 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3556 let abs_path = abs_path.as_ref();
3557 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3558 Task::ready(Ok((tree.clone(), relative_path.into())))
3559 } else {
3560 let worktree = self.create_local_worktree(abs_path, visible, cx);
3561 cx.foreground()
3562 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3563 }
3564 }
3565
3566 pub fn find_local_worktree(
3567 &self,
3568 abs_path: &Path,
3569 cx: &AppContext,
3570 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3571 for tree in self.worktrees(cx) {
3572 if let Some(relative_path) = tree
3573 .read(cx)
3574 .as_local()
3575 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3576 {
3577 return Some((tree.clone(), relative_path.into()));
3578 }
3579 }
3580 None
3581 }
3582
3583 pub fn is_shared(&self) -> bool {
3584 match &self.client_state {
3585 ProjectClientState::Local { is_shared, .. } => *is_shared,
3586 ProjectClientState::Remote { .. } => false,
3587 }
3588 }
3589
3590 fn create_local_worktree(
3591 &mut self,
3592 abs_path: impl AsRef<Path>,
3593 visible: bool,
3594 cx: &mut ModelContext<Self>,
3595 ) -> Task<Result<ModelHandle<Worktree>>> {
3596 let fs = self.fs.clone();
3597 let client = self.client.clone();
3598 let next_entry_id = self.next_entry_id.clone();
3599 let path: Arc<Path> = abs_path.as_ref().into();
3600 let task = self
3601 .loading_local_worktrees
3602 .entry(path.clone())
3603 .or_insert_with(|| {
3604 cx.spawn(|project, mut cx| {
3605 async move {
3606 let worktree = Worktree::local(
3607 client.clone(),
3608 path.clone(),
3609 visible,
3610 fs,
3611 next_entry_id,
3612 &mut cx,
3613 )
3614 .await;
3615 project.update(&mut cx, |project, _| {
3616 project.loading_local_worktrees.remove(&path);
3617 });
3618 let worktree = worktree?;
3619
3620 let remote_project_id = project.update(&mut cx, |project, cx| {
3621 project.add_worktree(&worktree, cx);
3622 project.remote_id()
3623 });
3624
3625 if let Some(project_id) = remote_project_id {
3626 // Because sharing is async, we may have *unshared* the project by the time it completes,
3627 // in which case we need to register the worktree instead.
3628 loop {
3629 if project.read_with(&cx, |project, _| project.is_shared()) {
3630 if worktree
3631 .update(&mut cx, |worktree, cx| {
3632 worktree.as_local_mut().unwrap().share(project_id, cx)
3633 })
3634 .await
3635 .is_ok()
3636 {
3637 break;
3638 }
3639 } else {
3640 worktree
3641 .update(&mut cx, |worktree, cx| {
3642 worktree
3643 .as_local_mut()
3644 .unwrap()
3645 .register(project_id, cx)
3646 })
3647 .await?;
3648 break;
3649 }
3650 }
3651 }
3652
3653 Ok(worktree)
3654 }
3655 .map_err(|err| Arc::new(err))
3656 })
3657 .shared()
3658 })
3659 .clone();
3660 cx.foreground().spawn(async move {
3661 match task.await {
3662 Ok(worktree) => Ok(worktree),
3663 Err(err) => Err(anyhow!("{}", err)),
3664 }
3665 })
3666 }
3667
3668 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3669 self.worktrees.retain(|worktree| {
3670 if let Some(worktree) = worktree.upgrade(cx) {
3671 let id = worktree.read(cx).id();
3672 if id == id_to_remove {
3673 cx.emit(Event::WorktreeRemoved(id));
3674 false
3675 } else {
3676 true
3677 }
3678 } else {
3679 false
3680 }
3681 });
3682 cx.notify();
3683 }
3684
3685 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3686 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3687 if worktree.read(cx).is_local() {
3688 cx.subscribe(&worktree, |this, worktree, _, cx| {
3689 this.update_local_worktree_buffers(worktree, cx);
3690 })
3691 .detach();
3692 }
3693
3694 let push_strong_handle = {
3695 let worktree = worktree.read(cx);
3696 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3697 };
3698 if push_strong_handle {
3699 self.worktrees
3700 .push(WorktreeHandle::Strong(worktree.clone()));
3701 } else {
3702 cx.observe_release(&worktree, |this, _, cx| {
3703 this.worktrees
3704 .retain(|worktree| worktree.upgrade(cx).is_some());
3705 cx.notify();
3706 })
3707 .detach();
3708 self.worktrees
3709 .push(WorktreeHandle::Weak(worktree.downgrade()));
3710 }
3711 cx.emit(Event::WorktreeAdded);
3712 cx.notify();
3713 }
3714
3715 fn update_local_worktree_buffers(
3716 &mut self,
3717 worktree_handle: ModelHandle<Worktree>,
3718 cx: &mut ModelContext<Self>,
3719 ) {
3720 let snapshot = worktree_handle.read(cx).snapshot();
3721 let mut buffers_to_delete = Vec::new();
3722 let mut renamed_buffers = Vec::new();
3723 for (buffer_id, buffer) in &self.opened_buffers {
3724 if let Some(buffer) = buffer.upgrade(cx) {
3725 buffer.update(cx, |buffer, cx| {
3726 if let Some(old_file) = File::from_dyn(buffer.file()) {
3727 if old_file.worktree != worktree_handle {
3728 return;
3729 }
3730
3731 let new_file = if let Some(entry) = old_file
3732 .entry_id
3733 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3734 {
3735 File {
3736 is_local: true,
3737 entry_id: Some(entry.id),
3738 mtime: entry.mtime,
3739 path: entry.path.clone(),
3740 worktree: worktree_handle.clone(),
3741 }
3742 } else if let Some(entry) =
3743 snapshot.entry_for_path(old_file.path().as_ref())
3744 {
3745 File {
3746 is_local: true,
3747 entry_id: Some(entry.id),
3748 mtime: entry.mtime,
3749 path: entry.path.clone(),
3750 worktree: worktree_handle.clone(),
3751 }
3752 } else {
3753 File {
3754 is_local: true,
3755 entry_id: None,
3756 path: old_file.path().clone(),
3757 mtime: old_file.mtime(),
3758 worktree: worktree_handle.clone(),
3759 }
3760 };
3761
3762 let old_path = old_file.abs_path(cx);
3763 if new_file.abs_path(cx) != old_path {
3764 renamed_buffers.push((cx.handle(), old_path));
3765 }
3766
3767 if let Some(project_id) = self.shared_remote_id() {
3768 self.client
3769 .send(proto::UpdateBufferFile {
3770 project_id,
3771 buffer_id: *buffer_id as u64,
3772 file: Some(new_file.to_proto()),
3773 })
3774 .log_err();
3775 }
3776 buffer.file_updated(Box::new(new_file), cx).detach();
3777 }
3778 });
3779 } else {
3780 buffers_to_delete.push(*buffer_id);
3781 }
3782 }
3783
3784 for buffer_id in buffers_to_delete {
3785 self.opened_buffers.remove(&buffer_id);
3786 }
3787
3788 for (buffer, old_path) in renamed_buffers {
3789 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3790 self.assign_language_to_buffer(&buffer, cx);
3791 self.register_buffer_with_language_server(&buffer, cx);
3792 }
3793 }
3794
3795 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3796 let new_active_entry = entry.and_then(|project_path| {
3797 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3798 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3799 Some(entry.id)
3800 });
3801 if new_active_entry != self.active_entry {
3802 self.active_entry = new_active_entry;
3803 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3804 }
3805 }
3806
3807 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3808 self.language_server_statuses
3809 .values()
3810 .any(|status| status.pending_diagnostic_updates > 0)
3811 }
3812
3813 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3814 let mut summary = DiagnosticSummary::default();
3815 for (_, path_summary) in self.diagnostic_summaries(cx) {
3816 summary.error_count += path_summary.error_count;
3817 summary.warning_count += path_summary.warning_count;
3818 }
3819 summary
3820 }
3821
3822 pub fn diagnostic_summaries<'a>(
3823 &'a self,
3824 cx: &'a AppContext,
3825 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3826 self.worktrees(cx).flat_map(move |worktree| {
3827 let worktree = worktree.read(cx);
3828 let worktree_id = worktree.id();
3829 worktree
3830 .diagnostic_summaries()
3831 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3832 })
3833 }
3834
3835 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3836 if self
3837 .language_server_statuses
3838 .values()
3839 .map(|status| status.pending_diagnostic_updates)
3840 .sum::<isize>()
3841 == 1
3842 {
3843 cx.emit(Event::DiskBasedDiagnosticsStarted);
3844 }
3845 }
3846
3847 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3848 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3849 if self
3850 .language_server_statuses
3851 .values()
3852 .map(|status| status.pending_diagnostic_updates)
3853 .sum::<isize>()
3854 == 0
3855 {
3856 cx.emit(Event::DiskBasedDiagnosticsFinished);
3857 }
3858 }
3859
3860 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3861 self.active_entry
3862 }
3863
3864 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3865 self.worktree_for_id(path.worktree_id, cx)?
3866 .read(cx)
3867 .entry_for_path(&path.path)
3868 .map(|entry| entry.id)
3869 }
3870
3871 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3872 let worktree = self.worktree_for_entry(entry_id, cx)?;
3873 let worktree = worktree.read(cx);
3874 let worktree_id = worktree.id();
3875 let path = worktree.entry_for_id(entry_id)?.path.clone();
3876 Some(ProjectPath { worktree_id, path })
3877 }
3878
3879 // RPC message handlers
3880
3881 async fn handle_request_join_project(
3882 this: ModelHandle<Self>,
3883 message: TypedEnvelope<proto::RequestJoinProject>,
3884 _: Arc<Client>,
3885 mut cx: AsyncAppContext,
3886 ) -> Result<()> {
3887 let user_id = message.payload.requester_id;
3888 if this.read_with(&cx, |project, _| {
3889 project.collaborators.values().any(|c| c.user.id == user_id)
3890 }) {
3891 this.update(&mut cx, |this, cx| {
3892 this.respond_to_join_request(user_id, true, cx)
3893 });
3894 } else {
3895 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3896 let user = user_store
3897 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3898 .await?;
3899 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3900 }
3901 Ok(())
3902 }
3903
3904 async fn handle_unregister_project(
3905 this: ModelHandle<Self>,
3906 _: TypedEnvelope<proto::UnregisterProject>,
3907 _: Arc<Client>,
3908 mut cx: AsyncAppContext,
3909 ) -> Result<()> {
3910 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3911 Ok(())
3912 }
3913
3914 async fn handle_project_unshared(
3915 this: ModelHandle<Self>,
3916 _: TypedEnvelope<proto::ProjectUnshared>,
3917 _: Arc<Client>,
3918 mut cx: AsyncAppContext,
3919 ) -> Result<()> {
3920 this.update(&mut cx, |this, cx| this.unshared(cx));
3921 Ok(())
3922 }
3923
3924 async fn handle_add_collaborator(
3925 this: ModelHandle<Self>,
3926 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3927 _: Arc<Client>,
3928 mut cx: AsyncAppContext,
3929 ) -> Result<()> {
3930 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3931 let collaborator = envelope
3932 .payload
3933 .collaborator
3934 .take()
3935 .ok_or_else(|| anyhow!("empty collaborator"))?;
3936
3937 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3938 this.update(&mut cx, |this, cx| {
3939 this.collaborators
3940 .insert(collaborator.peer_id, collaborator);
3941 cx.notify();
3942 });
3943
3944 Ok(())
3945 }
3946
3947 async fn handle_remove_collaborator(
3948 this: ModelHandle<Self>,
3949 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3950 _: Arc<Client>,
3951 mut cx: AsyncAppContext,
3952 ) -> Result<()> {
3953 this.update(&mut cx, |this, cx| {
3954 let peer_id = PeerId(envelope.payload.peer_id);
3955 let replica_id = this
3956 .collaborators
3957 .remove(&peer_id)
3958 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3959 .replica_id;
3960 for (_, buffer) in &this.opened_buffers {
3961 if let Some(buffer) = buffer.upgrade(cx) {
3962 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3963 }
3964 }
3965
3966 cx.emit(Event::CollaboratorLeft(peer_id));
3967 cx.notify();
3968 Ok(())
3969 })
3970 }
3971
3972 async fn handle_join_project_request_cancelled(
3973 this: ModelHandle<Self>,
3974 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3975 _: Arc<Client>,
3976 mut cx: AsyncAppContext,
3977 ) -> Result<()> {
3978 let user = this
3979 .update(&mut cx, |this, cx| {
3980 this.user_store.update(cx, |user_store, cx| {
3981 user_store.fetch_user(envelope.payload.requester_id, cx)
3982 })
3983 })
3984 .await?;
3985
3986 this.update(&mut cx, |_, cx| {
3987 cx.emit(Event::ContactCancelledJoinRequest(user));
3988 });
3989
3990 Ok(())
3991 }
3992
3993 async fn handle_register_worktree(
3994 this: ModelHandle<Self>,
3995 envelope: TypedEnvelope<proto::RegisterWorktree>,
3996 client: Arc<Client>,
3997 mut cx: AsyncAppContext,
3998 ) -> Result<()> {
3999 this.update(&mut cx, |this, cx| {
4000 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4001 let replica_id = this.replica_id();
4002 let worktree = proto::Worktree {
4003 id: envelope.payload.worktree_id,
4004 root_name: envelope.payload.root_name,
4005 entries: Default::default(),
4006 diagnostic_summaries: Default::default(),
4007 visible: envelope.payload.visible,
4008 scan_id: 0,
4009 };
4010 let (worktree, load_task) =
4011 Worktree::remote(remote_id, replica_id, worktree, client, cx);
4012 this.add_worktree(&worktree, cx);
4013 load_task.detach();
4014 Ok(())
4015 })
4016 }
4017
4018 async fn handle_unregister_worktree(
4019 this: ModelHandle<Self>,
4020 envelope: TypedEnvelope<proto::UnregisterWorktree>,
4021 _: Arc<Client>,
4022 mut cx: AsyncAppContext,
4023 ) -> Result<()> {
4024 this.update(&mut cx, |this, cx| {
4025 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4026 this.remove_worktree(worktree_id, cx);
4027 Ok(())
4028 })
4029 }
4030
4031 async fn handle_update_worktree(
4032 this: ModelHandle<Self>,
4033 envelope: TypedEnvelope<proto::UpdateWorktree>,
4034 _: Arc<Client>,
4035 mut cx: AsyncAppContext,
4036 ) -> Result<()> {
4037 this.update(&mut cx, |this, cx| {
4038 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4039 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4040 worktree.update(cx, |worktree, _| {
4041 let worktree = worktree.as_remote_mut().unwrap();
4042 worktree.update_from_remote(envelope)
4043 })?;
4044 }
4045 Ok(())
4046 })
4047 }
4048
4049 async fn handle_create_project_entry(
4050 this: ModelHandle<Self>,
4051 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4052 _: Arc<Client>,
4053 mut cx: AsyncAppContext,
4054 ) -> Result<proto::ProjectEntryResponse> {
4055 let worktree = this.update(&mut cx, |this, cx| {
4056 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4057 this.worktree_for_id(worktree_id, cx)
4058 .ok_or_else(|| anyhow!("worktree not found"))
4059 })?;
4060 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4061 let entry = worktree
4062 .update(&mut cx, |worktree, cx| {
4063 let worktree = worktree.as_local_mut().unwrap();
4064 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4065 worktree.create_entry(path, envelope.payload.is_directory, cx)
4066 })
4067 .await?;
4068 Ok(proto::ProjectEntryResponse {
4069 entry: Some((&entry).into()),
4070 worktree_scan_id: worktree_scan_id as u64,
4071 })
4072 }
4073
4074 async fn handle_rename_project_entry(
4075 this: ModelHandle<Self>,
4076 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4077 _: Arc<Client>,
4078 mut cx: AsyncAppContext,
4079 ) -> Result<proto::ProjectEntryResponse> {
4080 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4081 let worktree = this.read_with(&cx, |this, cx| {
4082 this.worktree_for_entry(entry_id, cx)
4083 .ok_or_else(|| anyhow!("worktree not found"))
4084 })?;
4085 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4086 let entry = worktree
4087 .update(&mut cx, |worktree, cx| {
4088 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4089 worktree
4090 .as_local_mut()
4091 .unwrap()
4092 .rename_entry(entry_id, new_path, cx)
4093 .ok_or_else(|| anyhow!("invalid entry"))
4094 })?
4095 .await?;
4096 Ok(proto::ProjectEntryResponse {
4097 entry: Some((&entry).into()),
4098 worktree_scan_id: worktree_scan_id as u64,
4099 })
4100 }
4101
4102 async fn handle_copy_project_entry(
4103 this: ModelHandle<Self>,
4104 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4105 _: Arc<Client>,
4106 mut cx: AsyncAppContext,
4107 ) -> Result<proto::ProjectEntryResponse> {
4108 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4109 let worktree = this.read_with(&cx, |this, cx| {
4110 this.worktree_for_entry(entry_id, cx)
4111 .ok_or_else(|| anyhow!("worktree not found"))
4112 })?;
4113 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4114 let entry = worktree
4115 .update(&mut cx, |worktree, cx| {
4116 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4117 worktree
4118 .as_local_mut()
4119 .unwrap()
4120 .copy_entry(entry_id, new_path, cx)
4121 .ok_or_else(|| anyhow!("invalid entry"))
4122 })?
4123 .await?;
4124 Ok(proto::ProjectEntryResponse {
4125 entry: Some((&entry).into()),
4126 worktree_scan_id: worktree_scan_id as u64,
4127 })
4128 }
4129
4130 async fn handle_delete_project_entry(
4131 this: ModelHandle<Self>,
4132 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4133 _: Arc<Client>,
4134 mut cx: AsyncAppContext,
4135 ) -> Result<proto::ProjectEntryResponse> {
4136 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4137 let worktree = this.read_with(&cx, |this, cx| {
4138 this.worktree_for_entry(entry_id, cx)
4139 .ok_or_else(|| anyhow!("worktree not found"))
4140 })?;
4141 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4142 worktree
4143 .update(&mut cx, |worktree, cx| {
4144 worktree
4145 .as_local_mut()
4146 .unwrap()
4147 .delete_entry(entry_id, cx)
4148 .ok_or_else(|| anyhow!("invalid entry"))
4149 })?
4150 .await?;
4151 Ok(proto::ProjectEntryResponse {
4152 entry: None,
4153 worktree_scan_id: worktree_scan_id as u64,
4154 })
4155 }
4156
4157 async fn handle_update_diagnostic_summary(
4158 this: ModelHandle<Self>,
4159 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4160 _: Arc<Client>,
4161 mut cx: AsyncAppContext,
4162 ) -> Result<()> {
4163 this.update(&mut cx, |this, cx| {
4164 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4165 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4166 if let Some(summary) = envelope.payload.summary {
4167 let project_path = ProjectPath {
4168 worktree_id,
4169 path: Path::new(&summary.path).into(),
4170 };
4171 worktree.update(cx, |worktree, _| {
4172 worktree
4173 .as_remote_mut()
4174 .unwrap()
4175 .update_diagnostic_summary(project_path.path.clone(), &summary);
4176 });
4177 cx.emit(Event::DiagnosticsUpdated(project_path));
4178 }
4179 }
4180 Ok(())
4181 })
4182 }
4183
4184 async fn handle_start_language_server(
4185 this: ModelHandle<Self>,
4186 envelope: TypedEnvelope<proto::StartLanguageServer>,
4187 _: Arc<Client>,
4188 mut cx: AsyncAppContext,
4189 ) -> Result<()> {
4190 let server = envelope
4191 .payload
4192 .server
4193 .ok_or_else(|| anyhow!("invalid server"))?;
4194 this.update(&mut cx, |this, cx| {
4195 this.language_server_statuses.insert(
4196 server.id as usize,
4197 LanguageServerStatus {
4198 name: server.name,
4199 pending_work: Default::default(),
4200 pending_diagnostic_updates: 0,
4201 },
4202 );
4203 cx.notify();
4204 });
4205 Ok(())
4206 }
4207
4208 async fn handle_update_language_server(
4209 this: ModelHandle<Self>,
4210 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4211 _: Arc<Client>,
4212 mut cx: AsyncAppContext,
4213 ) -> Result<()> {
4214 let language_server_id = envelope.payload.language_server_id as usize;
4215 match envelope
4216 .payload
4217 .variant
4218 .ok_or_else(|| anyhow!("invalid variant"))?
4219 {
4220 proto::update_language_server::Variant::WorkStart(payload) => {
4221 this.update(&mut cx, |this, cx| {
4222 this.on_lsp_work_start(language_server_id, payload.token, cx);
4223 })
4224 }
4225 proto::update_language_server::Variant::WorkProgress(payload) => {
4226 this.update(&mut cx, |this, cx| {
4227 this.on_lsp_work_progress(
4228 language_server_id,
4229 payload.token,
4230 LanguageServerProgress {
4231 message: payload.message,
4232 percentage: payload.percentage.map(|p| p as usize),
4233 last_update_at: Instant::now(),
4234 },
4235 cx,
4236 );
4237 })
4238 }
4239 proto::update_language_server::Variant::WorkEnd(payload) => {
4240 this.update(&mut cx, |this, cx| {
4241 this.on_lsp_work_end(language_server_id, payload.token, cx);
4242 })
4243 }
4244 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4245 this.update(&mut cx, |this, cx| {
4246 this.disk_based_diagnostics_started(cx);
4247 })
4248 }
4249 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4250 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4251 }
4252 }
4253
4254 Ok(())
4255 }
4256
4257 async fn handle_update_buffer(
4258 this: ModelHandle<Self>,
4259 envelope: TypedEnvelope<proto::UpdateBuffer>,
4260 _: Arc<Client>,
4261 mut cx: AsyncAppContext,
4262 ) -> Result<()> {
4263 this.update(&mut cx, |this, cx| {
4264 let payload = envelope.payload.clone();
4265 let buffer_id = payload.buffer_id;
4266 let ops = payload
4267 .operations
4268 .into_iter()
4269 .map(|op| language::proto::deserialize_operation(op))
4270 .collect::<Result<Vec<_>, _>>()?;
4271 let is_remote = this.is_remote();
4272 match this.opened_buffers.entry(buffer_id) {
4273 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4274 OpenBuffer::Strong(buffer) => {
4275 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4276 }
4277 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4278 OpenBuffer::Weak(_) => {}
4279 },
4280 hash_map::Entry::Vacant(e) => {
4281 assert!(
4282 is_remote,
4283 "received buffer update from {:?}",
4284 envelope.original_sender_id
4285 );
4286 e.insert(OpenBuffer::Loading(ops));
4287 }
4288 }
4289 Ok(())
4290 })
4291 }
4292
4293 async fn handle_update_buffer_file(
4294 this: ModelHandle<Self>,
4295 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4296 _: Arc<Client>,
4297 mut cx: AsyncAppContext,
4298 ) -> Result<()> {
4299 this.update(&mut cx, |this, cx| {
4300 let payload = envelope.payload.clone();
4301 let buffer_id = payload.buffer_id;
4302 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4303 let worktree = this
4304 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4305 .ok_or_else(|| anyhow!("no such worktree"))?;
4306 let file = File::from_proto(file, worktree.clone(), cx)?;
4307 let buffer = this
4308 .opened_buffers
4309 .get_mut(&buffer_id)
4310 .and_then(|b| b.upgrade(cx))
4311 .ok_or_else(|| anyhow!("no such buffer"))?;
4312 buffer.update(cx, |buffer, cx| {
4313 buffer.file_updated(Box::new(file), cx).detach();
4314 });
4315 Ok(())
4316 })
4317 }
4318
4319 async fn handle_save_buffer(
4320 this: ModelHandle<Self>,
4321 envelope: TypedEnvelope<proto::SaveBuffer>,
4322 _: Arc<Client>,
4323 mut cx: AsyncAppContext,
4324 ) -> Result<proto::BufferSaved> {
4325 let buffer_id = envelope.payload.buffer_id;
4326 let requested_version = deserialize_version(envelope.payload.version);
4327
4328 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4329 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4330 let buffer = this
4331 .opened_buffers
4332 .get(&buffer_id)
4333 .and_then(|buffer| buffer.upgrade(cx))
4334 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4335 Ok::<_, anyhow::Error>((project_id, buffer))
4336 })?;
4337 buffer
4338 .update(&mut cx, |buffer, _| {
4339 buffer.wait_for_version(requested_version)
4340 })
4341 .await;
4342
4343 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4344 Ok(proto::BufferSaved {
4345 project_id,
4346 buffer_id,
4347 version: serialize_version(&saved_version),
4348 mtime: Some(mtime.into()),
4349 })
4350 }
4351
4352 async fn handle_reload_buffers(
4353 this: ModelHandle<Self>,
4354 envelope: TypedEnvelope<proto::ReloadBuffers>,
4355 _: Arc<Client>,
4356 mut cx: AsyncAppContext,
4357 ) -> Result<proto::ReloadBuffersResponse> {
4358 let sender_id = envelope.original_sender_id()?;
4359 let reload = this.update(&mut cx, |this, cx| {
4360 let mut buffers = HashSet::default();
4361 for buffer_id in &envelope.payload.buffer_ids {
4362 buffers.insert(
4363 this.opened_buffers
4364 .get(buffer_id)
4365 .and_then(|buffer| buffer.upgrade(cx))
4366 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4367 );
4368 }
4369 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4370 })?;
4371
4372 let project_transaction = reload.await?;
4373 let project_transaction = this.update(&mut cx, |this, cx| {
4374 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4375 });
4376 Ok(proto::ReloadBuffersResponse {
4377 transaction: Some(project_transaction),
4378 })
4379 }
4380
4381 async fn handle_format_buffers(
4382 this: ModelHandle<Self>,
4383 envelope: TypedEnvelope<proto::FormatBuffers>,
4384 _: Arc<Client>,
4385 mut cx: AsyncAppContext,
4386 ) -> Result<proto::FormatBuffersResponse> {
4387 let sender_id = envelope.original_sender_id()?;
4388 let format = this.update(&mut cx, |this, cx| {
4389 let mut buffers = HashSet::default();
4390 for buffer_id in &envelope.payload.buffer_ids {
4391 buffers.insert(
4392 this.opened_buffers
4393 .get(buffer_id)
4394 .and_then(|buffer| buffer.upgrade(cx))
4395 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4396 );
4397 }
4398 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4399 })?;
4400
4401 let project_transaction = format.await?;
4402 let project_transaction = this.update(&mut cx, |this, cx| {
4403 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4404 });
4405 Ok(proto::FormatBuffersResponse {
4406 transaction: Some(project_transaction),
4407 })
4408 }
4409
4410 async fn handle_get_completions(
4411 this: ModelHandle<Self>,
4412 envelope: TypedEnvelope<proto::GetCompletions>,
4413 _: Arc<Client>,
4414 mut cx: AsyncAppContext,
4415 ) -> Result<proto::GetCompletionsResponse> {
4416 let position = envelope
4417 .payload
4418 .position
4419 .and_then(language::proto::deserialize_anchor)
4420 .ok_or_else(|| anyhow!("invalid position"))?;
4421 let version = deserialize_version(envelope.payload.version);
4422 let buffer = this.read_with(&cx, |this, cx| {
4423 this.opened_buffers
4424 .get(&envelope.payload.buffer_id)
4425 .and_then(|buffer| buffer.upgrade(cx))
4426 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4427 })?;
4428 buffer
4429 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4430 .await;
4431 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4432 let completions = this
4433 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4434 .await?;
4435
4436 Ok(proto::GetCompletionsResponse {
4437 completions: completions
4438 .iter()
4439 .map(language::proto::serialize_completion)
4440 .collect(),
4441 version: serialize_version(&version),
4442 })
4443 }
4444
4445 async fn handle_apply_additional_edits_for_completion(
4446 this: ModelHandle<Self>,
4447 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4448 _: Arc<Client>,
4449 mut cx: AsyncAppContext,
4450 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4451 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4452 let buffer = this
4453 .opened_buffers
4454 .get(&envelope.payload.buffer_id)
4455 .and_then(|buffer| buffer.upgrade(cx))
4456 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4457 let language = buffer.read(cx).language();
4458 let completion = language::proto::deserialize_completion(
4459 envelope
4460 .payload
4461 .completion
4462 .ok_or_else(|| anyhow!("invalid completion"))?,
4463 language,
4464 )?;
4465 Ok::<_, anyhow::Error>(
4466 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4467 )
4468 })?;
4469
4470 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4471 transaction: apply_additional_edits
4472 .await?
4473 .as_ref()
4474 .map(language::proto::serialize_transaction),
4475 })
4476 }
4477
4478 async fn handle_get_code_actions(
4479 this: ModelHandle<Self>,
4480 envelope: TypedEnvelope<proto::GetCodeActions>,
4481 _: Arc<Client>,
4482 mut cx: AsyncAppContext,
4483 ) -> Result<proto::GetCodeActionsResponse> {
4484 let start = envelope
4485 .payload
4486 .start
4487 .and_then(language::proto::deserialize_anchor)
4488 .ok_or_else(|| anyhow!("invalid start"))?;
4489 let end = envelope
4490 .payload
4491 .end
4492 .and_then(language::proto::deserialize_anchor)
4493 .ok_or_else(|| anyhow!("invalid end"))?;
4494 let buffer = this.update(&mut cx, |this, cx| {
4495 this.opened_buffers
4496 .get(&envelope.payload.buffer_id)
4497 .and_then(|buffer| buffer.upgrade(cx))
4498 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4499 })?;
4500 buffer
4501 .update(&mut cx, |buffer, _| {
4502 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4503 })
4504 .await;
4505
4506 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4507 let code_actions = this.update(&mut cx, |this, cx| {
4508 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4509 })?;
4510
4511 Ok(proto::GetCodeActionsResponse {
4512 actions: code_actions
4513 .await?
4514 .iter()
4515 .map(language::proto::serialize_code_action)
4516 .collect(),
4517 version: serialize_version(&version),
4518 })
4519 }
4520
4521 async fn handle_apply_code_action(
4522 this: ModelHandle<Self>,
4523 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4524 _: Arc<Client>,
4525 mut cx: AsyncAppContext,
4526 ) -> Result<proto::ApplyCodeActionResponse> {
4527 let sender_id = envelope.original_sender_id()?;
4528 let action = language::proto::deserialize_code_action(
4529 envelope
4530 .payload
4531 .action
4532 .ok_or_else(|| anyhow!("invalid action"))?,
4533 )?;
4534 let apply_code_action = this.update(&mut cx, |this, cx| {
4535 let buffer = this
4536 .opened_buffers
4537 .get(&envelope.payload.buffer_id)
4538 .and_then(|buffer| buffer.upgrade(cx))
4539 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4540 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4541 })?;
4542
4543 let project_transaction = apply_code_action.await?;
4544 let project_transaction = this.update(&mut cx, |this, cx| {
4545 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4546 });
4547 Ok(proto::ApplyCodeActionResponse {
4548 transaction: Some(project_transaction),
4549 })
4550 }
4551
4552 async fn handle_lsp_command<T: LspCommand>(
4553 this: ModelHandle<Self>,
4554 envelope: TypedEnvelope<T::ProtoRequest>,
4555 _: Arc<Client>,
4556 mut cx: AsyncAppContext,
4557 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4558 where
4559 <T::LspRequest as lsp::request::Request>::Result: Send,
4560 {
4561 let sender_id = envelope.original_sender_id()?;
4562 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4563 let buffer_handle = this.read_with(&cx, |this, _| {
4564 this.opened_buffers
4565 .get(&buffer_id)
4566 .and_then(|buffer| buffer.upgrade(&cx))
4567 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4568 })?;
4569 let request = T::from_proto(
4570 envelope.payload,
4571 this.clone(),
4572 buffer_handle.clone(),
4573 cx.clone(),
4574 )
4575 .await?;
4576 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4577 let response = this
4578 .update(&mut cx, |this, cx| {
4579 this.request_lsp(buffer_handle, request, cx)
4580 })
4581 .await?;
4582 this.update(&mut cx, |this, cx| {
4583 Ok(T::response_to_proto(
4584 response,
4585 this,
4586 sender_id,
4587 &buffer_version,
4588 cx,
4589 ))
4590 })
4591 }
4592
4593 async fn handle_get_project_symbols(
4594 this: ModelHandle<Self>,
4595 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4596 _: Arc<Client>,
4597 mut cx: AsyncAppContext,
4598 ) -> Result<proto::GetProjectSymbolsResponse> {
4599 let symbols = this
4600 .update(&mut cx, |this, cx| {
4601 this.symbols(&envelope.payload.query, cx)
4602 })
4603 .await?;
4604
4605 Ok(proto::GetProjectSymbolsResponse {
4606 symbols: symbols.iter().map(serialize_symbol).collect(),
4607 })
4608 }
4609
4610 async fn handle_search_project(
4611 this: ModelHandle<Self>,
4612 envelope: TypedEnvelope<proto::SearchProject>,
4613 _: Arc<Client>,
4614 mut cx: AsyncAppContext,
4615 ) -> Result<proto::SearchProjectResponse> {
4616 let peer_id = envelope.original_sender_id()?;
4617 let query = SearchQuery::from_proto(envelope.payload)?;
4618 let result = this
4619 .update(&mut cx, |this, cx| this.search(query, cx))
4620 .await?;
4621
4622 this.update(&mut cx, |this, cx| {
4623 let mut locations = Vec::new();
4624 for (buffer, ranges) in result {
4625 for range in ranges {
4626 let start = serialize_anchor(&range.start);
4627 let end = serialize_anchor(&range.end);
4628 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4629 locations.push(proto::Location {
4630 buffer: Some(buffer),
4631 start: Some(start),
4632 end: Some(end),
4633 });
4634 }
4635 }
4636 Ok(proto::SearchProjectResponse { locations })
4637 })
4638 }
4639
4640 async fn handle_open_buffer_for_symbol(
4641 this: ModelHandle<Self>,
4642 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4643 _: Arc<Client>,
4644 mut cx: AsyncAppContext,
4645 ) -> Result<proto::OpenBufferForSymbolResponse> {
4646 let peer_id = envelope.original_sender_id()?;
4647 let symbol = envelope
4648 .payload
4649 .symbol
4650 .ok_or_else(|| anyhow!("invalid symbol"))?;
4651 let symbol = this.read_with(&cx, |this, _| {
4652 let symbol = this.deserialize_symbol(symbol)?;
4653 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4654 if signature == symbol.signature {
4655 Ok(symbol)
4656 } else {
4657 Err(anyhow!("invalid symbol signature"))
4658 }
4659 })?;
4660 let buffer = this
4661 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4662 .await?;
4663
4664 Ok(proto::OpenBufferForSymbolResponse {
4665 buffer: Some(this.update(&mut cx, |this, cx| {
4666 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4667 })),
4668 })
4669 }
4670
4671 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4672 let mut hasher = Sha256::new();
4673 hasher.update(worktree_id.to_proto().to_be_bytes());
4674 hasher.update(path.to_string_lossy().as_bytes());
4675 hasher.update(self.nonce.to_be_bytes());
4676 hasher.finalize().as_slice().try_into().unwrap()
4677 }
4678
4679 async fn handle_open_buffer_by_id(
4680 this: ModelHandle<Self>,
4681 envelope: TypedEnvelope<proto::OpenBufferById>,
4682 _: Arc<Client>,
4683 mut cx: AsyncAppContext,
4684 ) -> Result<proto::OpenBufferResponse> {
4685 let peer_id = envelope.original_sender_id()?;
4686 let buffer = this
4687 .update(&mut cx, |this, cx| {
4688 this.open_buffer_by_id(envelope.payload.id, cx)
4689 })
4690 .await?;
4691 this.update(&mut cx, |this, cx| {
4692 Ok(proto::OpenBufferResponse {
4693 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4694 })
4695 })
4696 }
4697
4698 async fn handle_open_buffer_by_path(
4699 this: ModelHandle<Self>,
4700 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4701 _: Arc<Client>,
4702 mut cx: AsyncAppContext,
4703 ) -> Result<proto::OpenBufferResponse> {
4704 let peer_id = envelope.original_sender_id()?;
4705 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4706 let open_buffer = this.update(&mut cx, |this, cx| {
4707 this.open_buffer(
4708 ProjectPath {
4709 worktree_id,
4710 path: PathBuf::from(envelope.payload.path).into(),
4711 },
4712 cx,
4713 )
4714 });
4715
4716 let buffer = open_buffer.await?;
4717 this.update(&mut cx, |this, cx| {
4718 Ok(proto::OpenBufferResponse {
4719 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4720 })
4721 })
4722 }
4723
4724 fn serialize_project_transaction_for_peer(
4725 &mut self,
4726 project_transaction: ProjectTransaction,
4727 peer_id: PeerId,
4728 cx: &AppContext,
4729 ) -> proto::ProjectTransaction {
4730 let mut serialized_transaction = proto::ProjectTransaction {
4731 buffers: Default::default(),
4732 transactions: Default::default(),
4733 };
4734 for (buffer, transaction) in project_transaction.0 {
4735 serialized_transaction
4736 .buffers
4737 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4738 serialized_transaction
4739 .transactions
4740 .push(language::proto::serialize_transaction(&transaction));
4741 }
4742 serialized_transaction
4743 }
4744
4745 fn deserialize_project_transaction(
4746 &mut self,
4747 message: proto::ProjectTransaction,
4748 push_to_history: bool,
4749 cx: &mut ModelContext<Self>,
4750 ) -> Task<Result<ProjectTransaction>> {
4751 cx.spawn(|this, mut cx| async move {
4752 let mut project_transaction = ProjectTransaction::default();
4753 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4754 let buffer = this
4755 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4756 .await?;
4757 let transaction = language::proto::deserialize_transaction(transaction)?;
4758 project_transaction.0.insert(buffer, transaction);
4759 }
4760
4761 for (buffer, transaction) in &project_transaction.0 {
4762 buffer
4763 .update(&mut cx, |buffer, _| {
4764 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4765 })
4766 .await;
4767
4768 if push_to_history {
4769 buffer.update(&mut cx, |buffer, _| {
4770 buffer.push_transaction(transaction.clone(), Instant::now());
4771 });
4772 }
4773 }
4774
4775 Ok(project_transaction)
4776 })
4777 }
4778
4779 fn serialize_buffer_for_peer(
4780 &mut self,
4781 buffer: &ModelHandle<Buffer>,
4782 peer_id: PeerId,
4783 cx: &AppContext,
4784 ) -> proto::Buffer {
4785 let buffer_id = buffer.read(cx).remote_id();
4786 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4787 if shared_buffers.insert(buffer_id) {
4788 proto::Buffer {
4789 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4790 }
4791 } else {
4792 proto::Buffer {
4793 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4794 }
4795 }
4796 }
4797
4798 fn deserialize_buffer(
4799 &mut self,
4800 buffer: proto::Buffer,
4801 cx: &mut ModelContext<Self>,
4802 ) -> Task<Result<ModelHandle<Buffer>>> {
4803 let replica_id = self.replica_id();
4804
4805 let opened_buffer_tx = self.opened_buffer.0.clone();
4806 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4807 cx.spawn(|this, mut cx| async move {
4808 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4809 proto::buffer::Variant::Id(id) => {
4810 let buffer = loop {
4811 let buffer = this.read_with(&cx, |this, cx| {
4812 this.opened_buffers
4813 .get(&id)
4814 .and_then(|buffer| buffer.upgrade(cx))
4815 });
4816 if let Some(buffer) = buffer {
4817 break buffer;
4818 }
4819 opened_buffer_rx
4820 .next()
4821 .await
4822 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4823 };
4824 Ok(buffer)
4825 }
4826 proto::buffer::Variant::State(mut buffer) => {
4827 let mut buffer_worktree = None;
4828 let mut buffer_file = None;
4829 if let Some(file) = buffer.file.take() {
4830 this.read_with(&cx, |this, cx| {
4831 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4832 let worktree =
4833 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4834 anyhow!("no worktree found for id {}", file.worktree_id)
4835 })?;
4836 buffer_file =
4837 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4838 as Box<dyn language::File>);
4839 buffer_worktree = Some(worktree);
4840 Ok::<_, anyhow::Error>(())
4841 })?;
4842 }
4843
4844 let buffer = cx.add_model(|cx| {
4845 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4846 });
4847
4848 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4849
4850 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4851 Ok(buffer)
4852 }
4853 }
4854 })
4855 }
4856
4857 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4858 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4859 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4860 let start = serialized_symbol
4861 .start
4862 .ok_or_else(|| anyhow!("invalid start"))?;
4863 let end = serialized_symbol
4864 .end
4865 .ok_or_else(|| anyhow!("invalid end"))?;
4866 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4867 let path = PathBuf::from(serialized_symbol.path);
4868 let language = self.languages.select_language(&path);
4869 Ok(Symbol {
4870 source_worktree_id,
4871 worktree_id,
4872 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4873 label: language
4874 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4875 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4876 name: serialized_symbol.name,
4877 path,
4878 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4879 kind,
4880 signature: serialized_symbol
4881 .signature
4882 .try_into()
4883 .map_err(|_| anyhow!("invalid signature"))?,
4884 })
4885 }
4886
4887 async fn handle_buffer_saved(
4888 this: ModelHandle<Self>,
4889 envelope: TypedEnvelope<proto::BufferSaved>,
4890 _: Arc<Client>,
4891 mut cx: AsyncAppContext,
4892 ) -> Result<()> {
4893 let version = deserialize_version(envelope.payload.version);
4894 let mtime = envelope
4895 .payload
4896 .mtime
4897 .ok_or_else(|| anyhow!("missing mtime"))?
4898 .into();
4899
4900 this.update(&mut cx, |this, cx| {
4901 let buffer = this
4902 .opened_buffers
4903 .get(&envelope.payload.buffer_id)
4904 .and_then(|buffer| buffer.upgrade(cx));
4905 if let Some(buffer) = buffer {
4906 buffer.update(cx, |buffer, cx| {
4907 buffer.did_save(version, mtime, None, cx);
4908 });
4909 }
4910 Ok(())
4911 })
4912 }
4913
4914 async fn handle_buffer_reloaded(
4915 this: ModelHandle<Self>,
4916 envelope: TypedEnvelope<proto::BufferReloaded>,
4917 _: Arc<Client>,
4918 mut cx: AsyncAppContext,
4919 ) -> Result<()> {
4920 let payload = envelope.payload.clone();
4921 let version = deserialize_version(payload.version);
4922 let mtime = payload
4923 .mtime
4924 .ok_or_else(|| anyhow!("missing mtime"))?
4925 .into();
4926 this.update(&mut cx, |this, cx| {
4927 let buffer = this
4928 .opened_buffers
4929 .get(&payload.buffer_id)
4930 .and_then(|buffer| buffer.upgrade(cx));
4931 if let Some(buffer) = buffer {
4932 buffer.update(cx, |buffer, cx| {
4933 buffer.did_reload(version, mtime, cx);
4934 });
4935 }
4936 Ok(())
4937 })
4938 }
4939
4940 pub fn match_paths<'a>(
4941 &self,
4942 query: &'a str,
4943 include_ignored: bool,
4944 smart_case: bool,
4945 max_results: usize,
4946 cancel_flag: &'a AtomicBool,
4947 cx: &AppContext,
4948 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4949 let worktrees = self
4950 .worktrees(cx)
4951 .filter(|worktree| worktree.read(cx).is_visible())
4952 .collect::<Vec<_>>();
4953 let include_root_name = worktrees.len() > 1;
4954 let candidate_sets = worktrees
4955 .into_iter()
4956 .map(|worktree| CandidateSet {
4957 snapshot: worktree.read(cx).snapshot(),
4958 include_ignored,
4959 include_root_name,
4960 })
4961 .collect::<Vec<_>>();
4962
4963 let background = cx.background().clone();
4964 async move {
4965 fuzzy::match_paths(
4966 candidate_sets.as_slice(),
4967 query,
4968 smart_case,
4969 max_results,
4970 cancel_flag,
4971 background,
4972 )
4973 .await
4974 }
4975 }
4976
4977 fn edits_from_lsp(
4978 &mut self,
4979 buffer: &ModelHandle<Buffer>,
4980 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4981 version: Option<i32>,
4982 cx: &mut ModelContext<Self>,
4983 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4984 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4985 cx.background().spawn(async move {
4986 let snapshot = snapshot?;
4987 let mut lsp_edits = lsp_edits
4988 .into_iter()
4989 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4990 .peekable();
4991
4992 let mut edits = Vec::new();
4993 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4994 // Combine any LSP edits that are adjacent.
4995 //
4996 // Also, combine LSP edits that are separated from each other by only
4997 // a newline. This is important because for some code actions,
4998 // Rust-analyzer rewrites the entire buffer via a series of edits that
4999 // are separated by unchanged newline characters.
5000 //
5001 // In order for the diffing logic below to work properly, any edits that
5002 // cancel each other out must be combined into one.
5003 while let Some((next_range, next_text)) = lsp_edits.peek() {
5004 if next_range.start > range.end {
5005 if next_range.start.row > range.end.row + 1
5006 || next_range.start.column > 0
5007 || snapshot.clip_point_utf16(
5008 PointUtf16::new(range.end.row, u32::MAX),
5009 Bias::Left,
5010 ) > range.end
5011 {
5012 break;
5013 }
5014 new_text.push('\n');
5015 }
5016 range.end = next_range.end;
5017 new_text.push_str(&next_text);
5018 lsp_edits.next();
5019 }
5020
5021 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5022 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5023 {
5024 return Err(anyhow!("invalid edits received from language server"));
5025 }
5026
5027 // For multiline edits, perform a diff of the old and new text so that
5028 // we can identify the changes more precisely, preserving the locations
5029 // of any anchors positioned in the unchanged regions.
5030 if range.end.row > range.start.row {
5031 let mut offset = range.start.to_offset(&snapshot);
5032 let old_text = snapshot.text_for_range(range).collect::<String>();
5033
5034 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5035 let mut moved_since_edit = true;
5036 for change in diff.iter_all_changes() {
5037 let tag = change.tag();
5038 let value = change.value();
5039 match tag {
5040 ChangeTag::Equal => {
5041 offset += value.len();
5042 moved_since_edit = true;
5043 }
5044 ChangeTag::Delete => {
5045 let start = snapshot.anchor_after(offset);
5046 let end = snapshot.anchor_before(offset + value.len());
5047 if moved_since_edit {
5048 edits.push((start..end, String::new()));
5049 } else {
5050 edits.last_mut().unwrap().0.end = end;
5051 }
5052 offset += value.len();
5053 moved_since_edit = false;
5054 }
5055 ChangeTag::Insert => {
5056 if moved_since_edit {
5057 let anchor = snapshot.anchor_after(offset);
5058 edits.push((anchor.clone()..anchor, value.to_string()));
5059 } else {
5060 edits.last_mut().unwrap().1.push_str(value);
5061 }
5062 moved_since_edit = false;
5063 }
5064 }
5065 }
5066 } else if range.end == range.start {
5067 let anchor = snapshot.anchor_after(range.start);
5068 edits.push((anchor.clone()..anchor, new_text));
5069 } else {
5070 let edit_start = snapshot.anchor_after(range.start);
5071 let edit_end = snapshot.anchor_before(range.end);
5072 edits.push((edit_start..edit_end, new_text));
5073 }
5074 }
5075
5076 Ok(edits)
5077 })
5078 }
5079
5080 fn buffer_snapshot_for_lsp_version(
5081 &mut self,
5082 buffer: &ModelHandle<Buffer>,
5083 version: Option<i32>,
5084 cx: &AppContext,
5085 ) -> Result<TextBufferSnapshot> {
5086 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5087
5088 if let Some(version) = version {
5089 let buffer_id = buffer.read(cx).remote_id();
5090 let snapshots = self
5091 .buffer_snapshots
5092 .get_mut(&buffer_id)
5093 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5094 let mut found_snapshot = None;
5095 snapshots.retain(|(snapshot_version, snapshot)| {
5096 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5097 false
5098 } else {
5099 if *snapshot_version == version {
5100 found_snapshot = Some(snapshot.clone());
5101 }
5102 true
5103 }
5104 });
5105
5106 found_snapshot.ok_or_else(|| {
5107 anyhow!(
5108 "snapshot not found for buffer {} at version {}",
5109 buffer_id,
5110 version
5111 )
5112 })
5113 } else {
5114 Ok((buffer.read(cx)).text_snapshot())
5115 }
5116 }
5117
5118 fn language_server_for_buffer(
5119 &self,
5120 buffer: &Buffer,
5121 cx: &AppContext,
5122 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5123 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5124 let worktree_id = file.worktree_id(cx);
5125 self.language_servers
5126 .get(&(worktree_id, language.lsp_adapter()?.name()))
5127 } else {
5128 None
5129 }
5130 }
5131}
5132
5133impl WorktreeHandle {
5134 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5135 match self {
5136 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5137 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5138 }
5139 }
5140}
5141
5142impl OpenBuffer {
5143 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5144 match self {
5145 OpenBuffer::Strong(handle) => Some(handle.clone()),
5146 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5147 OpenBuffer::Loading(_) => None,
5148 }
5149 }
5150}
5151
5152struct CandidateSet {
5153 snapshot: Snapshot,
5154 include_ignored: bool,
5155 include_root_name: bool,
5156}
5157
5158impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5159 type Candidates = CandidateSetIter<'a>;
5160
5161 fn id(&self) -> usize {
5162 self.snapshot.id().to_usize()
5163 }
5164
5165 fn len(&self) -> usize {
5166 if self.include_ignored {
5167 self.snapshot.file_count()
5168 } else {
5169 self.snapshot.visible_file_count()
5170 }
5171 }
5172
5173 fn prefix(&self) -> Arc<str> {
5174 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5175 self.snapshot.root_name().into()
5176 } else if self.include_root_name {
5177 format!("{}/", self.snapshot.root_name()).into()
5178 } else {
5179 "".into()
5180 }
5181 }
5182
5183 fn candidates(&'a self, start: usize) -> Self::Candidates {
5184 CandidateSetIter {
5185 traversal: self.snapshot.files(self.include_ignored, start),
5186 }
5187 }
5188}
5189
5190struct CandidateSetIter<'a> {
5191 traversal: Traversal<'a>,
5192}
5193
5194impl<'a> Iterator for CandidateSetIter<'a> {
5195 type Item = PathMatchCandidate<'a>;
5196
5197 fn next(&mut self) -> Option<Self::Item> {
5198 self.traversal.next().map(|entry| {
5199 if let EntryKind::File(char_bag) = entry.kind {
5200 PathMatchCandidate {
5201 path: &entry.path,
5202 char_bag,
5203 }
5204 } else {
5205 unreachable!()
5206 }
5207 })
5208 }
5209}
5210
5211impl Entity for Project {
5212 type Event = Event;
5213
5214 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5215 match &self.client_state {
5216 ProjectClientState::Local { remote_id_rx, .. } => {
5217 if let Some(project_id) = *remote_id_rx.borrow() {
5218 self.client
5219 .send(proto::UnregisterProject { project_id })
5220 .log_err();
5221 }
5222 }
5223 ProjectClientState::Remote { remote_id, .. } => {
5224 self.client
5225 .send(proto::LeaveProject {
5226 project_id: *remote_id,
5227 })
5228 .log_err();
5229 }
5230 }
5231 }
5232
5233 fn app_will_quit(
5234 &mut self,
5235 _: &mut MutableAppContext,
5236 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5237 let shutdown_futures = self
5238 .language_servers
5239 .drain()
5240 .filter_map(|(_, (_, server))| server.shutdown())
5241 .collect::<Vec<_>>();
5242 Some(
5243 async move {
5244 futures::future::join_all(shutdown_futures).await;
5245 }
5246 .boxed(),
5247 )
5248 }
5249}
5250
5251impl Collaborator {
5252 fn from_proto(
5253 message: proto::Collaborator,
5254 user_store: &ModelHandle<UserStore>,
5255 cx: &mut AsyncAppContext,
5256 ) -> impl Future<Output = Result<Self>> {
5257 let user = user_store.update(cx, |user_store, cx| {
5258 user_store.fetch_user(message.user_id, cx)
5259 });
5260
5261 async move {
5262 Ok(Self {
5263 peer_id: PeerId(message.peer_id),
5264 user: user.await?,
5265 replica_id: message.replica_id as ReplicaId,
5266 })
5267 }
5268 }
5269}
5270
5271impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5272 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5273 Self {
5274 worktree_id,
5275 path: path.as_ref().into(),
5276 }
5277 }
5278}
5279
5280impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5281 fn from(options: lsp::CreateFileOptions) -> Self {
5282 Self {
5283 overwrite: options.overwrite.unwrap_or(false),
5284 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5285 }
5286 }
5287}
5288
5289impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5290 fn from(options: lsp::RenameFileOptions) -> Self {
5291 Self {
5292 overwrite: options.overwrite.unwrap_or(false),
5293 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5294 }
5295 }
5296}
5297
5298impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5299 fn from(options: lsp::DeleteFileOptions) -> Self {
5300 Self {
5301 recursive: options.recursive.unwrap_or(false),
5302 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5303 }
5304 }
5305}
5306
5307fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5308 proto::Symbol {
5309 source_worktree_id: symbol.source_worktree_id.to_proto(),
5310 worktree_id: symbol.worktree_id.to_proto(),
5311 language_server_name: symbol.language_server_name.0.to_string(),
5312 name: symbol.name.clone(),
5313 kind: unsafe { mem::transmute(symbol.kind) },
5314 path: symbol.path.to_string_lossy().to_string(),
5315 start: Some(proto::Point {
5316 row: symbol.range.start.row,
5317 column: symbol.range.start.column,
5318 }),
5319 end: Some(proto::Point {
5320 row: symbol.range.end.row,
5321 column: symbol.range.end.column,
5322 }),
5323 signature: symbol.signature.to_vec(),
5324 }
5325}
5326
5327fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5328 let mut path_components = path.components();
5329 let mut base_components = base.components();
5330 let mut components: Vec<Component> = Vec::new();
5331 loop {
5332 match (path_components.next(), base_components.next()) {
5333 (None, None) => break,
5334 (Some(a), None) => {
5335 components.push(a);
5336 components.extend(path_components.by_ref());
5337 break;
5338 }
5339 (None, _) => components.push(Component::ParentDir),
5340 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5341 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5342 (Some(a), Some(_)) => {
5343 components.push(Component::ParentDir);
5344 for _ in base_components {
5345 components.push(Component::ParentDir);
5346 }
5347 components.push(a);
5348 components.extend(path_components.by_ref());
5349 break;
5350 }
5351 }
5352 }
5353 components.iter().map(|c| c.as_os_str()).collect()
5354}
5355
5356impl Item for Buffer {
5357 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5358 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5359 }
5360}
5361
5362#[cfg(test)]
5363mod tests {
5364 use crate::worktree::WorktreeHandle;
5365
5366 use super::{Event, *};
5367 use fs::RealFs;
5368 use futures::{future, StreamExt};
5369 use gpui::test::subscribe;
5370 use language::{
5371 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5372 OffsetRangeExt, Point, ToPoint,
5373 };
5374 use lsp::Url;
5375 use serde_json::json;
5376 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5377 use unindent::Unindent as _;
5378 use util::{assert_set_eq, test::temp_tree};
5379
5380 #[gpui::test]
5381 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5382 let dir = temp_tree(json!({
5383 "root": {
5384 "apple": "",
5385 "banana": {
5386 "carrot": {
5387 "date": "",
5388 "endive": "",
5389 }
5390 },
5391 "fennel": {
5392 "grape": "",
5393 }
5394 }
5395 }));
5396
5397 let root_link_path = dir.path().join("root_link");
5398 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5399 unix::fs::symlink(
5400 &dir.path().join("root/fennel"),
5401 &dir.path().join("root/finnochio"),
5402 )
5403 .unwrap();
5404
5405 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5406
5407 project.read_with(cx, |project, cx| {
5408 let tree = project.worktrees(cx).next().unwrap().read(cx);
5409 assert_eq!(tree.file_count(), 5);
5410 assert_eq!(
5411 tree.inode_for_path("fennel/grape"),
5412 tree.inode_for_path("finnochio/grape")
5413 );
5414 });
5415
5416 let cancel_flag = Default::default();
5417 let results = project
5418 .read_with(cx, |project, cx| {
5419 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5420 })
5421 .await;
5422 assert_eq!(
5423 results
5424 .into_iter()
5425 .map(|result| result.path)
5426 .collect::<Vec<Arc<Path>>>(),
5427 vec![
5428 PathBuf::from("banana/carrot/date").into(),
5429 PathBuf::from("banana/carrot/endive").into(),
5430 ]
5431 );
5432 }
5433
5434 #[gpui::test]
5435 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5436 cx.foreground().forbid_parking();
5437
5438 let mut rust_language = Language::new(
5439 LanguageConfig {
5440 name: "Rust".into(),
5441 path_suffixes: vec!["rs".to_string()],
5442 ..Default::default()
5443 },
5444 Some(tree_sitter_rust::language()),
5445 );
5446 let mut json_language = Language::new(
5447 LanguageConfig {
5448 name: "JSON".into(),
5449 path_suffixes: vec!["json".to_string()],
5450 ..Default::default()
5451 },
5452 None,
5453 );
5454 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5455 name: "the-rust-language-server",
5456 capabilities: lsp::ServerCapabilities {
5457 completion_provider: Some(lsp::CompletionOptions {
5458 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5459 ..Default::default()
5460 }),
5461 ..Default::default()
5462 },
5463 ..Default::default()
5464 });
5465 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5466 name: "the-json-language-server",
5467 capabilities: lsp::ServerCapabilities {
5468 completion_provider: Some(lsp::CompletionOptions {
5469 trigger_characters: Some(vec![":".to_string()]),
5470 ..Default::default()
5471 }),
5472 ..Default::default()
5473 },
5474 ..Default::default()
5475 });
5476
5477 let fs = FakeFs::new(cx.background());
5478 fs.insert_tree(
5479 "/the-root",
5480 json!({
5481 "test.rs": "const A: i32 = 1;",
5482 "test2.rs": "",
5483 "Cargo.toml": "a = 1",
5484 "package.json": "{\"a\": 1}",
5485 }),
5486 )
5487 .await;
5488
5489 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5490 project.update(cx, |project, _| {
5491 project.languages.add(Arc::new(rust_language));
5492 project.languages.add(Arc::new(json_language));
5493 });
5494
5495 // Open a buffer without an associated language server.
5496 let toml_buffer = project
5497 .update(cx, |project, cx| {
5498 project.open_local_buffer("/the-root/Cargo.toml", cx)
5499 })
5500 .await
5501 .unwrap();
5502
5503 // Open a buffer with an associated language server.
5504 let rust_buffer = project
5505 .update(cx, |project, cx| {
5506 project.open_local_buffer("/the-root/test.rs", cx)
5507 })
5508 .await
5509 .unwrap();
5510
5511 // A server is started up, and it is notified about Rust files.
5512 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5513 assert_eq!(
5514 fake_rust_server
5515 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5516 .await
5517 .text_document,
5518 lsp::TextDocumentItem {
5519 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5520 version: 0,
5521 text: "const A: i32 = 1;".to_string(),
5522 language_id: Default::default()
5523 }
5524 );
5525
5526 // The buffer is configured based on the language server's capabilities.
5527 rust_buffer.read_with(cx, |buffer, _| {
5528 assert_eq!(
5529 buffer.completion_triggers(),
5530 &[".".to_string(), "::".to_string()]
5531 );
5532 });
5533 toml_buffer.read_with(cx, |buffer, _| {
5534 assert!(buffer.completion_triggers().is_empty());
5535 });
5536
5537 // Edit a buffer. The changes are reported to the language server.
5538 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5539 assert_eq!(
5540 fake_rust_server
5541 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5542 .await
5543 .text_document,
5544 lsp::VersionedTextDocumentIdentifier::new(
5545 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5546 1
5547 )
5548 );
5549
5550 // Open a third buffer with a different associated language server.
5551 let json_buffer = project
5552 .update(cx, |project, cx| {
5553 project.open_local_buffer("/the-root/package.json", cx)
5554 })
5555 .await
5556 .unwrap();
5557
5558 // A json language server is started up and is only notified about the json buffer.
5559 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5560 assert_eq!(
5561 fake_json_server
5562 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5563 .await
5564 .text_document,
5565 lsp::TextDocumentItem {
5566 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5567 version: 0,
5568 text: "{\"a\": 1}".to_string(),
5569 language_id: Default::default()
5570 }
5571 );
5572
5573 // This buffer is configured based on the second language server's
5574 // capabilities.
5575 json_buffer.read_with(cx, |buffer, _| {
5576 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5577 });
5578
5579 // When opening another buffer whose language server is already running,
5580 // it is also configured based on the existing language server's capabilities.
5581 let rust_buffer2 = project
5582 .update(cx, |project, cx| {
5583 project.open_local_buffer("/the-root/test2.rs", cx)
5584 })
5585 .await
5586 .unwrap();
5587 rust_buffer2.read_with(cx, |buffer, _| {
5588 assert_eq!(
5589 buffer.completion_triggers(),
5590 &[".".to_string(), "::".to_string()]
5591 );
5592 });
5593
5594 // Changes are reported only to servers matching the buffer's language.
5595 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5596 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5597 assert_eq!(
5598 fake_rust_server
5599 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5600 .await
5601 .text_document,
5602 lsp::VersionedTextDocumentIdentifier::new(
5603 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5604 1
5605 )
5606 );
5607
5608 // Save notifications are reported to all servers.
5609 toml_buffer
5610 .update(cx, |buffer, cx| buffer.save(cx))
5611 .await
5612 .unwrap();
5613 assert_eq!(
5614 fake_rust_server
5615 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5616 .await
5617 .text_document,
5618 lsp::TextDocumentIdentifier::new(
5619 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5620 )
5621 );
5622 assert_eq!(
5623 fake_json_server
5624 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5625 .await
5626 .text_document,
5627 lsp::TextDocumentIdentifier::new(
5628 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5629 )
5630 );
5631
5632 // Renames are reported only to servers matching the buffer's language.
5633 fs.rename(
5634 Path::new("/the-root/test2.rs"),
5635 Path::new("/the-root/test3.rs"),
5636 Default::default(),
5637 )
5638 .await
5639 .unwrap();
5640 assert_eq!(
5641 fake_rust_server
5642 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5643 .await
5644 .text_document,
5645 lsp::TextDocumentIdentifier::new(
5646 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5647 ),
5648 );
5649 assert_eq!(
5650 fake_rust_server
5651 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5652 .await
5653 .text_document,
5654 lsp::TextDocumentItem {
5655 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5656 version: 0,
5657 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5658 language_id: Default::default()
5659 },
5660 );
5661
5662 rust_buffer2.update(cx, |buffer, cx| {
5663 buffer.update_diagnostics(
5664 DiagnosticSet::from_sorted_entries(
5665 vec![DiagnosticEntry {
5666 diagnostic: Default::default(),
5667 range: Anchor::MIN..Anchor::MAX,
5668 }],
5669 &buffer.snapshot(),
5670 ),
5671 cx,
5672 );
5673 assert_eq!(
5674 buffer
5675 .snapshot()
5676 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5677 .count(),
5678 1
5679 );
5680 });
5681
5682 // When the rename changes the extension of the file, the buffer gets closed on the old
5683 // language server and gets opened on the new one.
5684 fs.rename(
5685 Path::new("/the-root/test3.rs"),
5686 Path::new("/the-root/test3.json"),
5687 Default::default(),
5688 )
5689 .await
5690 .unwrap();
5691 assert_eq!(
5692 fake_rust_server
5693 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5694 .await
5695 .text_document,
5696 lsp::TextDocumentIdentifier::new(
5697 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5698 ),
5699 );
5700 assert_eq!(
5701 fake_json_server
5702 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5703 .await
5704 .text_document,
5705 lsp::TextDocumentItem {
5706 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5707 version: 0,
5708 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5709 language_id: Default::default()
5710 },
5711 );
5712
5713 // We clear the diagnostics, since the language has changed.
5714 rust_buffer2.read_with(cx, |buffer, _| {
5715 assert_eq!(
5716 buffer
5717 .snapshot()
5718 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5719 .count(),
5720 0
5721 );
5722 });
5723
5724 // The renamed file's version resets after changing language server.
5725 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5726 assert_eq!(
5727 fake_json_server
5728 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5729 .await
5730 .text_document,
5731 lsp::VersionedTextDocumentIdentifier::new(
5732 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5733 1
5734 )
5735 );
5736
5737 // Restart language servers
5738 project.update(cx, |project, cx| {
5739 project.restart_language_servers_for_buffers(
5740 vec![rust_buffer.clone(), json_buffer.clone()],
5741 cx,
5742 );
5743 });
5744
5745 let mut rust_shutdown_requests = fake_rust_server
5746 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5747 let mut json_shutdown_requests = fake_json_server
5748 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5749 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5750
5751 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5752 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5753
5754 // Ensure rust document is reopened in new rust language server
5755 assert_eq!(
5756 fake_rust_server
5757 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5758 .await
5759 .text_document,
5760 lsp::TextDocumentItem {
5761 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5762 version: 1,
5763 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5764 language_id: Default::default()
5765 }
5766 );
5767
5768 // Ensure json documents are reopened in new json language server
5769 assert_set_eq!(
5770 [
5771 fake_json_server
5772 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5773 .await
5774 .text_document,
5775 fake_json_server
5776 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5777 .await
5778 .text_document,
5779 ],
5780 [
5781 lsp::TextDocumentItem {
5782 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5783 version: 0,
5784 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5785 language_id: Default::default()
5786 },
5787 lsp::TextDocumentItem {
5788 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5789 version: 1,
5790 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5791 language_id: Default::default()
5792 }
5793 ]
5794 );
5795
5796 // Close notifications are reported only to servers matching the buffer's language.
5797 cx.update(|_| drop(json_buffer));
5798 let close_message = lsp::DidCloseTextDocumentParams {
5799 text_document: lsp::TextDocumentIdentifier::new(
5800 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5801 ),
5802 };
5803 assert_eq!(
5804 fake_json_server
5805 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5806 .await,
5807 close_message,
5808 );
5809 }
5810
5811 #[gpui::test]
5812 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5813 cx.foreground().forbid_parking();
5814
5815 let fs = FakeFs::new(cx.background());
5816 fs.insert_tree(
5817 "/dir",
5818 json!({
5819 "a.rs": "let a = 1;",
5820 "b.rs": "let b = 2;"
5821 }),
5822 )
5823 .await;
5824
5825 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5826
5827 let buffer_a = project
5828 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5829 .await
5830 .unwrap();
5831 let buffer_b = project
5832 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5833 .await
5834 .unwrap();
5835
5836 project.update(cx, |project, cx| {
5837 project
5838 .update_diagnostics(
5839 lsp::PublishDiagnosticsParams {
5840 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5841 version: None,
5842 diagnostics: vec![lsp::Diagnostic {
5843 range: lsp::Range::new(
5844 lsp::Position::new(0, 4),
5845 lsp::Position::new(0, 5),
5846 ),
5847 severity: Some(lsp::DiagnosticSeverity::ERROR),
5848 message: "error 1".to_string(),
5849 ..Default::default()
5850 }],
5851 },
5852 &[],
5853 cx,
5854 )
5855 .unwrap();
5856 project
5857 .update_diagnostics(
5858 lsp::PublishDiagnosticsParams {
5859 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5860 version: None,
5861 diagnostics: vec![lsp::Diagnostic {
5862 range: lsp::Range::new(
5863 lsp::Position::new(0, 4),
5864 lsp::Position::new(0, 5),
5865 ),
5866 severity: Some(lsp::DiagnosticSeverity::WARNING),
5867 message: "error 2".to_string(),
5868 ..Default::default()
5869 }],
5870 },
5871 &[],
5872 cx,
5873 )
5874 .unwrap();
5875 });
5876
5877 buffer_a.read_with(cx, |buffer, _| {
5878 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5879 assert_eq!(
5880 chunks
5881 .iter()
5882 .map(|(s, d)| (s.as_str(), *d))
5883 .collect::<Vec<_>>(),
5884 &[
5885 ("let ", None),
5886 ("a", Some(DiagnosticSeverity::ERROR)),
5887 (" = 1;", None),
5888 ]
5889 );
5890 });
5891 buffer_b.read_with(cx, |buffer, _| {
5892 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5893 assert_eq!(
5894 chunks
5895 .iter()
5896 .map(|(s, d)| (s.as_str(), *d))
5897 .collect::<Vec<_>>(),
5898 &[
5899 ("let ", None),
5900 ("b", Some(DiagnosticSeverity::WARNING)),
5901 (" = 2;", None),
5902 ]
5903 );
5904 });
5905 }
5906
5907 #[gpui::test]
5908 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5909 cx.foreground().forbid_parking();
5910
5911 let progress_token = "the-progress-token";
5912 let mut language = Language::new(
5913 LanguageConfig {
5914 name: "Rust".into(),
5915 path_suffixes: vec!["rs".to_string()],
5916 ..Default::default()
5917 },
5918 Some(tree_sitter_rust::language()),
5919 );
5920 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5921 disk_based_diagnostics_progress_token: Some(progress_token),
5922 disk_based_diagnostics_sources: &["disk"],
5923 ..Default::default()
5924 });
5925
5926 let fs = FakeFs::new(cx.background());
5927 fs.insert_tree(
5928 "/dir",
5929 json!({
5930 "a.rs": "fn a() { A }",
5931 "b.rs": "const y: i32 = 1",
5932 }),
5933 )
5934 .await;
5935
5936 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5937 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5938 let worktree_id =
5939 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5940
5941 // Cause worktree to start the fake language server
5942 let _buffer = project
5943 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5944 .await
5945 .unwrap();
5946
5947 let mut events = subscribe(&project, cx);
5948
5949 let mut fake_server = fake_servers.next().await.unwrap();
5950 fake_server.start_progress(progress_token).await;
5951 assert_eq!(
5952 events.next().await.unwrap(),
5953 Event::DiskBasedDiagnosticsStarted
5954 );
5955
5956 fake_server.start_progress(progress_token).await;
5957 fake_server.end_progress(progress_token).await;
5958 fake_server.start_progress(progress_token).await;
5959
5960 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5961 lsp::PublishDiagnosticsParams {
5962 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5963 version: None,
5964 diagnostics: vec![lsp::Diagnostic {
5965 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5966 severity: Some(lsp::DiagnosticSeverity::ERROR),
5967 message: "undefined variable 'A'".to_string(),
5968 ..Default::default()
5969 }],
5970 },
5971 );
5972 assert_eq!(
5973 events.next().await.unwrap(),
5974 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5975 );
5976
5977 fake_server.end_progress(progress_token).await;
5978 fake_server.end_progress(progress_token).await;
5979 assert_eq!(
5980 events.next().await.unwrap(),
5981 Event::DiskBasedDiagnosticsUpdated
5982 );
5983 assert_eq!(
5984 events.next().await.unwrap(),
5985 Event::DiskBasedDiagnosticsFinished
5986 );
5987
5988 let buffer = project
5989 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5990 .await
5991 .unwrap();
5992
5993 buffer.read_with(cx, |buffer, _| {
5994 let snapshot = buffer.snapshot();
5995 let diagnostics = snapshot
5996 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5997 .collect::<Vec<_>>();
5998 assert_eq!(
5999 diagnostics,
6000 &[DiagnosticEntry {
6001 range: Point::new(0, 9)..Point::new(0, 10),
6002 diagnostic: Diagnostic {
6003 severity: lsp::DiagnosticSeverity::ERROR,
6004 message: "undefined variable 'A'".to_string(),
6005 group_id: 0,
6006 is_primary: true,
6007 ..Default::default()
6008 }
6009 }]
6010 )
6011 });
6012
6013 // Ensure publishing empty diagnostics twice only results in one update event.
6014 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6015 lsp::PublishDiagnosticsParams {
6016 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6017 version: None,
6018 diagnostics: Default::default(),
6019 },
6020 );
6021 assert_eq!(
6022 events.next().await.unwrap(),
6023 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6024 );
6025
6026 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6027 lsp::PublishDiagnosticsParams {
6028 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6029 version: None,
6030 diagnostics: Default::default(),
6031 },
6032 );
6033 cx.foreground().run_until_parked();
6034 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6035 }
6036
6037 #[gpui::test]
6038 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6039 cx.foreground().forbid_parking();
6040
6041 let progress_token = "the-progress-token";
6042 let mut language = Language::new(
6043 LanguageConfig {
6044 path_suffixes: vec!["rs".to_string()],
6045 ..Default::default()
6046 },
6047 None,
6048 );
6049 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6050 disk_based_diagnostics_sources: &["disk"],
6051 disk_based_diagnostics_progress_token: Some(progress_token),
6052 ..Default::default()
6053 });
6054
6055 let fs = FakeFs::new(cx.background());
6056 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6057
6058 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6059 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6060
6061 let buffer = project
6062 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6063 .await
6064 .unwrap();
6065
6066 // Simulate diagnostics starting to update.
6067 let mut fake_server = fake_servers.next().await.unwrap();
6068 fake_server.start_progress(progress_token).await;
6069
6070 // Restart the server before the diagnostics finish updating.
6071 project.update(cx, |project, cx| {
6072 project.restart_language_servers_for_buffers([buffer], cx);
6073 });
6074 let mut events = subscribe(&project, cx);
6075
6076 // Simulate the newly started server sending more diagnostics.
6077 let mut fake_server = fake_servers.next().await.unwrap();
6078 fake_server.start_progress(progress_token).await;
6079 assert_eq!(
6080 events.next().await.unwrap(),
6081 Event::DiskBasedDiagnosticsStarted
6082 );
6083
6084 // All diagnostics are considered done, despite the old server's diagnostic
6085 // task never completing.
6086 fake_server.end_progress(progress_token).await;
6087 assert_eq!(
6088 events.next().await.unwrap(),
6089 Event::DiskBasedDiagnosticsUpdated
6090 );
6091 assert_eq!(
6092 events.next().await.unwrap(),
6093 Event::DiskBasedDiagnosticsFinished
6094 );
6095 project.read_with(cx, |project, _| {
6096 assert!(!project.is_running_disk_based_diagnostics());
6097 });
6098 }
6099
6100 #[gpui::test]
6101 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6102 cx.foreground().forbid_parking();
6103
6104 let mut language = Language::new(
6105 LanguageConfig {
6106 name: "Rust".into(),
6107 path_suffixes: vec!["rs".to_string()],
6108 ..Default::default()
6109 },
6110 Some(tree_sitter_rust::language()),
6111 );
6112 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6113 disk_based_diagnostics_sources: &["disk"],
6114 ..Default::default()
6115 });
6116
6117 let text = "
6118 fn a() { A }
6119 fn b() { BB }
6120 fn c() { CCC }
6121 "
6122 .unindent();
6123
6124 let fs = FakeFs::new(cx.background());
6125 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6126
6127 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6128 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6129
6130 let buffer = project
6131 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6132 .await
6133 .unwrap();
6134
6135 let mut fake_server = fake_servers.next().await.unwrap();
6136 let open_notification = fake_server
6137 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6138 .await;
6139
6140 // Edit the buffer, moving the content down
6141 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6142 let change_notification_1 = fake_server
6143 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6144 .await;
6145 assert!(
6146 change_notification_1.text_document.version > open_notification.text_document.version
6147 );
6148
6149 // Report some diagnostics for the initial version of the buffer
6150 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6151 lsp::PublishDiagnosticsParams {
6152 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6153 version: Some(open_notification.text_document.version),
6154 diagnostics: vec![
6155 lsp::Diagnostic {
6156 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6157 severity: Some(DiagnosticSeverity::ERROR),
6158 message: "undefined variable 'A'".to_string(),
6159 source: Some("disk".to_string()),
6160 ..Default::default()
6161 },
6162 lsp::Diagnostic {
6163 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6164 severity: Some(DiagnosticSeverity::ERROR),
6165 message: "undefined variable 'BB'".to_string(),
6166 source: Some("disk".to_string()),
6167 ..Default::default()
6168 },
6169 lsp::Diagnostic {
6170 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6171 severity: Some(DiagnosticSeverity::ERROR),
6172 source: Some("disk".to_string()),
6173 message: "undefined variable 'CCC'".to_string(),
6174 ..Default::default()
6175 },
6176 ],
6177 },
6178 );
6179
6180 // The diagnostics have moved down since they were created.
6181 buffer.next_notification(cx).await;
6182 buffer.read_with(cx, |buffer, _| {
6183 assert_eq!(
6184 buffer
6185 .snapshot()
6186 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6187 .collect::<Vec<_>>(),
6188 &[
6189 DiagnosticEntry {
6190 range: Point::new(3, 9)..Point::new(3, 11),
6191 diagnostic: Diagnostic {
6192 severity: DiagnosticSeverity::ERROR,
6193 message: "undefined variable 'BB'".to_string(),
6194 is_disk_based: true,
6195 group_id: 1,
6196 is_primary: true,
6197 ..Default::default()
6198 },
6199 },
6200 DiagnosticEntry {
6201 range: Point::new(4, 9)..Point::new(4, 12),
6202 diagnostic: Diagnostic {
6203 severity: DiagnosticSeverity::ERROR,
6204 message: "undefined variable 'CCC'".to_string(),
6205 is_disk_based: true,
6206 group_id: 2,
6207 is_primary: true,
6208 ..Default::default()
6209 }
6210 }
6211 ]
6212 );
6213 assert_eq!(
6214 chunks_with_diagnostics(buffer, 0..buffer.len()),
6215 [
6216 ("\n\nfn a() { ".to_string(), None),
6217 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6218 (" }\nfn b() { ".to_string(), None),
6219 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6220 (" }\nfn c() { ".to_string(), None),
6221 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6222 (" }\n".to_string(), None),
6223 ]
6224 );
6225 assert_eq!(
6226 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6227 [
6228 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6229 (" }\nfn c() { ".to_string(), None),
6230 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6231 ]
6232 );
6233 });
6234
6235 // Ensure overlapping diagnostics are highlighted correctly.
6236 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6237 lsp::PublishDiagnosticsParams {
6238 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6239 version: Some(open_notification.text_document.version),
6240 diagnostics: vec![
6241 lsp::Diagnostic {
6242 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6243 severity: Some(DiagnosticSeverity::ERROR),
6244 message: "undefined variable 'A'".to_string(),
6245 source: Some("disk".to_string()),
6246 ..Default::default()
6247 },
6248 lsp::Diagnostic {
6249 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6250 severity: Some(DiagnosticSeverity::WARNING),
6251 message: "unreachable statement".to_string(),
6252 source: Some("disk".to_string()),
6253 ..Default::default()
6254 },
6255 ],
6256 },
6257 );
6258
6259 buffer.next_notification(cx).await;
6260 buffer.read_with(cx, |buffer, _| {
6261 assert_eq!(
6262 buffer
6263 .snapshot()
6264 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6265 .collect::<Vec<_>>(),
6266 &[
6267 DiagnosticEntry {
6268 range: Point::new(2, 9)..Point::new(2, 12),
6269 diagnostic: Diagnostic {
6270 severity: DiagnosticSeverity::WARNING,
6271 message: "unreachable statement".to_string(),
6272 is_disk_based: true,
6273 group_id: 1,
6274 is_primary: true,
6275 ..Default::default()
6276 }
6277 },
6278 DiagnosticEntry {
6279 range: Point::new(2, 9)..Point::new(2, 10),
6280 diagnostic: Diagnostic {
6281 severity: DiagnosticSeverity::ERROR,
6282 message: "undefined variable 'A'".to_string(),
6283 is_disk_based: true,
6284 group_id: 0,
6285 is_primary: true,
6286 ..Default::default()
6287 },
6288 }
6289 ]
6290 );
6291 assert_eq!(
6292 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6293 [
6294 ("fn a() { ".to_string(), None),
6295 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6296 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6297 ("\n".to_string(), None),
6298 ]
6299 );
6300 assert_eq!(
6301 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6302 [
6303 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6304 ("\n".to_string(), None),
6305 ]
6306 );
6307 });
6308
6309 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6310 // changes since the last save.
6311 buffer.update(cx, |buffer, cx| {
6312 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6313 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6314 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6315 });
6316 let change_notification_2 = fake_server
6317 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6318 .await;
6319 assert!(
6320 change_notification_2.text_document.version
6321 > change_notification_1.text_document.version
6322 );
6323
6324 // Handle out-of-order diagnostics
6325 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6326 lsp::PublishDiagnosticsParams {
6327 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6328 version: Some(change_notification_2.text_document.version),
6329 diagnostics: vec![
6330 lsp::Diagnostic {
6331 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6332 severity: Some(DiagnosticSeverity::ERROR),
6333 message: "undefined variable 'BB'".to_string(),
6334 source: Some("disk".to_string()),
6335 ..Default::default()
6336 },
6337 lsp::Diagnostic {
6338 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6339 severity: Some(DiagnosticSeverity::WARNING),
6340 message: "undefined variable 'A'".to_string(),
6341 source: Some("disk".to_string()),
6342 ..Default::default()
6343 },
6344 ],
6345 },
6346 );
6347
6348 buffer.next_notification(cx).await;
6349 buffer.read_with(cx, |buffer, _| {
6350 assert_eq!(
6351 buffer
6352 .snapshot()
6353 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6354 .collect::<Vec<_>>(),
6355 &[
6356 DiagnosticEntry {
6357 range: Point::new(2, 21)..Point::new(2, 22),
6358 diagnostic: Diagnostic {
6359 severity: DiagnosticSeverity::WARNING,
6360 message: "undefined variable 'A'".to_string(),
6361 is_disk_based: true,
6362 group_id: 1,
6363 is_primary: true,
6364 ..Default::default()
6365 }
6366 },
6367 DiagnosticEntry {
6368 range: Point::new(3, 9)..Point::new(3, 14),
6369 diagnostic: Diagnostic {
6370 severity: DiagnosticSeverity::ERROR,
6371 message: "undefined variable 'BB'".to_string(),
6372 is_disk_based: true,
6373 group_id: 0,
6374 is_primary: true,
6375 ..Default::default()
6376 },
6377 }
6378 ]
6379 );
6380 });
6381 }
6382
6383 #[gpui::test]
6384 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6385 cx.foreground().forbid_parking();
6386
6387 let text = concat!(
6388 "let one = ;\n", //
6389 "let two = \n",
6390 "let three = 3;\n",
6391 );
6392
6393 let fs = FakeFs::new(cx.background());
6394 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6395
6396 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6397 let buffer = project
6398 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6399 .await
6400 .unwrap();
6401
6402 project.update(cx, |project, cx| {
6403 project
6404 .update_buffer_diagnostics(
6405 &buffer,
6406 vec![
6407 DiagnosticEntry {
6408 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6409 diagnostic: Diagnostic {
6410 severity: DiagnosticSeverity::ERROR,
6411 message: "syntax error 1".to_string(),
6412 ..Default::default()
6413 },
6414 },
6415 DiagnosticEntry {
6416 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6417 diagnostic: Diagnostic {
6418 severity: DiagnosticSeverity::ERROR,
6419 message: "syntax error 2".to_string(),
6420 ..Default::default()
6421 },
6422 },
6423 ],
6424 None,
6425 cx,
6426 )
6427 .unwrap();
6428 });
6429
6430 // An empty range is extended forward to include the following character.
6431 // At the end of a line, an empty range is extended backward to include
6432 // the preceding character.
6433 buffer.read_with(cx, |buffer, _| {
6434 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6435 assert_eq!(
6436 chunks
6437 .iter()
6438 .map(|(s, d)| (s.as_str(), *d))
6439 .collect::<Vec<_>>(),
6440 &[
6441 ("let one = ", None),
6442 (";", Some(DiagnosticSeverity::ERROR)),
6443 ("\nlet two =", None),
6444 (" ", Some(DiagnosticSeverity::ERROR)),
6445 ("\nlet three = 3;\n", None)
6446 ]
6447 );
6448 });
6449 }
6450
6451 #[gpui::test]
6452 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6453 cx.foreground().forbid_parking();
6454
6455 let mut language = Language::new(
6456 LanguageConfig {
6457 name: "Rust".into(),
6458 path_suffixes: vec!["rs".to_string()],
6459 ..Default::default()
6460 },
6461 Some(tree_sitter_rust::language()),
6462 );
6463 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6464
6465 let text = "
6466 fn a() {
6467 f1();
6468 }
6469 fn b() {
6470 f2();
6471 }
6472 fn c() {
6473 f3();
6474 }
6475 "
6476 .unindent();
6477
6478 let fs = FakeFs::new(cx.background());
6479 fs.insert_tree(
6480 "/dir",
6481 json!({
6482 "a.rs": text.clone(),
6483 }),
6484 )
6485 .await;
6486
6487 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6488 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6489 let buffer = project
6490 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6491 .await
6492 .unwrap();
6493
6494 let mut fake_server = fake_servers.next().await.unwrap();
6495 let lsp_document_version = fake_server
6496 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6497 .await
6498 .text_document
6499 .version;
6500
6501 // Simulate editing the buffer after the language server computes some edits.
6502 buffer.update(cx, |buffer, cx| {
6503 buffer.edit(
6504 [(
6505 Point::new(0, 0)..Point::new(0, 0),
6506 "// above first function\n",
6507 )],
6508 cx,
6509 );
6510 buffer.edit(
6511 [(
6512 Point::new(2, 0)..Point::new(2, 0),
6513 " // inside first function\n",
6514 )],
6515 cx,
6516 );
6517 buffer.edit(
6518 [(
6519 Point::new(6, 4)..Point::new(6, 4),
6520 "// inside second function ",
6521 )],
6522 cx,
6523 );
6524
6525 assert_eq!(
6526 buffer.text(),
6527 "
6528 // above first function
6529 fn a() {
6530 // inside first function
6531 f1();
6532 }
6533 fn b() {
6534 // inside second function f2();
6535 }
6536 fn c() {
6537 f3();
6538 }
6539 "
6540 .unindent()
6541 );
6542 });
6543
6544 let edits = project
6545 .update(cx, |project, cx| {
6546 project.edits_from_lsp(
6547 &buffer,
6548 vec![
6549 // replace body of first function
6550 lsp::TextEdit {
6551 range: lsp::Range::new(
6552 lsp::Position::new(0, 0),
6553 lsp::Position::new(3, 0),
6554 ),
6555 new_text: "
6556 fn a() {
6557 f10();
6558 }
6559 "
6560 .unindent(),
6561 },
6562 // edit inside second function
6563 lsp::TextEdit {
6564 range: lsp::Range::new(
6565 lsp::Position::new(4, 6),
6566 lsp::Position::new(4, 6),
6567 ),
6568 new_text: "00".into(),
6569 },
6570 // edit inside third function via two distinct edits
6571 lsp::TextEdit {
6572 range: lsp::Range::new(
6573 lsp::Position::new(7, 5),
6574 lsp::Position::new(7, 5),
6575 ),
6576 new_text: "4000".into(),
6577 },
6578 lsp::TextEdit {
6579 range: lsp::Range::new(
6580 lsp::Position::new(7, 5),
6581 lsp::Position::new(7, 6),
6582 ),
6583 new_text: "".into(),
6584 },
6585 ],
6586 Some(lsp_document_version),
6587 cx,
6588 )
6589 })
6590 .await
6591 .unwrap();
6592
6593 buffer.update(cx, |buffer, cx| {
6594 for (range, new_text) in edits {
6595 buffer.edit([(range, new_text)], cx);
6596 }
6597 assert_eq!(
6598 buffer.text(),
6599 "
6600 // above first function
6601 fn a() {
6602 // inside first function
6603 f10();
6604 }
6605 fn b() {
6606 // inside second function f200();
6607 }
6608 fn c() {
6609 f4000();
6610 }
6611 "
6612 .unindent()
6613 );
6614 });
6615 }
6616
6617 #[gpui::test]
6618 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6619 cx.foreground().forbid_parking();
6620
6621 let text = "
6622 use a::b;
6623 use a::c;
6624
6625 fn f() {
6626 b();
6627 c();
6628 }
6629 "
6630 .unindent();
6631
6632 let fs = FakeFs::new(cx.background());
6633 fs.insert_tree(
6634 "/dir",
6635 json!({
6636 "a.rs": text.clone(),
6637 }),
6638 )
6639 .await;
6640
6641 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6642 let buffer = project
6643 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6644 .await
6645 .unwrap();
6646
6647 // Simulate the language server sending us a small edit in the form of a very large diff.
6648 // Rust-analyzer does this when performing a merge-imports code action.
6649 let edits = project
6650 .update(cx, |project, cx| {
6651 project.edits_from_lsp(
6652 &buffer,
6653 [
6654 // Replace the first use statement without editing the semicolon.
6655 lsp::TextEdit {
6656 range: lsp::Range::new(
6657 lsp::Position::new(0, 4),
6658 lsp::Position::new(0, 8),
6659 ),
6660 new_text: "a::{b, c}".into(),
6661 },
6662 // Reinsert the remainder of the file between the semicolon and the final
6663 // newline of the file.
6664 lsp::TextEdit {
6665 range: lsp::Range::new(
6666 lsp::Position::new(0, 9),
6667 lsp::Position::new(0, 9),
6668 ),
6669 new_text: "\n\n".into(),
6670 },
6671 lsp::TextEdit {
6672 range: lsp::Range::new(
6673 lsp::Position::new(0, 9),
6674 lsp::Position::new(0, 9),
6675 ),
6676 new_text: "
6677 fn f() {
6678 b();
6679 c();
6680 }"
6681 .unindent(),
6682 },
6683 // Delete everything after the first newline of the file.
6684 lsp::TextEdit {
6685 range: lsp::Range::new(
6686 lsp::Position::new(1, 0),
6687 lsp::Position::new(7, 0),
6688 ),
6689 new_text: "".into(),
6690 },
6691 ],
6692 None,
6693 cx,
6694 )
6695 })
6696 .await
6697 .unwrap();
6698
6699 buffer.update(cx, |buffer, cx| {
6700 let edits = edits
6701 .into_iter()
6702 .map(|(range, text)| {
6703 (
6704 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6705 text,
6706 )
6707 })
6708 .collect::<Vec<_>>();
6709
6710 assert_eq!(
6711 edits,
6712 [
6713 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6714 (Point::new(1, 0)..Point::new(2, 0), "".into())
6715 ]
6716 );
6717
6718 for (range, new_text) in edits {
6719 buffer.edit([(range, new_text)], cx);
6720 }
6721 assert_eq!(
6722 buffer.text(),
6723 "
6724 use a::{b, c};
6725
6726 fn f() {
6727 b();
6728 c();
6729 }
6730 "
6731 .unindent()
6732 );
6733 });
6734 }
6735
6736 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6737 buffer: &Buffer,
6738 range: Range<T>,
6739 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6740 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6741 for chunk in buffer.snapshot().chunks(range, true) {
6742 if chunks.last().map_or(false, |prev_chunk| {
6743 prev_chunk.1 == chunk.diagnostic_severity
6744 }) {
6745 chunks.last_mut().unwrap().0.push_str(chunk.text);
6746 } else {
6747 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6748 }
6749 }
6750 chunks
6751 }
6752
6753 #[gpui::test]
6754 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6755 let dir = temp_tree(json!({
6756 "root": {
6757 "dir1": {},
6758 "dir2": {
6759 "dir3": {}
6760 }
6761 }
6762 }));
6763
6764 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6765 let cancel_flag = Default::default();
6766 let results = project
6767 .read_with(cx, |project, cx| {
6768 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6769 })
6770 .await;
6771
6772 assert!(results.is_empty());
6773 }
6774
6775 #[gpui::test(iterations = 10)]
6776 async fn test_definition(cx: &mut gpui::TestAppContext) {
6777 let mut language = Language::new(
6778 LanguageConfig {
6779 name: "Rust".into(),
6780 path_suffixes: vec!["rs".to_string()],
6781 ..Default::default()
6782 },
6783 Some(tree_sitter_rust::language()),
6784 );
6785 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6786
6787 let fs = FakeFs::new(cx.background());
6788 fs.insert_tree(
6789 "/dir",
6790 json!({
6791 "a.rs": "const fn a() { A }",
6792 "b.rs": "const y: i32 = crate::a()",
6793 }),
6794 )
6795 .await;
6796
6797 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6798 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6799
6800 let buffer = project
6801 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6802 .await
6803 .unwrap();
6804
6805 let fake_server = fake_servers.next().await.unwrap();
6806 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6807 let params = params.text_document_position_params;
6808 assert_eq!(
6809 params.text_document.uri.to_file_path().unwrap(),
6810 Path::new("/dir/b.rs"),
6811 );
6812 assert_eq!(params.position, lsp::Position::new(0, 22));
6813
6814 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6815 lsp::Location::new(
6816 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6817 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6818 ),
6819 )))
6820 });
6821
6822 let mut definitions = project
6823 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6824 .await
6825 .unwrap();
6826
6827 assert_eq!(definitions.len(), 1);
6828 let definition = definitions.pop().unwrap();
6829 cx.update(|cx| {
6830 let target_buffer = definition.buffer.read(cx);
6831 assert_eq!(
6832 target_buffer
6833 .file()
6834 .unwrap()
6835 .as_local()
6836 .unwrap()
6837 .abs_path(cx),
6838 Path::new("/dir/a.rs"),
6839 );
6840 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6841 assert_eq!(
6842 list_worktrees(&project, cx),
6843 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6844 );
6845
6846 drop(definition);
6847 });
6848 cx.read(|cx| {
6849 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6850 });
6851
6852 fn list_worktrees<'a>(
6853 project: &'a ModelHandle<Project>,
6854 cx: &'a AppContext,
6855 ) -> Vec<(&'a Path, bool)> {
6856 project
6857 .read(cx)
6858 .worktrees(cx)
6859 .map(|worktree| {
6860 let worktree = worktree.read(cx);
6861 (
6862 worktree.as_local().unwrap().abs_path().as_ref(),
6863 worktree.is_visible(),
6864 )
6865 })
6866 .collect::<Vec<_>>()
6867 }
6868 }
6869
6870 #[gpui::test]
6871 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6872 let mut language = Language::new(
6873 LanguageConfig {
6874 name: "TypeScript".into(),
6875 path_suffixes: vec!["ts".to_string()],
6876 ..Default::default()
6877 },
6878 Some(tree_sitter_typescript::language_typescript()),
6879 );
6880 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6881
6882 let fs = FakeFs::new(cx.background());
6883 fs.insert_tree(
6884 "/dir",
6885 json!({
6886 "a.ts": "",
6887 }),
6888 )
6889 .await;
6890
6891 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6892 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6893 let buffer = project
6894 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6895 .await
6896 .unwrap();
6897
6898 let fake_server = fake_language_servers.next().await.unwrap();
6899
6900 let text = "let a = b.fqn";
6901 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6902 let completions = project.update(cx, |project, cx| {
6903 project.completions(&buffer, text.len(), cx)
6904 });
6905
6906 fake_server
6907 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6908 Ok(Some(lsp::CompletionResponse::Array(vec![
6909 lsp::CompletionItem {
6910 label: "fullyQualifiedName?".into(),
6911 insert_text: Some("fullyQualifiedName".into()),
6912 ..Default::default()
6913 },
6914 ])))
6915 })
6916 .next()
6917 .await;
6918 let completions = completions.await.unwrap();
6919 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6920 assert_eq!(completions.len(), 1);
6921 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6922 assert_eq!(
6923 completions[0].old_range.to_offset(&snapshot),
6924 text.len() - 3..text.len()
6925 );
6926 }
6927
6928 #[gpui::test(iterations = 10)]
6929 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6930 let mut language = Language::new(
6931 LanguageConfig {
6932 name: "TypeScript".into(),
6933 path_suffixes: vec!["ts".to_string()],
6934 ..Default::default()
6935 },
6936 None,
6937 );
6938 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6939
6940 let fs = FakeFs::new(cx.background());
6941 fs.insert_tree(
6942 "/dir",
6943 json!({
6944 "a.ts": "a",
6945 }),
6946 )
6947 .await;
6948
6949 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6950 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6951 let buffer = project
6952 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6953 .await
6954 .unwrap();
6955
6956 let fake_server = fake_language_servers.next().await.unwrap();
6957
6958 // Language server returns code actions that contain commands, and not edits.
6959 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6960 fake_server
6961 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6962 Ok(Some(vec![
6963 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6964 title: "The code action".into(),
6965 command: Some(lsp::Command {
6966 title: "The command".into(),
6967 command: "_the/command".into(),
6968 arguments: Some(vec![json!("the-argument")]),
6969 }),
6970 ..Default::default()
6971 }),
6972 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6973 title: "two".into(),
6974 ..Default::default()
6975 }),
6976 ]))
6977 })
6978 .next()
6979 .await;
6980
6981 let action = actions.await.unwrap()[0].clone();
6982 let apply = project.update(cx, |project, cx| {
6983 project.apply_code_action(buffer.clone(), action, true, cx)
6984 });
6985
6986 // Resolving the code action does not populate its edits. In absence of
6987 // edits, we must execute the given command.
6988 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6989 |action, _| async move { Ok(action) },
6990 );
6991
6992 // While executing the command, the language server sends the editor
6993 // a `workspaceEdit` request.
6994 fake_server
6995 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6996 let fake = fake_server.clone();
6997 move |params, _| {
6998 assert_eq!(params.command, "_the/command");
6999 let fake = fake.clone();
7000 async move {
7001 fake.server
7002 .request::<lsp::request::ApplyWorkspaceEdit>(
7003 lsp::ApplyWorkspaceEditParams {
7004 label: None,
7005 edit: lsp::WorkspaceEdit {
7006 changes: Some(
7007 [(
7008 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7009 vec![lsp::TextEdit {
7010 range: lsp::Range::new(
7011 lsp::Position::new(0, 0),
7012 lsp::Position::new(0, 0),
7013 ),
7014 new_text: "X".into(),
7015 }],
7016 )]
7017 .into_iter()
7018 .collect(),
7019 ),
7020 ..Default::default()
7021 },
7022 },
7023 )
7024 .await
7025 .unwrap();
7026 Ok(Some(json!(null)))
7027 }
7028 }
7029 })
7030 .next()
7031 .await;
7032
7033 // Applying the code action returns a project transaction containing the edits
7034 // sent by the language server in its `workspaceEdit` request.
7035 let transaction = apply.await.unwrap();
7036 assert!(transaction.0.contains_key(&buffer));
7037 buffer.update(cx, |buffer, cx| {
7038 assert_eq!(buffer.text(), "Xa");
7039 buffer.undo(cx);
7040 assert_eq!(buffer.text(), "a");
7041 });
7042 }
7043
7044 #[gpui::test]
7045 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7046 let fs = FakeFs::new(cx.background());
7047 fs.insert_tree(
7048 "/dir",
7049 json!({
7050 "file1": "the old contents",
7051 }),
7052 )
7053 .await;
7054
7055 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7056 let buffer = project
7057 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7058 .await
7059 .unwrap();
7060 buffer
7061 .update(cx, |buffer, cx| {
7062 assert_eq!(buffer.text(), "the old contents");
7063 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7064 buffer.save(cx)
7065 })
7066 .await
7067 .unwrap();
7068
7069 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7070 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7071 }
7072
7073 #[gpui::test]
7074 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7075 let fs = FakeFs::new(cx.background());
7076 fs.insert_tree(
7077 "/dir",
7078 json!({
7079 "file1": "the old contents",
7080 }),
7081 )
7082 .await;
7083
7084 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7085 let buffer = project
7086 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7087 .await
7088 .unwrap();
7089 buffer
7090 .update(cx, |buffer, cx| {
7091 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7092 buffer.save(cx)
7093 })
7094 .await
7095 .unwrap();
7096
7097 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7098 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7099 }
7100
7101 #[gpui::test]
7102 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7103 let fs = FakeFs::new(cx.background());
7104 fs.insert_tree("/dir", json!({})).await;
7105
7106 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7107 let buffer = project.update(cx, |project, cx| {
7108 project.create_buffer("", None, cx).unwrap()
7109 });
7110 buffer.update(cx, |buffer, cx| {
7111 buffer.edit([(0..0, "abc")], cx);
7112 assert!(buffer.is_dirty());
7113 assert!(!buffer.has_conflict());
7114 });
7115 project
7116 .update(cx, |project, cx| {
7117 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7118 })
7119 .await
7120 .unwrap();
7121 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7122 buffer.read_with(cx, |buffer, cx| {
7123 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7124 assert!(!buffer.is_dirty());
7125 assert!(!buffer.has_conflict());
7126 });
7127
7128 let opened_buffer = project
7129 .update(cx, |project, cx| {
7130 project.open_local_buffer("/dir/file1", cx)
7131 })
7132 .await
7133 .unwrap();
7134 assert_eq!(opened_buffer, buffer);
7135 }
7136
7137 #[gpui::test(retries = 5)]
7138 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7139 let dir = temp_tree(json!({
7140 "a": {
7141 "file1": "",
7142 "file2": "",
7143 "file3": "",
7144 },
7145 "b": {
7146 "c": {
7147 "file4": "",
7148 "file5": "",
7149 }
7150 }
7151 }));
7152
7153 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7154 let rpc = project.read_with(cx, |p, _| p.client.clone());
7155
7156 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7157 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7158 async move { buffer.await.unwrap() }
7159 };
7160 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7161 project.read_with(cx, |project, cx| {
7162 let tree = project.worktrees(cx).next().unwrap();
7163 tree.read(cx)
7164 .entry_for_path(path)
7165 .expect(&format!("no entry for path {}", path))
7166 .id
7167 })
7168 };
7169
7170 let buffer2 = buffer_for_path("a/file2", cx).await;
7171 let buffer3 = buffer_for_path("a/file3", cx).await;
7172 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7173 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7174
7175 let file2_id = id_for_path("a/file2", &cx);
7176 let file3_id = id_for_path("a/file3", &cx);
7177 let file4_id = id_for_path("b/c/file4", &cx);
7178
7179 // Create a remote copy of this worktree.
7180 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7181 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7182 let (remote, load_task) = cx.update(|cx| {
7183 Worktree::remote(
7184 1,
7185 1,
7186 initial_snapshot.to_proto(&Default::default(), true),
7187 rpc.clone(),
7188 cx,
7189 )
7190 });
7191 // tree
7192 load_task.await;
7193
7194 cx.read(|cx| {
7195 assert!(!buffer2.read(cx).is_dirty());
7196 assert!(!buffer3.read(cx).is_dirty());
7197 assert!(!buffer4.read(cx).is_dirty());
7198 assert!(!buffer5.read(cx).is_dirty());
7199 });
7200
7201 // Rename and delete files and directories.
7202 tree.flush_fs_events(&cx).await;
7203 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7204 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7205 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7206 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7207 tree.flush_fs_events(&cx).await;
7208
7209 let expected_paths = vec![
7210 "a",
7211 "a/file1",
7212 "a/file2.new",
7213 "b",
7214 "d",
7215 "d/file3",
7216 "d/file4",
7217 ];
7218
7219 cx.read(|app| {
7220 assert_eq!(
7221 tree.read(app)
7222 .paths()
7223 .map(|p| p.to_str().unwrap())
7224 .collect::<Vec<_>>(),
7225 expected_paths
7226 );
7227
7228 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7229 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7230 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7231
7232 assert_eq!(
7233 buffer2.read(app).file().unwrap().path().as_ref(),
7234 Path::new("a/file2.new")
7235 );
7236 assert_eq!(
7237 buffer3.read(app).file().unwrap().path().as_ref(),
7238 Path::new("d/file3")
7239 );
7240 assert_eq!(
7241 buffer4.read(app).file().unwrap().path().as_ref(),
7242 Path::new("d/file4")
7243 );
7244 assert_eq!(
7245 buffer5.read(app).file().unwrap().path().as_ref(),
7246 Path::new("b/c/file5")
7247 );
7248
7249 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7250 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7251 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7252 assert!(buffer5.read(app).file().unwrap().is_deleted());
7253 });
7254
7255 // Update the remote worktree. Check that it becomes consistent with the
7256 // local worktree.
7257 remote.update(cx, |remote, cx| {
7258 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7259 &initial_snapshot,
7260 1,
7261 1,
7262 true,
7263 );
7264 remote
7265 .as_remote_mut()
7266 .unwrap()
7267 .snapshot
7268 .apply_remote_update(update_message)
7269 .unwrap();
7270
7271 assert_eq!(
7272 remote
7273 .paths()
7274 .map(|p| p.to_str().unwrap())
7275 .collect::<Vec<_>>(),
7276 expected_paths
7277 );
7278 });
7279 }
7280
7281 #[gpui::test]
7282 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7283 let fs = FakeFs::new(cx.background());
7284 fs.insert_tree(
7285 "/dir",
7286 json!({
7287 "a.txt": "a-contents",
7288 "b.txt": "b-contents",
7289 }),
7290 )
7291 .await;
7292
7293 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7294
7295 // Spawn multiple tasks to open paths, repeating some paths.
7296 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7297 (
7298 p.open_local_buffer("/dir/a.txt", cx),
7299 p.open_local_buffer("/dir/b.txt", cx),
7300 p.open_local_buffer("/dir/a.txt", cx),
7301 )
7302 });
7303
7304 let buffer_a_1 = buffer_a_1.await.unwrap();
7305 let buffer_a_2 = buffer_a_2.await.unwrap();
7306 let buffer_b = buffer_b.await.unwrap();
7307 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7308 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7309
7310 // There is only one buffer per path.
7311 let buffer_a_id = buffer_a_1.id();
7312 assert_eq!(buffer_a_2.id(), buffer_a_id);
7313
7314 // Open the same path again while it is still open.
7315 drop(buffer_a_1);
7316 let buffer_a_3 = project
7317 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7318 .await
7319 .unwrap();
7320
7321 // There's still only one buffer per path.
7322 assert_eq!(buffer_a_3.id(), buffer_a_id);
7323 }
7324
7325 #[gpui::test]
7326 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7327 let fs = FakeFs::new(cx.background());
7328 fs.insert_tree(
7329 "/dir",
7330 json!({
7331 "file1": "abc",
7332 "file2": "def",
7333 "file3": "ghi",
7334 }),
7335 )
7336 .await;
7337
7338 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7339
7340 let buffer1 = project
7341 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7342 .await
7343 .unwrap();
7344 let events = Rc::new(RefCell::new(Vec::new()));
7345
7346 // initially, the buffer isn't dirty.
7347 buffer1.update(cx, |buffer, cx| {
7348 cx.subscribe(&buffer1, {
7349 let events = events.clone();
7350 move |_, _, event, _| match event {
7351 BufferEvent::Operation(_) => {}
7352 _ => events.borrow_mut().push(event.clone()),
7353 }
7354 })
7355 .detach();
7356
7357 assert!(!buffer.is_dirty());
7358 assert!(events.borrow().is_empty());
7359
7360 buffer.edit([(1..2, "")], cx);
7361 });
7362
7363 // after the first edit, the buffer is dirty, and emits a dirtied event.
7364 buffer1.update(cx, |buffer, cx| {
7365 assert!(buffer.text() == "ac");
7366 assert!(buffer.is_dirty());
7367 assert_eq!(
7368 *events.borrow(),
7369 &[language::Event::Edited, language::Event::Dirtied]
7370 );
7371 events.borrow_mut().clear();
7372 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7373 });
7374
7375 // after saving, the buffer is not dirty, and emits a saved event.
7376 buffer1.update(cx, |buffer, cx| {
7377 assert!(!buffer.is_dirty());
7378 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7379 events.borrow_mut().clear();
7380
7381 buffer.edit([(1..1, "B")], cx);
7382 buffer.edit([(2..2, "D")], cx);
7383 });
7384
7385 // after editing again, the buffer is dirty, and emits another dirty event.
7386 buffer1.update(cx, |buffer, cx| {
7387 assert!(buffer.text() == "aBDc");
7388 assert!(buffer.is_dirty());
7389 assert_eq!(
7390 *events.borrow(),
7391 &[
7392 language::Event::Edited,
7393 language::Event::Dirtied,
7394 language::Event::Edited,
7395 ],
7396 );
7397 events.borrow_mut().clear();
7398
7399 // TODO - currently, after restoring the buffer to its
7400 // previously-saved state, the is still considered dirty.
7401 buffer.edit([(1..3, "")], cx);
7402 assert!(buffer.text() == "ac");
7403 assert!(buffer.is_dirty());
7404 });
7405
7406 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7407
7408 // When a file is deleted, the buffer is considered dirty.
7409 let events = Rc::new(RefCell::new(Vec::new()));
7410 let buffer2 = project
7411 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7412 .await
7413 .unwrap();
7414 buffer2.update(cx, |_, cx| {
7415 cx.subscribe(&buffer2, {
7416 let events = events.clone();
7417 move |_, _, event, _| events.borrow_mut().push(event.clone())
7418 })
7419 .detach();
7420 });
7421
7422 fs.remove_file("/dir/file2".as_ref(), Default::default())
7423 .await
7424 .unwrap();
7425 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7426 assert_eq!(
7427 *events.borrow(),
7428 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7429 );
7430
7431 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7432 let events = Rc::new(RefCell::new(Vec::new()));
7433 let buffer3 = project
7434 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7435 .await
7436 .unwrap();
7437 buffer3.update(cx, |_, cx| {
7438 cx.subscribe(&buffer3, {
7439 let events = events.clone();
7440 move |_, _, event, _| events.borrow_mut().push(event.clone())
7441 })
7442 .detach();
7443 });
7444
7445 buffer3.update(cx, |buffer, cx| {
7446 buffer.edit([(0..0, "x")], cx);
7447 });
7448 events.borrow_mut().clear();
7449 fs.remove_file("/dir/file3".as_ref(), Default::default())
7450 .await
7451 .unwrap();
7452 buffer3
7453 .condition(&cx, |_, _| !events.borrow().is_empty())
7454 .await;
7455 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7456 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7457 }
7458
7459 #[gpui::test]
7460 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7461 let initial_contents = "aaa\nbbbbb\nc\n";
7462 let fs = FakeFs::new(cx.background());
7463 fs.insert_tree(
7464 "/dir",
7465 json!({
7466 "the-file": initial_contents,
7467 }),
7468 )
7469 .await;
7470 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7471 let buffer = project
7472 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7473 .await
7474 .unwrap();
7475
7476 let anchors = (0..3)
7477 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7478 .collect::<Vec<_>>();
7479
7480 // Change the file on disk, adding two new lines of text, and removing
7481 // one line.
7482 buffer.read_with(cx, |buffer, _| {
7483 assert!(!buffer.is_dirty());
7484 assert!(!buffer.has_conflict());
7485 });
7486 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7487 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7488 .await
7489 .unwrap();
7490
7491 // Because the buffer was not modified, it is reloaded from disk. Its
7492 // contents are edited according to the diff between the old and new
7493 // file contents.
7494 buffer
7495 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7496 .await;
7497
7498 buffer.update(cx, |buffer, _| {
7499 assert_eq!(buffer.text(), new_contents);
7500 assert!(!buffer.is_dirty());
7501 assert!(!buffer.has_conflict());
7502
7503 let anchor_positions = anchors
7504 .iter()
7505 .map(|anchor| anchor.to_point(&*buffer))
7506 .collect::<Vec<_>>();
7507 assert_eq!(
7508 anchor_positions,
7509 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7510 );
7511 });
7512
7513 // Modify the buffer
7514 buffer.update(cx, |buffer, cx| {
7515 buffer.edit([(0..0, " ")], cx);
7516 assert!(buffer.is_dirty());
7517 assert!(!buffer.has_conflict());
7518 });
7519
7520 // Change the file on disk again, adding blank lines to the beginning.
7521 fs.save(
7522 "/dir/the-file".as_ref(),
7523 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7524 )
7525 .await
7526 .unwrap();
7527
7528 // Because the buffer is modified, it doesn't reload from disk, but is
7529 // marked as having a conflict.
7530 buffer
7531 .condition(&cx, |buffer, _| buffer.has_conflict())
7532 .await;
7533 }
7534
7535 #[gpui::test]
7536 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7537 cx.foreground().forbid_parking();
7538
7539 let fs = FakeFs::new(cx.background());
7540 fs.insert_tree(
7541 "/the-dir",
7542 json!({
7543 "a.rs": "
7544 fn foo(mut v: Vec<usize>) {
7545 for x in &v {
7546 v.push(1);
7547 }
7548 }
7549 "
7550 .unindent(),
7551 }),
7552 )
7553 .await;
7554
7555 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7556 let buffer = project
7557 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7558 .await
7559 .unwrap();
7560
7561 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7562 let message = lsp::PublishDiagnosticsParams {
7563 uri: buffer_uri.clone(),
7564 diagnostics: vec![
7565 lsp::Diagnostic {
7566 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7567 severity: Some(DiagnosticSeverity::WARNING),
7568 message: "error 1".to_string(),
7569 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7570 location: lsp::Location {
7571 uri: buffer_uri.clone(),
7572 range: lsp::Range::new(
7573 lsp::Position::new(1, 8),
7574 lsp::Position::new(1, 9),
7575 ),
7576 },
7577 message: "error 1 hint 1".to_string(),
7578 }]),
7579 ..Default::default()
7580 },
7581 lsp::Diagnostic {
7582 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7583 severity: Some(DiagnosticSeverity::HINT),
7584 message: "error 1 hint 1".to_string(),
7585 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7586 location: lsp::Location {
7587 uri: buffer_uri.clone(),
7588 range: lsp::Range::new(
7589 lsp::Position::new(1, 8),
7590 lsp::Position::new(1, 9),
7591 ),
7592 },
7593 message: "original diagnostic".to_string(),
7594 }]),
7595 ..Default::default()
7596 },
7597 lsp::Diagnostic {
7598 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7599 severity: Some(DiagnosticSeverity::ERROR),
7600 message: "error 2".to_string(),
7601 related_information: Some(vec![
7602 lsp::DiagnosticRelatedInformation {
7603 location: lsp::Location {
7604 uri: buffer_uri.clone(),
7605 range: lsp::Range::new(
7606 lsp::Position::new(1, 13),
7607 lsp::Position::new(1, 15),
7608 ),
7609 },
7610 message: "error 2 hint 1".to_string(),
7611 },
7612 lsp::DiagnosticRelatedInformation {
7613 location: lsp::Location {
7614 uri: buffer_uri.clone(),
7615 range: lsp::Range::new(
7616 lsp::Position::new(1, 13),
7617 lsp::Position::new(1, 15),
7618 ),
7619 },
7620 message: "error 2 hint 2".to_string(),
7621 },
7622 ]),
7623 ..Default::default()
7624 },
7625 lsp::Diagnostic {
7626 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7627 severity: Some(DiagnosticSeverity::HINT),
7628 message: "error 2 hint 1".to_string(),
7629 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7630 location: lsp::Location {
7631 uri: buffer_uri.clone(),
7632 range: lsp::Range::new(
7633 lsp::Position::new(2, 8),
7634 lsp::Position::new(2, 17),
7635 ),
7636 },
7637 message: "original diagnostic".to_string(),
7638 }]),
7639 ..Default::default()
7640 },
7641 lsp::Diagnostic {
7642 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7643 severity: Some(DiagnosticSeverity::HINT),
7644 message: "error 2 hint 2".to_string(),
7645 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7646 location: lsp::Location {
7647 uri: buffer_uri.clone(),
7648 range: lsp::Range::new(
7649 lsp::Position::new(2, 8),
7650 lsp::Position::new(2, 17),
7651 ),
7652 },
7653 message: "original diagnostic".to_string(),
7654 }]),
7655 ..Default::default()
7656 },
7657 ],
7658 version: None,
7659 };
7660
7661 project
7662 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7663 .unwrap();
7664 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7665
7666 assert_eq!(
7667 buffer
7668 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7669 .collect::<Vec<_>>(),
7670 &[
7671 DiagnosticEntry {
7672 range: Point::new(1, 8)..Point::new(1, 9),
7673 diagnostic: Diagnostic {
7674 severity: DiagnosticSeverity::WARNING,
7675 message: "error 1".to_string(),
7676 group_id: 0,
7677 is_primary: true,
7678 ..Default::default()
7679 }
7680 },
7681 DiagnosticEntry {
7682 range: Point::new(1, 8)..Point::new(1, 9),
7683 diagnostic: Diagnostic {
7684 severity: DiagnosticSeverity::HINT,
7685 message: "error 1 hint 1".to_string(),
7686 group_id: 0,
7687 is_primary: false,
7688 ..Default::default()
7689 }
7690 },
7691 DiagnosticEntry {
7692 range: Point::new(1, 13)..Point::new(1, 15),
7693 diagnostic: Diagnostic {
7694 severity: DiagnosticSeverity::HINT,
7695 message: "error 2 hint 1".to_string(),
7696 group_id: 1,
7697 is_primary: false,
7698 ..Default::default()
7699 }
7700 },
7701 DiagnosticEntry {
7702 range: Point::new(1, 13)..Point::new(1, 15),
7703 diagnostic: Diagnostic {
7704 severity: DiagnosticSeverity::HINT,
7705 message: "error 2 hint 2".to_string(),
7706 group_id: 1,
7707 is_primary: false,
7708 ..Default::default()
7709 }
7710 },
7711 DiagnosticEntry {
7712 range: Point::new(2, 8)..Point::new(2, 17),
7713 diagnostic: Diagnostic {
7714 severity: DiagnosticSeverity::ERROR,
7715 message: "error 2".to_string(),
7716 group_id: 1,
7717 is_primary: true,
7718 ..Default::default()
7719 }
7720 }
7721 ]
7722 );
7723
7724 assert_eq!(
7725 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7726 &[
7727 DiagnosticEntry {
7728 range: Point::new(1, 8)..Point::new(1, 9),
7729 diagnostic: Diagnostic {
7730 severity: DiagnosticSeverity::WARNING,
7731 message: "error 1".to_string(),
7732 group_id: 0,
7733 is_primary: true,
7734 ..Default::default()
7735 }
7736 },
7737 DiagnosticEntry {
7738 range: Point::new(1, 8)..Point::new(1, 9),
7739 diagnostic: Diagnostic {
7740 severity: DiagnosticSeverity::HINT,
7741 message: "error 1 hint 1".to_string(),
7742 group_id: 0,
7743 is_primary: false,
7744 ..Default::default()
7745 }
7746 },
7747 ]
7748 );
7749 assert_eq!(
7750 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7751 &[
7752 DiagnosticEntry {
7753 range: Point::new(1, 13)..Point::new(1, 15),
7754 diagnostic: Diagnostic {
7755 severity: DiagnosticSeverity::HINT,
7756 message: "error 2 hint 1".to_string(),
7757 group_id: 1,
7758 is_primary: false,
7759 ..Default::default()
7760 }
7761 },
7762 DiagnosticEntry {
7763 range: Point::new(1, 13)..Point::new(1, 15),
7764 diagnostic: Diagnostic {
7765 severity: DiagnosticSeverity::HINT,
7766 message: "error 2 hint 2".to_string(),
7767 group_id: 1,
7768 is_primary: false,
7769 ..Default::default()
7770 }
7771 },
7772 DiagnosticEntry {
7773 range: Point::new(2, 8)..Point::new(2, 17),
7774 diagnostic: Diagnostic {
7775 severity: DiagnosticSeverity::ERROR,
7776 message: "error 2".to_string(),
7777 group_id: 1,
7778 is_primary: true,
7779 ..Default::default()
7780 }
7781 }
7782 ]
7783 );
7784 }
7785
7786 #[gpui::test]
7787 async fn test_rename(cx: &mut gpui::TestAppContext) {
7788 cx.foreground().forbid_parking();
7789
7790 let mut language = Language::new(
7791 LanguageConfig {
7792 name: "Rust".into(),
7793 path_suffixes: vec!["rs".to_string()],
7794 ..Default::default()
7795 },
7796 Some(tree_sitter_rust::language()),
7797 );
7798 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7799 capabilities: lsp::ServerCapabilities {
7800 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7801 prepare_provider: Some(true),
7802 work_done_progress_options: Default::default(),
7803 })),
7804 ..Default::default()
7805 },
7806 ..Default::default()
7807 });
7808
7809 let fs = FakeFs::new(cx.background());
7810 fs.insert_tree(
7811 "/dir",
7812 json!({
7813 "one.rs": "const ONE: usize = 1;",
7814 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7815 }),
7816 )
7817 .await;
7818
7819 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7820 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7821 let buffer = project
7822 .update(cx, |project, cx| {
7823 project.open_local_buffer("/dir/one.rs", cx)
7824 })
7825 .await
7826 .unwrap();
7827
7828 let fake_server = fake_servers.next().await.unwrap();
7829
7830 let response = project.update(cx, |project, cx| {
7831 project.prepare_rename(buffer.clone(), 7, cx)
7832 });
7833 fake_server
7834 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7835 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7836 assert_eq!(params.position, lsp::Position::new(0, 7));
7837 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7838 lsp::Position::new(0, 6),
7839 lsp::Position::new(0, 9),
7840 ))))
7841 })
7842 .next()
7843 .await
7844 .unwrap();
7845 let range = response.await.unwrap().unwrap();
7846 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7847 assert_eq!(range, 6..9);
7848
7849 let response = project.update(cx, |project, cx| {
7850 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7851 });
7852 fake_server
7853 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7854 assert_eq!(
7855 params.text_document_position.text_document.uri.as_str(),
7856 "file:///dir/one.rs"
7857 );
7858 assert_eq!(
7859 params.text_document_position.position,
7860 lsp::Position::new(0, 7)
7861 );
7862 assert_eq!(params.new_name, "THREE");
7863 Ok(Some(lsp::WorkspaceEdit {
7864 changes: Some(
7865 [
7866 (
7867 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7868 vec![lsp::TextEdit::new(
7869 lsp::Range::new(
7870 lsp::Position::new(0, 6),
7871 lsp::Position::new(0, 9),
7872 ),
7873 "THREE".to_string(),
7874 )],
7875 ),
7876 (
7877 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7878 vec![
7879 lsp::TextEdit::new(
7880 lsp::Range::new(
7881 lsp::Position::new(0, 24),
7882 lsp::Position::new(0, 27),
7883 ),
7884 "THREE".to_string(),
7885 ),
7886 lsp::TextEdit::new(
7887 lsp::Range::new(
7888 lsp::Position::new(0, 35),
7889 lsp::Position::new(0, 38),
7890 ),
7891 "THREE".to_string(),
7892 ),
7893 ],
7894 ),
7895 ]
7896 .into_iter()
7897 .collect(),
7898 ),
7899 ..Default::default()
7900 }))
7901 })
7902 .next()
7903 .await
7904 .unwrap();
7905 let mut transaction = response.await.unwrap().0;
7906 assert_eq!(transaction.len(), 2);
7907 assert_eq!(
7908 transaction
7909 .remove_entry(&buffer)
7910 .unwrap()
7911 .0
7912 .read_with(cx, |buffer, _| buffer.text()),
7913 "const THREE: usize = 1;"
7914 );
7915 assert_eq!(
7916 transaction
7917 .into_keys()
7918 .next()
7919 .unwrap()
7920 .read_with(cx, |buffer, _| buffer.text()),
7921 "const TWO: usize = one::THREE + one::THREE;"
7922 );
7923 }
7924
7925 #[gpui::test]
7926 async fn test_search(cx: &mut gpui::TestAppContext) {
7927 let fs = FakeFs::new(cx.background());
7928 fs.insert_tree(
7929 "/dir",
7930 json!({
7931 "one.rs": "const ONE: usize = 1;",
7932 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7933 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7934 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7935 }),
7936 )
7937 .await;
7938 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7939 assert_eq!(
7940 search(&project, SearchQuery::text("TWO", false, true), cx)
7941 .await
7942 .unwrap(),
7943 HashMap::from_iter([
7944 ("two.rs".to_string(), vec![6..9]),
7945 ("three.rs".to_string(), vec![37..40])
7946 ])
7947 );
7948
7949 let buffer_4 = project
7950 .update(cx, |project, cx| {
7951 project.open_local_buffer("/dir/four.rs", cx)
7952 })
7953 .await
7954 .unwrap();
7955 buffer_4.update(cx, |buffer, cx| {
7956 let text = "two::TWO";
7957 buffer.edit([(20..28, text), (31..43, text)], cx);
7958 });
7959
7960 assert_eq!(
7961 search(&project, SearchQuery::text("TWO", false, true), cx)
7962 .await
7963 .unwrap(),
7964 HashMap::from_iter([
7965 ("two.rs".to_string(), vec![6..9]),
7966 ("three.rs".to_string(), vec![37..40]),
7967 ("four.rs".to_string(), vec![25..28, 36..39])
7968 ])
7969 );
7970
7971 async fn search(
7972 project: &ModelHandle<Project>,
7973 query: SearchQuery,
7974 cx: &mut gpui::TestAppContext,
7975 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7976 let results = project
7977 .update(cx, |project, cx| project.search(query, cx))
7978 .await?;
7979
7980 Ok(results
7981 .into_iter()
7982 .map(|(buffer, ranges)| {
7983 buffer.read_with(cx, |buffer, _| {
7984 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7985 let ranges = ranges
7986 .into_iter()
7987 .map(|range| range.to_offset(buffer))
7988 .collect::<Vec<_>>();
7989 (path, ranges)
7990 })
7991 })
7992 .collect())
7993 }
7994 }
7995}