1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 user_store: ModelHandle<UserStore>,
77 fs: Arc<dyn Fs>,
78 client_state: ProjectClientState,
79 collaborators: HashMap<PeerId, Collaborator>,
80 subscriptions: Vec<client::Subscription>,
81 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
82 shared_buffers: HashMap<PeerId, HashSet<u64>>,
83 loading_buffers: HashMap<
84 ProjectPath,
85 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
86 >,
87 loading_local_worktrees:
88 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
89 opened_buffers: HashMap<u64, OpenBuffer>,
90 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
91 nonce: u128,
92}
93
94#[derive(Error, Debug)]
95pub enum JoinProjectError {
96 #[error("host declined join request")]
97 HostDeclined,
98 #[error("host closed the project")]
99 HostClosedProject,
100 #[error("host went offline")]
101 HostWentOffline,
102 #[error("{0}")]
103 Other(#[from] anyhow::Error),
104}
105
106enum OpenBuffer {
107 Strong(ModelHandle<Buffer>),
108 Weak(WeakModelHandle<Buffer>),
109 Loading(Vec<Operation>),
110}
111
112enum WorktreeHandle {
113 Strong(ModelHandle<Worktree>),
114 Weak(WeakModelHandle<Worktree>),
115}
116
117enum ProjectClientState {
118 Local {
119 is_shared: bool,
120 remote_id_tx: watch::Sender<Option<u64>>,
121 remote_id_rx: watch::Receiver<Option<u64>>,
122 _maintain_remote_id_task: Task<Option<()>>,
123 },
124 Remote {
125 sharing_has_stopped: bool,
126 remote_id: u64,
127 replica_id: ReplicaId,
128 _detect_unshare_task: Task<Option<()>>,
129 },
130}
131
132#[derive(Clone, Debug)]
133pub struct Collaborator {
134 pub user: Arc<User>,
135 pub peer_id: PeerId,
136 pub replica_id: ReplicaId,
137}
138
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub enum Event {
141 ActiveEntryChanged(Option<ProjectEntryId>),
142 WorktreeRemoved(WorktreeId),
143 DiskBasedDiagnosticsStarted,
144 DiskBasedDiagnosticsUpdated,
145 DiskBasedDiagnosticsFinished,
146 DiagnosticsUpdated(ProjectPath),
147 RemoteIdChanged(Option<u64>),
148 CollaboratorLeft(PeerId),
149 ContactRequestedJoin(Arc<User>),
150 ContactCancelledJoinRequest(Arc<User>),
151}
152
153#[derive(Serialize)]
154pub struct LanguageServerStatus {
155 pub name: String,
156 pub pending_work: BTreeMap<String, LanguageServerProgress>,
157 pub pending_diagnostic_updates: isize,
158}
159
160#[derive(Clone, Debug, Serialize)]
161pub struct LanguageServerProgress {
162 pub message: Option<String>,
163 pub percentage: Option<usize>,
164 #[serde(skip_serializing)]
165 pub last_update_at: Instant,
166}
167
168#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
169pub struct ProjectPath {
170 pub worktree_id: WorktreeId,
171 pub path: Arc<Path>,
172}
173
174#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
175pub struct DiagnosticSummary {
176 pub error_count: usize,
177 pub warning_count: usize,
178}
179
180#[derive(Debug)]
181pub struct Location {
182 pub buffer: ModelHandle<Buffer>,
183 pub range: Range<language::Anchor>,
184}
185
186#[derive(Debug)]
187pub struct DocumentHighlight {
188 pub range: Range<language::Anchor>,
189 pub kind: DocumentHighlightKind,
190}
191
192#[derive(Clone, Debug)]
193pub struct Symbol {
194 pub source_worktree_id: WorktreeId,
195 pub worktree_id: WorktreeId,
196 pub language_server_name: LanguageServerName,
197 pub path: PathBuf,
198 pub label: CodeLabel,
199 pub name: String,
200 pub kind: lsp::SymbolKind,
201 pub range: Range<PointUtf16>,
202 pub signature: [u8; 32],
203}
204
205#[derive(Default)]
206pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
207
208impl DiagnosticSummary {
209 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
210 let mut this = Self {
211 error_count: 0,
212 warning_count: 0,
213 };
214
215 for entry in diagnostics {
216 if entry.diagnostic.is_primary {
217 match entry.diagnostic.severity {
218 DiagnosticSeverity::ERROR => this.error_count += 1,
219 DiagnosticSeverity::WARNING => this.warning_count += 1,
220 _ => {}
221 }
222 }
223 }
224
225 this
226 }
227
228 pub fn is_empty(&self) -> bool {
229 self.error_count == 0 && self.warning_count == 0
230 }
231
232 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
233 proto::DiagnosticSummary {
234 path: path.to_string_lossy().to_string(),
235 error_count: self.error_count as u32,
236 warning_count: self.warning_count as u32,
237 }
238 }
239}
240
241#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
242pub struct ProjectEntryId(usize);
243
244impl ProjectEntryId {
245 pub const MAX: Self = Self(usize::MAX);
246
247 pub fn new(counter: &AtomicUsize) -> Self {
248 Self(counter.fetch_add(1, SeqCst))
249 }
250
251 pub fn from_proto(id: u64) -> Self {
252 Self(id as usize)
253 }
254
255 pub fn to_proto(&self) -> u64 {
256 self.0 as u64
257 }
258
259 pub fn to_usize(&self) -> usize {
260 self.0
261 }
262}
263
264impl Project {
265 pub fn init(client: &Arc<Client>) {
266 client.add_model_message_handler(Self::handle_request_join_project);
267 client.add_model_message_handler(Self::handle_add_collaborator);
268 client.add_model_message_handler(Self::handle_buffer_reloaded);
269 client.add_model_message_handler(Self::handle_buffer_saved);
270 client.add_model_message_handler(Self::handle_start_language_server);
271 client.add_model_message_handler(Self::handle_update_language_server);
272 client.add_model_message_handler(Self::handle_remove_collaborator);
273 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
274 client.add_model_message_handler(Self::handle_register_worktree);
275 client.add_model_message_handler(Self::handle_unregister_worktree);
276 client.add_model_message_handler(Self::handle_unregister_project);
277 client.add_model_message_handler(Self::handle_project_unshared);
278 client.add_model_message_handler(Self::handle_update_buffer_file);
279 client.add_model_message_handler(Self::handle_update_buffer);
280 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
281 client.add_model_message_handler(Self::handle_update_worktree);
282 client.add_model_request_handler(Self::handle_create_project_entry);
283 client.add_model_request_handler(Self::handle_rename_project_entry);
284 client.add_model_request_handler(Self::handle_copy_project_entry);
285 client.add_model_request_handler(Self::handle_delete_project_entry);
286 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
287 client.add_model_request_handler(Self::handle_apply_code_action);
288 client.add_model_request_handler(Self::handle_reload_buffers);
289 client.add_model_request_handler(Self::handle_format_buffers);
290 client.add_model_request_handler(Self::handle_get_code_actions);
291 client.add_model_request_handler(Self::handle_get_completions);
292 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
293 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
294 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
295 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
296 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
297 client.add_model_request_handler(Self::handle_search_project);
298 client.add_model_request_handler(Self::handle_get_project_symbols);
299 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
300 client.add_model_request_handler(Self::handle_open_buffer_by_id);
301 client.add_model_request_handler(Self::handle_open_buffer_by_path);
302 client.add_model_request_handler(Self::handle_save_buffer);
303 }
304
305 pub fn local(
306 client: Arc<Client>,
307 user_store: ModelHandle<UserStore>,
308 languages: Arc<LanguageRegistry>,
309 fs: Arc<dyn Fs>,
310 cx: &mut MutableAppContext,
311 ) -> ModelHandle<Self> {
312 cx.add_model(|cx: &mut ModelContext<Self>| {
313 let (remote_id_tx, remote_id_rx) = watch::channel();
314 let _maintain_remote_id_task = cx.spawn_weak({
315 let rpc = client.clone();
316 move |this, mut cx| {
317 async move {
318 let mut status = rpc.status();
319 while let Some(status) = status.next().await {
320 if let Some(this) = this.upgrade(&cx) {
321 if status.is_connected() {
322 this.update(&mut cx, |this, cx| this.register(cx)).await?;
323 } else {
324 this.update(&mut cx, |this, cx| this.unregister(cx));
325 }
326 }
327 }
328 Ok(())
329 }
330 .log_err()
331 }
332 });
333
334 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
335 Self {
336 worktrees: Default::default(),
337 collaborators: Default::default(),
338 opened_buffers: Default::default(),
339 shared_buffers: Default::default(),
340 loading_buffers: Default::default(),
341 loading_local_worktrees: Default::default(),
342 buffer_snapshots: Default::default(),
343 client_state: ProjectClientState::Local {
344 is_shared: false,
345 remote_id_tx,
346 remote_id_rx,
347 _maintain_remote_id_task,
348 },
349 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
350 subscriptions: Vec::new(),
351 active_entry: None,
352 languages,
353 client,
354 user_store,
355 fs,
356 next_entry_id: Default::default(),
357 language_servers: Default::default(),
358 started_language_servers: Default::default(),
359 language_server_statuses: Default::default(),
360 last_workspace_edits_by_language_server: Default::default(),
361 language_server_settings: Default::default(),
362 next_language_server_id: 0,
363 nonce: StdRng::from_entropy().gen(),
364 }
365 })
366 }
367
368 pub async fn remote(
369 remote_id: u64,
370 client: Arc<Client>,
371 user_store: ModelHandle<UserStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut AsyncAppContext,
375 ) -> Result<ModelHandle<Self>, JoinProjectError> {
376 client.authenticate_and_connect(true, &cx).await?;
377
378 let response = client
379 .request(proto::JoinProject {
380 project_id: remote_id,
381 })
382 .await?;
383
384 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
385 proto::join_project_response::Variant::Accept(response) => response,
386 proto::join_project_response::Variant::Decline(decline) => {
387 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
388 Some(proto::join_project_response::decline::Reason::Declined) => {
389 Err(JoinProjectError::HostDeclined)?
390 }
391 Some(proto::join_project_response::decline::Reason::Closed) => {
392 Err(JoinProjectError::HostClosedProject)?
393 }
394 Some(proto::join_project_response::decline::Reason::WentOffline) => {
395 Err(JoinProjectError::HostWentOffline)?
396 }
397 None => Err(anyhow!("missing decline reason"))?,
398 }
399 }
400 };
401
402 let replica_id = response.replica_id as ReplicaId;
403
404 let mut worktrees = Vec::new();
405 for worktree in response.worktrees {
406 let (worktree, load_task) = cx
407 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
408 worktrees.push(worktree);
409 load_task.detach();
410 }
411
412 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
413 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
414 let mut this = Self {
415 worktrees: Vec::new(),
416 loading_buffers: Default::default(),
417 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
418 shared_buffers: Default::default(),
419 loading_local_worktrees: Default::default(),
420 active_entry: None,
421 collaborators: Default::default(),
422 languages,
423 user_store: user_store.clone(),
424 fs,
425 next_entry_id: Default::default(),
426 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
427 client: client.clone(),
428 client_state: ProjectClientState::Remote {
429 sharing_has_stopped: false,
430 remote_id,
431 replica_id,
432 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
433 async move {
434 let mut status = client.status();
435 let is_connected =
436 status.next().await.map_or(false, |s| s.is_connected());
437 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
438 if !is_connected || status.next().await.is_some() {
439 if let Some(this) = this.upgrade(&cx) {
440 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
441 }
442 }
443 Ok(())
444 }
445 .log_err()
446 }),
447 },
448 language_servers: Default::default(),
449 started_language_servers: Default::default(),
450 language_server_settings: Default::default(),
451 language_server_statuses: response
452 .language_servers
453 .into_iter()
454 .map(|server| {
455 (
456 server.id as usize,
457 LanguageServerStatus {
458 name: server.name,
459 pending_work: Default::default(),
460 pending_diagnostic_updates: 0,
461 },
462 )
463 })
464 .collect(),
465 last_workspace_edits_by_language_server: Default::default(),
466 next_language_server_id: 0,
467 opened_buffers: Default::default(),
468 buffer_snapshots: Default::default(),
469 nonce: StdRng::from_entropy().gen(),
470 };
471 for worktree in worktrees {
472 this.add_worktree(&worktree, cx);
473 }
474 this
475 });
476
477 let user_ids = response
478 .collaborators
479 .iter()
480 .map(|peer| peer.user_id)
481 .collect();
482 user_store
483 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
484 .await?;
485 let mut collaborators = HashMap::default();
486 for message in response.collaborators {
487 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
488 collaborators.insert(collaborator.peer_id, collaborator);
489 }
490
491 this.update(cx, |this, _| {
492 this.collaborators = collaborators;
493 });
494
495 Ok(this)
496 }
497
498 #[cfg(any(test, feature = "test-support"))]
499 pub async fn test(
500 fs: Arc<dyn Fs>,
501 root_paths: impl IntoIterator<Item = &Path>,
502 cx: &mut gpui::TestAppContext,
503 ) -> ModelHandle<Project> {
504 let languages = Arc::new(LanguageRegistry::test());
505 let http_client = client::test::FakeHttpClient::with_404_response();
506 let client = client::Client::new(http_client.clone());
507 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
508 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
509 for path in root_paths {
510 let (tree, _) = project
511 .update(cx, |project, cx| {
512 project.find_or_create_local_worktree(path, true, cx)
513 })
514 .await
515 .unwrap();
516 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
517 .await;
518 }
519 project
520 }
521
522 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
523 self.opened_buffers
524 .get(&remote_id)
525 .and_then(|buffer| buffer.upgrade(cx))
526 }
527
528 pub fn languages(&self) -> &Arc<LanguageRegistry> {
529 &self.languages
530 }
531
532 pub fn client(&self) -> Arc<Client> {
533 self.client.clone()
534 }
535
536 pub fn user_store(&self) -> ModelHandle<UserStore> {
537 self.user_store.clone()
538 }
539
540 #[cfg(any(test, feature = "test-support"))]
541 pub fn check_invariants(&self, cx: &AppContext) {
542 if self.is_local() {
543 let mut worktree_root_paths = HashMap::default();
544 for worktree in self.worktrees(cx) {
545 let worktree = worktree.read(cx);
546 let abs_path = worktree.as_local().unwrap().abs_path().clone();
547 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
548 assert_eq!(
549 prev_worktree_id,
550 None,
551 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
552 abs_path,
553 worktree.id(),
554 prev_worktree_id
555 )
556 }
557 } else {
558 let replica_id = self.replica_id();
559 for buffer in self.opened_buffers.values() {
560 if let Some(buffer) = buffer.upgrade(cx) {
561 let buffer = buffer.read(cx);
562 assert_eq!(
563 buffer.deferred_ops_len(),
564 0,
565 "replica {}, buffer {} has deferred operations",
566 replica_id,
567 buffer.remote_id()
568 );
569 }
570 }
571 }
572 }
573
574 #[cfg(any(test, feature = "test-support"))]
575 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
576 let path = path.into();
577 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
578 self.opened_buffers.iter().any(|(_, buffer)| {
579 if let Some(buffer) = buffer.upgrade(cx) {
580 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
581 if file.worktree == worktree && file.path() == &path.path {
582 return true;
583 }
584 }
585 }
586 false
587 })
588 } else {
589 false
590 }
591 }
592
593 pub fn fs(&self) -> &Arc<dyn Fs> {
594 &self.fs
595 }
596
597 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
598 self.unshared(cx);
599 for worktree in &self.worktrees {
600 if let Some(worktree) = worktree.upgrade(cx) {
601 worktree.update(cx, |worktree, _| {
602 worktree.as_local_mut().unwrap().unregister();
603 });
604 }
605 }
606
607 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
608 *remote_id_tx.borrow_mut() = None;
609 }
610
611 self.subscriptions.clear();
612 }
613
614 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
615 self.unregister(cx);
616
617 let response = self.client.request(proto::RegisterProject {});
618 cx.spawn(|this, mut cx| async move {
619 let remote_id = response.await?.project_id;
620
621 let mut registrations = Vec::new();
622 this.update(&mut cx, |this, cx| {
623 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
624 *remote_id_tx.borrow_mut() = Some(remote_id);
625 }
626
627 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
628
629 this.subscriptions
630 .push(this.client.add_model_for_remote_entity(remote_id, cx));
631
632 for worktree in &this.worktrees {
633 if let Some(worktree) = worktree.upgrade(cx) {
634 registrations.push(worktree.update(cx, |worktree, cx| {
635 let worktree = worktree.as_local_mut().unwrap();
636 worktree.register(remote_id, cx)
637 }));
638 }
639 }
640 });
641
642 futures::future::try_join_all(registrations).await?;
643 Ok(())
644 })
645 }
646
647 pub fn remote_id(&self) -> Option<u64> {
648 match &self.client_state {
649 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
650 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
651 }
652 }
653
654 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
655 let mut id = None;
656 let mut watch = None;
657 match &self.client_state {
658 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
659 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
660 }
661
662 async move {
663 if let Some(id) = id {
664 return id;
665 }
666 let mut watch = watch.unwrap();
667 loop {
668 let id = *watch.borrow();
669 if let Some(id) = id {
670 return id;
671 }
672 watch.next().await;
673 }
674 }
675 }
676
677 pub fn replica_id(&self) -> ReplicaId {
678 match &self.client_state {
679 ProjectClientState::Local { .. } => 0,
680 ProjectClientState::Remote { replica_id, .. } => *replica_id,
681 }
682 }
683
684 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
685 &self.collaborators
686 }
687
688 pub fn worktrees<'a>(
689 &'a self,
690 cx: &'a AppContext,
691 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
692 self.worktrees
693 .iter()
694 .filter_map(move |worktree| worktree.upgrade(cx))
695 }
696
697 pub fn visible_worktrees<'a>(
698 &'a self,
699 cx: &'a AppContext,
700 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
701 self.worktrees.iter().filter_map(|worktree| {
702 worktree.upgrade(cx).and_then(|worktree| {
703 if worktree.read(cx).is_visible() {
704 Some(worktree)
705 } else {
706 None
707 }
708 })
709 })
710 }
711
712 pub fn worktree_for_id(
713 &self,
714 id: WorktreeId,
715 cx: &AppContext,
716 ) -> Option<ModelHandle<Worktree>> {
717 self.worktrees(cx)
718 .find(|worktree| worktree.read(cx).id() == id)
719 }
720
721 pub fn worktree_for_entry(
722 &self,
723 entry_id: ProjectEntryId,
724 cx: &AppContext,
725 ) -> Option<ModelHandle<Worktree>> {
726 self.worktrees(cx)
727 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
728 }
729
730 pub fn worktree_id_for_entry(
731 &self,
732 entry_id: ProjectEntryId,
733 cx: &AppContext,
734 ) -> Option<WorktreeId> {
735 self.worktree_for_entry(entry_id, cx)
736 .map(|worktree| worktree.read(cx).id())
737 }
738
739 pub fn create_entry(
740 &mut self,
741 project_path: impl Into<ProjectPath>,
742 is_directory: bool,
743 cx: &mut ModelContext<Self>,
744 ) -> Option<Task<Result<Entry>>> {
745 let project_path = project_path.into();
746 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
747 if self.is_local() {
748 Some(worktree.update(cx, |worktree, cx| {
749 worktree
750 .as_local_mut()
751 .unwrap()
752 .create_entry(project_path.path, is_directory, cx)
753 }))
754 } else {
755 let client = self.client.clone();
756 let project_id = self.remote_id().unwrap();
757 Some(cx.spawn_weak(|_, mut cx| async move {
758 let response = client
759 .request(proto::CreateProjectEntry {
760 worktree_id: project_path.worktree_id.to_proto(),
761 project_id,
762 path: project_path.path.as_os_str().as_bytes().to_vec(),
763 is_directory,
764 })
765 .await?;
766 let entry = response
767 .entry
768 .ok_or_else(|| anyhow!("missing entry in response"))?;
769 worktree
770 .update(&mut cx, |worktree, cx| {
771 worktree.as_remote().unwrap().insert_entry(
772 entry,
773 response.worktree_scan_id as usize,
774 cx,
775 )
776 })
777 .await
778 }))
779 }
780 }
781
782 pub fn copy_entry(
783 &mut self,
784 entry_id: ProjectEntryId,
785 new_path: impl Into<Arc<Path>>,
786 cx: &mut ModelContext<Self>,
787 ) -> Option<Task<Result<Entry>>> {
788 let worktree = self.worktree_for_entry(entry_id, cx)?;
789 let new_path = new_path.into();
790 if self.is_local() {
791 worktree.update(cx, |worktree, cx| {
792 worktree
793 .as_local_mut()
794 .unwrap()
795 .copy_entry(entry_id, new_path, cx)
796 })
797 } else {
798 let client = self.client.clone();
799 let project_id = self.remote_id().unwrap();
800
801 Some(cx.spawn_weak(|_, mut cx| async move {
802 let response = client
803 .request(proto::CopyProjectEntry {
804 project_id,
805 entry_id: entry_id.to_proto(),
806 new_path: new_path.as_os_str().as_bytes().to_vec(),
807 })
808 .await?;
809 let entry = response
810 .entry
811 .ok_or_else(|| anyhow!("missing entry in response"))?;
812 worktree
813 .update(&mut cx, |worktree, cx| {
814 worktree.as_remote().unwrap().insert_entry(
815 entry,
816 response.worktree_scan_id as usize,
817 cx,
818 )
819 })
820 .await
821 }))
822 }
823 }
824
825 pub fn rename_entry(
826 &mut self,
827 entry_id: ProjectEntryId,
828 new_path: impl Into<Arc<Path>>,
829 cx: &mut ModelContext<Self>,
830 ) -> Option<Task<Result<Entry>>> {
831 let worktree = self.worktree_for_entry(entry_id, cx)?;
832 let new_path = new_path.into();
833 if self.is_local() {
834 worktree.update(cx, |worktree, cx| {
835 worktree
836 .as_local_mut()
837 .unwrap()
838 .rename_entry(entry_id, new_path, cx)
839 })
840 } else {
841 let client = self.client.clone();
842 let project_id = self.remote_id().unwrap();
843
844 Some(cx.spawn_weak(|_, mut cx| async move {
845 let response = client
846 .request(proto::RenameProjectEntry {
847 project_id,
848 entry_id: entry_id.to_proto(),
849 new_path: new_path.as_os_str().as_bytes().to_vec(),
850 })
851 .await?;
852 let entry = response
853 .entry
854 .ok_or_else(|| anyhow!("missing entry in response"))?;
855 worktree
856 .update(&mut cx, |worktree, cx| {
857 worktree.as_remote().unwrap().insert_entry(
858 entry,
859 response.worktree_scan_id as usize,
860 cx,
861 )
862 })
863 .await
864 }))
865 }
866 }
867
868 pub fn delete_entry(
869 &mut self,
870 entry_id: ProjectEntryId,
871 cx: &mut ModelContext<Self>,
872 ) -> Option<Task<Result<()>>> {
873 let worktree = self.worktree_for_entry(entry_id, cx)?;
874 if self.is_local() {
875 worktree.update(cx, |worktree, cx| {
876 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
877 })
878 } else {
879 let client = self.client.clone();
880 let project_id = self.remote_id().unwrap();
881 Some(cx.spawn_weak(|_, mut cx| async move {
882 let response = client
883 .request(proto::DeleteProjectEntry {
884 project_id,
885 entry_id: entry_id.to_proto(),
886 })
887 .await?;
888 worktree
889 .update(&mut cx, move |worktree, cx| {
890 worktree.as_remote().unwrap().delete_entry(
891 entry_id,
892 response.worktree_scan_id as usize,
893 cx,
894 )
895 })
896 .await
897 }))
898 }
899 }
900
901 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
902 let project_id;
903 if let ProjectClientState::Local {
904 remote_id_rx,
905 is_shared,
906 ..
907 } = &mut self.client_state
908 {
909 if *is_shared {
910 return Task::ready(Ok(()));
911 }
912 *is_shared = true;
913 if let Some(id) = *remote_id_rx.borrow() {
914 project_id = id;
915 } else {
916 return Task::ready(Err(anyhow!("project hasn't been registered")));
917 }
918 } else {
919 return Task::ready(Err(anyhow!("can't share a remote project")));
920 };
921
922 for open_buffer in self.opened_buffers.values_mut() {
923 match open_buffer {
924 OpenBuffer::Strong(_) => {}
925 OpenBuffer::Weak(buffer) => {
926 if let Some(buffer) = buffer.upgrade(cx) {
927 *open_buffer = OpenBuffer::Strong(buffer);
928 }
929 }
930 OpenBuffer::Loading(_) => unreachable!(),
931 }
932 }
933
934 for worktree_handle in self.worktrees.iter_mut() {
935 match worktree_handle {
936 WorktreeHandle::Strong(_) => {}
937 WorktreeHandle::Weak(worktree) => {
938 if let Some(worktree) = worktree.upgrade(cx) {
939 *worktree_handle = WorktreeHandle::Strong(worktree);
940 }
941 }
942 }
943 }
944
945 let mut tasks = Vec::new();
946 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
947 worktree.update(cx, |worktree, cx| {
948 let worktree = worktree.as_local_mut().unwrap();
949 tasks.push(worktree.share(project_id, cx));
950 });
951 }
952
953 cx.spawn(|this, mut cx| async move {
954 for task in tasks {
955 task.await?;
956 }
957 this.update(&mut cx, |_, cx| cx.notify());
958 Ok(())
959 })
960 }
961
962 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
963 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
964 if !*is_shared {
965 return;
966 }
967
968 *is_shared = false;
969 self.collaborators.clear();
970 self.shared_buffers.clear();
971 for worktree_handle in self.worktrees.iter_mut() {
972 if let WorktreeHandle::Strong(worktree) = worktree_handle {
973 let is_visible = worktree.update(cx, |worktree, _| {
974 worktree.as_local_mut().unwrap().unshare();
975 worktree.is_visible()
976 });
977 if !is_visible {
978 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
979 }
980 }
981 }
982
983 for open_buffer in self.opened_buffers.values_mut() {
984 match open_buffer {
985 OpenBuffer::Strong(buffer) => {
986 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
987 }
988 _ => {}
989 }
990 }
991
992 cx.notify();
993 } else {
994 log::error!("attempted to unshare a remote project");
995 }
996 }
997
998 pub fn respond_to_join_request(
999 &mut self,
1000 requester_id: u64,
1001 allow: bool,
1002 cx: &mut ModelContext<Self>,
1003 ) {
1004 if let Some(project_id) = self.remote_id() {
1005 let share = self.share(cx);
1006 let client = self.client.clone();
1007 cx.foreground()
1008 .spawn(async move {
1009 share.await?;
1010 client.send(proto::RespondToJoinProjectRequest {
1011 requester_id,
1012 project_id,
1013 allow,
1014 })
1015 })
1016 .detach_and_log_err(cx);
1017 }
1018 }
1019
1020 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1021 if let ProjectClientState::Remote {
1022 sharing_has_stopped,
1023 ..
1024 } = &mut self.client_state
1025 {
1026 *sharing_has_stopped = true;
1027 self.collaborators.clear();
1028 cx.notify();
1029 }
1030 }
1031
1032 pub fn is_read_only(&self) -> bool {
1033 match &self.client_state {
1034 ProjectClientState::Local { .. } => false,
1035 ProjectClientState::Remote {
1036 sharing_has_stopped,
1037 ..
1038 } => *sharing_has_stopped,
1039 }
1040 }
1041
1042 pub fn is_local(&self) -> bool {
1043 match &self.client_state {
1044 ProjectClientState::Local { .. } => true,
1045 ProjectClientState::Remote { .. } => false,
1046 }
1047 }
1048
1049 pub fn is_remote(&self) -> bool {
1050 !self.is_local()
1051 }
1052
1053 pub fn create_buffer(
1054 &mut self,
1055 text: &str,
1056 language: Option<Arc<Language>>,
1057 cx: &mut ModelContext<Self>,
1058 ) -> Result<ModelHandle<Buffer>> {
1059 if self.is_remote() {
1060 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1061 }
1062
1063 let buffer = cx.add_model(|cx| {
1064 Buffer::new(self.replica_id(), text, cx)
1065 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1066 });
1067 self.register_buffer(&buffer, cx)?;
1068 Ok(buffer)
1069 }
1070
1071 pub fn open_path(
1072 &mut self,
1073 path: impl Into<ProjectPath>,
1074 cx: &mut ModelContext<Self>,
1075 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1076 let task = self.open_buffer(path, cx);
1077 cx.spawn_weak(|_, cx| async move {
1078 let buffer = task.await?;
1079 let project_entry_id = buffer
1080 .read_with(&cx, |buffer, cx| {
1081 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1082 })
1083 .ok_or_else(|| anyhow!("no project entry"))?;
1084 Ok((project_entry_id, buffer.into()))
1085 })
1086 }
1087
1088 pub fn open_local_buffer(
1089 &mut self,
1090 abs_path: impl AsRef<Path>,
1091 cx: &mut ModelContext<Self>,
1092 ) -> Task<Result<ModelHandle<Buffer>>> {
1093 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1094 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1095 } else {
1096 Task::ready(Err(anyhow!("no such path")))
1097 }
1098 }
1099
1100 pub fn open_buffer(
1101 &mut self,
1102 path: impl Into<ProjectPath>,
1103 cx: &mut ModelContext<Self>,
1104 ) -> Task<Result<ModelHandle<Buffer>>> {
1105 let project_path = path.into();
1106 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1107 worktree
1108 } else {
1109 return Task::ready(Err(anyhow!("no such worktree")));
1110 };
1111
1112 // If there is already a buffer for the given path, then return it.
1113 let existing_buffer = self.get_open_buffer(&project_path, cx);
1114 if let Some(existing_buffer) = existing_buffer {
1115 return Task::ready(Ok(existing_buffer));
1116 }
1117
1118 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1119 // If the given path is already being loaded, then wait for that existing
1120 // task to complete and return the same buffer.
1121 hash_map::Entry::Occupied(e) => e.get().clone(),
1122
1123 // Otherwise, record the fact that this path is now being loaded.
1124 hash_map::Entry::Vacant(entry) => {
1125 let (mut tx, rx) = postage::watch::channel();
1126 entry.insert(rx.clone());
1127
1128 let load_buffer = if worktree.read(cx).is_local() {
1129 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1130 } else {
1131 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1132 };
1133
1134 cx.spawn(move |this, mut cx| async move {
1135 let load_result = load_buffer.await;
1136 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1137 // Record the fact that the buffer is no longer loading.
1138 this.loading_buffers.remove(&project_path);
1139 let buffer = load_result.map_err(Arc::new)?;
1140 Ok(buffer)
1141 }));
1142 })
1143 .detach();
1144 rx
1145 }
1146 };
1147
1148 cx.foreground().spawn(async move {
1149 loop {
1150 if let Some(result) = loading_watch.borrow().as_ref() {
1151 match result {
1152 Ok(buffer) => return Ok(buffer.clone()),
1153 Err(error) => return Err(anyhow!("{}", error)),
1154 }
1155 }
1156 loading_watch.next().await;
1157 }
1158 })
1159 }
1160
1161 fn open_local_buffer_internal(
1162 &mut self,
1163 path: &Arc<Path>,
1164 worktree: &ModelHandle<Worktree>,
1165 cx: &mut ModelContext<Self>,
1166 ) -> Task<Result<ModelHandle<Buffer>>> {
1167 let load_buffer = worktree.update(cx, |worktree, cx| {
1168 let worktree = worktree.as_local_mut().unwrap();
1169 worktree.load_buffer(path, cx)
1170 });
1171 cx.spawn(|this, mut cx| async move {
1172 let buffer = load_buffer.await?;
1173 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1174 Ok(buffer)
1175 })
1176 }
1177
1178 fn open_remote_buffer_internal(
1179 &mut self,
1180 path: &Arc<Path>,
1181 worktree: &ModelHandle<Worktree>,
1182 cx: &mut ModelContext<Self>,
1183 ) -> Task<Result<ModelHandle<Buffer>>> {
1184 let rpc = self.client.clone();
1185 let project_id = self.remote_id().unwrap();
1186 let remote_worktree_id = worktree.read(cx).id();
1187 let path = path.clone();
1188 let path_string = path.to_string_lossy().to_string();
1189 cx.spawn(|this, mut cx| async move {
1190 let response = rpc
1191 .request(proto::OpenBufferByPath {
1192 project_id,
1193 worktree_id: remote_worktree_id.to_proto(),
1194 path: path_string,
1195 })
1196 .await?;
1197 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1198 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1199 .await
1200 })
1201 }
1202
1203 fn open_local_buffer_via_lsp(
1204 &mut self,
1205 abs_path: lsp::Url,
1206 lsp_adapter: Arc<dyn LspAdapter>,
1207 lsp_server: Arc<LanguageServer>,
1208 cx: &mut ModelContext<Self>,
1209 ) -> Task<Result<ModelHandle<Buffer>>> {
1210 cx.spawn(|this, mut cx| async move {
1211 let abs_path = abs_path
1212 .to_file_path()
1213 .map_err(|_| anyhow!("can't convert URI to path"))?;
1214 let (worktree, relative_path) = if let Some(result) =
1215 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1216 {
1217 result
1218 } else {
1219 let worktree = this
1220 .update(&mut cx, |this, cx| {
1221 this.create_local_worktree(&abs_path, false, cx)
1222 })
1223 .await?;
1224 this.update(&mut cx, |this, cx| {
1225 this.language_servers.insert(
1226 (worktree.read(cx).id(), lsp_adapter.name()),
1227 (lsp_adapter, lsp_server),
1228 );
1229 });
1230 (worktree, PathBuf::new())
1231 };
1232
1233 let project_path = ProjectPath {
1234 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1235 path: relative_path.into(),
1236 };
1237 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1238 .await
1239 })
1240 }
1241
1242 pub fn open_buffer_by_id(
1243 &mut self,
1244 id: u64,
1245 cx: &mut ModelContext<Self>,
1246 ) -> Task<Result<ModelHandle<Buffer>>> {
1247 if let Some(buffer) = self.buffer_for_id(id, cx) {
1248 Task::ready(Ok(buffer))
1249 } else if self.is_local() {
1250 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1251 } else if let Some(project_id) = self.remote_id() {
1252 let request = self
1253 .client
1254 .request(proto::OpenBufferById { project_id, id });
1255 cx.spawn(|this, mut cx| async move {
1256 let buffer = request
1257 .await?
1258 .buffer
1259 .ok_or_else(|| anyhow!("invalid buffer"))?;
1260 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1261 .await
1262 })
1263 } else {
1264 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1265 }
1266 }
1267
1268 pub fn save_buffer_as(
1269 &mut self,
1270 buffer: ModelHandle<Buffer>,
1271 abs_path: PathBuf,
1272 cx: &mut ModelContext<Project>,
1273 ) -> Task<Result<()>> {
1274 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1275 let old_path =
1276 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1277 cx.spawn(|this, mut cx| async move {
1278 if let Some(old_path) = old_path {
1279 this.update(&mut cx, |this, cx| {
1280 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1281 });
1282 }
1283 let (worktree, path) = worktree_task.await?;
1284 worktree
1285 .update(&mut cx, |worktree, cx| {
1286 worktree
1287 .as_local_mut()
1288 .unwrap()
1289 .save_buffer_as(buffer.clone(), path, cx)
1290 })
1291 .await?;
1292 this.update(&mut cx, |this, cx| {
1293 this.assign_language_to_buffer(&buffer, cx);
1294 this.register_buffer_with_language_server(&buffer, cx);
1295 });
1296 Ok(())
1297 })
1298 }
1299
1300 pub fn get_open_buffer(
1301 &mut self,
1302 path: &ProjectPath,
1303 cx: &mut ModelContext<Self>,
1304 ) -> Option<ModelHandle<Buffer>> {
1305 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1306 self.opened_buffers.values().find_map(|buffer| {
1307 let buffer = buffer.upgrade(cx)?;
1308 let file = File::from_dyn(buffer.read(cx).file())?;
1309 if file.worktree == worktree && file.path() == &path.path {
1310 Some(buffer)
1311 } else {
1312 None
1313 }
1314 })
1315 }
1316
1317 fn register_buffer(
1318 &mut self,
1319 buffer: &ModelHandle<Buffer>,
1320 cx: &mut ModelContext<Self>,
1321 ) -> Result<()> {
1322 let remote_id = buffer.read(cx).remote_id();
1323 let open_buffer = if self.is_remote() || self.is_shared() {
1324 OpenBuffer::Strong(buffer.clone())
1325 } else {
1326 OpenBuffer::Weak(buffer.downgrade())
1327 };
1328
1329 match self.opened_buffers.insert(remote_id, open_buffer) {
1330 None => {}
1331 Some(OpenBuffer::Loading(operations)) => {
1332 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1333 }
1334 Some(OpenBuffer::Weak(existing_handle)) => {
1335 if existing_handle.upgrade(cx).is_some() {
1336 Err(anyhow!(
1337 "already registered buffer with remote id {}",
1338 remote_id
1339 ))?
1340 }
1341 }
1342 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1343 "already registered buffer with remote id {}",
1344 remote_id
1345 ))?,
1346 }
1347 cx.subscribe(buffer, |this, buffer, event, cx| {
1348 this.on_buffer_event(buffer, event, cx);
1349 })
1350 .detach();
1351
1352 self.assign_language_to_buffer(buffer, cx);
1353 self.register_buffer_with_language_server(buffer, cx);
1354 cx.observe_release(buffer, |this, buffer, cx| {
1355 if let Some(file) = File::from_dyn(buffer.file()) {
1356 if file.is_local() {
1357 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1358 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1359 server
1360 .notify::<lsp::notification::DidCloseTextDocument>(
1361 lsp::DidCloseTextDocumentParams {
1362 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1363 },
1364 )
1365 .log_err();
1366 }
1367 }
1368 }
1369 })
1370 .detach();
1371
1372 Ok(())
1373 }
1374
1375 fn register_buffer_with_language_server(
1376 &mut self,
1377 buffer_handle: &ModelHandle<Buffer>,
1378 cx: &mut ModelContext<Self>,
1379 ) {
1380 let buffer = buffer_handle.read(cx);
1381 let buffer_id = buffer.remote_id();
1382 if let Some(file) = File::from_dyn(buffer.file()) {
1383 if file.is_local() {
1384 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1385 let initial_snapshot = buffer.text_snapshot();
1386
1387 let mut language_server = None;
1388 let mut language_id = None;
1389 if let Some(language) = buffer.language() {
1390 let worktree_id = file.worktree_id(cx);
1391 if let Some(adapter) = language.lsp_adapter() {
1392 language_id = adapter.id_for_language(language.name().as_ref());
1393 language_server = self
1394 .language_servers
1395 .get(&(worktree_id, adapter.name()))
1396 .cloned();
1397 }
1398 }
1399
1400 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1401 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1402 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1403 .log_err();
1404 }
1405 }
1406
1407 if let Some((_, server)) = language_server {
1408 server
1409 .notify::<lsp::notification::DidOpenTextDocument>(
1410 lsp::DidOpenTextDocumentParams {
1411 text_document: lsp::TextDocumentItem::new(
1412 uri,
1413 language_id.unwrap_or_default(),
1414 0,
1415 initial_snapshot.text(),
1416 ),
1417 }
1418 .clone(),
1419 )
1420 .log_err();
1421 buffer_handle.update(cx, |buffer, cx| {
1422 buffer.set_completion_triggers(
1423 server
1424 .capabilities()
1425 .completion_provider
1426 .as_ref()
1427 .and_then(|provider| provider.trigger_characters.clone())
1428 .unwrap_or(Vec::new()),
1429 cx,
1430 )
1431 });
1432 self.buffer_snapshots
1433 .insert(buffer_id, vec![(0, initial_snapshot)]);
1434 }
1435 }
1436 }
1437 }
1438
1439 fn unregister_buffer_from_language_server(
1440 &mut self,
1441 buffer: &ModelHandle<Buffer>,
1442 old_path: PathBuf,
1443 cx: &mut ModelContext<Self>,
1444 ) {
1445 buffer.update(cx, |buffer, cx| {
1446 buffer.update_diagnostics(Default::default(), cx);
1447 self.buffer_snapshots.remove(&buffer.remote_id());
1448 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1449 language_server
1450 .notify::<lsp::notification::DidCloseTextDocument>(
1451 lsp::DidCloseTextDocumentParams {
1452 text_document: lsp::TextDocumentIdentifier::new(
1453 lsp::Url::from_file_path(old_path).unwrap(),
1454 ),
1455 },
1456 )
1457 .log_err();
1458 }
1459 });
1460 }
1461
1462 fn on_buffer_event(
1463 &mut self,
1464 buffer: ModelHandle<Buffer>,
1465 event: &BufferEvent,
1466 cx: &mut ModelContext<Self>,
1467 ) -> Option<()> {
1468 match event {
1469 BufferEvent::Operation(operation) => {
1470 let project_id = self.remote_id()?;
1471 let request = self.client.request(proto::UpdateBuffer {
1472 project_id,
1473 buffer_id: buffer.read(cx).remote_id(),
1474 operations: vec![language::proto::serialize_operation(&operation)],
1475 });
1476 cx.background().spawn(request).detach_and_log_err(cx);
1477 }
1478 BufferEvent::Edited { .. } => {
1479 let (_, language_server) = self
1480 .language_server_for_buffer(buffer.read(cx), cx)?
1481 .clone();
1482 let buffer = buffer.read(cx);
1483 let file = File::from_dyn(buffer.file())?;
1484 let abs_path = file.as_local()?.abs_path(cx);
1485 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1486 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1487 let (version, prev_snapshot) = buffer_snapshots.last()?;
1488 let next_snapshot = buffer.text_snapshot();
1489 let next_version = version + 1;
1490
1491 let content_changes = buffer
1492 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1493 .map(|edit| {
1494 let edit_start = edit.new.start.0;
1495 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1496 let new_text = next_snapshot
1497 .text_for_range(edit.new.start.1..edit.new.end.1)
1498 .collect();
1499 lsp::TextDocumentContentChangeEvent {
1500 range: Some(lsp::Range::new(
1501 point_to_lsp(edit_start),
1502 point_to_lsp(edit_end),
1503 )),
1504 range_length: None,
1505 text: new_text,
1506 }
1507 })
1508 .collect();
1509
1510 buffer_snapshots.push((next_version, next_snapshot));
1511
1512 language_server
1513 .notify::<lsp::notification::DidChangeTextDocument>(
1514 lsp::DidChangeTextDocumentParams {
1515 text_document: lsp::VersionedTextDocumentIdentifier::new(
1516 uri,
1517 next_version,
1518 ),
1519 content_changes,
1520 },
1521 )
1522 .log_err();
1523 }
1524 BufferEvent::Saved => {
1525 let file = File::from_dyn(buffer.read(cx).file())?;
1526 let worktree_id = file.worktree_id(cx);
1527 let abs_path = file.as_local()?.abs_path(cx);
1528 let text_document = lsp::TextDocumentIdentifier {
1529 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1530 };
1531
1532 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1533 server
1534 .notify::<lsp::notification::DidSaveTextDocument>(
1535 lsp::DidSaveTextDocumentParams {
1536 text_document: text_document.clone(),
1537 text: None,
1538 },
1539 )
1540 .log_err();
1541 }
1542 }
1543 _ => {}
1544 }
1545
1546 None
1547 }
1548
1549 fn language_servers_for_worktree(
1550 &self,
1551 worktree_id: WorktreeId,
1552 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1553 self.language_servers.iter().filter_map(
1554 move |((language_server_worktree_id, _), server)| {
1555 if *language_server_worktree_id == worktree_id {
1556 Some(server)
1557 } else {
1558 None
1559 }
1560 },
1561 )
1562 }
1563
1564 fn assign_language_to_buffer(
1565 &mut self,
1566 buffer: &ModelHandle<Buffer>,
1567 cx: &mut ModelContext<Self>,
1568 ) -> Option<()> {
1569 // If the buffer has a language, set it and start the language server if we haven't already.
1570 let full_path = buffer.read(cx).file()?.full_path(cx);
1571 let language = self.languages.select_language(&full_path)?;
1572 buffer.update(cx, |buffer, cx| {
1573 buffer.set_language(Some(language.clone()), cx);
1574 });
1575
1576 let file = File::from_dyn(buffer.read(cx).file())?;
1577 let worktree = file.worktree.read(cx).as_local()?;
1578 let worktree_id = worktree.id();
1579 let worktree_abs_path = worktree.abs_path().clone();
1580 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1581
1582 None
1583 }
1584
1585 fn start_language_server(
1586 &mut self,
1587 worktree_id: WorktreeId,
1588 worktree_path: Arc<Path>,
1589 language: Arc<Language>,
1590 cx: &mut ModelContext<Self>,
1591 ) {
1592 let adapter = if let Some(adapter) = language.lsp_adapter() {
1593 adapter
1594 } else {
1595 return;
1596 };
1597 let key = (worktree_id, adapter.name());
1598 self.started_language_servers
1599 .entry(key.clone())
1600 .or_insert_with(|| {
1601 let server_id = post_inc(&mut self.next_language_server_id);
1602 let language_server = self.languages.start_language_server(
1603 server_id,
1604 language.clone(),
1605 worktree_path,
1606 self.client.http_client(),
1607 cx,
1608 );
1609 cx.spawn_weak(|this, mut cx| async move {
1610 let language_server = language_server?.await.log_err()?;
1611 let language_server = language_server
1612 .initialize(adapter.initialization_options())
1613 .await
1614 .log_err()?;
1615 let this = this.upgrade(&cx)?;
1616 let disk_based_diagnostics_progress_token =
1617 adapter.disk_based_diagnostics_progress_token();
1618
1619 language_server
1620 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1621 let this = this.downgrade();
1622 let adapter = adapter.clone();
1623 move |params, mut cx| {
1624 if let Some(this) = this.upgrade(&cx) {
1625 this.update(&mut cx, |this, cx| {
1626 this.on_lsp_diagnostics_published(
1627 server_id,
1628 params,
1629 &adapter,
1630 disk_based_diagnostics_progress_token,
1631 cx,
1632 );
1633 });
1634 }
1635 }
1636 })
1637 .detach();
1638
1639 language_server
1640 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1641 let settings = this
1642 .read_with(&cx, |this, _| this.language_server_settings.clone());
1643 move |params, _| {
1644 let settings = settings.lock().clone();
1645 async move {
1646 Ok(params
1647 .items
1648 .into_iter()
1649 .map(|item| {
1650 if let Some(section) = &item.section {
1651 settings
1652 .get(section)
1653 .cloned()
1654 .unwrap_or(serde_json::Value::Null)
1655 } else {
1656 settings.clone()
1657 }
1658 })
1659 .collect())
1660 }
1661 }
1662 })
1663 .detach();
1664
1665 language_server
1666 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1667 let this = this.downgrade();
1668 let adapter = adapter.clone();
1669 let language_server = language_server.clone();
1670 move |params, cx| {
1671 Self::on_lsp_workspace_edit(
1672 this,
1673 params,
1674 server_id,
1675 adapter.clone(),
1676 language_server.clone(),
1677 cx,
1678 )
1679 }
1680 })
1681 .detach();
1682
1683 language_server
1684 .on_notification::<lsp::notification::Progress, _>({
1685 let this = this.downgrade();
1686 move |params, mut cx| {
1687 if let Some(this) = this.upgrade(&cx) {
1688 this.update(&mut cx, |this, cx| {
1689 this.on_lsp_progress(
1690 params,
1691 server_id,
1692 disk_based_diagnostics_progress_token,
1693 cx,
1694 );
1695 });
1696 }
1697 }
1698 })
1699 .detach();
1700
1701 this.update(&mut cx, |this, cx| {
1702 this.language_servers
1703 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1704 this.language_server_statuses.insert(
1705 server_id,
1706 LanguageServerStatus {
1707 name: language_server.name().to_string(),
1708 pending_work: Default::default(),
1709 pending_diagnostic_updates: 0,
1710 },
1711 );
1712 language_server
1713 .notify::<lsp::notification::DidChangeConfiguration>(
1714 lsp::DidChangeConfigurationParams {
1715 settings: this.language_server_settings.lock().clone(),
1716 },
1717 )
1718 .ok();
1719
1720 if let Some(project_id) = this.remote_id() {
1721 this.client
1722 .send(proto::StartLanguageServer {
1723 project_id,
1724 server: Some(proto::LanguageServer {
1725 id: server_id as u64,
1726 name: language_server.name().to_string(),
1727 }),
1728 })
1729 .log_err();
1730 }
1731
1732 // Tell the language server about every open buffer in the worktree that matches the language.
1733 for buffer in this.opened_buffers.values() {
1734 if let Some(buffer_handle) = buffer.upgrade(cx) {
1735 let buffer = buffer_handle.read(cx);
1736 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1737 file
1738 } else {
1739 continue;
1740 };
1741 let language = if let Some(language) = buffer.language() {
1742 language
1743 } else {
1744 continue;
1745 };
1746 if file.worktree.read(cx).id() != key.0
1747 || language.lsp_adapter().map(|a| a.name())
1748 != Some(key.1.clone())
1749 {
1750 continue;
1751 }
1752
1753 let file = file.as_local()?;
1754 let versions = this
1755 .buffer_snapshots
1756 .entry(buffer.remote_id())
1757 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1758 let (version, initial_snapshot) = versions.last().unwrap();
1759 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1760 let language_id = adapter.id_for_language(language.name().as_ref());
1761 language_server
1762 .notify::<lsp::notification::DidOpenTextDocument>(
1763 lsp::DidOpenTextDocumentParams {
1764 text_document: lsp::TextDocumentItem::new(
1765 uri,
1766 language_id.unwrap_or_default(),
1767 *version,
1768 initial_snapshot.text(),
1769 ),
1770 },
1771 )
1772 .log_err()?;
1773 buffer_handle.update(cx, |buffer, cx| {
1774 buffer.set_completion_triggers(
1775 language_server
1776 .capabilities()
1777 .completion_provider
1778 .as_ref()
1779 .and_then(|provider| {
1780 provider.trigger_characters.clone()
1781 })
1782 .unwrap_or(Vec::new()),
1783 cx,
1784 )
1785 });
1786 }
1787 }
1788
1789 cx.notify();
1790 Some(())
1791 });
1792
1793 Some(language_server)
1794 })
1795 });
1796 }
1797
1798 pub fn restart_language_servers_for_buffers(
1799 &mut self,
1800 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1801 cx: &mut ModelContext<Self>,
1802 ) -> Option<()> {
1803 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1804 .into_iter()
1805 .filter_map(|buffer| {
1806 let file = File::from_dyn(buffer.read(cx).file())?;
1807 let worktree = file.worktree.read(cx).as_local()?;
1808 let worktree_id = worktree.id();
1809 let worktree_abs_path = worktree.abs_path().clone();
1810 let full_path = file.full_path(cx);
1811 Some((worktree_id, worktree_abs_path, full_path))
1812 })
1813 .collect();
1814 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1815 let language = self.languages.select_language(&full_path)?;
1816 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1817 }
1818
1819 None
1820 }
1821
1822 fn restart_language_server(
1823 &mut self,
1824 worktree_id: WorktreeId,
1825 worktree_path: Arc<Path>,
1826 language: Arc<Language>,
1827 cx: &mut ModelContext<Self>,
1828 ) {
1829 let adapter = if let Some(adapter) = language.lsp_adapter() {
1830 adapter
1831 } else {
1832 return;
1833 };
1834 let key = (worktree_id, adapter.name());
1835 let server_to_shutdown = self.language_servers.remove(&key);
1836 self.started_language_servers.remove(&key);
1837 server_to_shutdown
1838 .as_ref()
1839 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1840 cx.spawn_weak(|this, mut cx| async move {
1841 if let Some(this) = this.upgrade(&cx) {
1842 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1843 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1844 shutdown_task.await;
1845 }
1846 }
1847
1848 this.update(&mut cx, |this, cx| {
1849 this.start_language_server(worktree_id, worktree_path, language, cx);
1850 });
1851 }
1852 })
1853 .detach();
1854 }
1855
1856 fn on_lsp_diagnostics_published(
1857 &mut self,
1858 server_id: usize,
1859 mut params: lsp::PublishDiagnosticsParams,
1860 adapter: &Arc<dyn LspAdapter>,
1861 disk_based_diagnostics_progress_token: Option<&str>,
1862 cx: &mut ModelContext<Self>,
1863 ) {
1864 adapter.process_diagnostics(&mut params);
1865 if disk_based_diagnostics_progress_token.is_none() {
1866 self.disk_based_diagnostics_started(cx);
1867 self.broadcast_language_server_update(
1868 server_id,
1869 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1870 proto::LspDiskBasedDiagnosticsUpdating {},
1871 ),
1872 );
1873 }
1874 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1875 .log_err();
1876 if disk_based_diagnostics_progress_token.is_none() {
1877 self.disk_based_diagnostics_finished(cx);
1878 self.broadcast_language_server_update(
1879 server_id,
1880 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1881 proto::LspDiskBasedDiagnosticsUpdated {},
1882 ),
1883 );
1884 }
1885 }
1886
1887 fn on_lsp_progress(
1888 &mut self,
1889 progress: lsp::ProgressParams,
1890 server_id: usize,
1891 disk_based_diagnostics_progress_token: Option<&str>,
1892 cx: &mut ModelContext<Self>,
1893 ) {
1894 let token = match progress.token {
1895 lsp::NumberOrString::String(token) => token,
1896 lsp::NumberOrString::Number(token) => {
1897 log::info!("skipping numeric progress token {}", token);
1898 return;
1899 }
1900 };
1901 let progress = match progress.value {
1902 lsp::ProgressParamsValue::WorkDone(value) => value,
1903 };
1904 let language_server_status =
1905 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1906 status
1907 } else {
1908 return;
1909 };
1910 match progress {
1911 lsp::WorkDoneProgress::Begin(_) => {
1912 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1913 language_server_status.pending_diagnostic_updates += 1;
1914 if language_server_status.pending_diagnostic_updates == 1 {
1915 self.disk_based_diagnostics_started(cx);
1916 self.broadcast_language_server_update(
1917 server_id,
1918 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1919 proto::LspDiskBasedDiagnosticsUpdating {},
1920 ),
1921 );
1922 }
1923 } else {
1924 self.on_lsp_work_start(server_id, token.clone(), cx);
1925 self.broadcast_language_server_update(
1926 server_id,
1927 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1928 token,
1929 }),
1930 );
1931 }
1932 }
1933 lsp::WorkDoneProgress::Report(report) => {
1934 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1935 self.on_lsp_work_progress(
1936 server_id,
1937 token.clone(),
1938 LanguageServerProgress {
1939 message: report.message.clone(),
1940 percentage: report.percentage.map(|p| p as usize),
1941 last_update_at: Instant::now(),
1942 },
1943 cx,
1944 );
1945 self.broadcast_language_server_update(
1946 server_id,
1947 proto::update_language_server::Variant::WorkProgress(
1948 proto::LspWorkProgress {
1949 token,
1950 message: report.message,
1951 percentage: report.percentage.map(|p| p as u32),
1952 },
1953 ),
1954 );
1955 }
1956 }
1957 lsp::WorkDoneProgress::End(_) => {
1958 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1959 language_server_status.pending_diagnostic_updates -= 1;
1960 if language_server_status.pending_diagnostic_updates == 0 {
1961 self.disk_based_diagnostics_finished(cx);
1962 self.broadcast_language_server_update(
1963 server_id,
1964 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1965 proto::LspDiskBasedDiagnosticsUpdated {},
1966 ),
1967 );
1968 }
1969 } else {
1970 self.on_lsp_work_end(server_id, token.clone(), cx);
1971 self.broadcast_language_server_update(
1972 server_id,
1973 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1974 token,
1975 }),
1976 );
1977 }
1978 }
1979 }
1980 }
1981
1982 fn on_lsp_work_start(
1983 &mut self,
1984 language_server_id: usize,
1985 token: String,
1986 cx: &mut ModelContext<Self>,
1987 ) {
1988 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1989 status.pending_work.insert(
1990 token,
1991 LanguageServerProgress {
1992 message: None,
1993 percentage: None,
1994 last_update_at: Instant::now(),
1995 },
1996 );
1997 cx.notify();
1998 }
1999 }
2000
2001 fn on_lsp_work_progress(
2002 &mut self,
2003 language_server_id: usize,
2004 token: String,
2005 progress: LanguageServerProgress,
2006 cx: &mut ModelContext<Self>,
2007 ) {
2008 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2009 status.pending_work.insert(token, progress);
2010 cx.notify();
2011 }
2012 }
2013
2014 fn on_lsp_work_end(
2015 &mut self,
2016 language_server_id: usize,
2017 token: String,
2018 cx: &mut ModelContext<Self>,
2019 ) {
2020 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2021 status.pending_work.remove(&token);
2022 cx.notify();
2023 }
2024 }
2025
2026 async fn on_lsp_workspace_edit(
2027 this: WeakModelHandle<Self>,
2028 params: lsp::ApplyWorkspaceEditParams,
2029 server_id: usize,
2030 adapter: Arc<dyn LspAdapter>,
2031 language_server: Arc<LanguageServer>,
2032 mut cx: AsyncAppContext,
2033 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2034 let this = this
2035 .upgrade(&cx)
2036 .ok_or_else(|| anyhow!("project project closed"))?;
2037 let transaction = Self::deserialize_workspace_edit(
2038 this.clone(),
2039 params.edit,
2040 true,
2041 adapter.clone(),
2042 language_server.clone(),
2043 &mut cx,
2044 )
2045 .await
2046 .log_err();
2047 this.update(&mut cx, |this, _| {
2048 if let Some(transaction) = transaction {
2049 this.last_workspace_edits_by_language_server
2050 .insert(server_id, transaction);
2051 }
2052 });
2053 Ok(lsp::ApplyWorkspaceEditResponse {
2054 applied: true,
2055 failed_change: None,
2056 failure_reason: None,
2057 })
2058 }
2059
2060 fn broadcast_language_server_update(
2061 &self,
2062 language_server_id: usize,
2063 event: proto::update_language_server::Variant,
2064 ) {
2065 if let Some(project_id) = self.remote_id() {
2066 self.client
2067 .send(proto::UpdateLanguageServer {
2068 project_id,
2069 language_server_id: language_server_id as u64,
2070 variant: Some(event),
2071 })
2072 .log_err();
2073 }
2074 }
2075
2076 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2077 for (_, server) in self.language_servers.values() {
2078 server
2079 .notify::<lsp::notification::DidChangeConfiguration>(
2080 lsp::DidChangeConfigurationParams {
2081 settings: settings.clone(),
2082 },
2083 )
2084 .ok();
2085 }
2086 *self.language_server_settings.lock() = settings;
2087 }
2088
2089 pub fn language_server_statuses(
2090 &self,
2091 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2092 self.language_server_statuses.values()
2093 }
2094
2095 pub fn update_diagnostics(
2096 &mut self,
2097 params: lsp::PublishDiagnosticsParams,
2098 disk_based_sources: &[&str],
2099 cx: &mut ModelContext<Self>,
2100 ) -> Result<()> {
2101 let abs_path = params
2102 .uri
2103 .to_file_path()
2104 .map_err(|_| anyhow!("URI is not a file"))?;
2105 let mut next_group_id = 0;
2106 let mut diagnostics = Vec::default();
2107 let mut primary_diagnostic_group_ids = HashMap::default();
2108 let mut sources_by_group_id = HashMap::default();
2109 let mut supporting_diagnostics = HashMap::default();
2110 for diagnostic in ¶ms.diagnostics {
2111 let source = diagnostic.source.as_ref();
2112 let code = diagnostic.code.as_ref().map(|code| match code {
2113 lsp::NumberOrString::Number(code) => code.to_string(),
2114 lsp::NumberOrString::String(code) => code.clone(),
2115 });
2116 let range = range_from_lsp(diagnostic.range);
2117 let is_supporting = diagnostic
2118 .related_information
2119 .as_ref()
2120 .map_or(false, |infos| {
2121 infos.iter().any(|info| {
2122 primary_diagnostic_group_ids.contains_key(&(
2123 source,
2124 code.clone(),
2125 range_from_lsp(info.location.range),
2126 ))
2127 })
2128 });
2129
2130 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2131 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2132 });
2133
2134 if is_supporting {
2135 supporting_diagnostics.insert(
2136 (source, code.clone(), range),
2137 (diagnostic.severity, is_unnecessary),
2138 );
2139 } else {
2140 let group_id = post_inc(&mut next_group_id);
2141 let is_disk_based = source.map_or(false, |source| {
2142 disk_based_sources.contains(&source.as_str())
2143 });
2144
2145 sources_by_group_id.insert(group_id, source);
2146 primary_diagnostic_group_ids
2147 .insert((source, code.clone(), range.clone()), group_id);
2148
2149 diagnostics.push(DiagnosticEntry {
2150 range,
2151 diagnostic: Diagnostic {
2152 code: code.clone(),
2153 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2154 message: diagnostic.message.clone(),
2155 group_id,
2156 is_primary: true,
2157 is_valid: true,
2158 is_disk_based,
2159 is_unnecessary,
2160 },
2161 });
2162 if let Some(infos) = &diagnostic.related_information {
2163 for info in infos {
2164 if info.location.uri == params.uri && !info.message.is_empty() {
2165 let range = range_from_lsp(info.location.range);
2166 diagnostics.push(DiagnosticEntry {
2167 range,
2168 diagnostic: Diagnostic {
2169 code: code.clone(),
2170 severity: DiagnosticSeverity::INFORMATION,
2171 message: info.message.clone(),
2172 group_id,
2173 is_primary: false,
2174 is_valid: true,
2175 is_disk_based,
2176 is_unnecessary: false,
2177 },
2178 });
2179 }
2180 }
2181 }
2182 }
2183 }
2184
2185 for entry in &mut diagnostics {
2186 let diagnostic = &mut entry.diagnostic;
2187 if !diagnostic.is_primary {
2188 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2189 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2190 source,
2191 diagnostic.code.clone(),
2192 entry.range.clone(),
2193 )) {
2194 if let Some(severity) = severity {
2195 diagnostic.severity = severity;
2196 }
2197 diagnostic.is_unnecessary = is_unnecessary;
2198 }
2199 }
2200 }
2201
2202 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2203 Ok(())
2204 }
2205
2206 pub fn update_diagnostic_entries(
2207 &mut self,
2208 abs_path: PathBuf,
2209 version: Option<i32>,
2210 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2211 cx: &mut ModelContext<Project>,
2212 ) -> Result<(), anyhow::Error> {
2213 let (worktree, relative_path) = self
2214 .find_local_worktree(&abs_path, cx)
2215 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2216 if !worktree.read(cx).is_visible() {
2217 return Ok(());
2218 }
2219
2220 let project_path = ProjectPath {
2221 worktree_id: worktree.read(cx).id(),
2222 path: relative_path.into(),
2223 };
2224 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2225 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2226 }
2227
2228 let updated = worktree.update(cx, |worktree, cx| {
2229 worktree
2230 .as_local_mut()
2231 .ok_or_else(|| anyhow!("not a local worktree"))?
2232 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2233 })?;
2234 if updated {
2235 cx.emit(Event::DiagnosticsUpdated(project_path));
2236 }
2237 Ok(())
2238 }
2239
2240 fn update_buffer_diagnostics(
2241 &mut self,
2242 buffer: &ModelHandle<Buffer>,
2243 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2244 version: Option<i32>,
2245 cx: &mut ModelContext<Self>,
2246 ) -> Result<()> {
2247 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2248 Ordering::Equal
2249 .then_with(|| b.is_primary.cmp(&a.is_primary))
2250 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2251 .then_with(|| a.severity.cmp(&b.severity))
2252 .then_with(|| a.message.cmp(&b.message))
2253 }
2254
2255 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2256
2257 diagnostics.sort_unstable_by(|a, b| {
2258 Ordering::Equal
2259 .then_with(|| a.range.start.cmp(&b.range.start))
2260 .then_with(|| b.range.end.cmp(&a.range.end))
2261 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2262 });
2263
2264 let mut sanitized_diagnostics = Vec::new();
2265 let edits_since_save = Patch::new(
2266 snapshot
2267 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2268 .collect(),
2269 );
2270 for entry in diagnostics {
2271 let start;
2272 let end;
2273 if entry.diagnostic.is_disk_based {
2274 // Some diagnostics are based on files on disk instead of buffers'
2275 // current contents. Adjust these diagnostics' ranges to reflect
2276 // any unsaved edits.
2277 start = edits_since_save.old_to_new(entry.range.start);
2278 end = edits_since_save.old_to_new(entry.range.end);
2279 } else {
2280 start = entry.range.start;
2281 end = entry.range.end;
2282 }
2283
2284 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2285 ..snapshot.clip_point_utf16(end, Bias::Right);
2286
2287 // Expand empty ranges by one character
2288 if range.start == range.end {
2289 range.end.column += 1;
2290 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2291 if range.start == range.end && range.end.column > 0 {
2292 range.start.column -= 1;
2293 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2294 }
2295 }
2296
2297 sanitized_diagnostics.push(DiagnosticEntry {
2298 range,
2299 diagnostic: entry.diagnostic,
2300 });
2301 }
2302 drop(edits_since_save);
2303
2304 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2305 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2306 Ok(())
2307 }
2308
2309 pub fn reload_buffers(
2310 &self,
2311 buffers: HashSet<ModelHandle<Buffer>>,
2312 push_to_history: bool,
2313 cx: &mut ModelContext<Self>,
2314 ) -> Task<Result<ProjectTransaction>> {
2315 let mut local_buffers = Vec::new();
2316 let mut remote_buffers = None;
2317 for buffer_handle in buffers {
2318 let buffer = buffer_handle.read(cx);
2319 if buffer.is_dirty() {
2320 if let Some(file) = File::from_dyn(buffer.file()) {
2321 if file.is_local() {
2322 local_buffers.push(buffer_handle);
2323 } else {
2324 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2325 }
2326 }
2327 }
2328 }
2329
2330 let remote_buffers = self.remote_id().zip(remote_buffers);
2331 let client = self.client.clone();
2332
2333 cx.spawn(|this, mut cx| async move {
2334 let mut project_transaction = ProjectTransaction::default();
2335
2336 if let Some((project_id, remote_buffers)) = remote_buffers {
2337 let response = client
2338 .request(proto::ReloadBuffers {
2339 project_id,
2340 buffer_ids: remote_buffers
2341 .iter()
2342 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2343 .collect(),
2344 })
2345 .await?
2346 .transaction
2347 .ok_or_else(|| anyhow!("missing transaction"))?;
2348 project_transaction = this
2349 .update(&mut cx, |this, cx| {
2350 this.deserialize_project_transaction(response, push_to_history, cx)
2351 })
2352 .await?;
2353 }
2354
2355 for buffer in local_buffers {
2356 let transaction = buffer
2357 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2358 .await?;
2359 buffer.update(&mut cx, |buffer, cx| {
2360 if let Some(transaction) = transaction {
2361 if !push_to_history {
2362 buffer.forget_transaction(transaction.id);
2363 }
2364 project_transaction.0.insert(cx.handle(), transaction);
2365 }
2366 });
2367 }
2368
2369 Ok(project_transaction)
2370 })
2371 }
2372
2373 pub fn format(
2374 &self,
2375 buffers: HashSet<ModelHandle<Buffer>>,
2376 push_to_history: bool,
2377 cx: &mut ModelContext<Project>,
2378 ) -> Task<Result<ProjectTransaction>> {
2379 let mut local_buffers = Vec::new();
2380 let mut remote_buffers = None;
2381 for buffer_handle in buffers {
2382 let buffer = buffer_handle.read(cx);
2383 if let Some(file) = File::from_dyn(buffer.file()) {
2384 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2385 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2386 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2387 }
2388 } else {
2389 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2390 }
2391 } else {
2392 return Task::ready(Ok(Default::default()));
2393 }
2394 }
2395
2396 let remote_buffers = self.remote_id().zip(remote_buffers);
2397 let client = self.client.clone();
2398
2399 cx.spawn(|this, mut cx| async move {
2400 let mut project_transaction = ProjectTransaction::default();
2401
2402 if let Some((project_id, remote_buffers)) = remote_buffers {
2403 let response = client
2404 .request(proto::FormatBuffers {
2405 project_id,
2406 buffer_ids: remote_buffers
2407 .iter()
2408 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2409 .collect(),
2410 })
2411 .await?
2412 .transaction
2413 .ok_or_else(|| anyhow!("missing transaction"))?;
2414 project_transaction = this
2415 .update(&mut cx, |this, cx| {
2416 this.deserialize_project_transaction(response, push_to_history, cx)
2417 })
2418 .await?;
2419 }
2420
2421 for (buffer, buffer_abs_path, language_server) in local_buffers {
2422 let text_document = lsp::TextDocumentIdentifier::new(
2423 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2424 );
2425 let capabilities = &language_server.capabilities();
2426 let tab_size = cx.update(|cx| {
2427 let language_name = buffer.read(cx).language().map(|language| language.name());
2428 cx.global::<Settings>().tab_size(language_name.as_deref())
2429 });
2430 let lsp_edits = if capabilities
2431 .document_formatting_provider
2432 .as_ref()
2433 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2434 {
2435 language_server
2436 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2437 text_document,
2438 options: lsp::FormattingOptions {
2439 tab_size,
2440 insert_spaces: true,
2441 insert_final_newline: Some(true),
2442 ..Default::default()
2443 },
2444 work_done_progress_params: Default::default(),
2445 })
2446 .await?
2447 } else if capabilities
2448 .document_range_formatting_provider
2449 .as_ref()
2450 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2451 {
2452 let buffer_start = lsp::Position::new(0, 0);
2453 let buffer_end =
2454 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2455 language_server
2456 .request::<lsp::request::RangeFormatting>(
2457 lsp::DocumentRangeFormattingParams {
2458 text_document,
2459 range: lsp::Range::new(buffer_start, buffer_end),
2460 options: lsp::FormattingOptions {
2461 tab_size: 4,
2462 insert_spaces: true,
2463 insert_final_newline: Some(true),
2464 ..Default::default()
2465 },
2466 work_done_progress_params: Default::default(),
2467 },
2468 )
2469 .await?
2470 } else {
2471 continue;
2472 };
2473
2474 if let Some(lsp_edits) = lsp_edits {
2475 let edits = this
2476 .update(&mut cx, |this, cx| {
2477 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2478 })
2479 .await?;
2480 buffer.update(&mut cx, |buffer, cx| {
2481 buffer.finalize_last_transaction();
2482 buffer.start_transaction();
2483 for (range, text) in edits {
2484 buffer.edit([(range, text)], cx);
2485 }
2486 if buffer.end_transaction(cx).is_some() {
2487 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2488 if !push_to_history {
2489 buffer.forget_transaction(transaction.id);
2490 }
2491 project_transaction.0.insert(cx.handle(), transaction);
2492 }
2493 });
2494 }
2495 }
2496
2497 Ok(project_transaction)
2498 })
2499 }
2500
2501 pub fn definition<T: ToPointUtf16>(
2502 &self,
2503 buffer: &ModelHandle<Buffer>,
2504 position: T,
2505 cx: &mut ModelContext<Self>,
2506 ) -> Task<Result<Vec<Location>>> {
2507 let position = position.to_point_utf16(buffer.read(cx));
2508 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2509 }
2510
2511 pub fn references<T: ToPointUtf16>(
2512 &self,
2513 buffer: &ModelHandle<Buffer>,
2514 position: T,
2515 cx: &mut ModelContext<Self>,
2516 ) -> Task<Result<Vec<Location>>> {
2517 let position = position.to_point_utf16(buffer.read(cx));
2518 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2519 }
2520
2521 pub fn document_highlights<T: ToPointUtf16>(
2522 &self,
2523 buffer: &ModelHandle<Buffer>,
2524 position: T,
2525 cx: &mut ModelContext<Self>,
2526 ) -> Task<Result<Vec<DocumentHighlight>>> {
2527 let position = position.to_point_utf16(buffer.read(cx));
2528
2529 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2530 }
2531
2532 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2533 if self.is_local() {
2534 let mut requests = Vec::new();
2535 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2536 let worktree_id = *worktree_id;
2537 if let Some(worktree) = self
2538 .worktree_for_id(worktree_id, cx)
2539 .and_then(|worktree| worktree.read(cx).as_local())
2540 {
2541 let lsp_adapter = lsp_adapter.clone();
2542 let worktree_abs_path = worktree.abs_path().clone();
2543 requests.push(
2544 language_server
2545 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2546 query: query.to_string(),
2547 ..Default::default()
2548 })
2549 .log_err()
2550 .map(move |response| {
2551 (
2552 lsp_adapter,
2553 worktree_id,
2554 worktree_abs_path,
2555 response.unwrap_or_default(),
2556 )
2557 }),
2558 );
2559 }
2560 }
2561
2562 cx.spawn_weak(|this, cx| async move {
2563 let responses = futures::future::join_all(requests).await;
2564 let this = if let Some(this) = this.upgrade(&cx) {
2565 this
2566 } else {
2567 return Ok(Default::default());
2568 };
2569 this.read_with(&cx, |this, cx| {
2570 let mut symbols = Vec::new();
2571 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2572 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2573 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2574 let mut worktree_id = source_worktree_id;
2575 let path;
2576 if let Some((worktree, rel_path)) =
2577 this.find_local_worktree(&abs_path, cx)
2578 {
2579 worktree_id = worktree.read(cx).id();
2580 path = rel_path;
2581 } else {
2582 path = relativize_path(&worktree_abs_path, &abs_path);
2583 }
2584
2585 let label = this
2586 .languages
2587 .select_language(&path)
2588 .and_then(|language| {
2589 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2590 })
2591 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2592 let signature = this.symbol_signature(worktree_id, &path);
2593
2594 Some(Symbol {
2595 source_worktree_id,
2596 worktree_id,
2597 language_server_name: adapter.name(),
2598 name: lsp_symbol.name,
2599 kind: lsp_symbol.kind,
2600 label,
2601 path,
2602 range: range_from_lsp(lsp_symbol.location.range),
2603 signature,
2604 })
2605 }));
2606 }
2607 Ok(symbols)
2608 })
2609 })
2610 } else if let Some(project_id) = self.remote_id() {
2611 let request = self.client.request(proto::GetProjectSymbols {
2612 project_id,
2613 query: query.to_string(),
2614 });
2615 cx.spawn_weak(|this, cx| async move {
2616 let response = request.await?;
2617 let mut symbols = Vec::new();
2618 if let Some(this) = this.upgrade(&cx) {
2619 this.read_with(&cx, |this, _| {
2620 symbols.extend(
2621 response
2622 .symbols
2623 .into_iter()
2624 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2625 );
2626 })
2627 }
2628 Ok(symbols)
2629 })
2630 } else {
2631 Task::ready(Ok(Default::default()))
2632 }
2633 }
2634
2635 pub fn open_buffer_for_symbol(
2636 &mut self,
2637 symbol: &Symbol,
2638 cx: &mut ModelContext<Self>,
2639 ) -> Task<Result<ModelHandle<Buffer>>> {
2640 if self.is_local() {
2641 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2642 symbol.source_worktree_id,
2643 symbol.language_server_name.clone(),
2644 )) {
2645 server.clone()
2646 } else {
2647 return Task::ready(Err(anyhow!(
2648 "language server for worktree and language not found"
2649 )));
2650 };
2651
2652 let worktree_abs_path = if let Some(worktree_abs_path) = self
2653 .worktree_for_id(symbol.worktree_id, cx)
2654 .and_then(|worktree| worktree.read(cx).as_local())
2655 .map(|local_worktree| local_worktree.abs_path())
2656 {
2657 worktree_abs_path
2658 } else {
2659 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2660 };
2661 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2662 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2663 uri
2664 } else {
2665 return Task::ready(Err(anyhow!("invalid symbol path")));
2666 };
2667
2668 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2669 } else if let Some(project_id) = self.remote_id() {
2670 let request = self.client.request(proto::OpenBufferForSymbol {
2671 project_id,
2672 symbol: Some(serialize_symbol(symbol)),
2673 });
2674 cx.spawn(|this, mut cx| async move {
2675 let response = request.await?;
2676 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2677 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2678 .await
2679 })
2680 } else {
2681 Task::ready(Err(anyhow!("project does not have a remote id")))
2682 }
2683 }
2684
2685 pub fn completions<T: ToPointUtf16>(
2686 &self,
2687 source_buffer_handle: &ModelHandle<Buffer>,
2688 position: T,
2689 cx: &mut ModelContext<Self>,
2690 ) -> Task<Result<Vec<Completion>>> {
2691 let source_buffer_handle = source_buffer_handle.clone();
2692 let source_buffer = source_buffer_handle.read(cx);
2693 let buffer_id = source_buffer.remote_id();
2694 let language = source_buffer.language().cloned();
2695 let worktree;
2696 let buffer_abs_path;
2697 if let Some(file) = File::from_dyn(source_buffer.file()) {
2698 worktree = file.worktree.clone();
2699 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2700 } else {
2701 return Task::ready(Ok(Default::default()));
2702 };
2703
2704 let position = position.to_point_utf16(source_buffer);
2705 let anchor = source_buffer.anchor_after(position);
2706
2707 if worktree.read(cx).as_local().is_some() {
2708 let buffer_abs_path = buffer_abs_path.unwrap();
2709 let (_, lang_server) =
2710 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2711 server.clone()
2712 } else {
2713 return Task::ready(Ok(Default::default()));
2714 };
2715
2716 cx.spawn(|_, cx| async move {
2717 let completions = lang_server
2718 .request::<lsp::request::Completion>(lsp::CompletionParams {
2719 text_document_position: lsp::TextDocumentPositionParams::new(
2720 lsp::TextDocumentIdentifier::new(
2721 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2722 ),
2723 point_to_lsp(position),
2724 ),
2725 context: Default::default(),
2726 work_done_progress_params: Default::default(),
2727 partial_result_params: Default::default(),
2728 })
2729 .await
2730 .context("lsp completion request failed")?;
2731
2732 let completions = if let Some(completions) = completions {
2733 match completions {
2734 lsp::CompletionResponse::Array(completions) => completions,
2735 lsp::CompletionResponse::List(list) => list.items,
2736 }
2737 } else {
2738 Default::default()
2739 };
2740
2741 source_buffer_handle.read_with(&cx, |this, _| {
2742 let snapshot = this.snapshot();
2743 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2744 let mut range_for_token = None;
2745 Ok(completions
2746 .into_iter()
2747 .filter_map(|lsp_completion| {
2748 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2749 // If the language server provides a range to overwrite, then
2750 // check that the range is valid.
2751 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2752 let range = range_from_lsp(edit.range);
2753 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2754 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2755 if start != range.start || end != range.end {
2756 log::info!("completion out of expected range");
2757 return None;
2758 }
2759 (
2760 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2761 edit.new_text.clone(),
2762 )
2763 }
2764 // If the language server does not provide a range, then infer
2765 // the range based on the syntax tree.
2766 None => {
2767 if position != clipped_position {
2768 log::info!("completion out of expected range");
2769 return None;
2770 }
2771 let Range { start, end } = range_for_token
2772 .get_or_insert_with(|| {
2773 let offset = position.to_offset(&snapshot);
2774 snapshot
2775 .range_for_word_token_at(offset)
2776 .unwrap_or_else(|| offset..offset)
2777 })
2778 .clone();
2779 let text = lsp_completion
2780 .insert_text
2781 .as_ref()
2782 .unwrap_or(&lsp_completion.label)
2783 .clone();
2784 (
2785 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2786 text.clone(),
2787 )
2788 }
2789 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2790 log::info!("unsupported insert/replace completion");
2791 return None;
2792 }
2793 };
2794
2795 Some(Completion {
2796 old_range,
2797 new_text,
2798 label: language
2799 .as_ref()
2800 .and_then(|l| l.label_for_completion(&lsp_completion))
2801 .unwrap_or_else(|| {
2802 CodeLabel::plain(
2803 lsp_completion.label.clone(),
2804 lsp_completion.filter_text.as_deref(),
2805 )
2806 }),
2807 lsp_completion,
2808 })
2809 })
2810 .collect())
2811 })
2812 })
2813 } else if let Some(project_id) = self.remote_id() {
2814 let rpc = self.client.clone();
2815 let message = proto::GetCompletions {
2816 project_id,
2817 buffer_id,
2818 position: Some(language::proto::serialize_anchor(&anchor)),
2819 version: serialize_version(&source_buffer.version()),
2820 };
2821 cx.spawn_weak(|_, mut cx| async move {
2822 let response = rpc.request(message).await?;
2823
2824 source_buffer_handle
2825 .update(&mut cx, |buffer, _| {
2826 buffer.wait_for_version(deserialize_version(response.version))
2827 })
2828 .await;
2829
2830 response
2831 .completions
2832 .into_iter()
2833 .map(|completion| {
2834 language::proto::deserialize_completion(completion, language.as_ref())
2835 })
2836 .collect()
2837 })
2838 } else {
2839 Task::ready(Ok(Default::default()))
2840 }
2841 }
2842
2843 pub fn apply_additional_edits_for_completion(
2844 &self,
2845 buffer_handle: ModelHandle<Buffer>,
2846 completion: Completion,
2847 push_to_history: bool,
2848 cx: &mut ModelContext<Self>,
2849 ) -> Task<Result<Option<Transaction>>> {
2850 let buffer = buffer_handle.read(cx);
2851 let buffer_id = buffer.remote_id();
2852
2853 if self.is_local() {
2854 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2855 {
2856 server.clone()
2857 } else {
2858 return Task::ready(Ok(Default::default()));
2859 };
2860
2861 cx.spawn(|this, mut cx| async move {
2862 let resolved_completion = lang_server
2863 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2864 .await?;
2865 if let Some(edits) = resolved_completion.additional_text_edits {
2866 let edits = this
2867 .update(&mut cx, |this, cx| {
2868 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2869 })
2870 .await?;
2871 buffer_handle.update(&mut cx, |buffer, cx| {
2872 buffer.finalize_last_transaction();
2873 buffer.start_transaction();
2874 for (range, text) in edits {
2875 buffer.edit([(range, text)], cx);
2876 }
2877 let transaction = if buffer.end_transaction(cx).is_some() {
2878 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2879 if !push_to_history {
2880 buffer.forget_transaction(transaction.id);
2881 }
2882 Some(transaction)
2883 } else {
2884 None
2885 };
2886 Ok(transaction)
2887 })
2888 } else {
2889 Ok(None)
2890 }
2891 })
2892 } else if let Some(project_id) = self.remote_id() {
2893 let client = self.client.clone();
2894 cx.spawn(|_, mut cx| async move {
2895 let response = client
2896 .request(proto::ApplyCompletionAdditionalEdits {
2897 project_id,
2898 buffer_id,
2899 completion: Some(language::proto::serialize_completion(&completion)),
2900 })
2901 .await?;
2902
2903 if let Some(transaction) = response.transaction {
2904 let transaction = language::proto::deserialize_transaction(transaction)?;
2905 buffer_handle
2906 .update(&mut cx, |buffer, _| {
2907 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2908 })
2909 .await;
2910 if push_to_history {
2911 buffer_handle.update(&mut cx, |buffer, _| {
2912 buffer.push_transaction(transaction.clone(), Instant::now());
2913 });
2914 }
2915 Ok(Some(transaction))
2916 } else {
2917 Ok(None)
2918 }
2919 })
2920 } else {
2921 Task::ready(Err(anyhow!("project does not have a remote id")))
2922 }
2923 }
2924
2925 pub fn code_actions<T: Clone + ToOffset>(
2926 &self,
2927 buffer_handle: &ModelHandle<Buffer>,
2928 range: Range<T>,
2929 cx: &mut ModelContext<Self>,
2930 ) -> Task<Result<Vec<CodeAction>>> {
2931 let buffer_handle = buffer_handle.clone();
2932 let buffer = buffer_handle.read(cx);
2933 let snapshot = buffer.snapshot();
2934 let relevant_diagnostics = snapshot
2935 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2936 .map(|entry| entry.to_lsp_diagnostic_stub())
2937 .collect();
2938 let buffer_id = buffer.remote_id();
2939 let worktree;
2940 let buffer_abs_path;
2941 if let Some(file) = File::from_dyn(buffer.file()) {
2942 worktree = file.worktree.clone();
2943 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2944 } else {
2945 return Task::ready(Ok(Default::default()));
2946 };
2947 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2948
2949 if worktree.read(cx).as_local().is_some() {
2950 let buffer_abs_path = buffer_abs_path.unwrap();
2951 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2952 {
2953 server.clone()
2954 } else {
2955 return Task::ready(Ok(Default::default()));
2956 };
2957
2958 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2959 cx.foreground().spawn(async move {
2960 if !lang_server.capabilities().code_action_provider.is_some() {
2961 return Ok(Default::default());
2962 }
2963
2964 Ok(lang_server
2965 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2966 text_document: lsp::TextDocumentIdentifier::new(
2967 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2968 ),
2969 range: lsp_range,
2970 work_done_progress_params: Default::default(),
2971 partial_result_params: Default::default(),
2972 context: lsp::CodeActionContext {
2973 diagnostics: relevant_diagnostics,
2974 only: Some(vec![
2975 lsp::CodeActionKind::QUICKFIX,
2976 lsp::CodeActionKind::REFACTOR,
2977 lsp::CodeActionKind::REFACTOR_EXTRACT,
2978 lsp::CodeActionKind::SOURCE,
2979 ]),
2980 },
2981 })
2982 .await?
2983 .unwrap_or_default()
2984 .into_iter()
2985 .filter_map(|entry| {
2986 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2987 Some(CodeAction {
2988 range: range.clone(),
2989 lsp_action,
2990 })
2991 } else {
2992 None
2993 }
2994 })
2995 .collect())
2996 })
2997 } else if let Some(project_id) = self.remote_id() {
2998 let rpc = self.client.clone();
2999 let version = buffer.version();
3000 cx.spawn_weak(|_, mut cx| async move {
3001 let response = rpc
3002 .request(proto::GetCodeActions {
3003 project_id,
3004 buffer_id,
3005 start: Some(language::proto::serialize_anchor(&range.start)),
3006 end: Some(language::proto::serialize_anchor(&range.end)),
3007 version: serialize_version(&version),
3008 })
3009 .await?;
3010
3011 buffer_handle
3012 .update(&mut cx, |buffer, _| {
3013 buffer.wait_for_version(deserialize_version(response.version))
3014 })
3015 .await;
3016
3017 response
3018 .actions
3019 .into_iter()
3020 .map(language::proto::deserialize_code_action)
3021 .collect()
3022 })
3023 } else {
3024 Task::ready(Ok(Default::default()))
3025 }
3026 }
3027
3028 pub fn apply_code_action(
3029 &self,
3030 buffer_handle: ModelHandle<Buffer>,
3031 mut action: CodeAction,
3032 push_to_history: bool,
3033 cx: &mut ModelContext<Self>,
3034 ) -> Task<Result<ProjectTransaction>> {
3035 if self.is_local() {
3036 let buffer = buffer_handle.read(cx);
3037 let (lsp_adapter, lang_server) =
3038 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3039 server.clone()
3040 } else {
3041 return Task::ready(Ok(Default::default()));
3042 };
3043 let range = action.range.to_point_utf16(buffer);
3044
3045 cx.spawn(|this, mut cx| async move {
3046 if let Some(lsp_range) = action
3047 .lsp_action
3048 .data
3049 .as_mut()
3050 .and_then(|d| d.get_mut("codeActionParams"))
3051 .and_then(|d| d.get_mut("range"))
3052 {
3053 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3054 action.lsp_action = lang_server
3055 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3056 .await?;
3057 } else {
3058 let actions = this
3059 .update(&mut cx, |this, cx| {
3060 this.code_actions(&buffer_handle, action.range, cx)
3061 })
3062 .await?;
3063 action.lsp_action = actions
3064 .into_iter()
3065 .find(|a| a.lsp_action.title == action.lsp_action.title)
3066 .ok_or_else(|| anyhow!("code action is outdated"))?
3067 .lsp_action;
3068 }
3069
3070 if let Some(edit) = action.lsp_action.edit {
3071 Self::deserialize_workspace_edit(
3072 this,
3073 edit,
3074 push_to_history,
3075 lsp_adapter,
3076 lang_server,
3077 &mut cx,
3078 )
3079 .await
3080 } else if let Some(command) = action.lsp_action.command {
3081 this.update(&mut cx, |this, _| {
3082 this.last_workspace_edits_by_language_server
3083 .remove(&lang_server.server_id());
3084 });
3085 lang_server
3086 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3087 command: command.command,
3088 arguments: command.arguments.unwrap_or_default(),
3089 ..Default::default()
3090 })
3091 .await?;
3092 Ok(this.update(&mut cx, |this, _| {
3093 this.last_workspace_edits_by_language_server
3094 .remove(&lang_server.server_id())
3095 .unwrap_or_default()
3096 }))
3097 } else {
3098 Ok(ProjectTransaction::default())
3099 }
3100 })
3101 } else if let Some(project_id) = self.remote_id() {
3102 let client = self.client.clone();
3103 let request = proto::ApplyCodeAction {
3104 project_id,
3105 buffer_id: buffer_handle.read(cx).remote_id(),
3106 action: Some(language::proto::serialize_code_action(&action)),
3107 };
3108 cx.spawn(|this, mut cx| async move {
3109 let response = client
3110 .request(request)
3111 .await?
3112 .transaction
3113 .ok_or_else(|| anyhow!("missing transaction"))?;
3114 this.update(&mut cx, |this, cx| {
3115 this.deserialize_project_transaction(response, push_to_history, cx)
3116 })
3117 .await
3118 })
3119 } else {
3120 Task::ready(Err(anyhow!("project does not have a remote id")))
3121 }
3122 }
3123
3124 async fn deserialize_workspace_edit(
3125 this: ModelHandle<Self>,
3126 edit: lsp::WorkspaceEdit,
3127 push_to_history: bool,
3128 lsp_adapter: Arc<dyn LspAdapter>,
3129 language_server: Arc<LanguageServer>,
3130 cx: &mut AsyncAppContext,
3131 ) -> Result<ProjectTransaction> {
3132 let fs = this.read_with(cx, |this, _| this.fs.clone());
3133 let mut operations = Vec::new();
3134 if let Some(document_changes) = edit.document_changes {
3135 match document_changes {
3136 lsp::DocumentChanges::Edits(edits) => {
3137 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3138 }
3139 lsp::DocumentChanges::Operations(ops) => operations = ops,
3140 }
3141 } else if let Some(changes) = edit.changes {
3142 operations.extend(changes.into_iter().map(|(uri, edits)| {
3143 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3144 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3145 uri,
3146 version: None,
3147 },
3148 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3149 })
3150 }));
3151 }
3152
3153 let mut project_transaction = ProjectTransaction::default();
3154 for operation in operations {
3155 match operation {
3156 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3157 let abs_path = op
3158 .uri
3159 .to_file_path()
3160 .map_err(|_| anyhow!("can't convert URI to path"))?;
3161
3162 if let Some(parent_path) = abs_path.parent() {
3163 fs.create_dir(parent_path).await?;
3164 }
3165 if abs_path.ends_with("/") {
3166 fs.create_dir(&abs_path).await?;
3167 } else {
3168 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3169 .await?;
3170 }
3171 }
3172 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3173 let source_abs_path = op
3174 .old_uri
3175 .to_file_path()
3176 .map_err(|_| anyhow!("can't convert URI to path"))?;
3177 let target_abs_path = op
3178 .new_uri
3179 .to_file_path()
3180 .map_err(|_| anyhow!("can't convert URI to path"))?;
3181 fs.rename(
3182 &source_abs_path,
3183 &target_abs_path,
3184 op.options.map(Into::into).unwrap_or_default(),
3185 )
3186 .await?;
3187 }
3188 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3189 let abs_path = op
3190 .uri
3191 .to_file_path()
3192 .map_err(|_| anyhow!("can't convert URI to path"))?;
3193 let options = op.options.map(Into::into).unwrap_or_default();
3194 if abs_path.ends_with("/") {
3195 fs.remove_dir(&abs_path, options).await?;
3196 } else {
3197 fs.remove_file(&abs_path, options).await?;
3198 }
3199 }
3200 lsp::DocumentChangeOperation::Edit(op) => {
3201 let buffer_to_edit = this
3202 .update(cx, |this, cx| {
3203 this.open_local_buffer_via_lsp(
3204 op.text_document.uri,
3205 lsp_adapter.clone(),
3206 language_server.clone(),
3207 cx,
3208 )
3209 })
3210 .await?;
3211
3212 let edits = this
3213 .update(cx, |this, cx| {
3214 let edits = op.edits.into_iter().map(|edit| match edit {
3215 lsp::OneOf::Left(edit) => edit,
3216 lsp::OneOf::Right(edit) => edit.text_edit,
3217 });
3218 this.edits_from_lsp(
3219 &buffer_to_edit,
3220 edits,
3221 op.text_document.version,
3222 cx,
3223 )
3224 })
3225 .await?;
3226
3227 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3228 buffer.finalize_last_transaction();
3229 buffer.start_transaction();
3230 for (range, text) in edits {
3231 buffer.edit([(range, text)], cx);
3232 }
3233 let transaction = if buffer.end_transaction(cx).is_some() {
3234 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3235 if !push_to_history {
3236 buffer.forget_transaction(transaction.id);
3237 }
3238 Some(transaction)
3239 } else {
3240 None
3241 };
3242
3243 transaction
3244 });
3245 if let Some(transaction) = transaction {
3246 project_transaction.0.insert(buffer_to_edit, transaction);
3247 }
3248 }
3249 }
3250 }
3251
3252 Ok(project_transaction)
3253 }
3254
3255 pub fn prepare_rename<T: ToPointUtf16>(
3256 &self,
3257 buffer: ModelHandle<Buffer>,
3258 position: T,
3259 cx: &mut ModelContext<Self>,
3260 ) -> Task<Result<Option<Range<Anchor>>>> {
3261 let position = position.to_point_utf16(buffer.read(cx));
3262 self.request_lsp(buffer, PrepareRename { position }, cx)
3263 }
3264
3265 pub fn perform_rename<T: ToPointUtf16>(
3266 &self,
3267 buffer: ModelHandle<Buffer>,
3268 position: T,
3269 new_name: String,
3270 push_to_history: bool,
3271 cx: &mut ModelContext<Self>,
3272 ) -> Task<Result<ProjectTransaction>> {
3273 let position = position.to_point_utf16(buffer.read(cx));
3274 self.request_lsp(
3275 buffer,
3276 PerformRename {
3277 position,
3278 new_name,
3279 push_to_history,
3280 },
3281 cx,
3282 )
3283 }
3284
3285 pub fn search(
3286 &self,
3287 query: SearchQuery,
3288 cx: &mut ModelContext<Self>,
3289 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3290 if self.is_local() {
3291 let snapshots = self
3292 .visible_worktrees(cx)
3293 .filter_map(|tree| {
3294 let tree = tree.read(cx).as_local()?;
3295 Some(tree.snapshot())
3296 })
3297 .collect::<Vec<_>>();
3298
3299 let background = cx.background().clone();
3300 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3301 if path_count == 0 {
3302 return Task::ready(Ok(Default::default()));
3303 }
3304 let workers = background.num_cpus().min(path_count);
3305 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3306 cx.background()
3307 .spawn({
3308 let fs = self.fs.clone();
3309 let background = cx.background().clone();
3310 let query = query.clone();
3311 async move {
3312 let fs = &fs;
3313 let query = &query;
3314 let matching_paths_tx = &matching_paths_tx;
3315 let paths_per_worker = (path_count + workers - 1) / workers;
3316 let snapshots = &snapshots;
3317 background
3318 .scoped(|scope| {
3319 for worker_ix in 0..workers {
3320 let worker_start_ix = worker_ix * paths_per_worker;
3321 let worker_end_ix = worker_start_ix + paths_per_worker;
3322 scope.spawn(async move {
3323 let mut snapshot_start_ix = 0;
3324 let mut abs_path = PathBuf::new();
3325 for snapshot in snapshots {
3326 let snapshot_end_ix =
3327 snapshot_start_ix + snapshot.visible_file_count();
3328 if worker_end_ix <= snapshot_start_ix {
3329 break;
3330 } else if worker_start_ix > snapshot_end_ix {
3331 snapshot_start_ix = snapshot_end_ix;
3332 continue;
3333 } else {
3334 let start_in_snapshot = worker_start_ix
3335 .saturating_sub(snapshot_start_ix);
3336 let end_in_snapshot =
3337 cmp::min(worker_end_ix, snapshot_end_ix)
3338 - snapshot_start_ix;
3339
3340 for entry in snapshot
3341 .files(false, start_in_snapshot)
3342 .take(end_in_snapshot - start_in_snapshot)
3343 {
3344 if matching_paths_tx.is_closed() {
3345 break;
3346 }
3347
3348 abs_path.clear();
3349 abs_path.push(&snapshot.abs_path());
3350 abs_path.push(&entry.path);
3351 let matches = if let Some(file) =
3352 fs.open_sync(&abs_path).await.log_err()
3353 {
3354 query.detect(file).unwrap_or(false)
3355 } else {
3356 false
3357 };
3358
3359 if matches {
3360 let project_path =
3361 (snapshot.id(), entry.path.clone());
3362 if matching_paths_tx
3363 .send(project_path)
3364 .await
3365 .is_err()
3366 {
3367 break;
3368 }
3369 }
3370 }
3371
3372 snapshot_start_ix = snapshot_end_ix;
3373 }
3374 }
3375 });
3376 }
3377 })
3378 .await;
3379 }
3380 })
3381 .detach();
3382
3383 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3384 let open_buffers = self
3385 .opened_buffers
3386 .values()
3387 .filter_map(|b| b.upgrade(cx))
3388 .collect::<HashSet<_>>();
3389 cx.spawn(|this, cx| async move {
3390 for buffer in &open_buffers {
3391 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3392 buffers_tx.send((buffer.clone(), snapshot)).await?;
3393 }
3394
3395 let open_buffers = Rc::new(RefCell::new(open_buffers));
3396 while let Some(project_path) = matching_paths_rx.next().await {
3397 if buffers_tx.is_closed() {
3398 break;
3399 }
3400
3401 let this = this.clone();
3402 let open_buffers = open_buffers.clone();
3403 let buffers_tx = buffers_tx.clone();
3404 cx.spawn(|mut cx| async move {
3405 if let Some(buffer) = this
3406 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3407 .await
3408 .log_err()
3409 {
3410 if open_buffers.borrow_mut().insert(buffer.clone()) {
3411 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3412 buffers_tx.send((buffer, snapshot)).await?;
3413 }
3414 }
3415
3416 Ok::<_, anyhow::Error>(())
3417 })
3418 .detach();
3419 }
3420
3421 Ok::<_, anyhow::Error>(())
3422 })
3423 .detach_and_log_err(cx);
3424
3425 let background = cx.background().clone();
3426 cx.background().spawn(async move {
3427 let query = &query;
3428 let mut matched_buffers = Vec::new();
3429 for _ in 0..workers {
3430 matched_buffers.push(HashMap::default());
3431 }
3432 background
3433 .scoped(|scope| {
3434 for worker_matched_buffers in matched_buffers.iter_mut() {
3435 let mut buffers_rx = buffers_rx.clone();
3436 scope.spawn(async move {
3437 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3438 let buffer_matches = query
3439 .search(snapshot.as_rope())
3440 .await
3441 .iter()
3442 .map(|range| {
3443 snapshot.anchor_before(range.start)
3444 ..snapshot.anchor_after(range.end)
3445 })
3446 .collect::<Vec<_>>();
3447 if !buffer_matches.is_empty() {
3448 worker_matched_buffers
3449 .insert(buffer.clone(), buffer_matches);
3450 }
3451 }
3452 });
3453 }
3454 })
3455 .await;
3456 Ok(matched_buffers.into_iter().flatten().collect())
3457 })
3458 } else if let Some(project_id) = self.remote_id() {
3459 let request = self.client.request(query.to_proto(project_id));
3460 cx.spawn(|this, mut cx| async move {
3461 let response = request.await?;
3462 let mut result = HashMap::default();
3463 for location in response.locations {
3464 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3465 let target_buffer = this
3466 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3467 .await?;
3468 let start = location
3469 .start
3470 .and_then(deserialize_anchor)
3471 .ok_or_else(|| anyhow!("missing target start"))?;
3472 let end = location
3473 .end
3474 .and_then(deserialize_anchor)
3475 .ok_or_else(|| anyhow!("missing target end"))?;
3476 result
3477 .entry(target_buffer)
3478 .or_insert(Vec::new())
3479 .push(start..end)
3480 }
3481 Ok(result)
3482 })
3483 } else {
3484 Task::ready(Ok(Default::default()))
3485 }
3486 }
3487
3488 fn request_lsp<R: LspCommand>(
3489 &self,
3490 buffer_handle: ModelHandle<Buffer>,
3491 request: R,
3492 cx: &mut ModelContext<Self>,
3493 ) -> Task<Result<R::Response>>
3494 where
3495 <R::LspRequest as lsp::request::Request>::Result: Send,
3496 {
3497 let buffer = buffer_handle.read(cx);
3498 if self.is_local() {
3499 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3500 if let Some((file, (_, language_server))) =
3501 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3502 {
3503 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3504 return cx.spawn(|this, cx| async move {
3505 if !request.check_capabilities(&language_server.capabilities()) {
3506 return Ok(Default::default());
3507 }
3508
3509 let response = language_server
3510 .request::<R::LspRequest>(lsp_params)
3511 .await
3512 .context("lsp request failed")?;
3513 request
3514 .response_from_lsp(response, this, buffer_handle, cx)
3515 .await
3516 });
3517 }
3518 } else if let Some(project_id) = self.remote_id() {
3519 let rpc = self.client.clone();
3520 let message = request.to_proto(project_id, buffer);
3521 return cx.spawn(|this, cx| async move {
3522 let response = rpc.request(message).await?;
3523 request
3524 .response_from_proto(response, this, buffer_handle, cx)
3525 .await
3526 });
3527 }
3528 Task::ready(Ok(Default::default()))
3529 }
3530
3531 pub fn find_or_create_local_worktree(
3532 &mut self,
3533 abs_path: impl AsRef<Path>,
3534 visible: bool,
3535 cx: &mut ModelContext<Self>,
3536 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3537 let abs_path = abs_path.as_ref();
3538 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3539 Task::ready(Ok((tree.clone(), relative_path.into())))
3540 } else {
3541 let worktree = self.create_local_worktree(abs_path, visible, cx);
3542 cx.foreground()
3543 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3544 }
3545 }
3546
3547 pub fn find_local_worktree(
3548 &self,
3549 abs_path: &Path,
3550 cx: &AppContext,
3551 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3552 for tree in self.worktrees(cx) {
3553 if let Some(relative_path) = tree
3554 .read(cx)
3555 .as_local()
3556 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3557 {
3558 return Some((tree.clone(), relative_path.into()));
3559 }
3560 }
3561 None
3562 }
3563
3564 pub fn is_shared(&self) -> bool {
3565 match &self.client_state {
3566 ProjectClientState::Local { is_shared, .. } => *is_shared,
3567 ProjectClientState::Remote { .. } => false,
3568 }
3569 }
3570
3571 fn create_local_worktree(
3572 &mut self,
3573 abs_path: impl AsRef<Path>,
3574 visible: bool,
3575 cx: &mut ModelContext<Self>,
3576 ) -> Task<Result<ModelHandle<Worktree>>> {
3577 let fs = self.fs.clone();
3578 let client = self.client.clone();
3579 let next_entry_id = self.next_entry_id.clone();
3580 let path: Arc<Path> = abs_path.as_ref().into();
3581 let task = self
3582 .loading_local_worktrees
3583 .entry(path.clone())
3584 .or_insert_with(|| {
3585 cx.spawn(|project, mut cx| {
3586 async move {
3587 let worktree = Worktree::local(
3588 client.clone(),
3589 path.clone(),
3590 visible,
3591 fs,
3592 next_entry_id,
3593 &mut cx,
3594 )
3595 .await;
3596 project.update(&mut cx, |project, _| {
3597 project.loading_local_worktrees.remove(&path);
3598 });
3599 let worktree = worktree?;
3600
3601 let remote_project_id = project.update(&mut cx, |project, cx| {
3602 project.add_worktree(&worktree, cx);
3603 project.remote_id()
3604 });
3605
3606 if let Some(project_id) = remote_project_id {
3607 // Because sharing is async, we may have *unshared* the project by the time it completes,
3608 // in which case we need to register the worktree instead.
3609 loop {
3610 if project.read_with(&cx, |project, _| project.is_shared()) {
3611 if worktree
3612 .update(&mut cx, |worktree, cx| {
3613 worktree.as_local_mut().unwrap().share(project_id, cx)
3614 })
3615 .await
3616 .is_ok()
3617 {
3618 break;
3619 }
3620 } else {
3621 worktree
3622 .update(&mut cx, |worktree, cx| {
3623 worktree
3624 .as_local_mut()
3625 .unwrap()
3626 .register(project_id, cx)
3627 })
3628 .await?;
3629 break;
3630 }
3631 }
3632 }
3633
3634 Ok(worktree)
3635 }
3636 .map_err(|err| Arc::new(err))
3637 })
3638 .shared()
3639 })
3640 .clone();
3641 cx.foreground().spawn(async move {
3642 match task.await {
3643 Ok(worktree) => Ok(worktree),
3644 Err(err) => Err(anyhow!("{}", err)),
3645 }
3646 })
3647 }
3648
3649 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3650 self.worktrees.retain(|worktree| {
3651 worktree
3652 .upgrade(cx)
3653 .map_or(false, |w| w.read(cx).id() != id)
3654 });
3655 cx.notify();
3656 }
3657
3658 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3659 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3660 if worktree.read(cx).is_local() {
3661 cx.subscribe(&worktree, |this, worktree, _, cx| {
3662 this.update_local_worktree_buffers(worktree, cx);
3663 })
3664 .detach();
3665 }
3666
3667 let push_strong_handle = {
3668 let worktree = worktree.read(cx);
3669 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3670 };
3671 if push_strong_handle {
3672 self.worktrees
3673 .push(WorktreeHandle::Strong(worktree.clone()));
3674 } else {
3675 cx.observe_release(&worktree, |this, _, cx| {
3676 this.worktrees
3677 .retain(|worktree| worktree.upgrade(cx).is_some());
3678 cx.notify();
3679 })
3680 .detach();
3681 self.worktrees
3682 .push(WorktreeHandle::Weak(worktree.downgrade()));
3683 }
3684 cx.notify();
3685 }
3686
3687 fn update_local_worktree_buffers(
3688 &mut self,
3689 worktree_handle: ModelHandle<Worktree>,
3690 cx: &mut ModelContext<Self>,
3691 ) {
3692 let snapshot = worktree_handle.read(cx).snapshot();
3693 let mut buffers_to_delete = Vec::new();
3694 let mut renamed_buffers = Vec::new();
3695 for (buffer_id, buffer) in &self.opened_buffers {
3696 if let Some(buffer) = buffer.upgrade(cx) {
3697 buffer.update(cx, |buffer, cx| {
3698 if let Some(old_file) = File::from_dyn(buffer.file()) {
3699 if old_file.worktree != worktree_handle {
3700 return;
3701 }
3702
3703 let new_file = if let Some(entry) = old_file
3704 .entry_id
3705 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3706 {
3707 File {
3708 is_local: true,
3709 entry_id: Some(entry.id),
3710 mtime: entry.mtime,
3711 path: entry.path.clone(),
3712 worktree: worktree_handle.clone(),
3713 }
3714 } else if let Some(entry) =
3715 snapshot.entry_for_path(old_file.path().as_ref())
3716 {
3717 File {
3718 is_local: true,
3719 entry_id: Some(entry.id),
3720 mtime: entry.mtime,
3721 path: entry.path.clone(),
3722 worktree: worktree_handle.clone(),
3723 }
3724 } else {
3725 File {
3726 is_local: true,
3727 entry_id: None,
3728 path: old_file.path().clone(),
3729 mtime: old_file.mtime(),
3730 worktree: worktree_handle.clone(),
3731 }
3732 };
3733
3734 let old_path = old_file.abs_path(cx);
3735 if new_file.abs_path(cx) != old_path {
3736 renamed_buffers.push((cx.handle(), old_path));
3737 }
3738
3739 if let Some(project_id) = self.remote_id() {
3740 self.client
3741 .send(proto::UpdateBufferFile {
3742 project_id,
3743 buffer_id: *buffer_id as u64,
3744 file: Some(new_file.to_proto()),
3745 })
3746 .log_err();
3747 }
3748 buffer.file_updated(Box::new(new_file), cx).detach();
3749 }
3750 });
3751 } else {
3752 buffers_to_delete.push(*buffer_id);
3753 }
3754 }
3755
3756 for buffer_id in buffers_to_delete {
3757 self.opened_buffers.remove(&buffer_id);
3758 }
3759
3760 for (buffer, old_path) in renamed_buffers {
3761 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3762 self.assign_language_to_buffer(&buffer, cx);
3763 self.register_buffer_with_language_server(&buffer, cx);
3764 }
3765 }
3766
3767 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3768 let new_active_entry = entry.and_then(|project_path| {
3769 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3770 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3771 Some(entry.id)
3772 });
3773 if new_active_entry != self.active_entry {
3774 self.active_entry = new_active_entry;
3775 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3776 }
3777 }
3778
3779 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3780 self.language_server_statuses
3781 .values()
3782 .any(|status| status.pending_diagnostic_updates > 0)
3783 }
3784
3785 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3786 let mut summary = DiagnosticSummary::default();
3787 for (_, path_summary) in self.diagnostic_summaries(cx) {
3788 summary.error_count += path_summary.error_count;
3789 summary.warning_count += path_summary.warning_count;
3790 }
3791 summary
3792 }
3793
3794 pub fn diagnostic_summaries<'a>(
3795 &'a self,
3796 cx: &'a AppContext,
3797 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3798 self.worktrees(cx).flat_map(move |worktree| {
3799 let worktree = worktree.read(cx);
3800 let worktree_id = worktree.id();
3801 worktree
3802 .diagnostic_summaries()
3803 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3804 })
3805 }
3806
3807 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3808 if self
3809 .language_server_statuses
3810 .values()
3811 .map(|status| status.pending_diagnostic_updates)
3812 .sum::<isize>()
3813 == 1
3814 {
3815 cx.emit(Event::DiskBasedDiagnosticsStarted);
3816 }
3817 }
3818
3819 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3820 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3821 if self
3822 .language_server_statuses
3823 .values()
3824 .map(|status| status.pending_diagnostic_updates)
3825 .sum::<isize>()
3826 == 0
3827 {
3828 cx.emit(Event::DiskBasedDiagnosticsFinished);
3829 }
3830 }
3831
3832 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3833 self.active_entry
3834 }
3835
3836 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3837 self.worktree_for_id(path.worktree_id, cx)?
3838 .read(cx)
3839 .entry_for_path(&path.path)
3840 .map(|entry| entry.id)
3841 }
3842
3843 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3844 let worktree = self.worktree_for_entry(entry_id, cx)?;
3845 let worktree = worktree.read(cx);
3846 let worktree_id = worktree.id();
3847 let path = worktree.entry_for_id(entry_id)?.path.clone();
3848 Some(ProjectPath { worktree_id, path })
3849 }
3850
3851 // RPC message handlers
3852
3853 async fn handle_request_join_project(
3854 this: ModelHandle<Self>,
3855 message: TypedEnvelope<proto::RequestJoinProject>,
3856 _: Arc<Client>,
3857 mut cx: AsyncAppContext,
3858 ) -> Result<()> {
3859 let user_id = message.payload.requester_id;
3860 if this.read_with(&cx, |project, _| {
3861 project.collaborators.values().any(|c| c.user.id == user_id)
3862 }) {
3863 this.update(&mut cx, |this, cx| {
3864 this.respond_to_join_request(user_id, true, cx)
3865 });
3866 } else {
3867 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3868 let user = user_store
3869 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3870 .await?;
3871 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3872 }
3873 Ok(())
3874 }
3875
3876 async fn handle_unregister_project(
3877 this: ModelHandle<Self>,
3878 _: TypedEnvelope<proto::UnregisterProject>,
3879 _: Arc<Client>,
3880 mut cx: AsyncAppContext,
3881 ) -> Result<()> {
3882 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3883 Ok(())
3884 }
3885
3886 async fn handle_project_unshared(
3887 this: ModelHandle<Self>,
3888 _: TypedEnvelope<proto::ProjectUnshared>,
3889 _: Arc<Client>,
3890 mut cx: AsyncAppContext,
3891 ) -> Result<()> {
3892 this.update(&mut cx, |this, cx| this.unshared(cx));
3893 Ok(())
3894 }
3895
3896 async fn handle_add_collaborator(
3897 this: ModelHandle<Self>,
3898 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3899 _: Arc<Client>,
3900 mut cx: AsyncAppContext,
3901 ) -> Result<()> {
3902 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3903 let collaborator = envelope
3904 .payload
3905 .collaborator
3906 .take()
3907 .ok_or_else(|| anyhow!("empty collaborator"))?;
3908
3909 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3910 this.update(&mut cx, |this, cx| {
3911 this.collaborators
3912 .insert(collaborator.peer_id, collaborator);
3913 cx.notify();
3914 });
3915
3916 Ok(())
3917 }
3918
3919 async fn handle_remove_collaborator(
3920 this: ModelHandle<Self>,
3921 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3922 _: Arc<Client>,
3923 mut cx: AsyncAppContext,
3924 ) -> Result<()> {
3925 this.update(&mut cx, |this, cx| {
3926 let peer_id = PeerId(envelope.payload.peer_id);
3927 let replica_id = this
3928 .collaborators
3929 .remove(&peer_id)
3930 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3931 .replica_id;
3932 for (_, buffer) in &this.opened_buffers {
3933 if let Some(buffer) = buffer.upgrade(cx) {
3934 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3935 }
3936 }
3937
3938 cx.emit(Event::CollaboratorLeft(peer_id));
3939 cx.notify();
3940 Ok(())
3941 })
3942 }
3943
3944 async fn handle_join_project_request_cancelled(
3945 this: ModelHandle<Self>,
3946 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3947 _: Arc<Client>,
3948 mut cx: AsyncAppContext,
3949 ) -> Result<()> {
3950 let user = this
3951 .update(&mut cx, |this, cx| {
3952 this.user_store.update(cx, |user_store, cx| {
3953 user_store.fetch_user(envelope.payload.requester_id, cx)
3954 })
3955 })
3956 .await?;
3957
3958 this.update(&mut cx, |_, cx| {
3959 cx.emit(Event::ContactCancelledJoinRequest(user));
3960 });
3961
3962 Ok(())
3963 }
3964
3965 async fn handle_register_worktree(
3966 this: ModelHandle<Self>,
3967 envelope: TypedEnvelope<proto::RegisterWorktree>,
3968 client: Arc<Client>,
3969 mut cx: AsyncAppContext,
3970 ) -> Result<()> {
3971 this.update(&mut cx, |this, cx| {
3972 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3973 let replica_id = this.replica_id();
3974 let worktree = proto::Worktree {
3975 id: envelope.payload.worktree_id,
3976 root_name: envelope.payload.root_name,
3977 entries: Default::default(),
3978 diagnostic_summaries: Default::default(),
3979 visible: envelope.payload.visible,
3980 scan_id: 0,
3981 };
3982 let (worktree, load_task) =
3983 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3984 this.add_worktree(&worktree, cx);
3985 load_task.detach();
3986 Ok(())
3987 })
3988 }
3989
3990 async fn handle_unregister_worktree(
3991 this: ModelHandle<Self>,
3992 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3993 _: Arc<Client>,
3994 mut cx: AsyncAppContext,
3995 ) -> Result<()> {
3996 this.update(&mut cx, |this, cx| {
3997 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3998 this.remove_worktree(worktree_id, cx);
3999 Ok(())
4000 })
4001 }
4002
4003 async fn handle_update_worktree(
4004 this: ModelHandle<Self>,
4005 envelope: TypedEnvelope<proto::UpdateWorktree>,
4006 _: Arc<Client>,
4007 mut cx: AsyncAppContext,
4008 ) -> Result<()> {
4009 this.update(&mut cx, |this, cx| {
4010 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4011 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4012 worktree.update(cx, |worktree, _| {
4013 let worktree = worktree.as_remote_mut().unwrap();
4014 worktree.update_from_remote(envelope)
4015 })?;
4016 }
4017 Ok(())
4018 })
4019 }
4020
4021 async fn handle_create_project_entry(
4022 this: ModelHandle<Self>,
4023 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4024 _: Arc<Client>,
4025 mut cx: AsyncAppContext,
4026 ) -> Result<proto::ProjectEntryResponse> {
4027 let worktree = this.update(&mut cx, |this, cx| {
4028 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4029 this.worktree_for_id(worktree_id, cx)
4030 .ok_or_else(|| anyhow!("worktree not found"))
4031 })?;
4032 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4033 let entry = worktree
4034 .update(&mut cx, |worktree, cx| {
4035 let worktree = worktree.as_local_mut().unwrap();
4036 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4037 worktree.create_entry(path, envelope.payload.is_directory, cx)
4038 })
4039 .await?;
4040 Ok(proto::ProjectEntryResponse {
4041 entry: Some((&entry).into()),
4042 worktree_scan_id: worktree_scan_id as u64,
4043 })
4044 }
4045
4046 async fn handle_rename_project_entry(
4047 this: ModelHandle<Self>,
4048 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4049 _: Arc<Client>,
4050 mut cx: AsyncAppContext,
4051 ) -> Result<proto::ProjectEntryResponse> {
4052 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4053 let worktree = this.read_with(&cx, |this, cx| {
4054 this.worktree_for_entry(entry_id, cx)
4055 .ok_or_else(|| anyhow!("worktree not found"))
4056 })?;
4057 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4058 let entry = worktree
4059 .update(&mut cx, |worktree, cx| {
4060 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4061 worktree
4062 .as_local_mut()
4063 .unwrap()
4064 .rename_entry(entry_id, new_path, cx)
4065 .ok_or_else(|| anyhow!("invalid entry"))
4066 })?
4067 .await?;
4068 Ok(proto::ProjectEntryResponse {
4069 entry: Some((&entry).into()),
4070 worktree_scan_id: worktree_scan_id as u64,
4071 })
4072 }
4073
4074 async fn handle_copy_project_entry(
4075 this: ModelHandle<Self>,
4076 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4077 _: Arc<Client>,
4078 mut cx: AsyncAppContext,
4079 ) -> Result<proto::ProjectEntryResponse> {
4080 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4081 let worktree = this.read_with(&cx, |this, cx| {
4082 this.worktree_for_entry(entry_id, cx)
4083 .ok_or_else(|| anyhow!("worktree not found"))
4084 })?;
4085 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4086 let entry = worktree
4087 .update(&mut cx, |worktree, cx| {
4088 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4089 worktree
4090 .as_local_mut()
4091 .unwrap()
4092 .copy_entry(entry_id, new_path, cx)
4093 .ok_or_else(|| anyhow!("invalid entry"))
4094 })?
4095 .await?;
4096 Ok(proto::ProjectEntryResponse {
4097 entry: Some((&entry).into()),
4098 worktree_scan_id: worktree_scan_id as u64,
4099 })
4100 }
4101
4102 async fn handle_delete_project_entry(
4103 this: ModelHandle<Self>,
4104 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4105 _: Arc<Client>,
4106 mut cx: AsyncAppContext,
4107 ) -> Result<proto::ProjectEntryResponse> {
4108 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4109 let worktree = this.read_with(&cx, |this, cx| {
4110 this.worktree_for_entry(entry_id, cx)
4111 .ok_or_else(|| anyhow!("worktree not found"))
4112 })?;
4113 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4114 worktree
4115 .update(&mut cx, |worktree, cx| {
4116 worktree
4117 .as_local_mut()
4118 .unwrap()
4119 .delete_entry(entry_id, cx)
4120 .ok_or_else(|| anyhow!("invalid entry"))
4121 })?
4122 .await?;
4123 Ok(proto::ProjectEntryResponse {
4124 entry: None,
4125 worktree_scan_id: worktree_scan_id as u64,
4126 })
4127 }
4128
4129 async fn handle_update_diagnostic_summary(
4130 this: ModelHandle<Self>,
4131 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4132 _: Arc<Client>,
4133 mut cx: AsyncAppContext,
4134 ) -> Result<()> {
4135 this.update(&mut cx, |this, cx| {
4136 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4137 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4138 if let Some(summary) = envelope.payload.summary {
4139 let project_path = ProjectPath {
4140 worktree_id,
4141 path: Path::new(&summary.path).into(),
4142 };
4143 worktree.update(cx, |worktree, _| {
4144 worktree
4145 .as_remote_mut()
4146 .unwrap()
4147 .update_diagnostic_summary(project_path.path.clone(), &summary);
4148 });
4149 cx.emit(Event::DiagnosticsUpdated(project_path));
4150 }
4151 }
4152 Ok(())
4153 })
4154 }
4155
4156 async fn handle_start_language_server(
4157 this: ModelHandle<Self>,
4158 envelope: TypedEnvelope<proto::StartLanguageServer>,
4159 _: Arc<Client>,
4160 mut cx: AsyncAppContext,
4161 ) -> Result<()> {
4162 let server = envelope
4163 .payload
4164 .server
4165 .ok_or_else(|| anyhow!("invalid server"))?;
4166 this.update(&mut cx, |this, cx| {
4167 this.language_server_statuses.insert(
4168 server.id as usize,
4169 LanguageServerStatus {
4170 name: server.name,
4171 pending_work: Default::default(),
4172 pending_diagnostic_updates: 0,
4173 },
4174 );
4175 cx.notify();
4176 });
4177 Ok(())
4178 }
4179
4180 async fn handle_update_language_server(
4181 this: ModelHandle<Self>,
4182 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4183 _: Arc<Client>,
4184 mut cx: AsyncAppContext,
4185 ) -> Result<()> {
4186 let language_server_id = envelope.payload.language_server_id as usize;
4187 match envelope
4188 .payload
4189 .variant
4190 .ok_or_else(|| anyhow!("invalid variant"))?
4191 {
4192 proto::update_language_server::Variant::WorkStart(payload) => {
4193 this.update(&mut cx, |this, cx| {
4194 this.on_lsp_work_start(language_server_id, payload.token, cx);
4195 })
4196 }
4197 proto::update_language_server::Variant::WorkProgress(payload) => {
4198 this.update(&mut cx, |this, cx| {
4199 this.on_lsp_work_progress(
4200 language_server_id,
4201 payload.token,
4202 LanguageServerProgress {
4203 message: payload.message,
4204 percentage: payload.percentage.map(|p| p as usize),
4205 last_update_at: Instant::now(),
4206 },
4207 cx,
4208 );
4209 })
4210 }
4211 proto::update_language_server::Variant::WorkEnd(payload) => {
4212 this.update(&mut cx, |this, cx| {
4213 this.on_lsp_work_end(language_server_id, payload.token, cx);
4214 })
4215 }
4216 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4217 this.update(&mut cx, |this, cx| {
4218 this.disk_based_diagnostics_started(cx);
4219 })
4220 }
4221 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4222 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4223 }
4224 }
4225
4226 Ok(())
4227 }
4228
4229 async fn handle_update_buffer(
4230 this: ModelHandle<Self>,
4231 envelope: TypedEnvelope<proto::UpdateBuffer>,
4232 _: Arc<Client>,
4233 mut cx: AsyncAppContext,
4234 ) -> Result<()> {
4235 this.update(&mut cx, |this, cx| {
4236 let payload = envelope.payload.clone();
4237 let buffer_id = payload.buffer_id;
4238 let ops = payload
4239 .operations
4240 .into_iter()
4241 .map(|op| language::proto::deserialize_operation(op))
4242 .collect::<Result<Vec<_>, _>>()?;
4243 let is_remote = this.is_remote();
4244 match this.opened_buffers.entry(buffer_id) {
4245 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4246 OpenBuffer::Strong(buffer) => {
4247 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4248 }
4249 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4250 OpenBuffer::Weak(_) => {}
4251 },
4252 hash_map::Entry::Vacant(e) => {
4253 assert!(
4254 is_remote,
4255 "received buffer update from {:?}",
4256 envelope.original_sender_id
4257 );
4258 e.insert(OpenBuffer::Loading(ops));
4259 }
4260 }
4261 Ok(())
4262 })
4263 }
4264
4265 async fn handle_update_buffer_file(
4266 this: ModelHandle<Self>,
4267 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4268 _: Arc<Client>,
4269 mut cx: AsyncAppContext,
4270 ) -> Result<()> {
4271 this.update(&mut cx, |this, cx| {
4272 let payload = envelope.payload.clone();
4273 let buffer_id = payload.buffer_id;
4274 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4275 let worktree = this
4276 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4277 .ok_or_else(|| anyhow!("no such worktree"))?;
4278 let file = File::from_proto(file, worktree.clone(), cx)?;
4279 let buffer = this
4280 .opened_buffers
4281 .get_mut(&buffer_id)
4282 .and_then(|b| b.upgrade(cx))
4283 .ok_or_else(|| anyhow!("no such buffer"))?;
4284 buffer.update(cx, |buffer, cx| {
4285 buffer.file_updated(Box::new(file), cx).detach();
4286 });
4287 Ok(())
4288 })
4289 }
4290
4291 async fn handle_save_buffer(
4292 this: ModelHandle<Self>,
4293 envelope: TypedEnvelope<proto::SaveBuffer>,
4294 _: Arc<Client>,
4295 mut cx: AsyncAppContext,
4296 ) -> Result<proto::BufferSaved> {
4297 let buffer_id = envelope.payload.buffer_id;
4298 let requested_version = deserialize_version(envelope.payload.version);
4299
4300 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4301 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4302 let buffer = this
4303 .opened_buffers
4304 .get(&buffer_id)
4305 .and_then(|buffer| buffer.upgrade(cx))
4306 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4307 Ok::<_, anyhow::Error>((project_id, buffer))
4308 })?;
4309 buffer
4310 .update(&mut cx, |buffer, _| {
4311 buffer.wait_for_version(requested_version)
4312 })
4313 .await;
4314
4315 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4316 Ok(proto::BufferSaved {
4317 project_id,
4318 buffer_id,
4319 version: serialize_version(&saved_version),
4320 mtime: Some(mtime.into()),
4321 })
4322 }
4323
4324 async fn handle_reload_buffers(
4325 this: ModelHandle<Self>,
4326 envelope: TypedEnvelope<proto::ReloadBuffers>,
4327 _: Arc<Client>,
4328 mut cx: AsyncAppContext,
4329 ) -> Result<proto::ReloadBuffersResponse> {
4330 let sender_id = envelope.original_sender_id()?;
4331 let reload = this.update(&mut cx, |this, cx| {
4332 let mut buffers = HashSet::default();
4333 for buffer_id in &envelope.payload.buffer_ids {
4334 buffers.insert(
4335 this.opened_buffers
4336 .get(buffer_id)
4337 .and_then(|buffer| buffer.upgrade(cx))
4338 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4339 );
4340 }
4341 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4342 })?;
4343
4344 let project_transaction = reload.await?;
4345 let project_transaction = this.update(&mut cx, |this, cx| {
4346 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4347 });
4348 Ok(proto::ReloadBuffersResponse {
4349 transaction: Some(project_transaction),
4350 })
4351 }
4352
4353 async fn handle_format_buffers(
4354 this: ModelHandle<Self>,
4355 envelope: TypedEnvelope<proto::FormatBuffers>,
4356 _: Arc<Client>,
4357 mut cx: AsyncAppContext,
4358 ) -> Result<proto::FormatBuffersResponse> {
4359 let sender_id = envelope.original_sender_id()?;
4360 let format = this.update(&mut cx, |this, cx| {
4361 let mut buffers = HashSet::default();
4362 for buffer_id in &envelope.payload.buffer_ids {
4363 buffers.insert(
4364 this.opened_buffers
4365 .get(buffer_id)
4366 .and_then(|buffer| buffer.upgrade(cx))
4367 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4368 );
4369 }
4370 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4371 })?;
4372
4373 let project_transaction = format.await?;
4374 let project_transaction = this.update(&mut cx, |this, cx| {
4375 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4376 });
4377 Ok(proto::FormatBuffersResponse {
4378 transaction: Some(project_transaction),
4379 })
4380 }
4381
4382 async fn handle_get_completions(
4383 this: ModelHandle<Self>,
4384 envelope: TypedEnvelope<proto::GetCompletions>,
4385 _: Arc<Client>,
4386 mut cx: AsyncAppContext,
4387 ) -> Result<proto::GetCompletionsResponse> {
4388 let position = envelope
4389 .payload
4390 .position
4391 .and_then(language::proto::deserialize_anchor)
4392 .ok_or_else(|| anyhow!("invalid position"))?;
4393 let version = deserialize_version(envelope.payload.version);
4394 let buffer = this.read_with(&cx, |this, cx| {
4395 this.opened_buffers
4396 .get(&envelope.payload.buffer_id)
4397 .and_then(|buffer| buffer.upgrade(cx))
4398 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4399 })?;
4400 buffer
4401 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4402 .await;
4403 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4404 let completions = this
4405 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4406 .await?;
4407
4408 Ok(proto::GetCompletionsResponse {
4409 completions: completions
4410 .iter()
4411 .map(language::proto::serialize_completion)
4412 .collect(),
4413 version: serialize_version(&version),
4414 })
4415 }
4416
4417 async fn handle_apply_additional_edits_for_completion(
4418 this: ModelHandle<Self>,
4419 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4420 _: Arc<Client>,
4421 mut cx: AsyncAppContext,
4422 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4423 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4424 let buffer = this
4425 .opened_buffers
4426 .get(&envelope.payload.buffer_id)
4427 .and_then(|buffer| buffer.upgrade(cx))
4428 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4429 let language = buffer.read(cx).language();
4430 let completion = language::proto::deserialize_completion(
4431 envelope
4432 .payload
4433 .completion
4434 .ok_or_else(|| anyhow!("invalid completion"))?,
4435 language,
4436 )?;
4437 Ok::<_, anyhow::Error>(
4438 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4439 )
4440 })?;
4441
4442 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4443 transaction: apply_additional_edits
4444 .await?
4445 .as_ref()
4446 .map(language::proto::serialize_transaction),
4447 })
4448 }
4449
4450 async fn handle_get_code_actions(
4451 this: ModelHandle<Self>,
4452 envelope: TypedEnvelope<proto::GetCodeActions>,
4453 _: Arc<Client>,
4454 mut cx: AsyncAppContext,
4455 ) -> Result<proto::GetCodeActionsResponse> {
4456 let start = envelope
4457 .payload
4458 .start
4459 .and_then(language::proto::deserialize_anchor)
4460 .ok_or_else(|| anyhow!("invalid start"))?;
4461 let end = envelope
4462 .payload
4463 .end
4464 .and_then(language::proto::deserialize_anchor)
4465 .ok_or_else(|| anyhow!("invalid end"))?;
4466 let buffer = this.update(&mut cx, |this, cx| {
4467 this.opened_buffers
4468 .get(&envelope.payload.buffer_id)
4469 .and_then(|buffer| buffer.upgrade(cx))
4470 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4471 })?;
4472 buffer
4473 .update(&mut cx, |buffer, _| {
4474 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4475 })
4476 .await;
4477
4478 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4479 let code_actions = this.update(&mut cx, |this, cx| {
4480 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4481 })?;
4482
4483 Ok(proto::GetCodeActionsResponse {
4484 actions: code_actions
4485 .await?
4486 .iter()
4487 .map(language::proto::serialize_code_action)
4488 .collect(),
4489 version: serialize_version(&version),
4490 })
4491 }
4492
4493 async fn handle_apply_code_action(
4494 this: ModelHandle<Self>,
4495 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4496 _: Arc<Client>,
4497 mut cx: AsyncAppContext,
4498 ) -> Result<proto::ApplyCodeActionResponse> {
4499 let sender_id = envelope.original_sender_id()?;
4500 let action = language::proto::deserialize_code_action(
4501 envelope
4502 .payload
4503 .action
4504 .ok_or_else(|| anyhow!("invalid action"))?,
4505 )?;
4506 let apply_code_action = this.update(&mut cx, |this, cx| {
4507 let buffer = this
4508 .opened_buffers
4509 .get(&envelope.payload.buffer_id)
4510 .and_then(|buffer| buffer.upgrade(cx))
4511 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4512 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4513 })?;
4514
4515 let project_transaction = apply_code_action.await?;
4516 let project_transaction = this.update(&mut cx, |this, cx| {
4517 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4518 });
4519 Ok(proto::ApplyCodeActionResponse {
4520 transaction: Some(project_transaction),
4521 })
4522 }
4523
4524 async fn handle_lsp_command<T: LspCommand>(
4525 this: ModelHandle<Self>,
4526 envelope: TypedEnvelope<T::ProtoRequest>,
4527 _: Arc<Client>,
4528 mut cx: AsyncAppContext,
4529 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4530 where
4531 <T::LspRequest as lsp::request::Request>::Result: Send,
4532 {
4533 let sender_id = envelope.original_sender_id()?;
4534 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4535 let buffer_handle = this.read_with(&cx, |this, _| {
4536 this.opened_buffers
4537 .get(&buffer_id)
4538 .and_then(|buffer| buffer.upgrade(&cx))
4539 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4540 })?;
4541 let request = T::from_proto(
4542 envelope.payload,
4543 this.clone(),
4544 buffer_handle.clone(),
4545 cx.clone(),
4546 )
4547 .await?;
4548 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4549 let response = this
4550 .update(&mut cx, |this, cx| {
4551 this.request_lsp(buffer_handle, request, cx)
4552 })
4553 .await?;
4554 this.update(&mut cx, |this, cx| {
4555 Ok(T::response_to_proto(
4556 response,
4557 this,
4558 sender_id,
4559 &buffer_version,
4560 cx,
4561 ))
4562 })
4563 }
4564
4565 async fn handle_get_project_symbols(
4566 this: ModelHandle<Self>,
4567 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4568 _: Arc<Client>,
4569 mut cx: AsyncAppContext,
4570 ) -> Result<proto::GetProjectSymbolsResponse> {
4571 let symbols = this
4572 .update(&mut cx, |this, cx| {
4573 this.symbols(&envelope.payload.query, cx)
4574 })
4575 .await?;
4576
4577 Ok(proto::GetProjectSymbolsResponse {
4578 symbols: symbols.iter().map(serialize_symbol).collect(),
4579 })
4580 }
4581
4582 async fn handle_search_project(
4583 this: ModelHandle<Self>,
4584 envelope: TypedEnvelope<proto::SearchProject>,
4585 _: Arc<Client>,
4586 mut cx: AsyncAppContext,
4587 ) -> Result<proto::SearchProjectResponse> {
4588 let peer_id = envelope.original_sender_id()?;
4589 let query = SearchQuery::from_proto(envelope.payload)?;
4590 let result = this
4591 .update(&mut cx, |this, cx| this.search(query, cx))
4592 .await?;
4593
4594 this.update(&mut cx, |this, cx| {
4595 let mut locations = Vec::new();
4596 for (buffer, ranges) in result {
4597 for range in ranges {
4598 let start = serialize_anchor(&range.start);
4599 let end = serialize_anchor(&range.end);
4600 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4601 locations.push(proto::Location {
4602 buffer: Some(buffer),
4603 start: Some(start),
4604 end: Some(end),
4605 });
4606 }
4607 }
4608 Ok(proto::SearchProjectResponse { locations })
4609 })
4610 }
4611
4612 async fn handle_open_buffer_for_symbol(
4613 this: ModelHandle<Self>,
4614 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4615 _: Arc<Client>,
4616 mut cx: AsyncAppContext,
4617 ) -> Result<proto::OpenBufferForSymbolResponse> {
4618 let peer_id = envelope.original_sender_id()?;
4619 let symbol = envelope
4620 .payload
4621 .symbol
4622 .ok_or_else(|| anyhow!("invalid symbol"))?;
4623 let symbol = this.read_with(&cx, |this, _| {
4624 let symbol = this.deserialize_symbol(symbol)?;
4625 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4626 if signature == symbol.signature {
4627 Ok(symbol)
4628 } else {
4629 Err(anyhow!("invalid symbol signature"))
4630 }
4631 })?;
4632 let buffer = this
4633 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4634 .await?;
4635
4636 Ok(proto::OpenBufferForSymbolResponse {
4637 buffer: Some(this.update(&mut cx, |this, cx| {
4638 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4639 })),
4640 })
4641 }
4642
4643 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4644 let mut hasher = Sha256::new();
4645 hasher.update(worktree_id.to_proto().to_be_bytes());
4646 hasher.update(path.to_string_lossy().as_bytes());
4647 hasher.update(self.nonce.to_be_bytes());
4648 hasher.finalize().as_slice().try_into().unwrap()
4649 }
4650
4651 async fn handle_open_buffer_by_id(
4652 this: ModelHandle<Self>,
4653 envelope: TypedEnvelope<proto::OpenBufferById>,
4654 _: Arc<Client>,
4655 mut cx: AsyncAppContext,
4656 ) -> Result<proto::OpenBufferResponse> {
4657 let peer_id = envelope.original_sender_id()?;
4658 let buffer = this
4659 .update(&mut cx, |this, cx| {
4660 this.open_buffer_by_id(envelope.payload.id, cx)
4661 })
4662 .await?;
4663 this.update(&mut cx, |this, cx| {
4664 Ok(proto::OpenBufferResponse {
4665 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4666 })
4667 })
4668 }
4669
4670 async fn handle_open_buffer_by_path(
4671 this: ModelHandle<Self>,
4672 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4673 _: Arc<Client>,
4674 mut cx: AsyncAppContext,
4675 ) -> Result<proto::OpenBufferResponse> {
4676 let peer_id = envelope.original_sender_id()?;
4677 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4678 let open_buffer = this.update(&mut cx, |this, cx| {
4679 this.open_buffer(
4680 ProjectPath {
4681 worktree_id,
4682 path: PathBuf::from(envelope.payload.path).into(),
4683 },
4684 cx,
4685 )
4686 });
4687
4688 let buffer = open_buffer.await?;
4689 this.update(&mut cx, |this, cx| {
4690 Ok(proto::OpenBufferResponse {
4691 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4692 })
4693 })
4694 }
4695
4696 fn serialize_project_transaction_for_peer(
4697 &mut self,
4698 project_transaction: ProjectTransaction,
4699 peer_id: PeerId,
4700 cx: &AppContext,
4701 ) -> proto::ProjectTransaction {
4702 let mut serialized_transaction = proto::ProjectTransaction {
4703 buffers: Default::default(),
4704 transactions: Default::default(),
4705 };
4706 for (buffer, transaction) in project_transaction.0 {
4707 serialized_transaction
4708 .buffers
4709 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4710 serialized_transaction
4711 .transactions
4712 .push(language::proto::serialize_transaction(&transaction));
4713 }
4714 serialized_transaction
4715 }
4716
4717 fn deserialize_project_transaction(
4718 &mut self,
4719 message: proto::ProjectTransaction,
4720 push_to_history: bool,
4721 cx: &mut ModelContext<Self>,
4722 ) -> Task<Result<ProjectTransaction>> {
4723 cx.spawn(|this, mut cx| async move {
4724 let mut project_transaction = ProjectTransaction::default();
4725 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4726 let buffer = this
4727 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4728 .await?;
4729 let transaction = language::proto::deserialize_transaction(transaction)?;
4730 project_transaction.0.insert(buffer, transaction);
4731 }
4732
4733 for (buffer, transaction) in &project_transaction.0 {
4734 buffer
4735 .update(&mut cx, |buffer, _| {
4736 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4737 })
4738 .await;
4739
4740 if push_to_history {
4741 buffer.update(&mut cx, |buffer, _| {
4742 buffer.push_transaction(transaction.clone(), Instant::now());
4743 });
4744 }
4745 }
4746
4747 Ok(project_transaction)
4748 })
4749 }
4750
4751 fn serialize_buffer_for_peer(
4752 &mut self,
4753 buffer: &ModelHandle<Buffer>,
4754 peer_id: PeerId,
4755 cx: &AppContext,
4756 ) -> proto::Buffer {
4757 let buffer_id = buffer.read(cx).remote_id();
4758 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4759 if shared_buffers.insert(buffer_id) {
4760 proto::Buffer {
4761 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4762 }
4763 } else {
4764 proto::Buffer {
4765 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4766 }
4767 }
4768 }
4769
4770 fn deserialize_buffer(
4771 &mut self,
4772 buffer: proto::Buffer,
4773 cx: &mut ModelContext<Self>,
4774 ) -> Task<Result<ModelHandle<Buffer>>> {
4775 let replica_id = self.replica_id();
4776
4777 let opened_buffer_tx = self.opened_buffer.0.clone();
4778 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4779 cx.spawn(|this, mut cx| async move {
4780 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4781 proto::buffer::Variant::Id(id) => {
4782 let buffer = loop {
4783 let buffer = this.read_with(&cx, |this, cx| {
4784 this.opened_buffers
4785 .get(&id)
4786 .and_then(|buffer| buffer.upgrade(cx))
4787 });
4788 if let Some(buffer) = buffer {
4789 break buffer;
4790 }
4791 opened_buffer_rx
4792 .next()
4793 .await
4794 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4795 };
4796 Ok(buffer)
4797 }
4798 proto::buffer::Variant::State(mut buffer) => {
4799 let mut buffer_worktree = None;
4800 let mut buffer_file = None;
4801 if let Some(file) = buffer.file.take() {
4802 this.read_with(&cx, |this, cx| {
4803 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4804 let worktree =
4805 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4806 anyhow!("no worktree found for id {}", file.worktree_id)
4807 })?;
4808 buffer_file =
4809 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4810 as Box<dyn language::File>);
4811 buffer_worktree = Some(worktree);
4812 Ok::<_, anyhow::Error>(())
4813 })?;
4814 }
4815
4816 let buffer = cx.add_model(|cx| {
4817 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4818 });
4819
4820 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4821
4822 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4823 Ok(buffer)
4824 }
4825 }
4826 })
4827 }
4828
4829 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4830 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4831 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4832 let start = serialized_symbol
4833 .start
4834 .ok_or_else(|| anyhow!("invalid start"))?;
4835 let end = serialized_symbol
4836 .end
4837 .ok_or_else(|| anyhow!("invalid end"))?;
4838 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4839 let path = PathBuf::from(serialized_symbol.path);
4840 let language = self.languages.select_language(&path);
4841 Ok(Symbol {
4842 source_worktree_id,
4843 worktree_id,
4844 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4845 label: language
4846 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4847 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4848 name: serialized_symbol.name,
4849 path,
4850 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4851 kind,
4852 signature: serialized_symbol
4853 .signature
4854 .try_into()
4855 .map_err(|_| anyhow!("invalid signature"))?,
4856 })
4857 }
4858
4859 async fn handle_buffer_saved(
4860 this: ModelHandle<Self>,
4861 envelope: TypedEnvelope<proto::BufferSaved>,
4862 _: Arc<Client>,
4863 mut cx: AsyncAppContext,
4864 ) -> Result<()> {
4865 let version = deserialize_version(envelope.payload.version);
4866 let mtime = envelope
4867 .payload
4868 .mtime
4869 .ok_or_else(|| anyhow!("missing mtime"))?
4870 .into();
4871
4872 this.update(&mut cx, |this, cx| {
4873 let buffer = this
4874 .opened_buffers
4875 .get(&envelope.payload.buffer_id)
4876 .and_then(|buffer| buffer.upgrade(cx));
4877 if let Some(buffer) = buffer {
4878 buffer.update(cx, |buffer, cx| {
4879 buffer.did_save(version, mtime, None, cx);
4880 });
4881 }
4882 Ok(())
4883 })
4884 }
4885
4886 async fn handle_buffer_reloaded(
4887 this: ModelHandle<Self>,
4888 envelope: TypedEnvelope<proto::BufferReloaded>,
4889 _: Arc<Client>,
4890 mut cx: AsyncAppContext,
4891 ) -> Result<()> {
4892 let payload = envelope.payload.clone();
4893 let version = deserialize_version(payload.version);
4894 let mtime = payload
4895 .mtime
4896 .ok_or_else(|| anyhow!("missing mtime"))?
4897 .into();
4898 this.update(&mut cx, |this, cx| {
4899 let buffer = this
4900 .opened_buffers
4901 .get(&payload.buffer_id)
4902 .and_then(|buffer| buffer.upgrade(cx));
4903 if let Some(buffer) = buffer {
4904 buffer.update(cx, |buffer, cx| {
4905 buffer.did_reload(version, mtime, cx);
4906 });
4907 }
4908 Ok(())
4909 })
4910 }
4911
4912 pub fn match_paths<'a>(
4913 &self,
4914 query: &'a str,
4915 include_ignored: bool,
4916 smart_case: bool,
4917 max_results: usize,
4918 cancel_flag: &'a AtomicBool,
4919 cx: &AppContext,
4920 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4921 let worktrees = self
4922 .worktrees(cx)
4923 .filter(|worktree| worktree.read(cx).is_visible())
4924 .collect::<Vec<_>>();
4925 let include_root_name = worktrees.len() > 1;
4926 let candidate_sets = worktrees
4927 .into_iter()
4928 .map(|worktree| CandidateSet {
4929 snapshot: worktree.read(cx).snapshot(),
4930 include_ignored,
4931 include_root_name,
4932 })
4933 .collect::<Vec<_>>();
4934
4935 let background = cx.background().clone();
4936 async move {
4937 fuzzy::match_paths(
4938 candidate_sets.as_slice(),
4939 query,
4940 smart_case,
4941 max_results,
4942 cancel_flag,
4943 background,
4944 )
4945 .await
4946 }
4947 }
4948
4949 fn edits_from_lsp(
4950 &mut self,
4951 buffer: &ModelHandle<Buffer>,
4952 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4953 version: Option<i32>,
4954 cx: &mut ModelContext<Self>,
4955 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4956 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4957 cx.background().spawn(async move {
4958 let snapshot = snapshot?;
4959 let mut lsp_edits = lsp_edits
4960 .into_iter()
4961 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4962 .peekable();
4963
4964 let mut edits = Vec::new();
4965 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4966 // Combine any LSP edits that are adjacent.
4967 //
4968 // Also, combine LSP edits that are separated from each other by only
4969 // a newline. This is important because for some code actions,
4970 // Rust-analyzer rewrites the entire buffer via a series of edits that
4971 // are separated by unchanged newline characters.
4972 //
4973 // In order for the diffing logic below to work properly, any edits that
4974 // cancel each other out must be combined into one.
4975 while let Some((next_range, next_text)) = lsp_edits.peek() {
4976 if next_range.start > range.end {
4977 if next_range.start.row > range.end.row + 1
4978 || next_range.start.column > 0
4979 || snapshot.clip_point_utf16(
4980 PointUtf16::new(range.end.row, u32::MAX),
4981 Bias::Left,
4982 ) > range.end
4983 {
4984 break;
4985 }
4986 new_text.push('\n');
4987 }
4988 range.end = next_range.end;
4989 new_text.push_str(&next_text);
4990 lsp_edits.next();
4991 }
4992
4993 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4994 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4995 {
4996 return Err(anyhow!("invalid edits received from language server"));
4997 }
4998
4999 // For multiline edits, perform a diff of the old and new text so that
5000 // we can identify the changes more precisely, preserving the locations
5001 // of any anchors positioned in the unchanged regions.
5002 if range.end.row > range.start.row {
5003 let mut offset = range.start.to_offset(&snapshot);
5004 let old_text = snapshot.text_for_range(range).collect::<String>();
5005
5006 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5007 let mut moved_since_edit = true;
5008 for change in diff.iter_all_changes() {
5009 let tag = change.tag();
5010 let value = change.value();
5011 match tag {
5012 ChangeTag::Equal => {
5013 offset += value.len();
5014 moved_since_edit = true;
5015 }
5016 ChangeTag::Delete => {
5017 let start = snapshot.anchor_after(offset);
5018 let end = snapshot.anchor_before(offset + value.len());
5019 if moved_since_edit {
5020 edits.push((start..end, String::new()));
5021 } else {
5022 edits.last_mut().unwrap().0.end = end;
5023 }
5024 offset += value.len();
5025 moved_since_edit = false;
5026 }
5027 ChangeTag::Insert => {
5028 if moved_since_edit {
5029 let anchor = snapshot.anchor_after(offset);
5030 edits.push((anchor.clone()..anchor, value.to_string()));
5031 } else {
5032 edits.last_mut().unwrap().1.push_str(value);
5033 }
5034 moved_since_edit = false;
5035 }
5036 }
5037 }
5038 } else if range.end == range.start {
5039 let anchor = snapshot.anchor_after(range.start);
5040 edits.push((anchor.clone()..anchor, new_text));
5041 } else {
5042 let edit_start = snapshot.anchor_after(range.start);
5043 let edit_end = snapshot.anchor_before(range.end);
5044 edits.push((edit_start..edit_end, new_text));
5045 }
5046 }
5047
5048 Ok(edits)
5049 })
5050 }
5051
5052 fn buffer_snapshot_for_lsp_version(
5053 &mut self,
5054 buffer: &ModelHandle<Buffer>,
5055 version: Option<i32>,
5056 cx: &AppContext,
5057 ) -> Result<TextBufferSnapshot> {
5058 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5059
5060 if let Some(version) = version {
5061 let buffer_id = buffer.read(cx).remote_id();
5062 let snapshots = self
5063 .buffer_snapshots
5064 .get_mut(&buffer_id)
5065 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5066 let mut found_snapshot = None;
5067 snapshots.retain(|(snapshot_version, snapshot)| {
5068 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5069 false
5070 } else {
5071 if *snapshot_version == version {
5072 found_snapshot = Some(snapshot.clone());
5073 }
5074 true
5075 }
5076 });
5077
5078 found_snapshot.ok_or_else(|| {
5079 anyhow!(
5080 "snapshot not found for buffer {} at version {}",
5081 buffer_id,
5082 version
5083 )
5084 })
5085 } else {
5086 Ok((buffer.read(cx)).text_snapshot())
5087 }
5088 }
5089
5090 fn language_server_for_buffer(
5091 &self,
5092 buffer: &Buffer,
5093 cx: &AppContext,
5094 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5095 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5096 let worktree_id = file.worktree_id(cx);
5097 self.language_servers
5098 .get(&(worktree_id, language.lsp_adapter()?.name()))
5099 } else {
5100 None
5101 }
5102 }
5103}
5104
5105impl WorktreeHandle {
5106 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5107 match self {
5108 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5109 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5110 }
5111 }
5112}
5113
5114impl OpenBuffer {
5115 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5116 match self {
5117 OpenBuffer::Strong(handle) => Some(handle.clone()),
5118 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5119 OpenBuffer::Loading(_) => None,
5120 }
5121 }
5122}
5123
5124struct CandidateSet {
5125 snapshot: Snapshot,
5126 include_ignored: bool,
5127 include_root_name: bool,
5128}
5129
5130impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5131 type Candidates = CandidateSetIter<'a>;
5132
5133 fn id(&self) -> usize {
5134 self.snapshot.id().to_usize()
5135 }
5136
5137 fn len(&self) -> usize {
5138 if self.include_ignored {
5139 self.snapshot.file_count()
5140 } else {
5141 self.snapshot.visible_file_count()
5142 }
5143 }
5144
5145 fn prefix(&self) -> Arc<str> {
5146 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5147 self.snapshot.root_name().into()
5148 } else if self.include_root_name {
5149 format!("{}/", self.snapshot.root_name()).into()
5150 } else {
5151 "".into()
5152 }
5153 }
5154
5155 fn candidates(&'a self, start: usize) -> Self::Candidates {
5156 CandidateSetIter {
5157 traversal: self.snapshot.files(self.include_ignored, start),
5158 }
5159 }
5160}
5161
5162struct CandidateSetIter<'a> {
5163 traversal: Traversal<'a>,
5164}
5165
5166impl<'a> Iterator for CandidateSetIter<'a> {
5167 type Item = PathMatchCandidate<'a>;
5168
5169 fn next(&mut self) -> Option<Self::Item> {
5170 self.traversal.next().map(|entry| {
5171 if let EntryKind::File(char_bag) = entry.kind {
5172 PathMatchCandidate {
5173 path: &entry.path,
5174 char_bag,
5175 }
5176 } else {
5177 unreachable!()
5178 }
5179 })
5180 }
5181}
5182
5183impl Entity for Project {
5184 type Event = Event;
5185
5186 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5187 match &self.client_state {
5188 ProjectClientState::Local { remote_id_rx, .. } => {
5189 if let Some(project_id) = *remote_id_rx.borrow() {
5190 self.client
5191 .send(proto::UnregisterProject { project_id })
5192 .log_err();
5193 }
5194 }
5195 ProjectClientState::Remote { remote_id, .. } => {
5196 self.client
5197 .send(proto::LeaveProject {
5198 project_id: *remote_id,
5199 })
5200 .log_err();
5201 }
5202 }
5203 }
5204
5205 fn app_will_quit(
5206 &mut self,
5207 _: &mut MutableAppContext,
5208 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5209 let shutdown_futures = self
5210 .language_servers
5211 .drain()
5212 .filter_map(|(_, (_, server))| server.shutdown())
5213 .collect::<Vec<_>>();
5214 Some(
5215 async move {
5216 futures::future::join_all(shutdown_futures).await;
5217 }
5218 .boxed(),
5219 )
5220 }
5221}
5222
5223impl Collaborator {
5224 fn from_proto(
5225 message: proto::Collaborator,
5226 user_store: &ModelHandle<UserStore>,
5227 cx: &mut AsyncAppContext,
5228 ) -> impl Future<Output = Result<Self>> {
5229 let user = user_store.update(cx, |user_store, cx| {
5230 user_store.fetch_user(message.user_id, cx)
5231 });
5232
5233 async move {
5234 Ok(Self {
5235 peer_id: PeerId(message.peer_id),
5236 user: user.await?,
5237 replica_id: message.replica_id as ReplicaId,
5238 })
5239 }
5240 }
5241}
5242
5243impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5244 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5245 Self {
5246 worktree_id,
5247 path: path.as_ref().into(),
5248 }
5249 }
5250}
5251
5252impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5253 fn from(options: lsp::CreateFileOptions) -> Self {
5254 Self {
5255 overwrite: options.overwrite.unwrap_or(false),
5256 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5257 }
5258 }
5259}
5260
5261impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5262 fn from(options: lsp::RenameFileOptions) -> Self {
5263 Self {
5264 overwrite: options.overwrite.unwrap_or(false),
5265 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5266 }
5267 }
5268}
5269
5270impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5271 fn from(options: lsp::DeleteFileOptions) -> Self {
5272 Self {
5273 recursive: options.recursive.unwrap_or(false),
5274 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5275 }
5276 }
5277}
5278
5279fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5280 proto::Symbol {
5281 source_worktree_id: symbol.source_worktree_id.to_proto(),
5282 worktree_id: symbol.worktree_id.to_proto(),
5283 language_server_name: symbol.language_server_name.0.to_string(),
5284 name: symbol.name.clone(),
5285 kind: unsafe { mem::transmute(symbol.kind) },
5286 path: symbol.path.to_string_lossy().to_string(),
5287 start: Some(proto::Point {
5288 row: symbol.range.start.row,
5289 column: symbol.range.start.column,
5290 }),
5291 end: Some(proto::Point {
5292 row: symbol.range.end.row,
5293 column: symbol.range.end.column,
5294 }),
5295 signature: symbol.signature.to_vec(),
5296 }
5297}
5298
5299fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5300 let mut path_components = path.components();
5301 let mut base_components = base.components();
5302 let mut components: Vec<Component> = Vec::new();
5303 loop {
5304 match (path_components.next(), base_components.next()) {
5305 (None, None) => break,
5306 (Some(a), None) => {
5307 components.push(a);
5308 components.extend(path_components.by_ref());
5309 break;
5310 }
5311 (None, _) => components.push(Component::ParentDir),
5312 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5313 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5314 (Some(a), Some(_)) => {
5315 components.push(Component::ParentDir);
5316 for _ in base_components {
5317 components.push(Component::ParentDir);
5318 }
5319 components.push(a);
5320 components.extend(path_components.by_ref());
5321 break;
5322 }
5323 }
5324 }
5325 components.iter().map(|c| c.as_os_str()).collect()
5326}
5327
5328impl Item for Buffer {
5329 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5330 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5331 }
5332}
5333
5334#[cfg(test)]
5335mod tests {
5336 use crate::worktree::WorktreeHandle;
5337
5338 use super::{Event, *};
5339 use fs::RealFs;
5340 use futures::{future, StreamExt};
5341 use gpui::test::subscribe;
5342 use language::{
5343 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5344 OffsetRangeExt, Point, ToPoint,
5345 };
5346 use lsp::Url;
5347 use serde_json::json;
5348 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5349 use unindent::Unindent as _;
5350 use util::{assert_set_eq, test::temp_tree};
5351
5352 #[gpui::test]
5353 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5354 let dir = temp_tree(json!({
5355 "root": {
5356 "apple": "",
5357 "banana": {
5358 "carrot": {
5359 "date": "",
5360 "endive": "",
5361 }
5362 },
5363 "fennel": {
5364 "grape": "",
5365 }
5366 }
5367 }));
5368
5369 let root_link_path = dir.path().join("root_link");
5370 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5371 unix::fs::symlink(
5372 &dir.path().join("root/fennel"),
5373 &dir.path().join("root/finnochio"),
5374 )
5375 .unwrap();
5376
5377 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5378
5379 project.read_with(cx, |project, cx| {
5380 let tree = project.worktrees(cx).next().unwrap().read(cx);
5381 assert_eq!(tree.file_count(), 5);
5382 assert_eq!(
5383 tree.inode_for_path("fennel/grape"),
5384 tree.inode_for_path("finnochio/grape")
5385 );
5386 });
5387
5388 let cancel_flag = Default::default();
5389 let results = project
5390 .read_with(cx, |project, cx| {
5391 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5392 })
5393 .await;
5394 assert_eq!(
5395 results
5396 .into_iter()
5397 .map(|result| result.path)
5398 .collect::<Vec<Arc<Path>>>(),
5399 vec![
5400 PathBuf::from("banana/carrot/date").into(),
5401 PathBuf::from("banana/carrot/endive").into(),
5402 ]
5403 );
5404 }
5405
5406 #[gpui::test]
5407 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5408 cx.foreground().forbid_parking();
5409
5410 let mut rust_language = Language::new(
5411 LanguageConfig {
5412 name: "Rust".into(),
5413 path_suffixes: vec!["rs".to_string()],
5414 ..Default::default()
5415 },
5416 Some(tree_sitter_rust::language()),
5417 );
5418 let mut json_language = Language::new(
5419 LanguageConfig {
5420 name: "JSON".into(),
5421 path_suffixes: vec!["json".to_string()],
5422 ..Default::default()
5423 },
5424 None,
5425 );
5426 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5427 name: "the-rust-language-server",
5428 capabilities: lsp::ServerCapabilities {
5429 completion_provider: Some(lsp::CompletionOptions {
5430 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5431 ..Default::default()
5432 }),
5433 ..Default::default()
5434 },
5435 ..Default::default()
5436 });
5437 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5438 name: "the-json-language-server",
5439 capabilities: lsp::ServerCapabilities {
5440 completion_provider: Some(lsp::CompletionOptions {
5441 trigger_characters: Some(vec![":".to_string()]),
5442 ..Default::default()
5443 }),
5444 ..Default::default()
5445 },
5446 ..Default::default()
5447 });
5448
5449 let fs = FakeFs::new(cx.background());
5450 fs.insert_tree(
5451 "/the-root",
5452 json!({
5453 "test.rs": "const A: i32 = 1;",
5454 "test2.rs": "",
5455 "Cargo.toml": "a = 1",
5456 "package.json": "{\"a\": 1}",
5457 }),
5458 )
5459 .await;
5460
5461 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5462 project.update(cx, |project, _| {
5463 project.languages.add(Arc::new(rust_language));
5464 project.languages.add(Arc::new(json_language));
5465 });
5466
5467 // Open a buffer without an associated language server.
5468 let toml_buffer = project
5469 .update(cx, |project, cx| {
5470 project.open_local_buffer("/the-root/Cargo.toml", cx)
5471 })
5472 .await
5473 .unwrap();
5474
5475 // Open a buffer with an associated language server.
5476 let rust_buffer = project
5477 .update(cx, |project, cx| {
5478 project.open_local_buffer("/the-root/test.rs", cx)
5479 })
5480 .await
5481 .unwrap();
5482
5483 // A server is started up, and it is notified about Rust files.
5484 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5485 assert_eq!(
5486 fake_rust_server
5487 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5488 .await
5489 .text_document,
5490 lsp::TextDocumentItem {
5491 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5492 version: 0,
5493 text: "const A: i32 = 1;".to_string(),
5494 language_id: Default::default()
5495 }
5496 );
5497
5498 // The buffer is configured based on the language server's capabilities.
5499 rust_buffer.read_with(cx, |buffer, _| {
5500 assert_eq!(
5501 buffer.completion_triggers(),
5502 &[".".to_string(), "::".to_string()]
5503 );
5504 });
5505 toml_buffer.read_with(cx, |buffer, _| {
5506 assert!(buffer.completion_triggers().is_empty());
5507 });
5508
5509 // Edit a buffer. The changes are reported to the language server.
5510 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5511 assert_eq!(
5512 fake_rust_server
5513 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5514 .await
5515 .text_document,
5516 lsp::VersionedTextDocumentIdentifier::new(
5517 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5518 1
5519 )
5520 );
5521
5522 // Open a third buffer with a different associated language server.
5523 let json_buffer = project
5524 .update(cx, |project, cx| {
5525 project.open_local_buffer("/the-root/package.json", cx)
5526 })
5527 .await
5528 .unwrap();
5529
5530 // A json language server is started up and is only notified about the json buffer.
5531 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5532 assert_eq!(
5533 fake_json_server
5534 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5535 .await
5536 .text_document,
5537 lsp::TextDocumentItem {
5538 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5539 version: 0,
5540 text: "{\"a\": 1}".to_string(),
5541 language_id: Default::default()
5542 }
5543 );
5544
5545 // This buffer is configured based on the second language server's
5546 // capabilities.
5547 json_buffer.read_with(cx, |buffer, _| {
5548 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5549 });
5550
5551 // When opening another buffer whose language server is already running,
5552 // it is also configured based on the existing language server's capabilities.
5553 let rust_buffer2 = project
5554 .update(cx, |project, cx| {
5555 project.open_local_buffer("/the-root/test2.rs", cx)
5556 })
5557 .await
5558 .unwrap();
5559 rust_buffer2.read_with(cx, |buffer, _| {
5560 assert_eq!(
5561 buffer.completion_triggers(),
5562 &[".".to_string(), "::".to_string()]
5563 );
5564 });
5565
5566 // Changes are reported only to servers matching the buffer's language.
5567 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5568 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5569 assert_eq!(
5570 fake_rust_server
5571 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5572 .await
5573 .text_document,
5574 lsp::VersionedTextDocumentIdentifier::new(
5575 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5576 1
5577 )
5578 );
5579
5580 // Save notifications are reported to all servers.
5581 toml_buffer
5582 .update(cx, |buffer, cx| buffer.save(cx))
5583 .await
5584 .unwrap();
5585 assert_eq!(
5586 fake_rust_server
5587 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5588 .await
5589 .text_document,
5590 lsp::TextDocumentIdentifier::new(
5591 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5592 )
5593 );
5594 assert_eq!(
5595 fake_json_server
5596 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5597 .await
5598 .text_document,
5599 lsp::TextDocumentIdentifier::new(
5600 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5601 )
5602 );
5603
5604 // Renames are reported only to servers matching the buffer's language.
5605 fs.rename(
5606 Path::new("/the-root/test2.rs"),
5607 Path::new("/the-root/test3.rs"),
5608 Default::default(),
5609 )
5610 .await
5611 .unwrap();
5612 assert_eq!(
5613 fake_rust_server
5614 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5615 .await
5616 .text_document,
5617 lsp::TextDocumentIdentifier::new(
5618 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5619 ),
5620 );
5621 assert_eq!(
5622 fake_rust_server
5623 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5624 .await
5625 .text_document,
5626 lsp::TextDocumentItem {
5627 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5628 version: 0,
5629 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5630 language_id: Default::default()
5631 },
5632 );
5633
5634 rust_buffer2.update(cx, |buffer, cx| {
5635 buffer.update_diagnostics(
5636 DiagnosticSet::from_sorted_entries(
5637 vec![DiagnosticEntry {
5638 diagnostic: Default::default(),
5639 range: Anchor::MIN..Anchor::MAX,
5640 }],
5641 &buffer.snapshot(),
5642 ),
5643 cx,
5644 );
5645 assert_eq!(
5646 buffer
5647 .snapshot()
5648 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5649 .count(),
5650 1
5651 );
5652 });
5653
5654 // When the rename changes the extension of the file, the buffer gets closed on the old
5655 // language server and gets opened on the new one.
5656 fs.rename(
5657 Path::new("/the-root/test3.rs"),
5658 Path::new("/the-root/test3.json"),
5659 Default::default(),
5660 )
5661 .await
5662 .unwrap();
5663 assert_eq!(
5664 fake_rust_server
5665 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5666 .await
5667 .text_document,
5668 lsp::TextDocumentIdentifier::new(
5669 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5670 ),
5671 );
5672 assert_eq!(
5673 fake_json_server
5674 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5675 .await
5676 .text_document,
5677 lsp::TextDocumentItem {
5678 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5679 version: 0,
5680 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5681 language_id: Default::default()
5682 },
5683 );
5684
5685 // We clear the diagnostics, since the language has changed.
5686 rust_buffer2.read_with(cx, |buffer, _| {
5687 assert_eq!(
5688 buffer
5689 .snapshot()
5690 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5691 .count(),
5692 0
5693 );
5694 });
5695
5696 // The renamed file's version resets after changing language server.
5697 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5698 assert_eq!(
5699 fake_json_server
5700 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5701 .await
5702 .text_document,
5703 lsp::VersionedTextDocumentIdentifier::new(
5704 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5705 1
5706 )
5707 );
5708
5709 // Restart language servers
5710 project.update(cx, |project, cx| {
5711 project.restart_language_servers_for_buffers(
5712 vec![rust_buffer.clone(), json_buffer.clone()],
5713 cx,
5714 );
5715 });
5716
5717 let mut rust_shutdown_requests = fake_rust_server
5718 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5719 let mut json_shutdown_requests = fake_json_server
5720 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5721 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5722
5723 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5724 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5725
5726 // Ensure rust document is reopened in new rust language server
5727 assert_eq!(
5728 fake_rust_server
5729 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5730 .await
5731 .text_document,
5732 lsp::TextDocumentItem {
5733 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5734 version: 1,
5735 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5736 language_id: Default::default()
5737 }
5738 );
5739
5740 // Ensure json documents are reopened in new json language server
5741 assert_set_eq!(
5742 [
5743 fake_json_server
5744 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5745 .await
5746 .text_document,
5747 fake_json_server
5748 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5749 .await
5750 .text_document,
5751 ],
5752 [
5753 lsp::TextDocumentItem {
5754 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5755 version: 0,
5756 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5757 language_id: Default::default()
5758 },
5759 lsp::TextDocumentItem {
5760 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5761 version: 1,
5762 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5763 language_id: Default::default()
5764 }
5765 ]
5766 );
5767
5768 // Close notifications are reported only to servers matching the buffer's language.
5769 cx.update(|_| drop(json_buffer));
5770 let close_message = lsp::DidCloseTextDocumentParams {
5771 text_document: lsp::TextDocumentIdentifier::new(
5772 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5773 ),
5774 };
5775 assert_eq!(
5776 fake_json_server
5777 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5778 .await,
5779 close_message,
5780 );
5781 }
5782
5783 #[gpui::test]
5784 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5785 cx.foreground().forbid_parking();
5786
5787 let fs = FakeFs::new(cx.background());
5788 fs.insert_tree(
5789 "/dir",
5790 json!({
5791 "a.rs": "let a = 1;",
5792 "b.rs": "let b = 2;"
5793 }),
5794 )
5795 .await;
5796
5797 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5798
5799 let buffer_a = project
5800 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5801 .await
5802 .unwrap();
5803 let buffer_b = project
5804 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5805 .await
5806 .unwrap();
5807
5808 project.update(cx, |project, cx| {
5809 project
5810 .update_diagnostics(
5811 lsp::PublishDiagnosticsParams {
5812 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5813 version: None,
5814 diagnostics: vec![lsp::Diagnostic {
5815 range: lsp::Range::new(
5816 lsp::Position::new(0, 4),
5817 lsp::Position::new(0, 5),
5818 ),
5819 severity: Some(lsp::DiagnosticSeverity::ERROR),
5820 message: "error 1".to_string(),
5821 ..Default::default()
5822 }],
5823 },
5824 &[],
5825 cx,
5826 )
5827 .unwrap();
5828 project
5829 .update_diagnostics(
5830 lsp::PublishDiagnosticsParams {
5831 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5832 version: None,
5833 diagnostics: vec![lsp::Diagnostic {
5834 range: lsp::Range::new(
5835 lsp::Position::new(0, 4),
5836 lsp::Position::new(0, 5),
5837 ),
5838 severity: Some(lsp::DiagnosticSeverity::WARNING),
5839 message: "error 2".to_string(),
5840 ..Default::default()
5841 }],
5842 },
5843 &[],
5844 cx,
5845 )
5846 .unwrap();
5847 });
5848
5849 buffer_a.read_with(cx, |buffer, _| {
5850 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5851 assert_eq!(
5852 chunks
5853 .iter()
5854 .map(|(s, d)| (s.as_str(), *d))
5855 .collect::<Vec<_>>(),
5856 &[
5857 ("let ", None),
5858 ("a", Some(DiagnosticSeverity::ERROR)),
5859 (" = 1;", None),
5860 ]
5861 );
5862 });
5863 buffer_b.read_with(cx, |buffer, _| {
5864 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5865 assert_eq!(
5866 chunks
5867 .iter()
5868 .map(|(s, d)| (s.as_str(), *d))
5869 .collect::<Vec<_>>(),
5870 &[
5871 ("let ", None),
5872 ("b", Some(DiagnosticSeverity::WARNING)),
5873 (" = 2;", None),
5874 ]
5875 );
5876 });
5877 }
5878
5879 #[gpui::test]
5880 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5881 cx.foreground().forbid_parking();
5882
5883 let progress_token = "the-progress-token";
5884 let mut language = Language::new(
5885 LanguageConfig {
5886 name: "Rust".into(),
5887 path_suffixes: vec!["rs".to_string()],
5888 ..Default::default()
5889 },
5890 Some(tree_sitter_rust::language()),
5891 );
5892 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5893 disk_based_diagnostics_progress_token: Some(progress_token),
5894 disk_based_diagnostics_sources: &["disk"],
5895 ..Default::default()
5896 });
5897
5898 let fs = FakeFs::new(cx.background());
5899 fs.insert_tree(
5900 "/dir",
5901 json!({
5902 "a.rs": "fn a() { A }",
5903 "b.rs": "const y: i32 = 1",
5904 }),
5905 )
5906 .await;
5907
5908 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5909 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5910 let worktree_id =
5911 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5912
5913 // Cause worktree to start the fake language server
5914 let _buffer = project
5915 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5916 .await
5917 .unwrap();
5918
5919 let mut events = subscribe(&project, cx);
5920
5921 let mut fake_server = fake_servers.next().await.unwrap();
5922 fake_server.start_progress(progress_token).await;
5923 assert_eq!(
5924 events.next().await.unwrap(),
5925 Event::DiskBasedDiagnosticsStarted
5926 );
5927
5928 fake_server.start_progress(progress_token).await;
5929 fake_server.end_progress(progress_token).await;
5930 fake_server.start_progress(progress_token).await;
5931
5932 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5933 lsp::PublishDiagnosticsParams {
5934 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5935 version: None,
5936 diagnostics: vec![lsp::Diagnostic {
5937 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5938 severity: Some(lsp::DiagnosticSeverity::ERROR),
5939 message: "undefined variable 'A'".to_string(),
5940 ..Default::default()
5941 }],
5942 },
5943 );
5944 assert_eq!(
5945 events.next().await.unwrap(),
5946 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5947 );
5948
5949 fake_server.end_progress(progress_token).await;
5950 fake_server.end_progress(progress_token).await;
5951 assert_eq!(
5952 events.next().await.unwrap(),
5953 Event::DiskBasedDiagnosticsUpdated
5954 );
5955 assert_eq!(
5956 events.next().await.unwrap(),
5957 Event::DiskBasedDiagnosticsFinished
5958 );
5959
5960 let buffer = project
5961 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5962 .await
5963 .unwrap();
5964
5965 buffer.read_with(cx, |buffer, _| {
5966 let snapshot = buffer.snapshot();
5967 let diagnostics = snapshot
5968 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5969 .collect::<Vec<_>>();
5970 assert_eq!(
5971 diagnostics,
5972 &[DiagnosticEntry {
5973 range: Point::new(0, 9)..Point::new(0, 10),
5974 diagnostic: Diagnostic {
5975 severity: lsp::DiagnosticSeverity::ERROR,
5976 message: "undefined variable 'A'".to_string(),
5977 group_id: 0,
5978 is_primary: true,
5979 ..Default::default()
5980 }
5981 }]
5982 )
5983 });
5984
5985 // Ensure publishing empty diagnostics twice only results in one update event.
5986 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5987 lsp::PublishDiagnosticsParams {
5988 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5989 version: None,
5990 diagnostics: Default::default(),
5991 },
5992 );
5993 assert_eq!(
5994 events.next().await.unwrap(),
5995 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5996 );
5997
5998 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5999 lsp::PublishDiagnosticsParams {
6000 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6001 version: None,
6002 diagnostics: Default::default(),
6003 },
6004 );
6005 cx.foreground().run_until_parked();
6006 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6007 }
6008
6009 #[gpui::test]
6010 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6011 cx.foreground().forbid_parking();
6012
6013 let progress_token = "the-progress-token";
6014 let mut language = Language::new(
6015 LanguageConfig {
6016 path_suffixes: vec!["rs".to_string()],
6017 ..Default::default()
6018 },
6019 None,
6020 );
6021 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6022 disk_based_diagnostics_sources: &["disk"],
6023 disk_based_diagnostics_progress_token: Some(progress_token),
6024 ..Default::default()
6025 });
6026
6027 let fs = FakeFs::new(cx.background());
6028 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6029
6030 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6031 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6032
6033 let buffer = project
6034 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6035 .await
6036 .unwrap();
6037
6038 // Simulate diagnostics starting to update.
6039 let mut fake_server = fake_servers.next().await.unwrap();
6040 fake_server.start_progress(progress_token).await;
6041
6042 // Restart the server before the diagnostics finish updating.
6043 project.update(cx, |project, cx| {
6044 project.restart_language_servers_for_buffers([buffer], cx);
6045 });
6046 let mut events = subscribe(&project, cx);
6047
6048 // Simulate the newly started server sending more diagnostics.
6049 let mut fake_server = fake_servers.next().await.unwrap();
6050 fake_server.start_progress(progress_token).await;
6051 assert_eq!(
6052 events.next().await.unwrap(),
6053 Event::DiskBasedDiagnosticsStarted
6054 );
6055
6056 // All diagnostics are considered done, despite the old server's diagnostic
6057 // task never completing.
6058 fake_server.end_progress(progress_token).await;
6059 assert_eq!(
6060 events.next().await.unwrap(),
6061 Event::DiskBasedDiagnosticsUpdated
6062 );
6063 assert_eq!(
6064 events.next().await.unwrap(),
6065 Event::DiskBasedDiagnosticsFinished
6066 );
6067 project.read_with(cx, |project, _| {
6068 assert!(!project.is_running_disk_based_diagnostics());
6069 });
6070 }
6071
6072 #[gpui::test]
6073 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6074 cx.foreground().forbid_parking();
6075
6076 let mut language = Language::new(
6077 LanguageConfig {
6078 name: "Rust".into(),
6079 path_suffixes: vec!["rs".to_string()],
6080 ..Default::default()
6081 },
6082 Some(tree_sitter_rust::language()),
6083 );
6084 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6085 disk_based_diagnostics_sources: &["disk"],
6086 ..Default::default()
6087 });
6088
6089 let text = "
6090 fn a() { A }
6091 fn b() { BB }
6092 fn c() { CCC }
6093 "
6094 .unindent();
6095
6096 let fs = FakeFs::new(cx.background());
6097 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6098
6099 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6100 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6101
6102 let buffer = project
6103 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6104 .await
6105 .unwrap();
6106
6107 let mut fake_server = fake_servers.next().await.unwrap();
6108 let open_notification = fake_server
6109 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6110 .await;
6111
6112 // Edit the buffer, moving the content down
6113 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6114 let change_notification_1 = fake_server
6115 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6116 .await;
6117 assert!(
6118 change_notification_1.text_document.version > open_notification.text_document.version
6119 );
6120
6121 // Report some diagnostics for the initial version of the buffer
6122 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6123 lsp::PublishDiagnosticsParams {
6124 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6125 version: Some(open_notification.text_document.version),
6126 diagnostics: vec![
6127 lsp::Diagnostic {
6128 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6129 severity: Some(DiagnosticSeverity::ERROR),
6130 message: "undefined variable 'A'".to_string(),
6131 source: Some("disk".to_string()),
6132 ..Default::default()
6133 },
6134 lsp::Diagnostic {
6135 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6136 severity: Some(DiagnosticSeverity::ERROR),
6137 message: "undefined variable 'BB'".to_string(),
6138 source: Some("disk".to_string()),
6139 ..Default::default()
6140 },
6141 lsp::Diagnostic {
6142 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6143 severity: Some(DiagnosticSeverity::ERROR),
6144 source: Some("disk".to_string()),
6145 message: "undefined variable 'CCC'".to_string(),
6146 ..Default::default()
6147 },
6148 ],
6149 },
6150 );
6151
6152 // The diagnostics have moved down since they were created.
6153 buffer.next_notification(cx).await;
6154 buffer.read_with(cx, |buffer, _| {
6155 assert_eq!(
6156 buffer
6157 .snapshot()
6158 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6159 .collect::<Vec<_>>(),
6160 &[
6161 DiagnosticEntry {
6162 range: Point::new(3, 9)..Point::new(3, 11),
6163 diagnostic: Diagnostic {
6164 severity: DiagnosticSeverity::ERROR,
6165 message: "undefined variable 'BB'".to_string(),
6166 is_disk_based: true,
6167 group_id: 1,
6168 is_primary: true,
6169 ..Default::default()
6170 },
6171 },
6172 DiagnosticEntry {
6173 range: Point::new(4, 9)..Point::new(4, 12),
6174 diagnostic: Diagnostic {
6175 severity: DiagnosticSeverity::ERROR,
6176 message: "undefined variable 'CCC'".to_string(),
6177 is_disk_based: true,
6178 group_id: 2,
6179 is_primary: true,
6180 ..Default::default()
6181 }
6182 }
6183 ]
6184 );
6185 assert_eq!(
6186 chunks_with_diagnostics(buffer, 0..buffer.len()),
6187 [
6188 ("\n\nfn a() { ".to_string(), None),
6189 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6190 (" }\nfn b() { ".to_string(), None),
6191 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6192 (" }\nfn c() { ".to_string(), None),
6193 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6194 (" }\n".to_string(), None),
6195 ]
6196 );
6197 assert_eq!(
6198 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6199 [
6200 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6201 (" }\nfn c() { ".to_string(), None),
6202 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6203 ]
6204 );
6205 });
6206
6207 // Ensure overlapping diagnostics are highlighted correctly.
6208 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6209 lsp::PublishDiagnosticsParams {
6210 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6211 version: Some(open_notification.text_document.version),
6212 diagnostics: vec![
6213 lsp::Diagnostic {
6214 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6215 severity: Some(DiagnosticSeverity::ERROR),
6216 message: "undefined variable 'A'".to_string(),
6217 source: Some("disk".to_string()),
6218 ..Default::default()
6219 },
6220 lsp::Diagnostic {
6221 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6222 severity: Some(DiagnosticSeverity::WARNING),
6223 message: "unreachable statement".to_string(),
6224 source: Some("disk".to_string()),
6225 ..Default::default()
6226 },
6227 ],
6228 },
6229 );
6230
6231 buffer.next_notification(cx).await;
6232 buffer.read_with(cx, |buffer, _| {
6233 assert_eq!(
6234 buffer
6235 .snapshot()
6236 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6237 .collect::<Vec<_>>(),
6238 &[
6239 DiagnosticEntry {
6240 range: Point::new(2, 9)..Point::new(2, 12),
6241 diagnostic: Diagnostic {
6242 severity: DiagnosticSeverity::WARNING,
6243 message: "unreachable statement".to_string(),
6244 is_disk_based: true,
6245 group_id: 1,
6246 is_primary: true,
6247 ..Default::default()
6248 }
6249 },
6250 DiagnosticEntry {
6251 range: Point::new(2, 9)..Point::new(2, 10),
6252 diagnostic: Diagnostic {
6253 severity: DiagnosticSeverity::ERROR,
6254 message: "undefined variable 'A'".to_string(),
6255 is_disk_based: true,
6256 group_id: 0,
6257 is_primary: true,
6258 ..Default::default()
6259 },
6260 }
6261 ]
6262 );
6263 assert_eq!(
6264 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6265 [
6266 ("fn a() { ".to_string(), None),
6267 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6268 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6269 ("\n".to_string(), None),
6270 ]
6271 );
6272 assert_eq!(
6273 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6274 [
6275 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6276 ("\n".to_string(), None),
6277 ]
6278 );
6279 });
6280
6281 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6282 // changes since the last save.
6283 buffer.update(cx, |buffer, cx| {
6284 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6285 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6286 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6287 });
6288 let change_notification_2 = fake_server
6289 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6290 .await;
6291 assert!(
6292 change_notification_2.text_document.version
6293 > change_notification_1.text_document.version
6294 );
6295
6296 // Handle out-of-order diagnostics
6297 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6298 lsp::PublishDiagnosticsParams {
6299 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6300 version: Some(change_notification_2.text_document.version),
6301 diagnostics: vec![
6302 lsp::Diagnostic {
6303 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6304 severity: Some(DiagnosticSeverity::ERROR),
6305 message: "undefined variable 'BB'".to_string(),
6306 source: Some("disk".to_string()),
6307 ..Default::default()
6308 },
6309 lsp::Diagnostic {
6310 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6311 severity: Some(DiagnosticSeverity::WARNING),
6312 message: "undefined variable 'A'".to_string(),
6313 source: Some("disk".to_string()),
6314 ..Default::default()
6315 },
6316 ],
6317 },
6318 );
6319
6320 buffer.next_notification(cx).await;
6321 buffer.read_with(cx, |buffer, _| {
6322 assert_eq!(
6323 buffer
6324 .snapshot()
6325 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6326 .collect::<Vec<_>>(),
6327 &[
6328 DiagnosticEntry {
6329 range: Point::new(2, 21)..Point::new(2, 22),
6330 diagnostic: Diagnostic {
6331 severity: DiagnosticSeverity::WARNING,
6332 message: "undefined variable 'A'".to_string(),
6333 is_disk_based: true,
6334 group_id: 1,
6335 is_primary: true,
6336 ..Default::default()
6337 }
6338 },
6339 DiagnosticEntry {
6340 range: Point::new(3, 9)..Point::new(3, 14),
6341 diagnostic: Diagnostic {
6342 severity: DiagnosticSeverity::ERROR,
6343 message: "undefined variable 'BB'".to_string(),
6344 is_disk_based: true,
6345 group_id: 0,
6346 is_primary: true,
6347 ..Default::default()
6348 },
6349 }
6350 ]
6351 );
6352 });
6353 }
6354
6355 #[gpui::test]
6356 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6357 cx.foreground().forbid_parking();
6358
6359 let text = concat!(
6360 "let one = ;\n", //
6361 "let two = \n",
6362 "let three = 3;\n",
6363 );
6364
6365 let fs = FakeFs::new(cx.background());
6366 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6367
6368 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6369 let buffer = project
6370 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6371 .await
6372 .unwrap();
6373
6374 project.update(cx, |project, cx| {
6375 project
6376 .update_buffer_diagnostics(
6377 &buffer,
6378 vec![
6379 DiagnosticEntry {
6380 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6381 diagnostic: Diagnostic {
6382 severity: DiagnosticSeverity::ERROR,
6383 message: "syntax error 1".to_string(),
6384 ..Default::default()
6385 },
6386 },
6387 DiagnosticEntry {
6388 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6389 diagnostic: Diagnostic {
6390 severity: DiagnosticSeverity::ERROR,
6391 message: "syntax error 2".to_string(),
6392 ..Default::default()
6393 },
6394 },
6395 ],
6396 None,
6397 cx,
6398 )
6399 .unwrap();
6400 });
6401
6402 // An empty range is extended forward to include the following character.
6403 // At the end of a line, an empty range is extended backward to include
6404 // the preceding character.
6405 buffer.read_with(cx, |buffer, _| {
6406 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6407 assert_eq!(
6408 chunks
6409 .iter()
6410 .map(|(s, d)| (s.as_str(), *d))
6411 .collect::<Vec<_>>(),
6412 &[
6413 ("let one = ", None),
6414 (";", Some(DiagnosticSeverity::ERROR)),
6415 ("\nlet two =", None),
6416 (" ", Some(DiagnosticSeverity::ERROR)),
6417 ("\nlet three = 3;\n", None)
6418 ]
6419 );
6420 });
6421 }
6422
6423 #[gpui::test]
6424 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6425 cx.foreground().forbid_parking();
6426
6427 let mut language = Language::new(
6428 LanguageConfig {
6429 name: "Rust".into(),
6430 path_suffixes: vec!["rs".to_string()],
6431 ..Default::default()
6432 },
6433 Some(tree_sitter_rust::language()),
6434 );
6435 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6436
6437 let text = "
6438 fn a() {
6439 f1();
6440 }
6441 fn b() {
6442 f2();
6443 }
6444 fn c() {
6445 f3();
6446 }
6447 "
6448 .unindent();
6449
6450 let fs = FakeFs::new(cx.background());
6451 fs.insert_tree(
6452 "/dir",
6453 json!({
6454 "a.rs": text.clone(),
6455 }),
6456 )
6457 .await;
6458
6459 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6460 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6461 let buffer = project
6462 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6463 .await
6464 .unwrap();
6465
6466 let mut fake_server = fake_servers.next().await.unwrap();
6467 let lsp_document_version = fake_server
6468 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6469 .await
6470 .text_document
6471 .version;
6472
6473 // Simulate editing the buffer after the language server computes some edits.
6474 buffer.update(cx, |buffer, cx| {
6475 buffer.edit(
6476 [(
6477 Point::new(0, 0)..Point::new(0, 0),
6478 "// above first function\n",
6479 )],
6480 cx,
6481 );
6482 buffer.edit(
6483 [(
6484 Point::new(2, 0)..Point::new(2, 0),
6485 " // inside first function\n",
6486 )],
6487 cx,
6488 );
6489 buffer.edit(
6490 [(
6491 Point::new(6, 4)..Point::new(6, 4),
6492 "// inside second function ",
6493 )],
6494 cx,
6495 );
6496
6497 assert_eq!(
6498 buffer.text(),
6499 "
6500 // above first function
6501 fn a() {
6502 // inside first function
6503 f1();
6504 }
6505 fn b() {
6506 // inside second function f2();
6507 }
6508 fn c() {
6509 f3();
6510 }
6511 "
6512 .unindent()
6513 );
6514 });
6515
6516 let edits = project
6517 .update(cx, |project, cx| {
6518 project.edits_from_lsp(
6519 &buffer,
6520 vec![
6521 // replace body of first function
6522 lsp::TextEdit {
6523 range: lsp::Range::new(
6524 lsp::Position::new(0, 0),
6525 lsp::Position::new(3, 0),
6526 ),
6527 new_text: "
6528 fn a() {
6529 f10();
6530 }
6531 "
6532 .unindent(),
6533 },
6534 // edit inside second function
6535 lsp::TextEdit {
6536 range: lsp::Range::new(
6537 lsp::Position::new(4, 6),
6538 lsp::Position::new(4, 6),
6539 ),
6540 new_text: "00".into(),
6541 },
6542 // edit inside third function via two distinct edits
6543 lsp::TextEdit {
6544 range: lsp::Range::new(
6545 lsp::Position::new(7, 5),
6546 lsp::Position::new(7, 5),
6547 ),
6548 new_text: "4000".into(),
6549 },
6550 lsp::TextEdit {
6551 range: lsp::Range::new(
6552 lsp::Position::new(7, 5),
6553 lsp::Position::new(7, 6),
6554 ),
6555 new_text: "".into(),
6556 },
6557 ],
6558 Some(lsp_document_version),
6559 cx,
6560 )
6561 })
6562 .await
6563 .unwrap();
6564
6565 buffer.update(cx, |buffer, cx| {
6566 for (range, new_text) in edits {
6567 buffer.edit([(range, new_text)], cx);
6568 }
6569 assert_eq!(
6570 buffer.text(),
6571 "
6572 // above first function
6573 fn a() {
6574 // inside first function
6575 f10();
6576 }
6577 fn b() {
6578 // inside second function f200();
6579 }
6580 fn c() {
6581 f4000();
6582 }
6583 "
6584 .unindent()
6585 );
6586 });
6587 }
6588
6589 #[gpui::test]
6590 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6591 cx.foreground().forbid_parking();
6592
6593 let text = "
6594 use a::b;
6595 use a::c;
6596
6597 fn f() {
6598 b();
6599 c();
6600 }
6601 "
6602 .unindent();
6603
6604 let fs = FakeFs::new(cx.background());
6605 fs.insert_tree(
6606 "/dir",
6607 json!({
6608 "a.rs": text.clone(),
6609 }),
6610 )
6611 .await;
6612
6613 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6614 let buffer = project
6615 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6616 .await
6617 .unwrap();
6618
6619 // Simulate the language server sending us a small edit in the form of a very large diff.
6620 // Rust-analyzer does this when performing a merge-imports code action.
6621 let edits = project
6622 .update(cx, |project, cx| {
6623 project.edits_from_lsp(
6624 &buffer,
6625 [
6626 // Replace the first use statement without editing the semicolon.
6627 lsp::TextEdit {
6628 range: lsp::Range::new(
6629 lsp::Position::new(0, 4),
6630 lsp::Position::new(0, 8),
6631 ),
6632 new_text: "a::{b, c}".into(),
6633 },
6634 // Reinsert the remainder of the file between the semicolon and the final
6635 // newline of the file.
6636 lsp::TextEdit {
6637 range: lsp::Range::new(
6638 lsp::Position::new(0, 9),
6639 lsp::Position::new(0, 9),
6640 ),
6641 new_text: "\n\n".into(),
6642 },
6643 lsp::TextEdit {
6644 range: lsp::Range::new(
6645 lsp::Position::new(0, 9),
6646 lsp::Position::new(0, 9),
6647 ),
6648 new_text: "
6649 fn f() {
6650 b();
6651 c();
6652 }"
6653 .unindent(),
6654 },
6655 // Delete everything after the first newline of the file.
6656 lsp::TextEdit {
6657 range: lsp::Range::new(
6658 lsp::Position::new(1, 0),
6659 lsp::Position::new(7, 0),
6660 ),
6661 new_text: "".into(),
6662 },
6663 ],
6664 None,
6665 cx,
6666 )
6667 })
6668 .await
6669 .unwrap();
6670
6671 buffer.update(cx, |buffer, cx| {
6672 let edits = edits
6673 .into_iter()
6674 .map(|(range, text)| {
6675 (
6676 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6677 text,
6678 )
6679 })
6680 .collect::<Vec<_>>();
6681
6682 assert_eq!(
6683 edits,
6684 [
6685 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6686 (Point::new(1, 0)..Point::new(2, 0), "".into())
6687 ]
6688 );
6689
6690 for (range, new_text) in edits {
6691 buffer.edit([(range, new_text)], cx);
6692 }
6693 assert_eq!(
6694 buffer.text(),
6695 "
6696 use a::{b, c};
6697
6698 fn f() {
6699 b();
6700 c();
6701 }
6702 "
6703 .unindent()
6704 );
6705 });
6706 }
6707
6708 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6709 buffer: &Buffer,
6710 range: Range<T>,
6711 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6712 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6713 for chunk in buffer.snapshot().chunks(range, true) {
6714 if chunks.last().map_or(false, |prev_chunk| {
6715 prev_chunk.1 == chunk.diagnostic_severity
6716 }) {
6717 chunks.last_mut().unwrap().0.push_str(chunk.text);
6718 } else {
6719 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6720 }
6721 }
6722 chunks
6723 }
6724
6725 #[gpui::test]
6726 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6727 let dir = temp_tree(json!({
6728 "root": {
6729 "dir1": {},
6730 "dir2": {
6731 "dir3": {}
6732 }
6733 }
6734 }));
6735
6736 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6737 let cancel_flag = Default::default();
6738 let results = project
6739 .read_with(cx, |project, cx| {
6740 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6741 })
6742 .await;
6743
6744 assert!(results.is_empty());
6745 }
6746
6747 #[gpui::test(iterations = 10)]
6748 async fn test_definition(cx: &mut gpui::TestAppContext) {
6749 let mut language = Language::new(
6750 LanguageConfig {
6751 name: "Rust".into(),
6752 path_suffixes: vec!["rs".to_string()],
6753 ..Default::default()
6754 },
6755 Some(tree_sitter_rust::language()),
6756 );
6757 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6758
6759 let fs = FakeFs::new(cx.background());
6760 fs.insert_tree(
6761 "/dir",
6762 json!({
6763 "a.rs": "const fn a() { A }",
6764 "b.rs": "const y: i32 = crate::a()",
6765 }),
6766 )
6767 .await;
6768
6769 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6770 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6771
6772 let buffer = project
6773 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6774 .await
6775 .unwrap();
6776
6777 let fake_server = fake_servers.next().await.unwrap();
6778 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6779 let params = params.text_document_position_params;
6780 assert_eq!(
6781 params.text_document.uri.to_file_path().unwrap(),
6782 Path::new("/dir/b.rs"),
6783 );
6784 assert_eq!(params.position, lsp::Position::new(0, 22));
6785
6786 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6787 lsp::Location::new(
6788 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6789 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6790 ),
6791 )))
6792 });
6793
6794 let mut definitions = project
6795 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6796 .await
6797 .unwrap();
6798
6799 assert_eq!(definitions.len(), 1);
6800 let definition = definitions.pop().unwrap();
6801 cx.update(|cx| {
6802 let target_buffer = definition.buffer.read(cx);
6803 assert_eq!(
6804 target_buffer
6805 .file()
6806 .unwrap()
6807 .as_local()
6808 .unwrap()
6809 .abs_path(cx),
6810 Path::new("/dir/a.rs"),
6811 );
6812 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6813 assert_eq!(
6814 list_worktrees(&project, cx),
6815 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6816 );
6817
6818 drop(definition);
6819 });
6820 cx.read(|cx| {
6821 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6822 });
6823
6824 fn list_worktrees<'a>(
6825 project: &'a ModelHandle<Project>,
6826 cx: &'a AppContext,
6827 ) -> Vec<(&'a Path, bool)> {
6828 project
6829 .read(cx)
6830 .worktrees(cx)
6831 .map(|worktree| {
6832 let worktree = worktree.read(cx);
6833 (
6834 worktree.as_local().unwrap().abs_path().as_ref(),
6835 worktree.is_visible(),
6836 )
6837 })
6838 .collect::<Vec<_>>()
6839 }
6840 }
6841
6842 #[gpui::test]
6843 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6844 let mut language = Language::new(
6845 LanguageConfig {
6846 name: "TypeScript".into(),
6847 path_suffixes: vec!["ts".to_string()],
6848 ..Default::default()
6849 },
6850 Some(tree_sitter_typescript::language_typescript()),
6851 );
6852 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6853
6854 let fs = FakeFs::new(cx.background());
6855 fs.insert_tree(
6856 "/dir",
6857 json!({
6858 "a.ts": "",
6859 }),
6860 )
6861 .await;
6862
6863 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6864 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6865 let buffer = project
6866 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6867 .await
6868 .unwrap();
6869
6870 let fake_server = fake_language_servers.next().await.unwrap();
6871
6872 let text = "let a = b.fqn";
6873 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6874 let completions = project.update(cx, |project, cx| {
6875 project.completions(&buffer, text.len(), cx)
6876 });
6877
6878 fake_server
6879 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6880 Ok(Some(lsp::CompletionResponse::Array(vec![
6881 lsp::CompletionItem {
6882 label: "fullyQualifiedName?".into(),
6883 insert_text: Some("fullyQualifiedName".into()),
6884 ..Default::default()
6885 },
6886 ])))
6887 })
6888 .next()
6889 .await;
6890 let completions = completions.await.unwrap();
6891 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6892 assert_eq!(completions.len(), 1);
6893 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6894 assert_eq!(
6895 completions[0].old_range.to_offset(&snapshot),
6896 text.len() - 3..text.len()
6897 );
6898 }
6899
6900 #[gpui::test(iterations = 10)]
6901 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6902 let mut language = Language::new(
6903 LanguageConfig {
6904 name: "TypeScript".into(),
6905 path_suffixes: vec!["ts".to_string()],
6906 ..Default::default()
6907 },
6908 None,
6909 );
6910 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6911
6912 let fs = FakeFs::new(cx.background());
6913 fs.insert_tree(
6914 "/dir",
6915 json!({
6916 "a.ts": "a",
6917 }),
6918 )
6919 .await;
6920
6921 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6922 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6923 let buffer = project
6924 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6925 .await
6926 .unwrap();
6927
6928 let fake_server = fake_language_servers.next().await.unwrap();
6929
6930 // Language server returns code actions that contain commands, and not edits.
6931 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6932 fake_server
6933 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6934 Ok(Some(vec![
6935 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6936 title: "The code action".into(),
6937 command: Some(lsp::Command {
6938 title: "The command".into(),
6939 command: "_the/command".into(),
6940 arguments: Some(vec![json!("the-argument")]),
6941 }),
6942 ..Default::default()
6943 }),
6944 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6945 title: "two".into(),
6946 ..Default::default()
6947 }),
6948 ]))
6949 })
6950 .next()
6951 .await;
6952
6953 let action = actions.await.unwrap()[0].clone();
6954 let apply = project.update(cx, |project, cx| {
6955 project.apply_code_action(buffer.clone(), action, true, cx)
6956 });
6957
6958 // Resolving the code action does not populate its edits. In absence of
6959 // edits, we must execute the given command.
6960 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6961 |action, _| async move { Ok(action) },
6962 );
6963
6964 // While executing the command, the language server sends the editor
6965 // a `workspaceEdit` request.
6966 fake_server
6967 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6968 let fake = fake_server.clone();
6969 move |params, _| {
6970 assert_eq!(params.command, "_the/command");
6971 let fake = fake.clone();
6972 async move {
6973 fake.server
6974 .request::<lsp::request::ApplyWorkspaceEdit>(
6975 lsp::ApplyWorkspaceEditParams {
6976 label: None,
6977 edit: lsp::WorkspaceEdit {
6978 changes: Some(
6979 [(
6980 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6981 vec![lsp::TextEdit {
6982 range: lsp::Range::new(
6983 lsp::Position::new(0, 0),
6984 lsp::Position::new(0, 0),
6985 ),
6986 new_text: "X".into(),
6987 }],
6988 )]
6989 .into_iter()
6990 .collect(),
6991 ),
6992 ..Default::default()
6993 },
6994 },
6995 )
6996 .await
6997 .unwrap();
6998 Ok(Some(json!(null)))
6999 }
7000 }
7001 })
7002 .next()
7003 .await;
7004
7005 // Applying the code action returns a project transaction containing the edits
7006 // sent by the language server in its `workspaceEdit` request.
7007 let transaction = apply.await.unwrap();
7008 assert!(transaction.0.contains_key(&buffer));
7009 buffer.update(cx, |buffer, cx| {
7010 assert_eq!(buffer.text(), "Xa");
7011 buffer.undo(cx);
7012 assert_eq!(buffer.text(), "a");
7013 });
7014 }
7015
7016 #[gpui::test]
7017 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7018 let fs = FakeFs::new(cx.background());
7019 fs.insert_tree(
7020 "/dir",
7021 json!({
7022 "file1": "the old contents",
7023 }),
7024 )
7025 .await;
7026
7027 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7028 let buffer = project
7029 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7030 .await
7031 .unwrap();
7032 buffer
7033 .update(cx, |buffer, cx| {
7034 assert_eq!(buffer.text(), "the old contents");
7035 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7036 buffer.save(cx)
7037 })
7038 .await
7039 .unwrap();
7040
7041 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7042 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7043 }
7044
7045 #[gpui::test]
7046 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7047 let fs = FakeFs::new(cx.background());
7048 fs.insert_tree(
7049 "/dir",
7050 json!({
7051 "file1": "the old contents",
7052 }),
7053 )
7054 .await;
7055
7056 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7057 let buffer = project
7058 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7059 .await
7060 .unwrap();
7061 buffer
7062 .update(cx, |buffer, cx| {
7063 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7064 buffer.save(cx)
7065 })
7066 .await
7067 .unwrap();
7068
7069 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7070 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7071 }
7072
7073 #[gpui::test]
7074 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7075 let fs = FakeFs::new(cx.background());
7076 fs.insert_tree("/dir", json!({})).await;
7077
7078 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7079 let buffer = project.update(cx, |project, cx| {
7080 project.create_buffer("", None, cx).unwrap()
7081 });
7082 buffer.update(cx, |buffer, cx| {
7083 buffer.edit([(0..0, "abc")], cx);
7084 assert!(buffer.is_dirty());
7085 assert!(!buffer.has_conflict());
7086 });
7087 project
7088 .update(cx, |project, cx| {
7089 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7090 })
7091 .await
7092 .unwrap();
7093 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7094 buffer.read_with(cx, |buffer, cx| {
7095 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7096 assert!(!buffer.is_dirty());
7097 assert!(!buffer.has_conflict());
7098 });
7099
7100 let opened_buffer = project
7101 .update(cx, |project, cx| {
7102 project.open_local_buffer("/dir/file1", cx)
7103 })
7104 .await
7105 .unwrap();
7106 assert_eq!(opened_buffer, buffer);
7107 }
7108
7109 #[gpui::test(retries = 5)]
7110 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7111 let dir = temp_tree(json!({
7112 "a": {
7113 "file1": "",
7114 "file2": "",
7115 "file3": "",
7116 },
7117 "b": {
7118 "c": {
7119 "file4": "",
7120 "file5": "",
7121 }
7122 }
7123 }));
7124
7125 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7126 let rpc = project.read_with(cx, |p, _| p.client.clone());
7127
7128 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7129 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7130 async move { buffer.await.unwrap() }
7131 };
7132 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7133 project.read_with(cx, |project, cx| {
7134 let tree = project.worktrees(cx).next().unwrap();
7135 tree.read(cx)
7136 .entry_for_path(path)
7137 .expect(&format!("no entry for path {}", path))
7138 .id
7139 })
7140 };
7141
7142 let buffer2 = buffer_for_path("a/file2", cx).await;
7143 let buffer3 = buffer_for_path("a/file3", cx).await;
7144 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7145 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7146
7147 let file2_id = id_for_path("a/file2", &cx);
7148 let file3_id = id_for_path("a/file3", &cx);
7149 let file4_id = id_for_path("b/c/file4", &cx);
7150
7151 // Create a remote copy of this worktree.
7152 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7153 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7154 let (remote, load_task) = cx.update(|cx| {
7155 Worktree::remote(
7156 1,
7157 1,
7158 initial_snapshot.to_proto(&Default::default(), true),
7159 rpc.clone(),
7160 cx,
7161 )
7162 });
7163 // tree
7164 load_task.await;
7165
7166 cx.read(|cx| {
7167 assert!(!buffer2.read(cx).is_dirty());
7168 assert!(!buffer3.read(cx).is_dirty());
7169 assert!(!buffer4.read(cx).is_dirty());
7170 assert!(!buffer5.read(cx).is_dirty());
7171 });
7172
7173 // Rename and delete files and directories.
7174 tree.flush_fs_events(&cx).await;
7175 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7176 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7177 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7178 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7179 tree.flush_fs_events(&cx).await;
7180
7181 let expected_paths = vec![
7182 "a",
7183 "a/file1",
7184 "a/file2.new",
7185 "b",
7186 "d",
7187 "d/file3",
7188 "d/file4",
7189 ];
7190
7191 cx.read(|app| {
7192 assert_eq!(
7193 tree.read(app)
7194 .paths()
7195 .map(|p| p.to_str().unwrap())
7196 .collect::<Vec<_>>(),
7197 expected_paths
7198 );
7199
7200 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7201 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7202 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7203
7204 assert_eq!(
7205 buffer2.read(app).file().unwrap().path().as_ref(),
7206 Path::new("a/file2.new")
7207 );
7208 assert_eq!(
7209 buffer3.read(app).file().unwrap().path().as_ref(),
7210 Path::new("d/file3")
7211 );
7212 assert_eq!(
7213 buffer4.read(app).file().unwrap().path().as_ref(),
7214 Path::new("d/file4")
7215 );
7216 assert_eq!(
7217 buffer5.read(app).file().unwrap().path().as_ref(),
7218 Path::new("b/c/file5")
7219 );
7220
7221 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7222 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7223 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7224 assert!(buffer5.read(app).file().unwrap().is_deleted());
7225 });
7226
7227 // Update the remote worktree. Check that it becomes consistent with the
7228 // local worktree.
7229 remote.update(cx, |remote, cx| {
7230 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7231 &initial_snapshot,
7232 1,
7233 1,
7234 true,
7235 );
7236 remote
7237 .as_remote_mut()
7238 .unwrap()
7239 .snapshot
7240 .apply_remote_update(update_message)
7241 .unwrap();
7242
7243 assert_eq!(
7244 remote
7245 .paths()
7246 .map(|p| p.to_str().unwrap())
7247 .collect::<Vec<_>>(),
7248 expected_paths
7249 );
7250 });
7251 }
7252
7253 #[gpui::test]
7254 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7255 let fs = FakeFs::new(cx.background());
7256 fs.insert_tree(
7257 "/dir",
7258 json!({
7259 "a.txt": "a-contents",
7260 "b.txt": "b-contents",
7261 }),
7262 )
7263 .await;
7264
7265 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7266
7267 // Spawn multiple tasks to open paths, repeating some paths.
7268 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7269 (
7270 p.open_local_buffer("/dir/a.txt", cx),
7271 p.open_local_buffer("/dir/b.txt", cx),
7272 p.open_local_buffer("/dir/a.txt", cx),
7273 )
7274 });
7275
7276 let buffer_a_1 = buffer_a_1.await.unwrap();
7277 let buffer_a_2 = buffer_a_2.await.unwrap();
7278 let buffer_b = buffer_b.await.unwrap();
7279 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7280 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7281
7282 // There is only one buffer per path.
7283 let buffer_a_id = buffer_a_1.id();
7284 assert_eq!(buffer_a_2.id(), buffer_a_id);
7285
7286 // Open the same path again while it is still open.
7287 drop(buffer_a_1);
7288 let buffer_a_3 = project
7289 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7290 .await
7291 .unwrap();
7292
7293 // There's still only one buffer per path.
7294 assert_eq!(buffer_a_3.id(), buffer_a_id);
7295 }
7296
7297 #[gpui::test]
7298 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7299 let fs = FakeFs::new(cx.background());
7300 fs.insert_tree(
7301 "/dir",
7302 json!({
7303 "file1": "abc",
7304 "file2": "def",
7305 "file3": "ghi",
7306 }),
7307 )
7308 .await;
7309
7310 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7311
7312 let buffer1 = project
7313 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7314 .await
7315 .unwrap();
7316 let events = Rc::new(RefCell::new(Vec::new()));
7317
7318 // initially, the buffer isn't dirty.
7319 buffer1.update(cx, |buffer, cx| {
7320 cx.subscribe(&buffer1, {
7321 let events = events.clone();
7322 move |_, _, event, _| match event {
7323 BufferEvent::Operation(_) => {}
7324 _ => events.borrow_mut().push(event.clone()),
7325 }
7326 })
7327 .detach();
7328
7329 assert!(!buffer.is_dirty());
7330 assert!(events.borrow().is_empty());
7331
7332 buffer.edit([(1..2, "")], cx);
7333 });
7334
7335 // after the first edit, the buffer is dirty, and emits a dirtied event.
7336 buffer1.update(cx, |buffer, cx| {
7337 assert!(buffer.text() == "ac");
7338 assert!(buffer.is_dirty());
7339 assert_eq!(
7340 *events.borrow(),
7341 &[language::Event::Edited, language::Event::Dirtied]
7342 );
7343 events.borrow_mut().clear();
7344 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7345 });
7346
7347 // after saving, the buffer is not dirty, and emits a saved event.
7348 buffer1.update(cx, |buffer, cx| {
7349 assert!(!buffer.is_dirty());
7350 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7351 events.borrow_mut().clear();
7352
7353 buffer.edit([(1..1, "B")], cx);
7354 buffer.edit([(2..2, "D")], cx);
7355 });
7356
7357 // after editing again, the buffer is dirty, and emits another dirty event.
7358 buffer1.update(cx, |buffer, cx| {
7359 assert!(buffer.text() == "aBDc");
7360 assert!(buffer.is_dirty());
7361 assert_eq!(
7362 *events.borrow(),
7363 &[
7364 language::Event::Edited,
7365 language::Event::Dirtied,
7366 language::Event::Edited,
7367 ],
7368 );
7369 events.borrow_mut().clear();
7370
7371 // TODO - currently, after restoring the buffer to its
7372 // previously-saved state, the is still considered dirty.
7373 buffer.edit([(1..3, "")], cx);
7374 assert!(buffer.text() == "ac");
7375 assert!(buffer.is_dirty());
7376 });
7377
7378 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7379
7380 // When a file is deleted, the buffer is considered dirty.
7381 let events = Rc::new(RefCell::new(Vec::new()));
7382 let buffer2 = project
7383 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7384 .await
7385 .unwrap();
7386 buffer2.update(cx, |_, cx| {
7387 cx.subscribe(&buffer2, {
7388 let events = events.clone();
7389 move |_, _, event, _| events.borrow_mut().push(event.clone())
7390 })
7391 .detach();
7392 });
7393
7394 fs.remove_file("/dir/file2".as_ref(), Default::default())
7395 .await
7396 .unwrap();
7397 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7398 assert_eq!(
7399 *events.borrow(),
7400 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7401 );
7402
7403 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7404 let events = Rc::new(RefCell::new(Vec::new()));
7405 let buffer3 = project
7406 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7407 .await
7408 .unwrap();
7409 buffer3.update(cx, |_, cx| {
7410 cx.subscribe(&buffer3, {
7411 let events = events.clone();
7412 move |_, _, event, _| events.borrow_mut().push(event.clone())
7413 })
7414 .detach();
7415 });
7416
7417 buffer3.update(cx, |buffer, cx| {
7418 buffer.edit([(0..0, "x")], cx);
7419 });
7420 events.borrow_mut().clear();
7421 fs.remove_file("/dir/file3".as_ref(), Default::default())
7422 .await
7423 .unwrap();
7424 buffer3
7425 .condition(&cx, |_, _| !events.borrow().is_empty())
7426 .await;
7427 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7428 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7429 }
7430
7431 #[gpui::test]
7432 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7433 let initial_contents = "aaa\nbbbbb\nc\n";
7434 let fs = FakeFs::new(cx.background());
7435 fs.insert_tree(
7436 "/dir",
7437 json!({
7438 "the-file": initial_contents,
7439 }),
7440 )
7441 .await;
7442 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7443 let buffer = project
7444 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7445 .await
7446 .unwrap();
7447
7448 let anchors = (0..3)
7449 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7450 .collect::<Vec<_>>();
7451
7452 // Change the file on disk, adding two new lines of text, and removing
7453 // one line.
7454 buffer.read_with(cx, |buffer, _| {
7455 assert!(!buffer.is_dirty());
7456 assert!(!buffer.has_conflict());
7457 });
7458 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7459 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7460 .await
7461 .unwrap();
7462
7463 // Because the buffer was not modified, it is reloaded from disk. Its
7464 // contents are edited according to the diff between the old and new
7465 // file contents.
7466 buffer
7467 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7468 .await;
7469
7470 buffer.update(cx, |buffer, _| {
7471 assert_eq!(buffer.text(), new_contents);
7472 assert!(!buffer.is_dirty());
7473 assert!(!buffer.has_conflict());
7474
7475 let anchor_positions = anchors
7476 .iter()
7477 .map(|anchor| anchor.to_point(&*buffer))
7478 .collect::<Vec<_>>();
7479 assert_eq!(
7480 anchor_positions,
7481 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7482 );
7483 });
7484
7485 // Modify the buffer
7486 buffer.update(cx, |buffer, cx| {
7487 buffer.edit([(0..0, " ")], cx);
7488 assert!(buffer.is_dirty());
7489 assert!(!buffer.has_conflict());
7490 });
7491
7492 // Change the file on disk again, adding blank lines to the beginning.
7493 fs.save(
7494 "/dir/the-file".as_ref(),
7495 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7496 )
7497 .await
7498 .unwrap();
7499
7500 // Because the buffer is modified, it doesn't reload from disk, but is
7501 // marked as having a conflict.
7502 buffer
7503 .condition(&cx, |buffer, _| buffer.has_conflict())
7504 .await;
7505 }
7506
7507 #[gpui::test]
7508 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7509 cx.foreground().forbid_parking();
7510
7511 let fs = FakeFs::new(cx.background());
7512 fs.insert_tree(
7513 "/the-dir",
7514 json!({
7515 "a.rs": "
7516 fn foo(mut v: Vec<usize>) {
7517 for x in &v {
7518 v.push(1);
7519 }
7520 }
7521 "
7522 .unindent(),
7523 }),
7524 )
7525 .await;
7526
7527 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7528 let buffer = project
7529 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7530 .await
7531 .unwrap();
7532
7533 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7534 let message = lsp::PublishDiagnosticsParams {
7535 uri: buffer_uri.clone(),
7536 diagnostics: vec![
7537 lsp::Diagnostic {
7538 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7539 severity: Some(DiagnosticSeverity::WARNING),
7540 message: "error 1".to_string(),
7541 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7542 location: lsp::Location {
7543 uri: buffer_uri.clone(),
7544 range: lsp::Range::new(
7545 lsp::Position::new(1, 8),
7546 lsp::Position::new(1, 9),
7547 ),
7548 },
7549 message: "error 1 hint 1".to_string(),
7550 }]),
7551 ..Default::default()
7552 },
7553 lsp::Diagnostic {
7554 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7555 severity: Some(DiagnosticSeverity::HINT),
7556 message: "error 1 hint 1".to_string(),
7557 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7558 location: lsp::Location {
7559 uri: buffer_uri.clone(),
7560 range: lsp::Range::new(
7561 lsp::Position::new(1, 8),
7562 lsp::Position::new(1, 9),
7563 ),
7564 },
7565 message: "original diagnostic".to_string(),
7566 }]),
7567 ..Default::default()
7568 },
7569 lsp::Diagnostic {
7570 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7571 severity: Some(DiagnosticSeverity::ERROR),
7572 message: "error 2".to_string(),
7573 related_information: Some(vec![
7574 lsp::DiagnosticRelatedInformation {
7575 location: lsp::Location {
7576 uri: buffer_uri.clone(),
7577 range: lsp::Range::new(
7578 lsp::Position::new(1, 13),
7579 lsp::Position::new(1, 15),
7580 ),
7581 },
7582 message: "error 2 hint 1".to_string(),
7583 },
7584 lsp::DiagnosticRelatedInformation {
7585 location: lsp::Location {
7586 uri: buffer_uri.clone(),
7587 range: lsp::Range::new(
7588 lsp::Position::new(1, 13),
7589 lsp::Position::new(1, 15),
7590 ),
7591 },
7592 message: "error 2 hint 2".to_string(),
7593 },
7594 ]),
7595 ..Default::default()
7596 },
7597 lsp::Diagnostic {
7598 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7599 severity: Some(DiagnosticSeverity::HINT),
7600 message: "error 2 hint 1".to_string(),
7601 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7602 location: lsp::Location {
7603 uri: buffer_uri.clone(),
7604 range: lsp::Range::new(
7605 lsp::Position::new(2, 8),
7606 lsp::Position::new(2, 17),
7607 ),
7608 },
7609 message: "original diagnostic".to_string(),
7610 }]),
7611 ..Default::default()
7612 },
7613 lsp::Diagnostic {
7614 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7615 severity: Some(DiagnosticSeverity::HINT),
7616 message: "error 2 hint 2".to_string(),
7617 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7618 location: lsp::Location {
7619 uri: buffer_uri.clone(),
7620 range: lsp::Range::new(
7621 lsp::Position::new(2, 8),
7622 lsp::Position::new(2, 17),
7623 ),
7624 },
7625 message: "original diagnostic".to_string(),
7626 }]),
7627 ..Default::default()
7628 },
7629 ],
7630 version: None,
7631 };
7632
7633 project
7634 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7635 .unwrap();
7636 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7637
7638 assert_eq!(
7639 buffer
7640 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7641 .collect::<Vec<_>>(),
7642 &[
7643 DiagnosticEntry {
7644 range: Point::new(1, 8)..Point::new(1, 9),
7645 diagnostic: Diagnostic {
7646 severity: DiagnosticSeverity::WARNING,
7647 message: "error 1".to_string(),
7648 group_id: 0,
7649 is_primary: true,
7650 ..Default::default()
7651 }
7652 },
7653 DiagnosticEntry {
7654 range: Point::new(1, 8)..Point::new(1, 9),
7655 diagnostic: Diagnostic {
7656 severity: DiagnosticSeverity::HINT,
7657 message: "error 1 hint 1".to_string(),
7658 group_id: 0,
7659 is_primary: false,
7660 ..Default::default()
7661 }
7662 },
7663 DiagnosticEntry {
7664 range: Point::new(1, 13)..Point::new(1, 15),
7665 diagnostic: Diagnostic {
7666 severity: DiagnosticSeverity::HINT,
7667 message: "error 2 hint 1".to_string(),
7668 group_id: 1,
7669 is_primary: false,
7670 ..Default::default()
7671 }
7672 },
7673 DiagnosticEntry {
7674 range: Point::new(1, 13)..Point::new(1, 15),
7675 diagnostic: Diagnostic {
7676 severity: DiagnosticSeverity::HINT,
7677 message: "error 2 hint 2".to_string(),
7678 group_id: 1,
7679 is_primary: false,
7680 ..Default::default()
7681 }
7682 },
7683 DiagnosticEntry {
7684 range: Point::new(2, 8)..Point::new(2, 17),
7685 diagnostic: Diagnostic {
7686 severity: DiagnosticSeverity::ERROR,
7687 message: "error 2".to_string(),
7688 group_id: 1,
7689 is_primary: true,
7690 ..Default::default()
7691 }
7692 }
7693 ]
7694 );
7695
7696 assert_eq!(
7697 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7698 &[
7699 DiagnosticEntry {
7700 range: Point::new(1, 8)..Point::new(1, 9),
7701 diagnostic: Diagnostic {
7702 severity: DiagnosticSeverity::WARNING,
7703 message: "error 1".to_string(),
7704 group_id: 0,
7705 is_primary: true,
7706 ..Default::default()
7707 }
7708 },
7709 DiagnosticEntry {
7710 range: Point::new(1, 8)..Point::new(1, 9),
7711 diagnostic: Diagnostic {
7712 severity: DiagnosticSeverity::HINT,
7713 message: "error 1 hint 1".to_string(),
7714 group_id: 0,
7715 is_primary: false,
7716 ..Default::default()
7717 }
7718 },
7719 ]
7720 );
7721 assert_eq!(
7722 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7723 &[
7724 DiagnosticEntry {
7725 range: Point::new(1, 13)..Point::new(1, 15),
7726 diagnostic: Diagnostic {
7727 severity: DiagnosticSeverity::HINT,
7728 message: "error 2 hint 1".to_string(),
7729 group_id: 1,
7730 is_primary: false,
7731 ..Default::default()
7732 }
7733 },
7734 DiagnosticEntry {
7735 range: Point::new(1, 13)..Point::new(1, 15),
7736 diagnostic: Diagnostic {
7737 severity: DiagnosticSeverity::HINT,
7738 message: "error 2 hint 2".to_string(),
7739 group_id: 1,
7740 is_primary: false,
7741 ..Default::default()
7742 }
7743 },
7744 DiagnosticEntry {
7745 range: Point::new(2, 8)..Point::new(2, 17),
7746 diagnostic: Diagnostic {
7747 severity: DiagnosticSeverity::ERROR,
7748 message: "error 2".to_string(),
7749 group_id: 1,
7750 is_primary: true,
7751 ..Default::default()
7752 }
7753 }
7754 ]
7755 );
7756 }
7757
7758 #[gpui::test]
7759 async fn test_rename(cx: &mut gpui::TestAppContext) {
7760 cx.foreground().forbid_parking();
7761
7762 let mut language = Language::new(
7763 LanguageConfig {
7764 name: "Rust".into(),
7765 path_suffixes: vec!["rs".to_string()],
7766 ..Default::default()
7767 },
7768 Some(tree_sitter_rust::language()),
7769 );
7770 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7771 capabilities: lsp::ServerCapabilities {
7772 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7773 prepare_provider: Some(true),
7774 work_done_progress_options: Default::default(),
7775 })),
7776 ..Default::default()
7777 },
7778 ..Default::default()
7779 });
7780
7781 let fs = FakeFs::new(cx.background());
7782 fs.insert_tree(
7783 "/dir",
7784 json!({
7785 "one.rs": "const ONE: usize = 1;",
7786 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7787 }),
7788 )
7789 .await;
7790
7791 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7792 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7793 let buffer = project
7794 .update(cx, |project, cx| {
7795 project.open_local_buffer("/dir/one.rs", cx)
7796 })
7797 .await
7798 .unwrap();
7799
7800 let fake_server = fake_servers.next().await.unwrap();
7801
7802 let response = project.update(cx, |project, cx| {
7803 project.prepare_rename(buffer.clone(), 7, cx)
7804 });
7805 fake_server
7806 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7807 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7808 assert_eq!(params.position, lsp::Position::new(0, 7));
7809 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7810 lsp::Position::new(0, 6),
7811 lsp::Position::new(0, 9),
7812 ))))
7813 })
7814 .next()
7815 .await
7816 .unwrap();
7817 let range = response.await.unwrap().unwrap();
7818 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7819 assert_eq!(range, 6..9);
7820
7821 let response = project.update(cx, |project, cx| {
7822 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7823 });
7824 fake_server
7825 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7826 assert_eq!(
7827 params.text_document_position.text_document.uri.as_str(),
7828 "file:///dir/one.rs"
7829 );
7830 assert_eq!(
7831 params.text_document_position.position,
7832 lsp::Position::new(0, 7)
7833 );
7834 assert_eq!(params.new_name, "THREE");
7835 Ok(Some(lsp::WorkspaceEdit {
7836 changes: Some(
7837 [
7838 (
7839 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7840 vec![lsp::TextEdit::new(
7841 lsp::Range::new(
7842 lsp::Position::new(0, 6),
7843 lsp::Position::new(0, 9),
7844 ),
7845 "THREE".to_string(),
7846 )],
7847 ),
7848 (
7849 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7850 vec![
7851 lsp::TextEdit::new(
7852 lsp::Range::new(
7853 lsp::Position::new(0, 24),
7854 lsp::Position::new(0, 27),
7855 ),
7856 "THREE".to_string(),
7857 ),
7858 lsp::TextEdit::new(
7859 lsp::Range::new(
7860 lsp::Position::new(0, 35),
7861 lsp::Position::new(0, 38),
7862 ),
7863 "THREE".to_string(),
7864 ),
7865 ],
7866 ),
7867 ]
7868 .into_iter()
7869 .collect(),
7870 ),
7871 ..Default::default()
7872 }))
7873 })
7874 .next()
7875 .await
7876 .unwrap();
7877 let mut transaction = response.await.unwrap().0;
7878 assert_eq!(transaction.len(), 2);
7879 assert_eq!(
7880 transaction
7881 .remove_entry(&buffer)
7882 .unwrap()
7883 .0
7884 .read_with(cx, |buffer, _| buffer.text()),
7885 "const THREE: usize = 1;"
7886 );
7887 assert_eq!(
7888 transaction
7889 .into_keys()
7890 .next()
7891 .unwrap()
7892 .read_with(cx, |buffer, _| buffer.text()),
7893 "const TWO: usize = one::THREE + one::THREE;"
7894 );
7895 }
7896
7897 #[gpui::test]
7898 async fn test_search(cx: &mut gpui::TestAppContext) {
7899 let fs = FakeFs::new(cx.background());
7900 fs.insert_tree(
7901 "/dir",
7902 json!({
7903 "one.rs": "const ONE: usize = 1;",
7904 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7905 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7906 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7907 }),
7908 )
7909 .await;
7910 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7911 assert_eq!(
7912 search(&project, SearchQuery::text("TWO", false, true), cx)
7913 .await
7914 .unwrap(),
7915 HashMap::from_iter([
7916 ("two.rs".to_string(), vec![6..9]),
7917 ("three.rs".to_string(), vec![37..40])
7918 ])
7919 );
7920
7921 let buffer_4 = project
7922 .update(cx, |project, cx| {
7923 project.open_local_buffer("/dir/four.rs", cx)
7924 })
7925 .await
7926 .unwrap();
7927 buffer_4.update(cx, |buffer, cx| {
7928 let text = "two::TWO";
7929 buffer.edit([(20..28, text), (31..43, text)], cx);
7930 });
7931
7932 assert_eq!(
7933 search(&project, SearchQuery::text("TWO", false, true), cx)
7934 .await
7935 .unwrap(),
7936 HashMap::from_iter([
7937 ("two.rs".to_string(), vec![6..9]),
7938 ("three.rs".to_string(), vec![37..40]),
7939 ("four.rs".to_string(), vec![25..28, 36..39])
7940 ])
7941 );
7942
7943 async fn search(
7944 project: &ModelHandle<Project>,
7945 query: SearchQuery,
7946 cx: &mut gpui::TestAppContext,
7947 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7948 let results = project
7949 .update(cx, |project, cx| project.search(query, cx))
7950 .await?;
7951
7952 Ok(results
7953 .into_iter()
7954 .map(|(buffer, ranges)| {
7955 buffer.read_with(cx, |buffer, _| {
7956 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7957 let ranges = ranges
7958 .into_iter()
7959 .map(|range| range.to_offset(buffer))
7960 .collect::<Vec<_>>();
7961 (path, ranges)
7962 })
7963 })
7964 .collect())
7965 }
7966 }
7967}