pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

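/// An entity that can report which project entry (if any) it corresponds to.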
pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

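/// The top-level model for a set of worktrees and the buffers opened within them. It owns the
/// per-worktree language servers, tracks collaborators, and keeps local state in sync with the
/// server when the project is shared or joined remotely.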
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

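/// A path to an entry, expressed as a worktree id plus a path relative to that worktree's root.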
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

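/// Counts of primary error and warning diagnostics for a single path.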
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

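/// A unique identifier for a worktree entry within a project, allocated from a shared atomic
/// counter and transmitted as a `u64` over the wire.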
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
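    /// Registers the RPC message and request handlers that `Project` models respond to on the
    /// given client.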
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

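    /// Creates a project that is hosted locally. A background task watches the client's
    /// connection status, registering the project with the server whenever a connection is
    /// established and unregistering it again on disconnect.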
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

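    /// Joins a project hosted by another user, identified by its remote id. Returns a
    /// `JoinProjectError` if the host declines the request, has closed the project, or has gone
    /// offline.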
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshared(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }

    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

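    /// Resolves to the project's remote id: immediately for a remote project, or once the project
    /// has been registered with the server for a local one.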
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    share.await?;
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }

    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

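    /// Opens the buffer for the given project path, returning the existing buffer model if the
    /// path is already open or being loaded, and otherwise loading it locally or requesting it
    /// from the host over RPC.
    ///
    /// Illustrative sketch (the `project`, `worktree_id`, and `cx` values are assumed to come
    /// from the surrounding application or test):
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| {
    ///         project.open_buffer((worktree_id, Path::new("src/main.rs")), cx)
    ///     })
    ///     .await?;
    /// ```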
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

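    /// If the buffer belongs to a local file and a language server is running for its worktree
    /// and language, sends a `textDocument/didOpen` notification for it, seeds the snapshot
    /// history used for incremental `didChange` updates, and applies any diagnostics already
    /// known for the path.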
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_servers
                            .get(&(worktree_id, adapter.name()))
                            .cloned();
                    }
                }

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }

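    /// Reacts to buffer events: operations are forwarded to collaborators via `UpdateBuffer`
    /// messages, while edits and saves are relayed to the relevant language servers as
    /// `textDocument/didChange` and `textDocument/didSave` notifications.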
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }

    fn language_servers_for_worktree(
        &self,
        worktree_id: WorktreeId,
    ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
        self.language_servers.iter().filter_map(
            move |((language_server_worktree_id, _), server)| {
                if *language_server_worktree_id == worktree_id {
                    Some(server)
                } else {
                    None
                }
            },
        )
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }

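    /// Starts a language server for the given worktree and language, unless one has already been
    /// started for that (worktree, adapter) pair. Once the server is initialized, its
    /// notifications and requests are wired up to this model, its status is broadcast to
    /// collaborators, and every open buffer in the worktree that matches the language is opened
    /// on the server.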
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }

    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }

    fn restart_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        let server_to_shutdown = self.language_servers.remove(&key);
        self.started_language_servers.remove(&key);
        server_to_shutdown
            .as_ref()
            .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
        cx.spawn_weak(|this, mut cx| async move {
            if let Some(this) = this.upgrade(&cx) {
                if let Some((_, server_to_shutdown)) = server_to_shutdown {
                    if let Some(shutdown_task) = server_to_shutdown.shutdown() {
                        shutdown_task.await;
                    }
                }

                this.update(&mut cx, |this, cx| {
                    this.start_language_server(worktree_id, worktree_path, language, cx);
                });
            }
        })
        .detach();
    }

    fn on_lsp_diagnostics_published(
        &mut self,
        server_id: usize,
        mut params: lsp::PublishDiagnosticsParams,
        adapter: &Arc<dyn LspAdapter>,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        adapter.process_diagnostics(&mut params);
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_started(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                    proto::LspDiskBasedDiagnosticsUpdating {},
                ),
            );
        }
        self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
            .log_err();
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_finished(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                    proto::LspDiskBasedDiagnosticsUpdated {},
                ),
            );
        }
    }

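    /// Handles `$/progress` notifications from a language server. Progress reported under the
    /// adapter's disk-based diagnostics token is tracked via `pending_diagnostic_updates`, while
    /// other tokens are surfaced as pending work on the server's status; both kinds of updates
    /// are also broadcast to collaborators.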
1843 fn on_lsp_progress(
1844 &mut self,
1845 progress: lsp::ProgressParams,
1846 server_id: usize,
1847 disk_based_diagnostics_progress_token: Option<&str>,
1848 cx: &mut ModelContext<Self>,
1849 ) {
1850 let token = match progress.token {
1851 lsp::NumberOrString::String(token) => token,
1852 lsp::NumberOrString::Number(token) => {
1853 log::info!("skipping numeric progress token {}", token);
1854 return;
1855 }
1856 };
1857 let progress = match progress.value {
1858 lsp::ProgressParamsValue::WorkDone(value) => value,
1859 };
1860 let language_server_status =
1861 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1862 status
1863 } else {
1864 return;
1865 };
1866 match progress {
1867 lsp::WorkDoneProgress::Begin(_) => {
1868 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1869 language_server_status.pending_diagnostic_updates += 1;
1870 if language_server_status.pending_diagnostic_updates == 1 {
1871 self.disk_based_diagnostics_started(cx);
1872 self.broadcast_language_server_update(
1873 server_id,
1874 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1875 proto::LspDiskBasedDiagnosticsUpdating {},
1876 ),
1877 );
1878 }
1879 } else {
1880 self.on_lsp_work_start(server_id, token.clone(), cx);
1881 self.broadcast_language_server_update(
1882 server_id,
1883 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1884 token,
1885 }),
1886 );
1887 }
1888 }
1889 lsp::WorkDoneProgress::Report(report) => {
1890 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1891 self.on_lsp_work_progress(
1892 server_id,
1893 token.clone(),
1894 LanguageServerProgress {
1895 message: report.message.clone(),
1896 percentage: report.percentage.map(|p| p as usize),
1897 last_update_at: Instant::now(),
1898 },
1899 cx,
1900 );
1901 self.broadcast_language_server_update(
1902 server_id,
1903 proto::update_language_server::Variant::WorkProgress(
1904 proto::LspWorkProgress {
1905 token,
1906 message: report.message,
1907 percentage: report.percentage.map(|p| p as u32),
1908 },
1909 ),
1910 );
1911 }
1912 }
1913 lsp::WorkDoneProgress::End(_) => {
1914 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1915 language_server_status.pending_diagnostic_updates -= 1;
1916 if language_server_status.pending_diagnostic_updates == 0 {
1917 self.disk_based_diagnostics_finished(cx);
1918 self.broadcast_language_server_update(
1919 server_id,
1920 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1921 proto::LspDiskBasedDiagnosticsUpdated {},
1922 ),
1923 );
1924 }
1925 } else {
1926 self.on_lsp_work_end(server_id, token.clone(), cx);
1927 self.broadcast_language_server_update(
1928 server_id,
1929 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1930 token,
1931 }),
1932 );
1933 }
1934 }
1935 }
1936 }
1937
1938 fn on_lsp_work_start(
1939 &mut self,
1940 language_server_id: usize,
1941 token: String,
1942 cx: &mut ModelContext<Self>,
1943 ) {
1944 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1945 status.pending_work.insert(
1946 token,
1947 LanguageServerProgress {
1948 message: None,
1949 percentage: None,
1950 last_update_at: Instant::now(),
1951 },
1952 );
1953 cx.notify();
1954 }
1955 }
1956
1957 fn on_lsp_work_progress(
1958 &mut self,
1959 language_server_id: usize,
1960 token: String,
1961 progress: LanguageServerProgress,
1962 cx: &mut ModelContext<Self>,
1963 ) {
1964 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1965 status.pending_work.insert(token, progress);
1966 cx.notify();
1967 }
1968 }
1969
1970 fn on_lsp_work_end(
1971 &mut self,
1972 language_server_id: usize,
1973 token: String,
1974 cx: &mut ModelContext<Self>,
1975 ) {
1976 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1977 status.pending_work.remove(&token);
1978 cx.notify();
1979 }
1980 }
1981
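/// Handle a `workspace/applyEdit` request coming from a language server: apply the
/// edit to the project, remember the resulting transaction so that a pending
/// `workspace/executeCommand` caller can retrieve it, and acknowledge the request.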
1982 async fn on_lsp_workspace_edit(
1983 this: WeakModelHandle<Self>,
1984 params: lsp::ApplyWorkspaceEditParams,
1985 server_id: usize,
1986 adapter: Arc<dyn LspAdapter>,
1987 language_server: Arc<LanguageServer>,
1988 mut cx: AsyncAppContext,
1989 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1990 let this = this
1991 .upgrade(&cx)
1992 .ok_or_else(|| anyhow!("project closed"))?;
1993 let transaction = Self::deserialize_workspace_edit(
1994 this.clone(),
1995 params.edit,
1996 true,
1997 adapter.clone(),
1998 language_server.clone(),
1999 &mut cx,
2000 )
2001 .await
2002 .log_err();
2003 this.update(&mut cx, |this, _| {
2004 if let Some(transaction) = transaction {
2005 this.last_workspace_edits_by_language_server
2006 .insert(server_id, transaction);
2007 }
2008 });
2009 Ok(lsp::ApplyWorkspaceEditResponse {
2010 applied: true,
2011 failed_change: None,
2012 failure_reason: None,
2013 })
2014 }
2015
2016 fn broadcast_language_server_update(
2017 &self,
2018 language_server_id: usize,
2019 event: proto::update_language_server::Variant,
2020 ) {
2021 if let Some(project_id) = self.remote_id() {
2022 self.client
2023 .send(proto::UpdateLanguageServer {
2024 project_id,
2025 language_server_id: language_server_id as u64,
2026 variant: Some(event),
2027 })
2028 .log_err();
2029 }
2030 }
2031
2032 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2033 for (_, server) in self.language_servers.values() {
2034 server
2035 .notify::<lsp::notification::DidChangeConfiguration>(
2036 lsp::DidChangeConfigurationParams {
2037 settings: settings.clone(),
2038 },
2039 )
2040 .ok();
2041 }
2042 *self.language_server_settings.lock() = settings;
2043 }
2044
2045 pub fn language_server_statuses(
2046 &self,
2047 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2048 self.language_server_statuses.values()
2049 }
2050
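/// Convert an LSP diagnostics payload into grouped `DiagnosticEntry` values. A
/// diagnostic whose related information points back at an already-seen primary is
/// treated as "supporting": it doesn't start a group of its own, but its severity
/// and "unnecessary" tag are copied onto the matching secondary entry. Every other
/// diagnostic starts a new group, with its related information (in the same file)
/// recorded as secondary entries of that group.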
2051 pub fn update_diagnostics(
2052 &mut self,
2053 params: lsp::PublishDiagnosticsParams,
2054 disk_based_sources: &[&str],
2055 cx: &mut ModelContext<Self>,
2056 ) -> Result<()> {
2057 let abs_path = params
2058 .uri
2059 .to_file_path()
2060 .map_err(|_| anyhow!("URI is not a file"))?;
2061 let mut next_group_id = 0;
2062 let mut diagnostics = Vec::default();
2063 let mut primary_diagnostic_group_ids = HashMap::default();
2064 let mut sources_by_group_id = HashMap::default();
2065 let mut supporting_diagnostics = HashMap::default();
2066 for diagnostic in &params.diagnostics {
2067 let source = diagnostic.source.as_ref();
2068 let code = diagnostic.code.as_ref().map(|code| match code {
2069 lsp::NumberOrString::Number(code) => code.to_string(),
2070 lsp::NumberOrString::String(code) => code.clone(),
2071 });
2072 let range = range_from_lsp(diagnostic.range);
2073 let is_supporting = diagnostic
2074 .related_information
2075 .as_ref()
2076 .map_or(false, |infos| {
2077 infos.iter().any(|info| {
2078 primary_diagnostic_group_ids.contains_key(&(
2079 source,
2080 code.clone(),
2081 range_from_lsp(info.location.range),
2082 ))
2083 })
2084 });
2085
2086 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2087 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2088 });
2089
2090 if is_supporting {
2091 supporting_diagnostics.insert(
2092 (source, code.clone(), range),
2093 (diagnostic.severity, is_unnecessary),
2094 );
2095 } else {
2096 let group_id = post_inc(&mut next_group_id);
2097 let is_disk_based = source.map_or(false, |source| {
2098 disk_based_sources.contains(&source.as_str())
2099 });
2100
2101 sources_by_group_id.insert(group_id, source);
2102 primary_diagnostic_group_ids
2103 .insert((source, code.clone(), range.clone()), group_id);
2104
2105 diagnostics.push(DiagnosticEntry {
2106 range,
2107 diagnostic: Diagnostic {
2108 code: code.clone(),
2109 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2110 message: diagnostic.message.clone(),
2111 group_id,
2112 is_primary: true,
2113 is_valid: true,
2114 is_disk_based,
2115 is_unnecessary,
2116 },
2117 });
2118 if let Some(infos) = &diagnostic.related_information {
2119 for info in infos {
2120 if info.location.uri == params.uri && !info.message.is_empty() {
2121 let range = range_from_lsp(info.location.range);
2122 diagnostics.push(DiagnosticEntry {
2123 range,
2124 diagnostic: Diagnostic {
2125 code: code.clone(),
2126 severity: DiagnosticSeverity::INFORMATION,
2127 message: info.message.clone(),
2128 group_id,
2129 is_primary: false,
2130 is_valid: true,
2131 is_disk_based,
2132 is_unnecessary: false,
2133 },
2134 });
2135 }
2136 }
2137 }
2138 }
2139 }
2140
2141 for entry in &mut diagnostics {
2142 let diagnostic = &mut entry.diagnostic;
2143 if !diagnostic.is_primary {
2144 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2145 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2146 source,
2147 diagnostic.code.clone(),
2148 entry.range.clone(),
2149 )) {
2150 if let Some(severity) = severity {
2151 diagnostic.severity = severity;
2152 }
2153 diagnostic.is_unnecessary = is_unnecessary;
2154 }
2155 }
2156 }
2157
2158 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2159 Ok(())
2160 }
2161
2162 pub fn update_diagnostic_entries(
2163 &mut self,
2164 abs_path: PathBuf,
2165 version: Option<i32>,
2166 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2167 cx: &mut ModelContext<Project>,
2168 ) -> Result<(), anyhow::Error> {
2169 let (worktree, relative_path) = self
2170 .find_local_worktree(&abs_path, cx)
2171 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2172 if !worktree.read(cx).is_visible() {
2173 return Ok(());
2174 }
2175
2176 let project_path = ProjectPath {
2177 worktree_id: worktree.read(cx).id(),
2178 path: relative_path.into(),
2179 };
2180 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2181 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2182 }
2183
2184 let updated = worktree.update(cx, |worktree, cx| {
2185 worktree
2186 .as_local_mut()
2187 .ok_or_else(|| anyhow!("not a local worktree"))?
2188 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2189 })?;
2190 if updated {
2191 cx.emit(Event::DiagnosticsUpdated(project_path));
2192 }
2193 Ok(())
2194 }
2195
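/// Attach freshly received diagnostics to an open buffer. The entries are sorted,
/// ranges of disk-based diagnostics are remapped through any unsaved edits, all
/// ranges are clipped to the snapshot (empty ranges are widened by one character so
/// they remain visible), and the result replaces the buffer's diagnostic set.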
2196 fn update_buffer_diagnostics(
2197 &mut self,
2198 buffer: &ModelHandle<Buffer>,
2199 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2200 version: Option<i32>,
2201 cx: &mut ModelContext<Self>,
2202 ) -> Result<()> {
2203 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2204 Ordering::Equal
2205 .then_with(|| b.is_primary.cmp(&a.is_primary))
2206 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2207 .then_with(|| a.severity.cmp(&b.severity))
2208 .then_with(|| a.message.cmp(&b.message))
2209 }
2210
2211 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2212
2213 diagnostics.sort_unstable_by(|a, b| {
2214 Ordering::Equal
2215 .then_with(|| a.range.start.cmp(&b.range.start))
2216 .then_with(|| b.range.end.cmp(&a.range.end))
2217 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2218 });
2219
2220 let mut sanitized_diagnostics = Vec::new();
2221 let edits_since_save = Patch::new(
2222 snapshot
2223 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2224 .collect(),
2225 );
2226 for entry in diagnostics {
2227 let start;
2228 let end;
2229 if entry.diagnostic.is_disk_based {
2230 // Some diagnostics are based on files on disk instead of buffers'
2231 // current contents. Adjust these diagnostics' ranges to reflect
2232 // any unsaved edits.
2233 start = edits_since_save.old_to_new(entry.range.start);
2234 end = edits_since_save.old_to_new(entry.range.end);
2235 } else {
2236 start = entry.range.start;
2237 end = entry.range.end;
2238 }
2239
2240 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2241 ..snapshot.clip_point_utf16(end, Bias::Right);
2242
2243 // Expand empty ranges by one character
2244 if range.start == range.end {
2245 range.end.column += 1;
2246 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2247 if range.start == range.end && range.end.column > 0 {
2248 range.start.column -= 1;
2249 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2250 }
2251 }
2252
2253 sanitized_diagnostics.push(DiagnosticEntry {
2254 range,
2255 diagnostic: entry.diagnostic,
2256 });
2257 }
2258 drop(edits_since_save);
2259
2260 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2261 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2262 Ok(())
2263 }
2264
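/// Reload the given buffers from disk, returning one transaction per buffer that
/// actually changed. Dirty local buffers are reloaded directly; buffers belonging
/// to a remote project are reloaded by the host over RPC. When `push_to_history`
/// is false, the transactions are dropped from the buffers' undo history.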
2265 pub fn reload_buffers(
2266 &self,
2267 buffers: HashSet<ModelHandle<Buffer>>,
2268 push_to_history: bool,
2269 cx: &mut ModelContext<Self>,
2270 ) -> Task<Result<ProjectTransaction>> {
2271 let mut local_buffers = Vec::new();
2272 let mut remote_buffers = None;
2273 for buffer_handle in buffers {
2274 let buffer = buffer_handle.read(cx);
2275 if buffer.is_dirty() {
2276 if let Some(file) = File::from_dyn(buffer.file()) {
2277 if file.is_local() {
2278 local_buffers.push(buffer_handle);
2279 } else {
2280 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2281 }
2282 }
2283 }
2284 }
2285
2286 let remote_buffers = self.remote_id().zip(remote_buffers);
2287 let client = self.client.clone();
2288
2289 cx.spawn(|this, mut cx| async move {
2290 let mut project_transaction = ProjectTransaction::default();
2291
2292 if let Some((project_id, remote_buffers)) = remote_buffers {
2293 let response = client
2294 .request(proto::ReloadBuffers {
2295 project_id,
2296 buffer_ids: remote_buffers
2297 .iter()
2298 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2299 .collect(),
2300 })
2301 .await?
2302 .transaction
2303 .ok_or_else(|| anyhow!("missing transaction"))?;
2304 project_transaction = this
2305 .update(&mut cx, |this, cx| {
2306 this.deserialize_project_transaction(response, push_to_history, cx)
2307 })
2308 .await?;
2309 }
2310
2311 for buffer in local_buffers {
2312 let transaction = buffer
2313 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2314 .await?;
2315 buffer.update(&mut cx, |buffer, cx| {
2316 if let Some(transaction) = transaction {
2317 if !push_to_history {
2318 buffer.forget_transaction(transaction.id);
2319 }
2320 project_transaction.0.insert(cx.handle(), transaction);
2321 }
2322 });
2323 }
2324
2325 Ok(project_transaction)
2326 })
2327 }
2328
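/// Format the given buffers. Local buffers are formatted through their language
/// server, preferring whole-document formatting and falling back to range
/// formatting over the entire buffer; remote buffers are formatted by the host.
/// The resulting edits are applied in a single transaction per buffer.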
2329 pub fn format(
2330 &self,
2331 buffers: HashSet<ModelHandle<Buffer>>,
2332 push_to_history: bool,
2333 cx: &mut ModelContext<Project>,
2334 ) -> Task<Result<ProjectTransaction>> {
2335 let mut local_buffers = Vec::new();
2336 let mut remote_buffers = None;
2337 for buffer_handle in buffers {
2338 let buffer = buffer_handle.read(cx);
2339 if let Some(file) = File::from_dyn(buffer.file()) {
2340 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2341 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2342 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2343 }
2344 } else {
2345 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2346 }
2347 } else {
2348 return Task::ready(Ok(Default::default()));
2349 }
2350 }
2351
2352 let remote_buffers = self.remote_id().zip(remote_buffers);
2353 let client = self.client.clone();
2354
2355 cx.spawn(|this, mut cx| async move {
2356 let mut project_transaction = ProjectTransaction::default();
2357
2358 if let Some((project_id, remote_buffers)) = remote_buffers {
2359 let response = client
2360 .request(proto::FormatBuffers {
2361 project_id,
2362 buffer_ids: remote_buffers
2363 .iter()
2364 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2365 .collect(),
2366 })
2367 .await?
2368 .transaction
2369 .ok_or_else(|| anyhow!("missing transaction"))?;
2370 project_transaction = this
2371 .update(&mut cx, |this, cx| {
2372 this.deserialize_project_transaction(response, push_to_history, cx)
2373 })
2374 .await?;
2375 }
2376
2377 for (buffer, buffer_abs_path, language_server) in local_buffers {
2378 let text_document = lsp::TextDocumentIdentifier::new(
2379 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2380 );
2381 let capabilities = &language_server.capabilities();
2382 let tab_size = cx.update(|cx| {
2383 let language_name = buffer.read(cx).language().map(|language| language.name());
2384 cx.global::<Settings>().tab_size(language_name.as_deref())
2385 });
2386 let lsp_edits = if capabilities
2387 .document_formatting_provider
2388 .as_ref()
2389 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2390 {
2391 language_server
2392 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2393 text_document,
2394 options: lsp::FormattingOptions {
2395 tab_size,
2396 insert_spaces: true,
2397 insert_final_newline: Some(true),
2398 ..Default::default()
2399 },
2400 work_done_progress_params: Default::default(),
2401 })
2402 .await?
2403 } else if capabilities
2404 .document_range_formatting_provider
2405 .as_ref()
2406 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2407 {
2408 let buffer_start = lsp::Position::new(0, 0);
2409 let buffer_end =
2410 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2411 language_server
2412 .request::<lsp::request::RangeFormatting>(
2413 lsp::DocumentRangeFormattingParams {
2414 text_document,
2415 range: lsp::Range::new(buffer_start, buffer_end),
2416 options: lsp::FormattingOptions {
2417 tab_size,
2418 insert_spaces: true,
2419 insert_final_newline: Some(true),
2420 ..Default::default()
2421 },
2422 work_done_progress_params: Default::default(),
2423 },
2424 )
2425 .await?
2426 } else {
2427 continue;
2428 };
2429
2430 if let Some(lsp_edits) = lsp_edits {
2431 let edits = this
2432 .update(&mut cx, |this, cx| {
2433 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2434 })
2435 .await?;
2436 buffer.update(&mut cx, |buffer, cx| {
2437 buffer.finalize_last_transaction();
2438 buffer.start_transaction();
2439 for (range, text) in edits {
2440 buffer.edit([(range, text)], cx);
2441 }
2442 if buffer.end_transaction(cx).is_some() {
2443 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2444 if !push_to_history {
2445 buffer.forget_transaction(transaction.id);
2446 }
2447 project_transaction.0.insert(cx.handle(), transaction);
2448 }
2449 });
2450 }
2451 }
2452
2453 Ok(project_transaction)
2454 })
2455 }
2456
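/// Resolve the definition(s) of the symbol at `position`, either through the
/// buffer's language server or, for remote projects, via the host.
///
/// A hypothetical call site (names here are illustrative, not part of this crate):
///
/// ```ignore
/// let definitions = project.update(cx, |project, cx| {
///     project.definition(&buffer, cursor_position, cx)
/// });
/// ```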
2457 pub fn definition<T: ToPointUtf16>(
2458 &self,
2459 buffer: &ModelHandle<Buffer>,
2460 position: T,
2461 cx: &mut ModelContext<Self>,
2462 ) -> Task<Result<Vec<Location>>> {
2463 let position = position.to_point_utf16(buffer.read(cx));
2464 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2465 }
2466
2467 pub fn references<T: ToPointUtf16>(
2468 &self,
2469 buffer: &ModelHandle<Buffer>,
2470 position: T,
2471 cx: &mut ModelContext<Self>,
2472 ) -> Task<Result<Vec<Location>>> {
2473 let position = position.to_point_utf16(buffer.read(cx));
2474 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2475 }
2476
2477 pub fn document_highlights<T: ToPointUtf16>(
2478 &self,
2479 buffer: &ModelHandle<Buffer>,
2480 position: T,
2481 cx: &mut ModelContext<Self>,
2482 ) -> Task<Result<Vec<DocumentHighlight>>> {
2483 let position = position.to_point_utf16(buffer.read(cx));
2484
2485 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2486 }
2487
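/// Run a workspace-wide symbol query. Locally this fans the request out to every
/// running language server and relativizes the returned paths against the matching
/// worktree; on a remote project the query is forwarded to the host.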
2488 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2489 if self.is_local() {
2490 let mut requests = Vec::new();
2491 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2492 let worktree_id = *worktree_id;
2493 if let Some(worktree) = self
2494 .worktree_for_id(worktree_id, cx)
2495 .and_then(|worktree| worktree.read(cx).as_local())
2496 {
2497 let lsp_adapter = lsp_adapter.clone();
2498 let worktree_abs_path = worktree.abs_path().clone();
2499 requests.push(
2500 language_server
2501 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2502 query: query.to_string(),
2503 ..Default::default()
2504 })
2505 .log_err()
2506 .map(move |response| {
2507 (
2508 lsp_adapter,
2509 worktree_id,
2510 worktree_abs_path,
2511 response.unwrap_or_default(),
2512 )
2513 }),
2514 );
2515 }
2516 }
2517
2518 cx.spawn_weak(|this, cx| async move {
2519 let responses = futures::future::join_all(requests).await;
2520 let this = if let Some(this) = this.upgrade(&cx) {
2521 this
2522 } else {
2523 return Ok(Default::default());
2524 };
2525 this.read_with(&cx, |this, cx| {
2526 let mut symbols = Vec::new();
2527 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2528 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2529 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2530 let mut worktree_id = source_worktree_id;
2531 let path;
2532 if let Some((worktree, rel_path)) =
2533 this.find_local_worktree(&abs_path, cx)
2534 {
2535 worktree_id = worktree.read(cx).id();
2536 path = rel_path;
2537 } else {
2538 path = relativize_path(&worktree_abs_path, &abs_path);
2539 }
2540
2541 let label = this
2542 .languages
2543 .select_language(&path)
2544 .and_then(|language| {
2545 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2546 })
2547 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2548 let signature = this.symbol_signature(worktree_id, &path);
2549
2550 Some(Symbol {
2551 source_worktree_id,
2552 worktree_id,
2553 language_server_name: adapter.name(),
2554 name: lsp_symbol.name,
2555 kind: lsp_symbol.kind,
2556 label,
2557 path,
2558 range: range_from_lsp(lsp_symbol.location.range),
2559 signature,
2560 })
2561 }));
2562 }
2563 Ok(symbols)
2564 })
2565 })
2566 } else if let Some(project_id) = self.remote_id() {
2567 let request = self.client.request(proto::GetProjectSymbols {
2568 project_id,
2569 query: query.to_string(),
2570 });
2571 cx.spawn_weak(|this, cx| async move {
2572 let response = request.await?;
2573 let mut symbols = Vec::new();
2574 if let Some(this) = this.upgrade(&cx) {
2575 this.read_with(&cx, |this, _| {
2576 symbols.extend(
2577 response
2578 .symbols
2579 .into_iter()
2580 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2581 );
2582 })
2583 }
2584 Ok(symbols)
2585 })
2586 } else {
2587 Task::ready(Ok(Default::default()))
2588 }
2589 }
2590
2591 pub fn open_buffer_for_symbol(
2592 &mut self,
2593 symbol: &Symbol,
2594 cx: &mut ModelContext<Self>,
2595 ) -> Task<Result<ModelHandle<Buffer>>> {
2596 if self.is_local() {
2597 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2598 symbol.source_worktree_id,
2599 symbol.language_server_name.clone(),
2600 )) {
2601 server.clone()
2602 } else {
2603 return Task::ready(Err(anyhow!(
2604 "language server for worktree and language not found"
2605 )));
2606 };
2607
2608 let worktree_abs_path = if let Some(worktree_abs_path) = self
2609 .worktree_for_id(symbol.worktree_id, cx)
2610 .and_then(|worktree| worktree.read(cx).as_local())
2611 .map(|local_worktree| local_worktree.abs_path())
2612 {
2613 worktree_abs_path
2614 } else {
2615 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2616 };
2617 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2618 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2619 uri
2620 } else {
2621 return Task::ready(Err(anyhow!("invalid symbol path")));
2622 };
2623
2624 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2625 } else if let Some(project_id) = self.remote_id() {
2626 let request = self.client.request(proto::OpenBufferForSymbol {
2627 project_id,
2628 symbol: Some(serialize_symbol(symbol)),
2629 });
2630 cx.spawn(|this, mut cx| async move {
2631 let response = request.await?;
2632 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2633 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2634 .await
2635 })
2636 } else {
2637 Task::ready(Err(anyhow!("project does not have a remote id")))
2638 }
2639 }
2640
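/// Request completions at `position` from the buffer's language server (or from
/// the host over RPC). Completions whose text edit falls outside the current
/// buffer contents are discarded, and completions without an explicit edit range
/// fall back to the word token surrounding the cursor.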
2641 pub fn completions<T: ToPointUtf16>(
2642 &self,
2643 source_buffer_handle: &ModelHandle<Buffer>,
2644 position: T,
2645 cx: &mut ModelContext<Self>,
2646 ) -> Task<Result<Vec<Completion>>> {
2647 let source_buffer_handle = source_buffer_handle.clone();
2648 let source_buffer = source_buffer_handle.read(cx);
2649 let buffer_id = source_buffer.remote_id();
2650 let language = source_buffer.language().cloned();
2651 let worktree;
2652 let buffer_abs_path;
2653 if let Some(file) = File::from_dyn(source_buffer.file()) {
2654 worktree = file.worktree.clone();
2655 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2656 } else {
2657 return Task::ready(Ok(Default::default()));
2658 };
2659
2660 let position = position.to_point_utf16(source_buffer);
2661 let anchor = source_buffer.anchor_after(position);
2662
2663 if worktree.read(cx).as_local().is_some() {
2664 let buffer_abs_path = buffer_abs_path.unwrap();
2665 let (_, lang_server) =
2666 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2667 server.clone()
2668 } else {
2669 return Task::ready(Ok(Default::default()));
2670 };
2671
2672 cx.spawn(|_, cx| async move {
2673 let completions = lang_server
2674 .request::<lsp::request::Completion>(lsp::CompletionParams {
2675 text_document_position: lsp::TextDocumentPositionParams::new(
2676 lsp::TextDocumentIdentifier::new(
2677 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2678 ),
2679 point_to_lsp(position),
2680 ),
2681 context: Default::default(),
2682 work_done_progress_params: Default::default(),
2683 partial_result_params: Default::default(),
2684 })
2685 .await
2686 .context("lsp completion request failed")?;
2687
2688 let completions = if let Some(completions) = completions {
2689 match completions {
2690 lsp::CompletionResponse::Array(completions) => completions,
2691 lsp::CompletionResponse::List(list) => list.items,
2692 }
2693 } else {
2694 Default::default()
2695 };
2696
2697 source_buffer_handle.read_with(&cx, |this, _| {
2698 let snapshot = this.snapshot();
2699 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2700 let mut range_for_token = None;
2701 Ok(completions
2702 .into_iter()
2703 .filter_map(|lsp_completion| {
2704 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2705 // If the language server provides a range to overwrite, then
2706 // check that the range is valid.
2707 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2708 let range = range_from_lsp(edit.range);
2709 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2710 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2711 if start != range.start || end != range.end {
2712 log::info!("completion out of expected range");
2713 return None;
2714 }
2715 (
2716 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2717 edit.new_text.clone(),
2718 )
2719 }
2720 // If the language server does not provide a range, then infer
2721 // the range based on the syntax tree.
2722 None => {
2723 if position != clipped_position {
2724 log::info!("completion out of expected range");
2725 return None;
2726 }
2727 let Range { start, end } = range_for_token
2728 .get_or_insert_with(|| {
2729 let offset = position.to_offset(&snapshot);
2730 snapshot
2731 .range_for_word_token_at(offset)
2732 .unwrap_or_else(|| offset..offset)
2733 })
2734 .clone();
2735 let text = lsp_completion
2736 .insert_text
2737 .as_ref()
2738 .unwrap_or(&lsp_completion.label)
2739 .clone();
2740 (
2741 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2742 text.clone(),
2743 )
2744 }
2745 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2746 log::info!("unsupported insert/replace completion");
2747 return None;
2748 }
2749 };
2750
2751 Some(Completion {
2752 old_range,
2753 new_text,
2754 label: language
2755 .as_ref()
2756 .and_then(|l| l.label_for_completion(&lsp_completion))
2757 .unwrap_or_else(|| {
2758 CodeLabel::plain(
2759 lsp_completion.label.clone(),
2760 lsp_completion.filter_text.as_deref(),
2761 )
2762 }),
2763 lsp_completion,
2764 })
2765 })
2766 .collect())
2767 })
2768 })
2769 } else if let Some(project_id) = self.remote_id() {
2770 let rpc = self.client.clone();
2771 let message = proto::GetCompletions {
2772 project_id,
2773 buffer_id,
2774 position: Some(language::proto::serialize_anchor(&anchor)),
2775 version: serialize_version(&source_buffer.version()),
2776 };
2777 cx.spawn_weak(|_, mut cx| async move {
2778 let response = rpc.request(message).await?;
2779
2780 source_buffer_handle
2781 .update(&mut cx, |buffer, _| {
2782 buffer.wait_for_version(deserialize_version(response.version))
2783 })
2784 .await;
2785
2786 response
2787 .completions
2788 .into_iter()
2789 .map(|completion| {
2790 language::proto::deserialize_completion(completion, language.as_ref())
2791 })
2792 .collect()
2793 })
2794 } else {
2795 Task::ready(Ok(Default::default()))
2796 }
2797 }
2798
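/// Resolve a completion item and apply any `additionalTextEdits` it carries (for
/// example, auto-inserted imports), returning the applied transaction, if any.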
2799 pub fn apply_additional_edits_for_completion(
2800 &self,
2801 buffer_handle: ModelHandle<Buffer>,
2802 completion: Completion,
2803 push_to_history: bool,
2804 cx: &mut ModelContext<Self>,
2805 ) -> Task<Result<Option<Transaction>>> {
2806 let buffer = buffer_handle.read(cx);
2807 let buffer_id = buffer.remote_id();
2808
2809 if self.is_local() {
2810 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2811 {
2812 server.clone()
2813 } else {
2814 return Task::ready(Ok(Default::default()));
2815 };
2816
2817 cx.spawn(|this, mut cx| async move {
2818 let resolved_completion = lang_server
2819 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2820 .await?;
2821 if let Some(edits) = resolved_completion.additional_text_edits {
2822 let edits = this
2823 .update(&mut cx, |this, cx| {
2824 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2825 })
2826 .await?;
2827 buffer_handle.update(&mut cx, |buffer, cx| {
2828 buffer.finalize_last_transaction();
2829 buffer.start_transaction();
2830 for (range, text) in edits {
2831 buffer.edit([(range, text)], cx);
2832 }
2833 let transaction = if buffer.end_transaction(cx).is_some() {
2834 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2835 if !push_to_history {
2836 buffer.forget_transaction(transaction.id);
2837 }
2838 Some(transaction)
2839 } else {
2840 None
2841 };
2842 Ok(transaction)
2843 })
2844 } else {
2845 Ok(None)
2846 }
2847 })
2848 } else if let Some(project_id) = self.remote_id() {
2849 let client = self.client.clone();
2850 cx.spawn(|_, mut cx| async move {
2851 let response = client
2852 .request(proto::ApplyCompletionAdditionalEdits {
2853 project_id,
2854 buffer_id,
2855 completion: Some(language::proto::serialize_completion(&completion)),
2856 })
2857 .await?;
2858
2859 if let Some(transaction) = response.transaction {
2860 let transaction = language::proto::deserialize_transaction(transaction)?;
2861 buffer_handle
2862 .update(&mut cx, |buffer, _| {
2863 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2864 })
2865 .await;
2866 if push_to_history {
2867 buffer_handle.update(&mut cx, |buffer, _| {
2868 buffer.push_transaction(transaction.clone(), Instant::now());
2869 });
2870 }
2871 Ok(Some(transaction))
2872 } else {
2873 Ok(None)
2874 }
2875 })
2876 } else {
2877 Task::ready(Err(anyhow!("project does not have a remote id")))
2878 }
2879 }
2880
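/// Request code actions for `range`, passing the diagnostics that overlap the
/// range as context and restricting the response to quickfix, refactor,
/// refactor-extract, and source kinds.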
2881 pub fn code_actions<T: Clone + ToOffset>(
2882 &self,
2883 buffer_handle: &ModelHandle<Buffer>,
2884 range: Range<T>,
2885 cx: &mut ModelContext<Self>,
2886 ) -> Task<Result<Vec<CodeAction>>> {
2887 let buffer_handle = buffer_handle.clone();
2888 let buffer = buffer_handle.read(cx);
2889 let snapshot = buffer.snapshot();
2890 let relevant_diagnostics = snapshot
2891 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2892 .map(|entry| entry.to_lsp_diagnostic_stub())
2893 .collect();
2894 let buffer_id = buffer.remote_id();
2895 let worktree;
2896 let buffer_abs_path;
2897 if let Some(file) = File::from_dyn(buffer.file()) {
2898 worktree = file.worktree.clone();
2899 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2900 } else {
2901 return Task::ready(Ok(Default::default()));
2902 };
2903 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2904
2905 if worktree.read(cx).as_local().is_some() {
2906 let buffer_abs_path = buffer_abs_path.unwrap();
2907 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2908 {
2909 server.clone()
2910 } else {
2911 return Task::ready(Ok(Default::default()));
2912 };
2913
2914 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2915 cx.foreground().spawn(async move {
2916 if lang_server.capabilities().code_action_provider.is_none() {
2917 return Ok(Default::default());
2918 }
2919
2920 Ok(lang_server
2921 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2922 text_document: lsp::TextDocumentIdentifier::new(
2923 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2924 ),
2925 range: lsp_range,
2926 work_done_progress_params: Default::default(),
2927 partial_result_params: Default::default(),
2928 context: lsp::CodeActionContext {
2929 diagnostics: relevant_diagnostics,
2930 only: Some(vec![
2931 lsp::CodeActionKind::QUICKFIX,
2932 lsp::CodeActionKind::REFACTOR,
2933 lsp::CodeActionKind::REFACTOR_EXTRACT,
2934 lsp::CodeActionKind::SOURCE,
2935 ]),
2936 },
2937 })
2938 .await?
2939 .unwrap_or_default()
2940 .into_iter()
2941 .filter_map(|entry| {
2942 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2943 Some(CodeAction {
2944 range: range.clone(),
2945 lsp_action,
2946 })
2947 } else {
2948 None
2949 }
2950 })
2951 .collect())
2952 })
2953 } else if let Some(project_id) = self.remote_id() {
2954 let rpc = self.client.clone();
2955 let version = buffer.version();
2956 cx.spawn_weak(|_, mut cx| async move {
2957 let response = rpc
2958 .request(proto::GetCodeActions {
2959 project_id,
2960 buffer_id,
2961 start: Some(language::proto::serialize_anchor(&range.start)),
2962 end: Some(language::proto::serialize_anchor(&range.end)),
2963 version: serialize_version(&version),
2964 })
2965 .await?;
2966
2967 buffer_handle
2968 .update(&mut cx, |buffer, _| {
2969 buffer.wait_for_version(deserialize_version(response.version))
2970 })
2971 .await;
2972
2973 response
2974 .actions
2975 .into_iter()
2976 .map(language::proto::deserialize_code_action)
2977 .collect()
2978 })
2979 } else {
2980 Task::ready(Ok(Default::default()))
2981 }
2982 }
2983
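/// Apply a previously returned code action. The action is re-resolved (or
/// re-fetched if the server provided no resolve data), then its workspace edit is
/// applied, or its command is executed and the workspace edit produced by the
/// server is picked up from `last_workspace_edits_by_language_server`.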
2984 pub fn apply_code_action(
2985 &self,
2986 buffer_handle: ModelHandle<Buffer>,
2987 mut action: CodeAction,
2988 push_to_history: bool,
2989 cx: &mut ModelContext<Self>,
2990 ) -> Task<Result<ProjectTransaction>> {
2991 if self.is_local() {
2992 let buffer = buffer_handle.read(cx);
2993 let (lsp_adapter, lang_server) =
2994 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2995 server.clone()
2996 } else {
2997 return Task::ready(Ok(Default::default()));
2998 };
2999 let range = action.range.to_point_utf16(buffer);
3000
3001 cx.spawn(|this, mut cx| async move {
3002 if let Some(lsp_range) = action
3003 .lsp_action
3004 .data
3005 .as_mut()
3006 .and_then(|d| d.get_mut("codeActionParams"))
3007 .and_then(|d| d.get_mut("range"))
3008 {
3009 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3010 action.lsp_action = lang_server
3011 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3012 .await?;
3013 } else {
3014 let actions = this
3015 .update(&mut cx, |this, cx| {
3016 this.code_actions(&buffer_handle, action.range, cx)
3017 })
3018 .await?;
3019 action.lsp_action = actions
3020 .into_iter()
3021 .find(|a| a.lsp_action.title == action.lsp_action.title)
3022 .ok_or_else(|| anyhow!("code action is outdated"))?
3023 .lsp_action;
3024 }
3025
3026 if let Some(edit) = action.lsp_action.edit {
3027 Self::deserialize_workspace_edit(
3028 this,
3029 edit,
3030 push_to_history,
3031 lsp_adapter,
3032 lang_server,
3033 &mut cx,
3034 )
3035 .await
3036 } else if let Some(command) = action.lsp_action.command {
3037 this.update(&mut cx, |this, _| {
3038 this.last_workspace_edits_by_language_server
3039 .remove(&lang_server.server_id());
3040 });
3041 lang_server
3042 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3043 command: command.command,
3044 arguments: command.arguments.unwrap_or_default(),
3045 ..Default::default()
3046 })
3047 .await?;
3048 Ok(this.update(&mut cx, |this, _| {
3049 this.last_workspace_edits_by_language_server
3050 .remove(&lang_server.server_id())
3051 .unwrap_or_default()
3052 }))
3053 } else {
3054 Ok(ProjectTransaction::default())
3055 }
3056 })
3057 } else if let Some(project_id) = self.remote_id() {
3058 let client = self.client.clone();
3059 let request = proto::ApplyCodeAction {
3060 project_id,
3061 buffer_id: buffer_handle.read(cx).remote_id(),
3062 action: Some(language::proto::serialize_code_action(&action)),
3063 };
3064 cx.spawn(|this, mut cx| async move {
3065 let response = client
3066 .request(request)
3067 .await?
3068 .transaction
3069 .ok_or_else(|| anyhow!("missing transaction"))?;
3070 this.update(&mut cx, |this, cx| {
3071 this.deserialize_project_transaction(response, push_to_history, cx)
3072 })
3073 .await
3074 })
3075 } else {
3076 Task::ready(Err(anyhow!("project does not have a remote id")))
3077 }
3078 }
3079
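/// Apply an LSP `WorkspaceEdit` to the project: resource operations (create,
/// rename, delete) are performed through the `Fs` trait, text edits are applied to
/// buffers opened via the language server, and all resulting transactions are
/// collected into a single `ProjectTransaction`.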
3080 async fn deserialize_workspace_edit(
3081 this: ModelHandle<Self>,
3082 edit: lsp::WorkspaceEdit,
3083 push_to_history: bool,
3084 lsp_adapter: Arc<dyn LspAdapter>,
3085 language_server: Arc<LanguageServer>,
3086 cx: &mut AsyncAppContext,
3087 ) -> Result<ProjectTransaction> {
3088 let fs = this.read_with(cx, |this, _| this.fs.clone());
3089 let mut operations = Vec::new();
3090 if let Some(document_changes) = edit.document_changes {
3091 match document_changes {
3092 lsp::DocumentChanges::Edits(edits) => {
3093 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3094 }
3095 lsp::DocumentChanges::Operations(ops) => operations = ops,
3096 }
3097 } else if let Some(changes) = edit.changes {
3098 operations.extend(changes.into_iter().map(|(uri, edits)| {
3099 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3100 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3101 uri,
3102 version: None,
3103 },
3104 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3105 })
3106 }));
3107 }
3108
3109 let mut project_transaction = ProjectTransaction::default();
3110 for operation in operations {
3111 match operation {
3112 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3113 let abs_path = op
3114 .uri
3115 .to_file_path()
3116 .map_err(|_| anyhow!("can't convert URI to path"))?;
3117
3118 if let Some(parent_path) = abs_path.parent() {
3119 fs.create_dir(parent_path).await?;
3120 }
3121 if abs_path.ends_with("/") {
3122 fs.create_dir(&abs_path).await?;
3123 } else {
3124 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3125 .await?;
3126 }
3127 }
3128 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3129 let source_abs_path = op
3130 .old_uri
3131 .to_file_path()
3132 .map_err(|_| anyhow!("can't convert URI to path"))?;
3133 let target_abs_path = op
3134 .new_uri
3135 .to_file_path()
3136 .map_err(|_| anyhow!("can't convert URI to path"))?;
3137 fs.rename(
3138 &source_abs_path,
3139 &target_abs_path,
3140 op.options.map(Into::into).unwrap_or_default(),
3141 )
3142 .await?;
3143 }
3144 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3145 let abs_path = op
3146 .uri
3147 .to_file_path()
3148 .map_err(|_| anyhow!("can't convert URI to path"))?;
3149 let options = op.options.map(Into::into).unwrap_or_default();
3150 if abs_path.ends_with("/") {
3151 fs.remove_dir(&abs_path, options).await?;
3152 } else {
3153 fs.remove_file(&abs_path, options).await?;
3154 }
3155 }
3156 lsp::DocumentChangeOperation::Edit(op) => {
3157 let buffer_to_edit = this
3158 .update(cx, |this, cx| {
3159 this.open_local_buffer_via_lsp(
3160 op.text_document.uri,
3161 lsp_adapter.clone(),
3162 language_server.clone(),
3163 cx,
3164 )
3165 })
3166 .await?;
3167
3168 let edits = this
3169 .update(cx, |this, cx| {
3170 let edits = op.edits.into_iter().map(|edit| match edit {
3171 lsp::OneOf::Left(edit) => edit,
3172 lsp::OneOf::Right(edit) => edit.text_edit,
3173 });
3174 this.edits_from_lsp(
3175 &buffer_to_edit,
3176 edits,
3177 op.text_document.version,
3178 cx,
3179 )
3180 })
3181 .await?;
3182
3183 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3184 buffer.finalize_last_transaction();
3185 buffer.start_transaction();
3186 for (range, text) in edits {
3187 buffer.edit([(range, text)], cx);
3188 }
3189 let transaction = if buffer.end_transaction(cx).is_some() {
3190 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3191 if !push_to_history {
3192 buffer.forget_transaction(transaction.id);
3193 }
3194 Some(transaction)
3195 } else {
3196 None
3197 };
3198
3199 transaction
3200 });
3201 if let Some(transaction) = transaction {
3202 project_transaction.0.insert(buffer_to_edit, transaction);
3203 }
3204 }
3205 }
3206 }
3207
3208 Ok(project_transaction)
3209 }
3210
3211 pub fn prepare_rename<T: ToPointUtf16>(
3212 &self,
3213 buffer: ModelHandle<Buffer>,
3214 position: T,
3215 cx: &mut ModelContext<Self>,
3216 ) -> Task<Result<Option<Range<Anchor>>>> {
3217 let position = position.to_point_utf16(buffer.read(cx));
3218 self.request_lsp(buffer, PrepareRename { position }, cx)
3219 }
3220
3221 pub fn perform_rename<T: ToPointUtf16>(
3222 &self,
3223 buffer: ModelHandle<Buffer>,
3224 position: T,
3225 new_name: String,
3226 push_to_history: bool,
3227 cx: &mut ModelContext<Self>,
3228 ) -> Task<Result<ProjectTransaction>> {
3229 let position = position.to_point_utf16(buffer.read(cx));
3230 self.request_lsp(
3231 buffer,
3232 PerformRename {
3233 position,
3234 new_name,
3235 push_to_history,
3236 },
3237 cx,
3238 )
3239 }
3240
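/// Search the project. Locally, worker tasks scan the visible worktrees for files
/// whose contents match the query, matching files are opened as buffers, and a
/// second pool of workers searches each buffer snapshot, yielding anchor ranges
/// per buffer. Remote projects forward the query to the host.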
3241 pub fn search(
3242 &self,
3243 query: SearchQuery,
3244 cx: &mut ModelContext<Self>,
3245 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3246 if self.is_local() {
3247 let snapshots = self
3248 .visible_worktrees(cx)
3249 .filter_map(|tree| {
3250 let tree = tree.read(cx).as_local()?;
3251 Some(tree.snapshot())
3252 })
3253 .collect::<Vec<_>>();
3254
3255 let background = cx.background().clone();
3256 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3257 if path_count == 0 {
3258 return Task::ready(Ok(Default::default()));
3259 }
3260 let workers = background.num_cpus().min(path_count);
3261 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3262 cx.background()
3263 .spawn({
3264 let fs = self.fs.clone();
3265 let background = cx.background().clone();
3266 let query = query.clone();
3267 async move {
3268 let fs = &fs;
3269 let query = &query;
3270 let matching_paths_tx = &matching_paths_tx;
3271 let paths_per_worker = (path_count + workers - 1) / workers;
3272 let snapshots = &snapshots;
3273 background
3274 .scoped(|scope| {
3275 for worker_ix in 0..workers {
3276 let worker_start_ix = worker_ix * paths_per_worker;
3277 let worker_end_ix = worker_start_ix + paths_per_worker;
3278 scope.spawn(async move {
3279 let mut snapshot_start_ix = 0;
3280 let mut abs_path = PathBuf::new();
3281 for snapshot in snapshots {
3282 let snapshot_end_ix =
3283 snapshot_start_ix + snapshot.visible_file_count();
3284 if worker_end_ix <= snapshot_start_ix {
3285 break;
3286 } else if worker_start_ix > snapshot_end_ix {
3287 snapshot_start_ix = snapshot_end_ix;
3288 continue;
3289 } else {
3290 let start_in_snapshot = worker_start_ix
3291 .saturating_sub(snapshot_start_ix);
3292 let end_in_snapshot =
3293 cmp::min(worker_end_ix, snapshot_end_ix)
3294 - snapshot_start_ix;
3295
3296 for entry in snapshot
3297 .files(false, start_in_snapshot)
3298 .take(end_in_snapshot - start_in_snapshot)
3299 {
3300 if matching_paths_tx.is_closed() {
3301 break;
3302 }
3303
3304 abs_path.clear();
3305 abs_path.push(&snapshot.abs_path());
3306 abs_path.push(&entry.path);
3307 let matches = if let Some(file) =
3308 fs.open_sync(&abs_path).await.log_err()
3309 {
3310 query.detect(file).unwrap_or(false)
3311 } else {
3312 false
3313 };
3314
3315 if matches {
3316 let project_path =
3317 (snapshot.id(), entry.path.clone());
3318 if matching_paths_tx
3319 .send(project_path)
3320 .await
3321 .is_err()
3322 {
3323 break;
3324 }
3325 }
3326 }
3327
3328 snapshot_start_ix = snapshot_end_ix;
3329 }
3330 }
3331 });
3332 }
3333 })
3334 .await;
3335 }
3336 })
3337 .detach();
3338
3339 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3340 let open_buffers = self
3341 .opened_buffers
3342 .values()
3343 .filter_map(|b| b.upgrade(cx))
3344 .collect::<HashSet<_>>();
3345 cx.spawn(|this, cx| async move {
3346 for buffer in &open_buffers {
3347 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3348 buffers_tx.send((buffer.clone(), snapshot)).await?;
3349 }
3350
3351 let open_buffers = Rc::new(RefCell::new(open_buffers));
3352 while let Some(project_path) = matching_paths_rx.next().await {
3353 if buffers_tx.is_closed() {
3354 break;
3355 }
3356
3357 let this = this.clone();
3358 let open_buffers = open_buffers.clone();
3359 let buffers_tx = buffers_tx.clone();
3360 cx.spawn(|mut cx| async move {
3361 if let Some(buffer) = this
3362 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3363 .await
3364 .log_err()
3365 {
3366 if open_buffers.borrow_mut().insert(buffer.clone()) {
3367 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3368 buffers_tx.send((buffer, snapshot)).await?;
3369 }
3370 }
3371
3372 Ok::<_, anyhow::Error>(())
3373 })
3374 .detach();
3375 }
3376
3377 Ok::<_, anyhow::Error>(())
3378 })
3379 .detach_and_log_err(cx);
3380
3381 let background = cx.background().clone();
3382 cx.background().spawn(async move {
3383 let query = &query;
3384 let mut matched_buffers = Vec::new();
3385 for _ in 0..workers {
3386 matched_buffers.push(HashMap::default());
3387 }
3388 background
3389 .scoped(|scope| {
3390 for worker_matched_buffers in matched_buffers.iter_mut() {
3391 let mut buffers_rx = buffers_rx.clone();
3392 scope.spawn(async move {
3393 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3394 let buffer_matches = query
3395 .search(snapshot.as_rope())
3396 .await
3397 .iter()
3398 .map(|range| {
3399 snapshot.anchor_before(range.start)
3400 ..snapshot.anchor_after(range.end)
3401 })
3402 .collect::<Vec<_>>();
3403 if !buffer_matches.is_empty() {
3404 worker_matched_buffers
3405 .insert(buffer.clone(), buffer_matches);
3406 }
3407 }
3408 });
3409 }
3410 })
3411 .await;
3412 Ok(matched_buffers.into_iter().flatten().collect())
3413 })
3414 } else if let Some(project_id) = self.remote_id() {
3415 let request = self.client.request(query.to_proto(project_id));
3416 cx.spawn(|this, mut cx| async move {
3417 let response = request.await?;
3418 let mut result = HashMap::default();
3419 for location in response.locations {
3420 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3421 let target_buffer = this
3422 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3423 .await?;
3424 let start = location
3425 .start
3426 .and_then(deserialize_anchor)
3427 .ok_or_else(|| anyhow!("missing target start"))?;
3428 let end = location
3429 .end
3430 .and_then(deserialize_anchor)
3431 .ok_or_else(|| anyhow!("missing target end"))?;
3432 result
3433 .entry(target_buffer)
3434 .or_insert(Vec::new())
3435 .push(start..end)
3436 }
3437 Ok(result)
3438 })
3439 } else {
3440 Task::ready(Ok(Default::default()))
3441 }
3442 }
3443
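/// Dispatch a typed LSP command either to the buffer's local language server or,
/// for remote projects, to the host over RPC, deserializing the response into the
/// command's result type. Returns a default value when neither is available.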
3444 fn request_lsp<R: LspCommand>(
3445 &self,
3446 buffer_handle: ModelHandle<Buffer>,
3447 request: R,
3448 cx: &mut ModelContext<Self>,
3449 ) -> Task<Result<R::Response>>
3450 where
3451 <R::LspRequest as lsp::request::Request>::Result: Send,
3452 {
3453 let buffer = buffer_handle.read(cx);
3454 if self.is_local() {
3455 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3456 if let Some((file, (_, language_server))) =
3457 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3458 {
3459 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3460 return cx.spawn(|this, cx| async move {
3461 if !request.check_capabilities(&language_server.capabilities()) {
3462 return Ok(Default::default());
3463 }
3464
3465 let response = language_server
3466 .request::<R::LspRequest>(lsp_params)
3467 .await
3468 .context("lsp request failed")?;
3469 request
3470 .response_from_lsp(response, this, buffer_handle, cx)
3471 .await
3472 });
3473 }
3474 } else if let Some(project_id) = self.remote_id() {
3475 let rpc = self.client.clone();
3476 let message = request.to_proto(project_id, buffer);
3477 return cx.spawn(|this, cx| async move {
3478 let response = rpc.request(message).await?;
3479 request
3480 .response_from_proto(response, this, buffer_handle, cx)
3481 .await
3482 });
3483 }
3484 Task::ready(Ok(Default::default()))
3485 }
3486
3487 pub fn find_or_create_local_worktree(
3488 &mut self,
3489 abs_path: impl AsRef<Path>,
3490 visible: bool,
3491 cx: &mut ModelContext<Self>,
3492 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3493 let abs_path = abs_path.as_ref();
3494 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3495 Task::ready(Ok((tree.clone(), relative_path.into())))
3496 } else {
3497 let worktree = self.create_local_worktree(abs_path, visible, cx);
3498 cx.foreground()
3499 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3500 }
3501 }
3502
3503 pub fn find_local_worktree(
3504 &self,
3505 abs_path: &Path,
3506 cx: &AppContext,
3507 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3508 for tree in self.worktrees(cx) {
3509 if let Some(relative_path) = tree
3510 .read(cx)
3511 .as_local()
3512 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3513 {
3514 return Some((tree.clone(), relative_path.into()));
3515 }
3516 }
3517 None
3518 }
3519
3520 pub fn is_shared(&self) -> bool {
3521 match &self.client_state {
3522 ProjectClientState::Local { is_shared, .. } => *is_shared,
3523 ProjectClientState::Remote { .. } => false,
3524 }
3525 }
3526
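/// Create (or reuse an in-flight load of) a local worktree rooted at `abs_path`.
/// Concurrent requests for the same path share one task via
/// `loading_local_worktrees`, and when the project has a remote id the new
/// worktree is shared or registered with the server as appropriate.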
3527 fn create_local_worktree(
3528 &mut self,
3529 abs_path: impl AsRef<Path>,
3530 visible: bool,
3531 cx: &mut ModelContext<Self>,
3532 ) -> Task<Result<ModelHandle<Worktree>>> {
3533 let fs = self.fs.clone();
3534 let client = self.client.clone();
3535 let next_entry_id = self.next_entry_id.clone();
3536 let path: Arc<Path> = abs_path.as_ref().into();
3537 let task = self
3538 .loading_local_worktrees
3539 .entry(path.clone())
3540 .or_insert_with(|| {
3541 cx.spawn(|project, mut cx| {
3542 async move {
3543 let worktree = Worktree::local(
3544 client.clone(),
3545 path.clone(),
3546 visible,
3547 fs,
3548 next_entry_id,
3549 &mut cx,
3550 )
3551 .await;
3552 project.update(&mut cx, |project, _| {
3553 project.loading_local_worktrees.remove(&path);
3554 });
3555 let worktree = worktree?;
3556
3557 let remote_project_id = project.update(&mut cx, |project, cx| {
3558 project.add_worktree(&worktree, cx);
3559 project.remote_id()
3560 });
3561
3562 if let Some(project_id) = remote_project_id {
3563 // Because sharing is async, we may have *unshared* the project by the time it completes,
3564 // in which case we need to register the worktree instead.
3565 loop {
3566 if project.read_with(&cx, |project, _| project.is_shared()) {
3567 if worktree
3568 .update(&mut cx, |worktree, cx| {
3569 worktree.as_local_mut().unwrap().share(project_id, cx)
3570 })
3571 .await
3572 .is_ok()
3573 {
3574 break;
3575 }
3576 } else {
3577 worktree
3578 .update(&mut cx, |worktree, cx| {
3579 worktree
3580 .as_local_mut()
3581 .unwrap()
3582 .register(project_id, cx)
3583 })
3584 .await?;
3585 break;
3586 }
3587 }
3588 }
3589
3590 Ok(worktree)
3591 }
3592 .map_err(Arc::new)
3593 })
3594 .shared()
3595 })
3596 .clone();
3597 cx.foreground().spawn(async move {
3598 match task.await {
3599 Ok(worktree) => Ok(worktree),
3600 Err(err) => Err(anyhow!("{}", err)),
3601 }
3602 })
3603 }
3604
3605 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3606 self.worktrees.retain(|worktree| {
3607 worktree
3608 .upgrade(cx)
3609 .map_or(false, |w| w.read(cx).id() != id)
3610 });
3611 cx.notify();
3612 }
3613
3614 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3615 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3616 if worktree.read(cx).is_local() {
3617 cx.subscribe(&worktree, |this, worktree, _, cx| {
3618 this.update_local_worktree_buffers(worktree, cx);
3619 })
3620 .detach();
3621 }
3622
3623 let push_strong_handle = {
3624 let worktree = worktree.read(cx);
3625 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3626 };
3627 if push_strong_handle {
3628 self.worktrees
3629 .push(WorktreeHandle::Strong(worktree.clone()));
3630 } else {
3631 cx.observe_release(&worktree, |this, _, cx| {
3632 this.worktrees
3633 .retain(|worktree| worktree.upgrade(cx).is_some());
3634 cx.notify();
3635 })
3636 .detach();
3637 self.worktrees
3638 .push(WorktreeHandle::Weak(worktree.downgrade()));
3639 }
3640 cx.notify();
3641 }
3642
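/// Reconcile open buffers with a new snapshot of a local worktree: refresh each
/// buffer's `File` (entry id, mtime, path), notify remote collaborators of the
/// change, and re-register buffers whose on-disk path changed with their language
/// server.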
3643 fn update_local_worktree_buffers(
3644 &mut self,
3645 worktree_handle: ModelHandle<Worktree>,
3646 cx: &mut ModelContext<Self>,
3647 ) {
3648 let snapshot = worktree_handle.read(cx).snapshot();
3649 let mut buffers_to_delete = Vec::new();
3650 let mut renamed_buffers = Vec::new();
3651 for (buffer_id, buffer) in &self.opened_buffers {
3652 if let Some(buffer) = buffer.upgrade(cx) {
3653 buffer.update(cx, |buffer, cx| {
3654 if let Some(old_file) = File::from_dyn(buffer.file()) {
3655 if old_file.worktree != worktree_handle {
3656 return;
3657 }
3658
3659 let new_file = if let Some(entry) = old_file
3660 .entry_id
3661 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3662 {
3663 File {
3664 is_local: true,
3665 entry_id: Some(entry.id),
3666 mtime: entry.mtime,
3667 path: entry.path.clone(),
3668 worktree: worktree_handle.clone(),
3669 }
3670 } else if let Some(entry) =
3671 snapshot.entry_for_path(old_file.path().as_ref())
3672 {
3673 File {
3674 is_local: true,
3675 entry_id: Some(entry.id),
3676 mtime: entry.mtime,
3677 path: entry.path.clone(),
3678 worktree: worktree_handle.clone(),
3679 }
3680 } else {
3681 File {
3682 is_local: true,
3683 entry_id: None,
3684 path: old_file.path().clone(),
3685 mtime: old_file.mtime(),
3686 worktree: worktree_handle.clone(),
3687 }
3688 };
3689
3690 let old_path = old_file.abs_path(cx);
3691 if new_file.abs_path(cx) != old_path {
3692 renamed_buffers.push((cx.handle(), old_path));
3693 }
3694
3695 if let Some(project_id) = self.remote_id() {
3696 self.client
3697 .send(proto::UpdateBufferFile {
3698 project_id,
3699 buffer_id: *buffer_id as u64,
3700 file: Some(new_file.to_proto()),
3701 })
3702 .log_err();
3703 }
3704 buffer.file_updated(Box::new(new_file), cx).detach();
3705 }
3706 });
3707 } else {
3708 buffers_to_delete.push(*buffer_id);
3709 }
3710 }
3711
3712 for buffer_id in buffers_to_delete {
3713 self.opened_buffers.remove(&buffer_id);
3714 }
3715
3716 for (buffer, old_path) in renamed_buffers {
3717 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3718 self.assign_language_to_buffer(&buffer, cx);
3719 self.register_buffer_with_language_server(&buffer, cx);
3720 }
3721 }
3722
3723 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3724 let new_active_entry = entry.and_then(|project_path| {
3725 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3726 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3727 Some(entry.id)
3728 });
3729 if new_active_entry != self.active_entry {
3730 self.active_entry = new_active_entry;
3731 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3732 }
3733 }
3734
3735 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3736 self.language_server_statuses
3737 .values()
3738 .any(|status| status.pending_diagnostic_updates > 0)
3739 }
3740
3741 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3742 let mut summary = DiagnosticSummary::default();
3743 for (_, path_summary) in self.diagnostic_summaries(cx) {
3744 summary.error_count += path_summary.error_count;
3745 summary.warning_count += path_summary.warning_count;
3746 }
3747 summary
3748 }
3749
3750 pub fn diagnostic_summaries<'a>(
3751 &'a self,
3752 cx: &'a AppContext,
3753 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3754 self.worktrees(cx).flat_map(move |worktree| {
3755 let worktree = worktree.read(cx);
3756 let worktree_id = worktree.id();
3757 worktree
3758 .diagnostic_summaries()
3759 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3760 })
3761 }
3762
3763 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3764 if self
3765 .language_server_statuses
3766 .values()
3767 .map(|status| status.pending_diagnostic_updates)
3768 .sum::<isize>()
3769 == 1
3770 {
3771 cx.emit(Event::DiskBasedDiagnosticsStarted);
3772 }
3773 }
3774
3775 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3776 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3777 if self
3778 .language_server_statuses
3779 .values()
3780 .map(|status| status.pending_diagnostic_updates)
3781 .sum::<isize>()
3782 == 0
3783 {
3784 cx.emit(Event::DiskBasedDiagnosticsFinished);
3785 }
3786 }
3787
3788 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3789 self.active_entry
3790 }
3791
3792 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3793 self.worktree_for_id(path.worktree_id, cx)?
3794 .read(cx)
3795 .entry_for_path(&path.path)
3796 .map(|entry| entry.id)
3797 }
3798
3799 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3800 let worktree = self.worktree_for_entry(entry_id, cx)?;
3801 let worktree = worktree.read(cx);
3802 let worktree_id = worktree.id();
3803 let path = worktree.entry_for_id(entry_id)?.path.clone();
3804 Some(ProjectPath { worktree_id, path })
3805 }
3806
3807 // RPC message handlers
3808
3809 async fn handle_request_join_project(
3810 this: ModelHandle<Self>,
3811 message: TypedEnvelope<proto::RequestJoinProject>,
3812 _: Arc<Client>,
3813 mut cx: AsyncAppContext,
3814 ) -> Result<()> {
3815 let user_id = message.payload.requester_id;
3816 if this.read_with(&cx, |project, _| {
3817 project.collaborators.values().any(|c| c.user.id == user_id)
3818 }) {
3819 this.update(&mut cx, |this, cx| {
3820 this.respond_to_join_request(user_id, true, cx)
3821 });
3822 } else {
3823 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3824 let user = user_store
3825 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3826 .await?;
3827 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3828 }
3829 Ok(())
3830 }
3831
3832 async fn handle_unregister_project(
3833 this: ModelHandle<Self>,
3834 _: TypedEnvelope<proto::UnregisterProject>,
3835 _: Arc<Client>,
3836 mut cx: AsyncAppContext,
3837 ) -> Result<()> {
3838 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3839 Ok(())
3840 }
3841
3842 async fn handle_project_unshared(
3843 this: ModelHandle<Self>,
3844 _: TypedEnvelope<proto::ProjectUnshared>,
3845 _: Arc<Client>,
3846 mut cx: AsyncAppContext,
3847 ) -> Result<()> {
3848 this.update(&mut cx, |this, cx| this.unshared(cx));
3849 Ok(())
3850 }
3851
3852 async fn handle_add_collaborator(
3853 this: ModelHandle<Self>,
3854 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3855 _: Arc<Client>,
3856 mut cx: AsyncAppContext,
3857 ) -> Result<()> {
3858 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3859 let collaborator = envelope
3860 .payload
3861 .collaborator
3862 .take()
3863 .ok_or_else(|| anyhow!("empty collaborator"))?;
3864
3865 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3866 this.update(&mut cx, |this, cx| {
3867 this.collaborators
3868 .insert(collaborator.peer_id, collaborator);
3869 cx.notify();
3870 });
3871
3872 Ok(())
3873 }
3874
3875 async fn handle_remove_collaborator(
3876 this: ModelHandle<Self>,
3877 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3878 _: Arc<Client>,
3879 mut cx: AsyncAppContext,
3880 ) -> Result<()> {
3881 this.update(&mut cx, |this, cx| {
3882 let peer_id = PeerId(envelope.payload.peer_id);
3883 let replica_id = this
3884 .collaborators
3885 .remove(&peer_id)
3886 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3887 .replica_id;
3888 for (_, buffer) in &this.opened_buffers {
3889 if let Some(buffer) = buffer.upgrade(cx) {
3890 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3891 }
3892 }
3893
3894 cx.emit(Event::CollaboratorLeft(peer_id));
3895 cx.notify();
3896 Ok(())
3897 })
3898 }
3899
3900 async fn handle_join_project_request_cancelled(
3901 this: ModelHandle<Self>,
3902 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3903 _: Arc<Client>,
3904 mut cx: AsyncAppContext,
3905 ) -> Result<()> {
3906 let user = this
3907 .update(&mut cx, |this, cx| {
3908 this.user_store.update(cx, |user_store, cx| {
3909 user_store.fetch_user(envelope.payload.requester_id, cx)
3910 })
3911 })
3912 .await?;
3913
3914 this.update(&mut cx, |_, cx| {
3915 cx.emit(Event::ContactCancelledJoinRequest(user));
3916 });
3917
3918 Ok(())
3919 }
3920
3921 async fn handle_register_worktree(
3922 this: ModelHandle<Self>,
3923 envelope: TypedEnvelope<proto::RegisterWorktree>,
3924 client: Arc<Client>,
3925 mut cx: AsyncAppContext,
3926 ) -> Result<()> {
3927 this.update(&mut cx, |this, cx| {
3928 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3929 let replica_id = this.replica_id();
3930 let worktree = proto::Worktree {
3931 id: envelope.payload.worktree_id,
3932 root_name: envelope.payload.root_name,
3933 entries: Default::default(),
3934 diagnostic_summaries: Default::default(),
3935 visible: envelope.payload.visible,
3936 scan_id: 0,
3937 };
3938 let (worktree, load_task) =
3939 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3940 this.add_worktree(&worktree, cx);
3941 load_task.detach();
3942 Ok(())
3943 })
3944 }
3945
3946 async fn handle_unregister_worktree(
3947 this: ModelHandle<Self>,
3948 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3949 _: Arc<Client>,
3950 mut cx: AsyncAppContext,
3951 ) -> Result<()> {
3952 this.update(&mut cx, |this, cx| {
3953 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3954 this.remove_worktree(worktree_id, cx);
3955 Ok(())
3956 })
3957 }
3958
3959 async fn handle_update_worktree(
3960 this: ModelHandle<Self>,
3961 envelope: TypedEnvelope<proto::UpdateWorktree>,
3962 _: Arc<Client>,
3963 mut cx: AsyncAppContext,
3964 ) -> Result<()> {
3965 this.update(&mut cx, |this, cx| {
3966 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3967 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3968 worktree.update(cx, |worktree, _| {
3969 let worktree = worktree.as_remote_mut().unwrap();
3970 worktree.update_from_remote(envelope)
3971 })?;
3972 }
3973 Ok(())
3974 })
3975 }
3976
3977 async fn handle_create_project_entry(
3978 this: ModelHandle<Self>,
3979 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3980 _: Arc<Client>,
3981 mut cx: AsyncAppContext,
3982 ) -> Result<proto::ProjectEntryResponse> {
3983 let worktree = this.update(&mut cx, |this, cx| {
3984 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3985 this.worktree_for_id(worktree_id, cx)
3986 .ok_or_else(|| anyhow!("worktree not found"))
3987 })?;
3988 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3989 let entry = worktree
3990 .update(&mut cx, |worktree, cx| {
3991 let worktree = worktree.as_local_mut().unwrap();
3992 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3993 worktree.create_entry(path, envelope.payload.is_directory, cx)
3994 })
3995 .await?;
3996 Ok(proto::ProjectEntryResponse {
3997 entry: Some((&entry).into()),
3998 worktree_scan_id: worktree_scan_id as u64,
3999 })
4000 }
4001
4002 async fn handle_rename_project_entry(
4003 this: ModelHandle<Self>,
4004 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4005 _: Arc<Client>,
4006 mut cx: AsyncAppContext,
4007 ) -> Result<proto::ProjectEntryResponse> {
4008 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4009 let worktree = this.read_with(&cx, |this, cx| {
4010 this.worktree_for_entry(entry_id, cx)
4011 .ok_or_else(|| anyhow!("worktree not found"))
4012 })?;
4013 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4014 let entry = worktree
4015 .update(&mut cx, |worktree, cx| {
4016 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4017 worktree
4018 .as_local_mut()
4019 .unwrap()
4020 .rename_entry(entry_id, new_path, cx)
4021 .ok_or_else(|| anyhow!("invalid entry"))
4022 })?
4023 .await?;
4024 Ok(proto::ProjectEntryResponse {
4025 entry: Some((&entry).into()),
4026 worktree_scan_id: worktree_scan_id as u64,
4027 })
4028 }
4029
4030 async fn handle_delete_project_entry(
4031 this: ModelHandle<Self>,
4032 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4033 _: Arc<Client>,
4034 mut cx: AsyncAppContext,
4035 ) -> Result<proto::ProjectEntryResponse> {
4036 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4037 let worktree = this.read_with(&cx, |this, cx| {
4038 this.worktree_for_entry(entry_id, cx)
4039 .ok_or_else(|| anyhow!("worktree not found"))
4040 })?;
4041 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4042 worktree
4043 .update(&mut cx, |worktree, cx| {
4044 worktree
4045 .as_local_mut()
4046 .unwrap()
4047 .delete_entry(entry_id, cx)
4048 .ok_or_else(|| anyhow!("invalid entry"))
4049 })?
4050 .await?;
4051 Ok(proto::ProjectEntryResponse {
4052 entry: None,
4053 worktree_scan_id: worktree_scan_id as u64,
4054 })
4055 }
4056
4057 async fn handle_update_diagnostic_summary(
4058 this: ModelHandle<Self>,
4059 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4060 _: Arc<Client>,
4061 mut cx: AsyncAppContext,
4062 ) -> Result<()> {
4063 this.update(&mut cx, |this, cx| {
4064 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4065 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4066 if let Some(summary) = envelope.payload.summary {
4067 let project_path = ProjectPath {
4068 worktree_id,
4069 path: Path::new(&summary.path).into(),
4070 };
4071 worktree.update(cx, |worktree, _| {
4072 worktree
4073 .as_remote_mut()
4074 .unwrap()
4075 .update_diagnostic_summary(project_path.path.clone(), &summary);
4076 });
4077 cx.emit(Event::DiagnosticsUpdated(project_path));
4078 }
4079 }
4080 Ok(())
4081 })
4082 }
4083
4084 async fn handle_start_language_server(
4085 this: ModelHandle<Self>,
4086 envelope: TypedEnvelope<proto::StartLanguageServer>,
4087 _: Arc<Client>,
4088 mut cx: AsyncAppContext,
4089 ) -> Result<()> {
4090 let server = envelope
4091 .payload
4092 .server
4093 .ok_or_else(|| anyhow!("invalid server"))?;
4094 this.update(&mut cx, |this, cx| {
4095 this.language_server_statuses.insert(
4096 server.id as usize,
4097 LanguageServerStatus {
4098 name: server.name,
4099 pending_work: Default::default(),
4100 pending_diagnostic_updates: 0,
4101 },
4102 );
4103 cx.notify();
4104 });
4105 Ok(())
4106 }
4107
4108 async fn handle_update_language_server(
4109 this: ModelHandle<Self>,
4110 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4111 _: Arc<Client>,
4112 mut cx: AsyncAppContext,
4113 ) -> Result<()> {
4114 let language_server_id = envelope.payload.language_server_id as usize;
4115 match envelope
4116 .payload
4117 .variant
4118 .ok_or_else(|| anyhow!("invalid variant"))?
4119 {
4120 proto::update_language_server::Variant::WorkStart(payload) => {
4121 this.update(&mut cx, |this, cx| {
4122 this.on_lsp_work_start(language_server_id, payload.token, cx);
4123 })
4124 }
4125 proto::update_language_server::Variant::WorkProgress(payload) => {
4126 this.update(&mut cx, |this, cx| {
4127 this.on_lsp_work_progress(
4128 language_server_id,
4129 payload.token,
4130 LanguageServerProgress {
4131 message: payload.message,
4132 percentage: payload.percentage.map(|p| p as usize),
4133 last_update_at: Instant::now(),
4134 },
4135 cx,
4136 );
4137 })
4138 }
4139 proto::update_language_server::Variant::WorkEnd(payload) => {
4140 this.update(&mut cx, |this, cx| {
4141 this.on_lsp_work_end(language_server_id, payload.token, cx);
4142 })
4143 }
4144 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4145 this.update(&mut cx, |this, cx| {
4146 this.disk_based_diagnostics_started(cx);
4147 })
4148 }
4149 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4150 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4151 }
4152 }
4153
4154 Ok(())
4155 }
4156
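// Applies remote buffer operations. Operations for buffers that are still being
// opened are stashed in `OpenBuffer::Loading` and replayed once the buffer exists;
// on the host, operations for an unknown buffer are treated as a protocol error.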
4157 async fn handle_update_buffer(
4158 this: ModelHandle<Self>,
4159 envelope: TypedEnvelope<proto::UpdateBuffer>,
4160 _: Arc<Client>,
4161 mut cx: AsyncAppContext,
4162 ) -> Result<()> {
4163 this.update(&mut cx, |this, cx| {
4164 let payload = envelope.payload.clone();
4165 let buffer_id = payload.buffer_id;
4166 let ops = payload
4167 .operations
4168 .into_iter()
4169 .map(language::proto::deserialize_operation)
4170 .collect::<Result<Vec<_>, _>>()?;
4171 let is_remote = this.is_remote();
4172 match this.opened_buffers.entry(buffer_id) {
4173 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4174 OpenBuffer::Strong(buffer) => {
4175 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4176 }
4177 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4178 OpenBuffer::Weak(_) => {}
4179 },
4180 hash_map::Entry::Vacant(e) => {
4181 assert!(
4182 is_remote,
4183 "received buffer update from {:?}",
4184 envelope.original_sender_id
4185 );
4186 e.insert(OpenBuffer::Loading(ops));
4187 }
4188 }
4189 Ok(())
4190 })
4191 }
4192
4193 async fn handle_update_buffer_file(
4194 this: ModelHandle<Self>,
4195 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4196 _: Arc<Client>,
4197 mut cx: AsyncAppContext,
4198 ) -> Result<()> {
4199 this.update(&mut cx, |this, cx| {
4200 let payload = envelope.payload.clone();
4201 let buffer_id = payload.buffer_id;
4202 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4203 let worktree = this
4204 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4205 .ok_or_else(|| anyhow!("no such worktree"))?;
4206 let file = File::from_proto(file, worktree.clone(), cx)?;
4207 let buffer = this
4208 .opened_buffers
4209 .get_mut(&buffer_id)
4210 .and_then(|b| b.upgrade(cx))
4211 .ok_or_else(|| anyhow!("no such buffer"))?;
4212 buffer.update(cx, |buffer, cx| {
4213 buffer.file_updated(Box::new(file), cx).detach();
4214 });
4215 Ok(())
4216 })
4217 }
4218
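// Saves a buffer on behalf of a guest, waiting until this replica has seen the
// version the guest requested before writing to disk, then reports the saved
// version and mtime back.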
4219 async fn handle_save_buffer(
4220 this: ModelHandle<Self>,
4221 envelope: TypedEnvelope<proto::SaveBuffer>,
4222 _: Arc<Client>,
4223 mut cx: AsyncAppContext,
4224 ) -> Result<proto::BufferSaved> {
4225 let buffer_id = envelope.payload.buffer_id;
4226 let requested_version = deserialize_version(envelope.payload.version);
4227
4228 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4229 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4230 let buffer = this
4231 .opened_buffers
4232 .get(&buffer_id)
4233 .and_then(|buffer| buffer.upgrade(cx))
4234 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4235 Ok::<_, anyhow::Error>((project_id, buffer))
4236 })?;
4237 buffer
4238 .update(&mut cx, |buffer, _| {
4239 buffer.wait_for_version(requested_version)
4240 })
4241 .await;
4242
4243 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4244 Ok(proto::BufferSaved {
4245 project_id,
4246 buffer_id,
4247 version: serialize_version(&saved_version),
4248 mtime: Some(mtime.into()),
4249 })
4250 }
4251
4252 async fn handle_reload_buffers(
4253 this: ModelHandle<Self>,
4254 envelope: TypedEnvelope<proto::ReloadBuffers>,
4255 _: Arc<Client>,
4256 mut cx: AsyncAppContext,
4257 ) -> Result<proto::ReloadBuffersResponse> {
4258 let sender_id = envelope.original_sender_id()?;
4259 let reload = this.update(&mut cx, |this, cx| {
4260 let mut buffers = HashSet::default();
4261 for buffer_id in &envelope.payload.buffer_ids {
4262 buffers.insert(
4263 this.opened_buffers
4264 .get(buffer_id)
4265 .and_then(|buffer| buffer.upgrade(cx))
4266 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4267 );
4268 }
4269 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4270 })?;
4271
4272 let project_transaction = reload.await?;
4273 let project_transaction = this.update(&mut cx, |this, cx| {
4274 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4275 });
4276 Ok(proto::ReloadBuffersResponse {
4277 transaction: Some(project_transaction),
4278 })
4279 }
4280
4281 async fn handle_format_buffers(
4282 this: ModelHandle<Self>,
4283 envelope: TypedEnvelope<proto::FormatBuffers>,
4284 _: Arc<Client>,
4285 mut cx: AsyncAppContext,
4286 ) -> Result<proto::FormatBuffersResponse> {
4287 let sender_id = envelope.original_sender_id()?;
4288 let format = this.update(&mut cx, |this, cx| {
4289 let mut buffers = HashSet::default();
4290 for buffer_id in &envelope.payload.buffer_ids {
4291 buffers.insert(
4292 this.opened_buffers
4293 .get(buffer_id)
4294 .and_then(|buffer| buffer.upgrade(cx))
4295 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4296 );
4297 }
4298 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4299 })?;
4300
4301 let project_transaction = format.await?;
4302 let project_transaction = this.update(&mut cx, |this, cx| {
4303 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4304 });
4305 Ok(proto::FormatBuffersResponse {
4306 transaction: Some(project_transaction),
4307 })
4308 }
4309
4310 async fn handle_get_completions(
4311 this: ModelHandle<Self>,
4312 envelope: TypedEnvelope<proto::GetCompletions>,
4313 _: Arc<Client>,
4314 mut cx: AsyncAppContext,
4315 ) -> Result<proto::GetCompletionsResponse> {
4316 let position = envelope
4317 .payload
4318 .position
4319 .and_then(language::proto::deserialize_anchor)
4320 .ok_or_else(|| anyhow!("invalid position"))?;
4321 let version = deserialize_version(envelope.payload.version);
4322 let buffer = this.read_with(&cx, |this, cx| {
4323 this.opened_buffers
4324 .get(&envelope.payload.buffer_id)
4325 .and_then(|buffer| buffer.upgrade(cx))
4326 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4327 })?;
4328 buffer
4329 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4330 .await;
4331 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4332 let completions = this
4333 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4334 .await?;
4335
4336 Ok(proto::GetCompletionsResponse {
4337 completions: completions
4338 .iter()
4339 .map(language::proto::serialize_completion)
4340 .collect(),
4341 version: serialize_version(&version),
4342 })
4343 }
4344
4345 async fn handle_apply_additional_edits_for_completion(
4346 this: ModelHandle<Self>,
4347 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4348 _: Arc<Client>,
4349 mut cx: AsyncAppContext,
4350 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4351 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4352 let buffer = this
4353 .opened_buffers
4354 .get(&envelope.payload.buffer_id)
4355 .and_then(|buffer| buffer.upgrade(cx))
4356 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4357 let language = buffer.read(cx).language();
4358 let completion = language::proto::deserialize_completion(
4359 envelope
4360 .payload
4361 .completion
4362 .ok_or_else(|| anyhow!("invalid completion"))?,
4363 language,
4364 )?;
4365 Ok::<_, anyhow::Error>(
4366 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4367 )
4368 })?;
4369
4370 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4371 transaction: apply_additional_edits
4372 .await?
4373 .as_ref()
4374 .map(language::proto::serialize_transaction),
4375 })
4376 }
4377
4378 async fn handle_get_code_actions(
4379 this: ModelHandle<Self>,
4380 envelope: TypedEnvelope<proto::GetCodeActions>,
4381 _: Arc<Client>,
4382 mut cx: AsyncAppContext,
4383 ) -> Result<proto::GetCodeActionsResponse> {
4384 let start = envelope
4385 .payload
4386 .start
4387 .and_then(language::proto::deserialize_anchor)
4388 .ok_or_else(|| anyhow!("invalid start"))?;
4389 let end = envelope
4390 .payload
4391 .end
4392 .and_then(language::proto::deserialize_anchor)
4393 .ok_or_else(|| anyhow!("invalid end"))?;
4394 let buffer = this.update(&mut cx, |this, cx| {
4395 this.opened_buffers
4396 .get(&envelope.payload.buffer_id)
4397 .and_then(|buffer| buffer.upgrade(cx))
4398 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4399 })?;
4400 buffer
4401 .update(&mut cx, |buffer, _| {
4402 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4403 })
4404 .await;
4405
4406 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4407 let code_actions = this.update(&mut cx, |this, cx| {
4408 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4409 })?;
4410
4411 Ok(proto::GetCodeActionsResponse {
4412 actions: code_actions
4413 .await?
4414 .iter()
4415 .map(language::proto::serialize_code_action)
4416 .collect(),
4417 version: serialize_version(&version),
4418 })
4419 }
4420
4421 async fn handle_apply_code_action(
4422 this: ModelHandle<Self>,
4423 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4424 _: Arc<Client>,
4425 mut cx: AsyncAppContext,
4426 ) -> Result<proto::ApplyCodeActionResponse> {
4427 let sender_id = envelope.original_sender_id()?;
4428 let action = language::proto::deserialize_code_action(
4429 envelope
4430 .payload
4431 .action
4432 .ok_or_else(|| anyhow!("invalid action"))?,
4433 )?;
4434 let apply_code_action = this.update(&mut cx, |this, cx| {
4435 let buffer = this
4436 .opened_buffers
4437 .get(&envelope.payload.buffer_id)
4438 .and_then(|buffer| buffer.upgrade(cx))
4439 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4440 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4441 })?;
4442
4443 let project_transaction = apply_code_action.await?;
4444 let project_transaction = this.update(&mut cx, |this, cx| {
4445 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4446 });
4447 Ok(proto::ApplyCodeActionResponse {
4448 transaction: Some(project_transaction),
4449 })
4450 }
4451
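// Generic handler for buffer-scoped LSP requests forwarded by guests: resolves the
// buffer, reconstructs the typed request from the proto payload, runs it through
// `request_lsp`, and serializes the response against the buffer version observed
// before the request was issued.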
4452 async fn handle_lsp_command<T: LspCommand>(
4453 this: ModelHandle<Self>,
4454 envelope: TypedEnvelope<T::ProtoRequest>,
4455 _: Arc<Client>,
4456 mut cx: AsyncAppContext,
4457 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4458 where
4459 <T::LspRequest as lsp::request::Request>::Result: Send,
4460 {
4461 let sender_id = envelope.original_sender_id()?;
4462 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4463 let buffer_handle = this.read_with(&cx, |this, _| {
4464 this.opened_buffers
4465 .get(&buffer_id)
4466 .and_then(|buffer| buffer.upgrade(&cx))
4467 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4468 })?;
4469 let request = T::from_proto(
4470 envelope.payload,
4471 this.clone(),
4472 buffer_handle.clone(),
4473 cx.clone(),
4474 )
4475 .await?;
4476 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4477 let response = this
4478 .update(&mut cx, |this, cx| {
4479 this.request_lsp(buffer_handle, request, cx)
4480 })
4481 .await?;
4482 this.update(&mut cx, |this, cx| {
4483 Ok(T::response_to_proto(
4484 response,
4485 this,
4486 sender_id,
4487 &buffer_version,
4488 cx,
4489 ))
4490 })
4491 }
4492
4493 async fn handle_get_project_symbols(
4494 this: ModelHandle<Self>,
4495 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4496 _: Arc<Client>,
4497 mut cx: AsyncAppContext,
4498 ) -> Result<proto::GetProjectSymbolsResponse> {
4499 let symbols = this
4500 .update(&mut cx, |this, cx| {
4501 this.symbols(&envelope.payload.query, cx)
4502 })
4503 .await?;
4504
4505 Ok(proto::GetProjectSymbolsResponse {
4506 symbols: symbols.iter().map(serialize_symbol).collect(),
4507 })
4508 }
4509
4510 async fn handle_search_project(
4511 this: ModelHandle<Self>,
4512 envelope: TypedEnvelope<proto::SearchProject>,
4513 _: Arc<Client>,
4514 mut cx: AsyncAppContext,
4515 ) -> Result<proto::SearchProjectResponse> {
4516 let peer_id = envelope.original_sender_id()?;
4517 let query = SearchQuery::from_proto(envelope.payload)?;
4518 let result = this
4519 .update(&mut cx, |this, cx| this.search(query, cx))
4520 .await?;
4521
4522 this.update(&mut cx, |this, cx| {
4523 let mut locations = Vec::new();
4524 for (buffer, ranges) in result {
4525 for range in ranges {
4526 let start = serialize_anchor(&range.start);
4527 let end = serialize_anchor(&range.end);
4528 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4529 locations.push(proto::Location {
4530 buffer: Some(buffer),
4531 start: Some(start),
4532 end: Some(end),
4533 });
4534 }
4535 }
4536 Ok(proto::SearchProjectResponse { locations })
4537 })
4538 }
4539
4540 async fn handle_open_buffer_for_symbol(
4541 this: ModelHandle<Self>,
4542 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4543 _: Arc<Client>,
4544 mut cx: AsyncAppContext,
4545 ) -> Result<proto::OpenBufferForSymbolResponse> {
4546 let peer_id = envelope.original_sender_id()?;
4547 let symbol = envelope
4548 .payload
4549 .symbol
4550 .ok_or_else(|| anyhow!("invalid symbol"))?;
4551 let symbol = this.read_with(&cx, |this, _| {
4552 let symbol = this.deserialize_symbol(symbol)?;
4553 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4554 if signature == symbol.signature {
4555 Ok(symbol)
4556 } else {
4557 Err(anyhow!("invalid symbol signature"))
4558 }
4559 })?;
4560 let buffer = this
4561 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4562 .await?;
4563
4564 Ok(proto::OpenBufferForSymbolResponse {
4565 buffer: Some(this.update(&mut cx, |this, cx| {
4566 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4567 })),
4568 })
4569 }
4570
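// Hashes the worktree id, path, and a per-project nonce so that symbols handed to
// guests can be verified later and can't be forged to open arbitrary paths.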
4571 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4572 let mut hasher = Sha256::new();
4573 hasher.update(worktree_id.to_proto().to_be_bytes());
4574 hasher.update(path.to_string_lossy().as_bytes());
4575 hasher.update(self.nonce.to_be_bytes());
4576 hasher.finalize().as_slice().try_into().unwrap()
4577 }
4578
4579 async fn handle_open_buffer_by_id(
4580 this: ModelHandle<Self>,
4581 envelope: TypedEnvelope<proto::OpenBufferById>,
4582 _: Arc<Client>,
4583 mut cx: AsyncAppContext,
4584 ) -> Result<proto::OpenBufferResponse> {
4585 let peer_id = envelope.original_sender_id()?;
4586 let buffer = this
4587 .update(&mut cx, |this, cx| {
4588 this.open_buffer_by_id(envelope.payload.id, cx)
4589 })
4590 .await?;
4591 this.update(&mut cx, |this, cx| {
4592 Ok(proto::OpenBufferResponse {
4593 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4594 })
4595 })
4596 }
4597
4598 async fn handle_open_buffer_by_path(
4599 this: ModelHandle<Self>,
4600 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4601 _: Arc<Client>,
4602 mut cx: AsyncAppContext,
4603 ) -> Result<proto::OpenBufferResponse> {
4604 let peer_id = envelope.original_sender_id()?;
4605 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4606 let open_buffer = this.update(&mut cx, |this, cx| {
4607 this.open_buffer(
4608 ProjectPath {
4609 worktree_id,
4610 path: PathBuf::from(envelope.payload.path).into(),
4611 },
4612 cx,
4613 )
4614 });
4615
4616 let buffer = open_buffer.await?;
4617 this.update(&mut cx, |this, cx| {
4618 Ok(proto::OpenBufferResponse {
4619 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4620 })
4621 })
4622 }
4623
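// Converts a `ProjectTransaction` into its wire representation, sending full buffer
// state for any buffer the peer hasn't seen yet and just the buffer id otherwise.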
4624 fn serialize_project_transaction_for_peer(
4625 &mut self,
4626 project_transaction: ProjectTransaction,
4627 peer_id: PeerId,
4628 cx: &AppContext,
4629 ) -> proto::ProjectTransaction {
4630 let mut serialized_transaction = proto::ProjectTransaction {
4631 buffers: Default::default(),
4632 transactions: Default::default(),
4633 };
4634 for (buffer, transaction) in project_transaction.0 {
4635 serialized_transaction
4636 .buffers
4637 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4638 serialized_transaction
4639 .transactions
4640 .push(language::proto::serialize_transaction(&transaction));
4641 }
4642 serialized_transaction
4643 }
4644
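// Reconstructs a `ProjectTransaction` received from the host, waiting for all of
// the referenced edits to arrive and optionally pushing each transaction onto the
// corresponding buffer's undo history.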
4645 fn deserialize_project_transaction(
4646 &mut self,
4647 message: proto::ProjectTransaction,
4648 push_to_history: bool,
4649 cx: &mut ModelContext<Self>,
4650 ) -> Task<Result<ProjectTransaction>> {
4651 cx.spawn(|this, mut cx| async move {
4652 let mut project_transaction = ProjectTransaction::default();
4653 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4654 let buffer = this
4655 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4656 .await?;
4657 let transaction = language::proto::deserialize_transaction(transaction)?;
4658 project_transaction.0.insert(buffer, transaction);
4659 }
4660
4661 for (buffer, transaction) in &project_transaction.0 {
4662 buffer
4663 .update(&mut cx, |buffer, _| {
4664 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4665 })
4666 .await;
4667
4668 if push_to_history {
4669 buffer.update(&mut cx, |buffer, _| {
4670 buffer.push_transaction(transaction.clone(), Instant::now());
4671 });
4672 }
4673 }
4674
4675 Ok(project_transaction)
4676 })
4677 }
4678
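// The first time a buffer is sent to a given peer, its full state is serialized;
// afterwards only the buffer id is sent, since the peer already has the contents.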
4679 fn serialize_buffer_for_peer(
4680 &mut self,
4681 buffer: &ModelHandle<Buffer>,
4682 peer_id: PeerId,
4683 cx: &AppContext,
4684 ) -> proto::Buffer {
4685 let buffer_id = buffer.read(cx).remote_id();
4686 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4687 if shared_buffers.insert(buffer_id) {
4688 proto::Buffer {
4689 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4690 }
4691 } else {
4692 proto::Buffer {
4693 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4694 }
4695 }
4696 }
4697
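// Resolves a serialized buffer: either waits for a buffer with the given id to
// finish opening locally, or constructs a new buffer from the serialized state and
// registers it with the project.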
4698 fn deserialize_buffer(
4699 &mut self,
4700 buffer: proto::Buffer,
4701 cx: &mut ModelContext<Self>,
4702 ) -> Task<Result<ModelHandle<Buffer>>> {
4703 let replica_id = self.replica_id();
4704
4705 let opened_buffer_tx = self.opened_buffer.0.clone();
4706 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4707 cx.spawn(|this, mut cx| async move {
4708 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4709 proto::buffer::Variant::Id(id) => {
4710 let buffer = loop {
4711 let buffer = this.read_with(&cx, |this, cx| {
4712 this.opened_buffers
4713 .get(&id)
4714 .and_then(|buffer| buffer.upgrade(cx))
4715 });
4716 if let Some(buffer) = buffer {
4717 break buffer;
4718 }
4719 opened_buffer_rx
4720 .next()
4721 .await
4722 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4723 };
4724 Ok(buffer)
4725 }
4726 proto::buffer::Variant::State(mut buffer) => {
4727 let mut buffer_worktree = None;
4728 let mut buffer_file = None;
4729 if let Some(file) = buffer.file.take() {
4730 this.read_with(&cx, |this, cx| {
4731 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4732 let worktree =
4733 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4734 anyhow!("no worktree found for id {}", file.worktree_id)
4735 })?;
4736 buffer_file =
4737 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4738 as Box<dyn language::File>);
4739 buffer_worktree = Some(worktree);
4740 Ok::<_, anyhow::Error>(())
4741 })?;
4742 }
4743
4744 let buffer = cx.add_model(|cx| {
4745 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4746 });
4747
4748 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4749
4750 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4751 Ok(buffer)
4752 }
4753 }
4754 })
4755 }
4756
4757 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4758 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4759 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4760 let start = serialized_symbol
4761 .start
4762 .ok_or_else(|| anyhow!("invalid start"))?;
4763 let end = serialized_symbol
4764 .end
4765 .ok_or_else(|| anyhow!("invalid end"))?;
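// `kind` is transmuted directly into `lsp::SymbolKind`; this relies on the wire
// value matching that enum's discriminants.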
4766 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4767 let path = PathBuf::from(serialized_symbol.path);
4768 let language = self.languages.select_language(&path);
4769 Ok(Symbol {
4770 source_worktree_id,
4771 worktree_id,
4772 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4773 label: language
4774 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4775 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4776 name: serialized_symbol.name,
4777 path,
4778 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4779 kind,
4780 signature: serialized_symbol
4781 .signature
4782 .try_into()
4783 .map_err(|_| anyhow!("invalid signature"))?,
4784 })
4785 }
4786
4787 async fn handle_buffer_saved(
4788 this: ModelHandle<Self>,
4789 envelope: TypedEnvelope<proto::BufferSaved>,
4790 _: Arc<Client>,
4791 mut cx: AsyncAppContext,
4792 ) -> Result<()> {
4793 let version = deserialize_version(envelope.payload.version);
4794 let mtime = envelope
4795 .payload
4796 .mtime
4797 .ok_or_else(|| anyhow!("missing mtime"))?
4798 .into();
4799
4800 this.update(&mut cx, |this, cx| {
4801 let buffer = this
4802 .opened_buffers
4803 .get(&envelope.payload.buffer_id)
4804 .and_then(|buffer| buffer.upgrade(cx));
4805 if let Some(buffer) = buffer {
4806 buffer.update(cx, |buffer, cx| {
4807 buffer.did_save(version, mtime, None, cx);
4808 });
4809 }
4810 Ok(())
4811 })
4812 }
4813
4814 async fn handle_buffer_reloaded(
4815 this: ModelHandle<Self>,
4816 envelope: TypedEnvelope<proto::BufferReloaded>,
4817 _: Arc<Client>,
4818 mut cx: AsyncAppContext,
4819 ) -> Result<()> {
4820 let payload = envelope.payload.clone();
4821 let version = deserialize_version(payload.version);
4822 let mtime = payload
4823 .mtime
4824 .ok_or_else(|| anyhow!("missing mtime"))?
4825 .into();
4826 this.update(&mut cx, |this, cx| {
4827 let buffer = this
4828 .opened_buffers
4829 .get(&payload.buffer_id)
4830 .and_then(|buffer| buffer.upgrade(cx));
4831 if let Some(buffer) = buffer {
4832 buffer.update(cx, |buffer, cx| {
4833 buffer.did_reload(version, mtime, cx);
4834 });
4835 }
4836 Ok(())
4837 })
4838 }
4839
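// Fuzzy-matches `query` against the paths of all visible worktrees, prefixing
// results with the worktree root name when more than one worktree is open.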
4840 pub fn match_paths<'a>(
4841 &self,
4842 query: &'a str,
4843 include_ignored: bool,
4844 smart_case: bool,
4845 max_results: usize,
4846 cancel_flag: &'a AtomicBool,
4847 cx: &AppContext,
4848 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4849 let worktrees = self
4850 .worktrees(cx)
4851 .filter(|worktree| worktree.read(cx).is_visible())
4852 .collect::<Vec<_>>();
4853 let include_root_name = worktrees.len() > 1;
4854 let candidate_sets = worktrees
4855 .into_iter()
4856 .map(|worktree| CandidateSet {
4857 snapshot: worktree.read(cx).snapshot(),
4858 include_ignored,
4859 include_root_name,
4860 })
4861 .collect::<Vec<_>>();
4862
4863 let background = cx.background().clone();
4864 async move {
4865 fuzzy::match_paths(
4866 candidate_sets.as_slice(),
4867 query,
4868 smart_case,
4869 max_results,
4870 cancel_flag,
4871 background,
4872 )
4873 .await
4874 }
4875 }
4876
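// Converts LSP `TextEdit`s into anchored edits against the buffer snapshot that was
// current at the version the language server saw, diffing multi-line replacements
// so that anchors in unchanged regions are preserved.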
4877 fn edits_from_lsp(
4878 &mut self,
4879 buffer: &ModelHandle<Buffer>,
4880 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4881 version: Option<i32>,
4882 cx: &mut ModelContext<Self>,
4883 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4884 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4885 cx.background().spawn(async move {
4886 let snapshot = snapshot?;
4887 let mut lsp_edits = lsp_edits
4888 .into_iter()
4889 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4890 .peekable();
4891
4892 let mut edits = Vec::new();
4893 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4894 // Combine any LSP edits that are adjacent.
4895 //
4896 // Also combine LSP edits that are separated from each other by only
4897 // a newline. This is important because, for some code actions,
4898 // rust-analyzer rewrites the entire buffer via a series of edits that
4899 // are separated by unchanged newline characters.
4900 //
4901 // In order for the diffing logic below to work properly, any edits that
4902 // cancel each other out must be combined into one.
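//
// For example, an edit replacing line 1 and a separate edit replacing line 2,
// with only the intervening newline left untouched, are merged here into a
// single edit spanning both lines.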
4903 while let Some((next_range, next_text)) = lsp_edits.peek() {
4904 if next_range.start > range.end {
4905 if next_range.start.row > range.end.row + 1
4906 || next_range.start.column > 0
4907 || snapshot.clip_point_utf16(
4908 PointUtf16::new(range.end.row, u32::MAX),
4909 Bias::Left,
4910 ) > range.end
4911 {
4912 break;
4913 }
4914 new_text.push('\n');
4915 }
4916 range.end = next_range.end;
4917 new_text.push_str(&next_text);
4918 lsp_edits.next();
4919 }
4920
4921 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4922 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4923 {
4924 return Err(anyhow!("invalid edits received from language server"));
4925 }
4926
4927 // For multiline edits, perform a diff of the old and new text so that
4928 // we can identify the changes more precisely, preserving the locations
4929 // of any anchors positioned in the unchanged regions.
4930 if range.end.row > range.start.row {
4931 let mut offset = range.start.to_offset(&snapshot);
4932 let old_text = snapshot.text_for_range(range).collect::<String>();
4933
4934 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4935 let mut moved_since_edit = true;
4936 for change in diff.iter_all_changes() {
4937 let tag = change.tag();
4938 let value = change.value();
4939 match tag {
4940 ChangeTag::Equal => {
4941 offset += value.len();
4942 moved_since_edit = true;
4943 }
4944 ChangeTag::Delete => {
4945 let start = snapshot.anchor_after(offset);
4946 let end = snapshot.anchor_before(offset + value.len());
4947 if moved_since_edit {
4948 edits.push((start..end, String::new()));
4949 } else {
4950 edits.last_mut().unwrap().0.end = end;
4951 }
4952 offset += value.len();
4953 moved_since_edit = false;
4954 }
4955 ChangeTag::Insert => {
4956 if moved_since_edit {
4957 let anchor = snapshot.anchor_after(offset);
4958 edits.push((anchor.clone()..anchor, value.to_string()));
4959 } else {
4960 edits.last_mut().unwrap().1.push_str(value);
4961 }
4962 moved_since_edit = false;
4963 }
4964 }
4965 }
4966 } else if range.end == range.start {
4967 let anchor = snapshot.anchor_after(range.start);
4968 edits.push((anchor.clone()..anchor, new_text));
4969 } else {
4970 let edit_start = snapshot.anchor_after(range.start);
4971 let edit_end = snapshot.anchor_before(range.end);
4972 edits.push((edit_start..edit_end, new_text));
4973 }
4974 }
4975
4976 Ok(edits)
4977 })
4978 }
4979
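// Returns the buffer snapshot corresponding to a given LSP document version,
// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old. When no
// version is specified, the buffer's current text snapshot is used.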
4980 fn buffer_snapshot_for_lsp_version(
4981 &mut self,
4982 buffer: &ModelHandle<Buffer>,
4983 version: Option<i32>,
4984 cx: &AppContext,
4985 ) -> Result<TextBufferSnapshot> {
4986 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4987
4988 if let Some(version) = version {
4989 let buffer_id = buffer.read(cx).remote_id();
4990 let snapshots = self
4991 .buffer_snapshots
4992 .get_mut(&buffer_id)
4993 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4994 let mut found_snapshot = None;
4995 snapshots.retain(|(snapshot_version, snapshot)| {
4996 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4997 false
4998 } else {
4999 if *snapshot_version == version {
5000 found_snapshot = Some(snapshot.clone());
5001 }
5002 true
5003 }
5004 });
5005
5006 found_snapshot.ok_or_else(|| {
5007 anyhow!(
5008 "snapshot not found for buffer {} at version {}",
5009 buffer_id,
5010 version
5011 )
5012 })
5013 } else {
5014 Ok(buffer.read(cx).text_snapshot())
5015 }
5016 }
5017
5018 fn language_server_for_buffer(
5019 &self,
5020 buffer: &Buffer,
5021 cx: &AppContext,
5022 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5023 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5024 let worktree_id = file.worktree_id(cx);
5025 self.language_servers
5026 .get(&(worktree_id, language.lsp_adapter()?.name()))
5027 } else {
5028 None
5029 }
5030 }
5031}
5032
5033impl WorktreeHandle {
5034 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5035 match self {
5036 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5037 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5038 }
5039 }
5040}
5041
5042impl OpenBuffer {
5043 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5044 match self {
5045 OpenBuffer::Strong(handle) => Some(handle.clone()),
5046 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5047 OpenBuffer::Loading(_) => None,
5048 }
5049 }
5050}
5051
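// Adapts a worktree snapshot to the fuzzy matcher's candidate-set interface so that
// `match_paths` can search files across worktrees on the background executor.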
5052struct CandidateSet {
5053 snapshot: Snapshot,
5054 include_ignored: bool,
5055 include_root_name: bool,
5056}
5057
5058impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5059 type Candidates = CandidateSetIter<'a>;
5060
5061 fn id(&self) -> usize {
5062 self.snapshot.id().to_usize()
5063 }
5064
5065 fn len(&self) -> usize {
5066 if self.include_ignored {
5067 self.snapshot.file_count()
5068 } else {
5069 self.snapshot.visible_file_count()
5070 }
5071 }
5072
5073 fn prefix(&self) -> Arc<str> {
5074 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5075 self.snapshot.root_name().into()
5076 } else if self.include_root_name {
5077 format!("{}/", self.snapshot.root_name()).into()
5078 } else {
5079 "".into()
5080 }
5081 }
5082
5083 fn candidates(&'a self, start: usize) -> Self::Candidates {
5084 CandidateSetIter {
5085 traversal: self.snapshot.files(self.include_ignored, start),
5086 }
5087 }
5088}
5089
5090struct CandidateSetIter<'a> {
5091 traversal: Traversal<'a>,
5092}
5093
5094impl<'a> Iterator for CandidateSetIter<'a> {
5095 type Item = PathMatchCandidate<'a>;
5096
5097 fn next(&mut self) -> Option<Self::Item> {
5098 self.traversal.next().map(|entry| {
5099 if let EntryKind::File(char_bag) = entry.kind {
5100 PathMatchCandidate {
5101 path: &entry.path,
5102 char_bag,
5103 }
5104 } else {
5105 unreachable!()
5106 }
5107 })
5108 }
5109}
5110
5111impl Entity for Project {
5112 type Event = Event;
5113
5114 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5115 match &self.client_state {
5116 ProjectClientState::Local { remote_id_rx, .. } => {
5117 if let Some(project_id) = *remote_id_rx.borrow() {
5118 self.client
5119 .send(proto::UnregisterProject { project_id })
5120 .log_err();
5121 }
5122 }
5123 ProjectClientState::Remote { remote_id, .. } => {
5124 self.client
5125 .send(proto::LeaveProject {
5126 project_id: *remote_id,
5127 })
5128 .log_err();
5129 }
5130 }
5131 }
5132
5133 fn app_will_quit(
5134 &mut self,
5135 _: &mut MutableAppContext,
5136 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5137 let shutdown_futures = self
5138 .language_servers
5139 .drain()
5140 .filter_map(|(_, (_, server))| server.shutdown())
5141 .collect::<Vec<_>>();
5142 Some(
5143 async move {
5144 futures::future::join_all(shutdown_futures).await;
5145 }
5146 .boxed(),
5147 )
5148 }
5149}
5150
5151impl Collaborator {
5152 fn from_proto(
5153 message: proto::Collaborator,
5154 user_store: &ModelHandle<UserStore>,
5155 cx: &mut AsyncAppContext,
5156 ) -> impl Future<Output = Result<Self>> {
5157 let user = user_store.update(cx, |user_store, cx| {
5158 user_store.fetch_user(message.user_id, cx)
5159 });
5160
5161 async move {
5162 Ok(Self {
5163 peer_id: PeerId(message.peer_id),
5164 user: user.await?,
5165 replica_id: message.replica_id as ReplicaId,
5166 })
5167 }
5168 }
5169}
5170
5171impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5172 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5173 Self {
5174 worktree_id,
5175 path: path.as_ref().into(),
5176 }
5177 }
5178}
5179
5180impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5181 fn from(options: lsp::CreateFileOptions) -> Self {
5182 Self {
5183 overwrite: options.overwrite.unwrap_or(false),
5184 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5185 }
5186 }
5187}
5188
5189impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5190 fn from(options: lsp::RenameFileOptions) -> Self {
5191 Self {
5192 overwrite: options.overwrite.unwrap_or(false),
5193 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5194 }
5195 }
5196}
5197
5198impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5199 fn from(options: lsp::DeleteFileOptions) -> Self {
5200 Self {
5201 recursive: options.recursive.unwrap_or(false),
5202 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5203 }
5204 }
5205}
5206
5207fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5208 proto::Symbol {
5209 source_worktree_id: symbol.source_worktree_id.to_proto(),
5210 worktree_id: symbol.worktree_id.to_proto(),
5211 language_server_name: symbol.language_server_name.0.to_string(),
5212 name: symbol.name.clone(),
5213 kind: unsafe { mem::transmute(symbol.kind) },
5214 path: symbol.path.to_string_lossy().to_string(),
5215 start: Some(proto::Point {
5216 row: symbol.range.start.row,
5217 column: symbol.range.start.column,
5218 }),
5219 end: Some(proto::Point {
5220 row: symbol.range.end.row,
5221 column: symbol.range.end.column,
5222 }),
5223 signature: symbol.signature.to_vec(),
5224 }
5225}
5226
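// Computes the relative path from `base` to `path` by walking off their shared
// prefix, e.g. relativize_path("a/b/c", "a/d/e") yields "../../d/e".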
5227fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5228 let mut path_components = path.components();
5229 let mut base_components = base.components();
5230 let mut components: Vec<Component> = Vec::new();
5231 loop {
5232 match (path_components.next(), base_components.next()) {
5233 (None, None) => break,
5234 (Some(a), None) => {
5235 components.push(a);
5236 components.extend(path_components.by_ref());
5237 break;
5238 }
5239 (None, _) => components.push(Component::ParentDir),
5240 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5241 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5242 (Some(a), Some(_)) => {
5243 components.push(Component::ParentDir);
5244 for _ in base_components {
5245 components.push(Component::ParentDir);
5246 }
5247 components.push(a);
5248 components.extend(path_components.by_ref());
5249 break;
5250 }
5251 }
5252 }
5253 components.iter().map(|c| c.as_os_str()).collect()
5254}
5255
5256impl Item for Buffer {
5257 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5258 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5259 }
5260}
5261
5262#[cfg(test)]
5263mod tests {
5264 use crate::worktree::WorktreeHandle;
5265
5266 use super::{Event, *};
5267 use fs::RealFs;
5268 use futures::{future, StreamExt};
5269 use gpui::test::subscribe;
5270 use language::{
5271 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5272 OffsetRangeExt, Point, ToPoint,
5273 };
5274 use lsp::Url;
5275 use serde_json::json;
5276 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5277 use unindent::Unindent as _;
5278 use util::{assert_set_eq, test::temp_tree};
5279
5280 #[gpui::test]
5281 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5282 let dir = temp_tree(json!({
5283 "root": {
5284 "apple": "",
5285 "banana": {
5286 "carrot": {
5287 "date": "",
5288 "endive": "",
5289 }
5290 },
5291 "fennel": {
5292 "grape": "",
5293 }
5294 }
5295 }));
5296
5297 let root_link_path = dir.path().join("root_link");
5298 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5299 unix::fs::symlink(
5300 &dir.path().join("root/fennel"),
5301 &dir.path().join("root/finnochio"),
5302 )
5303 .unwrap();
5304
5305 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5306
5307 project.read_with(cx, |project, cx| {
5308 let tree = project.worktrees(cx).next().unwrap().read(cx);
5309 assert_eq!(tree.file_count(), 5);
5310 assert_eq!(
5311 tree.inode_for_path("fennel/grape"),
5312 tree.inode_for_path("finnochio/grape")
5313 );
5314 });
5315
5316 let cancel_flag = Default::default();
5317 let results = project
5318 .read_with(cx, |project, cx| {
5319 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5320 })
5321 .await;
5322 assert_eq!(
5323 results
5324 .into_iter()
5325 .map(|result| result.path)
5326 .collect::<Vec<Arc<Path>>>(),
5327 vec![
5328 PathBuf::from("banana/carrot/date").into(),
5329 PathBuf::from("banana/carrot/endive").into(),
5330 ]
5331 );
5332 }
5333
5334 #[gpui::test]
5335 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5336 cx.foreground().forbid_parking();
5337
5338 let mut rust_language = Language::new(
5339 LanguageConfig {
5340 name: "Rust".into(),
5341 path_suffixes: vec!["rs".to_string()],
5342 ..Default::default()
5343 },
5344 Some(tree_sitter_rust::language()),
5345 );
5346 let mut json_language = Language::new(
5347 LanguageConfig {
5348 name: "JSON".into(),
5349 path_suffixes: vec!["json".to_string()],
5350 ..Default::default()
5351 },
5352 None,
5353 );
5354 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5355 name: "the-rust-language-server",
5356 capabilities: lsp::ServerCapabilities {
5357 completion_provider: Some(lsp::CompletionOptions {
5358 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5359 ..Default::default()
5360 }),
5361 ..Default::default()
5362 },
5363 ..Default::default()
5364 });
5365 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5366 name: "the-json-language-server",
5367 capabilities: lsp::ServerCapabilities {
5368 completion_provider: Some(lsp::CompletionOptions {
5369 trigger_characters: Some(vec![":".to_string()]),
5370 ..Default::default()
5371 }),
5372 ..Default::default()
5373 },
5374 ..Default::default()
5375 });
5376
5377 let fs = FakeFs::new(cx.background());
5378 fs.insert_tree(
5379 "/the-root",
5380 json!({
5381 "test.rs": "const A: i32 = 1;",
5382 "test2.rs": "",
5383 "Cargo.toml": "a = 1",
5384 "package.json": "{\"a\": 1}",
5385 }),
5386 )
5387 .await;
5388
5389 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5390 project.update(cx, |project, _| {
5391 project.languages.add(Arc::new(rust_language));
5392 project.languages.add(Arc::new(json_language));
5393 });
5394
5395 // Open a buffer without an associated language server.
5396 let toml_buffer = project
5397 .update(cx, |project, cx| {
5398 project.open_local_buffer("/the-root/Cargo.toml", cx)
5399 })
5400 .await
5401 .unwrap();
5402
5403 // Open a buffer with an associated language server.
5404 let rust_buffer = project
5405 .update(cx, |project, cx| {
5406 project.open_local_buffer("/the-root/test.rs", cx)
5407 })
5408 .await
5409 .unwrap();
5410
5411 // A server is started up, and it is notified about Rust files.
5412 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5413 assert_eq!(
5414 fake_rust_server
5415 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5416 .await
5417 .text_document,
5418 lsp::TextDocumentItem {
5419 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5420 version: 0,
5421 text: "const A: i32 = 1;".to_string(),
5422 language_id: Default::default()
5423 }
5424 );
5425
5426 // The buffer is configured based on the language server's capabilities.
5427 rust_buffer.read_with(cx, |buffer, _| {
5428 assert_eq!(
5429 buffer.completion_triggers(),
5430 &[".".to_string(), "::".to_string()]
5431 );
5432 });
5433 toml_buffer.read_with(cx, |buffer, _| {
5434 assert!(buffer.completion_triggers().is_empty());
5435 });
5436
5437 // Edit a buffer. The changes are reported to the language server.
5438 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5439 assert_eq!(
5440 fake_rust_server
5441 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5442 .await
5443 .text_document,
5444 lsp::VersionedTextDocumentIdentifier::new(
5445 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5446 1
5447 )
5448 );
5449
5450 // Open a third buffer with a different associated language server.
5451 let json_buffer = project
5452 .update(cx, |project, cx| {
5453 project.open_local_buffer("/the-root/package.json", cx)
5454 })
5455 .await
5456 .unwrap();
5457
5458 // A JSON language server is started up, and it is only notified about the JSON buffer.
5459 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5460 assert_eq!(
5461 fake_json_server
5462 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5463 .await
5464 .text_document,
5465 lsp::TextDocumentItem {
5466 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5467 version: 0,
5468 text: "{\"a\": 1}".to_string(),
5469 language_id: Default::default()
5470 }
5471 );
5472
5473 // This buffer is configured based on the second language server's
5474 // capabilities.
5475 json_buffer.read_with(cx, |buffer, _| {
5476 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5477 });
5478
5479 // When opening another buffer whose language server is already running,
5480 // it is also configured based on the existing language server's capabilities.
5481 let rust_buffer2 = project
5482 .update(cx, |project, cx| {
5483 project.open_local_buffer("/the-root/test2.rs", cx)
5484 })
5485 .await
5486 .unwrap();
5487 rust_buffer2.read_with(cx, |buffer, _| {
5488 assert_eq!(
5489 buffer.completion_triggers(),
5490 &[".".to_string(), "::".to_string()]
5491 );
5492 });
5493
5494 // Changes are reported only to servers matching the buffer's language.
5495 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5496 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5497 assert_eq!(
5498 fake_rust_server
5499 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5500 .await
5501 .text_document,
5502 lsp::VersionedTextDocumentIdentifier::new(
5503 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5504 1
5505 )
5506 );
5507
5508 // Save notifications are reported to all servers.
5509 toml_buffer
5510 .update(cx, |buffer, cx| buffer.save(cx))
5511 .await
5512 .unwrap();
5513 assert_eq!(
5514 fake_rust_server
5515 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5516 .await
5517 .text_document,
5518 lsp::TextDocumentIdentifier::new(
5519 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5520 )
5521 );
5522 assert_eq!(
5523 fake_json_server
5524 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5525 .await
5526 .text_document,
5527 lsp::TextDocumentIdentifier::new(
5528 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5529 )
5530 );
5531
5532 // Renames are reported only to servers matching the buffer's language.
5533 fs.rename(
5534 Path::new("/the-root/test2.rs"),
5535 Path::new("/the-root/test3.rs"),
5536 Default::default(),
5537 )
5538 .await
5539 .unwrap();
5540 assert_eq!(
5541 fake_rust_server
5542 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5543 .await
5544 .text_document,
5545 lsp::TextDocumentIdentifier::new(
5546 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5547 ),
5548 );
5549 assert_eq!(
5550 fake_rust_server
5551 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5552 .await
5553 .text_document,
5554 lsp::TextDocumentItem {
5555 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5556 version: 0,
5557 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5558 language_id: Default::default()
5559 },
5560 );
5561
5562 rust_buffer2.update(cx, |buffer, cx| {
5563 buffer.update_diagnostics(
5564 DiagnosticSet::from_sorted_entries(
5565 vec![DiagnosticEntry {
5566 diagnostic: Default::default(),
5567 range: Anchor::MIN..Anchor::MAX,
5568 }],
5569 &buffer.snapshot(),
5570 ),
5571 cx,
5572 );
5573 assert_eq!(
5574 buffer
5575 .snapshot()
5576 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5577 .count(),
5578 1
5579 );
5580 });
5581
5582 // When the rename changes the extension of the file, the buffer gets closed on the old
5583 // language server and gets opened on the new one.
5584 fs.rename(
5585 Path::new("/the-root/test3.rs"),
5586 Path::new("/the-root/test3.json"),
5587 Default::default(),
5588 )
5589 .await
5590 .unwrap();
5591 assert_eq!(
5592 fake_rust_server
5593 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5594 .await
5595 .text_document,
5596 lsp::TextDocumentIdentifier::new(
5597 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5598 ),
5599 );
5600 assert_eq!(
5601 fake_json_server
5602 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5603 .await
5604 .text_document,
5605 lsp::TextDocumentItem {
5606 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5607 version: 0,
5608 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5609 language_id: Default::default()
5610 },
5611 );
5612
5613 // We clear the diagnostics, since the language has changed.
5614 rust_buffer2.read_with(cx, |buffer, _| {
5615 assert_eq!(
5616 buffer
5617 .snapshot()
5618 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5619 .count(),
5620 0
5621 );
5622 });
5623
5624 // The renamed file's version resets after changing language servers.
5625 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5626 assert_eq!(
5627 fake_json_server
5628 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5629 .await
5630 .text_document,
5631 lsp::VersionedTextDocumentIdentifier::new(
5632 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5633 1
5634 )
5635 );
5636
5637 // Restart language servers
5638 project.update(cx, |project, cx| {
5639 project.restart_language_servers_for_buffers(
5640 vec![rust_buffer.clone(), json_buffer.clone()],
5641 cx,
5642 );
5643 });
5644
5645 let mut rust_shutdown_requests = fake_rust_server
5646 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5647 let mut json_shutdown_requests = fake_json_server
5648 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5649 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5650
5651 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5652 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5653
5654 // Ensure the Rust document is reopened in the new Rust language server.
5655 assert_eq!(
5656 fake_rust_server
5657 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5658 .await
5659 .text_document,
5660 lsp::TextDocumentItem {
5661 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5662 version: 1,
5663 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5664 language_id: Default::default()
5665 }
5666 );
5667
5668 // Ensure the JSON documents are reopened in the new JSON language server.
5669 assert_set_eq!(
5670 [
5671 fake_json_server
5672 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5673 .await
5674 .text_document,
5675 fake_json_server
5676 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5677 .await
5678 .text_document,
5679 ],
5680 [
5681 lsp::TextDocumentItem {
5682 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5683 version: 0,
5684 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5685 language_id: Default::default()
5686 },
5687 lsp::TextDocumentItem {
5688 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5689 version: 1,
5690 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5691 language_id: Default::default()
5692 }
5693 ]
5694 );
5695
5696 // Close notifications are reported only to servers matching the buffer's language.
5697 cx.update(|_| drop(json_buffer));
5698 let close_message = lsp::DidCloseTextDocumentParams {
5699 text_document: lsp::TextDocumentIdentifier::new(
5700 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5701 ),
5702 };
5703 assert_eq!(
5704 fake_json_server
5705 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5706 .await,
5707 close_message,
5708 );
5709 }
5710
5711 #[gpui::test]
5712 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5713 cx.foreground().forbid_parking();
5714
5715 let fs = FakeFs::new(cx.background());
5716 fs.insert_tree(
5717 "/dir",
5718 json!({
5719 "a.rs": "let a = 1;",
5720 "b.rs": "let b = 2;"
5721 }),
5722 )
5723 .await;
5724
5725 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5726
5727 let buffer_a = project
5728 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5729 .await
5730 .unwrap();
5731 let buffer_b = project
5732 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5733 .await
5734 .unwrap();
5735
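// Publish an error for a.rs and a warning for b.rs; each should show up in the buffer
// belonging to its own single-file worktree.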
5736 project.update(cx, |project, cx| {
5737 project
5738 .update_diagnostics(
5739 lsp::PublishDiagnosticsParams {
5740 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5741 version: None,
5742 diagnostics: vec![lsp::Diagnostic {
5743 range: lsp::Range::new(
5744 lsp::Position::new(0, 4),
5745 lsp::Position::new(0, 5),
5746 ),
5747 severity: Some(lsp::DiagnosticSeverity::ERROR),
5748 message: "error 1".to_string(),
5749 ..Default::default()
5750 }],
5751 },
5752 &[],
5753 cx,
5754 )
5755 .unwrap();
5756 project
5757 .update_diagnostics(
5758 lsp::PublishDiagnosticsParams {
5759 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5760 version: None,
5761 diagnostics: vec![lsp::Diagnostic {
5762 range: lsp::Range::new(
5763 lsp::Position::new(0, 4),
5764 lsp::Position::new(0, 5),
5765 ),
5766 severity: Some(lsp::DiagnosticSeverity::WARNING),
5767 message: "error 2".to_string(),
5768 ..Default::default()
5769 }],
5770 },
5771 &[],
5772 cx,
5773 )
5774 .unwrap();
5775 });
5776
5777 buffer_a.read_with(cx, |buffer, _| {
5778 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5779 assert_eq!(
5780 chunks
5781 .iter()
5782 .map(|(s, d)| (s.as_str(), *d))
5783 .collect::<Vec<_>>(),
5784 &[
5785 ("let ", None),
5786 ("a", Some(DiagnosticSeverity::ERROR)),
5787 (" = 1;", None),
5788 ]
5789 );
5790 });
5791 buffer_b.read_with(cx, |buffer, _| {
5792 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5793 assert_eq!(
5794 chunks
5795 .iter()
5796 .map(|(s, d)| (s.as_str(), *d))
5797 .collect::<Vec<_>>(),
5798 &[
5799 ("let ", None),
5800 ("b", Some(DiagnosticSeverity::WARNING)),
5801 (" = 2;", None),
5802 ]
5803 );
5804 });
5805 }
5806
5807 #[gpui::test]
5808 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5809 cx.foreground().forbid_parking();
5810
5811 let progress_token = "the-progress-token";
5812 let mut language = Language::new(
5813 LanguageConfig {
5814 name: "Rust".into(),
5815 path_suffixes: vec!["rs".to_string()],
5816 ..Default::default()
5817 },
5818 Some(tree_sitter_rust::language()),
5819 );
5820 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5821 disk_based_diagnostics_progress_token: Some(progress_token),
5822 disk_based_diagnostics_sources: &["disk"],
5823 ..Default::default()
5824 });
5825
5826 let fs = FakeFs::new(cx.background());
5827 fs.insert_tree(
5828 "/dir",
5829 json!({
5830 "a.rs": "fn a() { A }",
5831 "b.rs": "const y: i32 = 1",
5832 }),
5833 )
5834 .await;
5835
5836 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5837 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5838 let worktree_id =
5839 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5840
// Cause the worktree to start the fake language server
5842 let _buffer = project
5843 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5844 .await
5845 .unwrap();
5846
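// Subscribe to project events so the disk-based diagnostics lifecycle can be observed below.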
5847 let mut events = subscribe(&project, cx);
5848
5849 let mut fake_server = fake_servers.next().await.unwrap();
5850 fake_server.start_progress(progress_token).await;
5851 assert_eq!(
5852 events.next().await.unwrap(),
5853 Event::DiskBasedDiagnosticsStarted
5854 );
5855
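// Additional progress starts for the same token while it is already running shouldn't
// produce further started events; only the final matching end (below) completes the cycle.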
5856 fake_server.start_progress(progress_token).await;
5857 fake_server.end_progress(progress_token).await;
5858 fake_server.start_progress(progress_token).await;
5859
5860 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5861 lsp::PublishDiagnosticsParams {
5862 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5863 version: None,
5864 diagnostics: vec![lsp::Diagnostic {
5865 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5866 severity: Some(lsp::DiagnosticSeverity::ERROR),
5867 message: "undefined variable 'A'".to_string(),
5868 ..Default::default()
5869 }],
5870 },
5871 );
5872 assert_eq!(
5873 events.next().await.unwrap(),
5874 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5875 );
5876
5877 fake_server.end_progress(progress_token).await;
5878 fake_server.end_progress(progress_token).await;
5879 assert_eq!(
5880 events.next().await.unwrap(),
5881 Event::DiskBasedDiagnosticsUpdated
5882 );
5883 assert_eq!(
5884 events.next().await.unwrap(),
5885 Event::DiskBasedDiagnosticsFinished
5886 );
5887
5888 let buffer = project
5889 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5890 .await
5891 .unwrap();
5892
5893 buffer.read_with(cx, |buffer, _| {
5894 let snapshot = buffer.snapshot();
5895 let diagnostics = snapshot
5896 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5897 .collect::<Vec<_>>();
5898 assert_eq!(
5899 diagnostics,
5900 &[DiagnosticEntry {
5901 range: Point::new(0, 9)..Point::new(0, 10),
5902 diagnostic: Diagnostic {
5903 severity: lsp::DiagnosticSeverity::ERROR,
5904 message: "undefined variable 'A'".to_string(),
5905 group_id: 0,
5906 is_primary: true,
5907 ..Default::default()
5908 }
5909 }]
5910 )
5911 });
5912
5913 // Ensure publishing empty diagnostics twice only results in one update event.
5914 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5915 lsp::PublishDiagnosticsParams {
5916 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5917 version: None,
5918 diagnostics: Default::default(),
5919 },
5920 );
5921 assert_eq!(
5922 events.next().await.unwrap(),
5923 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5924 );
5925
5926 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5927 lsp::PublishDiagnosticsParams {
5928 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5929 version: None,
5930 diagnostics: Default::default(),
5931 },
5932 );
5933 cx.foreground().run_until_parked();
5934 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5935 }
5936
5937 #[gpui::test]
5938 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5939 cx.foreground().forbid_parking();
5940
5941 let progress_token = "the-progress-token";
5942 let mut language = Language::new(
5943 LanguageConfig {
5944 path_suffixes: vec!["rs".to_string()],
5945 ..Default::default()
5946 },
5947 None,
5948 );
5949 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5950 disk_based_diagnostics_sources: &["disk"],
5951 disk_based_diagnostics_progress_token: Some(progress_token),
5952 ..Default::default()
5953 });
5954
5955 let fs = FakeFs::new(cx.background());
5956 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5957
5958 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5959 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5960
5961 let buffer = project
5962 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5963 .await
5964 .unwrap();
5965
5966 // Simulate diagnostics starting to update.
5967 let mut fake_server = fake_servers.next().await.unwrap();
5968 fake_server.start_progress(progress_token).await;
5969
5970 // Restart the server before the diagnostics finish updating.
5971 project.update(cx, |project, cx| {
5972 project.restart_language_servers_for_buffers([buffer], cx);
5973 });
5974 let mut events = subscribe(&project, cx);
5975
5976 // Simulate the newly started server sending more diagnostics.
5977 let mut fake_server = fake_servers.next().await.unwrap();
5978 fake_server.start_progress(progress_token).await;
5979 assert_eq!(
5980 events.next().await.unwrap(),
5981 Event::DiskBasedDiagnosticsStarted
5982 );
5983
5984 // All diagnostics are considered done, despite the old server's diagnostic
5985 // task never completing.
5986 fake_server.end_progress(progress_token).await;
5987 assert_eq!(
5988 events.next().await.unwrap(),
5989 Event::DiskBasedDiagnosticsUpdated
5990 );
5991 assert_eq!(
5992 events.next().await.unwrap(),
5993 Event::DiskBasedDiagnosticsFinished
5994 );
5995 project.read_with(cx, |project, _| {
5996 assert!(!project.is_running_disk_based_diagnostics());
5997 });
5998 }
5999
6000 #[gpui::test]
6001 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6002 cx.foreground().forbid_parking();
6003
6004 let mut language = Language::new(
6005 LanguageConfig {
6006 name: "Rust".into(),
6007 path_suffixes: vec!["rs".to_string()],
6008 ..Default::default()
6009 },
6010 Some(tree_sitter_rust::language()),
6011 );
6012 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6013 disk_based_diagnostics_sources: &["disk"],
6014 ..Default::default()
6015 });
6016
6017 let text = "
6018 fn a() { A }
6019 fn b() { BB }
6020 fn c() { CCC }
6021 "
6022 .unindent();
6023
6024 let fs = FakeFs::new(cx.background());
6025 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6026
6027 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6028 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6029
6030 let buffer = project
6031 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6032 .await
6033 .unwrap();
6034
6035 let mut fake_server = fake_servers.next().await.unwrap();
6036 let open_notification = fake_server
6037 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6038 .await;
6039
6040 // Edit the buffer, moving the content down
6041 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6042 let change_notification_1 = fake_server
6043 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6044 .await;
6045 assert!(
6046 change_notification_1.text_document.version > open_notification.text_document.version
6047 );
6048
6049 // Report some diagnostics for the initial version of the buffer
6050 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6051 lsp::PublishDiagnosticsParams {
6052 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6053 version: Some(open_notification.text_document.version),
6054 diagnostics: vec![
6055 lsp::Diagnostic {
6056 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6057 severity: Some(DiagnosticSeverity::ERROR),
6058 message: "undefined variable 'A'".to_string(),
6059 source: Some("disk".to_string()),
6060 ..Default::default()
6061 },
6062 lsp::Diagnostic {
6063 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6064 severity: Some(DiagnosticSeverity::ERROR),
6065 message: "undefined variable 'BB'".to_string(),
6066 source: Some("disk".to_string()),
6067 ..Default::default()
6068 },
6069 lsp::Diagnostic {
6070 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6071 severity: Some(DiagnosticSeverity::ERROR),
6072 source: Some("disk".to_string()),
6073 message: "undefined variable 'CCC'".to_string(),
6074 ..Default::default()
6075 },
6076 ],
6077 },
6078 );
6079
6080 // The diagnostics have moved down since they were created.
6081 buffer.next_notification(cx).await;
6082 buffer.read_with(cx, |buffer, _| {
6083 assert_eq!(
6084 buffer
6085 .snapshot()
6086 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6087 .collect::<Vec<_>>(),
6088 &[
6089 DiagnosticEntry {
6090 range: Point::new(3, 9)..Point::new(3, 11),
6091 diagnostic: Diagnostic {
6092 severity: DiagnosticSeverity::ERROR,
6093 message: "undefined variable 'BB'".to_string(),
6094 is_disk_based: true,
6095 group_id: 1,
6096 is_primary: true,
6097 ..Default::default()
6098 },
6099 },
6100 DiagnosticEntry {
6101 range: Point::new(4, 9)..Point::new(4, 12),
6102 diagnostic: Diagnostic {
6103 severity: DiagnosticSeverity::ERROR,
6104 message: "undefined variable 'CCC'".to_string(),
6105 is_disk_based: true,
6106 group_id: 2,
6107 is_primary: true,
6108 ..Default::default()
6109 }
6110 }
6111 ]
6112 );
6113 assert_eq!(
6114 chunks_with_diagnostics(buffer, 0..buffer.len()),
6115 [
6116 ("\n\nfn a() { ".to_string(), None),
6117 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6118 (" }\nfn b() { ".to_string(), None),
6119 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6120 (" }\nfn c() { ".to_string(), None),
6121 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6122 (" }\n".to_string(), None),
6123 ]
6124 );
6125 assert_eq!(
6126 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6127 [
6128 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6129 (" }\nfn c() { ".to_string(), None),
6130 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6131 ]
6132 );
6133 });
6134
6135 // Ensure overlapping diagnostics are highlighted correctly.
6136 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6137 lsp::PublishDiagnosticsParams {
6138 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6139 version: Some(open_notification.text_document.version),
6140 diagnostics: vec![
6141 lsp::Diagnostic {
6142 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6143 severity: Some(DiagnosticSeverity::ERROR),
6144 message: "undefined variable 'A'".to_string(),
6145 source: Some("disk".to_string()),
6146 ..Default::default()
6147 },
6148 lsp::Diagnostic {
6149 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6150 severity: Some(DiagnosticSeverity::WARNING),
6151 message: "unreachable statement".to_string(),
6152 source: Some("disk".to_string()),
6153 ..Default::default()
6154 },
6155 ],
6156 },
6157 );
6158
6159 buffer.next_notification(cx).await;
6160 buffer.read_with(cx, |buffer, _| {
6161 assert_eq!(
6162 buffer
6163 .snapshot()
6164 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6165 .collect::<Vec<_>>(),
6166 &[
6167 DiagnosticEntry {
6168 range: Point::new(2, 9)..Point::new(2, 12),
6169 diagnostic: Diagnostic {
6170 severity: DiagnosticSeverity::WARNING,
6171 message: "unreachable statement".to_string(),
6172 is_disk_based: true,
6173 group_id: 1,
6174 is_primary: true,
6175 ..Default::default()
6176 }
6177 },
6178 DiagnosticEntry {
6179 range: Point::new(2, 9)..Point::new(2, 10),
6180 diagnostic: Diagnostic {
6181 severity: DiagnosticSeverity::ERROR,
6182 message: "undefined variable 'A'".to_string(),
6183 is_disk_based: true,
6184 group_id: 0,
6185 is_primary: true,
6186 ..Default::default()
6187 },
6188 }
6189 ]
6190 );
6191 assert_eq!(
6192 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6193 [
6194 ("fn a() { ".to_string(), None),
6195 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6196 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6197 ("\n".to_string(), None),
6198 ]
6199 );
6200 assert_eq!(
6201 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6202 [
6203 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6204 ("\n".to_string(), None),
6205 ]
6206 );
6207 });
6208
6209 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6210 // changes since the last save.
6211 buffer.update(cx, |buffer, cx| {
6212 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6213 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6214 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6215 });
6216 let change_notification_2 = fake_server
6217 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6218 .await;
6219 assert!(
6220 change_notification_2.text_document.version
6221 > change_notification_1.text_document.version
6222 );
6223
// Handle diagnostics whose entries are reported out of order.
6225 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6226 lsp::PublishDiagnosticsParams {
6227 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6228 version: Some(change_notification_2.text_document.version),
6229 diagnostics: vec![
6230 lsp::Diagnostic {
6231 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6232 severity: Some(DiagnosticSeverity::ERROR),
6233 message: "undefined variable 'BB'".to_string(),
6234 source: Some("disk".to_string()),
6235 ..Default::default()
6236 },
6237 lsp::Diagnostic {
6238 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6239 severity: Some(DiagnosticSeverity::WARNING),
6240 message: "undefined variable 'A'".to_string(),
6241 source: Some("disk".to_string()),
6242 ..Default::default()
6243 },
6244 ],
6245 },
6246 );
6247
6248 buffer.next_notification(cx).await;
6249 buffer.read_with(cx, |buffer, _| {
6250 assert_eq!(
6251 buffer
6252 .snapshot()
6253 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6254 .collect::<Vec<_>>(),
6255 &[
6256 DiagnosticEntry {
6257 range: Point::new(2, 21)..Point::new(2, 22),
6258 diagnostic: Diagnostic {
6259 severity: DiagnosticSeverity::WARNING,
6260 message: "undefined variable 'A'".to_string(),
6261 is_disk_based: true,
6262 group_id: 1,
6263 is_primary: true,
6264 ..Default::default()
6265 }
6266 },
6267 DiagnosticEntry {
6268 range: Point::new(3, 9)..Point::new(3, 14),
6269 diagnostic: Diagnostic {
6270 severity: DiagnosticSeverity::ERROR,
6271 message: "undefined variable 'BB'".to_string(),
6272 is_disk_based: true,
6273 group_id: 0,
6274 is_primary: true,
6275 ..Default::default()
6276 },
6277 }
6278 ]
6279 );
6280 });
6281 }
6282
6283 #[gpui::test]
6284 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6285 cx.foreground().forbid_parking();
6286
6287 let text = concat!(
6288 "let one = ;\n", //
6289 "let two = \n",
6290 "let three = 3;\n",
6291 );
6292
6293 let fs = FakeFs::new(cx.background());
6294 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6295
6296 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6297 let buffer = project
6298 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6299 .await
6300 .unwrap();
6301
6302 project.update(cx, |project, cx| {
6303 project
6304 .update_buffer_diagnostics(
6305 &buffer,
6306 vec![
6307 DiagnosticEntry {
6308 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6309 diagnostic: Diagnostic {
6310 severity: DiagnosticSeverity::ERROR,
6311 message: "syntax error 1".to_string(),
6312 ..Default::default()
6313 },
6314 },
6315 DiagnosticEntry {
6316 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6317 diagnostic: Diagnostic {
6318 severity: DiagnosticSeverity::ERROR,
6319 message: "syntax error 2".to_string(),
6320 ..Default::default()
6321 },
6322 },
6323 ],
6324 None,
6325 cx,
6326 )
6327 .unwrap();
6328 });
6329
6330 // An empty range is extended forward to include the following character.
6331 // At the end of a line, an empty range is extended backward to include
6332 // the preceding character.
6333 buffer.read_with(cx, |buffer, _| {
6334 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6335 assert_eq!(
6336 chunks
6337 .iter()
6338 .map(|(s, d)| (s.as_str(), *d))
6339 .collect::<Vec<_>>(),
6340 &[
6341 ("let one = ", None),
6342 (";", Some(DiagnosticSeverity::ERROR)),
6343 ("\nlet two =", None),
6344 (" ", Some(DiagnosticSeverity::ERROR)),
6345 ("\nlet three = 3;\n", None)
6346 ]
6347 );
6348 });
6349 }
6350
6351 #[gpui::test]
6352 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6353 cx.foreground().forbid_parking();
6354
6355 let mut language = Language::new(
6356 LanguageConfig {
6357 name: "Rust".into(),
6358 path_suffixes: vec!["rs".to_string()],
6359 ..Default::default()
6360 },
6361 Some(tree_sitter_rust::language()),
6362 );
6363 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6364
6365 let text = "
6366 fn a() {
6367 f1();
6368 }
6369 fn b() {
6370 f2();
6371 }
6372 fn c() {
6373 f3();
6374 }
6375 "
6376 .unindent();
6377
6378 let fs = FakeFs::new(cx.background());
6379 fs.insert_tree(
6380 "/dir",
6381 json!({
6382 "a.rs": text.clone(),
6383 }),
6384 )
6385 .await;
6386
6387 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6388 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6389 let buffer = project
6390 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6391 .await
6392 .unwrap();
6393
6394 let mut fake_server = fake_servers.next().await.unwrap();
6395 let lsp_document_version = fake_server
6396 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6397 .await
6398 .text_document
6399 .version;
6400
6401 // Simulate editing the buffer after the language server computes some edits.
6402 buffer.update(cx, |buffer, cx| {
6403 buffer.edit(
6404 [(
6405 Point::new(0, 0)..Point::new(0, 0),
6406 "// above first function\n",
6407 )],
6408 cx,
6409 );
6410 buffer.edit(
6411 [(
6412 Point::new(2, 0)..Point::new(2, 0),
6413 " // inside first function\n",
6414 )],
6415 cx,
6416 );
6417 buffer.edit(
6418 [(
6419 Point::new(6, 4)..Point::new(6, 4),
6420 "// inside second function ",
6421 )],
6422 cx,
6423 );
6424
6425 assert_eq!(
6426 buffer.text(),
6427 "
6428 // above first function
6429 fn a() {
6430 // inside first function
6431 f1();
6432 }
6433 fn b() {
6434 // inside second function f2();
6435 }
6436 fn c() {
6437 f3();
6438 }
6439 "
6440 .unindent()
6441 );
6442 });
6443
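// Interpret edits that the server computed against the older document version; they
// should be adjusted to account for the buffer edits made since that version.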
6444 let edits = project
6445 .update(cx, |project, cx| {
6446 project.edits_from_lsp(
6447 &buffer,
6448 vec![
6449 // replace body of first function
6450 lsp::TextEdit {
6451 range: lsp::Range::new(
6452 lsp::Position::new(0, 0),
6453 lsp::Position::new(3, 0),
6454 ),
6455 new_text: "
6456 fn a() {
6457 f10();
6458 }
6459 "
6460 .unindent(),
6461 },
6462 // edit inside second function
6463 lsp::TextEdit {
6464 range: lsp::Range::new(
6465 lsp::Position::new(4, 6),
6466 lsp::Position::new(4, 6),
6467 ),
6468 new_text: "00".into(),
6469 },
6470 // edit inside third function via two distinct edits
6471 lsp::TextEdit {
6472 range: lsp::Range::new(
6473 lsp::Position::new(7, 5),
6474 lsp::Position::new(7, 5),
6475 ),
6476 new_text: "4000".into(),
6477 },
6478 lsp::TextEdit {
6479 range: lsp::Range::new(
6480 lsp::Position::new(7, 5),
6481 lsp::Position::new(7, 6),
6482 ),
6483 new_text: "".into(),
6484 },
6485 ],
6486 Some(lsp_document_version),
6487 cx,
6488 )
6489 })
6490 .await
6491 .unwrap();
6492
6493 buffer.update(cx, |buffer, cx| {
6494 for (range, new_text) in edits {
6495 buffer.edit([(range, new_text)], cx);
6496 }
6497 assert_eq!(
6498 buffer.text(),
6499 "
6500 // above first function
6501 fn a() {
6502 // inside first function
6503 f10();
6504 }
6505 fn b() {
6506 // inside second function f200();
6507 }
6508 fn c() {
6509 f4000();
6510 }
6511 "
6512 .unindent()
6513 );
6514 });
6515 }
6516
6517 #[gpui::test]
6518 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6519 cx.foreground().forbid_parking();
6520
6521 let text = "
6522 use a::b;
6523 use a::c;
6524
6525 fn f() {
6526 b();
6527 c();
6528 }
6529 "
6530 .unindent();
6531
6532 let fs = FakeFs::new(cx.background());
6533 fs.insert_tree(
6534 "/dir",
6535 json!({
6536 "a.rs": text.clone(),
6537 }),
6538 )
6539 .await;
6540
6541 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6542 let buffer = project
6543 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6544 .await
6545 .unwrap();
6546
6547 // Simulate the language server sending us a small edit in the form of a very large diff.
6548 // Rust-analyzer does this when performing a merge-imports code action.
6549 let edits = project
6550 .update(cx, |project, cx| {
6551 project.edits_from_lsp(
6552 &buffer,
6553 [
6554 // Replace the first use statement without editing the semicolon.
6555 lsp::TextEdit {
6556 range: lsp::Range::new(
6557 lsp::Position::new(0, 4),
6558 lsp::Position::new(0, 8),
6559 ),
6560 new_text: "a::{b, c}".into(),
6561 },
6562 // Reinsert the remainder of the file between the semicolon and the final
6563 // newline of the file.
6564 lsp::TextEdit {
6565 range: lsp::Range::new(
6566 lsp::Position::new(0, 9),
6567 lsp::Position::new(0, 9),
6568 ),
6569 new_text: "\n\n".into(),
6570 },
6571 lsp::TextEdit {
6572 range: lsp::Range::new(
6573 lsp::Position::new(0, 9),
6574 lsp::Position::new(0, 9),
6575 ),
6576 new_text: "
6577 fn f() {
6578 b();
6579 c();
6580 }"
6581 .unindent(),
6582 },
6583 // Delete everything after the first newline of the file.
6584 lsp::TextEdit {
6585 range: lsp::Range::new(
6586 lsp::Position::new(1, 0),
6587 lsp::Position::new(7, 0),
6588 ),
6589 new_text: "".into(),
6590 },
6591 ],
6592 None,
6593 cx,
6594 )
6595 })
6596 .await
6597 .unwrap();
6598
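// The four overlapping LSP edits should be minimized down to two edits: the rewritten
// import and the removal of the now-redundant `use a::c;` line.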
6599 buffer.update(cx, |buffer, cx| {
6600 let edits = edits
6601 .into_iter()
6602 .map(|(range, text)| {
6603 (
6604 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6605 text,
6606 )
6607 })
6608 .collect::<Vec<_>>();
6609
6610 assert_eq!(
6611 edits,
6612 [
6613 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6614 (Point::new(1, 0)..Point::new(2, 0), "".into())
6615 ]
6616 );
6617
6618 for (range, new_text) in edits {
6619 buffer.edit([(range, new_text)], cx);
6620 }
6621 assert_eq!(
6622 buffer.text(),
6623 "
6624 use a::{b, c};
6625
6626 fn f() {
6627 b();
6628 c();
6629 }
6630 "
6631 .unindent()
6632 );
6633 });
6634 }
6635
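// Collects the buffer's chunks over `range`, coalescing adjacent chunks that share the
// same diagnostic severity, and returns them as (text, severity) pairs. For example, a
// buffer containing "let a = 1;" with an error on "a" yields
// [("let ", None), ("a", Some(ERROR)), (" = 1;", None)].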
6636 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6637 buffer: &Buffer,
6638 range: Range<T>,
6639 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6640 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6641 for chunk in buffer.snapshot().chunks(range, true) {
6642 if chunks.last().map_or(false, |prev_chunk| {
6643 prev_chunk.1 == chunk.diagnostic_severity
6644 }) {
6645 chunks.last_mut().unwrap().0.push_str(chunk.text);
6646 } else {
6647 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6648 }
6649 }
6650 chunks
6651 }
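
// A minimal illustrative sketch, not used by the tests above: a helper that asserts a
// buffer currently has no diagnostics. The name `assert_no_diagnostics` is made up here,
// and the body only reuses calls that already appear elsewhere in this file.
#[allow(dead_code)]
fn assert_no_diagnostics(buffer: &ModelHandle<Buffer>, cx: &mut gpui::TestAppContext) {
    buffer.read_with(cx, |buffer, _| {
        // Count all diagnostic entries in the buffer and expect none.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });
}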
6652
6653 #[gpui::test]
6654 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6655 let dir = temp_tree(json!({
6656 "root": {
6657 "dir1": {},
6658 "dir2": {
6659 "dir3": {}
6660 }
6661 }
6662 }));
6663
6664 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6665 let cancel_flag = Default::default();
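// The worktree contains only directories, so matching paths against it should yield no results.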
6666 let results = project
6667 .read_with(cx, |project, cx| {
6668 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6669 })
6670 .await;
6671
6672 assert!(results.is_empty());
6673 }
6674
6675 #[gpui::test(iterations = 10)]
6676 async fn test_definition(cx: &mut gpui::TestAppContext) {
6677 let mut language = Language::new(
6678 LanguageConfig {
6679 name: "Rust".into(),
6680 path_suffixes: vec!["rs".to_string()],
6681 ..Default::default()
6682 },
6683 Some(tree_sitter_rust::language()),
6684 );
6685 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6686
6687 let fs = FakeFs::new(cx.background());
6688 fs.insert_tree(
6689 "/dir",
6690 json!({
6691 "a.rs": "const fn a() { A }",
6692 "b.rs": "const y: i32 = crate::a()",
6693 }),
6694 )
6695 .await;
6696
6697 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6698 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6699
6700 let buffer = project
6701 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6702 .await
6703 .unwrap();
6704
6705 let fake_server = fake_servers.next().await.unwrap();
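// Respond to the definition request with a location in a file that is not yet part of
// the project.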
6706 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6707 let params = params.text_document_position_params;
6708 assert_eq!(
6709 params.text_document.uri.to_file_path().unwrap(),
6710 Path::new("/dir/b.rs"),
6711 );
6712 assert_eq!(params.position, lsp::Position::new(0, 22));
6713
6714 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6715 lsp::Location::new(
6716 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6717 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6718 ),
6719 )))
6720 });
6721
6722 let mut definitions = project
6723 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6724 .await
6725 .unwrap();
6726
6727 assert_eq!(definitions.len(), 1);
6728 let definition = definitions.pop().unwrap();
6729 cx.update(|cx| {
6730 let target_buffer = definition.buffer.read(cx);
6731 assert_eq!(
6732 target_buffer
6733 .file()
6734 .unwrap()
6735 .as_local()
6736 .unwrap()
6737 .abs_path(cx),
6738 Path::new("/dir/a.rs"),
6739 );
6740 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6741 assert_eq!(
6742 list_worktrees(&project, cx),
6743 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6744 );
6745
6746 drop(definition);
6747 });
6748 cx.read(|cx| {
6749 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6750 });
6751
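// Lists each worktree's absolute path along with whether that worktree is visible.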
6752 fn list_worktrees<'a>(
6753 project: &'a ModelHandle<Project>,
6754 cx: &'a AppContext,
6755 ) -> Vec<(&'a Path, bool)> {
6756 project
6757 .read(cx)
6758 .worktrees(cx)
6759 .map(|worktree| {
6760 let worktree = worktree.read(cx);
6761 (
6762 worktree.as_local().unwrap().abs_path().as_ref(),
6763 worktree.is_visible(),
6764 )
6765 })
6766 .collect::<Vec<_>>()
6767 }
6768 }
6769
6770 #[gpui::test]
6771 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6772 let mut language = Language::new(
6773 LanguageConfig {
6774 name: "TypeScript".into(),
6775 path_suffixes: vec!["ts".to_string()],
6776 ..Default::default()
6777 },
6778 Some(tree_sitter_typescript::language_typescript()),
6779 );
6780 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6781
6782 let fs = FakeFs::new(cx.background());
6783 fs.insert_tree(
6784 "/dir",
6785 json!({
6786 "a.ts": "",
6787 }),
6788 )
6789 .await;
6790
6791 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6792 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6793 let buffer = project
6794 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6795 .await
6796 .unwrap();
6797
6798 let fake_server = fake_language_servers.next().await.unwrap();
6799
6800 let text = "let a = b.fqn";
6801 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6802 let completions = project.update(cx, |project, cx| {
6803 project.completions(&buffer, text.len(), cx)
6804 });
6805
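// Reply with a completion item that has no explicit edit range; the project should infer
// the replaced range from the word preceding the cursor.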
6806 fake_server
6807 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6808 Ok(Some(lsp::CompletionResponse::Array(vec![
6809 lsp::CompletionItem {
6810 label: "fullyQualifiedName?".into(),
6811 insert_text: Some("fullyQualifiedName".into()),
6812 ..Default::default()
6813 },
6814 ])))
6815 })
6816 .next()
6817 .await;
6818 let completions = completions.await.unwrap();
6819 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6820 assert_eq!(completions.len(), 1);
6821 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6822 assert_eq!(
6823 completions[0].old_range.to_offset(&snapshot),
6824 text.len() - 3..text.len()
6825 );
6826 }
6827
6828 #[gpui::test(iterations = 10)]
6829 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6830 let mut language = Language::new(
6831 LanguageConfig {
6832 name: "TypeScript".into(),
6833 path_suffixes: vec!["ts".to_string()],
6834 ..Default::default()
6835 },
6836 None,
6837 );
6838 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6839
6840 let fs = FakeFs::new(cx.background());
6841 fs.insert_tree(
6842 "/dir",
6843 json!({
6844 "a.ts": "a",
6845 }),
6846 )
6847 .await;
6848
6849 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6850 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6851 let buffer = project
6852 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6853 .await
6854 .unwrap();
6855
6856 let fake_server = fake_language_servers.next().await.unwrap();
6857
// The language server returns code actions that contain commands, but no edits.
6859 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6860 fake_server
6861 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6862 Ok(Some(vec![
6863 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6864 title: "The code action".into(),
6865 command: Some(lsp::Command {
6866 title: "The command".into(),
6867 command: "_the/command".into(),
6868 arguments: Some(vec![json!("the-argument")]),
6869 }),
6870 ..Default::default()
6871 }),
6872 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6873 title: "two".into(),
6874 ..Default::default()
6875 }),
6876 ]))
6877 })
6878 .next()
6879 .await;
6880
6881 let action = actions.await.unwrap()[0].clone();
6882 let apply = project.update(cx, |project, cx| {
6883 project.apply_code_action(buffer.clone(), action, true, cx)
6884 });
6885
// Resolving the code action does not populate its edits. In the absence of
// edits, we must execute the given command.
6888 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6889 |action, _| async move { Ok(action) },
6890 );
6891
// While executing the command, the language server sends the editor
// a `workspace/applyEdit` request.
6894 fake_server
6895 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6896 let fake = fake_server.clone();
6897 move |params, _| {
6898 assert_eq!(params.command, "_the/command");
6899 let fake = fake.clone();
6900 async move {
6901 fake.server
6902 .request::<lsp::request::ApplyWorkspaceEdit>(
6903 lsp::ApplyWorkspaceEditParams {
6904 label: None,
6905 edit: lsp::WorkspaceEdit {
6906 changes: Some(
6907 [(
6908 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6909 vec![lsp::TextEdit {
6910 range: lsp::Range::new(
6911 lsp::Position::new(0, 0),
6912 lsp::Position::new(0, 0),
6913 ),
6914 new_text: "X".into(),
6915 }],
6916 )]
6917 .into_iter()
6918 .collect(),
6919 ),
6920 ..Default::default()
6921 },
6922 },
6923 )
6924 .await
6925 .unwrap();
6926 Ok(Some(json!(null)))
6927 }
6928 }
6929 })
6930 .next()
6931 .await;
6932
// Applying the code action returns a project transaction containing the edits
// sent by the language server in its `workspace/applyEdit` request.
6935 let transaction = apply.await.unwrap();
6936 assert!(transaction.0.contains_key(&buffer));
6937 buffer.update(cx, |buffer, cx| {
6938 assert_eq!(buffer.text(), "Xa");
6939 buffer.undo(cx);
6940 assert_eq!(buffer.text(), "a");
6941 });
6942 }
6943
6944 #[gpui::test]
6945 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6946 let fs = FakeFs::new(cx.background());
6947 fs.insert_tree(
6948 "/dir",
6949 json!({
6950 "file1": "the old contents",
6951 }),
6952 )
6953 .await;
6954
6955 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6956 let buffer = project
6957 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6958 .await
6959 .unwrap();
6960 buffer
6961 .update(cx, |buffer, cx| {
6962 assert_eq!(buffer.text(), "the old contents");
6963 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6964 buffer.save(cx)
6965 })
6966 .await
6967 .unwrap();
6968
6969 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6970 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6971 }
6972
6973 #[gpui::test]
6974 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6975 let fs = FakeFs::new(cx.background());
6976 fs.insert_tree(
6977 "/dir",
6978 json!({
6979 "file1": "the old contents",
6980 }),
6981 )
6982 .await;
6983
6984 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
6985 let buffer = project
6986 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6987 .await
6988 .unwrap();
6989 buffer
6990 .update(cx, |buffer, cx| {
6991 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6992 buffer.save(cx)
6993 })
6994 .await
6995 .unwrap();
6996
6997 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6998 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6999 }
7000
7001 #[gpui::test]
7002 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7003 let fs = FakeFs::new(cx.background());
7004 fs.insert_tree("/dir", json!({})).await;
7005
7006 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7007 let buffer = project.update(cx, |project, cx| {
7008 project.create_buffer("", None, cx).unwrap()
7009 });
7010 buffer.update(cx, |buffer, cx| {
7011 buffer.edit([(0..0, "abc")], cx);
7012 assert!(buffer.is_dirty());
7013 assert!(!buffer.has_conflict());
7014 });
7015 project
7016 .update(cx, |project, cx| {
7017 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7018 })
7019 .await
7020 .unwrap();
7021 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7022 buffer.read_with(cx, |buffer, cx| {
7023 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7024 assert!(!buffer.is_dirty());
7025 assert!(!buffer.has_conflict());
7026 });
7027
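// Opening the buffer's new path should return the same buffer that was just saved.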
7028 let opened_buffer = project
7029 .update(cx, |project, cx| {
7030 project.open_local_buffer("/dir/file1", cx)
7031 })
7032 .await
7033 .unwrap();
7034 assert_eq!(opened_buffer, buffer);
7035 }
7036
7037 #[gpui::test(retries = 5)]
7038 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7039 let dir = temp_tree(json!({
7040 "a": {
7041 "file1": "",
7042 "file2": "",
7043 "file3": "",
7044 },
7045 "b": {
7046 "c": {
7047 "file4": "",
7048 "file5": "",
7049 }
7050 }
7051 }));
7052
7053 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7054 let rpc = project.read_with(cx, |p, _| p.client.clone());
7055
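// Helpers for opening a buffer at a path within the temp tree and for looking up a
// worktree entry id by path.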
7056 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7057 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7058 async move { buffer.await.unwrap() }
7059 };
7060 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7061 project.read_with(cx, |project, cx| {
7062 let tree = project.worktrees(cx).next().unwrap();
7063 tree.read(cx)
7064 .entry_for_path(path)
7065 .expect(&format!("no entry for path {}", path))
7066 .id
7067 })
7068 };
7069
7070 let buffer2 = buffer_for_path("a/file2", cx).await;
7071 let buffer3 = buffer_for_path("a/file3", cx).await;
7072 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7073 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7074
7075 let file2_id = id_for_path("a/file2", &cx);
7076 let file3_id = id_for_path("a/file3", &cx);
7077 let file4_id = id_for_path("b/c/file4", &cx);
7078
7079 // Create a remote copy of this worktree.
7080 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7081 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7082 let (remote, load_task) = cx.update(|cx| {
7083 Worktree::remote(
7084 1,
7085 1,
7086 initial_snapshot.to_proto(&Default::default(), true),
7087 rpc.clone(),
7088 cx,
7089 )
7090 });
7092 load_task.await;
7093
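// None of the buffers have been edited yet, so they should all be clean.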
7094 cx.read(|cx| {
7095 assert!(!buffer2.read(cx).is_dirty());
7096 assert!(!buffer3.read(cx).is_dirty());
7097 assert!(!buffer4.read(cx).is_dirty());
7098 assert!(!buffer5.read(cx).is_dirty());
7099 });
7100
7101 // Rename and delete files and directories.
7102 tree.flush_fs_events(&cx).await;
7103 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7104 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7105 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7106 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7107 tree.flush_fs_events(&cx).await;
7108
7109 let expected_paths = vec![
7110 "a",
7111 "a/file1",
7112 "a/file2.new",
7113 "b",
7114 "d",
7115 "d/file3",
7116 "d/file4",
7117 ];
7118
7119 cx.read(|app| {
7120 assert_eq!(
7121 tree.read(app)
7122 .paths()
7123 .map(|p| p.to_str().unwrap())
7124 .collect::<Vec<_>>(),
7125 expected_paths
7126 );
7127
7128 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7129 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7130 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7131
7132 assert_eq!(
7133 buffer2.read(app).file().unwrap().path().as_ref(),
7134 Path::new("a/file2.new")
7135 );
7136 assert_eq!(
7137 buffer3.read(app).file().unwrap().path().as_ref(),
7138 Path::new("d/file3")
7139 );
7140 assert_eq!(
7141 buffer4.read(app).file().unwrap().path().as_ref(),
7142 Path::new("d/file4")
7143 );
7144 assert_eq!(
7145 buffer5.read(app).file().unwrap().path().as_ref(),
7146 Path::new("b/c/file5")
7147 );
7148
7149 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7150 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7151 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7152 assert!(buffer5.read(app).file().unwrap().is_deleted());
7153 });
7154
7155 // Update the remote worktree. Check that it becomes consistent with the
7156 // local worktree.
7157 remote.update(cx, |remote, cx| {
7158 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7159 &initial_snapshot,
7160 1,
7161 1,
7162 true,
7163 );
7164 remote
7165 .as_remote_mut()
7166 .unwrap()
7167 .snapshot
7168 .apply_remote_update(update_message)
7169 .unwrap();
7170
7171 assert_eq!(
7172 remote
7173 .paths()
7174 .map(|p| p.to_str().unwrap())
7175 .collect::<Vec<_>>(),
7176 expected_paths
7177 );
7178 });
7179 }
7180
7181 #[gpui::test]
7182 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7183 let fs = FakeFs::new(cx.background());
7184 fs.insert_tree(
7185 "/dir",
7186 json!({
7187 "a.txt": "a-contents",
7188 "b.txt": "b-contents",
7189 }),
7190 )
7191 .await;
7192
7193 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7194
7195 // Spawn multiple tasks to open paths, repeating some paths.
7196 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7197 (
7198 p.open_local_buffer("/dir/a.txt", cx),
7199 p.open_local_buffer("/dir/b.txt", cx),
7200 p.open_local_buffer("/dir/a.txt", cx),
7201 )
7202 });
7203
7204 let buffer_a_1 = buffer_a_1.await.unwrap();
7205 let buffer_a_2 = buffer_a_2.await.unwrap();
7206 let buffer_b = buffer_b.await.unwrap();
7207 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7208 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7209
7210 // There is only one buffer per path.
7211 let buffer_a_id = buffer_a_1.id();
7212 assert_eq!(buffer_a_2.id(), buffer_a_id);
7213
7214 // Open the same path again while it is still open.
7215 drop(buffer_a_1);
7216 let buffer_a_3 = project
7217 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7218 .await
7219 .unwrap();
7220
7221 // There's still only one buffer per path.
7222 assert_eq!(buffer_a_3.id(), buffer_a_id);
7223 }
7224
7225 #[gpui::test]
7226 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7227 let fs = FakeFs::new(cx.background());
7228 fs.insert_tree(
7229 "/dir",
7230 json!({
7231 "file1": "abc",
7232 "file2": "def",
7233 "file3": "ghi",
7234 }),
7235 )
7236 .await;
7237
7238 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7239
7240 let buffer1 = project
7241 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7242 .await
7243 .unwrap();
7244 let events = Rc::new(RefCell::new(Vec::new()));
7245
7246 // initially, the buffer isn't dirty.
7247 buffer1.update(cx, |buffer, cx| {
7248 cx.subscribe(&buffer1, {
7249 let events = events.clone();
7250 move |_, _, event, _| match event {
7251 BufferEvent::Operation(_) => {}
7252 _ => events.borrow_mut().push(event.clone()),
7253 }
7254 })
7255 .detach();
7256
7257 assert!(!buffer.is_dirty());
7258 assert!(events.borrow().is_empty());
7259
7260 buffer.edit([(1..2, "")], cx);
7261 });
7262
7263 // after the first edit, the buffer is dirty, and emits a dirtied event.
7264 buffer1.update(cx, |buffer, cx| {
7265 assert!(buffer.text() == "ac");
7266 assert!(buffer.is_dirty());
7267 assert_eq!(
7268 *events.borrow(),
7269 &[language::Event::Edited, language::Event::Dirtied]
7270 );
7271 events.borrow_mut().clear();
7272 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7273 });
7274
7275 // after saving, the buffer is not dirty, and emits a saved event.
7276 buffer1.update(cx, |buffer, cx| {
7277 assert!(!buffer.is_dirty());
7278 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7279 events.borrow_mut().clear();
7280
7281 buffer.edit([(1..1, "B")], cx);
7282 buffer.edit([(2..2, "D")], cx);
7283 });
7284
7285 // after editing again, the buffer is dirty, and emits another dirty event.
7286 buffer1.update(cx, |buffer, cx| {
7287 assert!(buffer.text() == "aBDc");
7288 assert!(buffer.is_dirty());
7289 assert_eq!(
7290 *events.borrow(),
7291 &[
7292 language::Event::Edited,
7293 language::Event::Dirtied,
7294 language::Event::Edited,
7295 ],
7296 );
7297 events.borrow_mut().clear();
7298
// TODO - currently, after restoring the buffer to its
// previously-saved state, the buffer is still considered dirty.
7301 buffer.edit([(1..3, "")], cx);
7302 assert!(buffer.text() == "ac");
7303 assert!(buffer.is_dirty());
7304 });
7305
7306 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7307
7308 // When a file is deleted, the buffer is considered dirty.
7309 let events = Rc::new(RefCell::new(Vec::new()));
7310 let buffer2 = project
7311 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7312 .await
7313 .unwrap();
7314 buffer2.update(cx, |_, cx| {
7315 cx.subscribe(&buffer2, {
7316 let events = events.clone();
7317 move |_, _, event, _| events.borrow_mut().push(event.clone())
7318 })
7319 .detach();
7320 });
7321
7322 fs.remove_file("/dir/file2".as_ref(), Default::default())
7323 .await
7324 .unwrap();
7325 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7326 assert_eq!(
7327 *events.borrow(),
7328 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7329 );
7330
// When an already-dirty file is deleted, we don't emit another Dirtied event.
7332 let events = Rc::new(RefCell::new(Vec::new()));
7333 let buffer3 = project
7334 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7335 .await
7336 .unwrap();
7337 buffer3.update(cx, |_, cx| {
7338 cx.subscribe(&buffer3, {
7339 let events = events.clone();
7340 move |_, _, event, _| events.borrow_mut().push(event.clone())
7341 })
7342 .detach();
7343 });
7344
7345 buffer3.update(cx, |buffer, cx| {
7346 buffer.edit([(0..0, "x")], cx);
7347 });
7348 events.borrow_mut().clear();
7349 fs.remove_file("/dir/file3".as_ref(), Default::default())
7350 .await
7351 .unwrap();
7352 buffer3
7353 .condition(&cx, |_, _| !events.borrow().is_empty())
7354 .await;
7355 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7356 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7357 }
7358
7359 #[gpui::test]
7360 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7361 let initial_contents = "aaa\nbbbbb\nc\n";
7362 let fs = FakeFs::new(cx.background());
7363 fs.insert_tree(
7364 "/dir",
7365 json!({
7366 "the-file": initial_contents,
7367 }),
7368 )
7369 .await;
7370 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7371 let buffer = project
7372 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7373 .await
7374 .unwrap();
7375
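// Place an anchor on each of the file's three lines so we can verify how they are
// relocated when the file is reloaded from disk.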
7376 let anchors = (0..3)
7377 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7378 .collect::<Vec<_>>();
7379
7380 // Change the file on disk, adding two new lines of text, and removing
7381 // one line.
7382 buffer.read_with(cx, |buffer, _| {
7383 assert!(!buffer.is_dirty());
7384 assert!(!buffer.has_conflict());
7385 });
7386 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7387 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7388 .await
7389 .unwrap();
7390
7391 // Because the buffer was not modified, it is reloaded from disk. Its
7392 // contents are edited according to the diff between the old and new
7393 // file contents.
7394 buffer
7395 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7396 .await;
7397
7398 buffer.update(cx, |buffer, _| {
7399 assert_eq!(buffer.text(), new_contents);
7400 assert!(!buffer.is_dirty());
7401 assert!(!buffer.has_conflict());
7402
7403 let anchor_positions = anchors
7404 .iter()
7405 .map(|anchor| anchor.to_point(&*buffer))
7406 .collect::<Vec<_>>();
7407 assert_eq!(
7408 anchor_positions,
7409 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7410 );
7411 });
7412
7413 // Modify the buffer
7414 buffer.update(cx, |buffer, cx| {
7415 buffer.edit([(0..0, " ")], cx);
7416 assert!(buffer.is_dirty());
7417 assert!(!buffer.has_conflict());
7418 });
7419
7420 // Change the file on disk again, adding blank lines to the beginning.
7421 fs.save(
7422 "/dir/the-file".as_ref(),
7423 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7424 )
7425 .await
7426 .unwrap();
7427
7428 // Because the buffer is modified, it doesn't reload from disk, but is
7429 // marked as having a conflict.
7430 buffer
7431 .condition(&cx, |buffer, _| buffer.has_conflict())
7432 .await;
7433 }
7434
7435 #[gpui::test]
7436 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7437 cx.foreground().forbid_parking();
7438
7439 let fs = FakeFs::new(cx.background());
7440 fs.insert_tree(
7441 "/the-dir",
7442 json!({
7443 "a.rs": "
7444 fn foo(mut v: Vec<usize>) {
7445 for x in &v {
7446 v.push(1);
7447 }
7448 }
7449 "
7450 .unindent(),
7451 }),
7452 )
7453 .await;
7454
7455 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7456 let buffer = project
7457 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7458 .await
7459 .unwrap();
7460
7461 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
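// Publish two diagnostic groups: a warning ("error 1") with one related hint, and an
// error ("error 2") with two related hints. The hints are also reported as standalone
// HINT diagnostics that refer back to their primary diagnostic.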
7462 let message = lsp::PublishDiagnosticsParams {
7463 uri: buffer_uri.clone(),
7464 diagnostics: vec![
7465 lsp::Diagnostic {
7466 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7467 severity: Some(DiagnosticSeverity::WARNING),
7468 message: "error 1".to_string(),
7469 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7470 location: lsp::Location {
7471 uri: buffer_uri.clone(),
7472 range: lsp::Range::new(
7473 lsp::Position::new(1, 8),
7474 lsp::Position::new(1, 9),
7475 ),
7476 },
7477 message: "error 1 hint 1".to_string(),
7478 }]),
7479 ..Default::default()
7480 },
7481 lsp::Diagnostic {
7482 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7483 severity: Some(DiagnosticSeverity::HINT),
7484 message: "error 1 hint 1".to_string(),
7485 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7486 location: lsp::Location {
7487 uri: buffer_uri.clone(),
7488 range: lsp::Range::new(
7489 lsp::Position::new(1, 8),
7490 lsp::Position::new(1, 9),
7491 ),
7492 },
7493 message: "original diagnostic".to_string(),
7494 }]),
7495 ..Default::default()
7496 },
7497 lsp::Diagnostic {
7498 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7499 severity: Some(DiagnosticSeverity::ERROR),
7500 message: "error 2".to_string(),
7501 related_information: Some(vec![
7502 lsp::DiagnosticRelatedInformation {
7503 location: lsp::Location {
7504 uri: buffer_uri.clone(),
7505 range: lsp::Range::new(
7506 lsp::Position::new(1, 13),
7507 lsp::Position::new(1, 15),
7508 ),
7509 },
7510 message: "error 2 hint 1".to_string(),
7511 },
7512 lsp::DiagnosticRelatedInformation {
7513 location: lsp::Location {
7514 uri: buffer_uri.clone(),
7515 range: lsp::Range::new(
7516 lsp::Position::new(1, 13),
7517 lsp::Position::new(1, 15),
7518 ),
7519 },
7520 message: "error 2 hint 2".to_string(),
7521 },
7522 ]),
7523 ..Default::default()
7524 },
7525 lsp::Diagnostic {
7526 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7527 severity: Some(DiagnosticSeverity::HINT),
7528 message: "error 2 hint 1".to_string(),
7529 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7530 location: lsp::Location {
7531 uri: buffer_uri.clone(),
7532 range: lsp::Range::new(
7533 lsp::Position::new(2, 8),
7534 lsp::Position::new(2, 17),
7535 ),
7536 },
7537 message: "original diagnostic".to_string(),
7538 }]),
7539 ..Default::default()
7540 },
7541 lsp::Diagnostic {
7542 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7543 severity: Some(DiagnosticSeverity::HINT),
7544 message: "error 2 hint 2".to_string(),
7545 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7546 location: lsp::Location {
7547 uri: buffer_uri.clone(),
7548 range: lsp::Range::new(
7549 lsp::Position::new(2, 8),
7550 lsp::Position::new(2, 17),
7551 ),
7552 },
7553 message: "original diagnostic".to_string(),
7554 }]),
7555 ..Default::default()
7556 },
7557 ],
7558 version: None,
7559 };
7560
7561 project
7562 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7563 .unwrap();
7564 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7565
7566 assert_eq!(
7567 buffer
7568 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7569 .collect::<Vec<_>>(),
7570 &[
7571 DiagnosticEntry {
7572 range: Point::new(1, 8)..Point::new(1, 9),
7573 diagnostic: Diagnostic {
7574 severity: DiagnosticSeverity::WARNING,
7575 message: "error 1".to_string(),
7576 group_id: 0,
7577 is_primary: true,
7578 ..Default::default()
7579 }
7580 },
7581 DiagnosticEntry {
7582 range: Point::new(1, 8)..Point::new(1, 9),
7583 diagnostic: Diagnostic {
7584 severity: DiagnosticSeverity::HINT,
7585 message: "error 1 hint 1".to_string(),
7586 group_id: 0,
7587 is_primary: false,
7588 ..Default::default()
7589 }
7590 },
7591 DiagnosticEntry {
7592 range: Point::new(1, 13)..Point::new(1, 15),
7593 diagnostic: Diagnostic {
7594 severity: DiagnosticSeverity::HINT,
7595 message: "error 2 hint 1".to_string(),
7596 group_id: 1,
7597 is_primary: false,
7598 ..Default::default()
7599 }
7600 },
7601 DiagnosticEntry {
7602 range: Point::new(1, 13)..Point::new(1, 15),
7603 diagnostic: Diagnostic {
7604 severity: DiagnosticSeverity::HINT,
7605 message: "error 2 hint 2".to_string(),
7606 group_id: 1,
7607 is_primary: false,
7608 ..Default::default()
7609 }
7610 },
7611 DiagnosticEntry {
7612 range: Point::new(2, 8)..Point::new(2, 17),
7613 diagnostic: Diagnostic {
7614 severity: DiagnosticSeverity::ERROR,
7615 message: "error 2".to_string(),
7616 group_id: 1,
7617 is_primary: true,
7618 ..Default::default()
7619 }
7620 }
7621 ]
7622 );
7623
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

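    // Renaming a symbol involves two round trips to the language server: a
    // `textDocument/prepareRename` request to resolve the range being renamed,
    // followed by a `textDocument/rename` request whose workspace edit may touch
    // buffers other than the one the rename started in.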
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

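        // Perform the rename. The fake server responds with a workspace edit that
        // touches both one.rs and two.rs; the project applies it and returns one
        // transaction entry per affected buffer.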
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

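    // Project-wide text search covers files on disk as well as open buffers, so
    // unsaved edits are reflected in the results.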
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

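        // The edits above have not been saved, yet the search now reports the new
        // occurrences in four.rs because the open buffer is searched in memory.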
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

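        // Helper that flattens search results into path strings and offset ranges
        // so they are easy to assert against.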
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}