pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
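
// A minimal sketch of an `Item` implementor, assuming a hypothetical `TextFile`
// model that remembers the project entry it was opened from (illustrative only,
// not part of this crate):
//
//     struct TextFile { entry_id: Option<ProjectEntryId> }
//     impl Entity for TextFile { type Event = (); }
//     impl Item for TextFile {
//         fn entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
//             self.entry_id
//         }
//     }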

pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
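
// A `ProjectPath` pairs a worktree with a path relative to that worktree's root.
// Sketch of building one from an open worktree (assumes `worktree` is a
// `ModelHandle<Worktree>` and `cx` is an `AppContext`; illustrative only):
//
//     let project_path = ProjectPath {
//         worktree_id: worktree.read(cx).id(),
//         path: Arc::from(Path::new("src/main.rs")),
//     };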

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}
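
// Sketch of how a summary is typically produced and shipped over the wire,
// assuming `entries` holds `DiagnosticEntry` values gathered from a buffer and
// the path is worktree-relative (illustrative only):
//
//     let summary = DiagnosticSummary::new(&entries);
//     if !summary.is_empty() {
//         let message = summary.to_proto(Path::new("src/lib.rs"));
//     }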

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}
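
// Entry ids are plain counters locally and travel as `u64` over RPC; a sketch of
// the round trip (assumes `counter` is the project's `next_entry_id`):
//
//     let id = ProjectEntryId::new(&counter);
//     let wire_id = id.to_proto();
//     assert_eq!(ProjectEntryId::from_proto(wire_id), id);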

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
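
    // `init` only wires up RPC message handlers; a sketch of the expected call
    // site, assuming it runs once during application startup before any project
    // is created (illustrative only):
    //
    //     let client = Client::new(http_client);
    //     Project::init(&client);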

    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

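    // Sketch of constructing a local project and registering a worktree for it,
    // assuming `cx` is a `&mut MutableAppContext` and the other handles already
    // exist (see `Project::test` below for a concrete setup):
    //
    //     let project = Project::local(client, user_store, languages, fs, cx);
    //     project.update(cx, |project, cx| {
    //         project.find_or_create_local_worktree("/path/to/repo", true, cx)
    //     });
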
365 pub async fn remote(
366 remote_id: u64,
367 client: Arc<Client>,
368 user_store: ModelHandle<UserStore>,
369 languages: Arc<LanguageRegistry>,
370 fs: Arc<dyn Fs>,
371 cx: &mut AsyncAppContext,
372 ) -> Result<ModelHandle<Self>, JoinProjectError> {
373 client.authenticate_and_connect(true, &cx).await?;
374
375 let response = client
376 .request(proto::JoinProject {
377 project_id: remote_id,
378 })
379 .await?;
380
381 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
382 proto::join_project_response::Variant::Accept(response) => response,
383 proto::join_project_response::Variant::Decline(decline) => {
384 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
385 Some(proto::join_project_response::decline::Reason::Declined) => {
386 Err(JoinProjectError::HostDeclined)?
387 }
388 Some(proto::join_project_response::decline::Reason::Closed) => {
389 Err(JoinProjectError::HostClosedProject)?
390 }
391 Some(proto::join_project_response::decline::Reason::WentOffline) => {
392 Err(JoinProjectError::HostWentOffline)?
393 }
394 None => Err(anyhow!("missing decline reason"))?,
395 }
396 }
397 };
398
399 let replica_id = response.replica_id as ReplicaId;
400
401 let mut worktrees = Vec::new();
402 for worktree in response.worktrees {
403 let (worktree, load_task) = cx
404 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
405 worktrees.push(worktree);
406 load_task.detach();
407 }
408
409 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
410 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
411 let mut this = Self {
412 worktrees: Vec::new(),
413 loading_buffers: Default::default(),
414 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
415 shared_buffers: Default::default(),
416 loading_local_worktrees: Default::default(),
417 active_entry: None,
418 collaborators: Default::default(),
419 languages,
420 user_store: user_store.clone(),
421 fs,
422 next_entry_id: Default::default(),
423 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
424 client: client.clone(),
425 client_state: ProjectClientState::Remote {
426 sharing_has_stopped: false,
427 remote_id,
428 replica_id,
429 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
430 async move {
431 let mut status = client.status();
432 let is_connected =
433 status.next().await.map_or(false, |s| s.is_connected());
                        // Even if we're initially connected, any future change in status means we were momentarily disconnected.
435 if !is_connected || status.next().await.is_some() {
436 if let Some(this) = this.upgrade(&cx) {
437 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
438 }
439 }
440 Ok(())
441 }
442 .log_err()
443 }),
444 },
445 language_servers: Default::default(),
446 started_language_servers: Default::default(),
447 language_server_settings: Default::default(),
448 language_server_statuses: response
449 .language_servers
450 .into_iter()
451 .map(|server| {
452 (
453 server.id as usize,
454 LanguageServerStatus {
455 name: server.name,
456 pending_work: Default::default(),
457 pending_diagnostic_updates: 0,
458 },
459 )
460 })
461 .collect(),
462 last_workspace_edits_by_language_server: Default::default(),
463 next_language_server_id: 0,
464 opened_buffers: Default::default(),
465 buffer_snapshots: Default::default(),
466 nonce: StdRng::from_entropy().gen(),
467 };
468 for worktree in worktrees {
469 this.add_worktree(&worktree, cx);
470 }
471 this
472 });
473
474 let user_ids = response
475 .collaborators
476 .iter()
477 .map(|peer| peer.user_id)
478 .collect();
479 user_store
480 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
481 .await?;
482 let mut collaborators = HashMap::default();
483 for message in response.collaborators {
484 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
485 collaborators.insert(collaborator.peer_id, collaborator);
486 }
487
488 this.update(cx, |this, _| {
489 this.collaborators = collaborators;
490 });
491
492 Ok(this)
493 }
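
    // Joining someone else's project goes through `Project::remote`; a sketch,
    // assuming an async context and a known `project_id` (illustrative only):
    //
    //     let project = Project::remote(project_id, client, user_store, languages, fs, &mut cx)
    //         .await?;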
494
495 #[cfg(any(test, feature = "test-support"))]
496 pub async fn test(
497 fs: Arc<dyn Fs>,
498 root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
499 cx: &mut gpui::TestAppContext,
500 ) -> ModelHandle<Project> {
501 let languages = Arc::new(LanguageRegistry::test());
502 let http_client = client::test::FakeHttpClient::with_404_response();
503 let client = client::Client::new(http_client.clone());
504 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
505 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
506 for path in root_paths {
507 let (tree, _) = project
508 .update(cx, |project, cx| {
509 project.find_or_create_local_worktree(path, true, cx)
510 })
511 .await
512 .unwrap();
513 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
514 .await;
515 }
516 project
517 }
518
519 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
520 self.opened_buffers
521 .get(&remote_id)
522 .and_then(|buffer| buffer.upgrade(cx))
523 }
524
525 pub fn languages(&self) -> &Arc<LanguageRegistry> {
526 &self.languages
527 }
528
529 #[cfg(any(test, feature = "test-support"))]
530 pub fn check_invariants(&self, cx: &AppContext) {
531 if self.is_local() {
532 let mut worktree_root_paths = HashMap::default();
533 for worktree in self.worktrees(cx) {
534 let worktree = worktree.read(cx);
535 let abs_path = worktree.as_local().unwrap().abs_path().clone();
536 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
537 assert_eq!(
538 prev_worktree_id,
539 None,
540 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
541 abs_path,
542 worktree.id(),
543 prev_worktree_id
544 )
545 }
546 } else {
547 let replica_id = self.replica_id();
548 for buffer in self.opened_buffers.values() {
549 if let Some(buffer) = buffer.upgrade(cx) {
550 let buffer = buffer.read(cx);
551 assert_eq!(
552 buffer.deferred_ops_len(),
553 0,
554 "replica {}, buffer {} has deferred operations",
555 replica_id,
556 buffer.remote_id()
557 );
558 }
559 }
560 }
561 }
562
563 #[cfg(any(test, feature = "test-support"))]
564 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
565 let path = path.into();
566 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
567 self.opened_buffers.iter().any(|(_, buffer)| {
568 if let Some(buffer) = buffer.upgrade(cx) {
569 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
570 if file.worktree == worktree && file.path() == &path.path {
571 return true;
572 }
573 }
574 }
575 false
576 })
577 } else {
578 false
579 }
580 }
581
582 pub fn fs(&self) -> &Arc<dyn Fs> {
583 &self.fs
584 }
585
586 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
587 self.unshared(cx);
588 for worktree in &self.worktrees {
589 if let Some(worktree) = worktree.upgrade(cx) {
590 worktree.update(cx, |worktree, _| {
591 worktree.as_local_mut().unwrap().unregister();
592 });
593 }
594 }
595
596 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
597 *remote_id_tx.borrow_mut() = None;
598 }
599
600 self.subscriptions.clear();
601 }
602
603 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
604 self.unregister(cx);
605
606 let response = self.client.request(proto::RegisterProject {});
607 cx.spawn(|this, mut cx| async move {
608 let remote_id = response.await?.project_id;
609
610 let mut registrations = Vec::new();
611 this.update(&mut cx, |this, cx| {
612 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
613 *remote_id_tx.borrow_mut() = Some(remote_id);
614 }
615
616 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
617
618 this.subscriptions
619 .push(this.client.add_model_for_remote_entity(remote_id, cx));
620
621 for worktree in &this.worktrees {
622 if let Some(worktree) = worktree.upgrade(cx) {
623 registrations.push(worktree.update(cx, |worktree, cx| {
624 let worktree = worktree.as_local_mut().unwrap();
625 worktree.register(remote_id, cx)
626 }));
627 }
628 }
629 });
630
631 futures::future::try_join_all(registrations).await?;
632 Ok(())
633 })
634 }
635
636 pub fn remote_id(&self) -> Option<u64> {
637 match &self.client_state {
638 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
639 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
640 }
641 }
642
643 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
644 let mut id = None;
645 let mut watch = None;
646 match &self.client_state {
647 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
648 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
649 }
650
651 async move {
652 if let Some(id) = id {
653 return id;
654 }
655 let mut watch = watch.unwrap();
656 loop {
657 let id = *watch.borrow();
658 if let Some(id) = id {
659 return id;
660 }
661 watch.next().await;
662 }
663 }
664 }
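
    // `next_remote_id` resolves once the project has been registered with the
    // server; a hedged usage sketch from an async task:
    //
    //     let remote_id = project.read_with(&cx, |project, _| project.next_remote_id());
    //     let remote_id = remote_id.await;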
665
666 pub fn replica_id(&self) -> ReplicaId {
667 match &self.client_state {
668 ProjectClientState::Local { .. } => 0,
669 ProjectClientState::Remote { replica_id, .. } => *replica_id,
670 }
671 }
672
673 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
674 &self.collaborators
675 }
676
677 pub fn worktrees<'a>(
678 &'a self,
679 cx: &'a AppContext,
680 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
681 self.worktrees
682 .iter()
683 .filter_map(move |worktree| worktree.upgrade(cx))
684 }
685
686 pub fn visible_worktrees<'a>(
687 &'a self,
688 cx: &'a AppContext,
689 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
690 self.worktrees.iter().filter_map(|worktree| {
691 worktree.upgrade(cx).and_then(|worktree| {
692 if worktree.read(cx).is_visible() {
693 Some(worktree)
694 } else {
695 None
696 }
697 })
698 })
699 }
700
701 pub fn worktree_for_id(
702 &self,
703 id: WorktreeId,
704 cx: &AppContext,
705 ) -> Option<ModelHandle<Worktree>> {
706 self.worktrees(cx)
707 .find(|worktree| worktree.read(cx).id() == id)
708 }
709
710 pub fn worktree_for_entry(
711 &self,
712 entry_id: ProjectEntryId,
713 cx: &AppContext,
714 ) -> Option<ModelHandle<Worktree>> {
715 self.worktrees(cx)
716 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
717 }
718
719 pub fn worktree_id_for_entry(
720 &self,
721 entry_id: ProjectEntryId,
722 cx: &AppContext,
723 ) -> Option<WorktreeId> {
724 self.worktree_for_entry(entry_id, cx)
725 .map(|worktree| worktree.read(cx).id())
726 }
727
728 pub fn create_entry(
729 &mut self,
730 project_path: impl Into<ProjectPath>,
731 is_directory: bool,
732 cx: &mut ModelContext<Self>,
733 ) -> Option<Task<Result<Entry>>> {
734 let project_path = project_path.into();
735 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
736 if self.is_local() {
737 Some(worktree.update(cx, |worktree, cx| {
738 worktree
739 .as_local_mut()
740 .unwrap()
741 .create_entry(project_path.path, is_directory, cx)
742 }))
743 } else {
744 let client = self.client.clone();
745 let project_id = self.remote_id().unwrap();
746 Some(cx.spawn_weak(|_, mut cx| async move {
747 let response = client
748 .request(proto::CreateProjectEntry {
749 worktree_id: project_path.worktree_id.to_proto(),
750 project_id,
751 path: project_path.path.as_os_str().as_bytes().to_vec(),
752 is_directory,
753 })
754 .await?;
755 let entry = response
756 .entry
757 .ok_or_else(|| anyhow!("missing entry in response"))?;
758 worktree
759 .update(&mut cx, |worktree, cx| {
760 worktree.as_remote().unwrap().insert_entry(
761 entry,
762 response.worktree_scan_id as usize,
763 cx,
764 )
765 })
766 .await
767 }))
768 }
769 }
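
    // Sketch of creating a new file through the project, assuming `project_path`
    // names a worktree that already belongs to this project:
    //
    //     if let Some(task) = project.update(cx, |project, cx| {
    //         project.create_entry(project_path, false, cx)
    //     }) {
    //         let entry = task.await?;
    //     }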
770
771 pub fn rename_entry(
772 &mut self,
773 entry_id: ProjectEntryId,
774 new_path: impl Into<Arc<Path>>,
775 cx: &mut ModelContext<Self>,
776 ) -> Option<Task<Result<Entry>>> {
777 let worktree = self.worktree_for_entry(entry_id, cx)?;
778 let new_path = new_path.into();
779 if self.is_local() {
780 worktree.update(cx, |worktree, cx| {
781 worktree
782 .as_local_mut()
783 .unwrap()
784 .rename_entry(entry_id, new_path, cx)
785 })
786 } else {
787 let client = self.client.clone();
788 let project_id = self.remote_id().unwrap();
789
790 Some(cx.spawn_weak(|_, mut cx| async move {
791 let response = client
792 .request(proto::RenameProjectEntry {
793 project_id,
794 entry_id: entry_id.to_proto(),
795 new_path: new_path.as_os_str().as_bytes().to_vec(),
796 })
797 .await?;
798 let entry = response
799 .entry
800 .ok_or_else(|| anyhow!("missing entry in response"))?;
801 worktree
802 .update(&mut cx, |worktree, cx| {
803 worktree.as_remote().unwrap().insert_entry(
804 entry,
805 response.worktree_scan_id as usize,
806 cx,
807 )
808 })
809 .await
810 }))
811 }
812 }
813
814 pub fn delete_entry(
815 &mut self,
816 entry_id: ProjectEntryId,
817 cx: &mut ModelContext<Self>,
818 ) -> Option<Task<Result<()>>> {
819 let worktree = self.worktree_for_entry(entry_id, cx)?;
820 if self.is_local() {
821 worktree.update(cx, |worktree, cx| {
822 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
823 })
824 } else {
825 let client = self.client.clone();
826 let project_id = self.remote_id().unwrap();
827 Some(cx.spawn_weak(|_, mut cx| async move {
828 let response = client
829 .request(proto::DeleteProjectEntry {
830 project_id,
831 entry_id: entry_id.to_proto(),
832 })
833 .await?;
834 worktree
835 .update(&mut cx, move |worktree, cx| {
836 worktree.as_remote().unwrap().delete_entry(
837 entry_id,
838 response.worktree_scan_id as usize,
839 cx,
840 )
841 })
842 .await
843 }))
844 }
845 }
846
847 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
848 let project_id;
849 if let ProjectClientState::Local {
850 remote_id_rx,
851 is_shared,
852 ..
853 } = &mut self.client_state
854 {
855 if *is_shared {
856 return Task::ready(Ok(()));
857 }
858 *is_shared = true;
859 if let Some(id) = *remote_id_rx.borrow() {
860 project_id = id;
861 } else {
862 return Task::ready(Err(anyhow!("project hasn't been registered")));
863 }
864 } else {
865 return Task::ready(Err(anyhow!("can't share a remote project")));
866 };
867
868 for open_buffer in self.opened_buffers.values_mut() {
869 match open_buffer {
870 OpenBuffer::Strong(_) => {}
871 OpenBuffer::Weak(buffer) => {
872 if let Some(buffer) = buffer.upgrade(cx) {
873 *open_buffer = OpenBuffer::Strong(buffer);
874 }
875 }
876 OpenBuffer::Loading(_) => unreachable!(),
877 }
878 }
879
880 for worktree_handle in self.worktrees.iter_mut() {
881 match worktree_handle {
882 WorktreeHandle::Strong(_) => {}
883 WorktreeHandle::Weak(worktree) => {
884 if let Some(worktree) = worktree.upgrade(cx) {
885 *worktree_handle = WorktreeHandle::Strong(worktree);
886 }
887 }
888 }
889 }
890
891 let mut tasks = Vec::new();
892 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
893 worktree.update(cx, |worktree, cx| {
894 let worktree = worktree.as_local_mut().unwrap();
895 tasks.push(worktree.share(project_id, cx));
896 });
897 }
898
899 cx.spawn(|this, mut cx| async move {
900 for task in tasks {
901 task.await?;
902 }
903 this.update(&mut cx, |_, cx| cx.notify());
904 Ok(())
905 })
906 }
907
908 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
909 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
910 if !*is_shared {
911 return;
912 }
913
914 *is_shared = false;
915 self.collaborators.clear();
916 self.shared_buffers.clear();
917 for worktree_handle in self.worktrees.iter_mut() {
918 if let WorktreeHandle::Strong(worktree) = worktree_handle {
919 let is_visible = worktree.update(cx, |worktree, _| {
920 worktree.as_local_mut().unwrap().unshare();
921 worktree.is_visible()
922 });
923 if !is_visible {
924 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
925 }
926 }
927 }
928
929 for open_buffer in self.opened_buffers.values_mut() {
930 match open_buffer {
931 OpenBuffer::Strong(buffer) => {
932 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
933 }
934 _ => {}
935 }
936 }
937
938 cx.notify();
939 } else {
940 log::error!("attempted to unshare a remote project");
941 }
942 }
943
944 pub fn respond_to_join_request(
945 &mut self,
946 requester_id: u64,
947 allow: bool,
948 cx: &mut ModelContext<Self>,
949 ) {
950 if let Some(project_id) = self.remote_id() {
951 let share = self.share(cx);
952 let client = self.client.clone();
953 cx.foreground()
954 .spawn(async move {
955 share.await?;
956 client.send(proto::RespondToJoinProjectRequest {
957 requester_id,
958 project_id,
959 allow,
960 })
961 })
962 .detach_and_log_err(cx);
963 }
964 }
965
966 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
967 if let ProjectClientState::Remote {
968 sharing_has_stopped,
969 ..
970 } = &mut self.client_state
971 {
972 *sharing_has_stopped = true;
973 self.collaborators.clear();
974 cx.notify();
975 }
976 }
977
978 pub fn is_read_only(&self) -> bool {
979 match &self.client_state {
980 ProjectClientState::Local { .. } => false,
981 ProjectClientState::Remote {
982 sharing_has_stopped,
983 ..
984 } => *sharing_has_stopped,
985 }
986 }
987
988 pub fn is_local(&self) -> bool {
989 match &self.client_state {
990 ProjectClientState::Local { .. } => true,
991 ProjectClientState::Remote { .. } => false,
992 }
993 }
994
995 pub fn is_remote(&self) -> bool {
996 !self.is_local()
997 }
998
999 pub fn create_buffer(
1000 &mut self,
1001 text: &str,
1002 language: Option<Arc<Language>>,
1003 cx: &mut ModelContext<Self>,
1004 ) -> Result<ModelHandle<Buffer>> {
1005 if self.is_remote() {
1006 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1007 }
1008
1009 let buffer = cx.add_model(|cx| {
1010 Buffer::new(self.replica_id(), text, cx)
1011 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1012 });
1013 self.register_buffer(&buffer, cx)?;
1014 Ok(buffer)
1015 }
1016
1017 pub fn open_path(
1018 &mut self,
1019 path: impl Into<ProjectPath>,
1020 cx: &mut ModelContext<Self>,
1021 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1022 let task = self.open_buffer(path, cx);
1023 cx.spawn_weak(|_, cx| async move {
1024 let buffer = task.await?;
1025 let project_entry_id = buffer
1026 .read_with(&cx, |buffer, cx| {
1027 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1028 })
1029 .ok_or_else(|| anyhow!("no project entry"))?;
1030 Ok((project_entry_id, buffer.into()))
1031 })
1032 }
1033
1034 pub fn open_local_buffer(
1035 &mut self,
1036 abs_path: impl AsRef<Path>,
1037 cx: &mut ModelContext<Self>,
1038 ) -> Task<Result<ModelHandle<Buffer>>> {
1039 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1040 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1041 } else {
1042 Task::ready(Err(anyhow!("no such path")))
1043 }
1044 }
1045
1046 pub fn open_buffer(
1047 &mut self,
1048 path: impl Into<ProjectPath>,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Task<Result<ModelHandle<Buffer>>> {
1051 let project_path = path.into();
1052 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1053 worktree
1054 } else {
1055 return Task::ready(Err(anyhow!("no such worktree")));
1056 };
1057
1058 // If there is already a buffer for the given path, then return it.
1059 let existing_buffer = self.get_open_buffer(&project_path, cx);
1060 if let Some(existing_buffer) = existing_buffer {
1061 return Task::ready(Ok(existing_buffer));
1062 }
1063
1064 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1065 // If the given path is already being loaded, then wait for that existing
1066 // task to complete and return the same buffer.
1067 hash_map::Entry::Occupied(e) => e.get().clone(),
1068
1069 // Otherwise, record the fact that this path is now being loaded.
1070 hash_map::Entry::Vacant(entry) => {
1071 let (mut tx, rx) = postage::watch::channel();
1072 entry.insert(rx.clone());
1073
1074 let load_buffer = if worktree.read(cx).is_local() {
1075 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1076 } else {
1077 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1078 };
1079
1080 cx.spawn(move |this, mut cx| async move {
1081 let load_result = load_buffer.await;
1082 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1083 // Record the fact that the buffer is no longer loading.
1084 this.loading_buffers.remove(&project_path);
1085 let buffer = load_result.map_err(Arc::new)?;
1086 Ok(buffer)
1087 }));
1088 })
1089 .detach();
1090 rx
1091 }
1092 };
1093
1094 cx.foreground().spawn(async move {
1095 loop {
1096 if let Some(result) = loading_watch.borrow().as_ref() {
1097 match result {
1098 Ok(buffer) => return Ok(buffer.clone()),
1099 Err(error) => return Err(anyhow!("{}", error)),
1100 }
1101 }
1102 loading_watch.next().await;
1103 }
1104 })
1105 }
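
    // Sketch of opening (or re-using) a buffer for a path, assuming `cx` is an
    // async context and `path` is a `ProjectPath` within one of the worktrees:
    //
    //     let buffer = project
    //         .update(&mut cx, |project, cx| project.open_buffer(path, cx))
    //         .await?;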
1106
1107 fn open_local_buffer_internal(
1108 &mut self,
1109 path: &Arc<Path>,
1110 worktree: &ModelHandle<Worktree>,
1111 cx: &mut ModelContext<Self>,
1112 ) -> Task<Result<ModelHandle<Buffer>>> {
1113 let load_buffer = worktree.update(cx, |worktree, cx| {
1114 let worktree = worktree.as_local_mut().unwrap();
1115 worktree.load_buffer(path, cx)
1116 });
1117 cx.spawn(|this, mut cx| async move {
1118 let buffer = load_buffer.await?;
1119 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1120 Ok(buffer)
1121 })
1122 }
1123
1124 fn open_remote_buffer_internal(
1125 &mut self,
1126 path: &Arc<Path>,
1127 worktree: &ModelHandle<Worktree>,
1128 cx: &mut ModelContext<Self>,
1129 ) -> Task<Result<ModelHandle<Buffer>>> {
1130 let rpc = self.client.clone();
1131 let project_id = self.remote_id().unwrap();
1132 let remote_worktree_id = worktree.read(cx).id();
1133 let path = path.clone();
1134 let path_string = path.to_string_lossy().to_string();
1135 cx.spawn(|this, mut cx| async move {
1136 let response = rpc
1137 .request(proto::OpenBufferByPath {
1138 project_id,
1139 worktree_id: remote_worktree_id.to_proto(),
1140 path: path_string,
1141 })
1142 .await?;
1143 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1144 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1145 .await
1146 })
1147 }
1148
1149 fn open_local_buffer_via_lsp(
1150 &mut self,
1151 abs_path: lsp::Url,
1152 lsp_adapter: Arc<dyn LspAdapter>,
1153 lsp_server: Arc<LanguageServer>,
1154 cx: &mut ModelContext<Self>,
1155 ) -> Task<Result<ModelHandle<Buffer>>> {
1156 cx.spawn(|this, mut cx| async move {
1157 let abs_path = abs_path
1158 .to_file_path()
1159 .map_err(|_| anyhow!("can't convert URI to path"))?;
1160 let (worktree, relative_path) = if let Some(result) =
1161 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1162 {
1163 result
1164 } else {
1165 let worktree = this
1166 .update(&mut cx, |this, cx| {
1167 this.create_local_worktree(&abs_path, false, cx)
1168 })
1169 .await?;
1170 this.update(&mut cx, |this, cx| {
1171 this.language_servers.insert(
1172 (worktree.read(cx).id(), lsp_adapter.name()),
1173 (lsp_adapter, lsp_server),
1174 );
1175 });
1176 (worktree, PathBuf::new())
1177 };
1178
1179 let project_path = ProjectPath {
1180 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1181 path: relative_path.into(),
1182 };
1183 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1184 .await
1185 })
1186 }
1187
1188 pub fn open_buffer_by_id(
1189 &mut self,
1190 id: u64,
1191 cx: &mut ModelContext<Self>,
1192 ) -> Task<Result<ModelHandle<Buffer>>> {
1193 if let Some(buffer) = self.buffer_for_id(id, cx) {
1194 Task::ready(Ok(buffer))
1195 } else if self.is_local() {
1196 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1197 } else if let Some(project_id) = self.remote_id() {
1198 let request = self
1199 .client
1200 .request(proto::OpenBufferById { project_id, id });
1201 cx.spawn(|this, mut cx| async move {
1202 let buffer = request
1203 .await?
1204 .buffer
1205 .ok_or_else(|| anyhow!("invalid buffer"))?;
1206 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1207 .await
1208 })
1209 } else {
1210 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1211 }
1212 }
1213
1214 pub fn save_buffer_as(
1215 &mut self,
1216 buffer: ModelHandle<Buffer>,
1217 abs_path: PathBuf,
1218 cx: &mut ModelContext<Project>,
1219 ) -> Task<Result<()>> {
1220 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1221 let old_path =
1222 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1223 cx.spawn(|this, mut cx| async move {
1224 if let Some(old_path) = old_path {
1225 this.update(&mut cx, |this, cx| {
1226 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1227 });
1228 }
1229 let (worktree, path) = worktree_task.await?;
1230 worktree
1231 .update(&mut cx, |worktree, cx| {
1232 worktree
1233 .as_local_mut()
1234 .unwrap()
1235 .save_buffer_as(buffer.clone(), path, cx)
1236 })
1237 .await?;
1238 this.update(&mut cx, |this, cx| {
1239 this.assign_language_to_buffer(&buffer, cx);
1240 this.register_buffer_with_language_server(&buffer, cx);
1241 });
1242 Ok(())
1243 })
1244 }
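
    // Sketch of "Save As", which may create a brand-new worktree for the target
    // directory and then re-registers the buffer with the matching language
    // server (the path below is illustrative):
    //
    //     project
    //         .update(&mut cx, |project, cx| {
    //             project.save_buffer_as(buffer.clone(), PathBuf::from("/tmp/new.rs"), cx)
    //         })
    //         .await?;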
1245
1246 pub fn get_open_buffer(
1247 &mut self,
1248 path: &ProjectPath,
1249 cx: &mut ModelContext<Self>,
1250 ) -> Option<ModelHandle<Buffer>> {
1251 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1252 self.opened_buffers.values().find_map(|buffer| {
1253 let buffer = buffer.upgrade(cx)?;
1254 let file = File::from_dyn(buffer.read(cx).file())?;
1255 if file.worktree == worktree && file.path() == &path.path {
1256 Some(buffer)
1257 } else {
1258 None
1259 }
1260 })
1261 }
1262
1263 fn register_buffer(
1264 &mut self,
1265 buffer: &ModelHandle<Buffer>,
1266 cx: &mut ModelContext<Self>,
1267 ) -> Result<()> {
1268 let remote_id = buffer.read(cx).remote_id();
1269 let open_buffer = if self.is_remote() || self.is_shared() {
1270 OpenBuffer::Strong(buffer.clone())
1271 } else {
1272 OpenBuffer::Weak(buffer.downgrade())
1273 };
1274
1275 match self.opened_buffers.insert(remote_id, open_buffer) {
1276 None => {}
1277 Some(OpenBuffer::Loading(operations)) => {
1278 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1279 }
1280 Some(OpenBuffer::Weak(existing_handle)) => {
1281 if existing_handle.upgrade(cx).is_some() {
1282 Err(anyhow!(
1283 "already registered buffer with remote id {}",
1284 remote_id
1285 ))?
1286 }
1287 }
1288 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1289 "already registered buffer with remote id {}",
1290 remote_id
1291 ))?,
1292 }
1293 cx.subscribe(buffer, |this, buffer, event, cx| {
1294 this.on_buffer_event(buffer, event, cx);
1295 })
1296 .detach();
1297
1298 self.assign_language_to_buffer(buffer, cx);
1299 self.register_buffer_with_language_server(buffer, cx);
1300 cx.observe_release(buffer, |this, buffer, cx| {
1301 if let Some(file) = File::from_dyn(buffer.file()) {
1302 if file.is_local() {
1303 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1304 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1305 server
1306 .notify::<lsp::notification::DidCloseTextDocument>(
1307 lsp::DidCloseTextDocumentParams {
1308 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1309 },
1310 )
1311 .log_err();
1312 }
1313 }
1314 }
1315 })
1316 .detach();
1317
1318 Ok(())
1319 }
1320
1321 fn register_buffer_with_language_server(
1322 &mut self,
1323 buffer_handle: &ModelHandle<Buffer>,
1324 cx: &mut ModelContext<Self>,
1325 ) {
1326 let buffer = buffer_handle.read(cx);
1327 let buffer_id = buffer.remote_id();
1328 if let Some(file) = File::from_dyn(buffer.file()) {
1329 if file.is_local() {
1330 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1331 let initial_snapshot = buffer.text_snapshot();
1332
1333 let mut language_server = None;
1334 let mut language_id = None;
1335 if let Some(language) = buffer.language() {
1336 let worktree_id = file.worktree_id(cx);
1337 if let Some(adapter) = language.lsp_adapter() {
1338 language_id = adapter.id_for_language(language.name().as_ref());
1339 language_server = self
1340 .language_servers
1341 .get(&(worktree_id, adapter.name()))
1342 .cloned();
1343 }
1344 }
1345
1346 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1347 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1348 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1349 .log_err();
1350 }
1351 }
1352
1353 if let Some((_, server)) = language_server {
1354 server
1355 .notify::<lsp::notification::DidOpenTextDocument>(
1356 lsp::DidOpenTextDocumentParams {
1357 text_document: lsp::TextDocumentItem::new(
1358 uri,
1359 language_id.unwrap_or_default(),
1360 0,
1361 initial_snapshot.text(),
1362 ),
                            },
1365 )
1366 .log_err();
1367 buffer_handle.update(cx, |buffer, cx| {
1368 buffer.set_completion_triggers(
1369 server
1370 .capabilities()
1371 .completion_provider
1372 .as_ref()
1373 .and_then(|provider| provider.trigger_characters.clone())
1374 .unwrap_or(Vec::new()),
1375 cx,
1376 )
1377 });
1378 self.buffer_snapshots
1379 .insert(buffer_id, vec![(0, initial_snapshot)]);
1380 }
1381 }
1382 }
1383 }
1384
1385 fn unregister_buffer_from_language_server(
1386 &mut self,
1387 buffer: &ModelHandle<Buffer>,
1388 old_path: PathBuf,
1389 cx: &mut ModelContext<Self>,
1390 ) {
1391 buffer.update(cx, |buffer, cx| {
1392 buffer.update_diagnostics(Default::default(), cx);
1393 self.buffer_snapshots.remove(&buffer.remote_id());
1394 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1395 language_server
1396 .notify::<lsp::notification::DidCloseTextDocument>(
1397 lsp::DidCloseTextDocumentParams {
1398 text_document: lsp::TextDocumentIdentifier::new(
1399 lsp::Url::from_file_path(old_path).unwrap(),
1400 ),
1401 },
1402 )
1403 .log_err();
1404 }
1405 });
1406 }
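
    // `on_buffer_event` returns `Option<()>` purely so the body can bail out
    // early with `?` when a prerequisite (remote id, language server, local
    // file, etc.) is missing; the return value itself is never inspected.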
1407
1408 fn on_buffer_event(
1409 &mut self,
1410 buffer: ModelHandle<Buffer>,
1411 event: &BufferEvent,
1412 cx: &mut ModelContext<Self>,
1413 ) -> Option<()> {
1414 match event {
1415 BufferEvent::Operation(operation) => {
1416 let project_id = self.remote_id()?;
1417 let request = self.client.request(proto::UpdateBuffer {
1418 project_id,
1419 buffer_id: buffer.read(cx).remote_id(),
1420 operations: vec![language::proto::serialize_operation(&operation)],
1421 });
1422 cx.background().spawn(request).detach_and_log_err(cx);
1423 }
1424 BufferEvent::Edited { .. } => {
1425 let (_, language_server) = self
1426 .language_server_for_buffer(buffer.read(cx), cx)?
1427 .clone();
1428 let buffer = buffer.read(cx);
1429 let file = File::from_dyn(buffer.file())?;
1430 let abs_path = file.as_local()?.abs_path(cx);
1431 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1432 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1433 let (version, prev_snapshot) = buffer_snapshots.last()?;
1434 let next_snapshot = buffer.text_snapshot();
1435 let next_version = version + 1;
1436
1437 let content_changes = buffer
1438 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1439 .map(|edit| {
1440 let edit_start = edit.new.start.0;
1441 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1442 let new_text = next_snapshot
1443 .text_for_range(edit.new.start.1..edit.new.end.1)
1444 .collect();
1445 lsp::TextDocumentContentChangeEvent {
1446 range: Some(lsp::Range::new(
1447 point_to_lsp(edit_start),
1448 point_to_lsp(edit_end),
1449 )),
1450 range_length: None,
1451 text: new_text,
1452 }
1453 })
1454 .collect();
1455
1456 buffer_snapshots.push((next_version, next_snapshot));
1457
1458 language_server
1459 .notify::<lsp::notification::DidChangeTextDocument>(
1460 lsp::DidChangeTextDocumentParams {
1461 text_document: lsp::VersionedTextDocumentIdentifier::new(
1462 uri,
1463 next_version,
1464 ),
1465 content_changes,
1466 },
1467 )
1468 .log_err();
1469 }
1470 BufferEvent::Saved => {
1471 let file = File::from_dyn(buffer.read(cx).file())?;
1472 let worktree_id = file.worktree_id(cx);
1473 let abs_path = file.as_local()?.abs_path(cx);
1474 let text_document = lsp::TextDocumentIdentifier {
1475 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1476 };
1477
1478 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1479 server
1480 .notify::<lsp::notification::DidSaveTextDocument>(
1481 lsp::DidSaveTextDocumentParams {
1482 text_document: text_document.clone(),
1483 text: None,
1484 },
1485 )
1486 .log_err();
1487 }
1488 }
1489 _ => {}
1490 }
1491
1492 None
1493 }
1494
1495 fn language_servers_for_worktree(
1496 &self,
1497 worktree_id: WorktreeId,
1498 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1499 self.language_servers.iter().filter_map(
1500 move |((language_server_worktree_id, _), server)| {
1501 if *language_server_worktree_id == worktree_id {
1502 Some(server)
1503 } else {
1504 None
1505 }
1506 },
1507 )
1508 }
1509
1510 fn assign_language_to_buffer(
1511 &mut self,
1512 buffer: &ModelHandle<Buffer>,
1513 cx: &mut ModelContext<Self>,
1514 ) -> Option<()> {
1515 // If the buffer has a language, set it and start the language server if we haven't already.
1516 let full_path = buffer.read(cx).file()?.full_path(cx);
1517 let language = self.languages.select_language(&full_path)?;
1518 buffer.update(cx, |buffer, cx| {
1519 buffer.set_language(Some(language.clone()), cx);
1520 });
1521
1522 let file = File::from_dyn(buffer.read(cx).file())?;
1523 let worktree = file.worktree.read(cx).as_local()?;
1524 let worktree_id = worktree.id();
1525 let worktree_abs_path = worktree.abs_path().clone();
1526 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1527
1528 None
1529 }
1530
1531 fn start_language_server(
1532 &mut self,
1533 worktree_id: WorktreeId,
1534 worktree_path: Arc<Path>,
1535 language: Arc<Language>,
1536 cx: &mut ModelContext<Self>,
1537 ) {
1538 let adapter = if let Some(adapter) = language.lsp_adapter() {
1539 adapter
1540 } else {
1541 return;
1542 };
1543 let key = (worktree_id, adapter.name());
1544 self.started_language_servers
1545 .entry(key.clone())
1546 .or_insert_with(|| {
1547 let server_id = post_inc(&mut self.next_language_server_id);
1548 let language_server = self.languages.start_language_server(
1549 server_id,
1550 language.clone(),
1551 worktree_path,
1552 self.client.http_client(),
1553 cx,
1554 );
1555 cx.spawn_weak(|this, mut cx| async move {
1556 let language_server = language_server?.await.log_err()?;
1557 let language_server = language_server
1558 .initialize(adapter.initialization_options())
1559 .await
1560 .log_err()?;
1561 let this = this.upgrade(&cx)?;
1562 let disk_based_diagnostics_progress_token =
1563 adapter.disk_based_diagnostics_progress_token();
1564
1565 language_server
1566 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1567 let this = this.downgrade();
1568 let adapter = adapter.clone();
1569 move |params, mut cx| {
1570 if let Some(this) = this.upgrade(&cx) {
1571 this.update(&mut cx, |this, cx| {
1572 this.on_lsp_diagnostics_published(
1573 server_id,
1574 params,
1575 &adapter,
1576 disk_based_diagnostics_progress_token,
1577 cx,
1578 );
1579 });
1580 }
1581 }
1582 })
1583 .detach();
1584
1585 language_server
1586 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1587 let settings = this
1588 .read_with(&cx, |this, _| this.language_server_settings.clone());
1589 move |params, _| {
1590 let settings = settings.lock().clone();
1591 async move {
1592 Ok(params
1593 .items
1594 .into_iter()
1595 .map(|item| {
1596 if let Some(section) = &item.section {
1597 settings
1598 .get(section)
1599 .cloned()
1600 .unwrap_or(serde_json::Value::Null)
1601 } else {
1602 settings.clone()
1603 }
1604 })
1605 .collect())
1606 }
1607 }
1608 })
1609 .detach();
1610
1611 language_server
1612 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1613 let this = this.downgrade();
1614 let adapter = adapter.clone();
1615 let language_server = language_server.clone();
1616 move |params, cx| {
1617 Self::on_lsp_workspace_edit(
1618 this,
1619 params,
1620 server_id,
1621 adapter.clone(),
1622 language_server.clone(),
1623 cx,
1624 )
1625 }
1626 })
1627 .detach();
1628
1629 language_server
1630 .on_notification::<lsp::notification::Progress, _>({
1631 let this = this.downgrade();
1632 move |params, mut cx| {
1633 if let Some(this) = this.upgrade(&cx) {
1634 this.update(&mut cx, |this, cx| {
1635 this.on_lsp_progress(
1636 params,
1637 server_id,
1638 disk_based_diagnostics_progress_token,
1639 cx,
1640 );
1641 });
1642 }
1643 }
1644 })
1645 .detach();
1646
1647 this.update(&mut cx, |this, cx| {
1648 this.language_servers
1649 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1650 this.language_server_statuses.insert(
1651 server_id,
1652 LanguageServerStatus {
1653 name: language_server.name().to_string(),
1654 pending_work: Default::default(),
1655 pending_diagnostic_updates: 0,
1656 },
1657 );
1658 language_server
1659 .notify::<lsp::notification::DidChangeConfiguration>(
1660 lsp::DidChangeConfigurationParams {
1661 settings: this.language_server_settings.lock().clone(),
1662 },
1663 )
1664 .ok();
1665
1666 if let Some(project_id) = this.remote_id() {
1667 this.client
1668 .send(proto::StartLanguageServer {
1669 project_id,
1670 server: Some(proto::LanguageServer {
1671 id: server_id as u64,
1672 name: language_server.name().to_string(),
1673 }),
1674 })
1675 .log_err();
1676 }
1677
1678 // Tell the language server about every open buffer in the worktree that matches the language.
1679 for buffer in this.opened_buffers.values() {
1680 if let Some(buffer_handle) = buffer.upgrade(cx) {
1681 let buffer = buffer_handle.read(cx);
1682 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1683 file
1684 } else {
1685 continue;
1686 };
1687 let language = if let Some(language) = buffer.language() {
1688 language
1689 } else {
1690 continue;
1691 };
1692 if file.worktree.read(cx).id() != key.0
1693 || language.lsp_adapter().map(|a| a.name())
1694 != Some(key.1.clone())
1695 {
1696 continue;
1697 }
1698
1699 let file = file.as_local()?;
1700 let versions = this
1701 .buffer_snapshots
1702 .entry(buffer.remote_id())
1703 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1704 let (version, initial_snapshot) = versions.last().unwrap();
1705 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1706 let language_id = adapter.id_for_language(language.name().as_ref());
1707 language_server
1708 .notify::<lsp::notification::DidOpenTextDocument>(
1709 lsp::DidOpenTextDocumentParams {
1710 text_document: lsp::TextDocumentItem::new(
1711 uri,
1712 language_id.unwrap_or_default(),
1713 *version,
1714 initial_snapshot.text(),
1715 ),
1716 },
1717 )
1718 .log_err()?;
1719 buffer_handle.update(cx, |buffer, cx| {
1720 buffer.set_completion_triggers(
1721 language_server
1722 .capabilities()
1723 .completion_provider
1724 .as_ref()
1725 .and_then(|provider| {
1726 provider.trigger_characters.clone()
1727 })
1728 .unwrap_or(Vec::new()),
1729 cx,
1730 )
1731 });
1732 }
1733 }
1734
1735 cx.notify();
1736 Some(())
1737 });
1738
1739 Some(language_server)
1740 })
1741 });
1742 }
1743
1744 pub fn restart_language_servers_for_buffers(
1745 &mut self,
1746 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1747 cx: &mut ModelContext<Self>,
1748 ) -> Option<()> {
1749 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1750 .into_iter()
1751 .filter_map(|buffer| {
1752 let file = File::from_dyn(buffer.read(cx).file())?;
1753 let worktree = file.worktree.read(cx).as_local()?;
1754 let worktree_id = worktree.id();
1755 let worktree_abs_path = worktree.abs_path().clone();
1756 let full_path = file.full_path(cx);
1757 Some((worktree_id, worktree_abs_path, full_path))
1758 })
1759 .collect();
1760 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1761 let language = self.languages.select_language(&full_path)?;
1762 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1763 }
1764
1765 None
1766 }
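
    // Sketch of restarting the language servers behind the currently open
    // buffers, e.g. after the user edits server settings (assumes `buffers` is
    // an iterator of `ModelHandle<Buffer>`):
    //
    //     project.update(cx, |project, cx| {
    //         project.restart_language_servers_for_buffers(buffers, cx);
    //     });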
1767
1768 fn restart_language_server(
1769 &mut self,
1770 worktree_id: WorktreeId,
1771 worktree_path: Arc<Path>,
1772 language: Arc<Language>,
1773 cx: &mut ModelContext<Self>,
1774 ) {
1775 let adapter = if let Some(adapter) = language.lsp_adapter() {
1776 adapter
1777 } else {
1778 return;
1779 };
1780 let key = (worktree_id, adapter.name());
1781 let server_to_shutdown = self.language_servers.remove(&key);
1782 self.started_language_servers.remove(&key);
1783 server_to_shutdown
1784 .as_ref()
1785 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1786 cx.spawn_weak(|this, mut cx| async move {
1787 if let Some(this) = this.upgrade(&cx) {
1788 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1789 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1790 shutdown_task.await;
1791 }
1792 }
1793
1794 this.update(&mut cx, |this, cx| {
1795 this.start_language_server(worktree_id, worktree_path, language, cx);
1796 });
1797 }
1798 })
1799 .detach();
1800 }
1801
1802 fn on_lsp_diagnostics_published(
1803 &mut self,
1804 server_id: usize,
1805 mut params: lsp::PublishDiagnosticsParams,
1806 adapter: &Arc<dyn LspAdapter>,
1807 disk_based_diagnostics_progress_token: Option<&str>,
1808 cx: &mut ModelContext<Self>,
1809 ) {
1810 adapter.process_diagnostics(&mut params);
1811 if disk_based_diagnostics_progress_token.is_none() {
1812 self.disk_based_diagnostics_started(cx);
1813 self.broadcast_language_server_update(
1814 server_id,
1815 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1816 proto::LspDiskBasedDiagnosticsUpdating {},
1817 ),
1818 );
1819 }
1820 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1821 .log_err();
1822 if disk_based_diagnostics_progress_token.is_none() {
1823 self.disk_based_diagnostics_finished(cx);
1824 self.broadcast_language_server_update(
1825 server_id,
1826 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1827 proto::LspDiskBasedDiagnosticsUpdated {},
1828 ),
1829 );
1830 }
1831 }
1832
1833 fn on_lsp_progress(
1834 &mut self,
1835 progress: lsp::ProgressParams,
1836 server_id: usize,
1837 disk_based_diagnostics_progress_token: Option<&str>,
1838 cx: &mut ModelContext<Self>,
1839 ) {
1840 let token = match progress.token {
1841 lsp::NumberOrString::String(token) => token,
1842 lsp::NumberOrString::Number(token) => {
1843 log::info!("skipping numeric progress token {}", token);
1844 return;
1845 }
1846 };
1847 let progress = match progress.value {
1848 lsp::ProgressParamsValue::WorkDone(value) => value,
1849 };
1850 let language_server_status =
1851 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1852 status
1853 } else {
1854 return;
1855 };
1856 match progress {
1857 lsp::WorkDoneProgress::Begin(_) => {
1858 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1859 language_server_status.pending_diagnostic_updates += 1;
1860 if language_server_status.pending_diagnostic_updates == 1 {
1861 self.disk_based_diagnostics_started(cx);
1862 self.broadcast_language_server_update(
1863 server_id,
1864 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1865 proto::LspDiskBasedDiagnosticsUpdating {},
1866 ),
1867 );
1868 }
1869 } else {
1870 self.on_lsp_work_start(server_id, token.clone(), cx);
1871 self.broadcast_language_server_update(
1872 server_id,
1873 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1874 token,
1875 }),
1876 );
1877 }
1878 }
1879 lsp::WorkDoneProgress::Report(report) => {
1880 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1881 self.on_lsp_work_progress(
1882 server_id,
1883 token.clone(),
1884 LanguageServerProgress {
1885 message: report.message.clone(),
1886 percentage: report.percentage.map(|p| p as usize),
1887 last_update_at: Instant::now(),
1888 },
1889 cx,
1890 );
1891 self.broadcast_language_server_update(
1892 server_id,
1893 proto::update_language_server::Variant::WorkProgress(
1894 proto::LspWorkProgress {
1895 token,
1896 message: report.message,
1897 percentage: report.percentage.map(|p| p as u32),
1898 },
1899 ),
1900 );
1901 }
1902 }
1903 lsp::WorkDoneProgress::End(_) => {
1904 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1905 language_server_status.pending_diagnostic_updates -= 1;
1906 if language_server_status.pending_diagnostic_updates == 0 {
1907 self.disk_based_diagnostics_finished(cx);
1908 self.broadcast_language_server_update(
1909 server_id,
1910 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1911 proto::LspDiskBasedDiagnosticsUpdated {},
1912 ),
1913 );
1914 }
1915 } else {
1916 self.on_lsp_work_end(server_id, token.clone(), cx);
1917 self.broadcast_language_server_update(
1918 server_id,
1919 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1920 token,
1921 }),
1922 );
1923 }
1924 }
1925 }
1926 }
1927
1928 fn on_lsp_work_start(
1929 &mut self,
1930 language_server_id: usize,
1931 token: String,
1932 cx: &mut ModelContext<Self>,
1933 ) {
1934 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1935 status.pending_work.insert(
1936 token,
1937 LanguageServerProgress {
1938 message: None,
1939 percentage: None,
1940 last_update_at: Instant::now(),
1941 },
1942 );
1943 cx.notify();
1944 }
1945 }
1946
1947 fn on_lsp_work_progress(
1948 &mut self,
1949 language_server_id: usize,
1950 token: String,
1951 progress: LanguageServerProgress,
1952 cx: &mut ModelContext<Self>,
1953 ) {
1954 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1955 status.pending_work.insert(token, progress);
1956 cx.notify();
1957 }
1958 }
1959
1960 fn on_lsp_work_end(
1961 &mut self,
1962 language_server_id: usize,
1963 token: String,
1964 cx: &mut ModelContext<Self>,
1965 ) {
1966 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1967 status.pending_work.remove(&token);
1968 cx.notify();
1969 }
1970 }
1971
1972 async fn on_lsp_workspace_edit(
1973 this: WeakModelHandle<Self>,
1974 params: lsp::ApplyWorkspaceEditParams,
1975 server_id: usize,
1976 adapter: Arc<dyn LspAdapter>,
1977 language_server: Arc<LanguageServer>,
1978 mut cx: AsyncAppContext,
1979 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1980 let this = this
1981 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1983 let transaction = Self::deserialize_workspace_edit(
1984 this.clone(),
1985 params.edit,
1986 true,
1987 adapter.clone(),
1988 language_server.clone(),
1989 &mut cx,
1990 )
1991 .await
1992 .log_err();
1993 this.update(&mut cx, |this, _| {
1994 if let Some(transaction) = transaction {
1995 this.last_workspace_edits_by_language_server
1996 .insert(server_id, transaction);
1997 }
1998 });
1999 Ok(lsp::ApplyWorkspaceEditResponse {
2000 applied: true,
2001 failed_change: None,
2002 failure_reason: None,
2003 })
2004 }
2005
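    /// Forwards a language server event to collaborators. This is a no-op when
    /// the project has no remote id.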
2006 fn broadcast_language_server_update(
2007 &self,
2008 language_server_id: usize,
2009 event: proto::update_language_server::Variant,
2010 ) {
2011 if let Some(project_id) = self.remote_id() {
2012 self.client
2013 .send(proto::UpdateLanguageServer {
2014 project_id,
2015 language_server_id: language_server_id as u64,
2016 variant: Some(event),
2017 })
2018 .log_err();
2019 }
2020 }
2021
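    /// Replaces the stored language server settings and notifies every running
    /// language server via `workspace/didChangeConfiguration`.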
2022 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2023 for (_, server) in self.language_servers.values() {
2024 server
2025 .notify::<lsp::notification::DidChangeConfiguration>(
2026 lsp::DidChangeConfigurationParams {
2027 settings: settings.clone(),
2028 },
2029 )
2030 .ok();
2031 }
2032 *self.language_server_settings.lock() = settings;
2033 }
2034
2035 pub fn language_server_statuses(
2036 &self,
2037 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2038 self.language_server_statuses.values()
2039 }
2040
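    /// Converts an LSP `textDocument/publishDiagnostics` payload into
    /// diagnostic entries. Each primary diagnostic receives a fresh group id;
    /// its related-information entries in the same file become non-primary
    /// members of that group, while diagnostics whose related information
    /// points back at an existing primary only contribute severity and
    /// "unnecessary" flags. The result is then routed to the affected
    /// worktree path.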
2041 pub fn update_diagnostics(
2042 &mut self,
2043 params: lsp::PublishDiagnosticsParams,
2044 disk_based_sources: &[&str],
2045 cx: &mut ModelContext<Self>,
2046 ) -> Result<()> {
2047 let abs_path = params
2048 .uri
2049 .to_file_path()
2050 .map_err(|_| anyhow!("URI is not a file"))?;
2051 let mut next_group_id = 0;
2052 let mut diagnostics = Vec::default();
2053 let mut primary_diagnostic_group_ids = HashMap::default();
2054 let mut sources_by_group_id = HashMap::default();
2055 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2057 let source = diagnostic.source.as_ref();
2058 let code = diagnostic.code.as_ref().map(|code| match code {
2059 lsp::NumberOrString::Number(code) => code.to_string(),
2060 lsp::NumberOrString::String(code) => code.clone(),
2061 });
2062 let range = range_from_lsp(diagnostic.range);
2063 let is_supporting = diagnostic
2064 .related_information
2065 .as_ref()
2066 .map_or(false, |infos| {
2067 infos.iter().any(|info| {
2068 primary_diagnostic_group_ids.contains_key(&(
2069 source,
2070 code.clone(),
2071 range_from_lsp(info.location.range),
2072 ))
2073 })
2074 });
2075
2076 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2077 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2078 });
2079
2080 if is_supporting {
2081 supporting_diagnostics.insert(
2082 (source, code.clone(), range),
2083 (diagnostic.severity, is_unnecessary),
2084 );
2085 } else {
2086 let group_id = post_inc(&mut next_group_id);
2087 let is_disk_based = source.map_or(false, |source| {
2088 disk_based_sources.contains(&source.as_str())
2089 });
2090
2091 sources_by_group_id.insert(group_id, source);
2092 primary_diagnostic_group_ids
2093 .insert((source, code.clone(), range.clone()), group_id);
2094
2095 diagnostics.push(DiagnosticEntry {
2096 range,
2097 diagnostic: Diagnostic {
2098 code: code.clone(),
2099 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2100 message: diagnostic.message.clone(),
2101 group_id,
2102 is_primary: true,
2103 is_valid: true,
2104 is_disk_based,
2105 is_unnecessary,
2106 },
2107 });
2108 if let Some(infos) = &diagnostic.related_information {
2109 for info in infos {
2110 if info.location.uri == params.uri && !info.message.is_empty() {
2111 let range = range_from_lsp(info.location.range);
2112 diagnostics.push(DiagnosticEntry {
2113 range,
2114 diagnostic: Diagnostic {
2115 code: code.clone(),
2116 severity: DiagnosticSeverity::INFORMATION,
2117 message: info.message.clone(),
2118 group_id,
2119 is_primary: false,
2120 is_valid: true,
2121 is_disk_based,
2122 is_unnecessary: false,
2123 },
2124 });
2125 }
2126 }
2127 }
2128 }
2129 }
2130
2131 for entry in &mut diagnostics {
2132 let diagnostic = &mut entry.diagnostic;
2133 if !diagnostic.is_primary {
2134 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2135 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2136 source,
2137 diagnostic.code.clone(),
2138 entry.range.clone(),
2139 )) {
2140 if let Some(severity) = severity {
2141 diagnostic.severity = severity;
2142 }
2143 diagnostic.is_unnecessary = is_unnecessary;
2144 }
2145 }
2146 }
2147
2148 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2149 Ok(())
2150 }
2151
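    /// Routes a set of diagnostics for an absolute path to the worktree that
    /// contains it and, if the file is open, to its buffer. Diagnostics for
    /// invisible worktrees are dropped.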
2152 pub fn update_diagnostic_entries(
2153 &mut self,
2154 abs_path: PathBuf,
2155 version: Option<i32>,
2156 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2157 cx: &mut ModelContext<Project>,
2158 ) -> Result<(), anyhow::Error> {
2159 let (worktree, relative_path) = self
2160 .find_local_worktree(&abs_path, cx)
2161 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2162 if !worktree.read(cx).is_visible() {
2163 return Ok(());
2164 }
2165
2166 let project_path = ProjectPath {
2167 worktree_id: worktree.read(cx).id(),
2168 path: relative_path.into(),
2169 };
2170 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2171 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2172 }
2173
2174 let updated = worktree.update(cx, |worktree, cx| {
2175 worktree
2176 .as_local_mut()
2177 .ok_or_else(|| anyhow!("not a local worktree"))?
2178 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2179 })?;
2180 if updated {
2181 cx.emit(Event::DiagnosticsUpdated(project_path));
2182 }
2183 Ok(())
2184 }
2185
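    /// Applies diagnostics to an open buffer at the given LSP document
    /// version. Ranges produced from the on-disk contents are remapped through
    /// the edits made since the last save, all ranges are clipped to valid
    /// positions, and empty ranges are widened by one character so they stay
    /// visible.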
2186 fn update_buffer_diagnostics(
2187 &mut self,
2188 buffer: &ModelHandle<Buffer>,
2189 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2190 version: Option<i32>,
2191 cx: &mut ModelContext<Self>,
2192 ) -> Result<()> {
2193 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2194 Ordering::Equal
2195 .then_with(|| b.is_primary.cmp(&a.is_primary))
2196 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2197 .then_with(|| a.severity.cmp(&b.severity))
2198 .then_with(|| a.message.cmp(&b.message))
2199 }
2200
2201 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2202
2203 diagnostics.sort_unstable_by(|a, b| {
2204 Ordering::Equal
2205 .then_with(|| a.range.start.cmp(&b.range.start))
2206 .then_with(|| b.range.end.cmp(&a.range.end))
2207 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2208 });
2209
2210 let mut sanitized_diagnostics = Vec::new();
2211 let edits_since_save = Patch::new(
2212 snapshot
2213 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2214 .collect(),
2215 );
2216 for entry in diagnostics {
2217 let start;
2218 let end;
2219 if entry.diagnostic.is_disk_based {
2220 // Some diagnostics are based on files on disk instead of buffers'
2221 // current contents. Adjust these diagnostics' ranges to reflect
2222 // any unsaved edits.
2223 start = edits_since_save.old_to_new(entry.range.start);
2224 end = edits_since_save.old_to_new(entry.range.end);
2225 } else {
2226 start = entry.range.start;
2227 end = entry.range.end;
2228 }
2229
2230 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2231 ..snapshot.clip_point_utf16(end, Bias::Right);
2232
2233 // Expand empty ranges by one character
2234 if range.start == range.end {
2235 range.end.column += 1;
2236 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2237 if range.start == range.end && range.end.column > 0 {
2238 range.start.column -= 1;
2239 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2240 }
2241 }
2242
2243 sanitized_diagnostics.push(DiagnosticEntry {
2244 range,
2245 diagnostic: entry.diagnostic,
2246 });
2247 }
2248 drop(edits_since_save);
2249
2250 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2251 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2252 Ok(())
2253 }
2254
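    /// Reloads the dirty buffers among `buffers` from disk. Local buffers are
    /// reloaded directly, remote buffers through a request to the host. The
    /// resulting transactions are returned and, unless `push_to_history` is
    /// set, forgotten from each buffer's undo history.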
2255 pub fn reload_buffers(
2256 &self,
2257 buffers: HashSet<ModelHandle<Buffer>>,
2258 push_to_history: bool,
2259 cx: &mut ModelContext<Self>,
2260 ) -> Task<Result<ProjectTransaction>> {
2261 let mut local_buffers = Vec::new();
2262 let mut remote_buffers = None;
2263 for buffer_handle in buffers {
2264 let buffer = buffer_handle.read(cx);
2265 if buffer.is_dirty() {
2266 if let Some(file) = File::from_dyn(buffer.file()) {
2267 if file.is_local() {
2268 local_buffers.push(buffer_handle);
2269 } else {
2270 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2271 }
2272 }
2273 }
2274 }
2275
2276 let remote_buffers = self.remote_id().zip(remote_buffers);
2277 let client = self.client.clone();
2278
2279 cx.spawn(|this, mut cx| async move {
2280 let mut project_transaction = ProjectTransaction::default();
2281
2282 if let Some((project_id, remote_buffers)) = remote_buffers {
2283 let response = client
2284 .request(proto::ReloadBuffers {
2285 project_id,
2286 buffer_ids: remote_buffers
2287 .iter()
2288 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2289 .collect(),
2290 })
2291 .await?
2292 .transaction
2293 .ok_or_else(|| anyhow!("missing transaction"))?;
2294 project_transaction = this
2295 .update(&mut cx, |this, cx| {
2296 this.deserialize_project_transaction(response, push_to_history, cx)
2297 })
2298 .await?;
2299 }
2300
2301 for buffer in local_buffers {
2302 let transaction = buffer
2303 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2304 .await?;
2305 buffer.update(&mut cx, |buffer, cx| {
2306 if let Some(transaction) = transaction {
2307 if !push_to_history {
2308 buffer.forget_transaction(transaction.id);
2309 }
2310 project_transaction.0.insert(cx.handle(), transaction);
2311 }
2312 });
2313 }
2314
2315 Ok(project_transaction)
2316 })
2317 }
2318
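    /// Formats the given buffers. Remote buffers are formatted by the host via
    /// RPC. Local buffers use their language server's whole-document
    /// formatting when available, falling back to a range-formatting request
    /// that covers the entire buffer.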
2319 pub fn format(
2320 &self,
2321 buffers: HashSet<ModelHandle<Buffer>>,
2322 push_to_history: bool,
2323 cx: &mut ModelContext<Project>,
2324 ) -> Task<Result<ProjectTransaction>> {
2325 let mut local_buffers = Vec::new();
2326 let mut remote_buffers = None;
2327 for buffer_handle in buffers {
2328 let buffer = buffer_handle.read(cx);
2329 if let Some(file) = File::from_dyn(buffer.file()) {
2330 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2331 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2332 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2333 }
2334 } else {
2335 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2336 }
2337 } else {
2338 return Task::ready(Ok(Default::default()));
2339 }
2340 }
2341
2342 let remote_buffers = self.remote_id().zip(remote_buffers);
2343 let client = self.client.clone();
2344
2345 cx.spawn(|this, mut cx| async move {
2346 let mut project_transaction = ProjectTransaction::default();
2347
2348 if let Some((project_id, remote_buffers)) = remote_buffers {
2349 let response = client
2350 .request(proto::FormatBuffers {
2351 project_id,
2352 buffer_ids: remote_buffers
2353 .iter()
2354 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2355 .collect(),
2356 })
2357 .await?
2358 .transaction
2359 .ok_or_else(|| anyhow!("missing transaction"))?;
2360 project_transaction = this
2361 .update(&mut cx, |this, cx| {
2362 this.deserialize_project_transaction(response, push_to_history, cx)
2363 })
2364 .await?;
2365 }
2366
2367 for (buffer, buffer_abs_path, language_server) in local_buffers {
2368 let text_document = lsp::TextDocumentIdentifier::new(
2369 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2370 );
2371 let capabilities = &language_server.capabilities();
2372 let tab_size = cx.update(|cx| {
2373 let language_name = buffer.read(cx).language().map(|language| language.name());
2374 cx.global::<Settings>().tab_size(language_name.as_deref())
2375 });
2376 let lsp_edits = if capabilities
2377 .document_formatting_provider
2378 .as_ref()
2379 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2380 {
2381 language_server
2382 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2383 text_document,
2384 options: lsp::FormattingOptions {
2385 tab_size,
2386 insert_spaces: true,
2387 insert_final_newline: Some(true),
2388 ..Default::default()
2389 },
2390 work_done_progress_params: Default::default(),
2391 })
2392 .await?
2393 } else if capabilities
2394 .document_range_formatting_provider
2395 .as_ref()
2396 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2397 {
2398 let buffer_start = lsp::Position::new(0, 0);
2399 let buffer_end =
2400 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2401 language_server
2402 .request::<lsp::request::RangeFormatting>(
2403 lsp::DocumentRangeFormattingParams {
2404 text_document,
2405 range: lsp::Range::new(buffer_start, buffer_end),
2406 options: lsp::FormattingOptions {
                                    tab_size,
2408 insert_spaces: true,
2409 insert_final_newline: Some(true),
2410 ..Default::default()
2411 },
2412 work_done_progress_params: Default::default(),
2413 },
2414 )
2415 .await?
2416 } else {
2417 continue;
2418 };
2419
2420 if let Some(lsp_edits) = lsp_edits {
2421 let edits = this
2422 .update(&mut cx, |this, cx| {
2423 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2424 })
2425 .await?;
2426 buffer.update(&mut cx, |buffer, cx| {
2427 buffer.finalize_last_transaction();
2428 buffer.start_transaction();
2429 for (range, text) in edits {
2430 buffer.edit([(range, text)], cx);
2431 }
2432 if buffer.end_transaction(cx).is_some() {
2433 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2434 if !push_to_history {
2435 buffer.forget_transaction(transaction.id);
2436 }
2437 project_transaction.0.insert(cx.handle(), transaction);
2438 }
2439 });
2440 }
2441 }
2442
2443 Ok(project_transaction)
2444 })
2445 }
2446
2447 pub fn definition<T: ToPointUtf16>(
2448 &self,
2449 buffer: &ModelHandle<Buffer>,
2450 position: T,
2451 cx: &mut ModelContext<Self>,
2452 ) -> Task<Result<Vec<Location>>> {
2453 let position = position.to_point_utf16(buffer.read(cx));
2454 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2455 }
2456
2457 pub fn references<T: ToPointUtf16>(
2458 &self,
2459 buffer: &ModelHandle<Buffer>,
2460 position: T,
2461 cx: &mut ModelContext<Self>,
2462 ) -> Task<Result<Vec<Location>>> {
2463 let position = position.to_point_utf16(buffer.read(cx));
2464 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2465 }
2466
2467 pub fn document_highlights<T: ToPointUtf16>(
2468 &self,
2469 buffer: &ModelHandle<Buffer>,
2470 position: T,
2471 cx: &mut ModelContext<Self>,
2472 ) -> Task<Result<Vec<DocumentHighlight>>> {
2473 let position = position.to_point_utf16(buffer.read(cx));
2474
2475 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2476 }
2477
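    /// Searches for workspace symbols matching `query`. On a local project
    /// every running language server is queried and the results are resolved
    /// to project-relative paths; on a remote project the request is forwarded
    /// to the host.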
2478 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2479 if self.is_local() {
2480 let mut requests = Vec::new();
2481 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2482 let worktree_id = *worktree_id;
2483 if let Some(worktree) = self
2484 .worktree_for_id(worktree_id, cx)
2485 .and_then(|worktree| worktree.read(cx).as_local())
2486 {
2487 let lsp_adapter = lsp_adapter.clone();
2488 let worktree_abs_path = worktree.abs_path().clone();
2489 requests.push(
2490 language_server
2491 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2492 query: query.to_string(),
2493 ..Default::default()
2494 })
2495 .log_err()
2496 .map(move |response| {
2497 (
2498 lsp_adapter,
2499 worktree_id,
2500 worktree_abs_path,
2501 response.unwrap_or_default(),
2502 )
2503 }),
2504 );
2505 }
2506 }
2507
2508 cx.spawn_weak(|this, cx| async move {
2509 let responses = futures::future::join_all(requests).await;
2510 let this = if let Some(this) = this.upgrade(&cx) {
2511 this
2512 } else {
2513 return Ok(Default::default());
2514 };
2515 this.read_with(&cx, |this, cx| {
2516 let mut symbols = Vec::new();
2517 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2518 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2519 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2520 let mut worktree_id = source_worktree_id;
2521 let path;
2522 if let Some((worktree, rel_path)) =
2523 this.find_local_worktree(&abs_path, cx)
2524 {
2525 worktree_id = worktree.read(cx).id();
2526 path = rel_path;
2527 } else {
2528 path = relativize_path(&worktree_abs_path, &abs_path);
2529 }
2530
2531 let label = this
2532 .languages
2533 .select_language(&path)
2534 .and_then(|language| {
2535 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2536 })
2537 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2538 let signature = this.symbol_signature(worktree_id, &path);
2539
2540 Some(Symbol {
2541 source_worktree_id,
2542 worktree_id,
2543 language_server_name: adapter.name(),
2544 name: lsp_symbol.name,
2545 kind: lsp_symbol.kind,
2546 label,
2547 path,
2548 range: range_from_lsp(lsp_symbol.location.range),
2549 signature,
2550 })
2551 }));
2552 }
2553 Ok(symbols)
2554 })
2555 })
2556 } else if let Some(project_id) = self.remote_id() {
2557 let request = self.client.request(proto::GetProjectSymbols {
2558 project_id,
2559 query: query.to_string(),
2560 });
2561 cx.spawn_weak(|this, cx| async move {
2562 let response = request.await?;
2563 let mut symbols = Vec::new();
2564 if let Some(this) = this.upgrade(&cx) {
2565 this.read_with(&cx, |this, _| {
2566 symbols.extend(
2567 response
2568 .symbols
2569 .into_iter()
2570 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2571 );
2572 })
2573 }
2574 Ok(symbols)
2575 })
2576 } else {
2577 Task::ready(Ok(Default::default()))
2578 }
2579 }
2580
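    /// Opens the buffer containing the given symbol, resolving the symbol's
    /// path against the worktree it belongs to and going through the language
    /// server that reported it. On a remote project the buffer is requested
    /// from the host.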
2581 pub fn open_buffer_for_symbol(
2582 &mut self,
2583 symbol: &Symbol,
2584 cx: &mut ModelContext<Self>,
2585 ) -> Task<Result<ModelHandle<Buffer>>> {
2586 if self.is_local() {
2587 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2588 symbol.source_worktree_id,
2589 symbol.language_server_name.clone(),
2590 )) {
2591 server.clone()
2592 } else {
2593 return Task::ready(Err(anyhow!(
2594 "language server for worktree and language not found"
2595 )));
2596 };
2597
2598 let worktree_abs_path = if let Some(worktree_abs_path) = self
2599 .worktree_for_id(symbol.worktree_id, cx)
2600 .and_then(|worktree| worktree.read(cx).as_local())
2601 .map(|local_worktree| local_worktree.abs_path())
2602 {
2603 worktree_abs_path
2604 } else {
2605 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2606 };
2607 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2608 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2609 uri
2610 } else {
2611 return Task::ready(Err(anyhow!("invalid symbol path")));
2612 };
2613
2614 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2615 } else if let Some(project_id) = self.remote_id() {
2616 let request = self.client.request(proto::OpenBufferForSymbol {
2617 project_id,
2618 symbol: Some(serialize_symbol(symbol)),
2619 });
2620 cx.spawn(|this, mut cx| async move {
2621 let response = request.await?;
2622 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2623 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2624 .await
2625 })
2626 } else {
2627 Task::ready(Err(anyhow!("project does not have a remote id")))
2628 }
2629 }
2630
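    /// Requests completions at `position`. LSP completion items are converted
    /// into [`Completion`] values: the replaced range comes from the item's
    /// text edit when one is provided and is otherwise inferred from the word
    /// token around the cursor; insert-and-replace edits are skipped.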
2631 pub fn completions<T: ToPointUtf16>(
2632 &self,
2633 source_buffer_handle: &ModelHandle<Buffer>,
2634 position: T,
2635 cx: &mut ModelContext<Self>,
2636 ) -> Task<Result<Vec<Completion>>> {
2637 let source_buffer_handle = source_buffer_handle.clone();
2638 let source_buffer = source_buffer_handle.read(cx);
2639 let buffer_id = source_buffer.remote_id();
2640 let language = source_buffer.language().cloned();
2641 let worktree;
2642 let buffer_abs_path;
2643 if let Some(file) = File::from_dyn(source_buffer.file()) {
2644 worktree = file.worktree.clone();
2645 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2646 } else {
2647 return Task::ready(Ok(Default::default()));
2648 };
2649
2650 let position = position.to_point_utf16(source_buffer);
2651 let anchor = source_buffer.anchor_after(position);
2652
2653 if worktree.read(cx).as_local().is_some() {
2654 let buffer_abs_path = buffer_abs_path.unwrap();
2655 let (_, lang_server) =
2656 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2657 server.clone()
2658 } else {
2659 return Task::ready(Ok(Default::default()));
2660 };
2661
2662 cx.spawn(|_, cx| async move {
2663 let completions = lang_server
2664 .request::<lsp::request::Completion>(lsp::CompletionParams {
2665 text_document_position: lsp::TextDocumentPositionParams::new(
2666 lsp::TextDocumentIdentifier::new(
2667 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2668 ),
2669 point_to_lsp(position),
2670 ),
2671 context: Default::default(),
2672 work_done_progress_params: Default::default(),
2673 partial_result_params: Default::default(),
2674 })
2675 .await
2676 .context("lsp completion request failed")?;
2677
2678 let completions = if let Some(completions) = completions {
2679 match completions {
2680 lsp::CompletionResponse::Array(completions) => completions,
2681 lsp::CompletionResponse::List(list) => list.items,
2682 }
2683 } else {
2684 Default::default()
2685 };
2686
2687 source_buffer_handle.read_with(&cx, |this, _| {
2688 let snapshot = this.snapshot();
2689 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2690 let mut range_for_token = None;
2691 Ok(completions
2692 .into_iter()
2693 .filter_map(|lsp_completion| {
2694 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2695 // If the language server provides a range to overwrite, then
2696 // check that the range is valid.
2697 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2698 let range = range_from_lsp(edit.range);
2699 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2700 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2701 if start != range.start || end != range.end {
2702 log::info!("completion out of expected range");
2703 return None;
2704 }
2705 (
2706 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2707 edit.new_text.clone(),
2708 )
2709 }
2710 // If the language server does not provide a range, then infer
2711 // the range based on the syntax tree.
2712 None => {
2713 if position != clipped_position {
2714 log::info!("completion out of expected range");
2715 return None;
2716 }
2717 let Range { start, end } = range_for_token
2718 .get_or_insert_with(|| {
2719 let offset = position.to_offset(&snapshot);
2720 snapshot
2721 .range_for_word_token_at(offset)
2722 .unwrap_or_else(|| offset..offset)
2723 })
2724 .clone();
2725 let text = lsp_completion
2726 .insert_text
2727 .as_ref()
2728 .unwrap_or(&lsp_completion.label)
2729 .clone();
2730 (
2731 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2732 text.clone(),
2733 )
2734 }
2735 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2736 log::info!("unsupported insert/replace completion");
2737 return None;
2738 }
2739 };
2740
2741 Some(Completion {
2742 old_range,
2743 new_text,
2744 label: language
2745 .as_ref()
2746 .and_then(|l| l.label_for_completion(&lsp_completion))
2747 .unwrap_or_else(|| {
2748 CodeLabel::plain(
2749 lsp_completion.label.clone(),
2750 lsp_completion.filter_text.as_deref(),
2751 )
2752 }),
2753 lsp_completion,
2754 })
2755 })
2756 .collect())
2757 })
2758 })
2759 } else if let Some(project_id) = self.remote_id() {
2760 let rpc = self.client.clone();
2761 let message = proto::GetCompletions {
2762 project_id,
2763 buffer_id,
2764 position: Some(language::proto::serialize_anchor(&anchor)),
2765 version: serialize_version(&source_buffer.version()),
2766 };
2767 cx.spawn_weak(|_, mut cx| async move {
2768 let response = rpc.request(message).await?;
2769
2770 source_buffer_handle
2771 .update(&mut cx, |buffer, _| {
2772 buffer.wait_for_version(deserialize_version(response.version))
2773 })
2774 .await;
2775
2776 response
2777 .completions
2778 .into_iter()
2779 .map(|completion| {
2780 language::proto::deserialize_completion(completion, language.as_ref())
2781 })
2782 .collect()
2783 })
2784 } else {
2785 Task::ready(Ok(Default::default()))
2786 }
2787 }
2788
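    /// Resolves a completion item and applies any additional text edits it
    /// carries, returning the resulting transaction if the buffer changed.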
2789 pub fn apply_additional_edits_for_completion(
2790 &self,
2791 buffer_handle: ModelHandle<Buffer>,
2792 completion: Completion,
2793 push_to_history: bool,
2794 cx: &mut ModelContext<Self>,
2795 ) -> Task<Result<Option<Transaction>>> {
2796 let buffer = buffer_handle.read(cx);
2797 let buffer_id = buffer.remote_id();
2798
2799 if self.is_local() {
2800 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2801 {
2802 server.clone()
2803 } else {
2804 return Task::ready(Ok(Default::default()));
2805 };
2806
2807 cx.spawn(|this, mut cx| async move {
2808 let resolved_completion = lang_server
2809 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2810 .await?;
2811 if let Some(edits) = resolved_completion.additional_text_edits {
2812 let edits = this
2813 .update(&mut cx, |this, cx| {
2814 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2815 })
2816 .await?;
2817 buffer_handle.update(&mut cx, |buffer, cx| {
2818 buffer.finalize_last_transaction();
2819 buffer.start_transaction();
2820 for (range, text) in edits {
2821 buffer.edit([(range, text)], cx);
2822 }
2823 let transaction = if buffer.end_transaction(cx).is_some() {
2824 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2825 if !push_to_history {
2826 buffer.forget_transaction(transaction.id);
2827 }
2828 Some(transaction)
2829 } else {
2830 None
2831 };
2832 Ok(transaction)
2833 })
2834 } else {
2835 Ok(None)
2836 }
2837 })
2838 } else if let Some(project_id) = self.remote_id() {
2839 let client = self.client.clone();
2840 cx.spawn(|_, mut cx| async move {
2841 let response = client
2842 .request(proto::ApplyCompletionAdditionalEdits {
2843 project_id,
2844 buffer_id,
2845 completion: Some(language::proto::serialize_completion(&completion)),
2846 })
2847 .await?;
2848
2849 if let Some(transaction) = response.transaction {
2850 let transaction = language::proto::deserialize_transaction(transaction)?;
2851 buffer_handle
2852 .update(&mut cx, |buffer, _| {
2853 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2854 })
2855 .await;
2856 if push_to_history {
2857 buffer_handle.update(&mut cx, |buffer, _| {
2858 buffer.push_transaction(transaction.clone(), Instant::now());
2859 });
2860 }
2861 Ok(Some(transaction))
2862 } else {
2863 Ok(None)
2864 }
2865 })
2866 } else {
2867 Task::ready(Err(anyhow!("project does not have a remote id")))
2868 }
2869 }
2870
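    /// Requests code actions for the given range, passing along the
    /// diagnostics that overlap it so the server can offer quickfixes.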
2871 pub fn code_actions<T: Clone + ToOffset>(
2872 &self,
2873 buffer_handle: &ModelHandle<Buffer>,
2874 range: Range<T>,
2875 cx: &mut ModelContext<Self>,
2876 ) -> Task<Result<Vec<CodeAction>>> {
2877 let buffer_handle = buffer_handle.clone();
2878 let buffer = buffer_handle.read(cx);
2879 let snapshot = buffer.snapshot();
2880 let relevant_diagnostics = snapshot
2881 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2882 .map(|entry| entry.to_lsp_diagnostic_stub())
2883 .collect();
2884 let buffer_id = buffer.remote_id();
2885 let worktree;
2886 let buffer_abs_path;
2887 if let Some(file) = File::from_dyn(buffer.file()) {
2888 worktree = file.worktree.clone();
2889 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2890 } else {
2891 return Task::ready(Ok(Default::default()));
2892 };
2893 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2894
2895 if worktree.read(cx).as_local().is_some() {
2896 let buffer_abs_path = buffer_abs_path.unwrap();
2897 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2898 {
2899 server.clone()
2900 } else {
2901 return Task::ready(Ok(Default::default()));
2902 };
2903
2904 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2905 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2907 return Ok(Default::default());
2908 }
2909
2910 Ok(lang_server
2911 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2912 text_document: lsp::TextDocumentIdentifier::new(
2913 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2914 ),
2915 range: lsp_range,
2916 work_done_progress_params: Default::default(),
2917 partial_result_params: Default::default(),
2918 context: lsp::CodeActionContext {
2919 diagnostics: relevant_diagnostics,
2920 only: Some(vec![
2921 lsp::CodeActionKind::QUICKFIX,
2922 lsp::CodeActionKind::REFACTOR,
2923 lsp::CodeActionKind::REFACTOR_EXTRACT,
2924 lsp::CodeActionKind::SOURCE,
2925 ]),
2926 },
2927 })
2928 .await?
2929 .unwrap_or_default()
2930 .into_iter()
2931 .filter_map(|entry| {
2932 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2933 Some(CodeAction {
2934 range: range.clone(),
2935 lsp_action,
2936 })
2937 } else {
2938 None
2939 }
2940 })
2941 .collect())
2942 })
2943 } else if let Some(project_id) = self.remote_id() {
2944 let rpc = self.client.clone();
2945 let version = buffer.version();
2946 cx.spawn_weak(|_, mut cx| async move {
2947 let response = rpc
2948 .request(proto::GetCodeActions {
2949 project_id,
2950 buffer_id,
2951 start: Some(language::proto::serialize_anchor(&range.start)),
2952 end: Some(language::proto::serialize_anchor(&range.end)),
2953 version: serialize_version(&version),
2954 })
2955 .await?;
2956
2957 buffer_handle
2958 .update(&mut cx, |buffer, _| {
2959 buffer.wait_for_version(deserialize_version(response.version))
2960 })
2961 .await;
2962
2963 response
2964 .actions
2965 .into_iter()
2966 .map(language::proto::deserialize_code_action)
2967 .collect()
2968 })
2969 } else {
2970 Task::ready(Ok(Default::default()))
2971 }
2972 }
2973
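    /// Applies a code action. The action is first re-resolved (or re-requested
    /// and matched by title) so its edit reflects the buffer's current state;
    /// then its workspace edit is applied, or its command is executed and any
    /// workspace edits reported by the server are collected.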
2974 pub fn apply_code_action(
2975 &self,
2976 buffer_handle: ModelHandle<Buffer>,
2977 mut action: CodeAction,
2978 push_to_history: bool,
2979 cx: &mut ModelContext<Self>,
2980 ) -> Task<Result<ProjectTransaction>> {
2981 if self.is_local() {
2982 let buffer = buffer_handle.read(cx);
2983 let (lsp_adapter, lang_server) =
2984 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2985 server.clone()
2986 } else {
2987 return Task::ready(Ok(Default::default()));
2988 };
2989 let range = action.range.to_point_utf16(buffer);
2990
2991 cx.spawn(|this, mut cx| async move {
2992 if let Some(lsp_range) = action
2993 .lsp_action
2994 .data
2995 .as_mut()
2996 .and_then(|d| d.get_mut("codeActionParams"))
2997 .and_then(|d| d.get_mut("range"))
2998 {
2999 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3000 action.lsp_action = lang_server
3001 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3002 .await?;
3003 } else {
3004 let actions = this
3005 .update(&mut cx, |this, cx| {
3006 this.code_actions(&buffer_handle, action.range, cx)
3007 })
3008 .await?;
3009 action.lsp_action = actions
3010 .into_iter()
3011 .find(|a| a.lsp_action.title == action.lsp_action.title)
3012 .ok_or_else(|| anyhow!("code action is outdated"))?
3013 .lsp_action;
3014 }
3015
3016 if let Some(edit) = action.lsp_action.edit {
3017 Self::deserialize_workspace_edit(
3018 this,
3019 edit,
3020 push_to_history,
3021 lsp_adapter,
3022 lang_server,
3023 &mut cx,
3024 )
3025 .await
3026 } else if let Some(command) = action.lsp_action.command {
3027 this.update(&mut cx, |this, _| {
3028 this.last_workspace_edits_by_language_server
3029 .remove(&lang_server.server_id());
3030 });
3031 lang_server
3032 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3033 command: command.command,
3034 arguments: command.arguments.unwrap_or_default(),
3035 ..Default::default()
3036 })
3037 .await?;
3038 Ok(this.update(&mut cx, |this, _| {
3039 this.last_workspace_edits_by_language_server
3040 .remove(&lang_server.server_id())
3041 .unwrap_or_default()
3042 }))
3043 } else {
3044 Ok(ProjectTransaction::default())
3045 }
3046 })
3047 } else if let Some(project_id) = self.remote_id() {
3048 let client = self.client.clone();
3049 let request = proto::ApplyCodeAction {
3050 project_id,
3051 buffer_id: buffer_handle.read(cx).remote_id(),
3052 action: Some(language::proto::serialize_code_action(&action)),
3053 };
3054 cx.spawn(|this, mut cx| async move {
3055 let response = client
3056 .request(request)
3057 .await?
3058 .transaction
3059 .ok_or_else(|| anyhow!("missing transaction"))?;
3060 this.update(&mut cx, |this, cx| {
3061 this.deserialize_project_transaction(response, push_to_history, cx)
3062 })
3063 .await
3064 })
3065 } else {
3066 Task::ready(Err(anyhow!("project does not have a remote id")))
3067 }
3068 }
3069
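    /// Applies an LSP workspace edit to the project. Resource operations
    /// (create, rename, delete) are performed through the filesystem, and text
    /// edits are applied to the corresponding buffers, producing one
    /// transaction per edited buffer.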
3070 async fn deserialize_workspace_edit(
3071 this: ModelHandle<Self>,
3072 edit: lsp::WorkspaceEdit,
3073 push_to_history: bool,
3074 lsp_adapter: Arc<dyn LspAdapter>,
3075 language_server: Arc<LanguageServer>,
3076 cx: &mut AsyncAppContext,
3077 ) -> Result<ProjectTransaction> {
3078 let fs = this.read_with(cx, |this, _| this.fs.clone());
3079 let mut operations = Vec::new();
3080 if let Some(document_changes) = edit.document_changes {
3081 match document_changes {
3082 lsp::DocumentChanges::Edits(edits) => {
3083 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3084 }
3085 lsp::DocumentChanges::Operations(ops) => operations = ops,
3086 }
3087 } else if let Some(changes) = edit.changes {
3088 operations.extend(changes.into_iter().map(|(uri, edits)| {
3089 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3090 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3091 uri,
3092 version: None,
3093 },
3094 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3095 })
3096 }));
3097 }
3098
3099 let mut project_transaction = ProjectTransaction::default();
3100 for operation in operations {
3101 match operation {
3102 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3103 let abs_path = op
3104 .uri
3105 .to_file_path()
3106 .map_err(|_| anyhow!("can't convert URI to path"))?;
3107
3108 if let Some(parent_path) = abs_path.parent() {
3109 fs.create_dir(parent_path).await?;
3110 }
3111 if abs_path.ends_with("/") {
3112 fs.create_dir(&abs_path).await?;
3113 } else {
3114 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3115 .await?;
3116 }
3117 }
3118 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3119 let source_abs_path = op
3120 .old_uri
3121 .to_file_path()
3122 .map_err(|_| anyhow!("can't convert URI to path"))?;
3123 let target_abs_path = op
3124 .new_uri
3125 .to_file_path()
3126 .map_err(|_| anyhow!("can't convert URI to path"))?;
3127 fs.rename(
3128 &source_abs_path,
3129 &target_abs_path,
3130 op.options.map(Into::into).unwrap_or_default(),
3131 )
3132 .await?;
3133 }
3134 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3135 let abs_path = op
3136 .uri
3137 .to_file_path()
3138 .map_err(|_| anyhow!("can't convert URI to path"))?;
3139 let options = op.options.map(Into::into).unwrap_or_default();
3140 if abs_path.ends_with("/") {
3141 fs.remove_dir(&abs_path, options).await?;
3142 } else {
3143 fs.remove_file(&abs_path, options).await?;
3144 }
3145 }
3146 lsp::DocumentChangeOperation::Edit(op) => {
3147 let buffer_to_edit = this
3148 .update(cx, |this, cx| {
3149 this.open_local_buffer_via_lsp(
3150 op.text_document.uri,
3151 lsp_adapter.clone(),
3152 language_server.clone(),
3153 cx,
3154 )
3155 })
3156 .await?;
3157
3158 let edits = this
3159 .update(cx, |this, cx| {
3160 let edits = op.edits.into_iter().map(|edit| match edit {
3161 lsp::OneOf::Left(edit) => edit,
3162 lsp::OneOf::Right(edit) => edit.text_edit,
3163 });
3164 this.edits_from_lsp(
3165 &buffer_to_edit,
3166 edits,
3167 op.text_document.version,
3168 cx,
3169 )
3170 })
3171 .await?;
3172
3173 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3174 buffer.finalize_last_transaction();
3175 buffer.start_transaction();
3176 for (range, text) in edits {
3177 buffer.edit([(range, text)], cx);
3178 }
3179 let transaction = if buffer.end_transaction(cx).is_some() {
3180 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3181 if !push_to_history {
3182 buffer.forget_transaction(transaction.id);
3183 }
3184 Some(transaction)
3185 } else {
3186 None
3187 };
3188
3189 transaction
3190 });
3191 if let Some(transaction) = transaction {
3192 project_transaction.0.insert(buffer_to_edit, transaction);
3193 }
3194 }
3195 }
3196 }
3197
3198 Ok(project_transaction)
3199 }
3200
3201 pub fn prepare_rename<T: ToPointUtf16>(
3202 &self,
3203 buffer: ModelHandle<Buffer>,
3204 position: T,
3205 cx: &mut ModelContext<Self>,
3206 ) -> Task<Result<Option<Range<Anchor>>>> {
3207 let position = position.to_point_utf16(buffer.read(cx));
3208 self.request_lsp(buffer, PrepareRename { position }, cx)
3209 }
3210
3211 pub fn perform_rename<T: ToPointUtf16>(
3212 &self,
3213 buffer: ModelHandle<Buffer>,
3214 position: T,
3215 new_name: String,
3216 push_to_history: bool,
3217 cx: &mut ModelContext<Self>,
3218 ) -> Task<Result<ProjectTransaction>> {
3219 let position = position.to_point_utf16(buffer.read(cx));
3220 self.request_lsp(
3221 buffer,
3222 PerformRename {
3223 position,
3224 new_name,
3225 push_to_history,
3226 },
3227 cx,
3228 )
3229 }
3230
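    /// Searches the project for `query`. Locally, visible worktree files are
    /// scanned in parallel by background workers to find candidate paths,
    /// matching files are opened as buffers, and each buffer is searched for
    /// match ranges; on a remote project the search is delegated to the host.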
3231 pub fn search(
3232 &self,
3233 query: SearchQuery,
3234 cx: &mut ModelContext<Self>,
3235 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3236 if self.is_local() {
3237 let snapshots = self
3238 .visible_worktrees(cx)
3239 .filter_map(|tree| {
3240 let tree = tree.read(cx).as_local()?;
3241 Some(tree.snapshot())
3242 })
3243 .collect::<Vec<_>>();
3244
3245 let background = cx.background().clone();
3246 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3247 if path_count == 0 {
3248 return Task::ready(Ok(Default::default()));
3249 }
3250 let workers = background.num_cpus().min(path_count);
3251 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3252 cx.background()
3253 .spawn({
3254 let fs = self.fs.clone();
3255 let background = cx.background().clone();
3256 let query = query.clone();
3257 async move {
3258 let fs = &fs;
3259 let query = &query;
3260 let matching_paths_tx = &matching_paths_tx;
3261 let paths_per_worker = (path_count + workers - 1) / workers;
3262 let snapshots = &snapshots;
3263 background
3264 .scoped(|scope| {
3265 for worker_ix in 0..workers {
3266 let worker_start_ix = worker_ix * paths_per_worker;
3267 let worker_end_ix = worker_start_ix + paths_per_worker;
3268 scope.spawn(async move {
3269 let mut snapshot_start_ix = 0;
3270 let mut abs_path = PathBuf::new();
3271 for snapshot in snapshots {
3272 let snapshot_end_ix =
3273 snapshot_start_ix + snapshot.visible_file_count();
3274 if worker_end_ix <= snapshot_start_ix {
3275 break;
3276 } else if worker_start_ix > snapshot_end_ix {
3277 snapshot_start_ix = snapshot_end_ix;
3278 continue;
3279 } else {
3280 let start_in_snapshot = worker_start_ix
3281 .saturating_sub(snapshot_start_ix);
3282 let end_in_snapshot =
3283 cmp::min(worker_end_ix, snapshot_end_ix)
3284 - snapshot_start_ix;
3285
3286 for entry in snapshot
3287 .files(false, start_in_snapshot)
3288 .take(end_in_snapshot - start_in_snapshot)
3289 {
3290 if matching_paths_tx.is_closed() {
3291 break;
3292 }
3293
3294 abs_path.clear();
3295 abs_path.push(&snapshot.abs_path());
3296 abs_path.push(&entry.path);
3297 let matches = if let Some(file) =
3298 fs.open_sync(&abs_path).await.log_err()
3299 {
3300 query.detect(file).unwrap_or(false)
3301 } else {
3302 false
3303 };
3304
3305 if matches {
3306 let project_path =
3307 (snapshot.id(), entry.path.clone());
3308 if matching_paths_tx
3309 .send(project_path)
3310 .await
3311 .is_err()
3312 {
3313 break;
3314 }
3315 }
3316 }
3317
3318 snapshot_start_ix = snapshot_end_ix;
3319 }
3320 }
3321 });
3322 }
3323 })
3324 .await;
3325 }
3326 })
3327 .detach();
3328
3329 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3330 let open_buffers = self
3331 .opened_buffers
3332 .values()
3333 .filter_map(|b| b.upgrade(cx))
3334 .collect::<HashSet<_>>();
3335 cx.spawn(|this, cx| async move {
3336 for buffer in &open_buffers {
3337 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3338 buffers_tx.send((buffer.clone(), snapshot)).await?;
3339 }
3340
3341 let open_buffers = Rc::new(RefCell::new(open_buffers));
3342 while let Some(project_path) = matching_paths_rx.next().await {
3343 if buffers_tx.is_closed() {
3344 break;
3345 }
3346
3347 let this = this.clone();
3348 let open_buffers = open_buffers.clone();
3349 let buffers_tx = buffers_tx.clone();
3350 cx.spawn(|mut cx| async move {
3351 if let Some(buffer) = this
3352 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3353 .await
3354 .log_err()
3355 {
3356 if open_buffers.borrow_mut().insert(buffer.clone()) {
3357 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3358 buffers_tx.send((buffer, snapshot)).await?;
3359 }
3360 }
3361
3362 Ok::<_, anyhow::Error>(())
3363 })
3364 .detach();
3365 }
3366
3367 Ok::<_, anyhow::Error>(())
3368 })
3369 .detach_and_log_err(cx);
3370
3371 let background = cx.background().clone();
3372 cx.background().spawn(async move {
3373 let query = &query;
3374 let mut matched_buffers = Vec::new();
3375 for _ in 0..workers {
3376 matched_buffers.push(HashMap::default());
3377 }
3378 background
3379 .scoped(|scope| {
3380 for worker_matched_buffers in matched_buffers.iter_mut() {
3381 let mut buffers_rx = buffers_rx.clone();
3382 scope.spawn(async move {
3383 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3384 let buffer_matches = query
3385 .search(snapshot.as_rope())
3386 .await
3387 .iter()
3388 .map(|range| {
3389 snapshot.anchor_before(range.start)
3390 ..snapshot.anchor_after(range.end)
3391 })
3392 .collect::<Vec<_>>();
3393 if !buffer_matches.is_empty() {
3394 worker_matched_buffers
3395 .insert(buffer.clone(), buffer_matches);
3396 }
3397 }
3398 });
3399 }
3400 })
3401 .await;
3402 Ok(matched_buffers.into_iter().flatten().collect())
3403 })
3404 } else if let Some(project_id) = self.remote_id() {
3405 let request = self.client.request(query.to_proto(project_id));
3406 cx.spawn(|this, mut cx| async move {
3407 let response = request.await?;
3408 let mut result = HashMap::default();
3409 for location in response.locations {
3410 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3411 let target_buffer = this
3412 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3413 .await?;
3414 let start = location
3415 .start
3416 .and_then(deserialize_anchor)
3417 .ok_or_else(|| anyhow!("missing target start"))?;
3418 let end = location
3419 .end
3420 .and_then(deserialize_anchor)
3421 .ok_or_else(|| anyhow!("missing target end"))?;
3422 result
3423 .entry(target_buffer)
3424 .or_insert(Vec::new())
3425 .push(start..end)
3426 }
3427 Ok(result)
3428 })
3429 } else {
3430 Task::ready(Ok(Default::default()))
3431 }
3432 }
3433
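    /// Dispatches a typed LSP request for a buffer: directly to the buffer's
    /// language server on a local project, or over RPC to the host on a remote
    /// one. Returns a default response if no server is available or the server
    /// lacks the required capability.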
3434 fn request_lsp<R: LspCommand>(
3435 &self,
3436 buffer_handle: ModelHandle<Buffer>,
3437 request: R,
3438 cx: &mut ModelContext<Self>,
3439 ) -> Task<Result<R::Response>>
3440 where
3441 <R::LspRequest as lsp::request::Request>::Result: Send,
3442 {
3443 let buffer = buffer_handle.read(cx);
3444 if self.is_local() {
3445 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3446 if let Some((file, (_, language_server))) =
3447 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3448 {
3449 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3450 return cx.spawn(|this, cx| async move {
3451 if !request.check_capabilities(&language_server.capabilities()) {
3452 return Ok(Default::default());
3453 }
3454
3455 let response = language_server
3456 .request::<R::LspRequest>(lsp_params)
3457 .await
3458 .context("lsp request failed")?;
3459 request
3460 .response_from_lsp(response, this, buffer_handle, cx)
3461 .await
3462 });
3463 }
3464 } else if let Some(project_id) = self.remote_id() {
3465 let rpc = self.client.clone();
3466 let message = request.to_proto(project_id, buffer);
3467 return cx.spawn(|this, cx| async move {
3468 let response = rpc.request(message).await?;
3469 request
3470 .response_from_proto(response, this, buffer_handle, cx)
3471 .await
3472 });
3473 }
3474 Task::ready(Ok(Default::default()))
3475 }
3476
3477 pub fn find_or_create_local_worktree(
3478 &mut self,
3479 abs_path: impl AsRef<Path>,
3480 visible: bool,
3481 cx: &mut ModelContext<Self>,
3482 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3483 let abs_path = abs_path.as_ref();
3484 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3485 Task::ready(Ok((tree.clone(), relative_path.into())))
3486 } else {
3487 let worktree = self.create_local_worktree(abs_path, visible, cx);
3488 cx.foreground()
3489 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3490 }
3491 }
3492
3493 pub fn find_local_worktree(
3494 &self,
3495 abs_path: &Path,
3496 cx: &AppContext,
3497 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3498 for tree in self.worktrees(cx) {
3499 if let Some(relative_path) = tree
3500 .read(cx)
3501 .as_local()
3502 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3503 {
3504 return Some((tree.clone(), relative_path.into()));
3505 }
3506 }
3507 None
3508 }
3509
3510 pub fn is_shared(&self) -> bool {
3511 match &self.client_state {
3512 ProjectClientState::Local { is_shared, .. } => *is_shared,
3513 ProjectClientState::Remote { .. } => false,
3514 }
3515 }
3516
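    /// Starts loading a local worktree for `abs_path`, deduplicating
    /// concurrent requests for the same path. Once loaded, the worktree is
    /// added to the project and, if the project has a remote id, shared with
    /// or registered on the server depending on whether the project is
    /// currently shared.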
3517 fn create_local_worktree(
3518 &mut self,
3519 abs_path: impl AsRef<Path>,
3520 visible: bool,
3521 cx: &mut ModelContext<Self>,
3522 ) -> Task<Result<ModelHandle<Worktree>>> {
3523 let fs = self.fs.clone();
3524 let client = self.client.clone();
3525 let next_entry_id = self.next_entry_id.clone();
3526 let path: Arc<Path> = abs_path.as_ref().into();
3527 let task = self
3528 .loading_local_worktrees
3529 .entry(path.clone())
3530 .or_insert_with(|| {
3531 cx.spawn(|project, mut cx| {
3532 async move {
3533 let worktree = Worktree::local(
3534 client.clone(),
3535 path.clone(),
3536 visible,
3537 fs,
3538 next_entry_id,
3539 &mut cx,
3540 )
3541 .await;
3542 project.update(&mut cx, |project, _| {
3543 project.loading_local_worktrees.remove(&path);
3544 });
3545 let worktree = worktree?;
3546
3547 let remote_project_id = project.update(&mut cx, |project, cx| {
3548 project.add_worktree(&worktree, cx);
3549 project.remote_id()
3550 });
3551
3552 if let Some(project_id) = remote_project_id {
3553 // Because sharing is async, we may have *unshared* the project by the time it completes,
3554 // in which case we need to register the worktree instead.
3555 loop {
3556 if project.read_with(&cx, |project, _| project.is_shared()) {
3557 if worktree
3558 .update(&mut cx, |worktree, cx| {
3559 worktree.as_local_mut().unwrap().share(project_id, cx)
3560 })
3561 .await
3562 .is_ok()
3563 {
3564 break;
3565 }
3566 } else {
3567 worktree
3568 .update(&mut cx, |worktree, cx| {
3569 worktree
3570 .as_local_mut()
3571 .unwrap()
3572 .register(project_id, cx)
3573 })
3574 .await?;
3575 break;
3576 }
3577 }
3578 }
3579
3580 Ok(worktree)
3581 }
                    .map_err(Arc::new)
3583 })
3584 .shared()
3585 })
3586 .clone();
3587 cx.foreground().spawn(async move {
3588 match task.await {
3589 Ok(worktree) => Ok(worktree),
3590 Err(err) => Err(anyhow!("{}", err)),
3591 }
3592 })
3593 }
3594
3595 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3596 self.worktrees.retain(|worktree| {
3597 worktree
3598 .upgrade(cx)
3599 .map_or(false, |w| w.read(cx).id() != id)
3600 });
3601 cx.notify();
3602 }
3603
3604 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3605 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3606 if worktree.read(cx).is_local() {
3607 cx.subscribe(&worktree, |this, worktree, _, cx| {
3608 this.update_local_worktree_buffers(worktree, cx);
3609 })
3610 .detach();
3611 }
3612
3613 let push_strong_handle = {
3614 let worktree = worktree.read(cx);
3615 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3616 };
3617 if push_strong_handle {
3618 self.worktrees
3619 .push(WorktreeHandle::Strong(worktree.clone()));
3620 } else {
3621 cx.observe_release(&worktree, |this, _, cx| {
3622 this.worktrees
3623 .retain(|worktree| worktree.upgrade(cx).is_some());
3624 cx.notify();
3625 })
3626 .detach();
3627 self.worktrees
3628 .push(WorktreeHandle::Weak(worktree.downgrade()));
3629 }
3630 cx.notify();
3631 }
3632
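    /// Reconciles open buffers with a local worktree after its snapshot
    /// changes: each buffer's file is refreshed (matched by entry id, then by
    /// path), dropped buffers are removed from the open-buffer map, and
    /// renamed buffers are re-registered with the appropriate language server.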
3633 fn update_local_worktree_buffers(
3634 &mut self,
3635 worktree_handle: ModelHandle<Worktree>,
3636 cx: &mut ModelContext<Self>,
3637 ) {
3638 let snapshot = worktree_handle.read(cx).snapshot();
3639 let mut buffers_to_delete = Vec::new();
3640 let mut renamed_buffers = Vec::new();
3641 for (buffer_id, buffer) in &self.opened_buffers {
3642 if let Some(buffer) = buffer.upgrade(cx) {
3643 buffer.update(cx, |buffer, cx| {
3644 if let Some(old_file) = File::from_dyn(buffer.file()) {
3645 if old_file.worktree != worktree_handle {
3646 return;
3647 }
3648
3649 let new_file = if let Some(entry) = old_file
3650 .entry_id
3651 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3652 {
3653 File {
3654 is_local: true,
3655 entry_id: Some(entry.id),
3656 mtime: entry.mtime,
3657 path: entry.path.clone(),
3658 worktree: worktree_handle.clone(),
3659 }
3660 } else if let Some(entry) =
3661 snapshot.entry_for_path(old_file.path().as_ref())
3662 {
3663 File {
3664 is_local: true,
3665 entry_id: Some(entry.id),
3666 mtime: entry.mtime,
3667 path: entry.path.clone(),
3668 worktree: worktree_handle.clone(),
3669 }
3670 } else {
3671 File {
3672 is_local: true,
3673 entry_id: None,
3674 path: old_file.path().clone(),
3675 mtime: old_file.mtime(),
3676 worktree: worktree_handle.clone(),
3677 }
3678 };
3679
3680 let old_path = old_file.abs_path(cx);
3681 if new_file.abs_path(cx) != old_path {
3682 renamed_buffers.push((cx.handle(), old_path));
3683 }
3684
3685 if let Some(project_id) = self.remote_id() {
3686 self.client
3687 .send(proto::UpdateBufferFile {
3688 project_id,
3689 buffer_id: *buffer_id as u64,
3690 file: Some(new_file.to_proto()),
3691 })
3692 .log_err();
3693 }
3694 buffer.file_updated(Box::new(new_file), cx).detach();
3695 }
3696 });
3697 } else {
3698 buffers_to_delete.push(*buffer_id);
3699 }
3700 }
3701
3702 for buffer_id in buffers_to_delete {
3703 self.opened_buffers.remove(&buffer_id);
3704 }
3705
3706 for (buffer, old_path) in renamed_buffers {
3707 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3708 self.assign_language_to_buffer(&buffer, cx);
3709 self.register_buffer_with_language_server(&buffer, cx);
3710 }
3711 }
3712
3713 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3714 let new_active_entry = entry.and_then(|project_path| {
3715 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3716 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3717 Some(entry.id)
3718 });
3719 if new_active_entry != self.active_entry {
3720 self.active_entry = new_active_entry;
3721 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3722 }
3723 }
3724
3725 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3726 self.language_server_statuses
3727 .values()
3728 .any(|status| status.pending_diagnostic_updates > 0)
3729 }
3730
3731 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3732 let mut summary = DiagnosticSummary::default();
3733 for (_, path_summary) in self.diagnostic_summaries(cx) {
3734 summary.error_count += path_summary.error_count;
3735 summary.warning_count += path_summary.warning_count;
3736 }
3737 summary
3738 }
3739
3740 pub fn diagnostic_summaries<'a>(
3741 &'a self,
3742 cx: &'a AppContext,
3743 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3744 self.worktrees(cx).flat_map(move |worktree| {
3745 let worktree = worktree.read(cx);
3746 let worktree_id = worktree.id();
3747 worktree
3748 .diagnostic_summaries()
3749 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3750 })
3751 }
3752
3753 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3754 if self
3755 .language_server_statuses
3756 .values()
3757 .map(|status| status.pending_diagnostic_updates)
3758 .sum::<isize>()
3759 == 1
3760 {
3761 cx.emit(Event::DiskBasedDiagnosticsStarted);
3762 }
3763 }
3764
3765 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3766 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3767 if self
3768 .language_server_statuses
3769 .values()
3770 .map(|status| status.pending_diagnostic_updates)
3771 .sum::<isize>()
3772 == 0
3773 {
3774 cx.emit(Event::DiskBasedDiagnosticsFinished);
3775 }
3776 }
3777
3778 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3779 self.active_entry
3780 }
3781
3782 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3783 self.worktree_for_id(path.worktree_id, cx)?
3784 .read(cx)
3785 .entry_for_path(&path.path)
3786 .map(|entry| entry.id)
3787 }
3788
3789 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3790 let worktree = self.worktree_for_entry(entry_id, cx)?;
3791 let worktree = worktree.read(cx);
3792 let worktree_id = worktree.id();
3793 let path = worktree.entry_for_id(entry_id)?.path.clone();
3794 Some(ProjectPath { worktree_id, path })
3795 }
3796
3797 // RPC message handlers
3798
3799 async fn handle_request_join_project(
3800 this: ModelHandle<Self>,
3801 message: TypedEnvelope<proto::RequestJoinProject>,
3802 _: Arc<Client>,
3803 mut cx: AsyncAppContext,
3804 ) -> Result<()> {
3805 let user_id = message.payload.requester_id;
3806 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3807 let user = user_store
3808 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3809 .await?;
3810 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3811 Ok(())
3812 }
3813
3814 async fn handle_unregister_project(
3815 this: ModelHandle<Self>,
3816 _: TypedEnvelope<proto::UnregisterProject>,
3817 _: Arc<Client>,
3818 mut cx: AsyncAppContext,
3819 ) -> Result<()> {
3820 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3821 Ok(())
3822 }
3823
3824 async fn handle_project_unshared(
3825 this: ModelHandle<Self>,
3826 _: TypedEnvelope<proto::ProjectUnshared>,
3827 _: Arc<Client>,
3828 mut cx: AsyncAppContext,
3829 ) -> Result<()> {
3830 this.update(&mut cx, |this, cx| this.unshared(cx));
3831 Ok(())
3832 }
3833
3834 async fn handle_add_collaborator(
3835 this: ModelHandle<Self>,
3836 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<()> {
3840 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3841 let collaborator = envelope
3842 .payload
3843 .collaborator
3844 .take()
3845 .ok_or_else(|| anyhow!("empty collaborator"))?;
3846
3847 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3848 this.update(&mut cx, |this, cx| {
3849 this.collaborators
3850 .insert(collaborator.peer_id, collaborator);
3851 cx.notify();
3852 });
3853
3854 Ok(())
3855 }
3856
3857 async fn handle_remove_collaborator(
3858 this: ModelHandle<Self>,
3859 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3860 _: Arc<Client>,
3861 mut cx: AsyncAppContext,
3862 ) -> Result<()> {
3863 this.update(&mut cx, |this, cx| {
3864 let peer_id = PeerId(envelope.payload.peer_id);
3865 let replica_id = this
3866 .collaborators
3867 .remove(&peer_id)
3868 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3869 .replica_id;
3870 for (_, buffer) in &this.opened_buffers {
3871 if let Some(buffer) = buffer.upgrade(cx) {
3872 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3873 }
3874 }
3875
3876 cx.emit(Event::CollaboratorLeft(peer_id));
3877 cx.notify();
3878 Ok(())
3879 })
3880 }
3881
3882 async fn handle_register_worktree(
3883 this: ModelHandle<Self>,
3884 envelope: TypedEnvelope<proto::RegisterWorktree>,
3885 client: Arc<Client>,
3886 mut cx: AsyncAppContext,
3887 ) -> Result<()> {
3888 this.update(&mut cx, |this, cx| {
3889 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3890 let replica_id = this.replica_id();
3891 let worktree = proto::Worktree {
3892 id: envelope.payload.worktree_id,
3893 root_name: envelope.payload.root_name,
3894 entries: Default::default(),
3895 diagnostic_summaries: Default::default(),
3896 visible: envelope.payload.visible,
3897 scan_id: 0,
3898 };
3899 let (worktree, load_task) =
3900 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3901 this.add_worktree(&worktree, cx);
3902 load_task.detach();
3903 Ok(())
3904 })
3905 }
3906
3907 async fn handle_unregister_worktree(
3908 this: ModelHandle<Self>,
3909 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3910 _: Arc<Client>,
3911 mut cx: AsyncAppContext,
3912 ) -> Result<()> {
3913 this.update(&mut cx, |this, cx| {
3914 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3915 this.remove_worktree(worktree_id, cx);
3916 Ok(())
3917 })
3918 }
3919
3920 async fn handle_update_worktree(
3921 this: ModelHandle<Self>,
3922 envelope: TypedEnvelope<proto::UpdateWorktree>,
3923 _: Arc<Client>,
3924 mut cx: AsyncAppContext,
3925 ) -> Result<()> {
3926 this.update(&mut cx, |this, cx| {
3927 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3928 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3929 worktree.update(cx, |worktree, _| {
3930 let worktree = worktree.as_remote_mut().unwrap();
3931 worktree.update_from_remote(envelope)
3932 })?;
3933 }
3934 Ok(())
3935 })
3936 }
3937
3938 async fn handle_create_project_entry(
3939 this: ModelHandle<Self>,
3940 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3941 _: Arc<Client>,
3942 mut cx: AsyncAppContext,
3943 ) -> Result<proto::ProjectEntryResponse> {
3944 let worktree = this.update(&mut cx, |this, cx| {
3945 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3946 this.worktree_for_id(worktree_id, cx)
3947 .ok_or_else(|| anyhow!("worktree not found"))
3948 })?;
3949 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3950 let entry = worktree
3951 .update(&mut cx, |worktree, cx| {
3952 let worktree = worktree.as_local_mut().unwrap();
3953 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3954 worktree.create_entry(path, envelope.payload.is_directory, cx)
3955 })
3956 .await?;
3957 Ok(proto::ProjectEntryResponse {
3958 entry: Some((&entry).into()),
3959 worktree_scan_id: worktree_scan_id as u64,
3960 })
3961 }
3962
3963 async fn handle_rename_project_entry(
3964 this: ModelHandle<Self>,
3965 envelope: TypedEnvelope<proto::RenameProjectEntry>,
3966 _: Arc<Client>,
3967 mut cx: AsyncAppContext,
3968 ) -> Result<proto::ProjectEntryResponse> {
3969 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3970 let worktree = this.read_with(&cx, |this, cx| {
3971 this.worktree_for_entry(entry_id, cx)
3972 .ok_or_else(|| anyhow!("worktree not found"))
3973 })?;
3974 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3975 let entry = worktree
3976 .update(&mut cx, |worktree, cx| {
3977 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
3978 worktree
3979 .as_local_mut()
3980 .unwrap()
3981 .rename_entry(entry_id, new_path, cx)
3982 .ok_or_else(|| anyhow!("invalid entry"))
3983 })?
3984 .await?;
3985 Ok(proto::ProjectEntryResponse {
3986 entry: Some((&entry).into()),
3987 worktree_scan_id: worktree_scan_id as u64,
3988 })
3989 }
3990
3991 async fn handle_delete_project_entry(
3992 this: ModelHandle<Self>,
3993 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
3994 _: Arc<Client>,
3995 mut cx: AsyncAppContext,
3996 ) -> Result<proto::ProjectEntryResponse> {
3997 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3998 let worktree = this.read_with(&cx, |this, cx| {
3999 this.worktree_for_entry(entry_id, cx)
4000 .ok_or_else(|| anyhow!("worktree not found"))
4001 })?;
4002 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4003 worktree
4004 .update(&mut cx, |worktree, cx| {
4005 worktree
4006 .as_local_mut()
4007 .unwrap()
4008 .delete_entry(entry_id, cx)
4009 .ok_or_else(|| anyhow!("invalid entry"))
4010 })?
4011 .await?;
4012 Ok(proto::ProjectEntryResponse {
4013 entry: None,
4014 worktree_scan_id: worktree_scan_id as u64,
4015 })
4016 }
4017
4018 async fn handle_update_diagnostic_summary(
4019 this: ModelHandle<Self>,
4020 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4021 _: Arc<Client>,
4022 mut cx: AsyncAppContext,
4023 ) -> Result<()> {
4024 this.update(&mut cx, |this, cx| {
4025 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4026 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4027 if let Some(summary) = envelope.payload.summary {
4028 let project_path = ProjectPath {
4029 worktree_id,
4030 path: Path::new(&summary.path).into(),
4031 };
4032 worktree.update(cx, |worktree, _| {
4033 worktree
4034 .as_remote_mut()
4035 .unwrap()
4036 .update_diagnostic_summary(project_path.path.clone(), &summary);
4037 });
4038 cx.emit(Event::DiagnosticsUpdated(project_path));
4039 }
4040 }
4041 Ok(())
4042 })
4043 }
4044
4045 async fn handle_start_language_server(
4046 this: ModelHandle<Self>,
4047 envelope: TypedEnvelope<proto::StartLanguageServer>,
4048 _: Arc<Client>,
4049 mut cx: AsyncAppContext,
4050 ) -> Result<()> {
4051 let server = envelope
4052 .payload
4053 .server
4054 .ok_or_else(|| anyhow!("invalid server"))?;
4055 this.update(&mut cx, |this, cx| {
4056 this.language_server_statuses.insert(
4057 server.id as usize,
4058 LanguageServerStatus {
4059 name: server.name,
4060 pending_work: Default::default(),
4061 pending_diagnostic_updates: 0,
4062 },
4063 );
4064 cx.notify();
4065 });
4066 Ok(())
4067 }
4068
4069 async fn handle_update_language_server(
4070 this: ModelHandle<Self>,
4071 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4072 _: Arc<Client>,
4073 mut cx: AsyncAppContext,
4074 ) -> Result<()> {
4075 let language_server_id = envelope.payload.language_server_id as usize;
4076 match envelope
4077 .payload
4078 .variant
4079 .ok_or_else(|| anyhow!("invalid variant"))?
4080 {
4081 proto::update_language_server::Variant::WorkStart(payload) => {
4082 this.update(&mut cx, |this, cx| {
4083 this.on_lsp_work_start(language_server_id, payload.token, cx);
4084 })
4085 }
4086 proto::update_language_server::Variant::WorkProgress(payload) => {
4087 this.update(&mut cx, |this, cx| {
4088 this.on_lsp_work_progress(
4089 language_server_id,
4090 payload.token,
4091 LanguageServerProgress {
4092 message: payload.message,
4093 percentage: payload.percentage.map(|p| p as usize),
4094 last_update_at: Instant::now(),
4095 },
4096 cx,
4097 );
4098 })
4099 }
4100 proto::update_language_server::Variant::WorkEnd(payload) => {
4101 this.update(&mut cx, |this, cx| {
4102 this.on_lsp_work_end(language_server_id, payload.token, cx);
4103 })
4104 }
4105 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4106 this.update(&mut cx, |this, cx| {
4107 this.disk_based_diagnostics_started(cx);
4108 })
4109 }
4110 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4111 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4112 }
4113 }
4114
4115 Ok(())
4116 }
4117
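// Applies incoming buffer operations. Operations for a buffer that is still loading
// are accumulated in `OpenBuffer::Loading` and replayed once the buffer opens; on a
// guest, operations may even arrive before the buffer itself, in which case a new
// `Loading` entry is created to hold them.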
4118 async fn handle_update_buffer(
4119 this: ModelHandle<Self>,
4120 envelope: TypedEnvelope<proto::UpdateBuffer>,
4121 _: Arc<Client>,
4122 mut cx: AsyncAppContext,
4123 ) -> Result<()> {
4124 this.update(&mut cx, |this, cx| {
4125 let payload = envelope.payload.clone();
4126 let buffer_id = payload.buffer_id;
4127 let ops = payload
4128 .operations
4129 .into_iter()
4130 .map(language::proto::deserialize_operation)
4131 .collect::<Result<Vec<_>, _>>()?;
4132 let is_remote = this.is_remote();
4133 match this.opened_buffers.entry(buffer_id) {
4134 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4135 OpenBuffer::Strong(buffer) => {
4136 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4137 }
4138 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4139 OpenBuffer::Weak(_) => {}
4140 },
4141 hash_map::Entry::Vacant(e) => {
4142 assert!(
4143 is_remote,
4144 "received buffer update from {:?}",
4145 envelope.original_sender_id
4146 );
4147 e.insert(OpenBuffer::Loading(ops));
4148 }
4149 }
4150 Ok(())
4151 })
4152 }
4153
4154 async fn handle_update_buffer_file(
4155 this: ModelHandle<Self>,
4156 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4157 _: Arc<Client>,
4158 mut cx: AsyncAppContext,
4159 ) -> Result<()> {
4160 this.update(&mut cx, |this, cx| {
4161 let payload = envelope.payload.clone();
4162 let buffer_id = payload.buffer_id;
4163 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4164 let worktree = this
4165 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4166 .ok_or_else(|| anyhow!("no such worktree"))?;
4167 let file = File::from_proto(file, worktree.clone(), cx)?;
4168 let buffer = this
4169 .opened_buffers
4170 .get_mut(&buffer_id)
4171 .and_then(|b| b.upgrade(cx))
4172 .ok_or_else(|| anyhow!("no such buffer"))?;
4173 buffer.update(cx, |buffer, cx| {
4174 buffer.file_updated(Box::new(file), cx).detach();
4175 });
4176 Ok(())
4177 })
4178 }
4179
4180 async fn handle_save_buffer(
4181 this: ModelHandle<Self>,
4182 envelope: TypedEnvelope<proto::SaveBuffer>,
4183 _: Arc<Client>,
4184 mut cx: AsyncAppContext,
4185 ) -> Result<proto::BufferSaved> {
4186 let buffer_id = envelope.payload.buffer_id;
4187 let requested_version = deserialize_version(envelope.payload.version);
4188
4189 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4190 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4191 let buffer = this
4192 .opened_buffers
4193 .get(&buffer_id)
4194 .and_then(|buffer| buffer.upgrade(cx))
4195 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4196 Ok::<_, anyhow::Error>((project_id, buffer))
4197 })?;
4198 buffer
4199 .update(&mut cx, |buffer, _| {
4200 buffer.wait_for_version(requested_version)
4201 })
4202 .await;
4203
4204 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4205 Ok(proto::BufferSaved {
4206 project_id,
4207 buffer_id,
4208 version: serialize_version(&saved_version),
4209 mtime: Some(mtime.into()),
4210 })
4211 }
4212
4213 async fn handle_reload_buffers(
4214 this: ModelHandle<Self>,
4215 envelope: TypedEnvelope<proto::ReloadBuffers>,
4216 _: Arc<Client>,
4217 mut cx: AsyncAppContext,
4218 ) -> Result<proto::ReloadBuffersResponse> {
4219 let sender_id = envelope.original_sender_id()?;
4220 let reload = this.update(&mut cx, |this, cx| {
4221 let mut buffers = HashSet::default();
4222 for buffer_id in &envelope.payload.buffer_ids {
4223 buffers.insert(
4224 this.opened_buffers
4225 .get(buffer_id)
4226 .and_then(|buffer| buffer.upgrade(cx))
4227 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4228 );
4229 }
4230 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4231 })?;
4232
4233 let project_transaction = reload.await?;
4234 let project_transaction = this.update(&mut cx, |this, cx| {
4235 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4236 });
4237 Ok(proto::ReloadBuffersResponse {
4238 transaction: Some(project_transaction),
4239 })
4240 }
4241
4242 async fn handle_format_buffers(
4243 this: ModelHandle<Self>,
4244 envelope: TypedEnvelope<proto::FormatBuffers>,
4245 _: Arc<Client>,
4246 mut cx: AsyncAppContext,
4247 ) -> Result<proto::FormatBuffersResponse> {
4248 let sender_id = envelope.original_sender_id()?;
4249 let format = this.update(&mut cx, |this, cx| {
4250 let mut buffers = HashSet::default();
4251 for buffer_id in &envelope.payload.buffer_ids {
4252 buffers.insert(
4253 this.opened_buffers
4254 .get(buffer_id)
4255 .and_then(|buffer| buffer.upgrade(cx))
4256 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4257 );
4258 }
4259 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4260 })?;
4261
4262 let project_transaction = format.await?;
4263 let project_transaction = this.update(&mut cx, |this, cx| {
4264 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4265 });
4266 Ok(proto::FormatBuffersResponse {
4267 transaction: Some(project_transaction),
4268 })
4269 }
4270
4271 async fn handle_get_completions(
4272 this: ModelHandle<Self>,
4273 envelope: TypedEnvelope<proto::GetCompletions>,
4274 _: Arc<Client>,
4275 mut cx: AsyncAppContext,
4276 ) -> Result<proto::GetCompletionsResponse> {
4277 let position = envelope
4278 .payload
4279 .position
4280 .and_then(language::proto::deserialize_anchor)
4281 .ok_or_else(|| anyhow!("invalid position"))?;
4282 let version = deserialize_version(envelope.payload.version);
4283 let buffer = this.read_with(&cx, |this, cx| {
4284 this.opened_buffers
4285 .get(&envelope.payload.buffer_id)
4286 .and_then(|buffer| buffer.upgrade(cx))
4287 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4288 })?;
4289 buffer
4290 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4291 .await;
4292 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4293 let completions = this
4294 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4295 .await?;
4296
4297 Ok(proto::GetCompletionsResponse {
4298 completions: completions
4299 .iter()
4300 .map(language::proto::serialize_completion)
4301 .collect(),
4302 version: serialize_version(&version),
4303 })
4304 }
4305
4306 async fn handle_apply_additional_edits_for_completion(
4307 this: ModelHandle<Self>,
4308 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4309 _: Arc<Client>,
4310 mut cx: AsyncAppContext,
4311 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4312 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4313 let buffer = this
4314 .opened_buffers
4315 .get(&envelope.payload.buffer_id)
4316 .and_then(|buffer| buffer.upgrade(cx))
4317 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4318 let language = buffer.read(cx).language();
4319 let completion = language::proto::deserialize_completion(
4320 envelope
4321 .payload
4322 .completion
4323 .ok_or_else(|| anyhow!("invalid completion"))?,
4324 language,
4325 )?;
4326 Ok::<_, anyhow::Error>(
4327 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4328 )
4329 })?;
4330
4331 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4332 transaction: apply_additional_edits
4333 .await?
4334 .as_ref()
4335 .map(language::proto::serialize_transaction),
4336 })
4337 }
4338
4339 async fn handle_get_code_actions(
4340 this: ModelHandle<Self>,
4341 envelope: TypedEnvelope<proto::GetCodeActions>,
4342 _: Arc<Client>,
4343 mut cx: AsyncAppContext,
4344 ) -> Result<proto::GetCodeActionsResponse> {
4345 let start = envelope
4346 .payload
4347 .start
4348 .and_then(language::proto::deserialize_anchor)
4349 .ok_or_else(|| anyhow!("invalid start"))?;
4350 let end = envelope
4351 .payload
4352 .end
4353 .and_then(language::proto::deserialize_anchor)
4354 .ok_or_else(|| anyhow!("invalid end"))?;
4355 let buffer = this.update(&mut cx, |this, cx| {
4356 this.opened_buffers
4357 .get(&envelope.payload.buffer_id)
4358 .and_then(|buffer| buffer.upgrade(cx))
4359 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4360 })?;
4361 buffer
4362 .update(&mut cx, |buffer, _| {
4363 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4364 })
4365 .await;
4366
4367 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4368 let code_actions = this.update(&mut cx, |this, cx| {
4369 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4370 })?;
4371
4372 Ok(proto::GetCodeActionsResponse {
4373 actions: code_actions
4374 .await?
4375 .iter()
4376 .map(language::proto::serialize_code_action)
4377 .collect(),
4378 version: serialize_version(&version),
4379 })
4380 }
4381
4382 async fn handle_apply_code_action(
4383 this: ModelHandle<Self>,
4384 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4385 _: Arc<Client>,
4386 mut cx: AsyncAppContext,
4387 ) -> Result<proto::ApplyCodeActionResponse> {
4388 let sender_id = envelope.original_sender_id()?;
4389 let action = language::proto::deserialize_code_action(
4390 envelope
4391 .payload
4392 .action
4393 .ok_or_else(|| anyhow!("invalid action"))?,
4394 )?;
4395 let apply_code_action = this.update(&mut cx, |this, cx| {
4396 let buffer = this
4397 .opened_buffers
4398 .get(&envelope.payload.buffer_id)
4399 .and_then(|buffer| buffer.upgrade(cx))
4400 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4401 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4402 })?;
4403
4404 let project_transaction = apply_code_action.await?;
4405 let project_transaction = this.update(&mut cx, |this, cx| {
4406 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4407 });
4408 Ok(proto::ApplyCodeActionResponse {
4409 transaction: Some(project_transaction),
4410 })
4411 }
4412
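// Generic handler for LSP-backed requests that target a single buffer: resolve the
// buffer, deserialize the request, run it through `request_lsp`, and serialize the
// response for the original sender, tagged with the buffer version it was computed
// against.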
4413 async fn handle_lsp_command<T: LspCommand>(
4414 this: ModelHandle<Self>,
4415 envelope: TypedEnvelope<T::ProtoRequest>,
4416 _: Arc<Client>,
4417 mut cx: AsyncAppContext,
4418 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4419 where
4420 <T::LspRequest as lsp::request::Request>::Result: Send,
4421 {
4422 let sender_id = envelope.original_sender_id()?;
4423 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4424 let buffer_handle = this.read_with(&cx, |this, _| {
4425 this.opened_buffers
4426 .get(&buffer_id)
4427 .and_then(|buffer| buffer.upgrade(&cx))
4428 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4429 })?;
4430 let request = T::from_proto(
4431 envelope.payload,
4432 this.clone(),
4433 buffer_handle.clone(),
4434 cx.clone(),
4435 )
4436 .await?;
4437 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4438 let response = this
4439 .update(&mut cx, |this, cx| {
4440 this.request_lsp(buffer_handle, request, cx)
4441 })
4442 .await?;
4443 this.update(&mut cx, |this, cx| {
4444 Ok(T::response_to_proto(
4445 response,
4446 this,
4447 sender_id,
4448 &buffer_version,
4449 cx,
4450 ))
4451 })
4452 }
4453
4454 async fn handle_get_project_symbols(
4455 this: ModelHandle<Self>,
4456 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4457 _: Arc<Client>,
4458 mut cx: AsyncAppContext,
4459 ) -> Result<proto::GetProjectSymbolsResponse> {
4460 let symbols = this
4461 .update(&mut cx, |this, cx| {
4462 this.symbols(&envelope.payload.query, cx)
4463 })
4464 .await?;
4465
4466 Ok(proto::GetProjectSymbolsResponse {
4467 symbols: symbols.iter().map(serialize_symbol).collect(),
4468 })
4469 }
4470
4471 async fn handle_search_project(
4472 this: ModelHandle<Self>,
4473 envelope: TypedEnvelope<proto::SearchProject>,
4474 _: Arc<Client>,
4475 mut cx: AsyncAppContext,
4476 ) -> Result<proto::SearchProjectResponse> {
4477 let peer_id = envelope.original_sender_id()?;
4478 let query = SearchQuery::from_proto(envelope.payload)?;
4479 let result = this
4480 .update(&mut cx, |this, cx| this.search(query, cx))
4481 .await?;
4482
4483 this.update(&mut cx, |this, cx| {
4484 let mut locations = Vec::new();
4485 for (buffer, ranges) in result {
4486 for range in ranges {
4487 let start = serialize_anchor(&range.start);
4488 let end = serialize_anchor(&range.end);
4489 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4490 locations.push(proto::Location {
4491 buffer: Some(buffer),
4492 start: Some(start),
4493 end: Some(end),
4494 });
4495 }
4496 }
4497 Ok(proto::SearchProjectResponse { locations })
4498 })
4499 }
4500
4501 async fn handle_open_buffer_for_symbol(
4502 this: ModelHandle<Self>,
4503 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4504 _: Arc<Client>,
4505 mut cx: AsyncAppContext,
4506 ) -> Result<proto::OpenBufferForSymbolResponse> {
4507 let peer_id = envelope.original_sender_id()?;
4508 let symbol = envelope
4509 .payload
4510 .symbol
4511 .ok_or_else(|| anyhow!("invalid symbol"))?;
4512 let symbol = this.read_with(&cx, |this, _| {
4513 let symbol = this.deserialize_symbol(symbol)?;
4514 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4515 if signature == symbol.signature {
4516 Ok(symbol)
4517 } else {
4518 Err(anyhow!("invalid symbol signature"))
4519 }
4520 })?;
4521 let buffer = this
4522 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4523 .await?;
4524
4525 Ok(proto::OpenBufferForSymbolResponse {
4526 buffer: Some(this.update(&mut cx, |this, cx| {
4527 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4528 })),
4529 })
4530 }
4531
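// Produces a signature for a symbol by hashing the worktree id, the symbol's path,
// and this project's private random nonce. Guests echo the signature back (see
// `handle_open_buffer_for_symbol` above), which lets the host reject symbols it
// never handed out.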
4532 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4533 let mut hasher = Sha256::new();
4534 hasher.update(worktree_id.to_proto().to_be_bytes());
4535 hasher.update(path.to_string_lossy().as_bytes());
4536 hasher.update(self.nonce.to_be_bytes());
4537 hasher.finalize().as_slice().try_into().unwrap()
4538 }
4539
4540 async fn handle_open_buffer_by_id(
4541 this: ModelHandle<Self>,
4542 envelope: TypedEnvelope<proto::OpenBufferById>,
4543 _: Arc<Client>,
4544 mut cx: AsyncAppContext,
4545 ) -> Result<proto::OpenBufferResponse> {
4546 let peer_id = envelope.original_sender_id()?;
4547 let buffer = this
4548 .update(&mut cx, |this, cx| {
4549 this.open_buffer_by_id(envelope.payload.id, cx)
4550 })
4551 .await?;
4552 this.update(&mut cx, |this, cx| {
4553 Ok(proto::OpenBufferResponse {
4554 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4555 })
4556 })
4557 }
4558
4559 async fn handle_open_buffer_by_path(
4560 this: ModelHandle<Self>,
4561 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4562 _: Arc<Client>,
4563 mut cx: AsyncAppContext,
4564 ) -> Result<proto::OpenBufferResponse> {
4565 let peer_id = envelope.original_sender_id()?;
4566 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4567 let open_buffer = this.update(&mut cx, |this, cx| {
4568 this.open_buffer(
4569 ProjectPath {
4570 worktree_id,
4571 path: PathBuf::from(envelope.payload.path).into(),
4572 },
4573 cx,
4574 )
4575 });
4576
4577 let buffer = open_buffer.await?;
4578 this.update(&mut cx, |this, cx| {
4579 Ok(proto::OpenBufferResponse {
4580 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4581 })
4582 })
4583 }
4584
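// Converts a `ProjectTransaction` into its wire form for a specific peer. Each
// affected buffer is serialized either as full state or as a bare id, depending on
// whether that peer has already seen it (see `serialize_buffer_for_peer` below).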
4585 fn serialize_project_transaction_for_peer(
4586 &mut self,
4587 project_transaction: ProjectTransaction,
4588 peer_id: PeerId,
4589 cx: &AppContext,
4590 ) -> proto::ProjectTransaction {
4591 let mut serialized_transaction = proto::ProjectTransaction {
4592 buffers: Default::default(),
4593 transactions: Default::default(),
4594 };
4595 for (buffer, transaction) in project_transaction.0 {
4596 serialized_transaction
4597 .buffers
4598 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4599 serialized_transaction
4600 .transactions
4601 .push(language::proto::serialize_transaction(&transaction));
4602 }
4603 serialized_transaction
4604 }
4605
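// Rebuilds a `ProjectTransaction` received from the host: resolve each serialized
// buffer into a local buffer model, deserialize its transaction, wait for the
// corresponding edits to be applied, and optionally push the transaction onto the
// buffer's undo history.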
4606 fn deserialize_project_transaction(
4607 &mut self,
4608 message: proto::ProjectTransaction,
4609 push_to_history: bool,
4610 cx: &mut ModelContext<Self>,
4611 ) -> Task<Result<ProjectTransaction>> {
4612 cx.spawn(|this, mut cx| async move {
4613 let mut project_transaction = ProjectTransaction::default();
4614 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4615 let buffer = this
4616 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4617 .await?;
4618 let transaction = language::proto::deserialize_transaction(transaction)?;
4619 project_transaction.0.insert(buffer, transaction);
4620 }
4621
4622 for (buffer, transaction) in &project_transaction.0 {
4623 buffer
4624 .update(&mut cx, |buffer, _| {
4625 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4626 })
4627 .await;
4628
4629 if push_to_history {
4630 buffer.update(&mut cx, |buffer, _| {
4631 buffer.push_transaction(transaction.clone(), Instant::now());
4632 });
4633 }
4634 }
4635
4636 Ok(project_transaction)
4637 })
4638 }
4639
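// The first time a buffer is sent to a given peer, its full state is serialized;
// afterwards only its remote id is sent, since the peer already holds a replica.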
4640 fn serialize_buffer_for_peer(
4641 &mut self,
4642 buffer: &ModelHandle<Buffer>,
4643 peer_id: PeerId,
4644 cx: &AppContext,
4645 ) -> proto::Buffer {
4646 let buffer_id = buffer.read(cx).remote_id();
4647 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4648 if shared_buffers.insert(buffer_id) {
4649 proto::Buffer {
4650 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4651 }
4652 } else {
4653 proto::Buffer {
4654 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4655 }
4656 }
4657 }
4658
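// The inverse of `serialize_buffer_for_peer`. A bare id means the buffer's state is
// arriving through another message, so the returned task waits on the `opened_buffer`
// watch until the buffer shows up in `opened_buffers`. A full `State` variant is
// materialized into a new buffer model (resolving its file against the corresponding
// worktree) and registered with the project.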
4659 fn deserialize_buffer(
4660 &mut self,
4661 buffer: proto::Buffer,
4662 cx: &mut ModelContext<Self>,
4663 ) -> Task<Result<ModelHandle<Buffer>>> {
4664 let replica_id = self.replica_id();
4665
4666 let opened_buffer_tx = self.opened_buffer.0.clone();
4667 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4668 cx.spawn(|this, mut cx| async move {
4669 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4670 proto::buffer::Variant::Id(id) => {
4671 let buffer = loop {
4672 let buffer = this.read_with(&cx, |this, cx| {
4673 this.opened_buffers
4674 .get(&id)
4675 .and_then(|buffer| buffer.upgrade(cx))
4676 });
4677 if let Some(buffer) = buffer {
4678 break buffer;
4679 }
4680 opened_buffer_rx
4681 .next()
4682 .await
4683 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4684 };
4685 Ok(buffer)
4686 }
4687 proto::buffer::Variant::State(mut buffer) => {
4688 let mut buffer_worktree = None;
4689 let mut buffer_file = None;
4690 if let Some(file) = buffer.file.take() {
4691 this.read_with(&cx, |this, cx| {
4692 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4693 let worktree =
4694 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4695 anyhow!("no worktree found for id {}", file.worktree_id)
4696 })?;
4697 buffer_file =
4698 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4699 as Box<dyn language::File>);
4700 buffer_worktree = Some(worktree);
4701 Ok::<_, anyhow::Error>(())
4702 })?;
4703 }
4704
4705 let buffer = cx.add_model(|cx| {
4706 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4707 });
4708
4709 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4710
4711 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4712 Ok(buffer)
4713 }
4714 }
4715 })
4716 }
4717
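// Rebuilds a `Symbol` from its proto form. The `kind` field is transmuted directly
// from the wire value, which appears to rely on the proto value and `lsp::SymbolKind`
// sharing the same representation; the label is recomputed from the language detected
// for the symbol's path.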
4718 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4719 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4720 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4721 let start = serialized_symbol
4722 .start
4723 .ok_or_else(|| anyhow!("invalid start"))?;
4724 let end = serialized_symbol
4725 .end
4726 .ok_or_else(|| anyhow!("invalid end"))?;
4727 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4728 let path = PathBuf::from(serialized_symbol.path);
4729 let language = self.languages.select_language(&path);
4730 Ok(Symbol {
4731 source_worktree_id,
4732 worktree_id,
4733 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4734 label: language
4735 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4736 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4737 name: serialized_symbol.name,
4738 path,
4739 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4740 kind,
4741 signature: serialized_symbol
4742 .signature
4743 .try_into()
4744 .map_err(|_| anyhow!("invalid signature"))?,
4745 })
4746 }
4747
4748 async fn handle_buffer_saved(
4749 this: ModelHandle<Self>,
4750 envelope: TypedEnvelope<proto::BufferSaved>,
4751 _: Arc<Client>,
4752 mut cx: AsyncAppContext,
4753 ) -> Result<()> {
4754 let version = deserialize_version(envelope.payload.version);
4755 let mtime = envelope
4756 .payload
4757 .mtime
4758 .ok_or_else(|| anyhow!("missing mtime"))?
4759 .into();
4760
4761 this.update(&mut cx, |this, cx| {
4762 let buffer = this
4763 .opened_buffers
4764 .get(&envelope.payload.buffer_id)
4765 .and_then(|buffer| buffer.upgrade(cx));
4766 if let Some(buffer) = buffer {
4767 buffer.update(cx, |buffer, cx| {
4768 buffer.did_save(version, mtime, None, cx);
4769 });
4770 }
4771 Ok(())
4772 })
4773 }
4774
4775 async fn handle_buffer_reloaded(
4776 this: ModelHandle<Self>,
4777 envelope: TypedEnvelope<proto::BufferReloaded>,
4778 _: Arc<Client>,
4779 mut cx: AsyncAppContext,
4780 ) -> Result<()> {
4781 let payload = envelope.payload.clone();
4782 let version = deserialize_version(payload.version);
4783 let mtime = payload
4784 .mtime
4785 .ok_or_else(|| anyhow!("missing mtime"))?
4786 .into();
4787 this.update(&mut cx, |this, cx| {
4788 let buffer = this
4789 .opened_buffers
4790 .get(&payload.buffer_id)
4791 .and_then(|buffer| buffer.upgrade(cx));
4792 if let Some(buffer) = buffer {
4793 buffer.update(cx, |buffer, cx| {
4794 buffer.did_reload(version, mtime, cx);
4795 });
4796 }
4797 Ok(())
4798 })
4799 }
4800
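// Fuzzy-matches `query` against the paths of all visible worktrees. Worktree
// snapshots are captured up front so the matching itself can run on the background
// executor without touching app state; see `test_populate_and_search` in the tests
// below for an example call.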
4801 pub fn match_paths<'a>(
4802 &self,
4803 query: &'a str,
4804 include_ignored: bool,
4805 smart_case: bool,
4806 max_results: usize,
4807 cancel_flag: &'a AtomicBool,
4808 cx: &AppContext,
4809 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4810 let worktrees = self
4811 .worktrees(cx)
4812 .filter(|worktree| worktree.read(cx).is_visible())
4813 .collect::<Vec<_>>();
4814 let include_root_name = worktrees.len() > 1;
4815 let candidate_sets = worktrees
4816 .into_iter()
4817 .map(|worktree| CandidateSet {
4818 snapshot: worktree.read(cx).snapshot(),
4819 include_ignored,
4820 include_root_name,
4821 })
4822 .collect::<Vec<_>>();
4823
4824 let background = cx.background().clone();
4825 async move {
4826 fuzzy::match_paths(
4827 candidate_sets.as_slice(),
4828 query,
4829 smart_case,
4830 max_results,
4831 cancel_flag,
4832 background,
4833 )
4834 .await
4835 }
4836 }
4837
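// Converts a batch of LSP `TextEdit`s into anchored edits against the buffer snapshot
// that corresponds to `version` (the document version the server based its edits on).
// Adjacent or newline-separated edits are merged, and multiline replacements are
// diffed line by line so that anchors in unchanged regions keep their positions.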
4838 fn edits_from_lsp(
4839 &mut self,
4840 buffer: &ModelHandle<Buffer>,
4841 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4842 version: Option<i32>,
4843 cx: &mut ModelContext<Self>,
4844 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4845 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4846 cx.background().spawn(async move {
4847 let snapshot = snapshot?;
4848 let mut lsp_edits = lsp_edits
4849 .into_iter()
4850 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4851 .peekable();
4852
4853 let mut edits = Vec::new();
4854 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4855 // Combine any LSP edits that are adjacent.
4856 //
4857 // Also, combine LSP edits that are separated from each other by only
4858 // a newline. This is important because for some code actions,
4859 // rust-analyzer rewrites the entire buffer via a series of edits that
4860 // are separated by unchanged newline characters.
4861 //
4862 // In order for the diffing logic below to work properly, any edits that
4863 // cancel each other out must be combined into one.
4864 while let Some((next_range, next_text)) = lsp_edits.peek() {
4865 if next_range.start > range.end {
4866 if next_range.start.row > range.end.row + 1
4867 || next_range.start.column > 0
4868 || snapshot.clip_point_utf16(
4869 PointUtf16::new(range.end.row, u32::MAX),
4870 Bias::Left,
4871 ) > range.end
4872 {
4873 break;
4874 }
4875 new_text.push('\n');
4876 }
4877 range.end = next_range.end;
4878 new_text.push_str(&next_text);
4879 lsp_edits.next();
4880 }
4881
4882 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4883 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4884 {
4885 return Err(anyhow!("invalid edits received from language server"));
4886 }
4887
4888 // For multiline edits, perform a diff of the old and new text so that
4889 // we can identify the changes more precisely, preserving the locations
4890 // of any anchors positioned in the unchanged regions.
4891 if range.end.row > range.start.row {
4892 let mut offset = range.start.to_offset(&snapshot);
4893 let old_text = snapshot.text_for_range(range).collect::<String>();
4894
4895 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4896 let mut moved_since_edit = true;
4897 for change in diff.iter_all_changes() {
4898 let tag = change.tag();
4899 let value = change.value();
4900 match tag {
4901 ChangeTag::Equal => {
4902 offset += value.len();
4903 moved_since_edit = true;
4904 }
4905 ChangeTag::Delete => {
4906 let start = snapshot.anchor_after(offset);
4907 let end = snapshot.anchor_before(offset + value.len());
4908 if moved_since_edit {
4909 edits.push((start..end, String::new()));
4910 } else {
4911 edits.last_mut().unwrap().0.end = end;
4912 }
4913 offset += value.len();
4914 moved_since_edit = false;
4915 }
4916 ChangeTag::Insert => {
4917 if moved_since_edit {
4918 let anchor = snapshot.anchor_after(offset);
4919 edits.push((anchor.clone()..anchor, value.to_string()));
4920 } else {
4921 edits.last_mut().unwrap().1.push_str(value);
4922 }
4923 moved_since_edit = false;
4924 }
4925 }
4926 }
4927 } else if range.end == range.start {
4928 let anchor = snapshot.anchor_after(range.start);
4929 edits.push((anchor.clone()..anchor, new_text));
4930 } else {
4931 let edit_start = snapshot.anchor_after(range.start);
4932 let edit_end = snapshot.anchor_before(range.end);
4933 edits.push((edit_start..edit_end, new_text));
4934 }
4935 }
4936
4937 Ok(edits)
4938 })
4939 }
4940
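// Looks up the snapshot that was sent to the language server as document `version`,
// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the one
// requested. When no version is given, the buffer's current text snapshot is used.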
4941 fn buffer_snapshot_for_lsp_version(
4942 &mut self,
4943 buffer: &ModelHandle<Buffer>,
4944 version: Option<i32>,
4945 cx: &AppContext,
4946 ) -> Result<TextBufferSnapshot> {
4947 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4948
4949 if let Some(version) = version {
4950 let buffer_id = buffer.read(cx).remote_id();
4951 let snapshots = self
4952 .buffer_snapshots
4953 .get_mut(&buffer_id)
4954 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4955 let mut found_snapshot = None;
4956 snapshots.retain(|(snapshot_version, snapshot)| {
4957 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4958 false
4959 } else {
4960 if *snapshot_version == version {
4961 found_snapshot = Some(snapshot.clone());
4962 }
4963 true
4964 }
4965 });
4966
4967 found_snapshot.ok_or_else(|| {
4968 anyhow!(
4969 "snapshot not found for buffer {} at version {}",
4970 buffer_id,
4971 version
4972 )
4973 })
4974 } else {
4975 Ok(buffer.read(cx).text_snapshot())
4976 }
4977 }
4978
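// Finds the (adapter, server) pair responsible for a buffer, keyed by the buffer's
// worktree and the name of the language's LSP adapter.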
4979 fn language_server_for_buffer(
4980 &self,
4981 buffer: &Buffer,
4982 cx: &AppContext,
4983 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4984 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4985 let worktree_id = file.worktree_id(cx);
4986 self.language_servers
4987 .get(&(worktree_id, language.lsp_adapter()?.name()))
4988 } else {
4989 None
4990 }
4991 }
4992}
4993
4994impl WorktreeHandle {
4995 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4996 match self {
4997 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4998 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4999 }
5000 }
5001}
5002
5003impl OpenBuffer {
5004 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5005 match self {
5006 OpenBuffer::Strong(handle) => Some(handle.clone()),
5007 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5008 OpenBuffer::Loading(_) => None,
5009 }
5010 }
5011}
5012
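// A `PathMatchCandidateSet` backed by a worktree snapshot, used by `match_paths`
// above to feed the fuzzy matcher.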
5013struct CandidateSet {
5014 snapshot: Snapshot,
5015 include_ignored: bool,
5016 include_root_name: bool,
5017}
5018
5019impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5020 type Candidates = CandidateSetIter<'a>;
5021
5022 fn id(&self) -> usize {
5023 self.snapshot.id().to_usize()
5024 }
5025
5026 fn len(&self) -> usize {
5027 if self.include_ignored {
5028 self.snapshot.file_count()
5029 } else {
5030 self.snapshot.visible_file_count()
5031 }
5032 }
5033
5034 fn prefix(&self) -> Arc<str> {
5035 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5036 self.snapshot.root_name().into()
5037 } else if self.include_root_name {
5038 format!("{}/", self.snapshot.root_name()).into()
5039 } else {
5040 "".into()
5041 }
5042 }
5043
5044 fn candidates(&'a self, start: usize) -> Self::Candidates {
5045 CandidateSetIter {
5046 traversal: self.snapshot.files(self.include_ignored, start),
5047 }
5048 }
5049}
5050
5051struct CandidateSetIter<'a> {
5052 traversal: Traversal<'a>,
5053}
5054
5055impl<'a> Iterator for CandidateSetIter<'a> {
5056 type Item = PathMatchCandidate<'a>;
5057
5058 fn next(&mut self) -> Option<Self::Item> {
5059 self.traversal.next().map(|entry| {
5060 if let EntryKind::File(char_bag) = entry.kind {
5061 PathMatchCandidate {
5062 path: &entry.path,
5063 char_bag,
5064 }
5065 } else {
5066 unreachable!()
5067 }
5068 })
5069 }
5070}
5071
5072impl Entity for Project {
5073 type Event = Event;
5074
5075 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5076 match &self.client_state {
5077 ProjectClientState::Local { remote_id_rx, .. } => {
5078 if let Some(project_id) = *remote_id_rx.borrow() {
5079 self.client
5080 .send(proto::UnregisterProject { project_id })
5081 .log_err();
5082 }
5083 }
5084 ProjectClientState::Remote { remote_id, .. } => {
5085 self.client
5086 .send(proto::LeaveProject {
5087 project_id: *remote_id,
5088 })
5089 .log_err();
5090 }
5091 }
5092 }
5093
5094 fn app_will_quit(
5095 &mut self,
5096 _: &mut MutableAppContext,
5097 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5098 let shutdown_futures = self
5099 .language_servers
5100 .drain()
5101 .filter_map(|(_, (_, server))| server.shutdown())
5102 .collect::<Vec<_>>();
5103 Some(
5104 async move {
5105 futures::future::join_all(shutdown_futures).await;
5106 }
5107 .boxed(),
5108 )
5109 }
5110}
5111
5112impl Collaborator {
5113 fn from_proto(
5114 message: proto::Collaborator,
5115 user_store: &ModelHandle<UserStore>,
5116 cx: &mut AsyncAppContext,
5117 ) -> impl Future<Output = Result<Self>> {
5118 let user = user_store.update(cx, |user_store, cx| {
5119 user_store.fetch_user(message.user_id, cx)
5120 });
5121
5122 async move {
5123 Ok(Self {
5124 peer_id: PeerId(message.peer_id),
5125 user: user.await?,
5126 replica_id: message.replica_id as ReplicaId,
5127 })
5128 }
5129 }
5130}
5131
5132impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5133 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5134 Self {
5135 worktree_id,
5136 path: path.as_ref().into(),
5137 }
5138 }
5139}
5140
5141impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5142 fn from(options: lsp::CreateFileOptions) -> Self {
5143 Self {
5144 overwrite: options.overwrite.unwrap_or(false),
5145 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5146 }
5147 }
5148}
5149
5150impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5151 fn from(options: lsp::RenameFileOptions) -> Self {
5152 Self {
5153 overwrite: options.overwrite.unwrap_or(false),
5154 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5155 }
5156 }
5157}
5158
5159impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5160 fn from(options: lsp::DeleteFileOptions) -> Self {
5161 Self {
5162 recursive: options.recursive.unwrap_or(false),
5163 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5164 }
5165 }
5166}
5167
5168fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5169 proto::Symbol {
5170 source_worktree_id: symbol.source_worktree_id.to_proto(),
5171 worktree_id: symbol.worktree_id.to_proto(),
5172 language_server_name: symbol.language_server_name.0.to_string(),
5173 name: symbol.name.clone(),
5174 kind: unsafe { mem::transmute(symbol.kind) },
5175 path: symbol.path.to_string_lossy().to_string(),
5176 start: Some(proto::Point {
5177 row: symbol.range.start.row,
5178 column: symbol.range.start.column,
5179 }),
5180 end: Some(proto::Point {
5181 row: symbol.range.end.row,
5182 column: symbol.range.end.column,
5183 }),
5184 signature: symbol.signature.to_vec(),
5185 }
5186}
5187
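// Computes `path` relative to `base` by walking both component lists in lockstep and
// emitting `..` for each remaining `base` component. A minimal sketch of the expected
// behavior, using hypothetical inputs:
//
//     relativize_path(Path::new("/a/b"), Path::new("/a/c/d")) == PathBuf::from("../c/d")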
5188fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5189 let mut path_components = path.components();
5190 let mut base_components = base.components();
5191 let mut components: Vec<Component> = Vec::new();
5192 loop {
5193 match (path_components.next(), base_components.next()) {
5194 (None, None) => break,
5195 (Some(a), None) => {
5196 components.push(a);
5197 components.extend(path_components.by_ref());
5198 break;
5199 }
5200 (None, _) => components.push(Component::ParentDir),
5201 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5202 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5203 (Some(a), Some(_)) => {
5204 components.push(Component::ParentDir);
5205 for _ in base_components {
5206 components.push(Component::ParentDir);
5207 }
5208 components.push(a);
5209 components.extend(path_components.by_ref());
5210 break;
5211 }
5212 }
5213 }
5214 components.iter().map(|c| c.as_os_str()).collect()
5215}
5216
5217impl Item for Buffer {
5218 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5219 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5220 }
5221}
5222
5223#[cfg(test)]
5224mod tests {
5225 use crate::worktree::WorktreeHandle;
5226
5227 use super::{Event, *};
5228 use fs::RealFs;
5229 use futures::{future, StreamExt};
5230 use gpui::test::subscribe;
5231 use language::{
5232 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5233 OffsetRangeExt, Point, ToPoint,
5234 };
5235 use lsp::Url;
5236 use serde_json::json;
5237 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5238 use unindent::Unindent as _;
5239 use util::{assert_set_eq, test::temp_tree};
5240
5241 #[gpui::test]
5242 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5243 let dir = temp_tree(json!({
5244 "root": {
5245 "apple": "",
5246 "banana": {
5247 "carrot": {
5248 "date": "",
5249 "endive": "",
5250 }
5251 },
5252 "fennel": {
5253 "grape": "",
5254 }
5255 }
5256 }));
5257
5258 let root_link_path = dir.path().join("root_link");
5259 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5260 unix::fs::symlink(
5261 &dir.path().join("root/fennel"),
5262 &dir.path().join("root/finnochio"),
5263 )
5264 .unwrap();
5265
5266 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
5267
5268 project.read_with(cx, |project, cx| {
5269 let tree = project.worktrees(cx).next().unwrap().read(cx);
5270 assert_eq!(tree.file_count(), 5);
5271 assert_eq!(
5272 tree.inode_for_path("fennel/grape"),
5273 tree.inode_for_path("finnochio/grape")
5274 );
5275 });
5276
5277 let cancel_flag = Default::default();
5278 let results = project
5279 .read_with(cx, |project, cx| {
5280 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5281 })
5282 .await;
5283 assert_eq!(
5284 results
5285 .into_iter()
5286 .map(|result| result.path)
5287 .collect::<Vec<Arc<Path>>>(),
5288 vec![
5289 PathBuf::from("banana/carrot/date").into(),
5290 PathBuf::from("banana/carrot/endive").into(),
5291 ]
5292 );
5293 }
5294
5295 #[gpui::test]
5296 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5297 cx.foreground().forbid_parking();
5298
5299 let mut rust_language = Language::new(
5300 LanguageConfig {
5301 name: "Rust".into(),
5302 path_suffixes: vec!["rs".to_string()],
5303 ..Default::default()
5304 },
5305 Some(tree_sitter_rust::language()),
5306 );
5307 let mut json_language = Language::new(
5308 LanguageConfig {
5309 name: "JSON".into(),
5310 path_suffixes: vec!["json".to_string()],
5311 ..Default::default()
5312 },
5313 None,
5314 );
5315 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5316 name: "the-rust-language-server",
5317 capabilities: lsp::ServerCapabilities {
5318 completion_provider: Some(lsp::CompletionOptions {
5319 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5320 ..Default::default()
5321 }),
5322 ..Default::default()
5323 },
5324 ..Default::default()
5325 });
5326 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5327 name: "the-json-language-server",
5328 capabilities: lsp::ServerCapabilities {
5329 completion_provider: Some(lsp::CompletionOptions {
5330 trigger_characters: Some(vec![":".to_string()]),
5331 ..Default::default()
5332 }),
5333 ..Default::default()
5334 },
5335 ..Default::default()
5336 });
5337
5338 let fs = FakeFs::new(cx.background());
5339 fs.insert_tree(
5340 "/the-root",
5341 json!({
5342 "test.rs": "const A: i32 = 1;",
5343 "test2.rs": "",
5344 "Cargo.toml": "a = 1",
5345 "package.json": "{\"a\": 1}",
5346 }),
5347 )
5348 .await;
5349
5350 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5351 project.update(cx, |project, _| {
5352 project.languages.add(Arc::new(rust_language));
5353 project.languages.add(Arc::new(json_language));
5354 });
5355
5356 // Open a buffer without an associated language server.
5357 let toml_buffer = project
5358 .update(cx, |project, cx| {
5359 project.open_local_buffer("/the-root/Cargo.toml", cx)
5360 })
5361 .await
5362 .unwrap();
5363
5364 // Open a buffer with an associated language server.
5365 let rust_buffer = project
5366 .update(cx, |project, cx| {
5367 project.open_local_buffer("/the-root/test.rs", cx)
5368 })
5369 .await
5370 .unwrap();
5371
5372 // A server is started up, and it is notified about Rust files.
5373 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5374 assert_eq!(
5375 fake_rust_server
5376 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5377 .await
5378 .text_document,
5379 lsp::TextDocumentItem {
5380 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5381 version: 0,
5382 text: "const A: i32 = 1;".to_string(),
5383 language_id: Default::default()
5384 }
5385 );
5386
5387 // The buffer is configured based on the language server's capabilities.
5388 rust_buffer.read_with(cx, |buffer, _| {
5389 assert_eq!(
5390 buffer.completion_triggers(),
5391 &[".".to_string(), "::".to_string()]
5392 );
5393 });
5394 toml_buffer.read_with(cx, |buffer, _| {
5395 assert!(buffer.completion_triggers().is_empty());
5396 });
5397
5398 // Edit a buffer. The changes are reported to the language server.
5399 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5400 assert_eq!(
5401 fake_rust_server
5402 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5403 .await
5404 .text_document,
5405 lsp::VersionedTextDocumentIdentifier::new(
5406 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5407 1
5408 )
5409 );
5410
5411 // Open a third buffer with a different associated language server.
5412 let json_buffer = project
5413 .update(cx, |project, cx| {
5414 project.open_local_buffer("/the-root/package.json", cx)
5415 })
5416 .await
5417 .unwrap();
5418
5419 // A JSON language server is started and is notified only about the JSON buffer.
5420 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5421 assert_eq!(
5422 fake_json_server
5423 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5424 .await
5425 .text_document,
5426 lsp::TextDocumentItem {
5427 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5428 version: 0,
5429 text: "{\"a\": 1}".to_string(),
5430 language_id: Default::default()
5431 }
5432 );
5433
5434 // This buffer is configured based on the second language server's
5435 // capabilities.
5436 json_buffer.read_with(cx, |buffer, _| {
5437 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5438 });
5439
5440 // When opening another buffer whose language server is already running,
5441 // it is also configured based on the existing language server's capabilities.
5442 let rust_buffer2 = project
5443 .update(cx, |project, cx| {
5444 project.open_local_buffer("/the-root/test2.rs", cx)
5445 })
5446 .await
5447 .unwrap();
5448 rust_buffer2.read_with(cx, |buffer, _| {
5449 assert_eq!(
5450 buffer.completion_triggers(),
5451 &[".".to_string(), "::".to_string()]
5452 );
5453 });
5454
5455 // Changes are reported only to servers matching the buffer's language.
5456 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5457 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5458 assert_eq!(
5459 fake_rust_server
5460 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5461 .await
5462 .text_document,
5463 lsp::VersionedTextDocumentIdentifier::new(
5464 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5465 1
5466 )
5467 );
5468
5469 // Save notifications are reported to all servers.
5470 toml_buffer
5471 .update(cx, |buffer, cx| buffer.save(cx))
5472 .await
5473 .unwrap();
5474 assert_eq!(
5475 fake_rust_server
5476 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5477 .await
5478 .text_document,
5479 lsp::TextDocumentIdentifier::new(
5480 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5481 )
5482 );
5483 assert_eq!(
5484 fake_json_server
5485 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5486 .await
5487 .text_document,
5488 lsp::TextDocumentIdentifier::new(
5489 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5490 )
5491 );
5492
5493 // Renames are reported only to servers matching the buffer's language.
5494 fs.rename(
5495 Path::new("/the-root/test2.rs"),
5496 Path::new("/the-root/test3.rs"),
5497 Default::default(),
5498 )
5499 .await
5500 .unwrap();
5501 assert_eq!(
5502 fake_rust_server
5503 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5504 .await
5505 .text_document,
5506 lsp::TextDocumentIdentifier::new(
5507 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5508 ),
5509 );
5510 assert_eq!(
5511 fake_rust_server
5512 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5513 .await
5514 .text_document,
5515 lsp::TextDocumentItem {
5516 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5517 version: 0,
5518 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5519 language_id: Default::default()
5520 },
5521 );
5522
5523 rust_buffer2.update(cx, |buffer, cx| {
5524 buffer.update_diagnostics(
5525 DiagnosticSet::from_sorted_entries(
5526 vec![DiagnosticEntry {
5527 diagnostic: Default::default(),
5528 range: Anchor::MIN..Anchor::MAX,
5529 }],
5530 &buffer.snapshot(),
5531 ),
5532 cx,
5533 );
5534 assert_eq!(
5535 buffer
5536 .snapshot()
5537 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5538 .count(),
5539 1
5540 );
5541 });
5542
5543 // When the rename changes the extension of the file, the buffer gets closed on the old
5544 // language server and gets opened on the new one.
5545 fs.rename(
5546 Path::new("/the-root/test3.rs"),
5547 Path::new("/the-root/test3.json"),
5548 Default::default(),
5549 )
5550 .await
5551 .unwrap();
5552 assert_eq!(
5553 fake_rust_server
5554 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5555 .await
5556 .text_document,
5557 lsp::TextDocumentIdentifier::new(
5558 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5559 ),
5560 );
5561 assert_eq!(
5562 fake_json_server
5563 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5564 .await
5565 .text_document,
5566 lsp::TextDocumentItem {
5567 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5568 version: 0,
5569 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5570 language_id: Default::default()
5571 },
5572 );
5573
5574 // We clear the diagnostics, since the language has changed.
5575 rust_buffer2.read_with(cx, |buffer, _| {
5576 assert_eq!(
5577 buffer
5578 .snapshot()
5579 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5580 .count(),
5581 0
5582 );
5583 });
5584
5585 // The renamed file's version resets after changing language servers.
5586 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5587 assert_eq!(
5588 fake_json_server
5589 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5590 .await
5591 .text_document,
5592 lsp::VersionedTextDocumentIdentifier::new(
5593 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5594 1
5595 )
5596 );
5597
5598 // Restart language servers
5599 project.update(cx, |project, cx| {
5600 project.restart_language_servers_for_buffers(
5601 vec![rust_buffer.clone(), json_buffer.clone()],
5602 cx,
5603 );
5604 });
5605
5606 let mut rust_shutdown_requests = fake_rust_server
5607 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5608 let mut json_shutdown_requests = fake_json_server
5609 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5610 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5611
5612 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5613 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5614
5615 // Ensure the Rust document is reopened in the new Rust language server
5616 assert_eq!(
5617 fake_rust_server
5618 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5619 .await
5620 .text_document,
5621 lsp::TextDocumentItem {
5622 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5623 version: 1,
5624 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5625 language_id: Default::default()
5626 }
5627 );
5628
5629 // Ensure the JSON documents are reopened in the new JSON language server
5630 assert_set_eq!(
5631 [
5632 fake_json_server
5633 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5634 .await
5635 .text_document,
5636 fake_json_server
5637 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5638 .await
5639 .text_document,
5640 ],
5641 [
5642 lsp::TextDocumentItem {
5643 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5644 version: 0,
5645 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5646 language_id: Default::default()
5647 },
5648 lsp::TextDocumentItem {
5649 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5650 version: 1,
5651 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5652 language_id: Default::default()
5653 }
5654 ]
5655 );
5656
5657 // Close notifications are reported only to servers matching the buffer's language.
5658 cx.update(|_| drop(json_buffer));
5659 let close_message = lsp::DidCloseTextDocumentParams {
5660 text_document: lsp::TextDocumentIdentifier::new(
5661 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5662 ),
5663 };
5664 assert_eq!(
5665 fake_json_server
5666 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5667 .await,
5668 close_message,
5669 );
5670 }
5671
5672 #[gpui::test]
5673 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5674 cx.foreground().forbid_parking();
5675
5676 let fs = FakeFs::new(cx.background());
5677 fs.insert_tree(
5678 "/dir",
5679 json!({
5680 "a.rs": "let a = 1;",
5681 "b.rs": "let b = 2;"
5682 }),
5683 )
5684 .await;
5685
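// Open each file as its own single-file worktree.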
5686 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5687
5688 let buffer_a = project
5689 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5690 .await
5691 .unwrap();
5692 let buffer_b = project
5693 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5694 .await
5695 .unwrap();
5696
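// Report an error in one file and a warning in the other.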
5697 project.update(cx, |project, cx| {
5698 project
5699 .update_diagnostics(
5700 lsp::PublishDiagnosticsParams {
5701 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5702 version: None,
5703 diagnostics: vec![lsp::Diagnostic {
5704 range: lsp::Range::new(
5705 lsp::Position::new(0, 4),
5706 lsp::Position::new(0, 5),
5707 ),
5708 severity: Some(lsp::DiagnosticSeverity::ERROR),
5709 message: "error 1".to_string(),
5710 ..Default::default()
5711 }],
5712 },
5713 &[],
5714 cx,
5715 )
5716 .unwrap();
5717 project
5718 .update_diagnostics(
5719 lsp::PublishDiagnosticsParams {
5720 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5721 version: None,
5722 diagnostics: vec![lsp::Diagnostic {
5723 range: lsp::Range::new(
5724 lsp::Position::new(0, 4),
5725 lsp::Position::new(0, 5),
5726 ),
5727 severity: Some(lsp::DiagnosticSeverity::WARNING),
5728 message: "error 2".to_string(),
5729 ..Default::default()
5730 }],
5731 },
5732 &[],
5733 cx,
5734 )
5735 .unwrap();
5736 });
5737
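// Each buffer should only contain the diagnostic reported for its own file.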
5738 buffer_a.read_with(cx, |buffer, _| {
5739 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5740 assert_eq!(
5741 chunks
5742 .iter()
5743 .map(|(s, d)| (s.as_str(), *d))
5744 .collect::<Vec<_>>(),
5745 &[
5746 ("let ", None),
5747 ("a", Some(DiagnosticSeverity::ERROR)),
5748 (" = 1;", None),
5749 ]
5750 );
5751 });
5752 buffer_b.read_with(cx, |buffer, _| {
5753 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5754 assert_eq!(
5755 chunks
5756 .iter()
5757 .map(|(s, d)| (s.as_str(), *d))
5758 .collect::<Vec<_>>(),
5759 &[
5760 ("let ", None),
5761 ("b", Some(DiagnosticSeverity::WARNING)),
5762 (" = 2;", None),
5763 ]
5764 );
5765 });
5766 }
5767
5768 #[gpui::test]
5769 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5770 cx.foreground().forbid_parking();
5771
5772 let progress_token = "the-progress-token";
5773 let mut language = Language::new(
5774 LanguageConfig {
5775 name: "Rust".into(),
5776 path_suffixes: vec!["rs".to_string()],
5777 ..Default::default()
5778 },
5779 Some(tree_sitter_rust::language()),
5780 );
5781 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5782 disk_based_diagnostics_progress_token: Some(progress_token),
5783 disk_based_diagnostics_sources: &["disk"],
5784 ..Default::default()
5785 });
5786
5787 let fs = FakeFs::new(cx.background());
5788 fs.insert_tree(
5789 "/dir",
5790 json!({
5791 "a.rs": "fn a() { A }",
5792 "b.rs": "const y: i32 = 1",
5793 }),
5794 )
5795 .await;
5796
5797 let project = Project::test(fs, ["/dir"], cx).await;
5798 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5799 let worktree_id =
5800 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5801
5802 // Cause the worktree to start the fake language server
5803 let _buffer = project
5804 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5805 .await
5806 .unwrap();
5807
5808 let mut events = subscribe(&project, cx);
5809
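// The first progress notification with the disk-based token emits a started event.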
5810 let mut fake_server = fake_servers.next().await.unwrap();
5811 fake_server.start_progress(progress_token).await;
5812 assert_eq!(
5813 events.next().await.unwrap(),
5814 Event::DiskBasedDiagnosticsStarted
5815 );
5816
5817 fake_server.start_progress(progress_token).await;
5818 fake_server.end_progress(progress_token).await;
5819 fake_server.start_progress(progress_token).await;
5820
5821 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5822 lsp::PublishDiagnosticsParams {
5823 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5824 version: None,
5825 diagnostics: vec![lsp::Diagnostic {
5826 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5827 severity: Some(lsp::DiagnosticSeverity::ERROR),
5828 message: "undefined variable 'A'".to_string(),
5829 ..Default::default()
5830 }],
5831 },
5832 );
5833 assert_eq!(
5834 events.next().await.unwrap(),
5835 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5836 );
5837
5838 fake_server.end_progress(progress_token).await;
5839 fake_server.end_progress(progress_token).await;
5840 assert_eq!(
5841 events.next().await.unwrap(),
5842 Event::DiskBasedDiagnosticsUpdated
5843 );
5844 assert_eq!(
5845 events.next().await.unwrap(),
5846 Event::DiskBasedDiagnosticsFinished
5847 );
5848
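// Open the file that received diagnostics and check that they were stored on the buffer.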
5849 let buffer = project
5850 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5851 .await
5852 .unwrap();
5853
5854 buffer.read_with(cx, |buffer, _| {
5855 let snapshot = buffer.snapshot();
5856 let diagnostics = snapshot
5857 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5858 .collect::<Vec<_>>();
5859 assert_eq!(
5860 diagnostics,
5861 &[DiagnosticEntry {
5862 range: Point::new(0, 9)..Point::new(0, 10),
5863 diagnostic: Diagnostic {
5864 severity: lsp::DiagnosticSeverity::ERROR,
5865 message: "undefined variable 'A'".to_string(),
5866 group_id: 0,
5867 is_primary: true,
5868 ..Default::default()
5869 }
5870 }]
5871 )
5872 });
5873
5874 // Ensure publishing empty diagnostics twice only results in one update event.
5875 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5876 lsp::PublishDiagnosticsParams {
5877 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5878 version: None,
5879 diagnostics: Default::default(),
5880 },
5881 );
5882 assert_eq!(
5883 events.next().await.unwrap(),
5884 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5885 );
5886
5887 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5888 lsp::PublishDiagnosticsParams {
5889 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5890 version: None,
5891 diagnostics: Default::default(),
5892 },
5893 );
5894 cx.foreground().run_until_parked();
5895 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5896 }
5897
5898 #[gpui::test]
5899 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5900 cx.foreground().forbid_parking();
5901
5902 let progress_token = "the-progress-token";
5903 let mut language = Language::new(
5904 LanguageConfig {
5905 path_suffixes: vec!["rs".to_string()],
5906 ..Default::default()
5907 },
5908 None,
5909 );
5910 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5911 disk_based_diagnostics_sources: &["disk"],
5912 disk_based_diagnostics_progress_token: Some(progress_token),
5913 ..Default::default()
5914 });
5915
5916 let fs = FakeFs::new(cx.background());
5917 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5918
5919 let project = Project::test(fs, ["/dir"], cx).await;
5920 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5921
5922 let buffer = project
5923 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5924 .await
5925 .unwrap();
5926
5927 // Simulate diagnostics starting to update.
5928 let mut fake_server = fake_servers.next().await.unwrap();
5929 fake_server.start_progress(progress_token).await;
5930
5931 // Restart the server before the diagnostics finish updating.
5932 project.update(cx, |project, cx| {
5933 project.restart_language_servers_for_buffers([buffer], cx);
5934 });
5935 let mut events = subscribe(&project, cx);
5936
5937 // Simulate the newly started server beginning its own disk-based diagnostics check.
5938 let mut fake_server = fake_servers.next().await.unwrap();
5939 fake_server.start_progress(progress_token).await;
5940 assert_eq!(
5941 events.next().await.unwrap(),
5942 Event::DiskBasedDiagnosticsStarted
5943 );
5944
5945 // All diagnostics are considered done, despite the old server's diagnostic
5946 // task never completing.
5947 fake_server.end_progress(progress_token).await;
5948 assert_eq!(
5949 events.next().await.unwrap(),
5950 Event::DiskBasedDiagnosticsUpdated
5951 );
5952 assert_eq!(
5953 events.next().await.unwrap(),
5954 Event::DiskBasedDiagnosticsFinished
5955 );
5956 project.read_with(cx, |project, _| {
5957 assert!(!project.is_running_disk_based_diagnostics());
5958 });
5959 }
5960
5961 #[gpui::test]
5962 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5963 cx.foreground().forbid_parking();
5964
5965 let mut language = Language::new(
5966 LanguageConfig {
5967 name: "Rust".into(),
5968 path_suffixes: vec!["rs".to_string()],
5969 ..Default::default()
5970 },
5971 Some(tree_sitter_rust::language()),
5972 );
5973 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5974 disk_based_diagnostics_sources: &["disk"],
5975 ..Default::default()
5976 });
5977
5978 let text = "
5979 fn a() { A }
5980 fn b() { BB }
5981 fn c() { CCC }
5982 "
5983 .unindent();
5984
5985 let fs = FakeFs::new(cx.background());
5986 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5987
5988 let project = Project::test(fs, ["/dir"], cx).await;
5989 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5990
5991 let buffer = project
5992 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5993 .await
5994 .unwrap();
5995
5996 let mut fake_server = fake_servers.next().await.unwrap();
5997 let open_notification = fake_server
5998 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5999 .await;
6000
6001 // Edit the buffer, moving the content down
6002 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6003 let change_notification_1 = fake_server
6004 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6005 .await;
6006 assert!(
6007 change_notification_1.text_document.version > open_notification.text_document.version
6008 );
6009
6010 // Report some diagnostics for the initial version of the buffer
6011 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6012 lsp::PublishDiagnosticsParams {
6013 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6014 version: Some(open_notification.text_document.version),
6015 diagnostics: vec![
6016 lsp::Diagnostic {
6017 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6018 severity: Some(DiagnosticSeverity::ERROR),
6019 message: "undefined variable 'A'".to_string(),
6020 source: Some("disk".to_string()),
6021 ..Default::default()
6022 },
6023 lsp::Diagnostic {
6024 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6025 severity: Some(DiagnosticSeverity::ERROR),
6026 message: "undefined variable 'BB'".to_string(),
6027 source: Some("disk".to_string()),
6028 ..Default::default()
6029 },
6030 lsp::Diagnostic {
6031 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6032 severity: Some(DiagnosticSeverity::ERROR),
6033 source: Some("disk".to_string()),
6034 message: "undefined variable 'CCC'".to_string(),
6035 ..Default::default()
6036 },
6037 ],
6038 },
6039 );
6040
6041 // The diagnostics have moved down since they were created.
6042 buffer.next_notification(cx).await;
6043 buffer.read_with(cx, |buffer, _| {
6044 assert_eq!(
6045 buffer
6046 .snapshot()
6047 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6048 .collect::<Vec<_>>(),
6049 &[
6050 DiagnosticEntry {
6051 range: Point::new(3, 9)..Point::new(3, 11),
6052 diagnostic: Diagnostic {
6053 severity: DiagnosticSeverity::ERROR,
6054 message: "undefined variable 'BB'".to_string(),
6055 is_disk_based: true,
6056 group_id: 1,
6057 is_primary: true,
6058 ..Default::default()
6059 },
6060 },
6061 DiagnosticEntry {
6062 range: Point::new(4, 9)..Point::new(4, 12),
6063 diagnostic: Diagnostic {
6064 severity: DiagnosticSeverity::ERROR,
6065 message: "undefined variable 'CCC'".to_string(),
6066 is_disk_based: true,
6067 group_id: 2,
6068 is_primary: true,
6069 ..Default::default()
6070 }
6071 }
6072 ]
6073 );
6074 assert_eq!(
6075 chunks_with_diagnostics(buffer, 0..buffer.len()),
6076 [
6077 ("\n\nfn a() { ".to_string(), None),
6078 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6079 (" }\nfn b() { ".to_string(), None),
6080 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6081 (" }\nfn c() { ".to_string(), None),
6082 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6083 (" }\n".to_string(), None),
6084 ]
6085 );
6086 assert_eq!(
6087 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6088 [
6089 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6090 (" }\nfn c() { ".to_string(), None),
6091 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6092 ]
6093 );
6094 });
6095
6096 // Ensure overlapping diagnostics are highlighted correctly.
6097 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6098 lsp::PublishDiagnosticsParams {
6099 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6100 version: Some(open_notification.text_document.version),
6101 diagnostics: vec![
6102 lsp::Diagnostic {
6103 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6104 severity: Some(DiagnosticSeverity::ERROR),
6105 message: "undefined variable 'A'".to_string(),
6106 source: Some("disk".to_string()),
6107 ..Default::default()
6108 },
6109 lsp::Diagnostic {
6110 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6111 severity: Some(DiagnosticSeverity::WARNING),
6112 message: "unreachable statement".to_string(),
6113 source: Some("disk".to_string()),
6114 ..Default::default()
6115 },
6116 ],
6117 },
6118 );
6119
6120 buffer.next_notification(cx).await;
6121 buffer.read_with(cx, |buffer, _| {
6122 assert_eq!(
6123 buffer
6124 .snapshot()
6125 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6126 .collect::<Vec<_>>(),
6127 &[
6128 DiagnosticEntry {
6129 range: Point::new(2, 9)..Point::new(2, 12),
6130 diagnostic: Diagnostic {
6131 severity: DiagnosticSeverity::WARNING,
6132 message: "unreachable statement".to_string(),
6133 is_disk_based: true,
6134 group_id: 1,
6135 is_primary: true,
6136 ..Default::default()
6137 }
6138 },
6139 DiagnosticEntry {
6140 range: Point::new(2, 9)..Point::new(2, 10),
6141 diagnostic: Diagnostic {
6142 severity: DiagnosticSeverity::ERROR,
6143 message: "undefined variable 'A'".to_string(),
6144 is_disk_based: true,
6145 group_id: 0,
6146 is_primary: true,
6147 ..Default::default()
6148 },
6149 }
6150 ]
6151 );
6152 assert_eq!(
6153 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6154 [
6155 ("fn a() { ".to_string(), None),
6156 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6157 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6158 ("\n".to_string(), None),
6159 ]
6160 );
6161 assert_eq!(
6162 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6163 [
6164 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6165 ("\n".to_string(), None),
6166 ]
6167 );
6168 });
6169
6170 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6171 // changes since the last save.
6172 buffer.update(cx, |buffer, cx| {
6173 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6174 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6175 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6176 });
6177 let change_notification_2 = fake_server
6178 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6179 .await;
6180 assert!(
6181 change_notification_2.text_document.version
6182 > change_notification_1.text_document.version
6183 );
6184
6185 // Handle out-of-order diagnostics
6186 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6187 lsp::PublishDiagnosticsParams {
6188 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6189 version: Some(change_notification_2.text_document.version),
6190 diagnostics: vec![
6191 lsp::Diagnostic {
6192 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6193 severity: Some(DiagnosticSeverity::ERROR),
6194 message: "undefined variable 'BB'".to_string(),
6195 source: Some("disk".to_string()),
6196 ..Default::default()
6197 },
6198 lsp::Diagnostic {
6199 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6200 severity: Some(DiagnosticSeverity::WARNING),
6201 message: "undefined variable 'A'".to_string(),
6202 source: Some("disk".to_string()),
6203 ..Default::default()
6204 },
6205 ],
6206 },
6207 );
6208
6209 buffer.next_notification(cx).await;
6210 buffer.read_with(cx, |buffer, _| {
6211 assert_eq!(
6212 buffer
6213 .snapshot()
6214 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6215 .collect::<Vec<_>>(),
6216 &[
6217 DiagnosticEntry {
6218 range: Point::new(2, 21)..Point::new(2, 22),
6219 diagnostic: Diagnostic {
6220 severity: DiagnosticSeverity::WARNING,
6221 message: "undefined variable 'A'".to_string(),
6222 is_disk_based: true,
6223 group_id: 1,
6224 is_primary: true,
6225 ..Default::default()
6226 }
6227 },
6228 DiagnosticEntry {
6229 range: Point::new(3, 9)..Point::new(3, 14),
6230 diagnostic: Diagnostic {
6231 severity: DiagnosticSeverity::ERROR,
6232 message: "undefined variable 'BB'".to_string(),
6233 is_disk_based: true,
6234 group_id: 0,
6235 is_primary: true,
6236 ..Default::default()
6237 },
6238 }
6239 ]
6240 );
6241 });
6242 }
6243
6244 #[gpui::test]
6245 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6246 cx.foreground().forbid_parking();
6247
6248 let text = concat!(
6249 "let one = ;\n", //
6250 "let two = \n",
6251 "let three = 3;\n",
6252 );
6253
6254 let fs = FakeFs::new(cx.background());
6255 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6256
6257 let project = Project::test(fs, ["/dir"], cx).await;
6258 let buffer = project
6259 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6260 .await
6261 .unwrap();
6262
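// Report two zero-length diagnostic ranges: one just before a line's final character, and one at the very end of a line.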
6263 project.update(cx, |project, cx| {
6264 project
6265 .update_buffer_diagnostics(
6266 &buffer,
6267 vec![
6268 DiagnosticEntry {
6269 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6270 diagnostic: Diagnostic {
6271 severity: DiagnosticSeverity::ERROR,
6272 message: "syntax error 1".to_string(),
6273 ..Default::default()
6274 },
6275 },
6276 DiagnosticEntry {
6277 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6278 diagnostic: Diagnostic {
6279 severity: DiagnosticSeverity::ERROR,
6280 message: "syntax error 2".to_string(),
6281 ..Default::default()
6282 },
6283 },
6284 ],
6285 None,
6286 cx,
6287 )
6288 .unwrap();
6289 });
6290
6291 // An empty range is extended forward to include the following character.
6292 // At the end of a line, an empty range is extended backward to include
6293 // the preceding character.
6294 buffer.read_with(cx, |buffer, _| {
6295 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6296 assert_eq!(
6297 chunks
6298 .iter()
6299 .map(|(s, d)| (s.as_str(), *d))
6300 .collect::<Vec<_>>(),
6301 &[
6302 ("let one = ", None),
6303 (";", Some(DiagnosticSeverity::ERROR)),
6304 ("\nlet two =", None),
6305 (" ", Some(DiagnosticSeverity::ERROR)),
6306 ("\nlet three = 3;\n", None)
6307 ]
6308 );
6309 });
6310 }
6311
6312 #[gpui::test]
6313 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6314 cx.foreground().forbid_parking();
6315
6316 let mut language = Language::new(
6317 LanguageConfig {
6318 name: "Rust".into(),
6319 path_suffixes: vec!["rs".to_string()],
6320 ..Default::default()
6321 },
6322 Some(tree_sitter_rust::language()),
6323 );
6324 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6325
6326 let text = "
6327 fn a() {
6328 f1();
6329 }
6330 fn b() {
6331 f2();
6332 }
6333 fn c() {
6334 f3();
6335 }
6336 "
6337 .unindent();
6338
6339 let fs = FakeFs::new(cx.background());
6340 fs.insert_tree(
6341 "/dir",
6342 json!({
6343 "a.rs": text.clone(),
6344 }),
6345 )
6346 .await;
6347
6348 let project = Project::test(fs, ["/dir"], cx).await;
6349 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6350 let buffer = project
6351 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6352 .await
6353 .unwrap();
6354
6355 let mut fake_server = fake_servers.next().await.unwrap();
6356 let lsp_document_version = fake_server
6357 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6358 .await
6359 .text_document
6360 .version;
6361
6362 // Simulate editing the buffer after the language server computes some edits.
6363 buffer.update(cx, |buffer, cx| {
6364 buffer.edit(
6365 [(
6366 Point::new(0, 0)..Point::new(0, 0),
6367 "// above first function\n",
6368 )],
6369 cx,
6370 );
6371 buffer.edit(
6372 [(
6373 Point::new(2, 0)..Point::new(2, 0),
6374 " // inside first function\n",
6375 )],
6376 cx,
6377 );
6378 buffer.edit(
6379 [(
6380 Point::new(6, 4)..Point::new(6, 4),
6381 "// inside second function ",
6382 )],
6383 cx,
6384 );
6385
6386 assert_eq!(
6387 buffer.text(),
6388 "
6389 // above first function
6390 fn a() {
6391 // inside first function
6392 f1();
6393 }
6394 fn b() {
6395 // inside second function f2();
6396 }
6397 fn c() {
6398 f3();
6399 }
6400 "
6401 .unindent()
6402 );
6403 });
6404
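// Translate edits that were computed against the older document version into edits on the current buffer contents.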
6405 let edits = project
6406 .update(cx, |project, cx| {
6407 project.edits_from_lsp(
6408 &buffer,
6409 vec![
6410 // replace body of first function
6411 lsp::TextEdit {
6412 range: lsp::Range::new(
6413 lsp::Position::new(0, 0),
6414 lsp::Position::new(3, 0),
6415 ),
6416 new_text: "
6417 fn a() {
6418 f10();
6419 }
6420 "
6421 .unindent(),
6422 },
6423 // edit inside second function
6424 lsp::TextEdit {
6425 range: lsp::Range::new(
6426 lsp::Position::new(4, 6),
6427 lsp::Position::new(4, 6),
6428 ),
6429 new_text: "00".into(),
6430 },
6431 // edit inside third function via two distinct edits
6432 lsp::TextEdit {
6433 range: lsp::Range::new(
6434 lsp::Position::new(7, 5),
6435 lsp::Position::new(7, 5),
6436 ),
6437 new_text: "4000".into(),
6438 },
6439 lsp::TextEdit {
6440 range: lsp::Range::new(
6441 lsp::Position::new(7, 5),
6442 lsp::Position::new(7, 6),
6443 ),
6444 new_text: "".into(),
6445 },
6446 ],
6447 Some(lsp_document_version),
6448 cx,
6449 )
6450 })
6451 .await
6452 .unwrap();
6453
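// Applying the translated edits should preserve both the server's changes and the user's intervening edits.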
6454 buffer.update(cx, |buffer, cx| {
6455 for (range, new_text) in edits {
6456 buffer.edit([(range, new_text)], cx);
6457 }
6458 assert_eq!(
6459 buffer.text(),
6460 "
6461 // above first function
6462 fn a() {
6463 // inside first function
6464 f10();
6465 }
6466 fn b() {
6467 // inside second function f200();
6468 }
6469 fn c() {
6470 f4000();
6471 }
6472 "
6473 .unindent()
6474 );
6475 });
6476 }
6477
6478 #[gpui::test]
6479 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6480 cx.foreground().forbid_parking();
6481
6482 let text = "
6483 use a::b;
6484 use a::c;
6485
6486 fn f() {
6487 b();
6488 c();
6489 }
6490 "
6491 .unindent();
6492
6493 let fs = FakeFs::new(cx.background());
6494 fs.insert_tree(
6495 "/dir",
6496 json!({
6497 "a.rs": text.clone(),
6498 }),
6499 )
6500 .await;
6501
6502 let project = Project::test(fs, ["/dir"], cx).await;
6503 let buffer = project
6504 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6505 .await
6506 .unwrap();
6507
6508 // Simulate the language server sending us a small edit in the form of a very large diff.
6509 // Rust-analyzer does this when performing a merge-imports code action.
6510 let edits = project
6511 .update(cx, |project, cx| {
6512 project.edits_from_lsp(
6513 &buffer,
6514 [
6515 // Replace the first use statement without editing the semicolon.
6516 lsp::TextEdit {
6517 range: lsp::Range::new(
6518 lsp::Position::new(0, 4),
6519 lsp::Position::new(0, 8),
6520 ),
6521 new_text: "a::{b, c}".into(),
6522 },
6523 // Reinsert the remainder of the file between the semicolon and the final
6524 // newline of the file.
6525 lsp::TextEdit {
6526 range: lsp::Range::new(
6527 lsp::Position::new(0, 9),
6528 lsp::Position::new(0, 9),
6529 ),
6530 new_text: "\n\n".into(),
6531 },
6532 lsp::TextEdit {
6533 range: lsp::Range::new(
6534 lsp::Position::new(0, 9),
6535 lsp::Position::new(0, 9),
6536 ),
6537 new_text: "
6538 fn f() {
6539 b();
6540 c();
6541 }"
6542 .unindent(),
6543 },
6544 // Delete everything after the first newline of the file.
6545 lsp::TextEdit {
6546 range: lsp::Range::new(
6547 lsp::Position::new(1, 0),
6548 lsp::Position::new(7, 0),
6549 ),
6550 new_text: "".into(),
6551 },
6552 ],
6553 None,
6554 cx,
6555 )
6556 })
6557 .await
6558 .unwrap();
6559
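// The large diff should collapse down to a minimal set of buffer edits.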
6560 buffer.update(cx, |buffer, cx| {
6561 let edits = edits
6562 .into_iter()
6563 .map(|(range, text)| {
6564 (
6565 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6566 text,
6567 )
6568 })
6569 .collect::<Vec<_>>();
6570
6571 assert_eq!(
6572 edits,
6573 [
6574 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6575 (Point::new(1, 0)..Point::new(2, 0), "".into())
6576 ]
6577 );
6578
6579 for (range, new_text) in edits {
6580 buffer.edit([(range, new_text)], cx);
6581 }
6582 assert_eq!(
6583 buffer.text(),
6584 "
6585 use a::{b, c};
6586
6587 fn f() {
6588 b();
6589 c();
6590 }
6591 "
6592 .unindent()
6593 );
6594 });
6595 }
6596
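// Collects the buffer's chunks in `range`, merging adjacent chunks that share the same diagnostic severity.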
6597 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6598 buffer: &Buffer,
6599 range: Range<T>,
6600 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6601 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6602 for chunk in buffer.snapshot().chunks(range, true) {
6603 if chunks.last().map_or(false, |prev_chunk| {
6604 prev_chunk.1 == chunk.diagnostic_severity
6605 }) {
6606 chunks.last_mut().unwrap().0.push_str(chunk.text);
6607 } else {
6608 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6609 }
6610 }
6611 chunks
6612 }
6613
6614 #[gpui::test]
6615 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6616 let dir = temp_tree(json!({
6617 "root": {
6618 "dir1": {},
6619 "dir2": {
6620 "dir3": {}
6621 }
6622 }
6623 }));
6624
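// The worktree contains only directories, so path matching should return no results.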
6625 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6626 let cancel_flag = Default::default();
6627 let results = project
6628 .read_with(cx, |project, cx| {
6629 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6630 })
6631 .await;
6632
6633 assert!(results.is_empty());
6634 }
6635
6636 #[gpui::test(iterations = 10)]
6637 async fn test_definition(cx: &mut gpui::TestAppContext) {
6638 let mut language = Language::new(
6639 LanguageConfig {
6640 name: "Rust".into(),
6641 path_suffixes: vec!["rs".to_string()],
6642 ..Default::default()
6643 },
6644 Some(tree_sitter_rust::language()),
6645 );
6646 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6647
6648 let fs = FakeFs::new(cx.background());
6649 fs.insert_tree(
6650 "/dir",
6651 json!({
6652 "a.rs": "const fn a() { A }",
6653 "b.rs": "const y: i32 = crate::a()",
6654 }),
6655 )
6656 .await;
6657
6658 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6659 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6660
6661 let buffer = project
6662 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6663 .await
6664 .unwrap();
6665
6666 let fake_server = fake_servers.next().await.unwrap();
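// Respond to the definition request with a location in a file outside the project's visible worktree.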
6667 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6668 let params = params.text_document_position_params;
6669 assert_eq!(
6670 params.text_document.uri.to_file_path().unwrap(),
6671 Path::new("/dir/b.rs"),
6672 );
6673 assert_eq!(params.position, lsp::Position::new(0, 22));
6674
6675 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6676 lsp::Location::new(
6677 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6678 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6679 ),
6680 )))
6681 });
6682
6683 let mut definitions = project
6684 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6685 .await
6686 .unwrap();
6687
6688 assert_eq!(definitions.len(), 1);
6689 let definition = definitions.pop().unwrap();
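// The target buffer is loaded into a new, non-visible worktree that is added on demand.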
6690 cx.update(|cx| {
6691 let target_buffer = definition.buffer.read(cx);
6692 assert_eq!(
6693 target_buffer
6694 .file()
6695 .unwrap()
6696 .as_local()
6697 .unwrap()
6698 .abs_path(cx),
6699 Path::new("/dir/a.rs"),
6700 );
6701 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6702 assert_eq!(
6703 list_worktrees(&project, cx),
6704 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6705 );
6706
6707 drop(definition);
6708 });
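// Dropping the definition releases the temporary worktree.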
6709 cx.read(|cx| {
6710 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6711 });
6712
6713 fn list_worktrees<'a>(
6714 project: &'a ModelHandle<Project>,
6715 cx: &'a AppContext,
6716 ) -> Vec<(&'a Path, bool)> {
6717 project
6718 .read(cx)
6719 .worktrees(cx)
6720 .map(|worktree| {
6721 let worktree = worktree.read(cx);
6722 (
6723 worktree.as_local().unwrap().abs_path().as_ref(),
6724 worktree.is_visible(),
6725 )
6726 })
6727 .collect::<Vec<_>>()
6728 }
6729 }
6730
6731 #[gpui::test]
6732 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6733 let mut language = Language::new(
6734 LanguageConfig {
6735 name: "TypeScript".into(),
6736 path_suffixes: vec!["ts".to_string()],
6737 ..Default::default()
6738 },
6739 Some(tree_sitter_typescript::language_typescript()),
6740 );
6741 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6742
6743 let fs = FakeFs::new(cx.background());
6744 fs.insert_tree(
6745 "/dir",
6746 json!({
6747 "a.ts": "",
6748 }),
6749 )
6750 .await;
6751
6752 let project = Project::test(fs, ["/dir"], cx).await;
6753 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6754 let buffer = project
6755 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6756 .await
6757 .unwrap();
6758
6759 let fake_server = fake_language_servers.next().await.unwrap();
6760
6761 let text = "let a = b.fqn";
6762 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6763 let completions = project.update(cx, |project, cx| {
6764 project.completions(&buffer, text.len(), cx)
6765 });
6766
6767 fake_server
6768 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6769 Ok(Some(lsp::CompletionResponse::Array(vec![
6770 lsp::CompletionItem {
6771 label: "fullyQualifiedName?".into(),
6772 insert_text: Some("fullyQualifiedName".into()),
6773 ..Default::default()
6774 },
6775 ])))
6776 })
6777 .next()
6778 .await;
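// Since the server provided no edit range, the completion should replace the partial word "fqn".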
6779 let completions = completions.await.unwrap();
6780 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6781 assert_eq!(completions.len(), 1);
6782 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6783 assert_eq!(
6784 completions[0].old_range.to_offset(&snapshot),
6785 text.len() - 3..text.len()
6786 );
6787 }
6788
6789 #[gpui::test(iterations = 10)]
6790 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6791 let mut language = Language::new(
6792 LanguageConfig {
6793 name: "TypeScript".into(),
6794 path_suffixes: vec!["ts".to_string()],
6795 ..Default::default()
6796 },
6797 None,
6798 );
6799 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6800
6801 let fs = FakeFs::new(cx.background());
6802 fs.insert_tree(
6803 "/dir",
6804 json!({
6805 "a.ts": "a",
6806 }),
6807 )
6808 .await;
6809
6810 let project = Project::test(fs, ["/dir"], cx).await;
6811 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6812 let buffer = project
6813 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6814 .await
6815 .unwrap();
6816
6817 let fake_server = fake_language_servers.next().await.unwrap();
6818
6819 // The language server returns code actions that contain commands rather than edits.
6820 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6821 fake_server
6822 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6823 Ok(Some(vec![
6824 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6825 title: "The code action".into(),
6826 command: Some(lsp::Command {
6827 title: "The command".into(),
6828 command: "_the/command".into(),
6829 arguments: Some(vec![json!("the-argument")]),
6830 }),
6831 ..Default::default()
6832 }),
6833 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6834 title: "two".into(),
6835 ..Default::default()
6836 }),
6837 ]))
6838 })
6839 .next()
6840 .await;
6841
6842 let action = actions.await.unwrap()[0].clone();
6843 let apply = project.update(cx, |project, cx| {
6844 project.apply_code_action(buffer.clone(), action, true, cx)
6845 });
6846
6847 // Resolving the code action does not populate its edits. In the absence of
6848 // edits, we must execute the given command.
6849 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6850 |action, _| async move { Ok(action) },
6851 );
6852
6853 // While executing the command, the language server sends the editor
6854 // a `workspace/applyEdit` request.
6855 fake_server
6856 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6857 let fake = fake_server.clone();
6858 move |params, _| {
6859 assert_eq!(params.command, "_the/command");
6860 let fake = fake.clone();
6861 async move {
6862 fake.server
6863 .request::<lsp::request::ApplyWorkspaceEdit>(
6864 lsp::ApplyWorkspaceEditParams {
6865 label: None,
6866 edit: lsp::WorkspaceEdit {
6867 changes: Some(
6868 [(
6869 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6870 vec![lsp::TextEdit {
6871 range: lsp::Range::new(
6872 lsp::Position::new(0, 0),
6873 lsp::Position::new(0, 0),
6874 ),
6875 new_text: "X".into(),
6876 }],
6877 )]
6878 .into_iter()
6879 .collect(),
6880 ),
6881 ..Default::default()
6882 },
6883 },
6884 )
6885 .await
6886 .unwrap();
6887 Ok(Some(json!(null)))
6888 }
6889 }
6890 })
6891 .next()
6892 .await;
6893
6894 // Applying the code action returns a project transaction containing the edits
6895 // sent by the language server in its `workspace/applyEdit` request.
6896 let transaction = apply.await.unwrap();
6897 assert!(transaction.0.contains_key(&buffer));
6898 buffer.update(cx, |buffer, cx| {
6899 assert_eq!(buffer.text(), "Xa");
6900 buffer.undo(cx);
6901 assert_eq!(buffer.text(), "a");
6902 });
6903 }
6904
6905 #[gpui::test]
6906 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6907 let fs = FakeFs::new(cx.background());
6908 fs.insert_tree(
6909 "/dir",
6910 json!({
6911 "file1": "the old contents",
6912 }),
6913 )
6914 .await;
6915
6916 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6917 let buffer = project
6918 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6919 .await
6920 .unwrap();
6921 buffer
6922 .update(cx, |buffer, cx| {
6923 assert_eq!(buffer.text(), "the old contents");
6924 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6925 buffer.save(cx)
6926 })
6927 .await
6928 .unwrap();
6929
6930 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6931 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6932 }
6933
6934 #[gpui::test]
6935 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6936 let fs = FakeFs::new(cx.background());
6937 fs.insert_tree(
6938 "/dir",
6939 json!({
6940 "file1": "the old contents",
6941 }),
6942 )
6943 .await;
6944
6945 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6946 let buffer = project
6947 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6948 .await
6949 .unwrap();
6950 buffer
6951 .update(cx, |buffer, cx| {
6952 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6953 buffer.save(cx)
6954 })
6955 .await
6956 .unwrap();
6957
6958 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6959 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6960 }
6961
6962 #[gpui::test]
6963 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6964 let fs = FakeFs::new(cx.background());
6965 fs.insert_tree("/dir", json!({})).await;
6966
6967 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6968 let buffer = project.update(cx, |project, cx| {
6969 project.create_buffer("", None, cx).unwrap()
6970 });
6971 buffer.update(cx, |buffer, cx| {
6972 buffer.edit([(0..0, "abc")], cx);
6973 assert!(buffer.is_dirty());
6974 assert!(!buffer.has_conflict());
6975 });
6976 project
6977 .update(cx, |project, cx| {
6978 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6979 })
6980 .await
6981 .unwrap();
6982 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6983 buffer.read_with(cx, |buffer, cx| {
6984 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6985 assert!(!buffer.is_dirty());
6986 assert!(!buffer.has_conflict());
6987 });
6988
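// Opening the newly-saved path should return the same buffer.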
6989 let opened_buffer = project
6990 .update(cx, |project, cx| {
6991 project.open_local_buffer("/dir/file1", cx)
6992 })
6993 .await
6994 .unwrap();
6995 assert_eq!(opened_buffer, buffer);
6996 }
6997
6998 #[gpui::test(retries = 5)]
6999 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7000 let dir = temp_tree(json!({
7001 "a": {
7002 "file1": "",
7003 "file2": "",
7004 "file3": "",
7005 },
7006 "b": {
7007 "c": {
7008 "file4": "",
7009 "file5": "",
7010 }
7011 }
7012 }));
7013
7014 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7015 let rpc = project.read_with(cx, |p, _| p.client.clone());
7016
7017 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7018 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7019 async move { buffer.await.unwrap() }
7020 };
7021 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7022 project.read_with(cx, |project, cx| {
7023 let tree = project.worktrees(cx).next().unwrap();
7024 tree.read(cx)
7025 .entry_for_path(path)
7026 .expect(&format!("no entry for path {}", path))
7027 .id
7028 })
7029 };
7030
7031 let buffer2 = buffer_for_path("a/file2", cx).await;
7032 let buffer3 = buffer_for_path("a/file3", cx).await;
7033 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7034 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7035
7036 let file2_id = id_for_path("a/file2", &cx);
7037 let file3_id = id_for_path("a/file3", &cx);
7038 let file4_id = id_for_path("b/c/file4", &cx);
7039
7040 // Create a remote copy of this worktree.
7041 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7042 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7043 let (remote, load_task) = cx.update(|cx| {
7044 Worktree::remote(
7045 1,
7046 1,
7047 initial_snapshot.to_proto(&Default::default(), true),
7048 rpc.clone(),
7049 cx,
7050 )
7051 });
7053 load_task.await;
7054
7055 cx.read(|cx| {
7056 assert!(!buffer2.read(cx).is_dirty());
7057 assert!(!buffer3.read(cx).is_dirty());
7058 assert!(!buffer4.read(cx).is_dirty());
7059 assert!(!buffer5.read(cx).is_dirty());
7060 });
7061
7062 // Rename and delete files and directories.
7063 tree.flush_fs_events(&cx).await;
7064 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7065 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7066 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7067 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7068 tree.flush_fs_events(&cx).await;
7069
7070 let expected_paths = vec![
7071 "a",
7072 "a/file1",
7073 "a/file2.new",
7074 "b",
7075 "d",
7076 "d/file3",
7077 "d/file4",
7078 ];
7079
7080 cx.read(|app| {
7081 assert_eq!(
7082 tree.read(app)
7083 .paths()
7084 .map(|p| p.to_str().unwrap())
7085 .collect::<Vec<_>>(),
7086 expected_paths
7087 );
7088
7089 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7090 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7091 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7092
7093 assert_eq!(
7094 buffer2.read(app).file().unwrap().path().as_ref(),
7095 Path::new("a/file2.new")
7096 );
7097 assert_eq!(
7098 buffer3.read(app).file().unwrap().path().as_ref(),
7099 Path::new("d/file3")
7100 );
7101 assert_eq!(
7102 buffer4.read(app).file().unwrap().path().as_ref(),
7103 Path::new("d/file4")
7104 );
7105 assert_eq!(
7106 buffer5.read(app).file().unwrap().path().as_ref(),
7107 Path::new("b/c/file5")
7108 );
7109
7110 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7111 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7112 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7113 assert!(buffer5.read(app).file().unwrap().is_deleted());
7114 });
7115
7116 // Update the remote worktree. Check that it becomes consistent with the
7117 // local worktree.
7118 remote.update(cx, |remote, cx| {
7119 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7120 &initial_snapshot,
7121 1,
7122 1,
7123 true,
7124 );
7125 remote
7126 .as_remote_mut()
7127 .unwrap()
7128 .snapshot
7129 .apply_remote_update(update_message)
7130 .unwrap();
7131
7132 assert_eq!(
7133 remote
7134 .paths()
7135 .map(|p| p.to_str().unwrap())
7136 .collect::<Vec<_>>(),
7137 expected_paths
7138 );
7139 });
7140 }
7141
7142 #[gpui::test]
7143 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7144 let fs = FakeFs::new(cx.background());
7145 fs.insert_tree(
7146 "/dir",
7147 json!({
7148 "a.txt": "a-contents",
7149 "b.txt": "b-contents",
7150 }),
7151 )
7152 .await;
7153
7154 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7155
7156 // Spawn multiple tasks to open paths, repeating some paths.
7157 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7158 (
7159 p.open_local_buffer("/dir/a.txt", cx),
7160 p.open_local_buffer("/dir/b.txt", cx),
7161 p.open_local_buffer("/dir/a.txt", cx),
7162 )
7163 });
7164
7165 let buffer_a_1 = buffer_a_1.await.unwrap();
7166 let buffer_a_2 = buffer_a_2.await.unwrap();
7167 let buffer_b = buffer_b.await.unwrap();
7168 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7169 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7170
7171 // There is only one buffer per path.
7172 let buffer_a_id = buffer_a_1.id();
7173 assert_eq!(buffer_a_2.id(), buffer_a_id);
7174
7175 // Open the same path again while it is still open.
7176 drop(buffer_a_1);
7177 let buffer_a_3 = project
7178 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7179 .await
7180 .unwrap();
7181
7182 // There's still only one buffer per path.
7183 assert_eq!(buffer_a_3.id(), buffer_a_id);
7184 }
7185
7186 #[gpui::test]
7187 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7188 let fs = FakeFs::new(cx.background());
7189 fs.insert_tree(
7190 "/dir",
7191 json!({
7192 "file1": "abc",
7193 "file2": "def",
7194 "file3": "ghi",
7195 }),
7196 )
7197 .await;
7198
7199 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7200
7201 let buffer1 = project
7202 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7203 .await
7204 .unwrap();
7205 let events = Rc::new(RefCell::new(Vec::new()));
7206
7207 // initially, the buffer isn't dirty.
7208 buffer1.update(cx, |buffer, cx| {
7209 cx.subscribe(&buffer1, {
7210 let events = events.clone();
7211 move |_, _, event, _| match event {
7212 BufferEvent::Operation(_) => {}
7213 _ => events.borrow_mut().push(event.clone()),
7214 }
7215 })
7216 .detach();
7217
7218 assert!(!buffer.is_dirty());
7219 assert!(events.borrow().is_empty());
7220
7221 buffer.edit([(1..2, "")], cx);
7222 });
7223
7224 // after the first edit, the buffer is dirty, and emits a dirtied event.
7225 buffer1.update(cx, |buffer, cx| {
7226 assert!(buffer.text() == "ac");
7227 assert!(buffer.is_dirty());
7228 assert_eq!(
7229 *events.borrow(),
7230 &[language::Event::Edited, language::Event::Dirtied]
7231 );
7232 events.borrow_mut().clear();
7233 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7234 });
7235
7236 // after saving, the buffer is not dirty, and emits a saved event.
7237 buffer1.update(cx, |buffer, cx| {
7238 assert!(!buffer.is_dirty());
7239 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7240 events.borrow_mut().clear();
7241
7242 buffer.edit([(1..1, "B")], cx);
7243 buffer.edit([(2..2, "D")], cx);
7244 });
7245
7246 // after editing again, the buffer is dirty, and emits another dirty event.
7247 buffer1.update(cx, |buffer, cx| {
7248 assert!(buffer.text() == "aBDc");
7249 assert!(buffer.is_dirty());
7250 assert_eq!(
7251 *events.borrow(),
7252 &[
7253 language::Event::Edited,
7254 language::Event::Dirtied,
7255 language::Event::Edited,
7256 ],
7257 );
7258 events.borrow_mut().clear();
7259
7260 // TODO - currently, after restoring the buffer to its
7261 // previously-saved state, the buffer is still considered dirty.
7262 buffer.edit([(1..3, "")], cx);
7263 assert!(buffer.text() == "ac");
7264 assert!(buffer.is_dirty());
7265 });
7266
7267 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7268
7269 // When a file is deleted, the buffer is considered dirty.
7270 let events = Rc::new(RefCell::new(Vec::new()));
7271 let buffer2 = project
7272 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7273 .await
7274 .unwrap();
7275 buffer2.update(cx, |_, cx| {
7276 cx.subscribe(&buffer2, {
7277 let events = events.clone();
7278 move |_, _, event, _| events.borrow_mut().push(event.clone())
7279 })
7280 .detach();
7281 });
7282
7283 fs.remove_file("/dir/file2".as_ref(), Default::default())
7284 .await
7285 .unwrap();
7286 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7287 assert_eq!(
7288 *events.borrow(),
7289 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7290 );
7291
7292 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7293 let events = Rc::new(RefCell::new(Vec::new()));
7294 let buffer3 = project
7295 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7296 .await
7297 .unwrap();
7298 buffer3.update(cx, |_, cx| {
7299 cx.subscribe(&buffer3, {
7300 let events = events.clone();
7301 move |_, _, event, _| events.borrow_mut().push(event.clone())
7302 })
7303 .detach();
7304 });
7305
7306 buffer3.update(cx, |buffer, cx| {
7307 buffer.edit([(0..0, "x")], cx);
7308 });
7309 events.borrow_mut().clear();
7310 fs.remove_file("/dir/file3".as_ref(), Default::default())
7311 .await
7312 .unwrap();
7313 buffer3
7314 .condition(&cx, |_, _| !events.borrow().is_empty())
7315 .await;
7316 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7317 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7318 }
7319
7320 #[gpui::test]
7321 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7322 let initial_contents = "aaa\nbbbbb\nc\n";
7323 let fs = FakeFs::new(cx.background());
7324 fs.insert_tree(
7325 "/dir",
7326 json!({
7327 "the-file": initial_contents,
7328 }),
7329 )
7330 .await;
7331 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7332 let buffer = project
7333 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7334 .await
7335 .unwrap();
7336
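// Place an anchor on each of the first three lines so we can check where they land after the reload.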
7337 let anchors = (0..3)
7338 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7339 .collect::<Vec<_>>();
7340
7341 // Change the file on disk, adding two new lines of text, and removing
7342 // one line.
7343 buffer.read_with(cx, |buffer, _| {
7344 assert!(!buffer.is_dirty());
7345 assert!(!buffer.has_conflict());
7346 });
7347 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7348 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7349 .await
7350 .unwrap();
7351
7352 // Because the buffer was not modified, it is reloaded from disk. Its
7353 // contents are edited according to the diff between the old and new
7354 // file contents.
7355 buffer
7356 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7357 .await;
7358
7359 buffer.update(cx, |buffer, _| {
7360 assert_eq!(buffer.text(), new_contents);
7361 assert!(!buffer.is_dirty());
7362 assert!(!buffer.has_conflict());
7363
7364 let anchor_positions = anchors
7365 .iter()
7366 .map(|anchor| anchor.to_point(&*buffer))
7367 .collect::<Vec<_>>();
7368 assert_eq!(
7369 anchor_positions,
7370 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7371 );
7372 });
7373
7374 // Modify the buffer
7375 buffer.update(cx, |buffer, cx| {
7376 buffer.edit([(0..0, " ")], cx);
7377 assert!(buffer.is_dirty());
7378 assert!(!buffer.has_conflict());
7379 });
7380
7381 // Change the file on disk again, adding blank lines to the beginning.
7382 fs.save(
7383 "/dir/the-file".as_ref(),
7384 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7385 )
7386 .await
7387 .unwrap();
7388
7389 // Because the buffer is modified, it doesn't reload from disk, but is
7390 // marked as having a conflict.
7391 buffer
7392 .condition(&cx, |buffer, _| buffer.has_conflict())
7393 .await;
7394 }
7395
7396 #[gpui::test]
7397 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7398 cx.foreground().forbid_parking();
7399
7400 let fs = FakeFs::new(cx.background());
7401 fs.insert_tree(
7402 "/the-dir",
7403 json!({
7404 "a.rs": "
7405 fn foo(mut v: Vec<usize>) {
7406 for x in &v {
7407 v.push(1);
7408 }
7409 }
7410 "
7411 .unindent(),
7412 }),
7413 )
7414 .await;
7415
7416 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7417 let buffer = project
7418 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7419 .await
7420 .unwrap();
7421
7422 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7423 let message = lsp::PublishDiagnosticsParams {
7424 uri: buffer_uri.clone(),
7425 diagnostics: vec![
7426 lsp::Diagnostic {
7427 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7428 severity: Some(DiagnosticSeverity::WARNING),
7429 message: "error 1".to_string(),
7430 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7431 location: lsp::Location {
7432 uri: buffer_uri.clone(),
7433 range: lsp::Range::new(
7434 lsp::Position::new(1, 8),
7435 lsp::Position::new(1, 9),
7436 ),
7437 },
7438 message: "error 1 hint 1".to_string(),
7439 }]),
7440 ..Default::default()
7441 },
7442 lsp::Diagnostic {
7443 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7444 severity: Some(DiagnosticSeverity::HINT),
7445 message: "error 1 hint 1".to_string(),
7446 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7447 location: lsp::Location {
7448 uri: buffer_uri.clone(),
7449 range: lsp::Range::new(
7450 lsp::Position::new(1, 8),
7451 lsp::Position::new(1, 9),
7452 ),
7453 },
7454 message: "original diagnostic".to_string(),
7455 }]),
7456 ..Default::default()
7457 },
7458 lsp::Diagnostic {
7459 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7460 severity: Some(DiagnosticSeverity::ERROR),
7461 message: "error 2".to_string(),
7462 related_information: Some(vec![
7463 lsp::DiagnosticRelatedInformation {
7464 location: lsp::Location {
7465 uri: buffer_uri.clone(),
7466 range: lsp::Range::new(
7467 lsp::Position::new(1, 13),
7468 lsp::Position::new(1, 15),
7469 ),
7470 },
7471 message: "error 2 hint 1".to_string(),
7472 },
7473 lsp::DiagnosticRelatedInformation {
7474 location: lsp::Location {
7475 uri: buffer_uri.clone(),
7476 range: lsp::Range::new(
7477 lsp::Position::new(1, 13),
7478 lsp::Position::new(1, 15),
7479 ),
7480 },
7481 message: "error 2 hint 2".to_string(),
7482 },
7483 ]),
7484 ..Default::default()
7485 },
7486 lsp::Diagnostic {
7487 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7488 severity: Some(DiagnosticSeverity::HINT),
7489 message: "error 2 hint 1".to_string(),
7490 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7491 location: lsp::Location {
7492 uri: buffer_uri.clone(),
7493 range: lsp::Range::new(
7494 lsp::Position::new(2, 8),
7495 lsp::Position::new(2, 17),
7496 ),
7497 },
7498 message: "original diagnostic".to_string(),
7499 }]),
7500 ..Default::default()
7501 },
7502 lsp::Diagnostic {
7503 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7504 severity: Some(DiagnosticSeverity::HINT),
7505 message: "error 2 hint 2".to_string(),
7506 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7507 location: lsp::Location {
7508 uri: buffer_uri.clone(),
7509 range: lsp::Range::new(
7510 lsp::Position::new(2, 8),
7511 lsp::Position::new(2, 17),
7512 ),
7513 },
7514 message: "original diagnostic".to_string(),
7515 }]),
7516 ..Default::default()
7517 },
7518 ],
7519 version: None,
7520 };
7521
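// Each primary diagnostic and its related-information hints should end up in the same group.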
7522 project
7523 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7524 .unwrap();
7525 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7526
7527 assert_eq!(
7528 buffer
7529 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7530 .collect::<Vec<_>>(),
7531 &[
7532 DiagnosticEntry {
7533 range: Point::new(1, 8)..Point::new(1, 9),
7534 diagnostic: Diagnostic {
7535 severity: DiagnosticSeverity::WARNING,
7536 message: "error 1".to_string(),
7537 group_id: 0,
7538 is_primary: true,
7539 ..Default::default()
7540 }
7541 },
7542 DiagnosticEntry {
7543 range: Point::new(1, 8)..Point::new(1, 9),
7544 diagnostic: Diagnostic {
7545 severity: DiagnosticSeverity::HINT,
7546 message: "error 1 hint 1".to_string(),
7547 group_id: 0,
7548 is_primary: false,
7549 ..Default::default()
7550 }
7551 },
7552 DiagnosticEntry {
7553 range: Point::new(1, 13)..Point::new(1, 15),
7554 diagnostic: Diagnostic {
7555 severity: DiagnosticSeverity::HINT,
7556 message: "error 2 hint 1".to_string(),
7557 group_id: 1,
7558 is_primary: false,
7559 ..Default::default()
7560 }
7561 },
7562 DiagnosticEntry {
7563 range: Point::new(1, 13)..Point::new(1, 15),
7564 diagnostic: Diagnostic {
7565 severity: DiagnosticSeverity::HINT,
7566 message: "error 2 hint 2".to_string(),
7567 group_id: 1,
7568 is_primary: false,
7569 ..Default::default()
7570 }
7571 },
7572 DiagnosticEntry {
7573 range: Point::new(2, 8)..Point::new(2, 17),
7574 diagnostic: Diagnostic {
7575 severity: DiagnosticSeverity::ERROR,
7576 message: "error 2".to_string(),
7577 group_id: 1,
7578 is_primary: true,
7579 ..Default::default()
7580 }
7581 }
7582 ]
7583 );
7584
7585 assert_eq!(
7586 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7587 &[
7588 DiagnosticEntry {
7589 range: Point::new(1, 8)..Point::new(1, 9),
7590 diagnostic: Diagnostic {
7591 severity: DiagnosticSeverity::WARNING,
7592 message: "error 1".to_string(),
7593 group_id: 0,
7594 is_primary: true,
7595 ..Default::default()
7596 }
7597 },
7598 DiagnosticEntry {
7599 range: Point::new(1, 8)..Point::new(1, 9),
7600 diagnostic: Diagnostic {
7601 severity: DiagnosticSeverity::HINT,
7602 message: "error 1 hint 1".to_string(),
7603 group_id: 0,
7604 is_primary: false,
7605 ..Default::default()
7606 }
7607 },
7608 ]
7609 );
7610 assert_eq!(
7611 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7612 &[
7613 DiagnosticEntry {
7614 range: Point::new(1, 13)..Point::new(1, 15),
7615 diagnostic: Diagnostic {
7616 severity: DiagnosticSeverity::HINT,
7617 message: "error 2 hint 1".to_string(),
7618 group_id: 1,
7619 is_primary: false,
7620 ..Default::default()
7621 }
7622 },
7623 DiagnosticEntry {
7624 range: Point::new(1, 13)..Point::new(1, 15),
7625 diagnostic: Diagnostic {
7626 severity: DiagnosticSeverity::HINT,
7627 message: "error 2 hint 2".to_string(),
7628 group_id: 1,
7629 is_primary: false,
7630 ..Default::default()
7631 }
7632 },
7633 DiagnosticEntry {
7634 range: Point::new(2, 8)..Point::new(2, 17),
7635 diagnostic: Diagnostic {
7636 severity: DiagnosticSeverity::ERROR,
7637 message: "error 2".to_string(),
7638 group_id: 1,
7639 is_primary: true,
7640 ..Default::default()
7641 }
7642 }
7643 ]
7644 );
7645 }
7646
7647 #[gpui::test]
7648 async fn test_rename(cx: &mut gpui::TestAppContext) {
7649 cx.foreground().forbid_parking();
7650
7651 let mut language = Language::new(
7652 LanguageConfig {
7653 name: "Rust".into(),
7654 path_suffixes: vec!["rs".to_string()],
7655 ..Default::default()
7656 },
7657 Some(tree_sitter_rust::language()),
7658 );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

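        // `prepare_rename` should hit the fake server and convert the returned LSP
        // range back into buffer offsets covering `ONE`.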
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

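        // `perform_rename` applies the server's WorkspaceEdit, which touches both
        // one.rs and the not-yet-opened two.rs.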
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

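    // Verifies project-wide text search: results come back per buffer, and open
    // buffers are searched in memory, so unsaved edits are reflected.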
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

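        // Open four.rs and edit it in memory; the dirty buffer contents, not the
        // file on disk, should be what the next search matches against.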
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

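        // Helper: run a project search and flatten the results into a map from
        // buffer path to match offsets so the assertions above stay readable.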
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}