1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::stream::Stream;
29use postage::watch;
30use rand::prelude::*;
31use search::SearchQuery;
32use serde::Serialize;
33use settings::Settings;
34use sha2::{Digest, Sha256};
35use similar::{ChangeTag, TextDiff};
36use std::{
37 cell::RefCell,
38 cmp::{self, Ordering},
39 convert::TryInto,
40 ffi::OsString,
41 hash::Hash,
42 mem,
43 ops::Range,
44 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
45 path::{Component, Path, PathBuf},
46 rc::Rc,
47 sync::{
48 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
49 Arc,
50 },
51 time::Instant,
52};
53use thiserror::Error;
54use util::{post_inc, ResultExt, TryFutureExt as _};
55
56pub use fs::*;
57pub use worktree::*;
58
/// A model that may be backed by an entry in one of a project's worktrees.
pub trait Item: Entity {
    /// Returns the id of the project entry backing this item, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
62
/// Registry of every `Project` in the application, held weakly so that
/// dropping a project's last strong handle is not prevented by the store.
#[derive(Default)]
pub struct ProjectStore {
    projects: Vec<WeakModelHandle<Project>>,
}
67
/// A collection of worktrees plus the state needed to work on and
/// collaborate over them: open buffers, language servers, and (when the
/// project is shared or joined) remote collaborators.
pub struct Project {
    // Handles are weak when the project is not shared and the worktree is
    // not visible (see `unshared`), so dead ones must be filtered out.
    worktrees: Vec<WorktreeHandle>,
    // The project entry most recently marked active, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Per-server-id status, also populated from the host when joining remotely.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Counter from which `ProjectEntryId`s are allocated.
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is hosted locally or joined from a remote host.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Signalled whenever a buffer is opened; cleared when unshared.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids shared per peer — presumably what each guest has open; TODO confirm.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Receivers that resolve when an in-flight buffer load for a path completes.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // In-flight local worktree loads, keyed by path (presumably absolute; verify against caller).
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // Every buffer that has been opened, keyed by remote buffer id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Retained buffer snapshots keyed by buffer id; the i32 looks like a
    // version number — TODO confirm against the code that pushes entries.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random per-project value generated at construction; its consumer is
    // not visible in this chunk.
    nonce: u128,
}
101
/// Reasons why joining a remote project can fail; see [`Project::remote`].
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    /// Any other failure (connection, protocol, etc.).
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}
113
/// How the project retains an opened buffer.
enum OpenBuffer {
    /// Buffer is kept alive by the project (used while shared; see `share`).
    Strong(ModelHandle<Buffer>),
    /// Buffer is only weakly retained and may have been dropped.
    Weak(WeakModelHandle<Buffer>),
    /// Buffer is still loading; operations received in the meantime are
    /// queued here — presumably applied once the load completes; TODO confirm.
    Loading(Vec<Operation>),
}
119
/// How the project retains a worktree: strongly while shared or visible,
/// weakly otherwise (see `share` / `unshared`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
124
/// Client-side state distinguishing a locally-hosted project from one
/// joined from a remote host.
enum ProjectClientState {
    Local {
        /// Whether the project is currently shared with guests.
        is_shared: bool,
        /// Server-assigned project id, set by `register`, cleared by `unregister`.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Whether the project is public; drives the register/unregister task.
        public_tx: watch::Sender<bool>,
        public_rx: watch::Receiver<bool>,
        /// Background task that (un)registers the project as connection
        /// status and publicness change (spawned in `Project::local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing (see `removed_from_project`).
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Background task that flags the project once the connection drops.
        _detect_unshare_task: Task<Option<()>>,
    },
}
141
/// Another participant collaborating on this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
148
/// Events emitted by a [`Project`] for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    /// The server-assigned project id changed (see `register`).
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}
163
/// Serializable status for a single language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    /// In-progress work items keyed by progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    /// Signed so it can be decremented below the point where it was
    /// first observed — TODO confirm against the code that updates it.
    pub pending_diagnostic_updates: isize,
}
170
/// Progress reported by a language server for one work item.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    /// Not serializable (`Instant` is opaque), hence skipped.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
178
/// A path to a file or directory, scoped to a particular worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root — presumably; TODO confirm.
    pub path: Arc<Path>,
}
184
/// Counts of primary error and warning diagnostics (see `DiagnosticSummary::new`).
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
190
/// A range within a specific buffer.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
196
/// An LSP document highlight: a range plus its highlight kind (read/write/text).
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
202
/// A project-wide symbol, as reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    /// Worktree whose language server produced the symbol.
    pub source_worktree_id: WorktreeId,
    /// Worktree containing the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    /// 32-byte digest — likely SHA-256 (`sha2` is imported); TODO confirm
    /// against the code that computes it.
    pub signature: [u8; 32],
}
215
/// A set of buffer transactions, keyed by buffer, applied as one logical edit.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
218
219impl DiagnosticSummary {
220 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
221 let mut this = Self {
222 error_count: 0,
223 warning_count: 0,
224 };
225
226 for entry in diagnostics {
227 if entry.diagnostic.is_primary {
228 match entry.diagnostic.severity {
229 DiagnosticSeverity::ERROR => this.error_count += 1,
230 DiagnosticSeverity::WARNING => this.warning_count += 1,
231 _ => {}
232 }
233 }
234 }
235
236 this
237 }
238
239 pub fn is_empty(&self) -> bool {
240 self.error_count == 0 && self.warning_count == 0
241 }
242
243 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
244 proto::DiagnosticSummary {
245 path: path.to_string_lossy().to_string(),
246 error_count: self.error_count as u32,
247 warning_count: self.warning_count as u32,
248 }
249 }
250}
251
/// Identifier of an entry (file or directory) within a project's worktrees.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel id larger than any id allocated by [`Self::new`].
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts the id into its wire representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw integer value of the id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
274
275impl Project {
    /// Registers all of the RPC handlers through which remote peers drive
    /// this project. Must be called once at startup, before any project can
    /// receive messages.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages (no response sent back to the peer).
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share one handler, instantiated per request type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
314
    /// Creates a project hosted on this machine.
    ///
    /// Spawns a background task that registers the project with the server
    /// whenever the client is connected *and* the project is public, and
    /// unregisters it otherwise; the task re-evaluates on every change of
    /// either the connection status or the `public` flag.
    pub fn local(
        public: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (public_tx, public_rx) = watch::channel_with(public);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let public_rx = public_rx.clone();
                move |this, mut cx| async move {
                    // Wake on any change to connection status or publicness;
                    // the values themselves are re-read from the watches below.
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(public_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        // Stop silently once the project has been dropped.
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *public_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            // Track this project in the global store.
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    public_tx,
                    public_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
389
    /// Joins the project identified by `remote_id`, hosted by another client.
    ///
    /// Connects and authenticates, sends a join request, builds remote
    /// worktrees from the host's response, and populates the collaborator
    /// list. A background task marks the project read-only if the connection
    /// ever drops (see `removed_from_project`).
    ///
    /// # Errors
    /// Returns a [`JoinProjectError`] if the host declines, has closed the
    /// project, has gone offline, or on any connection/protocol failure.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // Unwrap the host's accept/decline answer.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Build remote worktrees; their load tasks run detached.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch the users for all collaborators before building the map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
525
    /// Builds a public local project over `fs` for tests, creating one
    /// worktree per entry in `root_paths` and waiting for each initial scan
    /// to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        // Fake HTTP client: all requests answer 404.
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::default());
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree's initial scan has finished.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
552
553 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
554 self.opened_buffers
555 .get(&remote_id)
556 .and_then(|buffer| buffer.upgrade(cx))
557 }
558
    /// The registry of languages available to this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
562
563 pub fn client(&self) -> Arc<Client> {
564 self.client.clone()
565 }
566
    /// Returns a handle to the store of known users.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }
570
    /// Test-only sanity checks: local projects must have unique worktree
    /// root paths; remote projects must have no deferred buffer operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            // No two worktrees may share the same absolute root path.
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            // Every live buffer must have applied all received operations.
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
604
605 #[cfg(any(test, feature = "test-support"))]
606 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
607 let path = path.into();
608 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
609 self.opened_buffers.iter().any(|(_, buffer)| {
610 if let Some(buffer) = buffer.upgrade(cx) {
611 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
612 if file.worktree == worktree && file.path() == &path.path {
613 return true;
614 }
615 }
616 }
617 false
618 })
619 } else {
620 false
621 }
622 }
623
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
627
628 pub fn set_public(&mut self, is_public: bool, cx: &mut ModelContext<Self>) {
629 if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
630 *public_tx.borrow_mut() = is_public;
631 self.metadata_changed(cx);
632 }
633 }
634
635 pub fn is_public(&self) -> bool {
636 match &self.client_state {
637 ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
638 ProjectClientState::Remote { .. } => true,
639 }
640 }
641
    /// Stops sharing and unregisters this local project from the server,
    /// clearing the remote id and all subscriptions. Resolves immediately
    /// for remote projects or when no remote id was assigned.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        // Sharing must be torn down before the project is unregistered.
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;
                    // Clear local state regardless of the request's outcome.
                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.subscriptions.clear();
                        this.metadata_changed(cx);
                    });
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }
666
    /// Registers this local project with the server, storing the assigned
    /// remote id and subscribing to messages addressed to it. Resolves
    /// immediately if a remote id is already assigned.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }
690
691 pub fn remote_id(&self) -> Option<u64> {
692 match &self.client_state {
693 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
694 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
695 }
696 }
697
    /// Resolves with the project's remote id, waiting (for local projects)
    /// until registration assigns one. Remote projects resolve immediately.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Exactly one of `id` / `watch` is set, so unwrap is safe here.
            let mut watch = watch.unwrap();
            loop {
                // Re-check after every change notification until a value appears.
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
720
721 pub fn shared_remote_id(&self) -> Option<u64> {
722 match &self.client_state {
723 ProjectClientState::Local {
724 remote_id_rx,
725 is_shared,
726 ..
727 } => {
728 if *is_shared {
729 *remote_id_rx.borrow()
730 } else {
731 None
732 }
733 }
734 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
735 }
736 }
737
738 pub fn replica_id(&self) -> ReplicaId {
739 match &self.client_state {
740 ProjectClientState::Local { .. } => 0,
741 ProjectClientState::Remote { replica_id, .. } => *replica_id,
742 }
743 }
744
    /// Notifies observers (and the project store) of a metadata change, and
    /// pushes the updated worktree metadata to the server when this local
    /// project is registered.
    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        cx.notify();
        self.project_store.update(cx, |_, cx| cx.notify());

        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if let Some(project_id) = *remote_id_rx.borrow() {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        // Only worktrees that are still alive are reported.
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }
        }
    }
768
    /// The current collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
772
773 pub fn worktrees<'a>(
774 &'a self,
775 cx: &'a AppContext,
776 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
777 self.worktrees
778 .iter()
779 .filter_map(move |worktree| worktree.upgrade(cx))
780 }
781
782 pub fn visible_worktrees<'a>(
783 &'a self,
784 cx: &'a AppContext,
785 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
786 self.worktrees.iter().filter_map(|worktree| {
787 worktree.upgrade(cx).and_then(|worktree| {
788 if worktree.read(cx).is_visible() {
789 Some(worktree)
790 } else {
791 None
792 }
793 })
794 })
795 }
796
    /// The root names of all visible worktrees.
    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }
801
802 pub fn worktree_for_id(
803 &self,
804 id: WorktreeId,
805 cx: &AppContext,
806 ) -> Option<ModelHandle<Worktree>> {
807 self.worktrees(cx)
808 .find(|worktree| worktree.read(cx).id() == id)
809 }
810
811 pub fn worktree_for_entry(
812 &self,
813 entry_id: ProjectEntryId,
814 cx: &AppContext,
815 ) -> Option<ModelHandle<Worktree>> {
816 self.worktrees(cx)
817 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
818 }
819
820 pub fn worktree_id_for_entry(
821 &self,
822 entry_id: ProjectEntryId,
823 cx: &AppContext,
824 ) -> Option<WorktreeId> {
825 self.worktree_for_entry(entry_id, cx)
826 .map(|worktree| worktree.read(cx).id())
827 }
828
829 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
830 paths.iter().all(|path| self.contains_path(&path, cx))
831 }
832
833 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
834 for worktree in self.worktrees(cx) {
835 let worktree = worktree.read(cx).as_local();
836 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
837 return true;
838 }
839 }
840 false
841 }
842
    /// Creates a file or directory at `project_path`. Locally this delegates
    /// to the worktree; remotely it sends a `CreateProjectEntry` request to
    /// the host and inserts the returned entry into the remote worktree.
    /// Returns `None` if no worktree matches the path's worktree id.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            // A remote project always has a remote id, so unwrap is safe.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Mirror the host's change into the local copy of the worktree.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
885
    /// Copies the entry `entry_id` to `new_path`. Locally this delegates to
    /// the worktree; remotely it sends a `CopyProjectEntry` request and
    /// inserts the returned entry. Returns `None` if the entry's worktree
    /// cannot be found (or, locally, if the worktree declines the copy).
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // A remote project always has a remote id, so unwrap is safe.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Mirror the host's change into the local copy of the worktree.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
928
    /// Renames (moves) the entry `entry_id` to `new_path`. Locally this
    /// delegates to the worktree; remotely it sends a `RenameProjectEntry`
    /// request and inserts the returned entry. Returns `None` if the entry's
    /// worktree cannot be found (or, locally, if the worktree declines).
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // A remote project always has a remote id, so unwrap is safe.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Mirror the host's change into the local copy of the worktree.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
971
    /// Deletes the entry `entry_id`. Locally this delegates to the worktree;
    /// remotely it sends a `DeleteProjectEntry` request and removes the
    /// entry from the local copy of the worktree. Returns `None` if the
    /// entry's worktree cannot be found (or, locally, if the worktree declines).
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            // A remote project always has a remote id, so unwrap is safe.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                // Mirror the host's deletion into the local copy of the worktree.
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1004
    /// Starts sharing this registered local project: upgrades all buffer and
    /// worktree handles to strong ones (so guests keep them alive) and
    /// shares each worktree with the server.
    ///
    /// Fails for a remote project or a local project that has not been
    /// registered; resolves immediately if already shared.
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        // Retain all still-alive buffers strongly while the project is shared.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // Loading buffers only exist on remote projects, which can't share.
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        // Likewise retain all still-alive worktrees strongly.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
1065
    /// Stops sharing this local project: clears collaborators and shared
    /// buffers, unshares every worktree (downgrading hidden ones to weak
    /// handles), and downgrades all strong buffer handles. No-op when not
    /// currently shared; logs an error for remote projects.
    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    // Only visible worktrees stay strongly retained once unshared.
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            // Buffers are no longer kept alive on behalf of guests.
            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }
1101
1102 pub fn respond_to_join_request(
1103 &mut self,
1104 requester_id: u64,
1105 allow: bool,
1106 cx: &mut ModelContext<Self>,
1107 ) {
1108 if let Some(project_id) = self.remote_id() {
1109 let share = self.share(cx);
1110 let client = self.client.clone();
1111 cx.foreground()
1112 .spawn(async move {
1113 share.await?;
1114 client.send(proto::RespondToJoinProjectRequest {
1115 requester_id,
1116 project_id,
1117 allow,
1118 })
1119 })
1120 .detach_and_log_err(cx);
1121 }
1122 }
1123
1124 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1125 if let ProjectClientState::Remote {
1126 sharing_has_stopped,
1127 ..
1128 } = &mut self.client_state
1129 {
1130 *sharing_has_stopped = true;
1131 self.collaborators.clear();
1132 cx.notify();
1133 }
1134 }
1135
1136 pub fn is_read_only(&self) -> bool {
1137 match &self.client_state {
1138 ProjectClientState::Local { .. } => false,
1139 ProjectClientState::Remote {
1140 sharing_has_stopped,
1141 ..
1142 } => *sharing_has_stopped,
1143 }
1144 }
1145
1146 pub fn is_local(&self) -> bool {
1147 match &self.client_state {
1148 ProjectClientState::Local { .. } => true,
1149 ProjectClientState::Remote { .. } => false,
1150 }
1151 }
1152
    /// Whether this project mirrors one hosted by another collaborator
    /// (the complement of [`Project::is_local`]).
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
1156
1157 pub fn create_buffer(
1158 &mut self,
1159 text: &str,
1160 language: Option<Arc<Language>>,
1161 cx: &mut ModelContext<Self>,
1162 ) -> Result<ModelHandle<Buffer>> {
1163 if self.is_remote() {
1164 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1165 }
1166
1167 let buffer = cx.add_model(|cx| {
1168 Buffer::new(self.replica_id(), text, cx)
1169 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1170 });
1171 self.register_buffer(&buffer, cx)?;
1172 Ok(buffer)
1173 }
1174
1175 pub fn open_path(
1176 &mut self,
1177 path: impl Into<ProjectPath>,
1178 cx: &mut ModelContext<Self>,
1179 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1180 let task = self.open_buffer(path, cx);
1181 cx.spawn_weak(|_, cx| async move {
1182 let buffer = task.await?;
1183 let project_entry_id = buffer
1184 .read_with(&cx, |buffer, cx| {
1185 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1186 })
1187 .ok_or_else(|| anyhow!("no project entry"))?;
1188 Ok((project_entry_id, buffer.into()))
1189 })
1190 }
1191
1192 pub fn open_local_buffer(
1193 &mut self,
1194 abs_path: impl AsRef<Path>,
1195 cx: &mut ModelContext<Self>,
1196 ) -> Task<Result<ModelHandle<Buffer>>> {
1197 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1198 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1199 } else {
1200 Task::ready(Err(anyhow!("no such path")))
1201 }
1202 }
1203
    /// Opens the buffer at `path`, ensuring each project path maps to at
    /// most one buffer: an already-open buffer is returned directly, and
    /// concurrent opens of the same path await a single in-flight load via a
    /// shared watch channel.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones request the
                // buffer from the host over RPC.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the outcome (success or a shared error) to all
                    // waiters through the watch channel.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait for the watch to hold a result. Errors are re-wrapped because
        // the shared value stores an `Arc`'d error, not `anyhow::Error`.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1264
1265 fn open_local_buffer_internal(
1266 &mut self,
1267 path: &Arc<Path>,
1268 worktree: &ModelHandle<Worktree>,
1269 cx: &mut ModelContext<Self>,
1270 ) -> Task<Result<ModelHandle<Buffer>>> {
1271 let load_buffer = worktree.update(cx, |worktree, cx| {
1272 let worktree = worktree.as_local_mut().unwrap();
1273 worktree.load_buffer(path, cx)
1274 });
1275 cx.spawn(|this, mut cx| async move {
1276 let buffer = load_buffer.await?;
1277 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1278 Ok(buffer)
1279 })
1280 }
1281
1282 fn open_remote_buffer_internal(
1283 &mut self,
1284 path: &Arc<Path>,
1285 worktree: &ModelHandle<Worktree>,
1286 cx: &mut ModelContext<Self>,
1287 ) -> Task<Result<ModelHandle<Buffer>>> {
1288 let rpc = self.client.clone();
1289 let project_id = self.remote_id().unwrap();
1290 let remote_worktree_id = worktree.read(cx).id();
1291 let path = path.clone();
1292 let path_string = path.to_string_lossy().to_string();
1293 cx.spawn(|this, mut cx| async move {
1294 let response = rpc
1295 .request(proto::OpenBufferByPath {
1296 project_id,
1297 worktree_id: remote_worktree_id.to_proto(),
1298 path: path_string,
1299 })
1300 .await?;
1301 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1302 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1303 .await
1304 })
1305 }
1306
    /// Opens a buffer for a file URI produced by a language server (e.g. a
    /// go-to-definition target). If the file lies outside every current
    /// worktree, a new (invisible) worktree is created for it and the
    /// originating language server is associated with that worktree.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains the file: create one rooted at the
                // file itself (not user-visible), so the path relative to
                // the new worktree is empty.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                // Reuse the server that produced this path for the new
                // worktree rather than starting a fresh one.
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1345
1346 pub fn open_buffer_by_id(
1347 &mut self,
1348 id: u64,
1349 cx: &mut ModelContext<Self>,
1350 ) -> Task<Result<ModelHandle<Buffer>>> {
1351 if let Some(buffer) = self.buffer_for_id(id, cx) {
1352 Task::ready(Ok(buffer))
1353 } else if self.is_local() {
1354 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1355 } else if let Some(project_id) = self.remote_id() {
1356 let request = self
1357 .client
1358 .request(proto::OpenBufferById { project_id, id });
1359 cx.spawn(|this, mut cx| async move {
1360 let buffer = request
1361 .await?
1362 .buffer
1363 .ok_or_else(|| anyhow!("invalid buffer"))?;
1364 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1365 .await
1366 })
1367 } else {
1368 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1369 }
1370 }
1371
1372 pub fn save_buffer_as(
1373 &mut self,
1374 buffer: ModelHandle<Buffer>,
1375 abs_path: PathBuf,
1376 cx: &mut ModelContext<Project>,
1377 ) -> Task<Result<()>> {
1378 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1379 let old_path =
1380 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1381 cx.spawn(|this, mut cx| async move {
1382 if let Some(old_path) = old_path {
1383 this.update(&mut cx, |this, cx| {
1384 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1385 });
1386 }
1387 let (worktree, path) = worktree_task.await?;
1388 worktree
1389 .update(&mut cx, |worktree, cx| {
1390 worktree
1391 .as_local_mut()
1392 .unwrap()
1393 .save_buffer_as(buffer.clone(), path, cx)
1394 })
1395 .await?;
1396 this.update(&mut cx, |this, cx| {
1397 this.assign_language_to_buffer(&buffer, cx);
1398 this.register_buffer_with_language_server(&buffer, cx);
1399 });
1400 Ok(())
1401 })
1402 }
1403
1404 pub fn get_open_buffer(
1405 &mut self,
1406 path: &ProjectPath,
1407 cx: &mut ModelContext<Self>,
1408 ) -> Option<ModelHandle<Buffer>> {
1409 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1410 self.opened_buffers.values().find_map(|buffer| {
1411 let buffer = buffer.upgrade(cx)?;
1412 let file = File::from_dyn(buffer.read(cx).file())?;
1413 if file.worktree == worktree && file.path() == &path.path {
1414 Some(buffer)
1415 } else {
1416 None
1417 }
1418 })
1419 }
1420
    /// Registers a newly-created or newly-loaded buffer with the project:
    /// records it in `opened_buffers` (strong when shared/remote, weak
    /// otherwise), replays any operations that arrived while it was loading,
    /// subscribes to its events, and hooks it up to a language server.
    ///
    /// Fails if a live buffer with the same remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Shared/remote projects must keep buffers alive for collaborators;
        // otherwise the buffer's lifetime is governed by its other holders.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations received before the buffer finished loading were
            // queued under `Loading`; apply them now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dropped weak entry may be overwritten; a live one is a
            // duplicate registration. (`Err(…)?` is used as an early return.)
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer is dropped, tell its language server the document
        // was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1478
1479 fn register_buffer_with_language_server(
1480 &mut self,
1481 buffer_handle: &ModelHandle<Buffer>,
1482 cx: &mut ModelContext<Self>,
1483 ) {
1484 let buffer = buffer_handle.read(cx);
1485 let buffer_id = buffer.remote_id();
1486 if let Some(file) = File::from_dyn(buffer.file()) {
1487 if file.is_local() {
1488 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1489 let initial_snapshot = buffer.text_snapshot();
1490
1491 let mut language_server = None;
1492 let mut language_id = None;
1493 if let Some(language) = buffer.language() {
1494 let worktree_id = file.worktree_id(cx);
1495 if let Some(adapter) = language.lsp_adapter() {
1496 language_id = adapter.id_for_language(language.name().as_ref());
1497 language_server = self
1498 .language_servers
1499 .get(&(worktree_id, adapter.name()))
1500 .cloned();
1501 }
1502 }
1503
1504 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1505 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1506 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1507 .log_err();
1508 }
1509 }
1510
1511 if let Some((_, server)) = language_server {
1512 server
1513 .notify::<lsp::notification::DidOpenTextDocument>(
1514 lsp::DidOpenTextDocumentParams {
1515 text_document: lsp::TextDocumentItem::new(
1516 uri,
1517 language_id.unwrap_or_default(),
1518 0,
1519 initial_snapshot.text(),
1520 ),
1521 }
1522 .clone(),
1523 )
1524 .log_err();
1525 buffer_handle.update(cx, |buffer, cx| {
1526 buffer.set_completion_triggers(
1527 server
1528 .capabilities()
1529 .completion_provider
1530 .as_ref()
1531 .and_then(|provider| provider.trigger_characters.clone())
1532 .unwrap_or(Vec::new()),
1533 cx,
1534 )
1535 });
1536 self.buffer_snapshots
1537 .insert(buffer_id, vec![(0, initial_snapshot)]);
1538 }
1539 }
1540 }
1541 }
1542
    /// Detaches `buffer` from its language server prior to a rename/save-as:
    /// clears its diagnostics, drops its snapshot history (ending
    /// incremental sync), and sends `didClose` for the old path.
    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                // NOTE: unwrap assumes `old_path` is absolute.
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }
1565
    /// Reacts to buffer events: forwards operations to collaborators when
    /// shared, streams incremental `didChange` notifications to the
    /// language server on edits, and sends `didSave` on save.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exit when a
    /// prerequisite (server, file, snapshot history) is missing.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Only relay operations when the project is actively shared.
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                // Snapshot history starts at the `didOpen` snapshot; without
                // it we can't compute a delta, so bail.
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Convert each edit since the last-sent snapshot into an LSP
                // content change: the range is expressed in the *old* text's
                // coordinates, the replacement text comes from the new text.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // Every server for the worktree is told about the save, not
                // just the buffer's own server.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1653
1654 fn language_servers_for_worktree(
1655 &self,
1656 worktree_id: WorktreeId,
1657 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1658 self.language_servers.iter().filter_map(
1659 move |((language_server_worktree_id, _), server)| {
1660 if *language_server_worktree_id == worktree_id {
1661 Some(server)
1662 } else {
1663 None
1664 }
1665 },
1666 )
1667 }
1668
1669 fn assign_language_to_buffer(
1670 &mut self,
1671 buffer: &ModelHandle<Buffer>,
1672 cx: &mut ModelContext<Self>,
1673 ) -> Option<()> {
1674 // If the buffer has a language, set it and start the language server if we haven't already.
1675 let full_path = buffer.read(cx).file()?.full_path(cx);
1676 let language = self.languages.select_language(&full_path)?;
1677 buffer.update(cx, |buffer, cx| {
1678 buffer.set_language(Some(language.clone()), cx);
1679 });
1680
1681 let file = File::from_dyn(buffer.read(cx).file())?;
1682 let worktree = file.worktree.read(cx).as_local()?;
1683 let worktree_id = worktree.id();
1684 let worktree_abs_path = worktree.abs_path().clone();
1685 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1686
1687 None
1688 }
1689
    /// Starts (at most once per `(worktree, adapter)` key) the language
    /// server for `language` rooted at `worktree_path`, wires up its
    /// notification/request handlers, and introduces every already-open
    /// matching buffer to it via `didOpen`.
    ///
    /// The `started_language_servers` entry holds the in-flight startup
    /// task, so concurrent callers do not spawn duplicate servers.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        // Languages without an LSP adapter have no server to start.
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    // Binary download/launch, then the LSP initialize
                    // handshake; abort quietly (log only) on failure.
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer workspace/configuration requests from the
                    // project-wide language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Track work-done progress (including disk-based
                    // diagnostics, distinguished by token).
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        // Register the live server and its status record.
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings so the server starts
                        // configured.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.shared_remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                // Skip buffers in other worktrees or with a
                                // different adapter.
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Seed (or reuse) the snapshot history used
                                // for incremental didChange sync.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1902
1903 pub fn restart_language_servers_for_buffers(
1904 &mut self,
1905 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1906 cx: &mut ModelContext<Self>,
1907 ) -> Option<()> {
1908 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1909 .into_iter()
1910 .filter_map(|buffer| {
1911 let file = File::from_dyn(buffer.read(cx).file())?;
1912 let worktree = file.worktree.read(cx).as_local()?;
1913 let worktree_id = worktree.id();
1914 let worktree_abs_path = worktree.abs_path().clone();
1915 let full_path = file.full_path(cx);
1916 Some((worktree_id, worktree_abs_path, full_path))
1917 })
1918 .collect();
1919 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1920 let language = self.languages.select_language(&full_path)?;
1921 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1922 }
1923
1924 None
1925 }
1926
1927 fn restart_language_server(
1928 &mut self,
1929 worktree_id: WorktreeId,
1930 worktree_path: Arc<Path>,
1931 language: Arc<Language>,
1932 cx: &mut ModelContext<Self>,
1933 ) {
1934 let adapter = if let Some(adapter) = language.lsp_adapter() {
1935 adapter
1936 } else {
1937 return;
1938 };
1939 let key = (worktree_id, adapter.name());
1940 let server_to_shutdown = self.language_servers.remove(&key);
1941 self.started_language_servers.remove(&key);
1942 server_to_shutdown
1943 .as_ref()
1944 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1945 cx.spawn_weak(|this, mut cx| async move {
1946 if let Some(this) = this.upgrade(&cx) {
1947 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1948 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1949 shutdown_task.await;
1950 }
1951 }
1952
1953 this.update(&mut cx, |this, cx| {
1954 this.start_language_server(worktree_id, worktree_path, language, cx);
1955 });
1956 }
1957 })
1958 .detach();
1959 }
1960
1961 fn on_lsp_diagnostics_published(
1962 &mut self,
1963 server_id: usize,
1964 mut params: lsp::PublishDiagnosticsParams,
1965 adapter: &Arc<dyn LspAdapter>,
1966 disk_based_diagnostics_progress_token: Option<&str>,
1967 cx: &mut ModelContext<Self>,
1968 ) {
1969 adapter.process_diagnostics(&mut params);
1970 if disk_based_diagnostics_progress_token.is_none() {
1971 self.disk_based_diagnostics_started(cx);
1972 self.broadcast_language_server_update(
1973 server_id,
1974 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1975 proto::LspDiskBasedDiagnosticsUpdating {},
1976 ),
1977 );
1978 }
1979 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1980 .log_err();
1981 if disk_based_diagnostics_progress_token.is_none() {
1982 self.disk_based_diagnostics_finished(cx);
1983 self.broadcast_language_server_update(
1984 server_id,
1985 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1986 proto::LspDiskBasedDiagnosticsUpdated {},
1987 ),
1988 );
1989 }
1990 }
1991
    /// Handles `$/progress` notifications from a language server.
    ///
    /// Progress whose token matches the adapter's disk-based-diagnostics
    /// token is counted via `pending_diagnostic_updates` (started on the
    /// first begin, finished when the count returns to zero); all other
    /// tokens are tracked as generic pending work and broadcast to guests.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are supported; numeric ones are skipped.
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        // Progress for a server we aren't tracking is ignored.
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    // Only the transition 0 -> 1 marks the start of a
                    // disk-based diagnostics pass.
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Diagnostic-pass reports carry no useful intermediate
                // state, so only non-diagnostic tokens are relayed.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates -= 1;
                    // The pass is finished only when every outstanding begin
                    // has been matched by an end.
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2086
2087 fn on_lsp_work_start(
2088 &mut self,
2089 language_server_id: usize,
2090 token: String,
2091 cx: &mut ModelContext<Self>,
2092 ) {
2093 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2094 status.pending_work.insert(
2095 token,
2096 LanguageServerProgress {
2097 message: None,
2098 percentage: None,
2099 last_update_at: Instant::now(),
2100 },
2101 );
2102 cx.notify();
2103 }
2104 }
2105
2106 fn on_lsp_work_progress(
2107 &mut self,
2108 language_server_id: usize,
2109 token: String,
2110 progress: LanguageServerProgress,
2111 cx: &mut ModelContext<Self>,
2112 ) {
2113 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2114 status.pending_work.insert(token, progress);
2115 cx.notify();
2116 }
2117 }
2118
2119 fn on_lsp_work_end(
2120 &mut self,
2121 language_server_id: usize,
2122 token: String,
2123 cx: &mut ModelContext<Self>,
2124 ) {
2125 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2126 status.pending_work.remove(&token);
2127 cx.notify();
2128 }
2129 }
2130
2131 async fn on_lsp_workspace_edit(
2132 this: WeakModelHandle<Self>,
2133 params: lsp::ApplyWorkspaceEditParams,
2134 server_id: usize,
2135 adapter: Arc<dyn LspAdapter>,
2136 language_server: Arc<LanguageServer>,
2137 mut cx: AsyncAppContext,
2138 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2139 let this = this
2140 .upgrade(&cx)
2141 .ok_or_else(|| anyhow!("project project closed"))?;
2142 let transaction = Self::deserialize_workspace_edit(
2143 this.clone(),
2144 params.edit,
2145 true,
2146 adapter.clone(),
2147 language_server.clone(),
2148 &mut cx,
2149 )
2150 .await
2151 .log_err();
2152 this.update(&mut cx, |this, _| {
2153 if let Some(transaction) = transaction {
2154 this.last_workspace_edits_by_language_server
2155 .insert(server_id, transaction);
2156 }
2157 });
2158 Ok(lsp::ApplyWorkspaceEditResponse {
2159 applied: true,
2160 failed_change: None,
2161 failure_reason: None,
2162 })
2163 }
2164
2165 fn broadcast_language_server_update(
2166 &self,
2167 language_server_id: usize,
2168 event: proto::update_language_server::Variant,
2169 ) {
2170 if let Some(project_id) = self.shared_remote_id() {
2171 self.client
2172 .send(proto::UpdateLanguageServer {
2173 project_id,
2174 language_server_id: language_server_id as u64,
2175 variant: Some(event),
2176 })
2177 .log_err();
2178 }
2179 }
2180
2181 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2182 for (_, server) in self.language_servers.values() {
2183 server
2184 .notify::<lsp::notification::DidChangeConfiguration>(
2185 lsp::DidChangeConfigurationParams {
2186 settings: settings.clone(),
2187 },
2188 )
2189 .ok();
2190 }
2191 *self.language_server_settings.lock() = settings;
2192 }
2193
2194 pub fn language_server_statuses(
2195 &self,
2196 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2197 self.language_server_statuses.values()
2198 }
2199
    /// Ingest an LSP `textDocument/publishDiagnostics` notification.
    ///
    /// Splits the reported diagnostics into "primary" entries and
    /// "supporting" entries (those whose `related_information` points back
    /// at an already-seen primary). Each primary gets a fresh group id, and
    /// its same-file related-information messages become secondary entries
    /// in the same group. Supporting diagnostics can then override a
    /// secondary entry's severity and `is_unnecessary` flag.
    ///
    /// `disk_based_sources` names diagnostic sources (e.g. a compiler) whose
    /// positions refer to the file on disk rather than the live buffer.
    ///
    /// # Errors
    /// Returns an error when the notification's URI is not a file path, or
    /// when `update_diagnostic_entries` fails.
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &[&str],
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut diagnostics = Vec::default();
        // (source, code, range) of each primary diagnostic -> its group id.
        let mut primary_diagnostic_group_ids = HashMap::default();
        // group id -> the source of that group's primary diagnostic.
        let mut sources_by_group_id = HashMap::default();
        // Overrides gathered from diagnostics that reference a primary via
        // their `related_information`.
        let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            // A diagnostic is "supporting" when any of its related locations
            // matches a primary seen earlier in this notification — this
            // relies on servers listing primaries before their supporters.
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
                tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
            });

            if is_supporting {
                supporting_diagnostics.insert(
                    (source, code.clone(), range),
                    (diagnostic.severity, is_unnecessary),
                );
            } else {
                let group_id = post_inc(&mut self.next_diagnostic_group_id);
                let is_disk_based = source.map_or(false, |source| {
                    disk_based_sources.contains(&source.as_str())
                });

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        // Servers may omit severity; default to ERROR.
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                        is_unnecessary,
                    },
                });
                // Surface same-file related information as secondary entries
                // within the primary diagnostic's group.
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                    is_unnecessary: false,
                                },
                            });
                        }
                    }
                }
            }
        }

        // Apply severity/unnecessary overrides from supporting diagnostics to
        // the secondary entries they point at.
        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    if let Some(severity) = severity {
                        diagnostic.severity = severity;
                    }
                    diagnostic.is_unnecessary = is_unnecessary;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }
2309
2310 pub fn update_diagnostic_entries(
2311 &mut self,
2312 abs_path: PathBuf,
2313 version: Option<i32>,
2314 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2315 cx: &mut ModelContext<Project>,
2316 ) -> Result<(), anyhow::Error> {
2317 let (worktree, relative_path) = self
2318 .find_local_worktree(&abs_path, cx)
2319 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2320 if !worktree.read(cx).is_visible() {
2321 return Ok(());
2322 }
2323
2324 let project_path = ProjectPath {
2325 worktree_id: worktree.read(cx).id(),
2326 path: relative_path.into(),
2327 };
2328 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2329 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2330 }
2331
2332 let updated = worktree.update(cx, |worktree, cx| {
2333 worktree
2334 .as_local_mut()
2335 .ok_or_else(|| anyhow!("not a local worktree"))?
2336 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2337 })?;
2338 if updated {
2339 cx.emit(Event::DiagnosticsUpdated(project_path));
2340 }
2341 Ok(())
2342 }
2343
    /// Apply freshly-received diagnostics to an open buffer.
    ///
    /// `version` is the LSP document version the diagnostics refer to; a
    /// buffer snapshot for that version is used for positioning. Disk-based
    /// diagnostics are shifted through the edits made since the last save,
    /// and every range is clipped to valid buffer positions (empty ranges
    /// are widened to span one character so they remain visible).
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break diagnostics with identical ranges: primaries first, then
        // disk-based ones, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, end descending, so that containing ranges
        // precede the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                // Prefer extending forward; if clipping collapses the range
                // again (e.g. at end of line), extend backward instead.
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2412
    /// Reload the given buffers from disk, discarding unsaved edits.
    ///
    /// Only dirty buffers are considered; clean buffers are skipped. Local
    /// buffers are reloaded directly, while buffers from a remote project
    /// are reloaded by the host via a `ReloadBuffers` request. Returns the
    /// combined transaction for all reloaded buffers; when `push_to_history`
    /// is false, local reload transactions are removed from each buffer's
    /// undo history.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            // Reloading a clean buffer would be a no-op, so skip it.
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote reloads only make sense when the project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to reload the remote buffers and replay the
            // resulting transaction locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            // Keep the reload out of the undo stack.
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2476
2477 pub fn format(
2478 &self,
2479 buffers: HashSet<ModelHandle<Buffer>>,
2480 push_to_history: bool,
2481 cx: &mut ModelContext<Project>,
2482 ) -> Task<Result<ProjectTransaction>> {
2483 let mut local_buffers = Vec::new();
2484 let mut remote_buffers = None;
2485 for buffer_handle in buffers {
2486 let buffer = buffer_handle.read(cx);
2487 if let Some(file) = File::from_dyn(buffer.file()) {
2488 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2489 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2490 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2491 }
2492 } else {
2493 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2494 }
2495 } else {
2496 return Task::ready(Ok(Default::default()));
2497 }
2498 }
2499
2500 let remote_buffers = self.remote_id().zip(remote_buffers);
2501 let client = self.client.clone();
2502
2503 cx.spawn(|this, mut cx| async move {
2504 let mut project_transaction = ProjectTransaction::default();
2505
2506 if let Some((project_id, remote_buffers)) = remote_buffers {
2507 let response = client
2508 .request(proto::FormatBuffers {
2509 project_id,
2510 buffer_ids: remote_buffers
2511 .iter()
2512 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2513 .collect(),
2514 })
2515 .await?
2516 .transaction
2517 .ok_or_else(|| anyhow!("missing transaction"))?;
2518 project_transaction = this
2519 .update(&mut cx, |this, cx| {
2520 this.deserialize_project_transaction(response, push_to_history, cx)
2521 })
2522 .await?;
2523 }
2524
2525 for (buffer, buffer_abs_path, language_server) in local_buffers {
2526 let text_document = lsp::TextDocumentIdentifier::new(
2527 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2528 );
2529 let capabilities = &language_server.capabilities();
2530 let tab_size = cx.update(|cx| {
2531 let language_name = buffer.read(cx).language().map(|language| language.name());
2532 cx.global::<Settings>().tab_size(language_name.as_deref())
2533 });
2534 let lsp_edits = if capabilities
2535 .document_formatting_provider
2536 .as_ref()
2537 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2538 {
2539 language_server
2540 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2541 text_document,
2542 options: lsp::FormattingOptions {
2543 tab_size,
2544 insert_spaces: true,
2545 insert_final_newline: Some(true),
2546 ..Default::default()
2547 },
2548 work_done_progress_params: Default::default(),
2549 })
2550 .await?
2551 } else if capabilities
2552 .document_range_formatting_provider
2553 .as_ref()
2554 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2555 {
2556 let buffer_start = lsp::Position::new(0, 0);
2557 let buffer_end =
2558 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2559 language_server
2560 .request::<lsp::request::RangeFormatting>(
2561 lsp::DocumentRangeFormattingParams {
2562 text_document,
2563 range: lsp::Range::new(buffer_start, buffer_end),
2564 options: lsp::FormattingOptions {
2565 tab_size: 4,
2566 insert_spaces: true,
2567 insert_final_newline: Some(true),
2568 ..Default::default()
2569 },
2570 work_done_progress_params: Default::default(),
2571 },
2572 )
2573 .await?
2574 } else {
2575 continue;
2576 };
2577
2578 if let Some(lsp_edits) = lsp_edits {
2579 let edits = this
2580 .update(&mut cx, |this, cx| {
2581 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2582 })
2583 .await?;
2584 buffer.update(&mut cx, |buffer, cx| {
2585 buffer.finalize_last_transaction();
2586 buffer.start_transaction();
2587 for (range, text) in edits {
2588 buffer.edit([(range, text)], cx);
2589 }
2590 if buffer.end_transaction(cx).is_some() {
2591 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2592 if !push_to_history {
2593 buffer.forget_transaction(transaction.id);
2594 }
2595 project_transaction.0.insert(cx.handle(), transaction);
2596 }
2597 });
2598 }
2599 }
2600
2601 Ok(project_transaction)
2602 })
2603 }
2604
2605 pub fn definition<T: ToPointUtf16>(
2606 &self,
2607 buffer: &ModelHandle<Buffer>,
2608 position: T,
2609 cx: &mut ModelContext<Self>,
2610 ) -> Task<Result<Vec<Location>>> {
2611 let position = position.to_point_utf16(buffer.read(cx));
2612 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2613 }
2614
2615 pub fn references<T: ToPointUtf16>(
2616 &self,
2617 buffer: &ModelHandle<Buffer>,
2618 position: T,
2619 cx: &mut ModelContext<Self>,
2620 ) -> Task<Result<Vec<Location>>> {
2621 let position = position.to_point_utf16(buffer.read(cx));
2622 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2623 }
2624
2625 pub fn document_highlights<T: ToPointUtf16>(
2626 &self,
2627 buffer: &ModelHandle<Buffer>,
2628 position: T,
2629 cx: &mut ModelContext<Self>,
2630 ) -> Task<Result<Vec<DocumentHighlight>>> {
2631 let position = position.to_point_utf16(buffer.read(cx));
2632
2633 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2634 }
2635
    /// Search for workspace symbols matching `query`.
    ///
    /// Locally, a `workspace/symbol` request is sent to every language
    /// server and the results are merged; server errors are logged and
    /// treated as empty result sets. Remotely, the query is forwarded to the
    /// host via `GetProjectSymbols`. A project with neither local servers
    /// nor a remote id yields an empty list.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    // Tag each server's response with enough context to
                    // resolve symbol paths once it arrives.
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    // Project dropped while waiting; return nothing.
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer the worktree that actually contains the
                            // symbol's file; otherwise report it relative to
                            // the worktree whose server produced it.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Drop symbols that fail to deserialize, logging them.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2738
2739 pub fn open_buffer_for_symbol(
2740 &mut self,
2741 symbol: &Symbol,
2742 cx: &mut ModelContext<Self>,
2743 ) -> Task<Result<ModelHandle<Buffer>>> {
2744 if self.is_local() {
2745 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2746 symbol.source_worktree_id,
2747 symbol.language_server_name.clone(),
2748 )) {
2749 server.clone()
2750 } else {
2751 return Task::ready(Err(anyhow!(
2752 "language server for worktree and language not found"
2753 )));
2754 };
2755
2756 let worktree_abs_path = if let Some(worktree_abs_path) = self
2757 .worktree_for_id(symbol.worktree_id, cx)
2758 .and_then(|worktree| worktree.read(cx).as_local())
2759 .map(|local_worktree| local_worktree.abs_path())
2760 {
2761 worktree_abs_path
2762 } else {
2763 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2764 };
2765 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2766 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2767 uri
2768 } else {
2769 return Task::ready(Err(anyhow!("invalid symbol path")));
2770 };
2771
2772 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2773 } else if let Some(project_id) = self.remote_id() {
2774 let request = self.client.request(proto::OpenBufferForSymbol {
2775 project_id,
2776 symbol: Some(serialize_symbol(symbol)),
2777 });
2778 cx.spawn(|this, mut cx| async move {
2779 let response = request.await?;
2780 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2781 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2782 .await
2783 })
2784 } else {
2785 Task::ready(Err(anyhow!("project does not have a remote id")))
2786 }
2787 }
2788
    /// Request code completions at `position` in the given buffer.
    ///
    /// For local buffers, sends `textDocument/completion` to the buffer's
    /// language server and converts each item's edit range into buffer
    /// anchors, discarding items whose range does not line up with the
    /// current buffer contents. Items without an explicit edit range replace
    /// the word token surrounding the cursor. Remote buffers proxy the
    /// request to the host. Returns an empty list when the buffer has no
    /// file or no language server.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor after the position so the remote request survives edits.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Normalize both LSP response shapes into a flat list.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily computed word-token range shared by all items
                    // that lack an explicit edit range.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until the local replica has caught up to the version
                // the host computed completions against.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2946
    /// Apply a completion item's `additionalTextEdits` (e.g. auto-imports)
    /// after the completion itself has been accepted.
    ///
    /// Locally, the item is resolved via `completionItem/resolve` and any
    /// additional edits are applied in their own transaction. Remotely, the
    /// host applies the edits and the resulting transaction is replayed
    /// here. When `push_to_history` is false the transaction is removed from
    /// the buffer's undo history. Returns the transaction, if any edits
    /// occurred.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Resolve the item to obtain any additional edits the server
                // deferred computing.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Isolate the additional edits in their own
                        // transaction, separate from the completion insertion.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to arrive before exposing the
                    // transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3028
3029 pub fn code_actions<T: Clone + ToOffset>(
3030 &self,
3031 buffer_handle: &ModelHandle<Buffer>,
3032 range: Range<T>,
3033 cx: &mut ModelContext<Self>,
3034 ) -> Task<Result<Vec<CodeAction>>> {
3035 let buffer_handle = buffer_handle.clone();
3036 let buffer = buffer_handle.read(cx);
3037 let snapshot = buffer.snapshot();
3038 let relevant_diagnostics = snapshot
3039 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3040 .map(|entry| entry.to_lsp_diagnostic_stub())
3041 .collect();
3042 let buffer_id = buffer.remote_id();
3043 let worktree;
3044 let buffer_abs_path;
3045 if let Some(file) = File::from_dyn(buffer.file()) {
3046 worktree = file.worktree.clone();
3047 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3048 } else {
3049 return Task::ready(Ok(Default::default()));
3050 };
3051 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3052
3053 if worktree.read(cx).as_local().is_some() {
3054 let buffer_abs_path = buffer_abs_path.unwrap();
3055 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3056 {
3057 server.clone()
3058 } else {
3059 return Task::ready(Ok(Default::default()));
3060 };
3061
3062 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3063 cx.foreground().spawn(async move {
3064 if !lang_server.capabilities().code_action_provider.is_some() {
3065 return Ok(Default::default());
3066 }
3067
3068 Ok(lang_server
3069 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3070 text_document: lsp::TextDocumentIdentifier::new(
3071 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3072 ),
3073 range: lsp_range,
3074 work_done_progress_params: Default::default(),
3075 partial_result_params: Default::default(),
3076 context: lsp::CodeActionContext {
3077 diagnostics: relevant_diagnostics,
3078 only: Some(vec![
3079 lsp::CodeActionKind::QUICKFIX,
3080 lsp::CodeActionKind::REFACTOR,
3081 lsp::CodeActionKind::REFACTOR_EXTRACT,
3082 lsp::CodeActionKind::SOURCE,
3083 ]),
3084 },
3085 })
3086 .await?
3087 .unwrap_or_default()
3088 .into_iter()
3089 .filter_map(|entry| {
3090 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3091 Some(CodeAction {
3092 range: range.clone(),
3093 lsp_action,
3094 })
3095 } else {
3096 None
3097 }
3098 })
3099 .collect())
3100 })
3101 } else if let Some(project_id) = self.remote_id() {
3102 let rpc = self.client.clone();
3103 let version = buffer.version();
3104 cx.spawn_weak(|_, mut cx| async move {
3105 let response = rpc
3106 .request(proto::GetCodeActions {
3107 project_id,
3108 buffer_id,
3109 start: Some(language::proto::serialize_anchor(&range.start)),
3110 end: Some(language::proto::serialize_anchor(&range.end)),
3111 version: serialize_version(&version),
3112 })
3113 .await?;
3114
3115 buffer_handle
3116 .update(&mut cx, |buffer, _| {
3117 buffer.wait_for_version(deserialize_version(response.version))
3118 })
3119 .await;
3120
3121 response
3122 .actions
3123 .into_iter()
3124 .map(language::proto::deserialize_code_action)
3125 .collect()
3126 })
3127 } else {
3128 Task::ready(Ok(Default::default()))
3129 }
3130 }
3131
    /// Apply a code action previously returned by `code_actions`.
    ///
    /// Locally, the action is first resolved (or re-requested when the
    /// server supplied no resolve data), then either its workspace edit is
    /// applied directly or its command is executed. While a command runs,
    /// any `workspace/applyEdit` the server sends back is captured through
    /// `last_workspace_edits_by_language_server` and returned. Remotely, the
    /// action is forwarded to the host and the resulting transaction
    /// replayed. Returns the project-wide transaction.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The server supports lazy resolution: refresh the range
                    // in its opaque `data` payload, then resolve the action.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for the range
                    // and match the stale action back up by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any stale captured edit before running the
                    // command, then return whatever the server sent back
                    // while it executed.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3227
3228 async fn deserialize_workspace_edit(
3229 this: ModelHandle<Self>,
3230 edit: lsp::WorkspaceEdit,
3231 push_to_history: bool,
3232 lsp_adapter: Arc<dyn LspAdapter>,
3233 language_server: Arc<LanguageServer>,
3234 cx: &mut AsyncAppContext,
3235 ) -> Result<ProjectTransaction> {
3236 let fs = this.read_with(cx, |this, _| this.fs.clone());
3237 let mut operations = Vec::new();
3238 if let Some(document_changes) = edit.document_changes {
3239 match document_changes {
3240 lsp::DocumentChanges::Edits(edits) => {
3241 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3242 }
3243 lsp::DocumentChanges::Operations(ops) => operations = ops,
3244 }
3245 } else if let Some(changes) = edit.changes {
3246 operations.extend(changes.into_iter().map(|(uri, edits)| {
3247 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3248 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3249 uri,
3250 version: None,
3251 },
3252 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3253 })
3254 }));
3255 }
3256
3257 let mut project_transaction = ProjectTransaction::default();
3258 for operation in operations {
3259 match operation {
3260 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3261 let abs_path = op
3262 .uri
3263 .to_file_path()
3264 .map_err(|_| anyhow!("can't convert URI to path"))?;
3265
3266 if let Some(parent_path) = abs_path.parent() {
3267 fs.create_dir(parent_path).await?;
3268 }
3269 if abs_path.ends_with("/") {
3270 fs.create_dir(&abs_path).await?;
3271 } else {
3272 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3273 .await?;
3274 }
3275 }
3276 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3277 let source_abs_path = op
3278 .old_uri
3279 .to_file_path()
3280 .map_err(|_| anyhow!("can't convert URI to path"))?;
3281 let target_abs_path = op
3282 .new_uri
3283 .to_file_path()
3284 .map_err(|_| anyhow!("can't convert URI to path"))?;
3285 fs.rename(
3286 &source_abs_path,
3287 &target_abs_path,
3288 op.options.map(Into::into).unwrap_or_default(),
3289 )
3290 .await?;
3291 }
3292 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3293 let abs_path = op
3294 .uri
3295 .to_file_path()
3296 .map_err(|_| anyhow!("can't convert URI to path"))?;
3297 let options = op.options.map(Into::into).unwrap_or_default();
3298 if abs_path.ends_with("/") {
3299 fs.remove_dir(&abs_path, options).await?;
3300 } else {
3301 fs.remove_file(&abs_path, options).await?;
3302 }
3303 }
3304 lsp::DocumentChangeOperation::Edit(op) => {
3305 let buffer_to_edit = this
3306 .update(cx, |this, cx| {
3307 this.open_local_buffer_via_lsp(
3308 op.text_document.uri,
3309 lsp_adapter.clone(),
3310 language_server.clone(),
3311 cx,
3312 )
3313 })
3314 .await?;
3315
3316 let edits = this
3317 .update(cx, |this, cx| {
3318 let edits = op.edits.into_iter().map(|edit| match edit {
3319 lsp::OneOf::Left(edit) => edit,
3320 lsp::OneOf::Right(edit) => edit.text_edit,
3321 });
3322 this.edits_from_lsp(
3323 &buffer_to_edit,
3324 edits,
3325 op.text_document.version,
3326 cx,
3327 )
3328 })
3329 .await?;
3330
3331 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3332 buffer.finalize_last_transaction();
3333 buffer.start_transaction();
3334 for (range, text) in edits {
3335 buffer.edit([(range, text)], cx);
3336 }
3337 let transaction = if buffer.end_transaction(cx).is_some() {
3338 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3339 if !push_to_history {
3340 buffer.forget_transaction(transaction.id);
3341 }
3342 Some(transaction)
3343 } else {
3344 None
3345 };
3346
3347 transaction
3348 });
3349 if let Some(transaction) = transaction {
3350 project_transaction.0.insert(buffer_to_edit, transaction);
3351 }
3352 }
3353 }
3354 }
3355
3356 Ok(project_transaction)
3357 }
3358
3359 pub fn prepare_rename<T: ToPointUtf16>(
3360 &self,
3361 buffer: ModelHandle<Buffer>,
3362 position: T,
3363 cx: &mut ModelContext<Self>,
3364 ) -> Task<Result<Option<Range<Anchor>>>> {
3365 let position = position.to_point_utf16(buffer.read(cx));
3366 self.request_lsp(buffer, PrepareRename { position }, cx)
3367 }
3368
3369 pub fn perform_rename<T: ToPointUtf16>(
3370 &self,
3371 buffer: ModelHandle<Buffer>,
3372 position: T,
3373 new_name: String,
3374 push_to_history: bool,
3375 cx: &mut ModelContext<Self>,
3376 ) -> Task<Result<ProjectTransaction>> {
3377 let position = position.to_point_utf16(buffer.read(cx));
3378 self.request_lsp(
3379 buffer,
3380 PerformRename {
3381 position,
3382 new_name,
3383 push_to_history,
3384 },
3385 cx,
3386 )
3387 }
3388
    /// Searches the project for ranges matching `query`, returning the
    /// matches grouped by buffer.
    ///
    /// Locally this runs in three stages: background workers scan file
    /// contents on disk for candidate paths, candidates are opened as
    /// buffers, and each buffer's text is then searched in parallel.
    /// Remotely the query is forwarded over RPC and the response locations
    /// are deserialized. Otherwise resolves to an empty map.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            // Snapshot all visible local worktrees so scanning can proceed
            // off the main thread without touching model state.
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: partition the combined file list evenly across
            // `workers` tasks; each task streams matching paths into
            // `matching_paths_tx`.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        // Walk the snapshots, skipping those that fall
                                        // entirely outside this worker's index range.
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // The receiver was dropped; stop scanning.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf across iterations.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed already-open buffers immediately, then open a
            // buffer for each matching path from stage 1. `open_buffers`
            // doubles as a de-dup set so each buffer is searched once.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search each buffer snapshot in parallel, one result
            // map per worker, then merge the maps at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote: run the search on the host and deserialize the results.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3591
    /// Issues an `LspCommand` request for `buffer_handle`, either directly to
    /// the buffer's language server (local) or over RPC (remote).
    ///
    /// Resolves to `R::Response::default()` when the buffer has no language
    /// server, the server lacks the required capability, or the project is
    /// neither local nor remote.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            // Only buffers backed by a local file can talk to a server.
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            // Remote: serialize the request, let the host run it, and
            // deserialize the host's response.
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3634
3635 pub fn find_or_create_local_worktree(
3636 &mut self,
3637 abs_path: impl AsRef<Path>,
3638 visible: bool,
3639 cx: &mut ModelContext<Self>,
3640 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3641 let abs_path = abs_path.as_ref();
3642 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3643 Task::ready(Ok((tree.clone(), relative_path.into())))
3644 } else {
3645 let worktree = self.create_local_worktree(abs_path, visible, cx);
3646 cx.foreground()
3647 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3648 }
3649 }
3650
3651 pub fn find_local_worktree(
3652 &self,
3653 abs_path: &Path,
3654 cx: &AppContext,
3655 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3656 for tree in self.worktrees(cx) {
3657 if let Some(relative_path) = tree
3658 .read(cx)
3659 .as_local()
3660 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3661 {
3662 return Some((tree.clone(), relative_path.into()));
3663 }
3664 }
3665 None
3666 }
3667
3668 pub fn is_shared(&self) -> bool {
3669 match &self.client_state {
3670 ProjectClientState::Local { is_shared, .. } => *is_shared,
3671 ProjectClientState::Remote { .. } => false,
3672 }
3673 }
3674
    /// Creates (or joins the in-flight creation of) a local worktree rooted
    /// at `abs_path`.
    ///
    /// Concurrent calls for the same path share a single loading task via
    /// `loading_local_worktrees`; the task is keyed by the path and removed
    /// once loading completes, whether it succeeded or failed.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Always drop the cache entry first, even on error,
                        // so later calls can retry.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let project_id = project.update(&mut cx, |project, cx| {
                            project.add_worktree(&worktree, cx);
                            project.shared_remote_id()
                        });

                        // Because sharing is async, we may have *unshared* the project by the time it completes.
                        if let Some(project_id) = project_id {
                            worktree
                                .update(&mut cx, |worktree, cx| {
                                    worktree.as_local_mut().unwrap().share(project_id, cx)
                                })
                                .await
                                .log_err();
                        }

                        Ok(worktree)
                    }
                    // The error is wrapped in `Arc` so the `Shared` future's
                    // output is cloneable across all waiters.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                // Re-wrap the shared error as a fresh anyhow error per caller.
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3734
3735 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3736 self.worktrees.retain(|worktree| {
3737 if let Some(worktree) = worktree.upgrade(cx) {
3738 let id = worktree.read(cx).id();
3739 if id == id_to_remove {
3740 cx.emit(Event::WorktreeRemoved(id));
3741 false
3742 } else {
3743 true
3744 }
3745 } else {
3746 false
3747 }
3748 });
3749 self.metadata_changed(cx);
3750 cx.notify();
3751 }
3752
3753 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3754 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3755 if worktree.read(cx).is_local() {
3756 cx.subscribe(&worktree, |this, worktree, _, cx| {
3757 this.update_local_worktree_buffers(worktree, cx);
3758 })
3759 .detach();
3760 }
3761
3762 let push_strong_handle = {
3763 let worktree = worktree.read(cx);
3764 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3765 };
3766 if push_strong_handle {
3767 self.worktrees
3768 .push(WorktreeHandle::Strong(worktree.clone()));
3769 } else {
3770 cx.observe_release(&worktree, |this, _, cx| {
3771 this.worktrees
3772 .retain(|worktree| worktree.upgrade(cx).is_some());
3773 cx.notify();
3774 })
3775 .detach();
3776 self.worktrees
3777 .push(WorktreeHandle::Weak(worktree.downgrade()));
3778 }
3779 self.metadata_changed(cx);
3780 cx.emit(Event::WorktreeAdded);
3781 cx.notify();
3782 }
3783
    /// Reconciles open buffers with a local worktree after its snapshot
    /// changed: re-resolves each buffer's `File`, notifies collaborators of
    /// file changes, and re-registers renamed buffers with language servers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only reconcile buffers that belong to this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolve by entry id first (stable across renames),
                        // then by path; otherwise the entry is gone and the
                        // buffer keeps its old path with no entry id.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // A changed absolute path means the buffer was renamed
                        // on disk; its language-server registration must move.
                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer was dropped; forget its weak handle.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
3863
3864 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3865 let new_active_entry = entry.and_then(|project_path| {
3866 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3867 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3868 Some(entry.id)
3869 });
3870 if new_active_entry != self.active_entry {
3871 self.active_entry = new_active_entry;
3872 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3873 }
3874 }
3875
3876 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3877 self.language_server_statuses
3878 .values()
3879 .any(|status| status.pending_diagnostic_updates > 0)
3880 }
3881
3882 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3883 let mut summary = DiagnosticSummary::default();
3884 for (_, path_summary) in self.diagnostic_summaries(cx) {
3885 summary.error_count += path_summary.error_count;
3886 summary.warning_count += path_summary.warning_count;
3887 }
3888 summary
3889 }
3890
    /// Iterates over the diagnostic summaries of every path in every
    /// worktree, pairing each summary with its full `ProjectPath`.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
3903
3904 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3905 if self
3906 .language_server_statuses
3907 .values()
3908 .map(|status| status.pending_diagnostic_updates)
3909 .sum::<isize>()
3910 == 1
3911 {
3912 cx.emit(Event::DiskBasedDiagnosticsStarted);
3913 }
3914 }
3915
3916 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3917 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3918 if self
3919 .language_server_statuses
3920 .values()
3921 .map(|status| status.pending_diagnostic_updates)
3922 .sum::<isize>()
3923 == 0
3924 {
3925 cx.emit(Event::DiskBasedDiagnosticsFinished);
3926 }
3927 }
3928
    /// The id of the entry most recently activated via `set_active_path`.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3932
3933 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3934 self.worktree_for_id(path.worktree_id, cx)?
3935 .read(cx)
3936 .entry_for_path(&path.path)
3937 .map(|entry| entry.id)
3938 }
3939
3940 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3941 let worktree = self.worktree_for_entry(entry_id, cx)?;
3942 let worktree = worktree.read(cx);
3943 let worktree_id = worktree.id();
3944 let path = worktree.entry_for_id(entry_id)?.path.clone();
3945 Some(ProjectPath { worktree_id, path })
3946 }
3947
3948 // RPC message handlers
3949
    /// Handles an incoming request from another user to join this project.
    ///
    /// Users who are already collaborators are auto-approved; otherwise the
    /// requester's profile is fetched and surfaced via
    /// `Event::ContactRequestedJoin` so the UI can prompt.
    async fn handle_request_join_project(
        this: ModelHandle<Self>,
        message: TypedEnvelope<proto::RequestJoinProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_id = message.payload.requester_id;
        if this.read_with(&cx, |project, _| {
            project.collaborators.values().any(|c| c.user.id == user_id)
        }) {
            this.update(&mut cx, |this, cx| {
                this.respond_to_join_request(user_id, true, cx)
            });
        } else {
            let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
            let user = user_store
                .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
                .await?;
            this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
        }
        Ok(())
    }
3972
    /// Handles the host unregistering this project: tears down local state
    /// via `removed_from_project`.
    async fn handle_unregister_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnregisterProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.removed_from_project(cx));
        Ok(())
    }
3982
    /// Handles the host unsharing this project: transitions local state via
    /// `unshared`.
    async fn handle_project_unshared(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::ProjectUnshared>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.unshared(cx));
        Ok(())
    }
3992
3993 async fn handle_add_collaborator(
3994 this: ModelHandle<Self>,
3995 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3996 _: Arc<Client>,
3997 mut cx: AsyncAppContext,
3998 ) -> Result<()> {
3999 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4000 let collaborator = envelope
4001 .payload
4002 .collaborator
4003 .take()
4004 .ok_or_else(|| anyhow!("empty collaborator"))?;
4005
4006 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4007 this.update(&mut cx, |this, cx| {
4008 this.collaborators
4009 .insert(collaborator.peer_id, collaborator);
4010 cx.notify();
4011 });
4012
4013 Ok(())
4014 }
4015
4016 async fn handle_remove_collaborator(
4017 this: ModelHandle<Self>,
4018 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4019 _: Arc<Client>,
4020 mut cx: AsyncAppContext,
4021 ) -> Result<()> {
4022 this.update(&mut cx, |this, cx| {
4023 let peer_id = PeerId(envelope.payload.peer_id);
4024 let replica_id = this
4025 .collaborators
4026 .remove(&peer_id)
4027 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4028 .replica_id;
4029 for (_, buffer) in &this.opened_buffers {
4030 if let Some(buffer) = buffer.upgrade(cx) {
4031 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4032 }
4033 }
4034
4035 cx.emit(Event::CollaboratorLeft(peer_id));
4036 cx.notify();
4037 Ok(())
4038 })
4039 }
4040
4041 async fn handle_join_project_request_cancelled(
4042 this: ModelHandle<Self>,
4043 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4044 _: Arc<Client>,
4045 mut cx: AsyncAppContext,
4046 ) -> Result<()> {
4047 let user = this
4048 .update(&mut cx, |this, cx| {
4049 this.user_store.update(cx, |user_store, cx| {
4050 user_store.fetch_user(envelope.payload.requester_id, cx)
4051 })
4052 })
4053 .await?;
4054
4055 this.update(&mut cx, |_, cx| {
4056 cx.emit(Event::ContactCancelledJoinRequest(user));
4057 });
4058
4059 Ok(())
4060 }
4061
    /// Handles a project-metadata update from the host: reconciles the local
    /// worktree list against the host's, reusing existing worktrees, adding
    /// remote ones for new entries, and emitting removal events for the rest.
    async fn handle_update_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProject>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let replica_id = this.replica_id();
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

            // Drain the current worktrees into a lookup table; whatever is
            // left at the end was removed by the host.
            let mut old_worktrees_by_id = this
                .worktrees
                .drain(..)
                .filter_map(|worktree| {
                    let worktree = worktree.upgrade(cx)?;
                    Some((worktree.read(cx).id(), worktree))
                })
                .collect::<HashMap<_, _>>();

            for worktree in envelope.payload.worktrees {
                if let Some(old_worktree) =
                    old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
                {
                    this.worktrees.push(WorktreeHandle::Strong(old_worktree));
                } else {
                    // New worktree: create a remote replica with empty
                    // contents and load its entries asynchronously.
                    let worktree = proto::Worktree {
                        id: worktree.id,
                        root_name: worktree.root_name,
                        entries: Default::default(),
                        diagnostic_summaries: Default::default(),
                        visible: worktree.visible,
                        scan_id: 0,
                    };
                    let (worktree, load_task) =
                        Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                    this.add_worktree(&worktree, cx);
                    load_task.detach();
                }
            }

            this.metadata_changed(cx);
            for (id, _) in old_worktrees_by_id {
                cx.emit(Event::WorktreeRemoved(id));
            }

            Ok(())
        })
    }
4110
4111 async fn handle_update_worktree(
4112 this: ModelHandle<Self>,
4113 envelope: TypedEnvelope<proto::UpdateWorktree>,
4114 _: Arc<Client>,
4115 mut cx: AsyncAppContext,
4116 ) -> Result<()> {
4117 this.update(&mut cx, |this, cx| {
4118 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4119 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4120 worktree.update(cx, |worktree, _| {
4121 let worktree = worktree.as_remote_mut().unwrap();
4122 worktree.update_from_remote(envelope)
4123 })?;
4124 }
4125 Ok(())
4126 })
4127 }
4128
    /// Handles a guest's request to create a file or directory entry in a
    /// local worktree, responding with the created entry.
    async fn handle_create_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Captured before the mutation, so the response reflects the scan the
        // request was issued against.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                // The path is transmitted as raw bytes (Unix OsString).
                let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
                worktree.create_entry(path, envelope.payload.is_directory, cx)
            })
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4153
    /// Handles a guest's request to rename a worktree entry, responding with
    /// the renamed entry.
    async fn handle_rename_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Captured before the mutation, so the response reflects the scan the
        // request was issued against.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // The new path is transmitted as raw bytes (Unix OsString).
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4181
    /// Handles a guest's request to copy a worktree entry to a new path,
    /// responding with the newly created entry.
    async fn handle_copy_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Captured before the mutation, so the response reflects the scan the
        // request was issued against.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // The destination path is transmitted as raw bytes (Unix OsString).
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4209
    /// Handles a guest's request to delete a worktree entry; the response
    /// carries no entry since it no longer exists.
    async fn handle_delete_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Captured before the mutation, so the response reflects the scan the
        // request was issued against.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .delete_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: None,
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4236
    /// Handles a diagnostic-summary update from the host, applying it to the
    /// matching remote worktree and notifying the UI. Summaries for unknown
    /// worktrees, or envelopes without a summary, are ignored.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
4263
    /// Message handler: records that the host started a language server so this
    /// replica can track its status (name, pending work, diagnostic updates).
    async fn handle_start_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::StartLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let server = envelope
            .payload
            .server
            .ok_or_else(|| anyhow!("invalid server"))?;
        this.update(&mut cx, |this, cx| {
            // Keyed by the host-assigned server id; a duplicate id replaces the
            // previous status entry.
            this.language_server_statuses.insert(
                server.id as usize,
                LanguageServerStatus {
                    name: server.name,
                    pending_work: Default::default(),
                    pending_diagnostic_updates: 0,
                },
            );
            cx.notify();
        });
        Ok(())
    }
4287
    /// Message handler: forwards language-server progress updates from the host
    /// into local state — work start/progress/end notifications and the
    /// begin/end of disk-based diagnostic passes.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamped locally; the wire format carries no clock.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
4336
4337 async fn handle_update_buffer(
4338 this: ModelHandle<Self>,
4339 envelope: TypedEnvelope<proto::UpdateBuffer>,
4340 _: Arc<Client>,
4341 mut cx: AsyncAppContext,
4342 ) -> Result<()> {
4343 this.update(&mut cx, |this, cx| {
4344 let payload = envelope.payload.clone();
4345 let buffer_id = payload.buffer_id;
4346 let ops = payload
4347 .operations
4348 .into_iter()
4349 .map(|op| language::proto::deserialize_operation(op))
4350 .collect::<Result<Vec<_>, _>>()?;
4351 let is_remote = this.is_remote();
4352 match this.opened_buffers.entry(buffer_id) {
4353 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4354 OpenBuffer::Strong(buffer) => {
4355 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4356 }
4357 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4358 OpenBuffer::Weak(_) => {}
4359 },
4360 hash_map::Entry::Vacant(e) => {
4361 assert!(
4362 is_remote,
4363 "received buffer update from {:?}",
4364 envelope.original_sender_id
4365 );
4366 e.insert(OpenBuffer::Loading(ops));
4367 }
4368 }
4369 Ok(())
4370 })
4371 }
4372
4373 async fn handle_update_buffer_file(
4374 this: ModelHandle<Self>,
4375 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4376 _: Arc<Client>,
4377 mut cx: AsyncAppContext,
4378 ) -> Result<()> {
4379 this.update(&mut cx, |this, cx| {
4380 let payload = envelope.payload.clone();
4381 let buffer_id = payload.buffer_id;
4382 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4383 let worktree = this
4384 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4385 .ok_or_else(|| anyhow!("no such worktree"))?;
4386 let file = File::from_proto(file, worktree.clone(), cx)?;
4387 let buffer = this
4388 .opened_buffers
4389 .get_mut(&buffer_id)
4390 .and_then(|b| b.upgrade(cx))
4391 .ok_or_else(|| anyhow!("no such buffer"))?;
4392 buffer.update(cx, |buffer, cx| {
4393 buffer.file_updated(Box::new(file), cx).detach();
4394 });
4395 Ok(())
4396 })
4397 }
4398
    /// Message handler: saves a buffer on behalf of a guest. Waits until this
    /// replica has observed the version the guest requested, performs the save,
    /// and reports the saved version and file mtime back to the guest.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all edits the guest has seen have been applied here.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
4431
    /// Message handler: reloads the requested buffers from disk on behalf of a
    /// guest and returns the resulting transaction, serialized for the sender.
    async fn handle_reload_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest records the transaction in
            // its own undo history after receiving the response.
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })?;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4460
    /// Message handler: formats the requested buffers on behalf of a guest and
    /// returns the resulting transaction, serialized for the sender. Mirrors
    /// `handle_reload_buffers`.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest applies the transaction to
            // its own undo history on receipt.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4489
    /// Message handler: computes completions at a position on behalf of a
    /// guest. Waits for the guest's buffer version before querying, and returns
    /// the buffer version at which the completions were computed so the guest
    /// can resolve anchors consistently.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting; this is what the response reports.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4524
    /// Message handler: applies a completion's additional text edits (e.g.
    /// auto-imports) on behalf of a guest, returning the resulting transaction
    /// if any edits were applied.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language drives label parsing during deserialization.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `push_to_history: false` — the guest owns its undo history.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4557
    /// Message handler: computes code actions for a range on behalf of a guest.
    /// Waits for the guest's buffer version first, and reports the version at
    /// which the actions were computed.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Version snapshot after waiting; returned so the guest can interpret anchors.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4600
    /// Message handler: applies a code action on behalf of a guest and returns
    /// the resulting project-wide transaction, serialized for the sender.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `push_to_history: false` — the guest records the transaction itself.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4631
    /// Generic message handler for LSP requests proxied from guests. Looks up
    /// the target buffer, deserializes the request (`T::from_proto` may itself
    /// wait for buffer state), runs it against the language server, and
    /// serializes the response for the original sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Captured before dispatching so the response can reference the state
        // the request was answered against.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4672
4673 async fn handle_get_project_symbols(
4674 this: ModelHandle<Self>,
4675 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4676 _: Arc<Client>,
4677 mut cx: AsyncAppContext,
4678 ) -> Result<proto::GetProjectSymbolsResponse> {
4679 let symbols = this
4680 .update(&mut cx, |this, cx| {
4681 this.symbols(&envelope.payload.query, cx)
4682 })
4683 .await?;
4684
4685 Ok(proto::GetProjectSymbolsResponse {
4686 symbols: symbols.iter().map(serialize_symbol).collect(),
4687 })
4688 }
4689
    /// Message handler: runs a project-wide search on behalf of a guest and
    /// returns the matches as buffer/anchor locations, sending full buffer
    /// state for any buffer the peer hasn't seen yet.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Serializing per match is cheap after the first: subsequent
                    // serializations of the same buffer send only its id.
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
4719
    /// Message handler: opens the buffer containing a symbol on behalf of a
    /// guest. The symbol's signature is re-derived and checked so a guest
    /// cannot forge symbols pointing at paths it was never sent.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            // Recompute the HMAC-like signature and compare.
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4750
4751 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4752 let mut hasher = Sha256::new();
4753 hasher.update(worktree_id.to_proto().to_be_bytes());
4754 hasher.update(path.to_string_lossy().as_bytes());
4755 hasher.update(self.nonce.to_be_bytes());
4756 hasher.finalize().as_slice().try_into().unwrap()
4757 }
4758
4759 async fn handle_open_buffer_by_id(
4760 this: ModelHandle<Self>,
4761 envelope: TypedEnvelope<proto::OpenBufferById>,
4762 _: Arc<Client>,
4763 mut cx: AsyncAppContext,
4764 ) -> Result<proto::OpenBufferResponse> {
4765 let peer_id = envelope.original_sender_id()?;
4766 let buffer = this
4767 .update(&mut cx, |this, cx| {
4768 this.open_buffer_by_id(envelope.payload.id, cx)
4769 })
4770 .await?;
4771 this.update(&mut cx, |this, cx| {
4772 Ok(proto::OpenBufferResponse {
4773 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4774 })
4775 })
4776 }
4777
4778 async fn handle_open_buffer_by_path(
4779 this: ModelHandle<Self>,
4780 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4781 _: Arc<Client>,
4782 mut cx: AsyncAppContext,
4783 ) -> Result<proto::OpenBufferResponse> {
4784 let peer_id = envelope.original_sender_id()?;
4785 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4786 let open_buffer = this.update(&mut cx, |this, cx| {
4787 this.open_buffer(
4788 ProjectPath {
4789 worktree_id,
4790 path: PathBuf::from(envelope.payload.path).into(),
4791 },
4792 cx,
4793 )
4794 });
4795
4796 let buffer = open_buffer.await?;
4797 this.update(&mut cx, |this, cx| {
4798 Ok(proto::OpenBufferResponse {
4799 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4800 })
4801 })
4802 }
4803
4804 fn serialize_project_transaction_for_peer(
4805 &mut self,
4806 project_transaction: ProjectTransaction,
4807 peer_id: PeerId,
4808 cx: &AppContext,
4809 ) -> proto::ProjectTransaction {
4810 let mut serialized_transaction = proto::ProjectTransaction {
4811 buffers: Default::default(),
4812 transactions: Default::default(),
4813 };
4814 for (buffer, transaction) in project_transaction.0 {
4815 serialized_transaction
4816 .buffers
4817 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4818 serialized_transaction
4819 .transactions
4820 .push(language::proto::serialize_transaction(&transaction));
4821 }
4822 serialized_transaction
4823 }
4824
    /// Reconstructs a project-wide transaction received over the wire. Resolves
    /// each buffer (waiting for it to open if necessary), then waits for all of
    /// each transaction's edits to arrive before optionally pushing the
    /// transactions onto the buffers' undo histories.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // The wire format keeps buffers and transactions index-aligned.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Edits may still be in flight; block until every edit the
                // transaction references has been applied locally.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4858
4859 fn serialize_buffer_for_peer(
4860 &mut self,
4861 buffer: &ModelHandle<Buffer>,
4862 peer_id: PeerId,
4863 cx: &AppContext,
4864 ) -> proto::Buffer {
4865 let buffer_id = buffer.read(cx).remote_id();
4866 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4867 if shared_buffers.insert(buffer_id) {
4868 proto::Buffer {
4869 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4870 }
4871 } else {
4872 proto::Buffer {
4873 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4874 }
4875 }
4876 }
4877
    /// Resolves a wire-format buffer into a local buffer model. An `Id` variant
    /// waits (via the `opened_buffer` watch channel) until the referenced
    /// buffer has been opened locally; a `State` variant constructs the buffer,
    /// attaching its `File` if the owning worktree is known.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, sleeping on the watch channel between
                    // attempts; every locally-opened buffer pings the channel.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            // Keep the worktree alive while the buffer is built.
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting on an `Id` variant for this buffer.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4936
    /// Reconstructs a `Symbol` from its wire representation, re-deriving the
    /// display label from the symbol's language when one is available.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // SECURITY NOTE(review): `kind` arrives from the network; transmuting
        // an unvalidated integer into an enum is undefined behavior for
        // out-of-range values. A checked conversion should be used instead.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to a plain label when no language-specific one exists.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4966
    /// Message handler: informs a local buffer replica that the host saved it,
    /// passing along the saved version and the file's new mtime.
    async fn handle_buffer_saved(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::BufferSaved>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let version = deserialize_version(envelope.payload.version);
        let mtime = envelope
            .payload
            .mtime
            .ok_or_else(|| anyhow!("missing mtime"))?
            .into();

        this.update(&mut cx, |this, cx| {
            // The buffer may already have been closed locally; that's fine.
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx));
            if let Some(buffer) = buffer {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_save(version, mtime, None, cx);
                });
            }
            Ok(())
        })
    }
4993
4994 async fn handle_buffer_reloaded(
4995 this: ModelHandle<Self>,
4996 envelope: TypedEnvelope<proto::BufferReloaded>,
4997 _: Arc<Client>,
4998 mut cx: AsyncAppContext,
4999 ) -> Result<()> {
5000 let payload = envelope.payload.clone();
5001 let version = deserialize_version(payload.version);
5002 let mtime = payload
5003 .mtime
5004 .ok_or_else(|| anyhow!("missing mtime"))?
5005 .into();
5006 this.update(&mut cx, |this, cx| {
5007 let buffer = this
5008 .opened_buffers
5009 .get(&payload.buffer_id)
5010 .and_then(|buffer| buffer.upgrade(cx));
5011 if let Some(buffer) = buffer {
5012 buffer.update(cx, |buffer, cx| {
5013 buffer.did_reload(version, mtime, cx);
5014 });
5015 }
5016 Ok(())
5017 })
5018 }
5019
5020 pub fn match_paths<'a>(
5021 &self,
5022 query: &'a str,
5023 include_ignored: bool,
5024 smart_case: bool,
5025 max_results: usize,
5026 cancel_flag: &'a AtomicBool,
5027 cx: &AppContext,
5028 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5029 let worktrees = self
5030 .worktrees(cx)
5031 .filter(|worktree| worktree.read(cx).is_visible())
5032 .collect::<Vec<_>>();
5033 let include_root_name = worktrees.len() > 1;
5034 let candidate_sets = worktrees
5035 .into_iter()
5036 .map(|worktree| CandidateSet {
5037 snapshot: worktree.read(cx).snapshot(),
5038 include_ignored,
5039 include_root_name,
5040 })
5041 .collect::<Vec<_>>();
5042
5043 let background = cx.background().clone();
5044 async move {
5045 fuzzy::match_paths(
5046 candidate_sets.as_slice(),
5047 query,
5048 smart_case,
5049 max_results,
5050 cancel_flag,
5051 background,
5052 )
5053 .await
5054 }
5055 }
5056
    /// Converts a batch of LSP `TextEdit`s into anchored buffer edits, resolved
    /// against the snapshot corresponding to `version` (or the current text
    /// when `version` is `None`). Adjacent/newline-separated edits are
    /// coalesced, and multi-line replacements are diffed so that anchors in
    /// unchanged regions keep their positions.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop coalescing unless the gap is exactly one newline
                        // (i.e. the next edit starts at column 0 of the line
                        // immediately after this edit's last, non-truncated line).
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates don't exist in this snapshot —
                // the server is operating on text we don't have.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether we can merge a change into the previous edit:
                    // consecutive delete/insert hunks collapse into one replacement.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: an empty range anchored after the position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5159
    /// Returns the text snapshot corresponding to the buffer `version` the
    /// language server was last told about, or the current text when `version`
    /// is `None`. As a side effect, prunes stored snapshots that are more than
    /// `OLD_VERSIONS_TO_RETAIN` versions older than the requested one.
    fn buffer_snapshot_for_lsp_version(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        version: Option<i32>,
        cx: &AppContext,
    ) -> Result<TextBufferSnapshot> {
        // How many document versions behind the requested one we keep around,
        // to serve edits the server computed against slightly stale text.
        const OLD_VERSIONS_TO_RETAIN: i32 = 10;

        if let Some(version) = version {
            let buffer_id = buffer.read(cx).remote_id();
            let snapshots = self
                .buffer_snapshots
                .get_mut(&buffer_id)
                .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
            let mut found_snapshot = None;
            // One pass both prunes old snapshots and captures the matching one.
            snapshots.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    false
                } else {
                    if *snapshot_version == version {
                        found_snapshot = Some(snapshot.clone());
                    }
                    true
                }
            });

            found_snapshot.ok_or_else(|| {
                anyhow!(
                    "snapshot not found for buffer {} at version {}",
                    buffer_id,
                    version
                )
            })
        } else {
            Ok((buffer.read(cx)).text_snapshot())
        }
    }
5197
5198 fn language_server_for_buffer(
5199 &self,
5200 buffer: &Buffer,
5201 cx: &AppContext,
5202 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5203 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5204 let worktree_id = file.worktree_id(cx);
5205 self.language_servers
5206 .get(&(worktree_id, language.lsp_adapter()?.name()))
5207 } else {
5208 None
5209 }
5210 }
5211}
5212
5213impl ProjectStore {
5214 pub fn projects<'a>(
5215 &'a self,
5216 cx: &'a AppContext,
5217 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5218 self.projects
5219 .iter()
5220 .filter_map(|project| project.upgrade(cx))
5221 }
5222
5223 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5224 if let Err(ix) = self
5225 .projects
5226 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5227 {
5228 self.projects.insert(ix, project);
5229 }
5230 cx.notify();
5231 }
5232
5233 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5234 let mut did_change = false;
5235 self.projects.retain(|project| {
5236 if project.is_upgradable(cx) {
5237 true
5238 } else {
5239 did_change = true;
5240 false
5241 }
5242 });
5243 if did_change {
5244 cx.notify();
5245 }
5246 }
5247}
5248
5249impl WorktreeHandle {
5250 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5251 match self {
5252 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5253 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5254 }
5255 }
5256}
5257
5258impl OpenBuffer {
5259 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5260 match self {
5261 OpenBuffer::Strong(handle) => Some(handle.clone()),
5262 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5263 OpenBuffer::Loading(_) => None,
5264 }
5265 }
5266}
5267
/// One worktree's files packaged as a candidate set for fuzzy path matching
/// (see `Project::match_paths`).
struct CandidateSet {
    snapshot: Snapshot,
    // Whether gitignored files participate in matching.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
5273
5274impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5275 type Candidates = CandidateSetIter<'a>;
5276
5277 fn id(&self) -> usize {
5278 self.snapshot.id().to_usize()
5279 }
5280
5281 fn len(&self) -> usize {
5282 if self.include_ignored {
5283 self.snapshot.file_count()
5284 } else {
5285 self.snapshot.visible_file_count()
5286 }
5287 }
5288
5289 fn prefix(&self) -> Arc<str> {
5290 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5291 self.snapshot.root_name().into()
5292 } else if self.include_root_name {
5293 format!("{}/", self.snapshot.root_name()).into()
5294 } else {
5295 "".into()
5296 }
5297 }
5298
5299 fn candidates(&'a self, start: usize) -> Self::Candidates {
5300 CandidateSetIter {
5301 traversal: self.snapshot.files(self.include_ignored, start),
5302 }
5303 }
5304}
5305
/// Iterator over a worktree traversal's file entries, yielding fuzzy-match
/// candidates for `CandidateSet`.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5309
5310impl<'a> Iterator for CandidateSetIter<'a> {
5311 type Item = PathMatchCandidate<'a>;
5312
5313 fn next(&mut self) -> Option<Self::Item> {
5314 self.traversal.next().map(|entry| {
5315 if let EntryKind::File(char_bag) = entry.kind {
5316 PathMatchCandidate {
5317 path: &entry.path,
5318 char_bag,
5319 }
5320 } else {
5321 unreachable!()
5322 }
5323 })
5324 }
5325}
5326
impl Entity for ProjectStore {
    // The project store's events carry no payload.
    type Event = ();
}
5330
5331impl Entity for Project {
5332 type Event = Event;
5333
5334 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5335 self.project_store.update(cx, ProjectStore::prune_projects);
5336
5337 match &self.client_state {
5338 ProjectClientState::Local { remote_id_rx, .. } => {
5339 if let Some(project_id) = *remote_id_rx.borrow() {
5340 self.client
5341 .send(proto::UnregisterProject { project_id })
5342 .log_err();
5343 }
5344 }
5345 ProjectClientState::Remote { remote_id, .. } => {
5346 self.client
5347 .send(proto::LeaveProject {
5348 project_id: *remote_id,
5349 })
5350 .log_err();
5351 }
5352 }
5353 }
5354
5355 fn app_will_quit(
5356 &mut self,
5357 _: &mut MutableAppContext,
5358 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5359 let shutdown_futures = self
5360 .language_servers
5361 .drain()
5362 .filter_map(|(_, (_, server))| server.shutdown())
5363 .collect::<Vec<_>>();
5364 Some(
5365 async move {
5366 futures::future::join_all(shutdown_futures).await;
5367 }
5368 .boxed(),
5369 )
5370 }
5371}
5372
5373impl Collaborator {
5374 fn from_proto(
5375 message: proto::Collaborator,
5376 user_store: &ModelHandle<UserStore>,
5377 cx: &mut AsyncAppContext,
5378 ) -> impl Future<Output = Result<Self>> {
5379 let user = user_store.update(cx, |user_store, cx| {
5380 user_store.fetch_user(message.user_id, cx)
5381 });
5382
5383 async move {
5384 Ok(Self {
5385 peer_id: PeerId(message.peer_id),
5386 user: user.await?,
5387 replica_id: message.replica_id as ReplicaId,
5388 })
5389 }
5390 }
5391}
5392
5393impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5394 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5395 Self {
5396 worktree_id,
5397 path: path.as_ref().into(),
5398 }
5399 }
5400}
5401
5402impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5403 fn from(options: lsp::CreateFileOptions) -> Self {
5404 Self {
5405 overwrite: options.overwrite.unwrap_or(false),
5406 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5407 }
5408 }
5409}
5410
5411impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5412 fn from(options: lsp::RenameFileOptions) -> Self {
5413 Self {
5414 overwrite: options.overwrite.unwrap_or(false),
5415 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5416 }
5417 }
5418}
5419
5420impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5421 fn from(options: lsp::DeleteFileOptions) -> Self {
5422 Self {
5423 recursive: options.recursive.unwrap_or(false),
5424 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5425 }
5426 }
5427}
5428
/// Converts a `Symbol` into its protobuf wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY(review): reinterprets the symbol's kind enum as the proto
        // field's type via transmute; this assumes the two types have
        // identical size and representation — TODO confirm, and consider a
        // checked conversion instead.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Paths cross the wire as strings; non-UTF-8 components are lossily
        // converted.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5448
/// Computes the relative path from `base` to `path` by stripping their common
/// prefix and ascending (`..`) once for every remaining component of `base`.
/// Returns an empty path when the two paths are equal.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut origin = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target.next(), origin.next()) {
            // Both exhausted: paths were identical (or fully consumed).
            (None, None) => break,
            // `path` extends beyond `base`: the rest descends directly.
            (Some(t), None) => {
                result.push(t);
                result.extend(target.by_ref());
                break;
            }
            // `base` extends beyond `path`: ascend one level per extra component.
            (None, Some(_)) => result.push(Component::ParentDir),
            (Some(t), Some(o)) => {
                if result.is_empty() && t == o {
                    // Still inside the shared prefix; skip the component.
                } else if o == Component::CurDir {
                    // A `.` in `base` consumes nothing; keep the target component.
                    result.push(t);
                } else {
                    // Paths diverge: ascend out of the rest of `base`, then
                    // descend into the rest of `path`.
                    result.push(Component::ParentDir);
                    result.extend(origin.by_ref().map(|_| Component::ParentDir));
                    result.push(t);
                    result.extend(target.by_ref());
                    break;
                }
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
5477
5478impl Item for Buffer {
5479 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5480 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5481 }
5482}
5483
5484#[cfg(test)]
5485mod tests {
5486 use crate::worktree::WorktreeHandle;
5487
5488 use super::{Event, *};
5489 use fs::RealFs;
5490 use futures::{future, StreamExt};
5491 use gpui::test::subscribe;
5492 use language::{
5493 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5494 OffsetRangeExt, Point, ToPoint,
5495 };
5496 use lsp::Url;
5497 use serde_json::json;
5498 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5499 use unindent::Unindent as _;
5500 use util::{assert_set_eq, test::temp_tree};
5501
    // Builds a real directory tree containing symlinks (a link to the root and
    // a directory link inside it), opens a project through the root symlink,
    // and verifies that the worktree follows the links and that fuzzy path
    // matching finds files through them.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            // 5 files: apple, date, endive, grape, and grape again through the
            // finnochio symlink.
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying inode.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        // The fuzzy query "bna" should match only the files under banana/.
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5555
    // End-to-end exercise of language-server lifecycle management: servers
    // start lazily per language, receive open/change/save/close notifications
    // only for buffers of their language, follow buffers across renames (and
    // across language changes when the extension changes), and reopen their
    // documents after a restart.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Seed the buffer with a diagnostic so we can observe it being cleared
        // when the file's language changes below.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Answer the shutdown requests so both old servers can exit cleanly.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
5932
    // Opens two single-file worktrees in one project and verifies that
    // diagnostics published for each file are routed to the correct buffer,
    // with the expected severities surfacing in the highlighted chunks.
    #[gpui::test]
    async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "let a = 1;",
                "b.rs": "let b = 2;"
            }),
        )
        .await;

        // Each file is its own worktree.
        let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

        let buffer_a = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        project.update(cx, |project, cx| {
            // An ERROR on `a` in a.rs...
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/a.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::ERROR),
                            message: "error 1".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
            // ...and a WARNING on `b` in b.rs.
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/b.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::WARNING),
                            message: "error 2".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
        });

        buffer_a.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("a", Some(DiagnosticSeverity::ERROR)),
                    (" = 1;", None),
                ]
            );
        });
        buffer_b.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("b", Some(DiagnosticSeverity::WARNING)),
                    (" = 2;", None),
                ]
            );
        });
    }
6028
    // Verifies the project's disk-based-diagnostics event lifecycle: a single
    // Started event for nested progress tokens, DiagnosticsUpdated per
    // publish, Updated/Finished once all progress ends, and no duplicate
    // events when empty diagnostics are re-published.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let worktree_id =
            project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested start/end progress pairs should not produce extra
        // Started/Finished events.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Ending the last outstanding progress completes the cycle.
        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });

        // Ensure publishing empty diagnostics twice only results in one update event.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        cx.foreground().run_until_parked();
        assert_eq!(futures::poll!(events.next()), Poll::Pending);
    }
6158
    // Restarts a language server while its disk-based diagnostics progress is
    // still open, and verifies the replacement server's progress drives a
    // complete Started/Updated/Finished cycle even though the old server's
    // progress token was never ended.
    #[gpui::test]
    async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            disk_based_diagnostics_progress_token: Some(progress_token),
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate diagnostics starting to update.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;

        // Restart the server before the diagnostics finish updating.
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers([buffer], cx);
        });
        let mut events = subscribe(&project, cx);

        // Simulate the newly started server sending more diagnostics.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // All diagnostics are considered done, despite the old server's diagnostic
        // task never completing.
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );
        project.read_with(cx, |project, _| {
            assert!(!project.is_running_disk_based_diagnostics());
        });
    }
6221
6222 #[gpui::test]
6223 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6224 cx.foreground().forbid_parking();
6225
6226 let mut language = Language::new(
6227 LanguageConfig {
6228 name: "Rust".into(),
6229 path_suffixes: vec!["rs".to_string()],
6230 ..Default::default()
6231 },
6232 Some(tree_sitter_rust::language()),
6233 );
6234 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6235 disk_based_diagnostics_sources: &["disk"],
6236 ..Default::default()
6237 });
6238
6239 let text = "
6240 fn a() { A }
6241 fn b() { BB }
6242 fn c() { CCC }
6243 "
6244 .unindent();
6245
6246 let fs = FakeFs::new(cx.background());
6247 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6248
6249 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6250 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6251
6252 let buffer = project
6253 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6254 .await
6255 .unwrap();
6256
6257 let mut fake_server = fake_servers.next().await.unwrap();
6258 let open_notification = fake_server
6259 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6260 .await;
6261
6262 // Edit the buffer, moving the content down
6263 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6264 let change_notification_1 = fake_server
6265 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6266 .await;
6267 assert!(
6268 change_notification_1.text_document.version > open_notification.text_document.version
6269 );
6270
6271 // Report some diagnostics for the initial version of the buffer
6272 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6273 lsp::PublishDiagnosticsParams {
6274 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6275 version: Some(open_notification.text_document.version),
6276 diagnostics: vec![
6277 lsp::Diagnostic {
6278 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6279 severity: Some(DiagnosticSeverity::ERROR),
6280 message: "undefined variable 'A'".to_string(),
6281 source: Some("disk".to_string()),
6282 ..Default::default()
6283 },
6284 lsp::Diagnostic {
6285 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6286 severity: Some(DiagnosticSeverity::ERROR),
6287 message: "undefined variable 'BB'".to_string(),
6288 source: Some("disk".to_string()),
6289 ..Default::default()
6290 },
6291 lsp::Diagnostic {
6292 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6293 severity: Some(DiagnosticSeverity::ERROR),
6294 source: Some("disk".to_string()),
6295 message: "undefined variable 'CCC'".to_string(),
6296 ..Default::default()
6297 },
6298 ],
6299 },
6300 );
6301
6302 // The diagnostics have moved down since they were created.
6303 buffer.next_notification(cx).await;
6304 buffer.read_with(cx, |buffer, _| {
6305 assert_eq!(
6306 buffer
6307 .snapshot()
6308 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6309 .collect::<Vec<_>>(),
6310 &[
6311 DiagnosticEntry {
6312 range: Point::new(3, 9)..Point::new(3, 11),
6313 diagnostic: Diagnostic {
6314 severity: DiagnosticSeverity::ERROR,
6315 message: "undefined variable 'BB'".to_string(),
6316 is_disk_based: true,
6317 group_id: 1,
6318 is_primary: true,
6319 ..Default::default()
6320 },
6321 },
6322 DiagnosticEntry {
6323 range: Point::new(4, 9)..Point::new(4, 12),
6324 diagnostic: Diagnostic {
6325 severity: DiagnosticSeverity::ERROR,
6326 message: "undefined variable 'CCC'".to_string(),
6327 is_disk_based: true,
6328 group_id: 2,
6329 is_primary: true,
6330 ..Default::default()
6331 }
6332 }
6333 ]
6334 );
6335 assert_eq!(
6336 chunks_with_diagnostics(buffer, 0..buffer.len()),
6337 [
6338 ("\n\nfn a() { ".to_string(), None),
6339 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6340 (" }\nfn b() { ".to_string(), None),
6341 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6342 (" }\nfn c() { ".to_string(), None),
6343 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6344 (" }\n".to_string(), None),
6345 ]
6346 );
6347 assert_eq!(
6348 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6349 [
6350 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6351 (" }\nfn c() { ".to_string(), None),
6352 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6353 ]
6354 );
6355 });
6356
6357 // Ensure overlapping diagnostics are highlighted correctly.
6358 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6359 lsp::PublishDiagnosticsParams {
6360 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6361 version: Some(open_notification.text_document.version),
6362 diagnostics: vec![
6363 lsp::Diagnostic {
6364 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6365 severity: Some(DiagnosticSeverity::ERROR),
6366 message: "undefined variable 'A'".to_string(),
6367 source: Some("disk".to_string()),
6368 ..Default::default()
6369 },
6370 lsp::Diagnostic {
6371 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6372 severity: Some(DiagnosticSeverity::WARNING),
6373 message: "unreachable statement".to_string(),
6374 source: Some("disk".to_string()),
6375 ..Default::default()
6376 },
6377 ],
6378 },
6379 );
6380
6381 buffer.next_notification(cx).await;
6382 buffer.read_with(cx, |buffer, _| {
6383 assert_eq!(
6384 buffer
6385 .snapshot()
6386 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6387 .collect::<Vec<_>>(),
6388 &[
6389 DiagnosticEntry {
6390 range: Point::new(2, 9)..Point::new(2, 12),
6391 diagnostic: Diagnostic {
6392 severity: DiagnosticSeverity::WARNING,
6393 message: "unreachable statement".to_string(),
6394 is_disk_based: true,
6395 group_id: 4,
6396 is_primary: true,
6397 ..Default::default()
6398 }
6399 },
6400 DiagnosticEntry {
6401 range: Point::new(2, 9)..Point::new(2, 10),
6402 diagnostic: Diagnostic {
6403 severity: DiagnosticSeverity::ERROR,
6404 message: "undefined variable 'A'".to_string(),
6405 is_disk_based: true,
6406 group_id: 3,
6407 is_primary: true,
6408 ..Default::default()
6409 },
6410 }
6411 ]
6412 );
6413 assert_eq!(
6414 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6415 [
6416 ("fn a() { ".to_string(), None),
6417 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6418 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6419 ("\n".to_string(), None),
6420 ]
6421 );
6422 assert_eq!(
6423 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6424 [
6425 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6426 ("\n".to_string(), None),
6427 ]
6428 );
6429 });
6430
6431 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6432 // changes since the last save.
6433 buffer.update(cx, |buffer, cx| {
6434 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6435 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6436 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6437 });
6438 let change_notification_2 = fake_server
6439 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6440 .await;
6441 assert!(
6442 change_notification_2.text_document.version
6443 > change_notification_1.text_document.version
6444 );
6445
6446 // Handle out-of-order diagnostics
6447 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6448 lsp::PublishDiagnosticsParams {
6449 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6450 version: Some(change_notification_2.text_document.version),
6451 diagnostics: vec![
6452 lsp::Diagnostic {
6453 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6454 severity: Some(DiagnosticSeverity::ERROR),
6455 message: "undefined variable 'BB'".to_string(),
6456 source: Some("disk".to_string()),
6457 ..Default::default()
6458 },
6459 lsp::Diagnostic {
6460 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6461 severity: Some(DiagnosticSeverity::WARNING),
6462 message: "undefined variable 'A'".to_string(),
6463 source: Some("disk".to_string()),
6464 ..Default::default()
6465 },
6466 ],
6467 },
6468 );
6469
6470 buffer.next_notification(cx).await;
6471 buffer.read_with(cx, |buffer, _| {
6472 assert_eq!(
6473 buffer
6474 .snapshot()
6475 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6476 .collect::<Vec<_>>(),
6477 &[
6478 DiagnosticEntry {
6479 range: Point::new(2, 21)..Point::new(2, 22),
6480 diagnostic: Diagnostic {
6481 severity: DiagnosticSeverity::WARNING,
6482 message: "undefined variable 'A'".to_string(),
6483 is_disk_based: true,
6484 group_id: 6,
6485 is_primary: true,
6486 ..Default::default()
6487 }
6488 },
6489 DiagnosticEntry {
6490 range: Point::new(3, 9)..Point::new(3, 14),
6491 diagnostic: Diagnostic {
6492 severity: DiagnosticSeverity::ERROR,
6493 message: "undefined variable 'BB'".to_string(),
6494 is_disk_based: true,
6495 group_id: 5,
6496 is_primary: true,
6497 ..Default::default()
6498 },
6499 }
6500 ]
6501 );
6502 });
6503 }
6504
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are rendered: an empty range
    // must still produce a visibly highlighted chunk by "borrowing" an
    // adjacent character (see the assertions at the bottom).
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two diagnostics whose start == end: one mid-line (line 0, before
    // the ';') and one at the very end of line 1 (after the trailing space).
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
6572
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::edits_from_lsp` correctly interprets edits a
    // language server computed against an *older* document version: the edit
    // coordinates must be transformed through the buffer edits made locally
    // since that version before being applied.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version at open time; the server's edits below are
    // declared against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP positions below refer to the *original* text (version captured
    // above), not to the buffer's current contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the local edits made after
    // the captured version while still landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
6738
#[gpui::test]
async fn test_edits_from_lsp_with_adjacent_edits(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` minimizes a sprawling set of server edits
    // into the smallest equivalent buffer edits (see the asserted two-edit
    // result below), so that the final text matches while churn is minimal.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits above collapse into just two minimal edits:
        // rewriting the first import path and deleting the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
6857
6858 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6859 buffer: &Buffer,
6860 range: Range<T>,
6861 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6862 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6863 for chunk in buffer.snapshot().chunks(range, true) {
6864 if chunks.last().map_or(false, |prev_chunk| {
6865 prev_chunk.1 == chunk.diagnostic_severity
6866 }) {
6867 chunks.last_mut().unwrap().0.push_str(chunk.text);
6868 } else {
6869 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6870 }
6871 }
6872 chunks
6873 }
6874
6875 #[gpui::test]
6876 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6877 let dir = temp_tree(json!({
6878 "root": {
6879 "dir1": {},
6880 "dir2": {
6881 "dir3": {}
6882 }
6883 }
6884 }));
6885
6886 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6887 let cancel_flag = Default::default();
6888 let results = project
6889 .read_with(cx, |project, cx| {
6890 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6891 })
6892 .await;
6893
6894 assert!(results.is_empty());
6895 }
6896
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: the target file is opened in a
    // new *invisible* worktree, and that worktree is released once the last
    // handle to the definition's buffer is dropped.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Note: the project only contains b.rs; a.rs exists on disk but is outside
    // the project's visible worktrees.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Fake server resolves the definition to a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // a.rs was added as a second, non-visible worktree to host the target.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases its buffer, which in turn releases the
    // invisible worktree that was created for it.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: the project's worktrees as (absolute path, is_visible) pairs.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
6991
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion handling when the server provides `insert_text` but
    // no explicit text-edit range: the replaced range must be inferred from the
    // word prefix preceding the cursor ("fqn" in the text below).
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; awaiting
    // the handler stream below yields once one request has been answered.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `new_text` comes from `insert_text` (not the label), and the old range
    // covers the "fqn" prefix that was typed before the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
7049
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Verifies the command-based code-action flow: when resolving an action
    // yields no edits, the client must execute the action's command, and edits
    // arriving via the server's `workspace/applyEdit` request must be captured
    // into the resulting project transaction.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
7165
7166 #[gpui::test]
7167 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7168 let fs = FakeFs::new(cx.background());
7169 fs.insert_tree(
7170 "/dir",
7171 json!({
7172 "file1": "the old contents",
7173 }),
7174 )
7175 .await;
7176
7177 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7178 let buffer = project
7179 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7180 .await
7181 .unwrap();
7182 buffer
7183 .update(cx, |buffer, cx| {
7184 assert_eq!(buffer.text(), "the old contents");
7185 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7186 buffer.save(cx)
7187 })
7188 .await
7189 .unwrap();
7190
7191 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7192 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7193 }
7194
7195 #[gpui::test]
7196 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7197 let fs = FakeFs::new(cx.background());
7198 fs.insert_tree(
7199 "/dir",
7200 json!({
7201 "file1": "the old contents",
7202 }),
7203 )
7204 .await;
7205
7206 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7207 let buffer = project
7208 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7209 .await
7210 .unwrap();
7211 buffer
7212 .update(cx, |buffer, cx| {
7213 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7214 buffer.save(cx)
7215 })
7216 .await
7217 .unwrap();
7218
7219 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7220 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7221 }
7222
7223 #[gpui::test]
7224 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7225 let fs = FakeFs::new(cx.background());
7226 fs.insert_tree("/dir", json!({})).await;
7227
7228 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7229 let buffer = project.update(cx, |project, cx| {
7230 project.create_buffer("", None, cx).unwrap()
7231 });
7232 buffer.update(cx, |buffer, cx| {
7233 buffer.edit([(0..0, "abc")], cx);
7234 assert!(buffer.is_dirty());
7235 assert!(!buffer.has_conflict());
7236 });
7237 project
7238 .update(cx, |project, cx| {
7239 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7240 })
7241 .await
7242 .unwrap();
7243 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7244 buffer.read_with(cx, |buffer, cx| {
7245 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7246 assert!(!buffer.is_dirty());
7247 assert!(!buffer.has_conflict());
7248 });
7249
7250 let opened_buffer = project
7251 .update(cx, |project, cx| {
7252 project.open_local_buffer("/dir/file1", cx)
7253 })
7254 .await
7255 .unwrap();
7256 assert_eq!(opened_buffer, buffer);
7257 }
7258
7259 #[gpui::test(retries = 5)]
7260 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7261 let dir = temp_tree(json!({
7262 "a": {
7263 "file1": "",
7264 "file2": "",
7265 "file3": "",
7266 },
7267 "b": {
7268 "c": {
7269 "file4": "",
7270 "file5": "",
7271 }
7272 }
7273 }));
7274
7275 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7276 let rpc = project.read_with(cx, |p, _| p.client.clone());
7277
7278 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7279 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7280 async move { buffer.await.unwrap() }
7281 };
7282 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7283 project.read_with(cx, |project, cx| {
7284 let tree = project.worktrees(cx).next().unwrap();
7285 tree.read(cx)
7286 .entry_for_path(path)
7287 .expect(&format!("no entry for path {}", path))
7288 .id
7289 })
7290 };
7291
7292 let buffer2 = buffer_for_path("a/file2", cx).await;
7293 let buffer3 = buffer_for_path("a/file3", cx).await;
7294 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7295 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7296
7297 let file2_id = id_for_path("a/file2", &cx);
7298 let file3_id = id_for_path("a/file3", &cx);
7299 let file4_id = id_for_path("b/c/file4", &cx);
7300
7301 // Create a remote copy of this worktree.
7302 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7303 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7304 let (remote, load_task) = cx.update(|cx| {
7305 Worktree::remote(
7306 1,
7307 1,
7308 initial_snapshot.to_proto(&Default::default(), true),
7309 rpc.clone(),
7310 cx,
7311 )
7312 });
7313 // tree
7314 load_task.await;
7315
7316 cx.read(|cx| {
7317 assert!(!buffer2.read(cx).is_dirty());
7318 assert!(!buffer3.read(cx).is_dirty());
7319 assert!(!buffer4.read(cx).is_dirty());
7320 assert!(!buffer5.read(cx).is_dirty());
7321 });
7322
7323 // Rename and delete files and directories.
7324 tree.flush_fs_events(&cx).await;
7325 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7326 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7327 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7328 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7329 tree.flush_fs_events(&cx).await;
7330
7331 let expected_paths = vec![
7332 "a",
7333 "a/file1",
7334 "a/file2.new",
7335 "b",
7336 "d",
7337 "d/file3",
7338 "d/file4",
7339 ];
7340
7341 cx.read(|app| {
7342 assert_eq!(
7343 tree.read(app)
7344 .paths()
7345 .map(|p| p.to_str().unwrap())
7346 .collect::<Vec<_>>(),
7347 expected_paths
7348 );
7349
7350 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7351 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7352 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7353
7354 assert_eq!(
7355 buffer2.read(app).file().unwrap().path().as_ref(),
7356 Path::new("a/file2.new")
7357 );
7358 assert_eq!(
7359 buffer3.read(app).file().unwrap().path().as_ref(),
7360 Path::new("d/file3")
7361 );
7362 assert_eq!(
7363 buffer4.read(app).file().unwrap().path().as_ref(),
7364 Path::new("d/file4")
7365 );
7366 assert_eq!(
7367 buffer5.read(app).file().unwrap().path().as_ref(),
7368 Path::new("b/c/file5")
7369 );
7370
7371 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7372 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7373 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7374 assert!(buffer5.read(app).file().unwrap().is_deleted());
7375 });
7376
7377 // Update the remote worktree. Check that it becomes consistent with the
7378 // local worktree.
7379 remote.update(cx, |remote, cx| {
7380 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7381 &initial_snapshot,
7382 1,
7383 1,
7384 true,
7385 );
7386 remote
7387 .as_remote_mut()
7388 .unwrap()
7389 .snapshot
7390 .apply_remote_update(update_message)
7391 .unwrap();
7392
7393 assert_eq!(
7394 remote
7395 .paths()
7396 .map(|p| p.to_str().unwrap())
7397 .collect::<Vec<_>>(),
7398 expected_paths
7399 );
7400 });
7401 }
7402
7403 #[gpui::test]
7404 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7405 let fs = FakeFs::new(cx.background());
7406 fs.insert_tree(
7407 "/dir",
7408 json!({
7409 "a.txt": "a-contents",
7410 "b.txt": "b-contents",
7411 }),
7412 )
7413 .await;
7414
7415 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7416
7417 // Spawn multiple tasks to open paths, repeating some paths.
7418 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7419 (
7420 p.open_local_buffer("/dir/a.txt", cx),
7421 p.open_local_buffer("/dir/b.txt", cx),
7422 p.open_local_buffer("/dir/a.txt", cx),
7423 )
7424 });
7425
7426 let buffer_a_1 = buffer_a_1.await.unwrap();
7427 let buffer_a_2 = buffer_a_2.await.unwrap();
7428 let buffer_b = buffer_b.await.unwrap();
7429 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7430 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7431
7432 // There is only one buffer per path.
7433 let buffer_a_id = buffer_a_1.id();
7434 assert_eq!(buffer_a_2.id(), buffer_a_id);
7435
7436 // Open the same path again while it is still open.
7437 drop(buffer_a_1);
7438 let buffer_a_3 = project
7439 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7440 .await
7441 .unwrap();
7442
7443 // There's still only one buffer per path.
7444 assert_eq!(buffer_a_3.id(), buffer_a_id);
7445 }
7446
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies dirty-state tracking and the exact event sequences emitted on
    // edit, save, and on-disk deletion of the backing file.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collects every non-operation buffer event for later assertions.
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit([(1..2, "")], cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::Dirtied]
        );
        events.borrow_mut().clear();
        // Simulate a save completing without actually writing to disk.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit([(1..1, "B")], cx);
        buffer.edit([(2..2, "D")], cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first post-save edit emits Dirtied; subsequent edits while
        // already dirty emit Edited alone.
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::Dirtied,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // TODO - currently, after restoring the buffer to its
        // previously-saved state, the buffer is still considered dirty.
        buffer.edit([(1..3, "")], cx);
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
    });

    assert_eq!(*events.borrow(), &[language::Event::Edited]);

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    buffer2.condition(&cx, |b, _| b.is_dirty()).await;
    assert_eq!(
        *events.borrow(),
        &[language::Event::Dirtied, language::Event::FileHandleChanged]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], cx);
    });
    events.borrow_mut().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    buffer3
        .condition(&cx, |_, _| !events.borrow().is_empty())
        .await;
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
7580
7581 #[gpui::test]
7582 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7583 let initial_contents = "aaa\nbbbbb\nc\n";
7584 let fs = FakeFs::new(cx.background());
7585 fs.insert_tree(
7586 "/dir",
7587 json!({
7588 "the-file": initial_contents,
7589 }),
7590 )
7591 .await;
7592 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7593 let buffer = project
7594 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7595 .await
7596 .unwrap();
7597
7598 let anchors = (0..3)
7599 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7600 .collect::<Vec<_>>();
7601
7602 // Change the file on disk, adding two new lines of text, and removing
7603 // one line.
7604 buffer.read_with(cx, |buffer, _| {
7605 assert!(!buffer.is_dirty());
7606 assert!(!buffer.has_conflict());
7607 });
7608 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7609 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7610 .await
7611 .unwrap();
7612
7613 // Because the buffer was not modified, it is reloaded from disk. Its
7614 // contents are edited according to the diff between the old and new
7615 // file contents.
7616 buffer
7617 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7618 .await;
7619
7620 buffer.update(cx, |buffer, _| {
7621 assert_eq!(buffer.text(), new_contents);
7622 assert!(!buffer.is_dirty());
7623 assert!(!buffer.has_conflict());
7624
7625 let anchor_positions = anchors
7626 .iter()
7627 .map(|anchor| anchor.to_point(&*buffer))
7628 .collect::<Vec<_>>();
7629 assert_eq!(
7630 anchor_positions,
7631 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7632 );
7633 });
7634
7635 // Modify the buffer
7636 buffer.update(cx, |buffer, cx| {
7637 buffer.edit([(0..0, " ")], cx);
7638 assert!(buffer.is_dirty());
7639 assert!(!buffer.has_conflict());
7640 });
7641
7642 // Change the file on disk again, adding blank lines to the beginning.
7643 fs.save(
7644 "/dir/the-file".as_ref(),
7645 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7646 )
7647 .await
7648 .unwrap();
7649
7650 // Because the buffer is modified, it doesn't reload from disk, but is
7651 // marked as having a conflict.
7652 buffer
7653 .condition(&cx, |buffer, _| buffer.has_conflict())
7654 .await;
7655 }
7656
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // A file whose contents would plausibly produce a borrow error
        // (pushing into `v` while iterating `&v`); the diagnostics below are
        // hand-crafted to match it.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate an LSP `textDocument/publishDiagnostics` message with two
        // logical diagnostic groups:
        // - "error 1" (WARNING) with one supporting hint, all on line 1.
        // - "error 2" (ERROR) on line 2 with two supporting hints on line 1.
        // Each hint is published as its own HINT-severity diagnostic whose
        // `related_information` points back at its primary ("original
        // diagnostic"), and each primary lists its hints the same way. The
        // group assertions below verify that `update_diagnostics` uses this
        // cross-linking to assign group ids.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                // Primary diagnostic of group 0.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                // Supporting hint for group 0, pointing back at "error 1".
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Primary diagnostic of group 1, listing both of its hints.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                // First supporting hint for group 1.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Second supporting hint for group 1.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All five entries are present, ordered by position, with group ids
        // assigned (0 for "error 1", 1 for "error 2") and `is_primary` set
        // only on the two primary diagnostics.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying a single group returns only that group's entries.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
7907
7908 #[gpui::test]
7909 async fn test_rename(cx: &mut gpui::TestAppContext) {
7910 cx.foreground().forbid_parking();
7911
7912 let mut language = Language::new(
7913 LanguageConfig {
7914 name: "Rust".into(),
7915 path_suffixes: vec!["rs".to_string()],
7916 ..Default::default()
7917 },
7918 Some(tree_sitter_rust::language()),
7919 );
7920 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7921 capabilities: lsp::ServerCapabilities {
7922 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7923 prepare_provider: Some(true),
7924 work_done_progress_options: Default::default(),
7925 })),
7926 ..Default::default()
7927 },
7928 ..Default::default()
7929 });
7930
7931 let fs = FakeFs::new(cx.background());
7932 fs.insert_tree(
7933 "/dir",
7934 json!({
7935 "one.rs": "const ONE: usize = 1;",
7936 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7937 }),
7938 )
7939 .await;
7940
7941 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7942 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7943 let buffer = project
7944 .update(cx, |project, cx| {
7945 project.open_local_buffer("/dir/one.rs", cx)
7946 })
7947 .await
7948 .unwrap();
7949
7950 let fake_server = fake_servers.next().await.unwrap();
7951
7952 let response = project.update(cx, |project, cx| {
7953 project.prepare_rename(buffer.clone(), 7, cx)
7954 });
7955 fake_server
7956 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7957 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7958 assert_eq!(params.position, lsp::Position::new(0, 7));
7959 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7960 lsp::Position::new(0, 6),
7961 lsp::Position::new(0, 9),
7962 ))))
7963 })
7964 .next()
7965 .await
7966 .unwrap();
7967 let range = response.await.unwrap().unwrap();
7968 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7969 assert_eq!(range, 6..9);
7970
7971 let response = project.update(cx, |project, cx| {
7972 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7973 });
7974 fake_server
7975 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7976 assert_eq!(
7977 params.text_document_position.text_document.uri.as_str(),
7978 "file:///dir/one.rs"
7979 );
7980 assert_eq!(
7981 params.text_document_position.position,
7982 lsp::Position::new(0, 7)
7983 );
7984 assert_eq!(params.new_name, "THREE");
7985 Ok(Some(lsp::WorkspaceEdit {
7986 changes: Some(
7987 [
7988 (
7989 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7990 vec![lsp::TextEdit::new(
7991 lsp::Range::new(
7992 lsp::Position::new(0, 6),
7993 lsp::Position::new(0, 9),
7994 ),
7995 "THREE".to_string(),
7996 )],
7997 ),
7998 (
7999 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8000 vec![
8001 lsp::TextEdit::new(
8002 lsp::Range::new(
8003 lsp::Position::new(0, 24),
8004 lsp::Position::new(0, 27),
8005 ),
8006 "THREE".to_string(),
8007 ),
8008 lsp::TextEdit::new(
8009 lsp::Range::new(
8010 lsp::Position::new(0, 35),
8011 lsp::Position::new(0, 38),
8012 ),
8013 "THREE".to_string(),
8014 ),
8015 ],
8016 ),
8017 ]
8018 .into_iter()
8019 .collect(),
8020 ),
8021 ..Default::default()
8022 }))
8023 })
8024 .next()
8025 .await
8026 .unwrap();
8027 let mut transaction = response.await.unwrap().0;
8028 assert_eq!(transaction.len(), 2);
8029 assert_eq!(
8030 transaction
8031 .remove_entry(&buffer)
8032 .unwrap()
8033 .0
8034 .read_with(cx, |buffer, _| buffer.text()),
8035 "const THREE: usize = 1;"
8036 );
8037 assert_eq!(
8038 transaction
8039 .into_keys()
8040 .next()
8041 .unwrap()
8042 .read_with(cx, |buffer, _| buffer.text()),
8043 "const TWO: usize = one::THREE + one::THREE;"
8044 );
8045 }
8046
8047 #[gpui::test]
8048 async fn test_search(cx: &mut gpui::TestAppContext) {
8049 let fs = FakeFs::new(cx.background());
8050 fs.insert_tree(
8051 "/dir",
8052 json!({
8053 "one.rs": "const ONE: usize = 1;",
8054 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8055 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8056 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8057 }),
8058 )
8059 .await;
8060 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8061 assert_eq!(
8062 search(&project, SearchQuery::text("TWO", false, true), cx)
8063 .await
8064 .unwrap(),
8065 HashMap::from_iter([
8066 ("two.rs".to_string(), vec![6..9]),
8067 ("three.rs".to_string(), vec![37..40])
8068 ])
8069 );
8070
8071 let buffer_4 = project
8072 .update(cx, |project, cx| {
8073 project.open_local_buffer("/dir/four.rs", cx)
8074 })
8075 .await
8076 .unwrap();
8077 buffer_4.update(cx, |buffer, cx| {
8078 let text = "two::TWO";
8079 buffer.edit([(20..28, text), (31..43, text)], cx);
8080 });
8081
8082 assert_eq!(
8083 search(&project, SearchQuery::text("TWO", false, true), cx)
8084 .await
8085 .unwrap(),
8086 HashMap::from_iter([
8087 ("two.rs".to_string(), vec![6..9]),
8088 ("three.rs".to_string(), vec![37..40]),
8089 ("four.rs".to_string(), vec![25..28, 36..39])
8090 ])
8091 );
8092
8093 async fn search(
8094 project: &ModelHandle<Project>,
8095 query: SearchQuery,
8096 cx: &mut gpui::TestAppContext,
8097 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8098 let results = project
8099 .update(cx, |project, cx| project.search(query, cx))
8100 .await?;
8101
8102 Ok(results
8103 .into_iter()
8104 .map(|(buffer, ranges)| {
8105 buffer.read_with(cx, |buffer, _| {
8106 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8107 let ranges = ranges
8108 .into_iter()
8109 .map(|range| range.to_offset(buffer))
8110 .collect::<Vec<_>>();
8111 (path, ranges)
8112 })
8113 })
8114 .collect())
8115 }
8116 }
8117}