1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::stream::Stream;
29use postage::watch;
30use rand::prelude::*;
31use search::SearchQuery;
32use serde::Serialize;
33use settings::Settings;
34use sha2::{Digest, Sha256};
35use similar::{ChangeTag, TextDiff};
36use std::{
37 cell::RefCell,
38 cmp::{self, Ordering},
39 convert::TryInto,
40 ffi::OsString,
41 hash::Hash,
42 mem,
43 ops::Range,
44 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
45 path::{Component, Path, PathBuf},
46 rc::Rc,
47 sync::{
48 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
49 Arc,
50 },
51 time::Instant,
52};
53use thiserror::Error;
54use util::{post_inc, ResultExt, TryFutureExt as _};
55
56pub use fs::*;
57pub use worktree::*;
58
59pub trait Item: Entity {
60 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
61}
62
63#[derive(Default)]
64pub struct ProjectStore {
65 projects: Vec<WeakModelHandle<Project>>,
66}
67
68pub struct Project {
69 worktrees: Vec<WorktreeHandle>,
70 active_entry: Option<ProjectEntryId>,
71 languages: Arc<LanguageRegistry>,
72 language_servers:
73 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
74 started_language_servers:
75 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
76 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
77 language_server_settings: Arc<Mutex<serde_json::Value>>,
78 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
79 next_language_server_id: usize,
80 client: Arc<client::Client>,
81 next_entry_id: Arc<AtomicUsize>,
82 next_diagnostic_group_id: usize,
83 user_store: ModelHandle<UserStore>,
84 project_store: ModelHandle<ProjectStore>,
85 fs: Arc<dyn Fs>,
86 client_state: ProjectClientState,
87 collaborators: HashMap<PeerId, Collaborator>,
88 subscriptions: Vec<client::Subscription>,
89 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
90 shared_buffers: HashMap<PeerId, HashSet<u64>>,
91 loading_buffers: HashMap<
92 ProjectPath,
93 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
94 >,
95 loading_local_worktrees:
96 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
97 opened_buffers: HashMap<u64, OpenBuffer>,
98 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
99 nonce: u128,
100}
101
102#[derive(Error, Debug)]
103pub enum JoinProjectError {
104 #[error("host declined join request")]
105 HostDeclined,
106 #[error("host closed the project")]
107 HostClosedProject,
108 #[error("host went offline")]
109 HostWentOffline,
110 #[error("{0}")]
111 Other(#[from] anyhow::Error),
112}
113
114enum OpenBuffer {
115 Strong(ModelHandle<Buffer>),
116 Weak(WeakModelHandle<Buffer>),
117 Loading(Vec<Operation>),
118}
119
120enum WorktreeHandle {
121 Strong(ModelHandle<Worktree>),
122 Weak(WeakModelHandle<Worktree>),
123}
124
125enum ProjectClientState {
126 Local {
127 is_shared: bool,
128 remote_id_tx: watch::Sender<Option<u64>>,
129 remote_id_rx: watch::Receiver<Option<u64>>,
130 public_tx: watch::Sender<bool>,
131 public_rx: watch::Receiver<bool>,
132 _maintain_remote_id_task: Task<Option<()>>,
133 },
134 Remote {
135 sharing_has_stopped: bool,
136 remote_id: u64,
137 replica_id: ReplicaId,
138 _detect_unshare_task: Task<Option<()>>,
139 },
140}
141
142#[derive(Clone, Debug)]
143pub struct Collaborator {
144 pub user: Arc<User>,
145 pub peer_id: PeerId,
146 pub replica_id: ReplicaId,
147}
148
149#[derive(Clone, Debug, PartialEq, Eq)]
150pub enum Event {
151 ActiveEntryChanged(Option<ProjectEntryId>),
152 WorktreeAdded,
153 WorktreeRemoved(WorktreeId),
154 DiskBasedDiagnosticsStarted,
155 DiskBasedDiagnosticsUpdated,
156 DiskBasedDiagnosticsFinished,
157 DiagnosticsUpdated(ProjectPath),
158 RemoteIdChanged(Option<u64>),
159 CollaboratorLeft(PeerId),
160 ContactRequestedJoin(Arc<User>),
161 ContactCancelledJoinRequest(Arc<User>),
162}
163
164#[derive(Serialize)]
165pub struct LanguageServerStatus {
166 pub name: String,
167 pub pending_work: BTreeMap<String, LanguageServerProgress>,
168 pub pending_diagnostic_updates: isize,
169}
170
171#[derive(Clone, Debug, Serialize)]
172pub struct LanguageServerProgress {
173 pub message: Option<String>,
174 pub percentage: Option<usize>,
175 #[serde(skip_serializing)]
176 pub last_update_at: Instant,
177}
178
179#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
180pub struct ProjectPath {
181 pub worktree_id: WorktreeId,
182 pub path: Arc<Path>,
183}
184
185#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
186pub struct DiagnosticSummary {
187 pub error_count: usize,
188 pub warning_count: usize,
189}
190
191#[derive(Debug)]
192pub struct Location {
193 pub buffer: ModelHandle<Buffer>,
194 pub range: Range<language::Anchor>,
195}
196
197#[derive(Debug)]
198pub struct DocumentHighlight {
199 pub range: Range<language::Anchor>,
200 pub kind: DocumentHighlightKind,
201}
202
203#[derive(Clone, Debug)]
204pub struct Symbol {
205 pub source_worktree_id: WorktreeId,
206 pub worktree_id: WorktreeId,
207 pub language_server_name: LanguageServerName,
208 pub path: PathBuf,
209 pub label: CodeLabel,
210 pub name: String,
211 pub kind: lsp::SymbolKind,
212 pub range: Range<PointUtf16>,
213 pub signature: [u8; 32],
214}
215
216#[derive(Default)]
217pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
218
219impl DiagnosticSummary {
220 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
221 let mut this = Self {
222 error_count: 0,
223 warning_count: 0,
224 };
225
226 for entry in diagnostics {
227 if entry.diagnostic.is_primary {
228 match entry.diagnostic.severity {
229 DiagnosticSeverity::ERROR => this.error_count += 1,
230 DiagnosticSeverity::WARNING => this.warning_count += 1,
231 _ => {}
232 }
233 }
234 }
235
236 this
237 }
238
239 pub fn is_empty(&self) -> bool {
240 self.error_count == 0 && self.warning_count == 0
241 }
242
243 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
244 proto::DiagnosticSummary {
245 path: path.to_string_lossy().to_string(),
246 error_count: self.error_count as u32,
247 warning_count: self.warning_count as u32,
248 }
249 }
250}
251
252#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
253pub struct ProjectEntryId(usize);
254
255impl ProjectEntryId {
256 pub const MAX: Self = Self(usize::MAX);
257
258 pub fn new(counter: &AtomicUsize) -> Self {
259 Self(counter.fetch_add(1, SeqCst))
260 }
261
262 pub fn from_proto(id: u64) -> Self {
263 Self(id as usize)
264 }
265
266 pub fn to_proto(&self) -> u64 {
267 self.0 as u64
268 }
269
270 pub fn to_usize(&self) -> usize {
271 self.0
272 }
273}
274
275impl Project {
276 pub fn init(client: &Arc<Client>) {
277 client.add_model_message_handler(Self::handle_request_join_project);
278 client.add_model_message_handler(Self::handle_add_collaborator);
279 client.add_model_message_handler(Self::handle_buffer_reloaded);
280 client.add_model_message_handler(Self::handle_buffer_saved);
281 client.add_model_message_handler(Self::handle_start_language_server);
282 client.add_model_message_handler(Self::handle_update_language_server);
283 client.add_model_message_handler(Self::handle_remove_collaborator);
284 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
285 client.add_model_message_handler(Self::handle_update_project);
286 client.add_model_message_handler(Self::handle_unregister_project);
287 client.add_model_message_handler(Self::handle_project_unshared);
288 client.add_model_message_handler(Self::handle_update_buffer_file);
289 client.add_model_message_handler(Self::handle_update_buffer);
290 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
291 client.add_model_message_handler(Self::handle_update_worktree);
292 client.add_model_request_handler(Self::handle_create_project_entry);
293 client.add_model_request_handler(Self::handle_rename_project_entry);
294 client.add_model_request_handler(Self::handle_copy_project_entry);
295 client.add_model_request_handler(Self::handle_delete_project_entry);
296 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
297 client.add_model_request_handler(Self::handle_apply_code_action);
298 client.add_model_request_handler(Self::handle_reload_buffers);
299 client.add_model_request_handler(Self::handle_format_buffers);
300 client.add_model_request_handler(Self::handle_get_code_actions);
301 client.add_model_request_handler(Self::handle_get_completions);
302 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
303 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
304 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
305 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
306 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
307 client.add_model_request_handler(Self::handle_search_project);
308 client.add_model_request_handler(Self::handle_get_project_symbols);
309 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
310 client.add_model_request_handler(Self::handle_open_buffer_by_id);
311 client.add_model_request_handler(Self::handle_open_buffer_by_path);
312 client.add_model_request_handler(Self::handle_save_buffer);
313 }
314
315 pub fn local(
316 public: bool,
317 client: Arc<Client>,
318 user_store: ModelHandle<UserStore>,
319 project_store: ModelHandle<ProjectStore>,
320 languages: Arc<LanguageRegistry>,
321 fs: Arc<dyn Fs>,
322 cx: &mut MutableAppContext,
323 ) -> ModelHandle<Self> {
324 cx.add_model(|cx: &mut ModelContext<Self>| {
325 let (public_tx, public_rx) = watch::channel_with(public);
326 let (remote_id_tx, remote_id_rx) = watch::channel();
327 let _maintain_remote_id_task = cx.spawn_weak({
328 let status_rx = client.clone().status();
329 let public_rx = public_rx.clone();
330 move |this, mut cx| async move {
331 let mut stream = Stream::map(status_rx.clone(), drop)
332 .merge(Stream::map(public_rx.clone(), drop));
333 while stream.recv().await.is_some() {
334 let this = this.upgrade(&cx)?;
335 if status_rx.borrow().is_connected() && *public_rx.borrow() {
336 this.update(&mut cx, |this, cx| this.register(cx))
337 .await
338 .log_err()?;
339 } else {
340 this.update(&mut cx, |this, cx| this.unregister(cx))
341 .await
342 .log_err();
343 }
344 }
345 None
346 }
347 });
348
349 let handle = cx.weak_handle();
350 project_store.update(cx, |store, cx| store.add_project(handle, cx));
351
352 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
353 Self {
354 worktrees: Default::default(),
355 collaborators: Default::default(),
356 opened_buffers: Default::default(),
357 shared_buffers: Default::default(),
358 loading_buffers: Default::default(),
359 loading_local_worktrees: Default::default(),
360 buffer_snapshots: Default::default(),
361 client_state: ProjectClientState::Local {
362 is_shared: false,
363 remote_id_tx,
364 remote_id_rx,
365 public_tx,
366 public_rx,
367 _maintain_remote_id_task,
368 },
369 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
370 subscriptions: Vec::new(),
371 active_entry: None,
372 languages,
373 client,
374 user_store,
375 project_store,
376 fs,
377 next_entry_id: Default::default(),
378 next_diagnostic_group_id: Default::default(),
379 language_servers: Default::default(),
380 started_language_servers: Default::default(),
381 language_server_statuses: Default::default(),
382 last_workspace_edits_by_language_server: Default::default(),
383 language_server_settings: Default::default(),
384 next_language_server_id: 0,
385 nonce: StdRng::from_entropy().gen(),
386 }
387 })
388 }
389
390 pub async fn remote(
391 remote_id: u64,
392 client: Arc<Client>,
393 user_store: ModelHandle<UserStore>,
394 project_store: ModelHandle<ProjectStore>,
395 languages: Arc<LanguageRegistry>,
396 fs: Arc<dyn Fs>,
397 mut cx: AsyncAppContext,
398 ) -> Result<ModelHandle<Self>, JoinProjectError> {
399 client.authenticate_and_connect(true, &cx).await?;
400
401 let response = client
402 .request(proto::JoinProject {
403 project_id: remote_id,
404 })
405 .await?;
406
407 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
408 proto::join_project_response::Variant::Accept(response) => response,
409 proto::join_project_response::Variant::Decline(decline) => {
410 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
411 Some(proto::join_project_response::decline::Reason::Declined) => {
412 Err(JoinProjectError::HostDeclined)?
413 }
414 Some(proto::join_project_response::decline::Reason::Closed) => {
415 Err(JoinProjectError::HostClosedProject)?
416 }
417 Some(proto::join_project_response::decline::Reason::WentOffline) => {
418 Err(JoinProjectError::HostWentOffline)?
419 }
420 None => Err(anyhow!("missing decline reason"))?,
421 }
422 }
423 };
424
425 let replica_id = response.replica_id as ReplicaId;
426
427 let mut worktrees = Vec::new();
428 for worktree in response.worktrees {
429 let (worktree, load_task) = cx
430 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
431 worktrees.push(worktree);
432 load_task.detach();
433 }
434
435 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
436 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
437 let handle = cx.weak_handle();
438 project_store.update(cx, |store, cx| store.add_project(handle, cx));
439
440 let mut this = Self {
441 worktrees: Vec::new(),
442 loading_buffers: Default::default(),
443 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
444 shared_buffers: Default::default(),
445 loading_local_worktrees: Default::default(),
446 active_entry: None,
447 collaborators: Default::default(),
448 languages,
449 user_store: user_store.clone(),
450 project_store,
451 fs,
452 next_entry_id: Default::default(),
453 next_diagnostic_group_id: Default::default(),
454 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
455 client: client.clone(),
456 client_state: ProjectClientState::Remote {
457 sharing_has_stopped: false,
458 remote_id,
459 replica_id,
460 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
461 async move {
462 let mut status = client.status();
463 let is_connected =
464 status.next().await.map_or(false, |s| s.is_connected());
465 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
466 if !is_connected || status.next().await.is_some() {
467 if let Some(this) = this.upgrade(&cx) {
468 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
469 }
470 }
471 Ok(())
472 }
473 .log_err()
474 }),
475 },
476 language_servers: Default::default(),
477 started_language_servers: Default::default(),
478 language_server_settings: Default::default(),
479 language_server_statuses: response
480 .language_servers
481 .into_iter()
482 .map(|server| {
483 (
484 server.id as usize,
485 LanguageServerStatus {
486 name: server.name,
487 pending_work: Default::default(),
488 pending_diagnostic_updates: 0,
489 },
490 )
491 })
492 .collect(),
493 last_workspace_edits_by_language_server: Default::default(),
494 next_language_server_id: 0,
495 opened_buffers: Default::default(),
496 buffer_snapshots: Default::default(),
497 nonce: StdRng::from_entropy().gen(),
498 };
499 for worktree in worktrees {
500 this.add_worktree(&worktree, cx);
501 }
502 this
503 });
504
505 let user_ids = response
506 .collaborators
507 .iter()
508 .map(|peer| peer.user_id)
509 .collect();
510 user_store
511 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
512 .await?;
513 let mut collaborators = HashMap::default();
514 for message in response.collaborators {
515 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
516 collaborators.insert(collaborator.peer_id, collaborator);
517 }
518
519 this.update(&mut cx, |this, _| {
520 this.collaborators = collaborators;
521 });
522
523 Ok(this)
524 }
525
526 #[cfg(any(test, feature = "test-support"))]
527 pub async fn test(
528 fs: Arc<dyn Fs>,
529 root_paths: impl IntoIterator<Item = &Path>,
530 cx: &mut gpui::TestAppContext,
531 ) -> ModelHandle<Project> {
532 let languages = Arc::new(LanguageRegistry::test());
533 let http_client = client::test::FakeHttpClient::with_404_response();
534 let client = client::Client::new(http_client.clone());
535 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
536 let project_store = cx.add_model(|_| ProjectStore::default());
537 let project = cx.update(|cx| {
538 Project::local(true, client, user_store, project_store, languages, fs, cx)
539 });
540 for path in root_paths {
541 let (tree, _) = project
542 .update(cx, |project, cx| {
543 project.find_or_create_local_worktree(path, true, cx)
544 })
545 .await
546 .unwrap();
547 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
548 .await;
549 }
550 project
551 }
552
553 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
554 self.opened_buffers
555 .get(&remote_id)
556 .and_then(|buffer| buffer.upgrade(cx))
557 }
558
559 pub fn languages(&self) -> &Arc<LanguageRegistry> {
560 &self.languages
561 }
562
563 pub fn client(&self) -> Arc<Client> {
564 self.client.clone()
565 }
566
567 pub fn user_store(&self) -> ModelHandle<UserStore> {
568 self.user_store.clone()
569 }
570
571 #[cfg(any(test, feature = "test-support"))]
572 pub fn check_invariants(&self, cx: &AppContext) {
573 if self.is_local() {
574 let mut worktree_root_paths = HashMap::default();
575 for worktree in self.worktrees(cx) {
576 let worktree = worktree.read(cx);
577 let abs_path = worktree.as_local().unwrap().abs_path().clone();
578 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
579 assert_eq!(
580 prev_worktree_id,
581 None,
582 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
583 abs_path,
584 worktree.id(),
585 prev_worktree_id
586 )
587 }
588 } else {
589 let replica_id = self.replica_id();
590 for buffer in self.opened_buffers.values() {
591 if let Some(buffer) = buffer.upgrade(cx) {
592 let buffer = buffer.read(cx);
593 assert_eq!(
594 buffer.deferred_ops_len(),
595 0,
596 "replica {}, buffer {} has deferred operations",
597 replica_id,
598 buffer.remote_id()
599 );
600 }
601 }
602 }
603 }
604
605 #[cfg(any(test, feature = "test-support"))]
606 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
607 let path = path.into();
608 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
609 self.opened_buffers.iter().any(|(_, buffer)| {
610 if let Some(buffer) = buffer.upgrade(cx) {
611 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
612 if file.worktree == worktree && file.path() == &path.path {
613 return true;
614 }
615 }
616 }
617 false
618 })
619 } else {
620 false
621 }
622 }
623
624 pub fn fs(&self) -> &Arc<dyn Fs> {
625 &self.fs
626 }
627
628 pub fn set_public(&mut self, is_public: bool, cx: &mut ModelContext<Self>) {
629 if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
630 *public_tx.borrow_mut() = is_public;
631 self.metadata_changed(cx);
632 }
633 }
634
635 pub fn is_public(&self) -> bool {
636 match &self.client_state {
637 ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
638 ProjectClientState::Remote { .. } => true,
639 }
640 }
641
642 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
643 self.unshared(cx);
644 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
645 if let Some(remote_id) = *remote_id_rx.borrow() {
646 let request = self.client.request(proto::UnregisterProject {
647 project_id: remote_id,
648 });
649 return cx.spawn(|this, mut cx| async move {
650 let response = request.await;
651 this.update(&mut cx, |this, cx| {
652 if let ProjectClientState::Local { remote_id_tx, .. } =
653 &mut this.client_state
654 {
655 *remote_id_tx.borrow_mut() = None;
656 }
657 this.subscriptions.clear();
658 this.metadata_changed(cx);
659 });
660 response.map(drop)
661 });
662 }
663 }
664 Task::ready(Ok(()))
665 }
666
667 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
668 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
669 if remote_id_rx.borrow().is_some() {
670 return Task::ready(Ok(()));
671 }
672 }
673
674 let response = self.client.request(proto::RegisterProject {});
675 cx.spawn(|this, mut cx| async move {
676 let remote_id = response.await?.project_id;
677 this.update(&mut cx, |this, cx| {
678 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
679 *remote_id_tx.borrow_mut() = Some(remote_id);
680 }
681
682 this.metadata_changed(cx);
683 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
684 this.subscriptions
685 .push(this.client.add_model_for_remote_entity(remote_id, cx));
686 Ok(())
687 })
688 })
689 }
690
691 pub fn remote_id(&self) -> Option<u64> {
692 match &self.client_state {
693 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
694 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
695 }
696 }
697
698 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
699 let mut id = None;
700 let mut watch = None;
701 match &self.client_state {
702 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
703 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
704 }
705
706 async move {
707 if let Some(id) = id {
708 return id;
709 }
710 let mut watch = watch.unwrap();
711 loop {
712 let id = *watch.borrow();
713 if let Some(id) = id {
714 return id;
715 }
716 watch.next().await;
717 }
718 }
719 }
720
721 pub fn shared_remote_id(&self) -> Option<u64> {
722 match &self.client_state {
723 ProjectClientState::Local {
724 remote_id_rx,
725 is_shared,
726 ..
727 } => {
728 if *is_shared {
729 *remote_id_rx.borrow()
730 } else {
731 None
732 }
733 }
734 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
735 }
736 }
737
738 pub fn replica_id(&self) -> ReplicaId {
739 match &self.client_state {
740 ProjectClientState::Local { .. } => 0,
741 ProjectClientState::Remote { replica_id, .. } => *replica_id,
742 }
743 }
744
745 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
746 cx.notify();
747 self.project_store.update(cx, |_, cx| cx.notify());
748
749 if let ProjectClientState::Local {
750 remote_id_rx,
751 public_rx,
752 ..
753 } = &self.client_state
754 {
755 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *public_rx.borrow()) {
756 self.client
757 .send(proto::UpdateProject {
758 project_id,
759 worktrees: self
760 .worktrees
761 .iter()
762 .filter_map(|worktree| {
763 worktree.upgrade(&cx).map(|worktree| {
764 worktree.read(cx).as_local().unwrap().metadata_proto()
765 })
766 })
767 .collect(),
768 })
769 .log_err();
770 }
771 }
772 }
773
774 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
775 &self.collaborators
776 }
777
778 pub fn worktrees<'a>(
779 &'a self,
780 cx: &'a AppContext,
781 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
782 self.worktrees
783 .iter()
784 .filter_map(move |worktree| worktree.upgrade(cx))
785 }
786
787 pub fn visible_worktrees<'a>(
788 &'a self,
789 cx: &'a AppContext,
790 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
791 self.worktrees.iter().filter_map(|worktree| {
792 worktree.upgrade(cx).and_then(|worktree| {
793 if worktree.read(cx).is_visible() {
794 Some(worktree)
795 } else {
796 None
797 }
798 })
799 })
800 }
801
802 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
803 self.visible_worktrees(cx)
804 .map(|tree| tree.read(cx).root_name())
805 }
806
807 pub fn worktree_for_id(
808 &self,
809 id: WorktreeId,
810 cx: &AppContext,
811 ) -> Option<ModelHandle<Worktree>> {
812 self.worktrees(cx)
813 .find(|worktree| worktree.read(cx).id() == id)
814 }
815
816 pub fn worktree_for_entry(
817 &self,
818 entry_id: ProjectEntryId,
819 cx: &AppContext,
820 ) -> Option<ModelHandle<Worktree>> {
821 self.worktrees(cx)
822 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
823 }
824
825 pub fn worktree_id_for_entry(
826 &self,
827 entry_id: ProjectEntryId,
828 cx: &AppContext,
829 ) -> Option<WorktreeId> {
830 self.worktree_for_entry(entry_id, cx)
831 .map(|worktree| worktree.read(cx).id())
832 }
833
834 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
835 paths.iter().all(|path| self.contains_path(&path, cx))
836 }
837
838 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
839 for worktree in self.worktrees(cx) {
840 let worktree = worktree.read(cx).as_local();
841 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
842 return true;
843 }
844 }
845 false
846 }
847
848 pub fn create_entry(
849 &mut self,
850 project_path: impl Into<ProjectPath>,
851 is_directory: bool,
852 cx: &mut ModelContext<Self>,
853 ) -> Option<Task<Result<Entry>>> {
854 let project_path = project_path.into();
855 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
856 if self.is_local() {
857 Some(worktree.update(cx, |worktree, cx| {
858 worktree
859 .as_local_mut()
860 .unwrap()
861 .create_entry(project_path.path, is_directory, cx)
862 }))
863 } else {
864 let client = self.client.clone();
865 let project_id = self.remote_id().unwrap();
866 Some(cx.spawn_weak(|_, mut cx| async move {
867 let response = client
868 .request(proto::CreateProjectEntry {
869 worktree_id: project_path.worktree_id.to_proto(),
870 project_id,
871 path: project_path.path.as_os_str().as_bytes().to_vec(),
872 is_directory,
873 })
874 .await?;
875 let entry = response
876 .entry
877 .ok_or_else(|| anyhow!("missing entry in response"))?;
878 worktree
879 .update(&mut cx, |worktree, cx| {
880 worktree.as_remote().unwrap().insert_entry(
881 entry,
882 response.worktree_scan_id as usize,
883 cx,
884 )
885 })
886 .await
887 }))
888 }
889 }
890
891 pub fn copy_entry(
892 &mut self,
893 entry_id: ProjectEntryId,
894 new_path: impl Into<Arc<Path>>,
895 cx: &mut ModelContext<Self>,
896 ) -> Option<Task<Result<Entry>>> {
897 let worktree = self.worktree_for_entry(entry_id, cx)?;
898 let new_path = new_path.into();
899 if self.is_local() {
900 worktree.update(cx, |worktree, cx| {
901 worktree
902 .as_local_mut()
903 .unwrap()
904 .copy_entry(entry_id, new_path, cx)
905 })
906 } else {
907 let client = self.client.clone();
908 let project_id = self.remote_id().unwrap();
909
910 Some(cx.spawn_weak(|_, mut cx| async move {
911 let response = client
912 .request(proto::CopyProjectEntry {
913 project_id,
914 entry_id: entry_id.to_proto(),
915 new_path: new_path.as_os_str().as_bytes().to_vec(),
916 })
917 .await?;
918 let entry = response
919 .entry
920 .ok_or_else(|| anyhow!("missing entry in response"))?;
921 worktree
922 .update(&mut cx, |worktree, cx| {
923 worktree.as_remote().unwrap().insert_entry(
924 entry,
925 response.worktree_scan_id as usize,
926 cx,
927 )
928 })
929 .await
930 }))
931 }
932 }
933
934 pub fn rename_entry(
935 &mut self,
936 entry_id: ProjectEntryId,
937 new_path: impl Into<Arc<Path>>,
938 cx: &mut ModelContext<Self>,
939 ) -> Option<Task<Result<Entry>>> {
940 let worktree = self.worktree_for_entry(entry_id, cx)?;
941 let new_path = new_path.into();
942 if self.is_local() {
943 worktree.update(cx, |worktree, cx| {
944 worktree
945 .as_local_mut()
946 .unwrap()
947 .rename_entry(entry_id, new_path, cx)
948 })
949 } else {
950 let client = self.client.clone();
951 let project_id = self.remote_id().unwrap();
952
953 Some(cx.spawn_weak(|_, mut cx| async move {
954 let response = client
955 .request(proto::RenameProjectEntry {
956 project_id,
957 entry_id: entry_id.to_proto(),
958 new_path: new_path.as_os_str().as_bytes().to_vec(),
959 })
960 .await?;
961 let entry = response
962 .entry
963 .ok_or_else(|| anyhow!("missing entry in response"))?;
964 worktree
965 .update(&mut cx, |worktree, cx| {
966 worktree.as_remote().unwrap().insert_entry(
967 entry,
968 response.worktree_scan_id as usize,
969 cx,
970 )
971 })
972 .await
973 }))
974 }
975 }
976
977 pub fn delete_entry(
978 &mut self,
979 entry_id: ProjectEntryId,
980 cx: &mut ModelContext<Self>,
981 ) -> Option<Task<Result<()>>> {
982 let worktree = self.worktree_for_entry(entry_id, cx)?;
983 if self.is_local() {
984 worktree.update(cx, |worktree, cx| {
985 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
986 })
987 } else {
988 let client = self.client.clone();
989 let project_id = self.remote_id().unwrap();
990 Some(cx.spawn_weak(|_, mut cx| async move {
991 let response = client
992 .request(proto::DeleteProjectEntry {
993 project_id,
994 entry_id: entry_id.to_proto(),
995 })
996 .await?;
997 worktree
998 .update(&mut cx, move |worktree, cx| {
999 worktree.as_remote().unwrap().delete_entry(
1000 entry_id,
1001 response.worktree_scan_id as usize,
1002 cx,
1003 )
1004 })
1005 .await
1006 }))
1007 }
1008 }
1009
1010 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1011 let project_id;
1012 if let ProjectClientState::Local {
1013 remote_id_rx,
1014 is_shared,
1015 ..
1016 } = &mut self.client_state
1017 {
1018 if *is_shared {
1019 return Task::ready(Ok(()));
1020 }
1021 *is_shared = true;
1022 if let Some(id) = *remote_id_rx.borrow() {
1023 project_id = id;
1024 } else {
1025 return Task::ready(Err(anyhow!("project hasn't been registered")));
1026 }
1027 } else {
1028 return Task::ready(Err(anyhow!("can't share a remote project")));
1029 };
1030
1031 for open_buffer in self.opened_buffers.values_mut() {
1032 match open_buffer {
1033 OpenBuffer::Strong(_) => {}
1034 OpenBuffer::Weak(buffer) => {
1035 if let Some(buffer) = buffer.upgrade(cx) {
1036 *open_buffer = OpenBuffer::Strong(buffer);
1037 }
1038 }
1039 OpenBuffer::Loading(_) => unreachable!(),
1040 }
1041 }
1042
1043 for worktree_handle in self.worktrees.iter_mut() {
1044 match worktree_handle {
1045 WorktreeHandle::Strong(_) => {}
1046 WorktreeHandle::Weak(worktree) => {
1047 if let Some(worktree) = worktree.upgrade(cx) {
1048 *worktree_handle = WorktreeHandle::Strong(worktree);
1049 }
1050 }
1051 }
1052 }
1053
1054 let mut tasks = Vec::new();
1055 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1056 worktree.update(cx, |worktree, cx| {
1057 let worktree = worktree.as_local_mut().unwrap();
1058 tasks.push(worktree.share(project_id, cx));
1059 });
1060 }
1061
1062 cx.spawn(|this, mut cx| async move {
1063 for task in tasks {
1064 task.await?;
1065 }
1066 this.update(&mut cx, |_, cx| cx.notify());
1067 Ok(())
1068 })
1069 }
1070
1071 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1072 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1073 if !*is_shared {
1074 return;
1075 }
1076
1077 *is_shared = false;
1078 self.collaborators.clear();
1079 self.shared_buffers.clear();
1080 for worktree_handle in self.worktrees.iter_mut() {
1081 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1082 let is_visible = worktree.update(cx, |worktree, _| {
1083 worktree.as_local_mut().unwrap().unshare();
1084 worktree.is_visible()
1085 });
1086 if !is_visible {
1087 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1088 }
1089 }
1090 }
1091
1092 for open_buffer in self.opened_buffers.values_mut() {
1093 match open_buffer {
1094 OpenBuffer::Strong(buffer) => {
1095 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1096 }
1097 _ => {}
1098 }
1099 }
1100
1101 cx.notify();
1102 } else {
1103 log::error!("attempted to unshare a remote project");
1104 }
1105 }
1106
1107 pub fn respond_to_join_request(
1108 &mut self,
1109 requester_id: u64,
1110 allow: bool,
1111 cx: &mut ModelContext<Self>,
1112 ) {
1113 if let Some(project_id) = self.remote_id() {
1114 let share = self.share(cx);
1115 let client = self.client.clone();
1116 cx.foreground()
1117 .spawn(async move {
1118 share.await?;
1119 client.send(proto::RespondToJoinProjectRequest {
1120 requester_id,
1121 project_id,
1122 allow,
1123 })
1124 })
1125 .detach_and_log_err(cx);
1126 }
1127 }
1128
1129 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1130 if let ProjectClientState::Remote {
1131 sharing_has_stopped,
1132 ..
1133 } = &mut self.client_state
1134 {
1135 *sharing_has_stopped = true;
1136 self.collaborators.clear();
1137 cx.notify();
1138 }
1139 }
1140
1141 pub fn is_read_only(&self) -> bool {
1142 match &self.client_state {
1143 ProjectClientState::Local { .. } => false,
1144 ProjectClientState::Remote {
1145 sharing_has_stopped,
1146 ..
1147 } => *sharing_has_stopped,
1148 }
1149 }
1150
1151 pub fn is_local(&self) -> bool {
1152 match &self.client_state {
1153 ProjectClientState::Local { .. } => true,
1154 ProjectClientState::Remote { .. } => false,
1155 }
1156 }
1157
1158 pub fn is_remote(&self) -> bool {
1159 !self.is_local()
1160 }
1161
1162 pub fn create_buffer(
1163 &mut self,
1164 text: &str,
1165 language: Option<Arc<Language>>,
1166 cx: &mut ModelContext<Self>,
1167 ) -> Result<ModelHandle<Buffer>> {
1168 if self.is_remote() {
1169 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1170 }
1171
1172 let buffer = cx.add_model(|cx| {
1173 Buffer::new(self.replica_id(), text, cx)
1174 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1175 });
1176 self.register_buffer(&buffer, cx)?;
1177 Ok(buffer)
1178 }
1179
1180 pub fn open_path(
1181 &mut self,
1182 path: impl Into<ProjectPath>,
1183 cx: &mut ModelContext<Self>,
1184 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1185 let task = self.open_buffer(path, cx);
1186 cx.spawn_weak(|_, cx| async move {
1187 let buffer = task.await?;
1188 let project_entry_id = buffer
1189 .read_with(&cx, |buffer, cx| {
1190 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1191 })
1192 .ok_or_else(|| anyhow!("no project entry"))?;
1193 Ok((project_entry_id, buffer.into()))
1194 })
1195 }
1196
1197 pub fn open_local_buffer(
1198 &mut self,
1199 abs_path: impl AsRef<Path>,
1200 cx: &mut ModelContext<Self>,
1201 ) -> Task<Result<ModelHandle<Buffer>>> {
1202 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1203 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1204 } else {
1205 Task::ready(Err(anyhow!("no such path")))
1206 }
1207 }
1208
1209 pub fn open_buffer(
1210 &mut self,
1211 path: impl Into<ProjectPath>,
1212 cx: &mut ModelContext<Self>,
1213 ) -> Task<Result<ModelHandle<Buffer>>> {
1214 let project_path = path.into();
1215 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1216 worktree
1217 } else {
1218 return Task::ready(Err(anyhow!("no such worktree")));
1219 };
1220
1221 // If there is already a buffer for the given path, then return it.
1222 let existing_buffer = self.get_open_buffer(&project_path, cx);
1223 if let Some(existing_buffer) = existing_buffer {
1224 return Task::ready(Ok(existing_buffer));
1225 }
1226
1227 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1228 // If the given path is already being loaded, then wait for that existing
1229 // task to complete and return the same buffer.
1230 hash_map::Entry::Occupied(e) => e.get().clone(),
1231
1232 // Otherwise, record the fact that this path is now being loaded.
1233 hash_map::Entry::Vacant(entry) => {
1234 let (mut tx, rx) = postage::watch::channel();
1235 entry.insert(rx.clone());
1236
1237 let load_buffer = if worktree.read(cx).is_local() {
1238 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1239 } else {
1240 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1241 };
1242
1243 cx.spawn(move |this, mut cx| async move {
1244 let load_result = load_buffer.await;
1245 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1246 // Record the fact that the buffer is no longer loading.
1247 this.loading_buffers.remove(&project_path);
1248 let buffer = load_result.map_err(Arc::new)?;
1249 Ok(buffer)
1250 }));
1251 })
1252 .detach();
1253 rx
1254 }
1255 };
1256
1257 cx.foreground().spawn(async move {
1258 loop {
1259 if let Some(result) = loading_watch.borrow().as_ref() {
1260 match result {
1261 Ok(buffer) => return Ok(buffer.clone()),
1262 Err(error) => return Err(anyhow!("{}", error)),
1263 }
1264 }
1265 loading_watch.next().await;
1266 }
1267 })
1268 }
1269
1270 fn open_local_buffer_internal(
1271 &mut self,
1272 path: &Arc<Path>,
1273 worktree: &ModelHandle<Worktree>,
1274 cx: &mut ModelContext<Self>,
1275 ) -> Task<Result<ModelHandle<Buffer>>> {
1276 let load_buffer = worktree.update(cx, |worktree, cx| {
1277 let worktree = worktree.as_local_mut().unwrap();
1278 worktree.load_buffer(path, cx)
1279 });
1280 cx.spawn(|this, mut cx| async move {
1281 let buffer = load_buffer.await?;
1282 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1283 Ok(buffer)
1284 })
1285 }
1286
1287 fn open_remote_buffer_internal(
1288 &mut self,
1289 path: &Arc<Path>,
1290 worktree: &ModelHandle<Worktree>,
1291 cx: &mut ModelContext<Self>,
1292 ) -> Task<Result<ModelHandle<Buffer>>> {
1293 let rpc = self.client.clone();
1294 let project_id = self.remote_id().unwrap();
1295 let remote_worktree_id = worktree.read(cx).id();
1296 let path = path.clone();
1297 let path_string = path.to_string_lossy().to_string();
1298 cx.spawn(|this, mut cx| async move {
1299 let response = rpc
1300 .request(proto::OpenBufferByPath {
1301 project_id,
1302 worktree_id: remote_worktree_id.to_proto(),
1303 path: path_string,
1304 })
1305 .await?;
1306 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1307 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1308 .await
1309 })
1310 }
1311
1312 fn open_local_buffer_via_lsp(
1313 &mut self,
1314 abs_path: lsp::Url,
1315 lsp_adapter: Arc<dyn LspAdapter>,
1316 lsp_server: Arc<LanguageServer>,
1317 cx: &mut ModelContext<Self>,
1318 ) -> Task<Result<ModelHandle<Buffer>>> {
1319 cx.spawn(|this, mut cx| async move {
1320 let abs_path = abs_path
1321 .to_file_path()
1322 .map_err(|_| anyhow!("can't convert URI to path"))?;
1323 let (worktree, relative_path) = if let Some(result) =
1324 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1325 {
1326 result
1327 } else {
1328 let worktree = this
1329 .update(&mut cx, |this, cx| {
1330 this.create_local_worktree(&abs_path, false, cx)
1331 })
1332 .await?;
1333 this.update(&mut cx, |this, cx| {
1334 this.language_servers.insert(
1335 (worktree.read(cx).id(), lsp_adapter.name()),
1336 (lsp_adapter, lsp_server),
1337 );
1338 });
1339 (worktree, PathBuf::new())
1340 };
1341
1342 let project_path = ProjectPath {
1343 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1344 path: relative_path.into(),
1345 };
1346 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1347 .await
1348 })
1349 }
1350
1351 pub fn open_buffer_by_id(
1352 &mut self,
1353 id: u64,
1354 cx: &mut ModelContext<Self>,
1355 ) -> Task<Result<ModelHandle<Buffer>>> {
1356 if let Some(buffer) = self.buffer_for_id(id, cx) {
1357 Task::ready(Ok(buffer))
1358 } else if self.is_local() {
1359 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1360 } else if let Some(project_id) = self.remote_id() {
1361 let request = self
1362 .client
1363 .request(proto::OpenBufferById { project_id, id });
1364 cx.spawn(|this, mut cx| async move {
1365 let buffer = request
1366 .await?
1367 .buffer
1368 .ok_or_else(|| anyhow!("invalid buffer"))?;
1369 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1370 .await
1371 })
1372 } else {
1373 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1374 }
1375 }
1376
1377 pub fn save_buffer_as(
1378 &mut self,
1379 buffer: ModelHandle<Buffer>,
1380 abs_path: PathBuf,
1381 cx: &mut ModelContext<Project>,
1382 ) -> Task<Result<()>> {
1383 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1384 let old_path =
1385 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1386 cx.spawn(|this, mut cx| async move {
1387 if let Some(old_path) = old_path {
1388 this.update(&mut cx, |this, cx| {
1389 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1390 });
1391 }
1392 let (worktree, path) = worktree_task.await?;
1393 worktree
1394 .update(&mut cx, |worktree, cx| {
1395 worktree
1396 .as_local_mut()
1397 .unwrap()
1398 .save_buffer_as(buffer.clone(), path, cx)
1399 })
1400 .await?;
1401 this.update(&mut cx, |this, cx| {
1402 this.assign_language_to_buffer(&buffer, cx);
1403 this.register_buffer_with_language_server(&buffer, cx);
1404 });
1405 Ok(())
1406 })
1407 }
1408
1409 pub fn get_open_buffer(
1410 &mut self,
1411 path: &ProjectPath,
1412 cx: &mut ModelContext<Self>,
1413 ) -> Option<ModelHandle<Buffer>> {
1414 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1415 self.opened_buffers.values().find_map(|buffer| {
1416 let buffer = buffer.upgrade(cx)?;
1417 let file = File::from_dyn(buffer.read(cx).file())?;
1418 if file.worktree == worktree && file.path() == &path.path {
1419 Some(buffer)
1420 } else {
1421 None
1422 }
1423 })
1424 }
1425
1426 fn register_buffer(
1427 &mut self,
1428 buffer: &ModelHandle<Buffer>,
1429 cx: &mut ModelContext<Self>,
1430 ) -> Result<()> {
1431 let remote_id = buffer.read(cx).remote_id();
1432 let open_buffer = if self.is_remote() || self.is_shared() {
1433 OpenBuffer::Strong(buffer.clone())
1434 } else {
1435 OpenBuffer::Weak(buffer.downgrade())
1436 };
1437
1438 match self.opened_buffers.insert(remote_id, open_buffer) {
1439 None => {}
1440 Some(OpenBuffer::Loading(operations)) => {
1441 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1442 }
1443 Some(OpenBuffer::Weak(existing_handle)) => {
1444 if existing_handle.upgrade(cx).is_some() {
1445 Err(anyhow!(
1446 "already registered buffer with remote id {}",
1447 remote_id
1448 ))?
1449 }
1450 }
1451 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1452 "already registered buffer with remote id {}",
1453 remote_id
1454 ))?,
1455 }
1456 cx.subscribe(buffer, |this, buffer, event, cx| {
1457 this.on_buffer_event(buffer, event, cx);
1458 })
1459 .detach();
1460
1461 self.assign_language_to_buffer(buffer, cx);
1462 self.register_buffer_with_language_server(buffer, cx);
1463 cx.observe_release(buffer, |this, buffer, cx| {
1464 if let Some(file) = File::from_dyn(buffer.file()) {
1465 if file.is_local() {
1466 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1467 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1468 server
1469 .notify::<lsp::notification::DidCloseTextDocument>(
1470 lsp::DidCloseTextDocumentParams {
1471 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1472 },
1473 )
1474 .log_err();
1475 }
1476 }
1477 }
1478 })
1479 .detach();
1480
1481 Ok(())
1482 }
1483
1484 fn register_buffer_with_language_server(
1485 &mut self,
1486 buffer_handle: &ModelHandle<Buffer>,
1487 cx: &mut ModelContext<Self>,
1488 ) {
1489 let buffer = buffer_handle.read(cx);
1490 let buffer_id = buffer.remote_id();
1491 if let Some(file) = File::from_dyn(buffer.file()) {
1492 if file.is_local() {
1493 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1494 let initial_snapshot = buffer.text_snapshot();
1495
1496 let mut language_server = None;
1497 let mut language_id = None;
1498 if let Some(language) = buffer.language() {
1499 let worktree_id = file.worktree_id(cx);
1500 if let Some(adapter) = language.lsp_adapter() {
1501 language_id = adapter.id_for_language(language.name().as_ref());
1502 language_server = self
1503 .language_servers
1504 .get(&(worktree_id, adapter.name()))
1505 .cloned();
1506 }
1507 }
1508
1509 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1510 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1511 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1512 .log_err();
1513 }
1514 }
1515
1516 if let Some((_, server)) = language_server {
1517 server
1518 .notify::<lsp::notification::DidOpenTextDocument>(
1519 lsp::DidOpenTextDocumentParams {
1520 text_document: lsp::TextDocumentItem::new(
1521 uri,
1522 language_id.unwrap_or_default(),
1523 0,
1524 initial_snapshot.text(),
1525 ),
1526 }
1527 .clone(),
1528 )
1529 .log_err();
1530 buffer_handle.update(cx, |buffer, cx| {
1531 buffer.set_completion_triggers(
1532 server
1533 .capabilities()
1534 .completion_provider
1535 .as_ref()
1536 .and_then(|provider| provider.trigger_characters.clone())
1537 .unwrap_or(Vec::new()),
1538 cx,
1539 )
1540 });
1541 self.buffer_snapshots
1542 .insert(buffer_id, vec![(0, initial_snapshot)]);
1543 }
1544 }
1545 }
1546 }
1547
1548 fn unregister_buffer_from_language_server(
1549 &mut self,
1550 buffer: &ModelHandle<Buffer>,
1551 old_path: PathBuf,
1552 cx: &mut ModelContext<Self>,
1553 ) {
1554 buffer.update(cx, |buffer, cx| {
1555 buffer.update_diagnostics(Default::default(), cx);
1556 self.buffer_snapshots.remove(&buffer.remote_id());
1557 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1558 language_server
1559 .notify::<lsp::notification::DidCloseTextDocument>(
1560 lsp::DidCloseTextDocumentParams {
1561 text_document: lsp::TextDocumentIdentifier::new(
1562 lsp::Url::from_file_path(old_path).unwrap(),
1563 ),
1564 },
1565 )
1566 .log_err();
1567 }
1568 });
1569 }
1570
1571 fn on_buffer_event(
1572 &mut self,
1573 buffer: ModelHandle<Buffer>,
1574 event: &BufferEvent,
1575 cx: &mut ModelContext<Self>,
1576 ) -> Option<()> {
1577 match event {
1578 BufferEvent::Operation(operation) => {
1579 if let Some(project_id) = self.shared_remote_id() {
1580 let request = self.client.request(proto::UpdateBuffer {
1581 project_id,
1582 buffer_id: buffer.read(cx).remote_id(),
1583 operations: vec![language::proto::serialize_operation(&operation)],
1584 });
1585 cx.background().spawn(request).detach_and_log_err(cx);
1586 }
1587 }
1588 BufferEvent::Edited { .. } => {
1589 let (_, language_server) = self
1590 .language_server_for_buffer(buffer.read(cx), cx)?
1591 .clone();
1592 let buffer = buffer.read(cx);
1593 let file = File::from_dyn(buffer.file())?;
1594 let abs_path = file.as_local()?.abs_path(cx);
1595 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1596 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1597 let (version, prev_snapshot) = buffer_snapshots.last()?;
1598 let next_snapshot = buffer.text_snapshot();
1599 let next_version = version + 1;
1600
1601 let content_changes = buffer
1602 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1603 .map(|edit| {
1604 let edit_start = edit.new.start.0;
1605 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1606 let new_text = next_snapshot
1607 .text_for_range(edit.new.start.1..edit.new.end.1)
1608 .collect();
1609 lsp::TextDocumentContentChangeEvent {
1610 range: Some(lsp::Range::new(
1611 point_to_lsp(edit_start),
1612 point_to_lsp(edit_end),
1613 )),
1614 range_length: None,
1615 text: new_text,
1616 }
1617 })
1618 .collect();
1619
1620 buffer_snapshots.push((next_version, next_snapshot));
1621
1622 language_server
1623 .notify::<lsp::notification::DidChangeTextDocument>(
1624 lsp::DidChangeTextDocumentParams {
1625 text_document: lsp::VersionedTextDocumentIdentifier::new(
1626 uri,
1627 next_version,
1628 ),
1629 content_changes,
1630 },
1631 )
1632 .log_err();
1633 }
1634 BufferEvent::Saved => {
1635 let file = File::from_dyn(buffer.read(cx).file())?;
1636 let worktree_id = file.worktree_id(cx);
1637 let abs_path = file.as_local()?.abs_path(cx);
1638 let text_document = lsp::TextDocumentIdentifier {
1639 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1640 };
1641
1642 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1643 server
1644 .notify::<lsp::notification::DidSaveTextDocument>(
1645 lsp::DidSaveTextDocumentParams {
1646 text_document: text_document.clone(),
1647 text: None,
1648 },
1649 )
1650 .log_err();
1651 }
1652 }
1653 _ => {}
1654 }
1655
1656 None
1657 }
1658
1659 fn language_servers_for_worktree(
1660 &self,
1661 worktree_id: WorktreeId,
1662 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1663 self.language_servers.iter().filter_map(
1664 move |((language_server_worktree_id, _), server)| {
1665 if *language_server_worktree_id == worktree_id {
1666 Some(server)
1667 } else {
1668 None
1669 }
1670 },
1671 )
1672 }
1673
1674 fn assign_language_to_buffer(
1675 &mut self,
1676 buffer: &ModelHandle<Buffer>,
1677 cx: &mut ModelContext<Self>,
1678 ) -> Option<()> {
1679 // If the buffer has a language, set it and start the language server if we haven't already.
1680 let full_path = buffer.read(cx).file()?.full_path(cx);
1681 let language = self.languages.select_language(&full_path)?;
1682 buffer.update(cx, |buffer, cx| {
1683 buffer.set_language(Some(language.clone()), cx);
1684 });
1685
1686 let file = File::from_dyn(buffer.read(cx).file())?;
1687 let worktree = file.worktree.read(cx).as_local()?;
1688 let worktree_id = worktree.id();
1689 let worktree_abs_path = worktree.abs_path().clone();
1690 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1691
1692 None
1693 }
1694
1695 fn start_language_server(
1696 &mut self,
1697 worktree_id: WorktreeId,
1698 worktree_path: Arc<Path>,
1699 language: Arc<Language>,
1700 cx: &mut ModelContext<Self>,
1701 ) {
1702 let adapter = if let Some(adapter) = language.lsp_adapter() {
1703 adapter
1704 } else {
1705 return;
1706 };
1707 let key = (worktree_id, adapter.name());
1708 self.started_language_servers
1709 .entry(key.clone())
1710 .or_insert_with(|| {
1711 let server_id = post_inc(&mut self.next_language_server_id);
1712 let language_server = self.languages.start_language_server(
1713 server_id,
1714 language.clone(),
1715 worktree_path,
1716 self.client.http_client(),
1717 cx,
1718 );
1719 cx.spawn_weak(|this, mut cx| async move {
1720 let language_server = language_server?.await.log_err()?;
1721 let language_server = language_server
1722 .initialize(adapter.initialization_options())
1723 .await
1724 .log_err()?;
1725 let this = this.upgrade(&cx)?;
1726 let disk_based_diagnostics_progress_token =
1727 adapter.disk_based_diagnostics_progress_token();
1728
1729 language_server
1730 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1731 let this = this.downgrade();
1732 let adapter = adapter.clone();
1733 move |params, mut cx| {
1734 if let Some(this) = this.upgrade(&cx) {
1735 this.update(&mut cx, |this, cx| {
1736 this.on_lsp_diagnostics_published(
1737 server_id,
1738 params,
1739 &adapter,
1740 disk_based_diagnostics_progress_token,
1741 cx,
1742 );
1743 });
1744 }
1745 }
1746 })
1747 .detach();
1748
1749 language_server
1750 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1751 let settings = this
1752 .read_with(&cx, |this, _| this.language_server_settings.clone());
1753 move |params, _| {
1754 let settings = settings.lock().clone();
1755 async move {
1756 Ok(params
1757 .items
1758 .into_iter()
1759 .map(|item| {
1760 if let Some(section) = &item.section {
1761 settings
1762 .get(section)
1763 .cloned()
1764 .unwrap_or(serde_json::Value::Null)
1765 } else {
1766 settings.clone()
1767 }
1768 })
1769 .collect())
1770 }
1771 }
1772 })
1773 .detach();
1774
1775 language_server
1776 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1777 let this = this.downgrade();
1778 let adapter = adapter.clone();
1779 let language_server = language_server.clone();
1780 move |params, cx| {
1781 Self::on_lsp_workspace_edit(
1782 this,
1783 params,
1784 server_id,
1785 adapter.clone(),
1786 language_server.clone(),
1787 cx,
1788 )
1789 }
1790 })
1791 .detach();
1792
1793 language_server
1794 .on_notification::<lsp::notification::Progress, _>({
1795 let this = this.downgrade();
1796 move |params, mut cx| {
1797 if let Some(this) = this.upgrade(&cx) {
1798 this.update(&mut cx, |this, cx| {
1799 this.on_lsp_progress(
1800 params,
1801 server_id,
1802 disk_based_diagnostics_progress_token,
1803 cx,
1804 );
1805 });
1806 }
1807 }
1808 })
1809 .detach();
1810
1811 this.update(&mut cx, |this, cx| {
1812 this.language_servers
1813 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1814 this.language_server_statuses.insert(
1815 server_id,
1816 LanguageServerStatus {
1817 name: language_server.name().to_string(),
1818 pending_work: Default::default(),
1819 pending_diagnostic_updates: 0,
1820 },
1821 );
1822 language_server
1823 .notify::<lsp::notification::DidChangeConfiguration>(
1824 lsp::DidChangeConfigurationParams {
1825 settings: this.language_server_settings.lock().clone(),
1826 },
1827 )
1828 .ok();
1829
1830 if let Some(project_id) = this.shared_remote_id() {
1831 this.client
1832 .send(proto::StartLanguageServer {
1833 project_id,
1834 server: Some(proto::LanguageServer {
1835 id: server_id as u64,
1836 name: language_server.name().to_string(),
1837 }),
1838 })
1839 .log_err();
1840 }
1841
1842 // Tell the language server about every open buffer in the worktree that matches the language.
1843 for buffer in this.opened_buffers.values() {
1844 if let Some(buffer_handle) = buffer.upgrade(cx) {
1845 let buffer = buffer_handle.read(cx);
1846 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1847 file
1848 } else {
1849 continue;
1850 };
1851 let language = if let Some(language) = buffer.language() {
1852 language
1853 } else {
1854 continue;
1855 };
1856 if file.worktree.read(cx).id() != key.0
1857 || language.lsp_adapter().map(|a| a.name())
1858 != Some(key.1.clone())
1859 {
1860 continue;
1861 }
1862
1863 let file = file.as_local()?;
1864 let versions = this
1865 .buffer_snapshots
1866 .entry(buffer.remote_id())
1867 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1868 let (version, initial_snapshot) = versions.last().unwrap();
1869 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1870 let language_id = adapter.id_for_language(language.name().as_ref());
1871 language_server
1872 .notify::<lsp::notification::DidOpenTextDocument>(
1873 lsp::DidOpenTextDocumentParams {
1874 text_document: lsp::TextDocumentItem::new(
1875 uri,
1876 language_id.unwrap_or_default(),
1877 *version,
1878 initial_snapshot.text(),
1879 ),
1880 },
1881 )
1882 .log_err()?;
1883 buffer_handle.update(cx, |buffer, cx| {
1884 buffer.set_completion_triggers(
1885 language_server
1886 .capabilities()
1887 .completion_provider
1888 .as_ref()
1889 .and_then(|provider| {
1890 provider.trigger_characters.clone()
1891 })
1892 .unwrap_or(Vec::new()),
1893 cx,
1894 )
1895 });
1896 }
1897 }
1898
1899 cx.notify();
1900 Some(())
1901 });
1902
1903 Some(language_server)
1904 })
1905 });
1906 }
1907
1908 pub fn restart_language_servers_for_buffers(
1909 &mut self,
1910 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1911 cx: &mut ModelContext<Self>,
1912 ) -> Option<()> {
1913 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1914 .into_iter()
1915 .filter_map(|buffer| {
1916 let file = File::from_dyn(buffer.read(cx).file())?;
1917 let worktree = file.worktree.read(cx).as_local()?;
1918 let worktree_id = worktree.id();
1919 let worktree_abs_path = worktree.abs_path().clone();
1920 let full_path = file.full_path(cx);
1921 Some((worktree_id, worktree_abs_path, full_path))
1922 })
1923 .collect();
1924 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1925 let language = self.languages.select_language(&full_path)?;
1926 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1927 }
1928
1929 None
1930 }
1931
1932 fn restart_language_server(
1933 &mut self,
1934 worktree_id: WorktreeId,
1935 worktree_path: Arc<Path>,
1936 language: Arc<Language>,
1937 cx: &mut ModelContext<Self>,
1938 ) {
1939 let adapter = if let Some(adapter) = language.lsp_adapter() {
1940 adapter
1941 } else {
1942 return;
1943 };
1944 let key = (worktree_id, adapter.name());
1945 let server_to_shutdown = self.language_servers.remove(&key);
1946 self.started_language_servers.remove(&key);
1947 server_to_shutdown
1948 .as_ref()
1949 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1950 cx.spawn_weak(|this, mut cx| async move {
1951 if let Some(this) = this.upgrade(&cx) {
1952 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1953 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1954 shutdown_task.await;
1955 }
1956 }
1957
1958 this.update(&mut cx, |this, cx| {
1959 this.start_language_server(worktree_id, worktree_path, language, cx);
1960 });
1961 }
1962 })
1963 .detach();
1964 }
1965
1966 fn on_lsp_diagnostics_published(
1967 &mut self,
1968 server_id: usize,
1969 mut params: lsp::PublishDiagnosticsParams,
1970 adapter: &Arc<dyn LspAdapter>,
1971 disk_based_diagnostics_progress_token: Option<&str>,
1972 cx: &mut ModelContext<Self>,
1973 ) {
1974 adapter.process_diagnostics(&mut params);
1975 if disk_based_diagnostics_progress_token.is_none() {
1976 self.disk_based_diagnostics_started(cx);
1977 self.broadcast_language_server_update(
1978 server_id,
1979 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1980 proto::LspDiskBasedDiagnosticsUpdating {},
1981 ),
1982 );
1983 }
1984 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1985 .log_err();
1986 if disk_based_diagnostics_progress_token.is_none() {
1987 self.disk_based_diagnostics_finished(cx);
1988 self.broadcast_language_server_update(
1989 server_id,
1990 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1991 proto::LspDiskBasedDiagnosticsUpdated {},
1992 ),
1993 );
1994 }
1995 }
1996
1997 fn on_lsp_progress(
1998 &mut self,
1999 progress: lsp::ProgressParams,
2000 server_id: usize,
2001 disk_based_diagnostics_progress_token: Option<&str>,
2002 cx: &mut ModelContext<Self>,
2003 ) {
2004 let token = match progress.token {
2005 lsp::NumberOrString::String(token) => token,
2006 lsp::NumberOrString::Number(token) => {
2007 log::info!("skipping numeric progress token {}", token);
2008 return;
2009 }
2010 };
2011 let progress = match progress.value {
2012 lsp::ProgressParamsValue::WorkDone(value) => value,
2013 };
2014 let language_server_status =
2015 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2016 status
2017 } else {
2018 return;
2019 };
2020 match progress {
2021 lsp::WorkDoneProgress::Begin(_) => {
2022 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2023 language_server_status.pending_diagnostic_updates += 1;
2024 if language_server_status.pending_diagnostic_updates == 1 {
2025 self.disk_based_diagnostics_started(cx);
2026 self.broadcast_language_server_update(
2027 server_id,
2028 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2029 proto::LspDiskBasedDiagnosticsUpdating {},
2030 ),
2031 );
2032 }
2033 } else {
2034 self.on_lsp_work_start(server_id, token.clone(), cx);
2035 self.broadcast_language_server_update(
2036 server_id,
2037 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2038 token,
2039 }),
2040 );
2041 }
2042 }
2043 lsp::WorkDoneProgress::Report(report) => {
2044 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2045 self.on_lsp_work_progress(
2046 server_id,
2047 token.clone(),
2048 LanguageServerProgress {
2049 message: report.message.clone(),
2050 percentage: report.percentage.map(|p| p as usize),
2051 last_update_at: Instant::now(),
2052 },
2053 cx,
2054 );
2055 self.broadcast_language_server_update(
2056 server_id,
2057 proto::update_language_server::Variant::WorkProgress(
2058 proto::LspWorkProgress {
2059 token,
2060 message: report.message,
2061 percentage: report.percentage.map(|p| p as u32),
2062 },
2063 ),
2064 );
2065 }
2066 }
2067 lsp::WorkDoneProgress::End(_) => {
2068 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2069 language_server_status.pending_diagnostic_updates -= 1;
2070 if language_server_status.pending_diagnostic_updates == 0 {
2071 self.disk_based_diagnostics_finished(cx);
2072 self.broadcast_language_server_update(
2073 server_id,
2074 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2075 proto::LspDiskBasedDiagnosticsUpdated {},
2076 ),
2077 );
2078 }
2079 } else {
2080 self.on_lsp_work_end(server_id, token.clone(), cx);
2081 self.broadcast_language_server_update(
2082 server_id,
2083 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2084 token,
2085 }),
2086 );
2087 }
2088 }
2089 }
2090 }
2091
2092 fn on_lsp_work_start(
2093 &mut self,
2094 language_server_id: usize,
2095 token: String,
2096 cx: &mut ModelContext<Self>,
2097 ) {
2098 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2099 status.pending_work.insert(
2100 token,
2101 LanguageServerProgress {
2102 message: None,
2103 percentage: None,
2104 last_update_at: Instant::now(),
2105 },
2106 );
2107 cx.notify();
2108 }
2109 }
2110
2111 fn on_lsp_work_progress(
2112 &mut self,
2113 language_server_id: usize,
2114 token: String,
2115 progress: LanguageServerProgress,
2116 cx: &mut ModelContext<Self>,
2117 ) {
2118 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2119 status.pending_work.insert(token, progress);
2120 cx.notify();
2121 }
2122 }
2123
2124 fn on_lsp_work_end(
2125 &mut self,
2126 language_server_id: usize,
2127 token: String,
2128 cx: &mut ModelContext<Self>,
2129 ) {
2130 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2131 status.pending_work.remove(&token);
2132 cx.notify();
2133 }
2134 }
2135
2136 async fn on_lsp_workspace_edit(
2137 this: WeakModelHandle<Self>,
2138 params: lsp::ApplyWorkspaceEditParams,
2139 server_id: usize,
2140 adapter: Arc<dyn LspAdapter>,
2141 language_server: Arc<LanguageServer>,
2142 mut cx: AsyncAppContext,
2143 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2144 let this = this
2145 .upgrade(&cx)
2146 .ok_or_else(|| anyhow!("project project closed"))?;
2147 let transaction = Self::deserialize_workspace_edit(
2148 this.clone(),
2149 params.edit,
2150 true,
2151 adapter.clone(),
2152 language_server.clone(),
2153 &mut cx,
2154 )
2155 .await
2156 .log_err();
2157 this.update(&mut cx, |this, _| {
2158 if let Some(transaction) = transaction {
2159 this.last_workspace_edits_by_language_server
2160 .insert(server_id, transaction);
2161 }
2162 });
2163 Ok(lsp::ApplyWorkspaceEditResponse {
2164 applied: true,
2165 failed_change: None,
2166 failure_reason: None,
2167 })
2168 }
2169
2170 fn broadcast_language_server_update(
2171 &self,
2172 language_server_id: usize,
2173 event: proto::update_language_server::Variant,
2174 ) {
2175 if let Some(project_id) = self.shared_remote_id() {
2176 self.client
2177 .send(proto::UpdateLanguageServer {
2178 project_id,
2179 language_server_id: language_server_id as u64,
2180 variant: Some(event),
2181 })
2182 .log_err();
2183 }
2184 }
2185
2186 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2187 for (_, server) in self.language_servers.values() {
2188 server
2189 .notify::<lsp::notification::DidChangeConfiguration>(
2190 lsp::DidChangeConfigurationParams {
2191 settings: settings.clone(),
2192 },
2193 )
2194 .ok();
2195 }
2196 *self.language_server_settings.lock() = settings;
2197 }
2198
2199 pub fn language_server_statuses(
2200 &self,
2201 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2202 self.language_server_statuses.values()
2203 }
2204
2205 pub fn update_diagnostics(
2206 &mut self,
2207 params: lsp::PublishDiagnosticsParams,
2208 disk_based_sources: &[&str],
2209 cx: &mut ModelContext<Self>,
2210 ) -> Result<()> {
2211 let abs_path = params
2212 .uri
2213 .to_file_path()
2214 .map_err(|_| anyhow!("URI is not a file"))?;
2215 let mut diagnostics = Vec::default();
2216 let mut primary_diagnostic_group_ids = HashMap::default();
2217 let mut sources_by_group_id = HashMap::default();
2218 let mut supporting_diagnostics = HashMap::default();
2219 for diagnostic in ¶ms.diagnostics {
2220 let source = diagnostic.source.as_ref();
2221 let code = diagnostic.code.as_ref().map(|code| match code {
2222 lsp::NumberOrString::Number(code) => code.to_string(),
2223 lsp::NumberOrString::String(code) => code.clone(),
2224 });
2225 let range = range_from_lsp(diagnostic.range);
2226 let is_supporting = diagnostic
2227 .related_information
2228 .as_ref()
2229 .map_or(false, |infos| {
2230 infos.iter().any(|info| {
2231 primary_diagnostic_group_ids.contains_key(&(
2232 source,
2233 code.clone(),
2234 range_from_lsp(info.location.range),
2235 ))
2236 })
2237 });
2238
2239 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2240 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2241 });
2242
2243 if is_supporting {
2244 supporting_diagnostics.insert(
2245 (source, code.clone(), range),
2246 (diagnostic.severity, is_unnecessary),
2247 );
2248 } else {
2249 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2250 let is_disk_based = source.map_or(false, |source| {
2251 disk_based_sources.contains(&source.as_str())
2252 });
2253
2254 sources_by_group_id.insert(group_id, source);
2255 primary_diagnostic_group_ids
2256 .insert((source, code.clone(), range.clone()), group_id);
2257
2258 diagnostics.push(DiagnosticEntry {
2259 range,
2260 diagnostic: Diagnostic {
2261 code: code.clone(),
2262 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2263 message: diagnostic.message.clone(),
2264 group_id,
2265 is_primary: true,
2266 is_valid: true,
2267 is_disk_based,
2268 is_unnecessary,
2269 },
2270 });
2271 if let Some(infos) = &diagnostic.related_information {
2272 for info in infos {
2273 if info.location.uri == params.uri && !info.message.is_empty() {
2274 let range = range_from_lsp(info.location.range);
2275 diagnostics.push(DiagnosticEntry {
2276 range,
2277 diagnostic: Diagnostic {
2278 code: code.clone(),
2279 severity: DiagnosticSeverity::INFORMATION,
2280 message: info.message.clone(),
2281 group_id,
2282 is_primary: false,
2283 is_valid: true,
2284 is_disk_based,
2285 is_unnecessary: false,
2286 },
2287 });
2288 }
2289 }
2290 }
2291 }
2292 }
2293
2294 for entry in &mut diagnostics {
2295 let diagnostic = &mut entry.diagnostic;
2296 if !diagnostic.is_primary {
2297 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2298 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2299 source,
2300 diagnostic.code.clone(),
2301 entry.range.clone(),
2302 )) {
2303 if let Some(severity) = severity {
2304 diagnostic.severity = severity;
2305 }
2306 diagnostic.is_unnecessary = is_unnecessary;
2307 }
2308 }
2309 }
2310
2311 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2312 Ok(())
2313 }
2314
2315 pub fn update_diagnostic_entries(
2316 &mut self,
2317 abs_path: PathBuf,
2318 version: Option<i32>,
2319 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2320 cx: &mut ModelContext<Project>,
2321 ) -> Result<(), anyhow::Error> {
2322 let (worktree, relative_path) = self
2323 .find_local_worktree(&abs_path, cx)
2324 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2325 if !worktree.read(cx).is_visible() {
2326 return Ok(());
2327 }
2328
2329 let project_path = ProjectPath {
2330 worktree_id: worktree.read(cx).id(),
2331 path: relative_path.into(),
2332 };
2333 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2334 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2335 }
2336
2337 let updated = worktree.update(cx, |worktree, cx| {
2338 worktree
2339 .as_local_mut()
2340 .ok_or_else(|| anyhow!("not a local worktree"))?
2341 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2342 })?;
2343 if updated {
2344 cx.emit(Event::DiagnosticsUpdated(project_path));
2345 }
2346 Ok(())
2347 }
2348
2349 fn update_buffer_diagnostics(
2350 &mut self,
2351 buffer: &ModelHandle<Buffer>,
2352 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2353 version: Option<i32>,
2354 cx: &mut ModelContext<Self>,
2355 ) -> Result<()> {
2356 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2357 Ordering::Equal
2358 .then_with(|| b.is_primary.cmp(&a.is_primary))
2359 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2360 .then_with(|| a.severity.cmp(&b.severity))
2361 .then_with(|| a.message.cmp(&b.message))
2362 }
2363
2364 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2365
2366 diagnostics.sort_unstable_by(|a, b| {
2367 Ordering::Equal
2368 .then_with(|| a.range.start.cmp(&b.range.start))
2369 .then_with(|| b.range.end.cmp(&a.range.end))
2370 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2371 });
2372
2373 let mut sanitized_diagnostics = Vec::new();
2374 let edits_since_save = Patch::new(
2375 snapshot
2376 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2377 .collect(),
2378 );
2379 for entry in diagnostics {
2380 let start;
2381 let end;
2382 if entry.diagnostic.is_disk_based {
2383 // Some diagnostics are based on files on disk instead of buffers'
2384 // current contents. Adjust these diagnostics' ranges to reflect
2385 // any unsaved edits.
2386 start = edits_since_save.old_to_new(entry.range.start);
2387 end = edits_since_save.old_to_new(entry.range.end);
2388 } else {
2389 start = entry.range.start;
2390 end = entry.range.end;
2391 }
2392
2393 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2394 ..snapshot.clip_point_utf16(end, Bias::Right);
2395
2396 // Expand empty ranges by one character
2397 if range.start == range.end {
2398 range.end.column += 1;
2399 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2400 if range.start == range.end && range.end.column > 0 {
2401 range.start.column -= 1;
2402 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2403 }
2404 }
2405
2406 sanitized_diagnostics.push(DiagnosticEntry {
2407 range,
2408 diagnostic: entry.diagnostic,
2409 });
2410 }
2411 drop(edits_since_save);
2412
2413 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2414 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2415 Ok(())
2416 }
2417
2418 pub fn reload_buffers(
2419 &self,
2420 buffers: HashSet<ModelHandle<Buffer>>,
2421 push_to_history: bool,
2422 cx: &mut ModelContext<Self>,
2423 ) -> Task<Result<ProjectTransaction>> {
2424 let mut local_buffers = Vec::new();
2425 let mut remote_buffers = None;
2426 for buffer_handle in buffers {
2427 let buffer = buffer_handle.read(cx);
2428 if buffer.is_dirty() {
2429 if let Some(file) = File::from_dyn(buffer.file()) {
2430 if file.is_local() {
2431 local_buffers.push(buffer_handle);
2432 } else {
2433 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2434 }
2435 }
2436 }
2437 }
2438
2439 let remote_buffers = self.remote_id().zip(remote_buffers);
2440 let client = self.client.clone();
2441
2442 cx.spawn(|this, mut cx| async move {
2443 let mut project_transaction = ProjectTransaction::default();
2444
2445 if let Some((project_id, remote_buffers)) = remote_buffers {
2446 let response = client
2447 .request(proto::ReloadBuffers {
2448 project_id,
2449 buffer_ids: remote_buffers
2450 .iter()
2451 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2452 .collect(),
2453 })
2454 .await?
2455 .transaction
2456 .ok_or_else(|| anyhow!("missing transaction"))?;
2457 project_transaction = this
2458 .update(&mut cx, |this, cx| {
2459 this.deserialize_project_transaction(response, push_to_history, cx)
2460 })
2461 .await?;
2462 }
2463
2464 for buffer in local_buffers {
2465 let transaction = buffer
2466 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2467 .await?;
2468 buffer.update(&mut cx, |buffer, cx| {
2469 if let Some(transaction) = transaction {
2470 if !push_to_history {
2471 buffer.forget_transaction(transaction.id);
2472 }
2473 project_transaction.0.insert(cx.handle(), transaction);
2474 }
2475 });
2476 }
2477
2478 Ok(project_transaction)
2479 })
2480 }
2481
2482 pub fn format(
2483 &self,
2484 buffers: HashSet<ModelHandle<Buffer>>,
2485 push_to_history: bool,
2486 cx: &mut ModelContext<Project>,
2487 ) -> Task<Result<ProjectTransaction>> {
2488 let mut local_buffers = Vec::new();
2489 let mut remote_buffers = None;
2490 for buffer_handle in buffers {
2491 let buffer = buffer_handle.read(cx);
2492 if let Some(file) = File::from_dyn(buffer.file()) {
2493 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2494 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2495 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2496 }
2497 } else {
2498 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2499 }
2500 } else {
2501 return Task::ready(Ok(Default::default()));
2502 }
2503 }
2504
2505 let remote_buffers = self.remote_id().zip(remote_buffers);
2506 let client = self.client.clone();
2507
2508 cx.spawn(|this, mut cx| async move {
2509 let mut project_transaction = ProjectTransaction::default();
2510
2511 if let Some((project_id, remote_buffers)) = remote_buffers {
2512 let response = client
2513 .request(proto::FormatBuffers {
2514 project_id,
2515 buffer_ids: remote_buffers
2516 .iter()
2517 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2518 .collect(),
2519 })
2520 .await?
2521 .transaction
2522 .ok_or_else(|| anyhow!("missing transaction"))?;
2523 project_transaction = this
2524 .update(&mut cx, |this, cx| {
2525 this.deserialize_project_transaction(response, push_to_history, cx)
2526 })
2527 .await?;
2528 }
2529
2530 for (buffer, buffer_abs_path, language_server) in local_buffers {
2531 let text_document = lsp::TextDocumentIdentifier::new(
2532 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2533 );
2534 let capabilities = &language_server.capabilities();
2535 let tab_size = cx.update(|cx| {
2536 let language_name = buffer.read(cx).language().map(|language| language.name());
2537 cx.global::<Settings>().tab_size(language_name.as_deref())
2538 });
2539 let lsp_edits = if capabilities
2540 .document_formatting_provider
2541 .as_ref()
2542 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2543 {
2544 language_server
2545 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2546 text_document,
2547 options: lsp::FormattingOptions {
2548 tab_size,
2549 insert_spaces: true,
2550 insert_final_newline: Some(true),
2551 ..Default::default()
2552 },
2553 work_done_progress_params: Default::default(),
2554 })
2555 .await?
2556 } else if capabilities
2557 .document_range_formatting_provider
2558 .as_ref()
2559 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2560 {
2561 let buffer_start = lsp::Position::new(0, 0);
2562 let buffer_end =
2563 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2564 language_server
2565 .request::<lsp::request::RangeFormatting>(
2566 lsp::DocumentRangeFormattingParams {
2567 text_document,
2568 range: lsp::Range::new(buffer_start, buffer_end),
2569 options: lsp::FormattingOptions {
2570 tab_size: 4,
2571 insert_spaces: true,
2572 insert_final_newline: Some(true),
2573 ..Default::default()
2574 },
2575 work_done_progress_params: Default::default(),
2576 },
2577 )
2578 .await?
2579 } else {
2580 continue;
2581 };
2582
2583 if let Some(lsp_edits) = lsp_edits {
2584 let edits = this
2585 .update(&mut cx, |this, cx| {
2586 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2587 })
2588 .await?;
2589 buffer.update(&mut cx, |buffer, cx| {
2590 buffer.finalize_last_transaction();
2591 buffer.start_transaction();
2592 for (range, text) in edits {
2593 buffer.edit([(range, text)], cx);
2594 }
2595 if buffer.end_transaction(cx).is_some() {
2596 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2597 if !push_to_history {
2598 buffer.forget_transaction(transaction.id);
2599 }
2600 project_transaction.0.insert(cx.handle(), transaction);
2601 }
2602 });
2603 }
2604 }
2605
2606 Ok(project_transaction)
2607 })
2608 }
2609
2610 pub fn definition<T: ToPointUtf16>(
2611 &self,
2612 buffer: &ModelHandle<Buffer>,
2613 position: T,
2614 cx: &mut ModelContext<Self>,
2615 ) -> Task<Result<Vec<Location>>> {
2616 let position = position.to_point_utf16(buffer.read(cx));
2617 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2618 }
2619
2620 pub fn references<T: ToPointUtf16>(
2621 &self,
2622 buffer: &ModelHandle<Buffer>,
2623 position: T,
2624 cx: &mut ModelContext<Self>,
2625 ) -> Task<Result<Vec<Location>>> {
2626 let position = position.to_point_utf16(buffer.read(cx));
2627 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2628 }
2629
2630 pub fn document_highlights<T: ToPointUtf16>(
2631 &self,
2632 buffer: &ModelHandle<Buffer>,
2633 position: T,
2634 cx: &mut ModelContext<Self>,
2635 ) -> Task<Result<Vec<DocumentHighlight>>> {
2636 let position = position.to_point_utf16(buffer.read(cx));
2637
2638 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2639 }
2640
2641 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2642 if self.is_local() {
2643 let mut requests = Vec::new();
2644 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2645 let worktree_id = *worktree_id;
2646 if let Some(worktree) = self
2647 .worktree_for_id(worktree_id, cx)
2648 .and_then(|worktree| worktree.read(cx).as_local())
2649 {
2650 let lsp_adapter = lsp_adapter.clone();
2651 let worktree_abs_path = worktree.abs_path().clone();
2652 requests.push(
2653 language_server
2654 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2655 query: query.to_string(),
2656 ..Default::default()
2657 })
2658 .log_err()
2659 .map(move |response| {
2660 (
2661 lsp_adapter,
2662 worktree_id,
2663 worktree_abs_path,
2664 response.unwrap_or_default(),
2665 )
2666 }),
2667 );
2668 }
2669 }
2670
2671 cx.spawn_weak(|this, cx| async move {
2672 let responses = futures::future::join_all(requests).await;
2673 let this = if let Some(this) = this.upgrade(&cx) {
2674 this
2675 } else {
2676 return Ok(Default::default());
2677 };
2678 this.read_with(&cx, |this, cx| {
2679 let mut symbols = Vec::new();
2680 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2681 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2682 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2683 let mut worktree_id = source_worktree_id;
2684 let path;
2685 if let Some((worktree, rel_path)) =
2686 this.find_local_worktree(&abs_path, cx)
2687 {
2688 worktree_id = worktree.read(cx).id();
2689 path = rel_path;
2690 } else {
2691 path = relativize_path(&worktree_abs_path, &abs_path);
2692 }
2693
2694 let label = this
2695 .languages
2696 .select_language(&path)
2697 .and_then(|language| {
2698 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2699 })
2700 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2701 let signature = this.symbol_signature(worktree_id, &path);
2702
2703 Some(Symbol {
2704 source_worktree_id,
2705 worktree_id,
2706 language_server_name: adapter.name(),
2707 name: lsp_symbol.name,
2708 kind: lsp_symbol.kind,
2709 label,
2710 path,
2711 range: range_from_lsp(lsp_symbol.location.range),
2712 signature,
2713 })
2714 }));
2715 }
2716 Ok(symbols)
2717 })
2718 })
2719 } else if let Some(project_id) = self.remote_id() {
2720 let request = self.client.request(proto::GetProjectSymbols {
2721 project_id,
2722 query: query.to_string(),
2723 });
2724 cx.spawn_weak(|this, cx| async move {
2725 let response = request.await?;
2726 let mut symbols = Vec::new();
2727 if let Some(this) = this.upgrade(&cx) {
2728 this.read_with(&cx, |this, _| {
2729 symbols.extend(
2730 response
2731 .symbols
2732 .into_iter()
2733 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2734 );
2735 })
2736 }
2737 Ok(symbols)
2738 })
2739 } else {
2740 Task::ready(Ok(Default::default()))
2741 }
2742 }
2743
2744 pub fn open_buffer_for_symbol(
2745 &mut self,
2746 symbol: &Symbol,
2747 cx: &mut ModelContext<Self>,
2748 ) -> Task<Result<ModelHandle<Buffer>>> {
2749 if self.is_local() {
2750 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2751 symbol.source_worktree_id,
2752 symbol.language_server_name.clone(),
2753 )) {
2754 server.clone()
2755 } else {
2756 return Task::ready(Err(anyhow!(
2757 "language server for worktree and language not found"
2758 )));
2759 };
2760
2761 let worktree_abs_path = if let Some(worktree_abs_path) = self
2762 .worktree_for_id(symbol.worktree_id, cx)
2763 .and_then(|worktree| worktree.read(cx).as_local())
2764 .map(|local_worktree| local_worktree.abs_path())
2765 {
2766 worktree_abs_path
2767 } else {
2768 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2769 };
2770 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2771 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2772 uri
2773 } else {
2774 return Task::ready(Err(anyhow!("invalid symbol path")));
2775 };
2776
2777 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2778 } else if let Some(project_id) = self.remote_id() {
2779 let request = self.client.request(proto::OpenBufferForSymbol {
2780 project_id,
2781 symbol: Some(serialize_symbol(symbol)),
2782 });
2783 cx.spawn(|this, mut cx| async move {
2784 let response = request.await?;
2785 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2786 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2787 .await
2788 })
2789 } else {
2790 Task::ready(Err(anyhow!("project does not have a remote id")))
2791 }
2792 }
2793
2794 pub fn completions<T: ToPointUtf16>(
2795 &self,
2796 source_buffer_handle: &ModelHandle<Buffer>,
2797 position: T,
2798 cx: &mut ModelContext<Self>,
2799 ) -> Task<Result<Vec<Completion>>> {
2800 let source_buffer_handle = source_buffer_handle.clone();
2801 let source_buffer = source_buffer_handle.read(cx);
2802 let buffer_id = source_buffer.remote_id();
2803 let language = source_buffer.language().cloned();
2804 let worktree;
2805 let buffer_abs_path;
2806 if let Some(file) = File::from_dyn(source_buffer.file()) {
2807 worktree = file.worktree.clone();
2808 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2809 } else {
2810 return Task::ready(Ok(Default::default()));
2811 };
2812
2813 let position = position.to_point_utf16(source_buffer);
2814 let anchor = source_buffer.anchor_after(position);
2815
2816 if worktree.read(cx).as_local().is_some() {
2817 let buffer_abs_path = buffer_abs_path.unwrap();
2818 let (_, lang_server) =
2819 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2820 server.clone()
2821 } else {
2822 return Task::ready(Ok(Default::default()));
2823 };
2824
2825 cx.spawn(|_, cx| async move {
2826 let completions = lang_server
2827 .request::<lsp::request::Completion>(lsp::CompletionParams {
2828 text_document_position: lsp::TextDocumentPositionParams::new(
2829 lsp::TextDocumentIdentifier::new(
2830 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2831 ),
2832 point_to_lsp(position),
2833 ),
2834 context: Default::default(),
2835 work_done_progress_params: Default::default(),
2836 partial_result_params: Default::default(),
2837 })
2838 .await
2839 .context("lsp completion request failed")?;
2840
2841 let completions = if let Some(completions) = completions {
2842 match completions {
2843 lsp::CompletionResponse::Array(completions) => completions,
2844 lsp::CompletionResponse::List(list) => list.items,
2845 }
2846 } else {
2847 Default::default()
2848 };
2849
2850 source_buffer_handle.read_with(&cx, |this, _| {
2851 let snapshot = this.snapshot();
2852 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2853 let mut range_for_token = None;
2854 Ok(completions
2855 .into_iter()
2856 .filter_map(|lsp_completion| {
2857 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2858 // If the language server provides a range to overwrite, then
2859 // check that the range is valid.
2860 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2861 let range = range_from_lsp(edit.range);
2862 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2863 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2864 if start != range.start || end != range.end {
2865 log::info!("completion out of expected range");
2866 return None;
2867 }
2868 (
2869 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2870 edit.new_text.clone(),
2871 )
2872 }
2873 // If the language server does not provide a range, then infer
2874 // the range based on the syntax tree.
2875 None => {
2876 if position != clipped_position {
2877 log::info!("completion out of expected range");
2878 return None;
2879 }
2880 let Range { start, end } = range_for_token
2881 .get_or_insert_with(|| {
2882 let offset = position.to_offset(&snapshot);
2883 snapshot
2884 .range_for_word_token_at(offset)
2885 .unwrap_or_else(|| offset..offset)
2886 })
2887 .clone();
2888 let text = lsp_completion
2889 .insert_text
2890 .as_ref()
2891 .unwrap_or(&lsp_completion.label)
2892 .clone();
2893 (
2894 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2895 text.clone(),
2896 )
2897 }
2898 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2899 log::info!("unsupported insert/replace completion");
2900 return None;
2901 }
2902 };
2903
2904 Some(Completion {
2905 old_range,
2906 new_text,
2907 label: language
2908 .as_ref()
2909 .and_then(|l| l.label_for_completion(&lsp_completion))
2910 .unwrap_or_else(|| {
2911 CodeLabel::plain(
2912 lsp_completion.label.clone(),
2913 lsp_completion.filter_text.as_deref(),
2914 )
2915 }),
2916 lsp_completion,
2917 })
2918 })
2919 .collect())
2920 })
2921 })
2922 } else if let Some(project_id) = self.remote_id() {
2923 let rpc = self.client.clone();
2924 let message = proto::GetCompletions {
2925 project_id,
2926 buffer_id,
2927 position: Some(language::proto::serialize_anchor(&anchor)),
2928 version: serialize_version(&source_buffer.version()),
2929 };
2930 cx.spawn_weak(|_, mut cx| async move {
2931 let response = rpc.request(message).await?;
2932
2933 source_buffer_handle
2934 .update(&mut cx, |buffer, _| {
2935 buffer.wait_for_version(deserialize_version(response.version))
2936 })
2937 .await;
2938
2939 response
2940 .completions
2941 .into_iter()
2942 .map(|completion| {
2943 language::proto::deserialize_completion(completion, language.as_ref())
2944 })
2945 .collect()
2946 })
2947 } else {
2948 Task::ready(Ok(Default::default()))
2949 }
2950 }
2951
2952 pub fn apply_additional_edits_for_completion(
2953 &self,
2954 buffer_handle: ModelHandle<Buffer>,
2955 completion: Completion,
2956 push_to_history: bool,
2957 cx: &mut ModelContext<Self>,
2958 ) -> Task<Result<Option<Transaction>>> {
2959 let buffer = buffer_handle.read(cx);
2960 let buffer_id = buffer.remote_id();
2961
2962 if self.is_local() {
2963 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2964 {
2965 server.clone()
2966 } else {
2967 return Task::ready(Ok(Default::default()));
2968 };
2969
2970 cx.spawn(|this, mut cx| async move {
2971 let resolved_completion = lang_server
2972 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2973 .await?;
2974 if let Some(edits) = resolved_completion.additional_text_edits {
2975 let edits = this
2976 .update(&mut cx, |this, cx| {
2977 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2978 })
2979 .await?;
2980 buffer_handle.update(&mut cx, |buffer, cx| {
2981 buffer.finalize_last_transaction();
2982 buffer.start_transaction();
2983 for (range, text) in edits {
2984 buffer.edit([(range, text)], cx);
2985 }
2986 let transaction = if buffer.end_transaction(cx).is_some() {
2987 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2988 if !push_to_history {
2989 buffer.forget_transaction(transaction.id);
2990 }
2991 Some(transaction)
2992 } else {
2993 None
2994 };
2995 Ok(transaction)
2996 })
2997 } else {
2998 Ok(None)
2999 }
3000 })
3001 } else if let Some(project_id) = self.remote_id() {
3002 let client = self.client.clone();
3003 cx.spawn(|_, mut cx| async move {
3004 let response = client
3005 .request(proto::ApplyCompletionAdditionalEdits {
3006 project_id,
3007 buffer_id,
3008 completion: Some(language::proto::serialize_completion(&completion)),
3009 })
3010 .await?;
3011
3012 if let Some(transaction) = response.transaction {
3013 let transaction = language::proto::deserialize_transaction(transaction)?;
3014 buffer_handle
3015 .update(&mut cx, |buffer, _| {
3016 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3017 })
3018 .await;
3019 if push_to_history {
3020 buffer_handle.update(&mut cx, |buffer, _| {
3021 buffer.push_transaction(transaction.clone(), Instant::now());
3022 });
3023 }
3024 Ok(Some(transaction))
3025 } else {
3026 Ok(None)
3027 }
3028 })
3029 } else {
3030 Task::ready(Err(anyhow!("project does not have a remote id")))
3031 }
3032 }
3033
3034 pub fn code_actions<T: Clone + ToOffset>(
3035 &self,
3036 buffer_handle: &ModelHandle<Buffer>,
3037 range: Range<T>,
3038 cx: &mut ModelContext<Self>,
3039 ) -> Task<Result<Vec<CodeAction>>> {
3040 let buffer_handle = buffer_handle.clone();
3041 let buffer = buffer_handle.read(cx);
3042 let snapshot = buffer.snapshot();
3043 let relevant_diagnostics = snapshot
3044 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3045 .map(|entry| entry.to_lsp_diagnostic_stub())
3046 .collect();
3047 let buffer_id = buffer.remote_id();
3048 let worktree;
3049 let buffer_abs_path;
3050 if let Some(file) = File::from_dyn(buffer.file()) {
3051 worktree = file.worktree.clone();
3052 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3053 } else {
3054 return Task::ready(Ok(Default::default()));
3055 };
3056 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3057
3058 if worktree.read(cx).as_local().is_some() {
3059 let buffer_abs_path = buffer_abs_path.unwrap();
3060 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3061 {
3062 server.clone()
3063 } else {
3064 return Task::ready(Ok(Default::default()));
3065 };
3066
3067 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3068 cx.foreground().spawn(async move {
3069 if !lang_server.capabilities().code_action_provider.is_some() {
3070 return Ok(Default::default());
3071 }
3072
3073 Ok(lang_server
3074 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3075 text_document: lsp::TextDocumentIdentifier::new(
3076 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3077 ),
3078 range: lsp_range,
3079 work_done_progress_params: Default::default(),
3080 partial_result_params: Default::default(),
3081 context: lsp::CodeActionContext {
3082 diagnostics: relevant_diagnostics,
3083 only: Some(vec![
3084 lsp::CodeActionKind::QUICKFIX,
3085 lsp::CodeActionKind::REFACTOR,
3086 lsp::CodeActionKind::REFACTOR_EXTRACT,
3087 lsp::CodeActionKind::SOURCE,
3088 ]),
3089 },
3090 })
3091 .await?
3092 .unwrap_or_default()
3093 .into_iter()
3094 .filter_map(|entry| {
3095 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3096 Some(CodeAction {
3097 range: range.clone(),
3098 lsp_action,
3099 })
3100 } else {
3101 None
3102 }
3103 })
3104 .collect())
3105 })
3106 } else if let Some(project_id) = self.remote_id() {
3107 let rpc = self.client.clone();
3108 let version = buffer.version();
3109 cx.spawn_weak(|_, mut cx| async move {
3110 let response = rpc
3111 .request(proto::GetCodeActions {
3112 project_id,
3113 buffer_id,
3114 start: Some(language::proto::serialize_anchor(&range.start)),
3115 end: Some(language::proto::serialize_anchor(&range.end)),
3116 version: serialize_version(&version),
3117 })
3118 .await?;
3119
3120 buffer_handle
3121 .update(&mut cx, |buffer, _| {
3122 buffer.wait_for_version(deserialize_version(response.version))
3123 })
3124 .await;
3125
3126 response
3127 .actions
3128 .into_iter()
3129 .map(language::proto::deserialize_code_action)
3130 .collect()
3131 })
3132 } else {
3133 Task::ready(Ok(Default::default()))
3134 }
3135 }
3136
3137 pub fn apply_code_action(
3138 &self,
3139 buffer_handle: ModelHandle<Buffer>,
3140 mut action: CodeAction,
3141 push_to_history: bool,
3142 cx: &mut ModelContext<Self>,
3143 ) -> Task<Result<ProjectTransaction>> {
3144 if self.is_local() {
3145 let buffer = buffer_handle.read(cx);
3146 let (lsp_adapter, lang_server) =
3147 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3148 server.clone()
3149 } else {
3150 return Task::ready(Ok(Default::default()));
3151 };
3152 let range = action.range.to_point_utf16(buffer);
3153
3154 cx.spawn(|this, mut cx| async move {
3155 if let Some(lsp_range) = action
3156 .lsp_action
3157 .data
3158 .as_mut()
3159 .and_then(|d| d.get_mut("codeActionParams"))
3160 .and_then(|d| d.get_mut("range"))
3161 {
3162 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3163 action.lsp_action = lang_server
3164 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3165 .await?;
3166 } else {
3167 let actions = this
3168 .update(&mut cx, |this, cx| {
3169 this.code_actions(&buffer_handle, action.range, cx)
3170 })
3171 .await?;
3172 action.lsp_action = actions
3173 .into_iter()
3174 .find(|a| a.lsp_action.title == action.lsp_action.title)
3175 .ok_or_else(|| anyhow!("code action is outdated"))?
3176 .lsp_action;
3177 }
3178
3179 if let Some(edit) = action.lsp_action.edit {
3180 Self::deserialize_workspace_edit(
3181 this,
3182 edit,
3183 push_to_history,
3184 lsp_adapter,
3185 lang_server,
3186 &mut cx,
3187 )
3188 .await
3189 } else if let Some(command) = action.lsp_action.command {
3190 this.update(&mut cx, |this, _| {
3191 this.last_workspace_edits_by_language_server
3192 .remove(&lang_server.server_id());
3193 });
3194 lang_server
3195 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3196 command: command.command,
3197 arguments: command.arguments.unwrap_or_default(),
3198 ..Default::default()
3199 })
3200 .await?;
3201 Ok(this.update(&mut cx, |this, _| {
3202 this.last_workspace_edits_by_language_server
3203 .remove(&lang_server.server_id())
3204 .unwrap_or_default()
3205 }))
3206 } else {
3207 Ok(ProjectTransaction::default())
3208 }
3209 })
3210 } else if let Some(project_id) = self.remote_id() {
3211 let client = self.client.clone();
3212 let request = proto::ApplyCodeAction {
3213 project_id,
3214 buffer_id: buffer_handle.read(cx).remote_id(),
3215 action: Some(language::proto::serialize_code_action(&action)),
3216 };
3217 cx.spawn(|this, mut cx| async move {
3218 let response = client
3219 .request(request)
3220 .await?
3221 .transaction
3222 .ok_or_else(|| anyhow!("missing transaction"))?;
3223 this.update(&mut cx, |this, cx| {
3224 this.deserialize_project_transaction(response, push_to_history, cx)
3225 })
3226 .await
3227 })
3228 } else {
3229 Task::ready(Err(anyhow!("project does not have a remote id")))
3230 }
3231 }
3232
3233 async fn deserialize_workspace_edit(
3234 this: ModelHandle<Self>,
3235 edit: lsp::WorkspaceEdit,
3236 push_to_history: bool,
3237 lsp_adapter: Arc<dyn LspAdapter>,
3238 language_server: Arc<LanguageServer>,
3239 cx: &mut AsyncAppContext,
3240 ) -> Result<ProjectTransaction> {
3241 let fs = this.read_with(cx, |this, _| this.fs.clone());
3242 let mut operations = Vec::new();
3243 if let Some(document_changes) = edit.document_changes {
3244 match document_changes {
3245 lsp::DocumentChanges::Edits(edits) => {
3246 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3247 }
3248 lsp::DocumentChanges::Operations(ops) => operations = ops,
3249 }
3250 } else if let Some(changes) = edit.changes {
3251 operations.extend(changes.into_iter().map(|(uri, edits)| {
3252 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3253 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3254 uri,
3255 version: None,
3256 },
3257 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3258 })
3259 }));
3260 }
3261
3262 let mut project_transaction = ProjectTransaction::default();
3263 for operation in operations {
3264 match operation {
3265 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3266 let abs_path = op
3267 .uri
3268 .to_file_path()
3269 .map_err(|_| anyhow!("can't convert URI to path"))?;
3270
3271 if let Some(parent_path) = abs_path.parent() {
3272 fs.create_dir(parent_path).await?;
3273 }
3274 if abs_path.ends_with("/") {
3275 fs.create_dir(&abs_path).await?;
3276 } else {
3277 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3278 .await?;
3279 }
3280 }
3281 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3282 let source_abs_path = op
3283 .old_uri
3284 .to_file_path()
3285 .map_err(|_| anyhow!("can't convert URI to path"))?;
3286 let target_abs_path = op
3287 .new_uri
3288 .to_file_path()
3289 .map_err(|_| anyhow!("can't convert URI to path"))?;
3290 fs.rename(
3291 &source_abs_path,
3292 &target_abs_path,
3293 op.options.map(Into::into).unwrap_or_default(),
3294 )
3295 .await?;
3296 }
3297 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3298 let abs_path = op
3299 .uri
3300 .to_file_path()
3301 .map_err(|_| anyhow!("can't convert URI to path"))?;
3302 let options = op.options.map(Into::into).unwrap_or_default();
3303 if abs_path.ends_with("/") {
3304 fs.remove_dir(&abs_path, options).await?;
3305 } else {
3306 fs.remove_file(&abs_path, options).await?;
3307 }
3308 }
3309 lsp::DocumentChangeOperation::Edit(op) => {
3310 let buffer_to_edit = this
3311 .update(cx, |this, cx| {
3312 this.open_local_buffer_via_lsp(
3313 op.text_document.uri,
3314 lsp_adapter.clone(),
3315 language_server.clone(),
3316 cx,
3317 )
3318 })
3319 .await?;
3320
3321 let edits = this
3322 .update(cx, |this, cx| {
3323 let edits = op.edits.into_iter().map(|edit| match edit {
3324 lsp::OneOf::Left(edit) => edit,
3325 lsp::OneOf::Right(edit) => edit.text_edit,
3326 });
3327 this.edits_from_lsp(
3328 &buffer_to_edit,
3329 edits,
3330 op.text_document.version,
3331 cx,
3332 )
3333 })
3334 .await?;
3335
3336 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3337 buffer.finalize_last_transaction();
3338 buffer.start_transaction();
3339 for (range, text) in edits {
3340 buffer.edit([(range, text)], cx);
3341 }
3342 let transaction = if buffer.end_transaction(cx).is_some() {
3343 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3344 if !push_to_history {
3345 buffer.forget_transaction(transaction.id);
3346 }
3347 Some(transaction)
3348 } else {
3349 None
3350 };
3351
3352 transaction
3353 });
3354 if let Some(transaction) = transaction {
3355 project_transaction.0.insert(buffer_to_edit, transaction);
3356 }
3357 }
3358 }
3359 }
3360
3361 Ok(project_transaction)
3362 }
3363
3364 pub fn prepare_rename<T: ToPointUtf16>(
3365 &self,
3366 buffer: ModelHandle<Buffer>,
3367 position: T,
3368 cx: &mut ModelContext<Self>,
3369 ) -> Task<Result<Option<Range<Anchor>>>> {
3370 let position = position.to_point_utf16(buffer.read(cx));
3371 self.request_lsp(buffer, PrepareRename { position }, cx)
3372 }
3373
3374 pub fn perform_rename<T: ToPointUtf16>(
3375 &self,
3376 buffer: ModelHandle<Buffer>,
3377 position: T,
3378 new_name: String,
3379 push_to_history: bool,
3380 cx: &mut ModelContext<Self>,
3381 ) -> Task<Result<ProjectTransaction>> {
3382 let position = position.to_point_utf16(buffer.read(cx));
3383 self.request_lsp(
3384 buffer,
3385 PerformRename {
3386 position,
3387 new_name,
3388 push_to_history,
3389 },
3390 cx,
3391 )
3392 }
3393
3394 pub fn search(
3395 &self,
3396 query: SearchQuery,
3397 cx: &mut ModelContext<Self>,
3398 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3399 if self.is_local() {
3400 let snapshots = self
3401 .visible_worktrees(cx)
3402 .filter_map(|tree| {
3403 let tree = tree.read(cx).as_local()?;
3404 Some(tree.snapshot())
3405 })
3406 .collect::<Vec<_>>();
3407
3408 let background = cx.background().clone();
3409 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3410 if path_count == 0 {
3411 return Task::ready(Ok(Default::default()));
3412 }
3413 let workers = background.num_cpus().min(path_count);
3414 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3415 cx.background()
3416 .spawn({
3417 let fs = self.fs.clone();
3418 let background = cx.background().clone();
3419 let query = query.clone();
3420 async move {
3421 let fs = &fs;
3422 let query = &query;
3423 let matching_paths_tx = &matching_paths_tx;
3424 let paths_per_worker = (path_count + workers - 1) / workers;
3425 let snapshots = &snapshots;
3426 background
3427 .scoped(|scope| {
3428 for worker_ix in 0..workers {
3429 let worker_start_ix = worker_ix * paths_per_worker;
3430 let worker_end_ix = worker_start_ix + paths_per_worker;
3431 scope.spawn(async move {
3432 let mut snapshot_start_ix = 0;
3433 let mut abs_path = PathBuf::new();
3434 for snapshot in snapshots {
3435 let snapshot_end_ix =
3436 snapshot_start_ix + snapshot.visible_file_count();
3437 if worker_end_ix <= snapshot_start_ix {
3438 break;
3439 } else if worker_start_ix > snapshot_end_ix {
3440 snapshot_start_ix = snapshot_end_ix;
3441 continue;
3442 } else {
3443 let start_in_snapshot = worker_start_ix
3444 .saturating_sub(snapshot_start_ix);
3445 let end_in_snapshot =
3446 cmp::min(worker_end_ix, snapshot_end_ix)
3447 - snapshot_start_ix;
3448
3449 for entry in snapshot
3450 .files(false, start_in_snapshot)
3451 .take(end_in_snapshot - start_in_snapshot)
3452 {
3453 if matching_paths_tx.is_closed() {
3454 break;
3455 }
3456
3457 abs_path.clear();
3458 abs_path.push(&snapshot.abs_path());
3459 abs_path.push(&entry.path);
3460 let matches = if let Some(file) =
3461 fs.open_sync(&abs_path).await.log_err()
3462 {
3463 query.detect(file).unwrap_or(false)
3464 } else {
3465 false
3466 };
3467
3468 if matches {
3469 let project_path =
3470 (snapshot.id(), entry.path.clone());
3471 if matching_paths_tx
3472 .send(project_path)
3473 .await
3474 .is_err()
3475 {
3476 break;
3477 }
3478 }
3479 }
3480
3481 snapshot_start_ix = snapshot_end_ix;
3482 }
3483 }
3484 });
3485 }
3486 })
3487 .await;
3488 }
3489 })
3490 .detach();
3491
3492 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3493 let open_buffers = self
3494 .opened_buffers
3495 .values()
3496 .filter_map(|b| b.upgrade(cx))
3497 .collect::<HashSet<_>>();
3498 cx.spawn(|this, cx| async move {
3499 for buffer in &open_buffers {
3500 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3501 buffers_tx.send((buffer.clone(), snapshot)).await?;
3502 }
3503
3504 let open_buffers = Rc::new(RefCell::new(open_buffers));
3505 while let Some(project_path) = matching_paths_rx.next().await {
3506 if buffers_tx.is_closed() {
3507 break;
3508 }
3509
3510 let this = this.clone();
3511 let open_buffers = open_buffers.clone();
3512 let buffers_tx = buffers_tx.clone();
3513 cx.spawn(|mut cx| async move {
3514 if let Some(buffer) = this
3515 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3516 .await
3517 .log_err()
3518 {
3519 if open_buffers.borrow_mut().insert(buffer.clone()) {
3520 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3521 buffers_tx.send((buffer, snapshot)).await?;
3522 }
3523 }
3524
3525 Ok::<_, anyhow::Error>(())
3526 })
3527 .detach();
3528 }
3529
3530 Ok::<_, anyhow::Error>(())
3531 })
3532 .detach_and_log_err(cx);
3533
3534 let background = cx.background().clone();
3535 cx.background().spawn(async move {
3536 let query = &query;
3537 let mut matched_buffers = Vec::new();
3538 for _ in 0..workers {
3539 matched_buffers.push(HashMap::default());
3540 }
3541 background
3542 .scoped(|scope| {
3543 for worker_matched_buffers in matched_buffers.iter_mut() {
3544 let mut buffers_rx = buffers_rx.clone();
3545 scope.spawn(async move {
3546 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3547 let buffer_matches = query
3548 .search(snapshot.as_rope())
3549 .await
3550 .iter()
3551 .map(|range| {
3552 snapshot.anchor_before(range.start)
3553 ..snapshot.anchor_after(range.end)
3554 })
3555 .collect::<Vec<_>>();
3556 if !buffer_matches.is_empty() {
3557 worker_matched_buffers
3558 .insert(buffer.clone(), buffer_matches);
3559 }
3560 }
3561 });
3562 }
3563 })
3564 .await;
3565 Ok(matched_buffers.into_iter().flatten().collect())
3566 })
3567 } else if let Some(project_id) = self.remote_id() {
3568 let request = self.client.request(query.to_proto(project_id));
3569 cx.spawn(|this, mut cx| async move {
3570 let response = request.await?;
3571 let mut result = HashMap::default();
3572 for location in response.locations {
3573 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3574 let target_buffer = this
3575 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3576 .await?;
3577 let start = location
3578 .start
3579 .and_then(deserialize_anchor)
3580 .ok_or_else(|| anyhow!("missing target start"))?;
3581 let end = location
3582 .end
3583 .and_then(deserialize_anchor)
3584 .ok_or_else(|| anyhow!("missing target end"))?;
3585 result
3586 .entry(target_buffer)
3587 .or_insert(Vec::new())
3588 .push(start..end)
3589 }
3590 Ok(result)
3591 })
3592 } else {
3593 Task::ready(Ok(Default::default()))
3594 }
3595 }
3596
3597 fn request_lsp<R: LspCommand>(
3598 &self,
3599 buffer_handle: ModelHandle<Buffer>,
3600 request: R,
3601 cx: &mut ModelContext<Self>,
3602 ) -> Task<Result<R::Response>>
3603 where
3604 <R::LspRequest as lsp::request::Request>::Result: Send,
3605 {
3606 let buffer = buffer_handle.read(cx);
3607 if self.is_local() {
3608 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3609 if let Some((file, (_, language_server))) =
3610 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3611 {
3612 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3613 return cx.spawn(|this, cx| async move {
3614 if !request.check_capabilities(&language_server.capabilities()) {
3615 return Ok(Default::default());
3616 }
3617
3618 let response = language_server
3619 .request::<R::LspRequest>(lsp_params)
3620 .await
3621 .context("lsp request failed")?;
3622 request
3623 .response_from_lsp(response, this, buffer_handle, cx)
3624 .await
3625 });
3626 }
3627 } else if let Some(project_id) = self.remote_id() {
3628 let rpc = self.client.clone();
3629 let message = request.to_proto(project_id, buffer);
3630 return cx.spawn(|this, cx| async move {
3631 let response = rpc.request(message).await?;
3632 request
3633 .response_from_proto(response, this, buffer_handle, cx)
3634 .await
3635 });
3636 }
3637 Task::ready(Ok(Default::default()))
3638 }
3639
3640 pub fn find_or_create_local_worktree(
3641 &mut self,
3642 abs_path: impl AsRef<Path>,
3643 visible: bool,
3644 cx: &mut ModelContext<Self>,
3645 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3646 let abs_path = abs_path.as_ref();
3647 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3648 Task::ready(Ok((tree.clone(), relative_path.into())))
3649 } else {
3650 let worktree = self.create_local_worktree(abs_path, visible, cx);
3651 cx.foreground()
3652 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3653 }
3654 }
3655
3656 pub fn find_local_worktree(
3657 &self,
3658 abs_path: &Path,
3659 cx: &AppContext,
3660 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3661 for tree in self.worktrees(cx) {
3662 if let Some(relative_path) = tree
3663 .read(cx)
3664 .as_local()
3665 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3666 {
3667 return Some((tree.clone(), relative_path.into()));
3668 }
3669 }
3670 None
3671 }
3672
3673 pub fn is_shared(&self) -> bool {
3674 match &self.client_state {
3675 ProjectClientState::Local { is_shared, .. } => *is_shared,
3676 ProjectClientState::Remote { .. } => false,
3677 }
3678 }
3679
3680 fn create_local_worktree(
3681 &mut self,
3682 abs_path: impl AsRef<Path>,
3683 visible: bool,
3684 cx: &mut ModelContext<Self>,
3685 ) -> Task<Result<ModelHandle<Worktree>>> {
3686 let fs = self.fs.clone();
3687 let client = self.client.clone();
3688 let next_entry_id = self.next_entry_id.clone();
3689 let path: Arc<Path> = abs_path.as_ref().into();
3690 let task = self
3691 .loading_local_worktrees
3692 .entry(path.clone())
3693 .or_insert_with(|| {
3694 cx.spawn(|project, mut cx| {
3695 async move {
3696 let worktree = Worktree::local(
3697 client.clone(),
3698 path.clone(),
3699 visible,
3700 fs,
3701 next_entry_id,
3702 &mut cx,
3703 )
3704 .await;
3705 project.update(&mut cx, |project, _| {
3706 project.loading_local_worktrees.remove(&path);
3707 });
3708 let worktree = worktree?;
3709
3710 let project_id = project.update(&mut cx, |project, cx| {
3711 project.add_worktree(&worktree, cx);
3712 project.shared_remote_id()
3713 });
3714
3715 if let Some(project_id) = project_id {
3716 worktree
3717 .update(&mut cx, |worktree, cx| {
3718 worktree.as_local_mut().unwrap().share(project_id, cx)
3719 })
3720 .await
3721 .log_err();
3722 }
3723
3724 Ok(worktree)
3725 }
3726 .map_err(|err| Arc::new(err))
3727 })
3728 .shared()
3729 })
3730 .clone();
3731 cx.foreground().spawn(async move {
3732 match task.await {
3733 Ok(worktree) => Ok(worktree),
3734 Err(err) => Err(anyhow!("{}", err)),
3735 }
3736 })
3737 }
3738
3739 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3740 self.worktrees.retain(|worktree| {
3741 if let Some(worktree) = worktree.upgrade(cx) {
3742 let id = worktree.read(cx).id();
3743 if id == id_to_remove {
3744 cx.emit(Event::WorktreeRemoved(id));
3745 false
3746 } else {
3747 true
3748 }
3749 } else {
3750 false
3751 }
3752 });
3753 self.metadata_changed(cx);
3754 cx.notify();
3755 }
3756
3757 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3758 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3759 if worktree.read(cx).is_local() {
3760 cx.subscribe(&worktree, |this, worktree, _, cx| {
3761 this.update_local_worktree_buffers(worktree, cx);
3762 })
3763 .detach();
3764 }
3765
3766 let push_strong_handle = {
3767 let worktree = worktree.read(cx);
3768 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3769 };
3770 if push_strong_handle {
3771 self.worktrees
3772 .push(WorktreeHandle::Strong(worktree.clone()));
3773 } else {
3774 cx.observe_release(&worktree, |this, _, cx| {
3775 this.worktrees
3776 .retain(|worktree| worktree.upgrade(cx).is_some());
3777 cx.notify();
3778 })
3779 .detach();
3780 self.worktrees
3781 .push(WorktreeHandle::Weak(worktree.downgrade()));
3782 }
3783 self.metadata_changed(cx);
3784 cx.emit(Event::WorktreeAdded);
3785 cx.notify();
3786 }
3787
3788 fn update_local_worktree_buffers(
3789 &mut self,
3790 worktree_handle: ModelHandle<Worktree>,
3791 cx: &mut ModelContext<Self>,
3792 ) {
3793 let snapshot = worktree_handle.read(cx).snapshot();
3794 let mut buffers_to_delete = Vec::new();
3795 let mut renamed_buffers = Vec::new();
3796 for (buffer_id, buffer) in &self.opened_buffers {
3797 if let Some(buffer) = buffer.upgrade(cx) {
3798 buffer.update(cx, |buffer, cx| {
3799 if let Some(old_file) = File::from_dyn(buffer.file()) {
3800 if old_file.worktree != worktree_handle {
3801 return;
3802 }
3803
3804 let new_file = if let Some(entry) = old_file
3805 .entry_id
3806 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3807 {
3808 File {
3809 is_local: true,
3810 entry_id: Some(entry.id),
3811 mtime: entry.mtime,
3812 path: entry.path.clone(),
3813 worktree: worktree_handle.clone(),
3814 }
3815 } else if let Some(entry) =
3816 snapshot.entry_for_path(old_file.path().as_ref())
3817 {
3818 File {
3819 is_local: true,
3820 entry_id: Some(entry.id),
3821 mtime: entry.mtime,
3822 path: entry.path.clone(),
3823 worktree: worktree_handle.clone(),
3824 }
3825 } else {
3826 File {
3827 is_local: true,
3828 entry_id: None,
3829 path: old_file.path().clone(),
3830 mtime: old_file.mtime(),
3831 worktree: worktree_handle.clone(),
3832 }
3833 };
3834
3835 let old_path = old_file.abs_path(cx);
3836 if new_file.abs_path(cx) != old_path {
3837 renamed_buffers.push((cx.handle(), old_path));
3838 }
3839
3840 if let Some(project_id) = self.shared_remote_id() {
3841 self.client
3842 .send(proto::UpdateBufferFile {
3843 project_id,
3844 buffer_id: *buffer_id as u64,
3845 file: Some(new_file.to_proto()),
3846 })
3847 .log_err();
3848 }
3849 buffer.file_updated(Box::new(new_file), cx).detach();
3850 }
3851 });
3852 } else {
3853 buffers_to_delete.push(*buffer_id);
3854 }
3855 }
3856
3857 for buffer_id in buffers_to_delete {
3858 self.opened_buffers.remove(&buffer_id);
3859 }
3860
3861 for (buffer, old_path) in renamed_buffers {
3862 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3863 self.assign_language_to_buffer(&buffer, cx);
3864 self.register_buffer_with_language_server(&buffer, cx);
3865 }
3866 }
3867
3868 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3869 let new_active_entry = entry.and_then(|project_path| {
3870 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3871 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3872 Some(entry.id)
3873 });
3874 if new_active_entry != self.active_entry {
3875 self.active_entry = new_active_entry;
3876 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3877 }
3878 }
3879
3880 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3881 self.language_server_statuses
3882 .values()
3883 .any(|status| status.pending_diagnostic_updates > 0)
3884 }
3885
3886 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3887 let mut summary = DiagnosticSummary::default();
3888 for (_, path_summary) in self.diagnostic_summaries(cx) {
3889 summary.error_count += path_summary.error_count;
3890 summary.warning_count += path_summary.warning_count;
3891 }
3892 summary
3893 }
3894
3895 pub fn diagnostic_summaries<'a>(
3896 &'a self,
3897 cx: &'a AppContext,
3898 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3899 self.worktrees(cx).flat_map(move |worktree| {
3900 let worktree = worktree.read(cx);
3901 let worktree_id = worktree.id();
3902 worktree
3903 .diagnostic_summaries()
3904 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3905 })
3906 }
3907
3908 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3909 if self
3910 .language_server_statuses
3911 .values()
3912 .map(|status| status.pending_diagnostic_updates)
3913 .sum::<isize>()
3914 == 1
3915 {
3916 cx.emit(Event::DiskBasedDiagnosticsStarted);
3917 }
3918 }
3919
3920 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3921 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3922 if self
3923 .language_server_statuses
3924 .values()
3925 .map(|status| status.pending_diagnostic_updates)
3926 .sum::<isize>()
3927 == 0
3928 {
3929 cx.emit(Event::DiskBasedDiagnosticsFinished);
3930 }
3931 }
3932
3933 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3934 self.active_entry
3935 }
3936
3937 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3938 self.worktree_for_id(path.worktree_id, cx)?
3939 .read(cx)
3940 .entry_for_path(&path.path)
3941 .map(|entry| entry.id)
3942 }
3943
3944 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3945 let worktree = self.worktree_for_entry(entry_id, cx)?;
3946 let worktree = worktree.read(cx);
3947 let worktree_id = worktree.id();
3948 let path = worktree.entry_for_id(entry_id)?.path.clone();
3949 Some(ProjectPath { worktree_id, path })
3950 }
3951
3952 // RPC message handlers
3953
3954 async fn handle_request_join_project(
3955 this: ModelHandle<Self>,
3956 message: TypedEnvelope<proto::RequestJoinProject>,
3957 _: Arc<Client>,
3958 mut cx: AsyncAppContext,
3959 ) -> Result<()> {
3960 let user_id = message.payload.requester_id;
3961 if this.read_with(&cx, |project, _| {
3962 project.collaborators.values().any(|c| c.user.id == user_id)
3963 }) {
3964 this.update(&mut cx, |this, cx| {
3965 this.respond_to_join_request(user_id, true, cx)
3966 });
3967 } else {
3968 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3969 let user = user_store
3970 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3971 .await?;
3972 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3973 }
3974 Ok(())
3975 }
3976
3977 async fn handle_unregister_project(
3978 this: ModelHandle<Self>,
3979 _: TypedEnvelope<proto::UnregisterProject>,
3980 _: Arc<Client>,
3981 mut cx: AsyncAppContext,
3982 ) -> Result<()> {
3983 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3984 Ok(())
3985 }
3986
3987 async fn handle_project_unshared(
3988 this: ModelHandle<Self>,
3989 _: TypedEnvelope<proto::ProjectUnshared>,
3990 _: Arc<Client>,
3991 mut cx: AsyncAppContext,
3992 ) -> Result<()> {
3993 this.update(&mut cx, |this, cx| this.unshared(cx));
3994 Ok(())
3995 }
3996
3997 async fn handle_add_collaborator(
3998 this: ModelHandle<Self>,
3999 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4000 _: Arc<Client>,
4001 mut cx: AsyncAppContext,
4002 ) -> Result<()> {
4003 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4004 let collaborator = envelope
4005 .payload
4006 .collaborator
4007 .take()
4008 .ok_or_else(|| anyhow!("empty collaborator"))?;
4009
4010 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4011 this.update(&mut cx, |this, cx| {
4012 this.collaborators
4013 .insert(collaborator.peer_id, collaborator);
4014 cx.notify();
4015 });
4016
4017 Ok(())
4018 }
4019
4020 async fn handle_remove_collaborator(
4021 this: ModelHandle<Self>,
4022 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4023 _: Arc<Client>,
4024 mut cx: AsyncAppContext,
4025 ) -> Result<()> {
4026 this.update(&mut cx, |this, cx| {
4027 let peer_id = PeerId(envelope.payload.peer_id);
4028 let replica_id = this
4029 .collaborators
4030 .remove(&peer_id)
4031 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4032 .replica_id;
4033 for (_, buffer) in &this.opened_buffers {
4034 if let Some(buffer) = buffer.upgrade(cx) {
4035 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4036 }
4037 }
4038
4039 cx.emit(Event::CollaboratorLeft(peer_id));
4040 cx.notify();
4041 Ok(())
4042 })
4043 }
4044
4045 async fn handle_join_project_request_cancelled(
4046 this: ModelHandle<Self>,
4047 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4048 _: Arc<Client>,
4049 mut cx: AsyncAppContext,
4050 ) -> Result<()> {
4051 let user = this
4052 .update(&mut cx, |this, cx| {
4053 this.user_store.update(cx, |user_store, cx| {
4054 user_store.fetch_user(envelope.payload.requester_id, cx)
4055 })
4056 })
4057 .await?;
4058
4059 this.update(&mut cx, |_, cx| {
4060 cx.emit(Event::ContactCancelledJoinRequest(user));
4061 });
4062
4063 Ok(())
4064 }
4065
4066 async fn handle_update_project(
4067 this: ModelHandle<Self>,
4068 envelope: TypedEnvelope<proto::UpdateProject>,
4069 client: Arc<Client>,
4070 mut cx: AsyncAppContext,
4071 ) -> Result<()> {
4072 this.update(&mut cx, |this, cx| {
4073 let replica_id = this.replica_id();
4074 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4075
4076 let mut old_worktrees_by_id = this
4077 .worktrees
4078 .drain(..)
4079 .filter_map(|worktree| {
4080 let worktree = worktree.upgrade(cx)?;
4081 Some((worktree.read(cx).id(), worktree))
4082 })
4083 .collect::<HashMap<_, _>>();
4084
4085 for worktree in envelope.payload.worktrees {
4086 if let Some(old_worktree) =
4087 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4088 {
4089 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4090 } else {
4091 let worktree = proto::Worktree {
4092 id: worktree.id,
4093 root_name: worktree.root_name,
4094 entries: Default::default(),
4095 diagnostic_summaries: Default::default(),
4096 visible: worktree.visible,
4097 scan_id: 0,
4098 };
4099 let (worktree, load_task) =
4100 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4101 this.add_worktree(&worktree, cx);
4102 load_task.detach();
4103 }
4104 }
4105
4106 this.metadata_changed(cx);
4107 for (id, _) in old_worktrees_by_id {
4108 cx.emit(Event::WorktreeRemoved(id));
4109 }
4110
4111 Ok(())
4112 })
4113 }
4114
4115 async fn handle_update_worktree(
4116 this: ModelHandle<Self>,
4117 envelope: TypedEnvelope<proto::UpdateWorktree>,
4118 _: Arc<Client>,
4119 mut cx: AsyncAppContext,
4120 ) -> Result<()> {
4121 this.update(&mut cx, |this, cx| {
4122 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4123 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4124 worktree.update(cx, |worktree, _| {
4125 let worktree = worktree.as_remote_mut().unwrap();
4126 worktree.update_from_remote(envelope)
4127 })?;
4128 }
4129 Ok(())
4130 })
4131 }
4132
4133 async fn handle_create_project_entry(
4134 this: ModelHandle<Self>,
4135 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4136 _: Arc<Client>,
4137 mut cx: AsyncAppContext,
4138 ) -> Result<proto::ProjectEntryResponse> {
4139 let worktree = this.update(&mut cx, |this, cx| {
4140 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4141 this.worktree_for_id(worktree_id, cx)
4142 .ok_or_else(|| anyhow!("worktree not found"))
4143 })?;
4144 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4145 let entry = worktree
4146 .update(&mut cx, |worktree, cx| {
4147 let worktree = worktree.as_local_mut().unwrap();
4148 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4149 worktree.create_entry(path, envelope.payload.is_directory, cx)
4150 })
4151 .await?;
4152 Ok(proto::ProjectEntryResponse {
4153 entry: Some((&entry).into()),
4154 worktree_scan_id: worktree_scan_id as u64,
4155 })
4156 }
4157
4158 async fn handle_rename_project_entry(
4159 this: ModelHandle<Self>,
4160 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4161 _: Arc<Client>,
4162 mut cx: AsyncAppContext,
4163 ) -> Result<proto::ProjectEntryResponse> {
4164 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4165 let worktree = this.read_with(&cx, |this, cx| {
4166 this.worktree_for_entry(entry_id, cx)
4167 .ok_or_else(|| anyhow!("worktree not found"))
4168 })?;
4169 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4170 let entry = worktree
4171 .update(&mut cx, |worktree, cx| {
4172 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4173 worktree
4174 .as_local_mut()
4175 .unwrap()
4176 .rename_entry(entry_id, new_path, cx)
4177 .ok_or_else(|| anyhow!("invalid entry"))
4178 })?
4179 .await?;
4180 Ok(proto::ProjectEntryResponse {
4181 entry: Some((&entry).into()),
4182 worktree_scan_id: worktree_scan_id as u64,
4183 })
4184 }
4185
4186 async fn handle_copy_project_entry(
4187 this: ModelHandle<Self>,
4188 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4189 _: Arc<Client>,
4190 mut cx: AsyncAppContext,
4191 ) -> Result<proto::ProjectEntryResponse> {
4192 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4193 let worktree = this.read_with(&cx, |this, cx| {
4194 this.worktree_for_entry(entry_id, cx)
4195 .ok_or_else(|| anyhow!("worktree not found"))
4196 })?;
4197 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4198 let entry = worktree
4199 .update(&mut cx, |worktree, cx| {
4200 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4201 worktree
4202 .as_local_mut()
4203 .unwrap()
4204 .copy_entry(entry_id, new_path, cx)
4205 .ok_or_else(|| anyhow!("invalid entry"))
4206 })?
4207 .await?;
4208 Ok(proto::ProjectEntryResponse {
4209 entry: Some((&entry).into()),
4210 worktree_scan_id: worktree_scan_id as u64,
4211 })
4212 }
4213
4214 async fn handle_delete_project_entry(
4215 this: ModelHandle<Self>,
4216 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4217 _: Arc<Client>,
4218 mut cx: AsyncAppContext,
4219 ) -> Result<proto::ProjectEntryResponse> {
4220 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4221 let worktree = this.read_with(&cx, |this, cx| {
4222 this.worktree_for_entry(entry_id, cx)
4223 .ok_or_else(|| anyhow!("worktree not found"))
4224 })?;
4225 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4226 worktree
4227 .update(&mut cx, |worktree, cx| {
4228 worktree
4229 .as_local_mut()
4230 .unwrap()
4231 .delete_entry(entry_id, cx)
4232 .ok_or_else(|| anyhow!("invalid entry"))
4233 })?
4234 .await?;
4235 Ok(proto::ProjectEntryResponse {
4236 entry: None,
4237 worktree_scan_id: worktree_scan_id as u64,
4238 })
4239 }
4240
4241 async fn handle_update_diagnostic_summary(
4242 this: ModelHandle<Self>,
4243 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4244 _: Arc<Client>,
4245 mut cx: AsyncAppContext,
4246 ) -> Result<()> {
4247 this.update(&mut cx, |this, cx| {
4248 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4249 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4250 if let Some(summary) = envelope.payload.summary {
4251 let project_path = ProjectPath {
4252 worktree_id,
4253 path: Path::new(&summary.path).into(),
4254 };
4255 worktree.update(cx, |worktree, _| {
4256 worktree
4257 .as_remote_mut()
4258 .unwrap()
4259 .update_diagnostic_summary(project_path.path.clone(), &summary);
4260 });
4261 cx.emit(Event::DiagnosticsUpdated(project_path));
4262 }
4263 }
4264 Ok(())
4265 })
4266 }
4267
4268 async fn handle_start_language_server(
4269 this: ModelHandle<Self>,
4270 envelope: TypedEnvelope<proto::StartLanguageServer>,
4271 _: Arc<Client>,
4272 mut cx: AsyncAppContext,
4273 ) -> Result<()> {
4274 let server = envelope
4275 .payload
4276 .server
4277 .ok_or_else(|| anyhow!("invalid server"))?;
4278 this.update(&mut cx, |this, cx| {
4279 this.language_server_statuses.insert(
4280 server.id as usize,
4281 LanguageServerStatus {
4282 name: server.name,
4283 pending_work: Default::default(),
4284 pending_diagnostic_updates: 0,
4285 },
4286 );
4287 cx.notify();
4288 });
4289 Ok(())
4290 }
4291
4292 async fn handle_update_language_server(
4293 this: ModelHandle<Self>,
4294 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4295 _: Arc<Client>,
4296 mut cx: AsyncAppContext,
4297 ) -> Result<()> {
4298 let language_server_id = envelope.payload.language_server_id as usize;
4299 match envelope
4300 .payload
4301 .variant
4302 .ok_or_else(|| anyhow!("invalid variant"))?
4303 {
4304 proto::update_language_server::Variant::WorkStart(payload) => {
4305 this.update(&mut cx, |this, cx| {
4306 this.on_lsp_work_start(language_server_id, payload.token, cx);
4307 })
4308 }
4309 proto::update_language_server::Variant::WorkProgress(payload) => {
4310 this.update(&mut cx, |this, cx| {
4311 this.on_lsp_work_progress(
4312 language_server_id,
4313 payload.token,
4314 LanguageServerProgress {
4315 message: payload.message,
4316 percentage: payload.percentage.map(|p| p as usize),
4317 last_update_at: Instant::now(),
4318 },
4319 cx,
4320 );
4321 })
4322 }
4323 proto::update_language_server::Variant::WorkEnd(payload) => {
4324 this.update(&mut cx, |this, cx| {
4325 this.on_lsp_work_end(language_server_id, payload.token, cx);
4326 })
4327 }
4328 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4329 this.update(&mut cx, |this, cx| {
4330 this.disk_based_diagnostics_started(cx);
4331 })
4332 }
4333 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4334 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4335 }
4336 }
4337
4338 Ok(())
4339 }
4340
4341 async fn handle_update_buffer(
4342 this: ModelHandle<Self>,
4343 envelope: TypedEnvelope<proto::UpdateBuffer>,
4344 _: Arc<Client>,
4345 mut cx: AsyncAppContext,
4346 ) -> Result<()> {
4347 this.update(&mut cx, |this, cx| {
4348 let payload = envelope.payload.clone();
4349 let buffer_id = payload.buffer_id;
4350 let ops = payload
4351 .operations
4352 .into_iter()
4353 .map(|op| language::proto::deserialize_operation(op))
4354 .collect::<Result<Vec<_>, _>>()?;
4355 let is_remote = this.is_remote();
4356 match this.opened_buffers.entry(buffer_id) {
4357 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4358 OpenBuffer::Strong(buffer) => {
4359 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4360 }
4361 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4362 OpenBuffer::Weak(_) => {}
4363 },
4364 hash_map::Entry::Vacant(e) => {
4365 assert!(
4366 is_remote,
4367 "received buffer update from {:?}",
4368 envelope.original_sender_id
4369 );
4370 e.insert(OpenBuffer::Loading(ops));
4371 }
4372 }
4373 Ok(())
4374 })
4375 }
4376
4377 async fn handle_update_buffer_file(
4378 this: ModelHandle<Self>,
4379 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4380 _: Arc<Client>,
4381 mut cx: AsyncAppContext,
4382 ) -> Result<()> {
4383 this.update(&mut cx, |this, cx| {
4384 let payload = envelope.payload.clone();
4385 let buffer_id = payload.buffer_id;
4386 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4387 let worktree = this
4388 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4389 .ok_or_else(|| anyhow!("no such worktree"))?;
4390 let file = File::from_proto(file, worktree.clone(), cx)?;
4391 let buffer = this
4392 .opened_buffers
4393 .get_mut(&buffer_id)
4394 .and_then(|b| b.upgrade(cx))
4395 .ok_or_else(|| anyhow!("no such buffer"))?;
4396 buffer.update(cx, |buffer, cx| {
4397 buffer.file_updated(Box::new(file), cx).detach();
4398 });
4399 Ok(())
4400 })
4401 }
4402
4403 async fn handle_save_buffer(
4404 this: ModelHandle<Self>,
4405 envelope: TypedEnvelope<proto::SaveBuffer>,
4406 _: Arc<Client>,
4407 mut cx: AsyncAppContext,
4408 ) -> Result<proto::BufferSaved> {
4409 let buffer_id = envelope.payload.buffer_id;
4410 let requested_version = deserialize_version(envelope.payload.version);
4411
4412 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4413 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4414 let buffer = this
4415 .opened_buffers
4416 .get(&buffer_id)
4417 .and_then(|buffer| buffer.upgrade(cx))
4418 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4419 Ok::<_, anyhow::Error>((project_id, buffer))
4420 })?;
4421 buffer
4422 .update(&mut cx, |buffer, _| {
4423 buffer.wait_for_version(requested_version)
4424 })
4425 .await;
4426
4427 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4428 Ok(proto::BufferSaved {
4429 project_id,
4430 buffer_id,
4431 version: serialize_version(&saved_version),
4432 mtime: Some(mtime.into()),
4433 })
4434 }
4435
4436 async fn handle_reload_buffers(
4437 this: ModelHandle<Self>,
4438 envelope: TypedEnvelope<proto::ReloadBuffers>,
4439 _: Arc<Client>,
4440 mut cx: AsyncAppContext,
4441 ) -> Result<proto::ReloadBuffersResponse> {
4442 let sender_id = envelope.original_sender_id()?;
4443 let reload = this.update(&mut cx, |this, cx| {
4444 let mut buffers = HashSet::default();
4445 for buffer_id in &envelope.payload.buffer_ids {
4446 buffers.insert(
4447 this.opened_buffers
4448 .get(buffer_id)
4449 .and_then(|buffer| buffer.upgrade(cx))
4450 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4451 );
4452 }
4453 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4454 })?;
4455
4456 let project_transaction = reload.await?;
4457 let project_transaction = this.update(&mut cx, |this, cx| {
4458 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4459 });
4460 Ok(proto::ReloadBuffersResponse {
4461 transaction: Some(project_transaction),
4462 })
4463 }
4464
4465 async fn handle_format_buffers(
4466 this: ModelHandle<Self>,
4467 envelope: TypedEnvelope<proto::FormatBuffers>,
4468 _: Arc<Client>,
4469 mut cx: AsyncAppContext,
4470 ) -> Result<proto::FormatBuffersResponse> {
4471 let sender_id = envelope.original_sender_id()?;
4472 let format = this.update(&mut cx, |this, cx| {
4473 let mut buffers = HashSet::default();
4474 for buffer_id in &envelope.payload.buffer_ids {
4475 buffers.insert(
4476 this.opened_buffers
4477 .get(buffer_id)
4478 .and_then(|buffer| buffer.upgrade(cx))
4479 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4480 );
4481 }
4482 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4483 })?;
4484
4485 let project_transaction = format.await?;
4486 let project_transaction = this.update(&mut cx, |this, cx| {
4487 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4488 });
4489 Ok(proto::FormatBuffersResponse {
4490 transaction: Some(project_transaction),
4491 })
4492 }
4493
4494 async fn handle_get_completions(
4495 this: ModelHandle<Self>,
4496 envelope: TypedEnvelope<proto::GetCompletions>,
4497 _: Arc<Client>,
4498 mut cx: AsyncAppContext,
4499 ) -> Result<proto::GetCompletionsResponse> {
4500 let position = envelope
4501 .payload
4502 .position
4503 .and_then(language::proto::deserialize_anchor)
4504 .ok_or_else(|| anyhow!("invalid position"))?;
4505 let version = deserialize_version(envelope.payload.version);
4506 let buffer = this.read_with(&cx, |this, cx| {
4507 this.opened_buffers
4508 .get(&envelope.payload.buffer_id)
4509 .and_then(|buffer| buffer.upgrade(cx))
4510 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4511 })?;
4512 buffer
4513 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4514 .await;
4515 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4516 let completions = this
4517 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4518 .await?;
4519
4520 Ok(proto::GetCompletionsResponse {
4521 completions: completions
4522 .iter()
4523 .map(language::proto::serialize_completion)
4524 .collect(),
4525 version: serialize_version(&version),
4526 })
4527 }
4528
4529 async fn handle_apply_additional_edits_for_completion(
4530 this: ModelHandle<Self>,
4531 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4532 _: Arc<Client>,
4533 mut cx: AsyncAppContext,
4534 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4535 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4536 let buffer = this
4537 .opened_buffers
4538 .get(&envelope.payload.buffer_id)
4539 .and_then(|buffer| buffer.upgrade(cx))
4540 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4541 let language = buffer.read(cx).language();
4542 let completion = language::proto::deserialize_completion(
4543 envelope
4544 .payload
4545 .completion
4546 .ok_or_else(|| anyhow!("invalid completion"))?,
4547 language,
4548 )?;
4549 Ok::<_, anyhow::Error>(
4550 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4551 )
4552 })?;
4553
4554 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4555 transaction: apply_additional_edits
4556 .await?
4557 .as_ref()
4558 .map(language::proto::serialize_transaction),
4559 })
4560 }
4561
4562 async fn handle_get_code_actions(
4563 this: ModelHandle<Self>,
4564 envelope: TypedEnvelope<proto::GetCodeActions>,
4565 _: Arc<Client>,
4566 mut cx: AsyncAppContext,
4567 ) -> Result<proto::GetCodeActionsResponse> {
4568 let start = envelope
4569 .payload
4570 .start
4571 .and_then(language::proto::deserialize_anchor)
4572 .ok_or_else(|| anyhow!("invalid start"))?;
4573 let end = envelope
4574 .payload
4575 .end
4576 .and_then(language::proto::deserialize_anchor)
4577 .ok_or_else(|| anyhow!("invalid end"))?;
4578 let buffer = this.update(&mut cx, |this, cx| {
4579 this.opened_buffers
4580 .get(&envelope.payload.buffer_id)
4581 .and_then(|buffer| buffer.upgrade(cx))
4582 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4583 })?;
4584 buffer
4585 .update(&mut cx, |buffer, _| {
4586 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4587 })
4588 .await;
4589
4590 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4591 let code_actions = this.update(&mut cx, |this, cx| {
4592 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4593 })?;
4594
4595 Ok(proto::GetCodeActionsResponse {
4596 actions: code_actions
4597 .await?
4598 .iter()
4599 .map(language::proto::serialize_code_action)
4600 .collect(),
4601 version: serialize_version(&version),
4602 })
4603 }
4604
4605 async fn handle_apply_code_action(
4606 this: ModelHandle<Self>,
4607 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4608 _: Arc<Client>,
4609 mut cx: AsyncAppContext,
4610 ) -> Result<proto::ApplyCodeActionResponse> {
4611 let sender_id = envelope.original_sender_id()?;
4612 let action = language::proto::deserialize_code_action(
4613 envelope
4614 .payload
4615 .action
4616 .ok_or_else(|| anyhow!("invalid action"))?,
4617 )?;
4618 let apply_code_action = this.update(&mut cx, |this, cx| {
4619 let buffer = this
4620 .opened_buffers
4621 .get(&envelope.payload.buffer_id)
4622 .and_then(|buffer| buffer.upgrade(cx))
4623 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4624 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4625 })?;
4626
4627 let project_transaction = apply_code_action.await?;
4628 let project_transaction = this.update(&mut cx, |this, cx| {
4629 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4630 });
4631 Ok(proto::ApplyCodeActionResponse {
4632 transaction: Some(project_transaction),
4633 })
4634 }
4635
4636 async fn handle_lsp_command<T: LspCommand>(
4637 this: ModelHandle<Self>,
4638 envelope: TypedEnvelope<T::ProtoRequest>,
4639 _: Arc<Client>,
4640 mut cx: AsyncAppContext,
4641 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4642 where
4643 <T::LspRequest as lsp::request::Request>::Result: Send,
4644 {
4645 let sender_id = envelope.original_sender_id()?;
4646 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4647 let buffer_handle = this.read_with(&cx, |this, _| {
4648 this.opened_buffers
4649 .get(&buffer_id)
4650 .and_then(|buffer| buffer.upgrade(&cx))
4651 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4652 })?;
4653 let request = T::from_proto(
4654 envelope.payload,
4655 this.clone(),
4656 buffer_handle.clone(),
4657 cx.clone(),
4658 )
4659 .await?;
4660 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4661 let response = this
4662 .update(&mut cx, |this, cx| {
4663 this.request_lsp(buffer_handle, request, cx)
4664 })
4665 .await?;
4666 this.update(&mut cx, |this, cx| {
4667 Ok(T::response_to_proto(
4668 response,
4669 this,
4670 sender_id,
4671 &buffer_version,
4672 cx,
4673 ))
4674 })
4675 }
4676
4677 async fn handle_get_project_symbols(
4678 this: ModelHandle<Self>,
4679 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4680 _: Arc<Client>,
4681 mut cx: AsyncAppContext,
4682 ) -> Result<proto::GetProjectSymbolsResponse> {
4683 let symbols = this
4684 .update(&mut cx, |this, cx| {
4685 this.symbols(&envelope.payload.query, cx)
4686 })
4687 .await?;
4688
4689 Ok(proto::GetProjectSymbolsResponse {
4690 symbols: symbols.iter().map(serialize_symbol).collect(),
4691 })
4692 }
4693
4694 async fn handle_search_project(
4695 this: ModelHandle<Self>,
4696 envelope: TypedEnvelope<proto::SearchProject>,
4697 _: Arc<Client>,
4698 mut cx: AsyncAppContext,
4699 ) -> Result<proto::SearchProjectResponse> {
4700 let peer_id = envelope.original_sender_id()?;
4701 let query = SearchQuery::from_proto(envelope.payload)?;
4702 let result = this
4703 .update(&mut cx, |this, cx| this.search(query, cx))
4704 .await?;
4705
4706 this.update(&mut cx, |this, cx| {
4707 let mut locations = Vec::new();
4708 for (buffer, ranges) in result {
4709 for range in ranges {
4710 let start = serialize_anchor(&range.start);
4711 let end = serialize_anchor(&range.end);
4712 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4713 locations.push(proto::Location {
4714 buffer: Some(buffer),
4715 start: Some(start),
4716 end: Some(end),
4717 });
4718 }
4719 }
4720 Ok(proto::SearchProjectResponse { locations })
4721 })
4722 }
4723
4724 async fn handle_open_buffer_for_symbol(
4725 this: ModelHandle<Self>,
4726 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4727 _: Arc<Client>,
4728 mut cx: AsyncAppContext,
4729 ) -> Result<proto::OpenBufferForSymbolResponse> {
4730 let peer_id = envelope.original_sender_id()?;
4731 let symbol = envelope
4732 .payload
4733 .symbol
4734 .ok_or_else(|| anyhow!("invalid symbol"))?;
4735 let symbol = this.read_with(&cx, |this, _| {
4736 let symbol = this.deserialize_symbol(symbol)?;
4737 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4738 if signature == symbol.signature {
4739 Ok(symbol)
4740 } else {
4741 Err(anyhow!("invalid symbol signature"))
4742 }
4743 })?;
4744 let buffer = this
4745 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4746 .await?;
4747
4748 Ok(proto::OpenBufferForSymbolResponse {
4749 buffer: Some(this.update(&mut cx, |this, cx| {
4750 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4751 })),
4752 })
4753 }
4754
4755 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4756 let mut hasher = Sha256::new();
4757 hasher.update(worktree_id.to_proto().to_be_bytes());
4758 hasher.update(path.to_string_lossy().as_bytes());
4759 hasher.update(self.nonce.to_be_bytes());
4760 hasher.finalize().as_slice().try_into().unwrap()
4761 }
4762
4763 async fn handle_open_buffer_by_id(
4764 this: ModelHandle<Self>,
4765 envelope: TypedEnvelope<proto::OpenBufferById>,
4766 _: Arc<Client>,
4767 mut cx: AsyncAppContext,
4768 ) -> Result<proto::OpenBufferResponse> {
4769 let peer_id = envelope.original_sender_id()?;
4770 let buffer = this
4771 .update(&mut cx, |this, cx| {
4772 this.open_buffer_by_id(envelope.payload.id, cx)
4773 })
4774 .await?;
4775 this.update(&mut cx, |this, cx| {
4776 Ok(proto::OpenBufferResponse {
4777 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4778 })
4779 })
4780 }
4781
4782 async fn handle_open_buffer_by_path(
4783 this: ModelHandle<Self>,
4784 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4785 _: Arc<Client>,
4786 mut cx: AsyncAppContext,
4787 ) -> Result<proto::OpenBufferResponse> {
4788 let peer_id = envelope.original_sender_id()?;
4789 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4790 let open_buffer = this.update(&mut cx, |this, cx| {
4791 this.open_buffer(
4792 ProjectPath {
4793 worktree_id,
4794 path: PathBuf::from(envelope.payload.path).into(),
4795 },
4796 cx,
4797 )
4798 });
4799
4800 let buffer = open_buffer.await?;
4801 this.update(&mut cx, |this, cx| {
4802 Ok(proto::OpenBufferResponse {
4803 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4804 })
4805 })
4806 }
4807
4808 fn serialize_project_transaction_for_peer(
4809 &mut self,
4810 project_transaction: ProjectTransaction,
4811 peer_id: PeerId,
4812 cx: &AppContext,
4813 ) -> proto::ProjectTransaction {
4814 let mut serialized_transaction = proto::ProjectTransaction {
4815 buffers: Default::default(),
4816 transactions: Default::default(),
4817 };
4818 for (buffer, transaction) in project_transaction.0 {
4819 serialized_transaction
4820 .buffers
4821 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4822 serialized_transaction
4823 .transactions
4824 .push(language::proto::serialize_transaction(&transaction));
4825 }
4826 serialized_transaction
4827 }
4828
4829 fn deserialize_project_transaction(
4830 &mut self,
4831 message: proto::ProjectTransaction,
4832 push_to_history: bool,
4833 cx: &mut ModelContext<Self>,
4834 ) -> Task<Result<ProjectTransaction>> {
4835 cx.spawn(|this, mut cx| async move {
4836 let mut project_transaction = ProjectTransaction::default();
4837 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4838 let buffer = this
4839 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4840 .await?;
4841 let transaction = language::proto::deserialize_transaction(transaction)?;
4842 project_transaction.0.insert(buffer, transaction);
4843 }
4844
4845 for (buffer, transaction) in &project_transaction.0 {
4846 buffer
4847 .update(&mut cx, |buffer, _| {
4848 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4849 })
4850 .await;
4851
4852 if push_to_history {
4853 buffer.update(&mut cx, |buffer, _| {
4854 buffer.push_transaction(transaction.clone(), Instant::now());
4855 });
4856 }
4857 }
4858
4859 Ok(project_transaction)
4860 })
4861 }
4862
4863 fn serialize_buffer_for_peer(
4864 &mut self,
4865 buffer: &ModelHandle<Buffer>,
4866 peer_id: PeerId,
4867 cx: &AppContext,
4868 ) -> proto::Buffer {
4869 let buffer_id = buffer.read(cx).remote_id();
4870 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4871 if shared_buffers.insert(buffer_id) {
4872 proto::Buffer {
4873 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4874 }
4875 } else {
4876 proto::Buffer {
4877 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4878 }
4879 }
4880 }
4881
4882 fn deserialize_buffer(
4883 &mut self,
4884 buffer: proto::Buffer,
4885 cx: &mut ModelContext<Self>,
4886 ) -> Task<Result<ModelHandle<Buffer>>> {
4887 let replica_id = self.replica_id();
4888
4889 let opened_buffer_tx = self.opened_buffer.0.clone();
4890 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4891 cx.spawn(|this, mut cx| async move {
4892 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4893 proto::buffer::Variant::Id(id) => {
4894 let buffer = loop {
4895 let buffer = this.read_with(&cx, |this, cx| {
4896 this.opened_buffers
4897 .get(&id)
4898 .and_then(|buffer| buffer.upgrade(cx))
4899 });
4900 if let Some(buffer) = buffer {
4901 break buffer;
4902 }
4903 opened_buffer_rx
4904 .next()
4905 .await
4906 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4907 };
4908 Ok(buffer)
4909 }
4910 proto::buffer::Variant::State(mut buffer) => {
4911 let mut buffer_worktree = None;
4912 let mut buffer_file = None;
4913 if let Some(file) = buffer.file.take() {
4914 this.read_with(&cx, |this, cx| {
4915 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4916 let worktree =
4917 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4918 anyhow!("no worktree found for id {}", file.worktree_id)
4919 })?;
4920 buffer_file =
4921 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4922 as Box<dyn language::File>);
4923 buffer_worktree = Some(worktree);
4924 Ok::<_, anyhow::Error>(())
4925 })?;
4926 }
4927
4928 let buffer = cx.add_model(|cx| {
4929 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4930 });
4931
4932 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4933
4934 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4935 Ok(buffer)
4936 }
4937 }
4938 })
4939 }
4940
4941 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4942 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4943 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4944 let start = serialized_symbol
4945 .start
4946 .ok_or_else(|| anyhow!("invalid start"))?;
4947 let end = serialized_symbol
4948 .end
4949 .ok_or_else(|| anyhow!("invalid end"))?;
4950 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4951 let path = PathBuf::from(serialized_symbol.path);
4952 let language = self.languages.select_language(&path);
4953 Ok(Symbol {
4954 source_worktree_id,
4955 worktree_id,
4956 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4957 label: language
4958 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4959 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4960 name: serialized_symbol.name,
4961 path,
4962 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4963 kind,
4964 signature: serialized_symbol
4965 .signature
4966 .try_into()
4967 .map_err(|_| anyhow!("invalid signature"))?,
4968 })
4969 }
4970
4971 async fn handle_buffer_saved(
4972 this: ModelHandle<Self>,
4973 envelope: TypedEnvelope<proto::BufferSaved>,
4974 _: Arc<Client>,
4975 mut cx: AsyncAppContext,
4976 ) -> Result<()> {
4977 let version = deserialize_version(envelope.payload.version);
4978 let mtime = envelope
4979 .payload
4980 .mtime
4981 .ok_or_else(|| anyhow!("missing mtime"))?
4982 .into();
4983
4984 this.update(&mut cx, |this, cx| {
4985 let buffer = this
4986 .opened_buffers
4987 .get(&envelope.payload.buffer_id)
4988 .and_then(|buffer| buffer.upgrade(cx));
4989 if let Some(buffer) = buffer {
4990 buffer.update(cx, |buffer, cx| {
4991 buffer.did_save(version, mtime, None, cx);
4992 });
4993 }
4994 Ok(())
4995 })
4996 }
4997
4998 async fn handle_buffer_reloaded(
4999 this: ModelHandle<Self>,
5000 envelope: TypedEnvelope<proto::BufferReloaded>,
5001 _: Arc<Client>,
5002 mut cx: AsyncAppContext,
5003 ) -> Result<()> {
5004 let payload = envelope.payload.clone();
5005 let version = deserialize_version(payload.version);
5006 let mtime = payload
5007 .mtime
5008 .ok_or_else(|| anyhow!("missing mtime"))?
5009 .into();
5010 this.update(&mut cx, |this, cx| {
5011 let buffer = this
5012 .opened_buffers
5013 .get(&payload.buffer_id)
5014 .and_then(|buffer| buffer.upgrade(cx));
5015 if let Some(buffer) = buffer {
5016 buffer.update(cx, |buffer, cx| {
5017 buffer.did_reload(version, mtime, cx);
5018 });
5019 }
5020 Ok(())
5021 })
5022 }
5023
5024 pub fn match_paths<'a>(
5025 &self,
5026 query: &'a str,
5027 include_ignored: bool,
5028 smart_case: bool,
5029 max_results: usize,
5030 cancel_flag: &'a AtomicBool,
5031 cx: &AppContext,
5032 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5033 let worktrees = self
5034 .worktrees(cx)
5035 .filter(|worktree| worktree.read(cx).is_visible())
5036 .collect::<Vec<_>>();
5037 let include_root_name = worktrees.len() > 1;
5038 let candidate_sets = worktrees
5039 .into_iter()
5040 .map(|worktree| CandidateSet {
5041 snapshot: worktree.read(cx).snapshot(),
5042 include_ignored,
5043 include_root_name,
5044 })
5045 .collect::<Vec<_>>();
5046
5047 let background = cx.background().clone();
5048 async move {
5049 fuzzy::match_paths(
5050 candidate_sets.as_slice(),
5051 query,
5052 smart_case,
5053 max_results,
5054 cancel_flag,
5055 background,
5056 )
5057 .await
5058 }
5059 }
5060
5061 fn edits_from_lsp(
5062 &mut self,
5063 buffer: &ModelHandle<Buffer>,
5064 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5065 version: Option<i32>,
5066 cx: &mut ModelContext<Self>,
5067 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5068 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5069 cx.background().spawn(async move {
5070 let snapshot = snapshot?;
5071 let mut lsp_edits = lsp_edits
5072 .into_iter()
5073 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5074 .peekable();
5075
5076 let mut edits = Vec::new();
5077 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5078 // Combine any LSP edits that are adjacent.
5079 //
5080 // Also, combine LSP edits that are separated from each other by only
5081 // a newline. This is important because for some code actions,
5082 // Rust-analyzer rewrites the entire buffer via a series of edits that
5083 // are separated by unchanged newline characters.
5084 //
5085 // In order for the diffing logic below to work properly, any edits that
5086 // cancel each other out must be combined into one.
5087 while let Some((next_range, next_text)) = lsp_edits.peek() {
5088 if next_range.start > range.end {
5089 if next_range.start.row > range.end.row + 1
5090 || next_range.start.column > 0
5091 || snapshot.clip_point_utf16(
5092 PointUtf16::new(range.end.row, u32::MAX),
5093 Bias::Left,
5094 ) > range.end
5095 {
5096 break;
5097 }
5098 new_text.push('\n');
5099 }
5100 range.end = next_range.end;
5101 new_text.push_str(&next_text);
5102 lsp_edits.next();
5103 }
5104
5105 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5106 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5107 {
5108 return Err(anyhow!("invalid edits received from language server"));
5109 }
5110
5111 // For multiline edits, perform a diff of the old and new text so that
5112 // we can identify the changes more precisely, preserving the locations
5113 // of any anchors positioned in the unchanged regions.
5114 if range.end.row > range.start.row {
5115 let mut offset = range.start.to_offset(&snapshot);
5116 let old_text = snapshot.text_for_range(range).collect::<String>();
5117
5118 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5119 let mut moved_since_edit = true;
5120 for change in diff.iter_all_changes() {
5121 let tag = change.tag();
5122 let value = change.value();
5123 match tag {
5124 ChangeTag::Equal => {
5125 offset += value.len();
5126 moved_since_edit = true;
5127 }
5128 ChangeTag::Delete => {
5129 let start = snapshot.anchor_after(offset);
5130 let end = snapshot.anchor_before(offset + value.len());
5131 if moved_since_edit {
5132 edits.push((start..end, String::new()));
5133 } else {
5134 edits.last_mut().unwrap().0.end = end;
5135 }
5136 offset += value.len();
5137 moved_since_edit = false;
5138 }
5139 ChangeTag::Insert => {
5140 if moved_since_edit {
5141 let anchor = snapshot.anchor_after(offset);
5142 edits.push((anchor.clone()..anchor, value.to_string()));
5143 } else {
5144 edits.last_mut().unwrap().1.push_str(value);
5145 }
5146 moved_since_edit = false;
5147 }
5148 }
5149 }
5150 } else if range.end == range.start {
5151 let anchor = snapshot.anchor_after(range.start);
5152 edits.push((anchor.clone()..anchor, new_text));
5153 } else {
5154 let edit_start = snapshot.anchor_after(range.start);
5155 let edit_end = snapshot.anchor_before(range.end);
5156 edits.push((edit_start..edit_end, new_text));
5157 }
5158 }
5159
5160 Ok(edits)
5161 })
5162 }
5163
5164 fn buffer_snapshot_for_lsp_version(
5165 &mut self,
5166 buffer: &ModelHandle<Buffer>,
5167 version: Option<i32>,
5168 cx: &AppContext,
5169 ) -> Result<TextBufferSnapshot> {
5170 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5171
5172 if let Some(version) = version {
5173 let buffer_id = buffer.read(cx).remote_id();
5174 let snapshots = self
5175 .buffer_snapshots
5176 .get_mut(&buffer_id)
5177 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5178 let mut found_snapshot = None;
5179 snapshots.retain(|(snapshot_version, snapshot)| {
5180 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5181 false
5182 } else {
5183 if *snapshot_version == version {
5184 found_snapshot = Some(snapshot.clone());
5185 }
5186 true
5187 }
5188 });
5189
5190 found_snapshot.ok_or_else(|| {
5191 anyhow!(
5192 "snapshot not found for buffer {} at version {}",
5193 buffer_id,
5194 version
5195 )
5196 })
5197 } else {
5198 Ok((buffer.read(cx)).text_snapshot())
5199 }
5200 }
5201
5202 fn language_server_for_buffer(
5203 &self,
5204 buffer: &Buffer,
5205 cx: &AppContext,
5206 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5207 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5208 let worktree_id = file.worktree_id(cx);
5209 self.language_servers
5210 .get(&(worktree_id, language.lsp_adapter()?.name()))
5211 } else {
5212 None
5213 }
5214 }
5215}
5216
5217impl ProjectStore {
5218 pub fn projects<'a>(
5219 &'a self,
5220 cx: &'a AppContext,
5221 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5222 self.projects
5223 .iter()
5224 .filter_map(|project| project.upgrade(cx))
5225 }
5226
5227 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5228 if let Err(ix) = self
5229 .projects
5230 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5231 {
5232 self.projects.insert(ix, project);
5233 }
5234 cx.notify();
5235 }
5236
5237 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5238 let mut did_change = false;
5239 self.projects.retain(|project| {
5240 if project.is_upgradable(cx) {
5241 true
5242 } else {
5243 did_change = true;
5244 false
5245 }
5246 });
5247 if did_change {
5248 cx.notify();
5249 }
5250 }
5251}
5252
5253impl WorktreeHandle {
5254 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5255 match self {
5256 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5257 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5258 }
5259 }
5260}
5261
5262impl OpenBuffer {
5263 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5264 match self {
5265 OpenBuffer::Strong(handle) => Some(handle.clone()),
5266 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5267 OpenBuffer::Loading(_) => None,
5268 }
5269 }
5270}
5271
5272struct CandidateSet {
5273 snapshot: Snapshot,
5274 include_ignored: bool,
5275 include_root_name: bool,
5276}
5277
5278impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5279 type Candidates = CandidateSetIter<'a>;
5280
5281 fn id(&self) -> usize {
5282 self.snapshot.id().to_usize()
5283 }
5284
5285 fn len(&self) -> usize {
5286 if self.include_ignored {
5287 self.snapshot.file_count()
5288 } else {
5289 self.snapshot.visible_file_count()
5290 }
5291 }
5292
5293 fn prefix(&self) -> Arc<str> {
5294 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5295 self.snapshot.root_name().into()
5296 } else if self.include_root_name {
5297 format!("{}/", self.snapshot.root_name()).into()
5298 } else {
5299 "".into()
5300 }
5301 }
5302
5303 fn candidates(&'a self, start: usize) -> Self::Candidates {
5304 CandidateSetIter {
5305 traversal: self.snapshot.files(self.include_ignored, start),
5306 }
5307 }
5308}
5309
5310struct CandidateSetIter<'a> {
5311 traversal: Traversal<'a>,
5312}
5313
5314impl<'a> Iterator for CandidateSetIter<'a> {
5315 type Item = PathMatchCandidate<'a>;
5316
5317 fn next(&mut self) -> Option<Self::Item> {
5318 self.traversal.next().map(|entry| {
5319 if let EntryKind::File(char_bag) = entry.kind {
5320 PathMatchCandidate {
5321 path: &entry.path,
5322 char_bag,
5323 }
5324 } else {
5325 unreachable!()
5326 }
5327 })
5328 }
5329}
5330
5331impl Entity for ProjectStore {
5332 type Event = ();
5333}
5334
5335impl Entity for Project {
5336 type Event = Event;
5337
5338 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5339 self.project_store.update(cx, ProjectStore::prune_projects);
5340
5341 match &self.client_state {
5342 ProjectClientState::Local { remote_id_rx, .. } => {
5343 if let Some(project_id) = *remote_id_rx.borrow() {
5344 self.client
5345 .send(proto::UnregisterProject { project_id })
5346 .log_err();
5347 }
5348 }
5349 ProjectClientState::Remote { remote_id, .. } => {
5350 self.client
5351 .send(proto::LeaveProject {
5352 project_id: *remote_id,
5353 })
5354 .log_err();
5355 }
5356 }
5357 }
5358
5359 fn app_will_quit(
5360 &mut self,
5361 _: &mut MutableAppContext,
5362 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5363 let shutdown_futures = self
5364 .language_servers
5365 .drain()
5366 .filter_map(|(_, (_, server))| server.shutdown())
5367 .collect::<Vec<_>>();
5368 Some(
5369 async move {
5370 futures::future::join_all(shutdown_futures).await;
5371 }
5372 .boxed(),
5373 )
5374 }
5375}
5376
5377impl Collaborator {
5378 fn from_proto(
5379 message: proto::Collaborator,
5380 user_store: &ModelHandle<UserStore>,
5381 cx: &mut AsyncAppContext,
5382 ) -> impl Future<Output = Result<Self>> {
5383 let user = user_store.update(cx, |user_store, cx| {
5384 user_store.fetch_user(message.user_id, cx)
5385 });
5386
5387 async move {
5388 Ok(Self {
5389 peer_id: PeerId(message.peer_id),
5390 user: user.await?,
5391 replica_id: message.replica_id as ReplicaId,
5392 })
5393 }
5394 }
5395}
5396
5397impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5398 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5399 Self {
5400 worktree_id,
5401 path: path.as_ref().into(),
5402 }
5403 }
5404}
5405
5406impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5407 fn from(options: lsp::CreateFileOptions) -> Self {
5408 Self {
5409 overwrite: options.overwrite.unwrap_or(false),
5410 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5411 }
5412 }
5413}
5414
5415impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5416 fn from(options: lsp::RenameFileOptions) -> Self {
5417 Self {
5418 overwrite: options.overwrite.unwrap_or(false),
5419 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5420 }
5421 }
5422}
5423
5424impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5425 fn from(options: lsp::DeleteFileOptions) -> Self {
5426 Self {
5427 recursive: options.recursive.unwrap_or(false),
5428 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5429 }
5430 }
5431}
5432
5433fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5434 proto::Symbol {
5435 source_worktree_id: symbol.source_worktree_id.to_proto(),
5436 worktree_id: symbol.worktree_id.to_proto(),
5437 language_server_name: symbol.language_server_name.0.to_string(),
5438 name: symbol.name.clone(),
5439 kind: unsafe { mem::transmute(symbol.kind) },
5440 path: symbol.path.to_string_lossy().to_string(),
5441 start: Some(proto::Point {
5442 row: symbol.range.start.row,
5443 column: symbol.range.start.column,
5444 }),
5445 end: Some(proto::Point {
5446 row: symbol.range.end.row,
5447 column: symbol.range.end.column,
5448 }),
5449 signature: symbol.signature.to_vec(),
5450 }
5451}
5452
5453fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5454 let mut path_components = path.components();
5455 let mut base_components = base.components();
5456 let mut components: Vec<Component> = Vec::new();
5457 loop {
5458 match (path_components.next(), base_components.next()) {
5459 (None, None) => break,
5460 (Some(a), None) => {
5461 components.push(a);
5462 components.extend(path_components.by_ref());
5463 break;
5464 }
5465 (None, _) => components.push(Component::ParentDir),
5466 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5467 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5468 (Some(a), Some(_)) => {
5469 components.push(Component::ParentDir);
5470 for _ in base_components {
5471 components.push(Component::ParentDir);
5472 }
5473 components.push(a);
5474 components.extend(path_components.by_ref());
5475 break;
5476 }
5477 }
5478 }
5479 components.iter().map(|c| c.as_os_str()).collect()
5480}
5481
5482impl Item for Buffer {
5483 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5484 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5485 }
5486}
5487
5488#[cfg(test)]
5489mod tests {
5490 use crate::worktree::WorktreeHandle;
5491
5492 use super::{Event, *};
5493 use fs::RealFs;
5494 use futures::{future, StreamExt};
5495 use gpui::test::subscribe;
5496 use language::{
5497 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5498 OffsetRangeExt, Point, ToPoint,
5499 };
5500 use lsp::Url;
5501 use serde_json::json;
5502 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5503 use unindent::Unindent as _;
5504 use util::{assert_set_eq, test::temp_tree};
5505
5506 #[gpui::test]
5507 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5508 let dir = temp_tree(json!({
5509 "root": {
5510 "apple": "",
5511 "banana": {
5512 "carrot": {
5513 "date": "",
5514 "endive": "",
5515 }
5516 },
5517 "fennel": {
5518 "grape": "",
5519 }
5520 }
5521 }));
5522
5523 let root_link_path = dir.path().join("root_link");
5524 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5525 unix::fs::symlink(
5526 &dir.path().join("root/fennel"),
5527 &dir.path().join("root/finnochio"),
5528 )
5529 .unwrap();
5530
5531 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5532
5533 project.read_with(cx, |project, cx| {
5534 let tree = project.worktrees(cx).next().unwrap().read(cx);
5535 assert_eq!(tree.file_count(), 5);
5536 assert_eq!(
5537 tree.inode_for_path("fennel/grape"),
5538 tree.inode_for_path("finnochio/grape")
5539 );
5540 });
5541
5542 let cancel_flag = Default::default();
5543 let results = project
5544 .read_with(cx, |project, cx| {
5545 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5546 })
5547 .await;
5548 assert_eq!(
5549 results
5550 .into_iter()
5551 .map(|result| result.path)
5552 .collect::<Vec<Arc<Path>>>(),
5553 vec![
5554 PathBuf::from("banana/carrot/date").into(),
5555 PathBuf::from("banana/carrot/endive").into(),
5556 ]
5557 );
5558 }
5559
5560 #[gpui::test]
5561 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5562 cx.foreground().forbid_parking();
5563
5564 let mut rust_language = Language::new(
5565 LanguageConfig {
5566 name: "Rust".into(),
5567 path_suffixes: vec!["rs".to_string()],
5568 ..Default::default()
5569 },
5570 Some(tree_sitter_rust::language()),
5571 );
5572 let mut json_language = Language::new(
5573 LanguageConfig {
5574 name: "JSON".into(),
5575 path_suffixes: vec!["json".to_string()],
5576 ..Default::default()
5577 },
5578 None,
5579 );
5580 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5581 name: "the-rust-language-server",
5582 capabilities: lsp::ServerCapabilities {
5583 completion_provider: Some(lsp::CompletionOptions {
5584 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5585 ..Default::default()
5586 }),
5587 ..Default::default()
5588 },
5589 ..Default::default()
5590 });
5591 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5592 name: "the-json-language-server",
5593 capabilities: lsp::ServerCapabilities {
5594 completion_provider: Some(lsp::CompletionOptions {
5595 trigger_characters: Some(vec![":".to_string()]),
5596 ..Default::default()
5597 }),
5598 ..Default::default()
5599 },
5600 ..Default::default()
5601 });
5602
5603 let fs = FakeFs::new(cx.background());
5604 fs.insert_tree(
5605 "/the-root",
5606 json!({
5607 "test.rs": "const A: i32 = 1;",
5608 "test2.rs": "",
5609 "Cargo.toml": "a = 1",
5610 "package.json": "{\"a\": 1}",
5611 }),
5612 )
5613 .await;
5614
5615 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5616 project.update(cx, |project, _| {
5617 project.languages.add(Arc::new(rust_language));
5618 project.languages.add(Arc::new(json_language));
5619 });
5620
5621 // Open a buffer without an associated language server.
5622 let toml_buffer = project
5623 .update(cx, |project, cx| {
5624 project.open_local_buffer("/the-root/Cargo.toml", cx)
5625 })
5626 .await
5627 .unwrap();
5628
5629 // Open a buffer with an associated language server.
5630 let rust_buffer = project
5631 .update(cx, |project, cx| {
5632 project.open_local_buffer("/the-root/test.rs", cx)
5633 })
5634 .await
5635 .unwrap();
5636
5637 // A server is started up, and it is notified about Rust files.
5638 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5639 assert_eq!(
5640 fake_rust_server
5641 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5642 .await
5643 .text_document,
5644 lsp::TextDocumentItem {
5645 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5646 version: 0,
5647 text: "const A: i32 = 1;".to_string(),
5648 language_id: Default::default()
5649 }
5650 );
5651
5652 // The buffer is configured based on the language server's capabilities.
5653 rust_buffer.read_with(cx, |buffer, _| {
5654 assert_eq!(
5655 buffer.completion_triggers(),
5656 &[".".to_string(), "::".to_string()]
5657 );
5658 });
5659 toml_buffer.read_with(cx, |buffer, _| {
5660 assert!(buffer.completion_triggers().is_empty());
5661 });
5662
5663 // Edit a buffer. The changes are reported to the language server.
5664 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5665 assert_eq!(
5666 fake_rust_server
5667 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5668 .await
5669 .text_document,
5670 lsp::VersionedTextDocumentIdentifier::new(
5671 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5672 1
5673 )
5674 );
5675
5676 // Open a third buffer with a different associated language server.
5677 let json_buffer = project
5678 .update(cx, |project, cx| {
5679 project.open_local_buffer("/the-root/package.json", cx)
5680 })
5681 .await
5682 .unwrap();
5683
5684 // A json language server is started up and is only notified about the json buffer.
5685 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5686 assert_eq!(
5687 fake_json_server
5688 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5689 .await
5690 .text_document,
5691 lsp::TextDocumentItem {
5692 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5693 version: 0,
5694 text: "{\"a\": 1}".to_string(),
5695 language_id: Default::default()
5696 }
5697 );
5698
5699 // This buffer is configured based on the second language server's
5700 // capabilities.
5701 json_buffer.read_with(cx, |buffer, _| {
5702 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5703 });
5704
5705 // When opening another buffer whose language server is already running,
5706 // it is also configured based on the existing language server's capabilities.
5707 let rust_buffer2 = project
5708 .update(cx, |project, cx| {
5709 project.open_local_buffer("/the-root/test2.rs", cx)
5710 })
5711 .await
5712 .unwrap();
5713 rust_buffer2.read_with(cx, |buffer, _| {
5714 assert_eq!(
5715 buffer.completion_triggers(),
5716 &[".".to_string(), "::".to_string()]
5717 );
5718 });
5719
5720 // Changes are reported only to servers matching the buffer's language.
5721 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5722 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5723 assert_eq!(
5724 fake_rust_server
5725 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5726 .await
5727 .text_document,
5728 lsp::VersionedTextDocumentIdentifier::new(
5729 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5730 1
5731 )
5732 );
5733
5734 // Save notifications are reported to all servers.
5735 toml_buffer
5736 .update(cx, |buffer, cx| buffer.save(cx))
5737 .await
5738 .unwrap();
5739 assert_eq!(
5740 fake_rust_server
5741 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5742 .await
5743 .text_document,
5744 lsp::TextDocumentIdentifier::new(
5745 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5746 )
5747 );
5748 assert_eq!(
5749 fake_json_server
5750 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5751 .await
5752 .text_document,
5753 lsp::TextDocumentIdentifier::new(
5754 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5755 )
5756 );
5757
5758 // Renames are reported only to servers matching the buffer's language.
5759 fs.rename(
5760 Path::new("/the-root/test2.rs"),
5761 Path::new("/the-root/test3.rs"),
5762 Default::default(),
5763 )
5764 .await
5765 .unwrap();
5766 assert_eq!(
5767 fake_rust_server
5768 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5769 .await
5770 .text_document,
5771 lsp::TextDocumentIdentifier::new(
5772 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5773 ),
5774 );
5775 assert_eq!(
5776 fake_rust_server
5777 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5778 .await
5779 .text_document,
5780 lsp::TextDocumentItem {
5781 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5782 version: 0,
5783 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5784 language_id: Default::default()
5785 },
5786 );
5787
5788 rust_buffer2.update(cx, |buffer, cx| {
5789 buffer.update_diagnostics(
5790 DiagnosticSet::from_sorted_entries(
5791 vec![DiagnosticEntry {
5792 diagnostic: Default::default(),
5793 range: Anchor::MIN..Anchor::MAX,
5794 }],
5795 &buffer.snapshot(),
5796 ),
5797 cx,
5798 );
5799 assert_eq!(
5800 buffer
5801 .snapshot()
5802 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5803 .count(),
5804 1
5805 );
5806 });
5807
5808 // When the rename changes the extension of the file, the buffer gets closed on the old
5809 // language server and gets opened on the new one.
5810 fs.rename(
5811 Path::new("/the-root/test3.rs"),
5812 Path::new("/the-root/test3.json"),
5813 Default::default(),
5814 )
5815 .await
5816 .unwrap();
5817 assert_eq!(
5818 fake_rust_server
5819 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5820 .await
5821 .text_document,
5822 lsp::TextDocumentIdentifier::new(
5823 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5824 ),
5825 );
5826 assert_eq!(
5827 fake_json_server
5828 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5829 .await
5830 .text_document,
5831 lsp::TextDocumentItem {
5832 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5833 version: 0,
5834 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5835 language_id: Default::default()
5836 },
5837 );
5838
5839 // We clear the diagnostics, since the language has changed.
5840 rust_buffer2.read_with(cx, |buffer, _| {
5841 assert_eq!(
5842 buffer
5843 .snapshot()
5844 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5845 .count(),
5846 0
5847 );
5848 });
5849
5850 // The renamed file's version resets after changing language server.
5851 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5852 assert_eq!(
5853 fake_json_server
5854 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5855 .await
5856 .text_document,
5857 lsp::VersionedTextDocumentIdentifier::new(
5858 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5859 1
5860 )
5861 );
5862
5863 // Restart language servers
5864 project.update(cx, |project, cx| {
5865 project.restart_language_servers_for_buffers(
5866 vec![rust_buffer.clone(), json_buffer.clone()],
5867 cx,
5868 );
5869 });
5870
5871 let mut rust_shutdown_requests = fake_rust_server
5872 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5873 let mut json_shutdown_requests = fake_json_server
5874 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5875 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5876
5877 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5878 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5879
5880 // Ensure rust document is reopened in new rust language server
5881 assert_eq!(
5882 fake_rust_server
5883 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5884 .await
5885 .text_document,
5886 lsp::TextDocumentItem {
5887 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5888 version: 1,
5889 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5890 language_id: Default::default()
5891 }
5892 );
5893
5894 // Ensure json documents are reopened in new json language server
5895 assert_set_eq!(
5896 [
5897 fake_json_server
5898 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5899 .await
5900 .text_document,
5901 fake_json_server
5902 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5903 .await
5904 .text_document,
5905 ],
5906 [
5907 lsp::TextDocumentItem {
5908 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5909 version: 0,
5910 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5911 language_id: Default::default()
5912 },
5913 lsp::TextDocumentItem {
5914 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5915 version: 1,
5916 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5917 language_id: Default::default()
5918 }
5919 ]
5920 );
5921
5922 // Close notifications are reported only to servers matching the buffer's language.
5923 cx.update(|_| drop(json_buffer));
5924 let close_message = lsp::DidCloseTextDocumentParams {
5925 text_document: lsp::TextDocumentIdentifier::new(
5926 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5927 ),
5928 };
5929 assert_eq!(
5930 fake_json_server
5931 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5932 .await,
5933 close_message,
5934 );
5935 }
5936
5937 #[gpui::test]
5938 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5939 cx.foreground().forbid_parking();
5940
5941 let fs = FakeFs::new(cx.background());
5942 fs.insert_tree(
5943 "/dir",
5944 json!({
5945 "a.rs": "let a = 1;",
5946 "b.rs": "let b = 2;"
5947 }),
5948 )
5949 .await;
5950
5951 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5952
5953 let buffer_a = project
5954 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5955 .await
5956 .unwrap();
5957 let buffer_b = project
5958 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5959 .await
5960 .unwrap();
5961
5962 project.update(cx, |project, cx| {
5963 project
5964 .update_diagnostics(
5965 lsp::PublishDiagnosticsParams {
5966 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5967 version: None,
5968 diagnostics: vec![lsp::Diagnostic {
5969 range: lsp::Range::new(
5970 lsp::Position::new(0, 4),
5971 lsp::Position::new(0, 5),
5972 ),
5973 severity: Some(lsp::DiagnosticSeverity::ERROR),
5974 message: "error 1".to_string(),
5975 ..Default::default()
5976 }],
5977 },
5978 &[],
5979 cx,
5980 )
5981 .unwrap();
5982 project
5983 .update_diagnostics(
5984 lsp::PublishDiagnosticsParams {
5985 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5986 version: None,
5987 diagnostics: vec![lsp::Diagnostic {
5988 range: lsp::Range::new(
5989 lsp::Position::new(0, 4),
5990 lsp::Position::new(0, 5),
5991 ),
5992 severity: Some(lsp::DiagnosticSeverity::WARNING),
5993 message: "error 2".to_string(),
5994 ..Default::default()
5995 }],
5996 },
5997 &[],
5998 cx,
5999 )
6000 .unwrap();
6001 });
6002
6003 buffer_a.read_with(cx, |buffer, _| {
6004 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6005 assert_eq!(
6006 chunks
6007 .iter()
6008 .map(|(s, d)| (s.as_str(), *d))
6009 .collect::<Vec<_>>(),
6010 &[
6011 ("let ", None),
6012 ("a", Some(DiagnosticSeverity::ERROR)),
6013 (" = 1;", None),
6014 ]
6015 );
6016 });
6017 buffer_b.read_with(cx, |buffer, _| {
6018 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6019 assert_eq!(
6020 chunks
6021 .iter()
6022 .map(|(s, d)| (s.as_str(), *d))
6023 .collect::<Vec<_>>(),
6024 &[
6025 ("let ", None),
6026 ("b", Some(DiagnosticSeverity::WARNING)),
6027 (" = 2;", None),
6028 ]
6029 );
6030 });
6031 }
6032
6033 #[gpui::test]
6034 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6035 cx.foreground().forbid_parking();
6036
6037 let progress_token = "the-progress-token";
6038 let mut language = Language::new(
6039 LanguageConfig {
6040 name: "Rust".into(),
6041 path_suffixes: vec!["rs".to_string()],
6042 ..Default::default()
6043 },
6044 Some(tree_sitter_rust::language()),
6045 );
6046 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6047 disk_based_diagnostics_progress_token: Some(progress_token),
6048 disk_based_diagnostics_sources: &["disk"],
6049 ..Default::default()
6050 });
6051
6052 let fs = FakeFs::new(cx.background());
6053 fs.insert_tree(
6054 "/dir",
6055 json!({
6056 "a.rs": "fn a() { A }",
6057 "b.rs": "const y: i32 = 1",
6058 }),
6059 )
6060 .await;
6061
6062 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6063 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6064 let worktree_id =
6065 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6066
6067 // Cause worktree to start the fake language server
6068 let _buffer = project
6069 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6070 .await
6071 .unwrap();
6072
6073 let mut events = subscribe(&project, cx);
6074
6075 let mut fake_server = fake_servers.next().await.unwrap();
6076 fake_server.start_progress(progress_token).await;
6077 assert_eq!(
6078 events.next().await.unwrap(),
6079 Event::DiskBasedDiagnosticsStarted
6080 );
6081
6082 fake_server.start_progress(progress_token).await;
6083 fake_server.end_progress(progress_token).await;
6084 fake_server.start_progress(progress_token).await;
6085
6086 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6087 lsp::PublishDiagnosticsParams {
6088 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6089 version: None,
6090 diagnostics: vec![lsp::Diagnostic {
6091 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6092 severity: Some(lsp::DiagnosticSeverity::ERROR),
6093 message: "undefined variable 'A'".to_string(),
6094 ..Default::default()
6095 }],
6096 },
6097 );
6098 assert_eq!(
6099 events.next().await.unwrap(),
6100 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6101 );
6102
6103 fake_server.end_progress(progress_token).await;
6104 fake_server.end_progress(progress_token).await;
6105 assert_eq!(
6106 events.next().await.unwrap(),
6107 Event::DiskBasedDiagnosticsUpdated
6108 );
6109 assert_eq!(
6110 events.next().await.unwrap(),
6111 Event::DiskBasedDiagnosticsFinished
6112 );
6113
6114 let buffer = project
6115 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6116 .await
6117 .unwrap();
6118
6119 buffer.read_with(cx, |buffer, _| {
6120 let snapshot = buffer.snapshot();
6121 let diagnostics = snapshot
6122 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6123 .collect::<Vec<_>>();
6124 assert_eq!(
6125 diagnostics,
6126 &[DiagnosticEntry {
6127 range: Point::new(0, 9)..Point::new(0, 10),
6128 diagnostic: Diagnostic {
6129 severity: lsp::DiagnosticSeverity::ERROR,
6130 message: "undefined variable 'A'".to_string(),
6131 group_id: 0,
6132 is_primary: true,
6133 ..Default::default()
6134 }
6135 }]
6136 )
6137 });
6138
6139 // Ensure publishing empty diagnostics twice only results in one update event.
6140 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6141 lsp::PublishDiagnosticsParams {
6142 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6143 version: None,
6144 diagnostics: Default::default(),
6145 },
6146 );
6147 assert_eq!(
6148 events.next().await.unwrap(),
6149 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6150 );
6151
6152 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6153 lsp::PublishDiagnosticsParams {
6154 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6155 version: None,
6156 diagnostics: Default::default(),
6157 },
6158 );
6159 cx.foreground().run_until_parked();
6160 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6161 }
6162
6163 #[gpui::test]
6164 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6165 cx.foreground().forbid_parking();
6166
6167 let progress_token = "the-progress-token";
6168 let mut language = Language::new(
6169 LanguageConfig {
6170 path_suffixes: vec!["rs".to_string()],
6171 ..Default::default()
6172 },
6173 None,
6174 );
6175 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6176 disk_based_diagnostics_sources: &["disk"],
6177 disk_based_diagnostics_progress_token: Some(progress_token),
6178 ..Default::default()
6179 });
6180
6181 let fs = FakeFs::new(cx.background());
6182 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6183
6184 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6185 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6186
6187 let buffer = project
6188 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6189 .await
6190 .unwrap();
6191
6192 // Simulate diagnostics starting to update.
6193 let mut fake_server = fake_servers.next().await.unwrap();
6194 fake_server.start_progress(progress_token).await;
6195
6196 // Restart the server before the diagnostics finish updating.
6197 project.update(cx, |project, cx| {
6198 project.restart_language_servers_for_buffers([buffer], cx);
6199 });
6200 let mut events = subscribe(&project, cx);
6201
6202 // Simulate the newly started server sending more diagnostics.
6203 let mut fake_server = fake_servers.next().await.unwrap();
6204 fake_server.start_progress(progress_token).await;
6205 assert_eq!(
6206 events.next().await.unwrap(),
6207 Event::DiskBasedDiagnosticsStarted
6208 );
6209
6210 // All diagnostics are considered done, despite the old server's diagnostic
6211 // task never completing.
6212 fake_server.end_progress(progress_token).await;
6213 assert_eq!(
6214 events.next().await.unwrap(),
6215 Event::DiskBasedDiagnosticsUpdated
6216 );
6217 assert_eq!(
6218 events.next().await.unwrap(),
6219 Event::DiskBasedDiagnosticsFinished
6220 );
6221 project.read_with(cx, |project, _| {
6222 assert!(!project.is_running_disk_based_diagnostics());
6223 });
6224 }
6225
6226 #[gpui::test]
6227 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6228 cx.foreground().forbid_parking();
6229
6230 let mut language = Language::new(
6231 LanguageConfig {
6232 name: "Rust".into(),
6233 path_suffixes: vec!["rs".to_string()],
6234 ..Default::default()
6235 },
6236 Some(tree_sitter_rust::language()),
6237 );
6238 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6239 disk_based_diagnostics_sources: &["disk"],
6240 ..Default::default()
6241 });
6242
6243 let text = "
6244 fn a() { A }
6245 fn b() { BB }
6246 fn c() { CCC }
6247 "
6248 .unindent();
6249
6250 let fs = FakeFs::new(cx.background());
6251 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6252
6253 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6254 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6255
6256 let buffer = project
6257 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6258 .await
6259 .unwrap();
6260
6261 let mut fake_server = fake_servers.next().await.unwrap();
6262 let open_notification = fake_server
6263 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6264 .await;
6265
6266 // Edit the buffer, moving the content down
6267 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6268 let change_notification_1 = fake_server
6269 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6270 .await;
6271 assert!(
6272 change_notification_1.text_document.version > open_notification.text_document.version
6273 );
6274
6275 // Report some diagnostics for the initial version of the buffer
6276 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6277 lsp::PublishDiagnosticsParams {
6278 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6279 version: Some(open_notification.text_document.version),
6280 diagnostics: vec![
6281 lsp::Diagnostic {
6282 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6283 severity: Some(DiagnosticSeverity::ERROR),
6284 message: "undefined variable 'A'".to_string(),
6285 source: Some("disk".to_string()),
6286 ..Default::default()
6287 },
6288 lsp::Diagnostic {
6289 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6290 severity: Some(DiagnosticSeverity::ERROR),
6291 message: "undefined variable 'BB'".to_string(),
6292 source: Some("disk".to_string()),
6293 ..Default::default()
6294 },
6295 lsp::Diagnostic {
6296 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6297 severity: Some(DiagnosticSeverity::ERROR),
6298 source: Some("disk".to_string()),
6299 message: "undefined variable 'CCC'".to_string(),
6300 ..Default::default()
6301 },
6302 ],
6303 },
6304 );
6305
6306 // The diagnostics have moved down since they were created.
6307 buffer.next_notification(cx).await;
6308 buffer.read_with(cx, |buffer, _| {
6309 assert_eq!(
6310 buffer
6311 .snapshot()
6312 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6313 .collect::<Vec<_>>(),
6314 &[
6315 DiagnosticEntry {
6316 range: Point::new(3, 9)..Point::new(3, 11),
6317 diagnostic: Diagnostic {
6318 severity: DiagnosticSeverity::ERROR,
6319 message: "undefined variable 'BB'".to_string(),
6320 is_disk_based: true,
6321 group_id: 1,
6322 is_primary: true,
6323 ..Default::default()
6324 },
6325 },
6326 DiagnosticEntry {
6327 range: Point::new(4, 9)..Point::new(4, 12),
6328 diagnostic: Diagnostic {
6329 severity: DiagnosticSeverity::ERROR,
6330 message: "undefined variable 'CCC'".to_string(),
6331 is_disk_based: true,
6332 group_id: 2,
6333 is_primary: true,
6334 ..Default::default()
6335 }
6336 }
6337 ]
6338 );
6339 assert_eq!(
6340 chunks_with_diagnostics(buffer, 0..buffer.len()),
6341 [
6342 ("\n\nfn a() { ".to_string(), None),
6343 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6344 (" }\nfn b() { ".to_string(), None),
6345 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6346 (" }\nfn c() { ".to_string(), None),
6347 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6348 (" }\n".to_string(), None),
6349 ]
6350 );
6351 assert_eq!(
6352 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6353 [
6354 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6355 (" }\nfn c() { ".to_string(), None),
6356 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6357 ]
6358 );
6359 });
6360
6361 // Ensure overlapping diagnostics are highlighted correctly.
6362 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6363 lsp::PublishDiagnosticsParams {
6364 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6365 version: Some(open_notification.text_document.version),
6366 diagnostics: vec![
6367 lsp::Diagnostic {
6368 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6369 severity: Some(DiagnosticSeverity::ERROR),
6370 message: "undefined variable 'A'".to_string(),
6371 source: Some("disk".to_string()),
6372 ..Default::default()
6373 },
6374 lsp::Diagnostic {
6375 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6376 severity: Some(DiagnosticSeverity::WARNING),
6377 message: "unreachable statement".to_string(),
6378 source: Some("disk".to_string()),
6379 ..Default::default()
6380 },
6381 ],
6382 },
6383 );
6384
6385 buffer.next_notification(cx).await;
6386 buffer.read_with(cx, |buffer, _| {
6387 assert_eq!(
6388 buffer
6389 .snapshot()
6390 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6391 .collect::<Vec<_>>(),
6392 &[
6393 DiagnosticEntry {
6394 range: Point::new(2, 9)..Point::new(2, 12),
6395 diagnostic: Diagnostic {
6396 severity: DiagnosticSeverity::WARNING,
6397 message: "unreachable statement".to_string(),
6398 is_disk_based: true,
6399 group_id: 4,
6400 is_primary: true,
6401 ..Default::default()
6402 }
6403 },
6404 DiagnosticEntry {
6405 range: Point::new(2, 9)..Point::new(2, 10),
6406 diagnostic: Diagnostic {
6407 severity: DiagnosticSeverity::ERROR,
6408 message: "undefined variable 'A'".to_string(),
6409 is_disk_based: true,
6410 group_id: 3,
6411 is_primary: true,
6412 ..Default::default()
6413 },
6414 }
6415 ]
6416 );
6417 assert_eq!(
6418 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6419 [
6420 ("fn a() { ".to_string(), None),
6421 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6422 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6423 ("\n".to_string(), None),
6424 ]
6425 );
6426 assert_eq!(
6427 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6428 [
6429 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6430 ("\n".to_string(), None),
6431 ]
6432 );
6433 });
6434
6435 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6436 // changes since the last save.
6437 buffer.update(cx, |buffer, cx| {
6438 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6439 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6440 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6441 });
6442 let change_notification_2 = fake_server
6443 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6444 .await;
6445 assert!(
6446 change_notification_2.text_document.version
6447 > change_notification_1.text_document.version
6448 );
6449
6450 // Handle out-of-order diagnostics
6451 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6452 lsp::PublishDiagnosticsParams {
6453 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6454 version: Some(change_notification_2.text_document.version),
6455 diagnostics: vec![
6456 lsp::Diagnostic {
6457 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6458 severity: Some(DiagnosticSeverity::ERROR),
6459 message: "undefined variable 'BB'".to_string(),
6460 source: Some("disk".to_string()),
6461 ..Default::default()
6462 },
6463 lsp::Diagnostic {
6464 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6465 severity: Some(DiagnosticSeverity::WARNING),
6466 message: "undefined variable 'A'".to_string(),
6467 source: Some("disk".to_string()),
6468 ..Default::default()
6469 },
6470 ],
6471 },
6472 );
6473
6474 buffer.next_notification(cx).await;
6475 buffer.read_with(cx, |buffer, _| {
6476 assert_eq!(
6477 buffer
6478 .snapshot()
6479 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6480 .collect::<Vec<_>>(),
6481 &[
6482 DiagnosticEntry {
6483 range: Point::new(2, 21)..Point::new(2, 22),
6484 diagnostic: Diagnostic {
6485 severity: DiagnosticSeverity::WARNING,
6486 message: "undefined variable 'A'".to_string(),
6487 is_disk_based: true,
6488 group_id: 6,
6489 is_primary: true,
6490 ..Default::default()
6491 }
6492 },
6493 DiagnosticEntry {
6494 range: Point::new(3, 9)..Point::new(3, 14),
6495 diagnostic: Diagnostic {
6496 severity: DiagnosticSeverity::ERROR,
6497 message: "undefined variable 'BB'".to_string(),
6498 is_disk_based: true,
6499 group_id: 5,
6500 is_primary: true,
6501 ..Default::default()
6502 },
6503 }
6504 ]
6505 );
6506 });
6507 }
6508
6509 #[gpui::test]
6510 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6511 cx.foreground().forbid_parking();
6512
6513 let text = concat!(
6514 "let one = ;\n", //
6515 "let two = \n",
6516 "let three = 3;\n",
6517 );
6518
6519 let fs = FakeFs::new(cx.background());
6520 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6521
6522 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6523 let buffer = project
6524 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6525 .await
6526 .unwrap();
6527
6528 project.update(cx, |project, cx| {
6529 project
6530 .update_buffer_diagnostics(
6531 &buffer,
6532 vec![
6533 DiagnosticEntry {
6534 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6535 diagnostic: Diagnostic {
6536 severity: DiagnosticSeverity::ERROR,
6537 message: "syntax error 1".to_string(),
6538 ..Default::default()
6539 },
6540 },
6541 DiagnosticEntry {
6542 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6543 diagnostic: Diagnostic {
6544 severity: DiagnosticSeverity::ERROR,
6545 message: "syntax error 2".to_string(),
6546 ..Default::default()
6547 },
6548 },
6549 ],
6550 None,
6551 cx,
6552 )
6553 .unwrap();
6554 });
6555
6556 // An empty range is extended forward to include the following character.
6557 // At the end of a line, an empty range is extended backward to include
6558 // the preceding character.
6559 buffer.read_with(cx, |buffer, _| {
6560 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6561 assert_eq!(
6562 chunks
6563 .iter()
6564 .map(|(s, d)| (s.as_str(), *d))
6565 .collect::<Vec<_>>(),
6566 &[
6567 ("let one = ", None),
6568 (";", Some(DiagnosticSeverity::ERROR)),
6569 ("\nlet two =", None),
6570 (" ", Some(DiagnosticSeverity::ERROR)),
6571 ("\nlet three = 3;\n", None)
6572 ]
6573 );
6574 });
6575 }
6576
6577 #[gpui::test]
6578 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6579 cx.foreground().forbid_parking();
6580
6581 let mut language = Language::new(
6582 LanguageConfig {
6583 name: "Rust".into(),
6584 path_suffixes: vec!["rs".to_string()],
6585 ..Default::default()
6586 },
6587 Some(tree_sitter_rust::language()),
6588 );
6589 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6590
6591 let text = "
6592 fn a() {
6593 f1();
6594 }
6595 fn b() {
6596 f2();
6597 }
6598 fn c() {
6599 f3();
6600 }
6601 "
6602 .unindent();
6603
6604 let fs = FakeFs::new(cx.background());
6605 fs.insert_tree(
6606 "/dir",
6607 json!({
6608 "a.rs": text.clone(),
6609 }),
6610 )
6611 .await;
6612
6613 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6614 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6615 let buffer = project
6616 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6617 .await
6618 .unwrap();
6619
6620 let mut fake_server = fake_servers.next().await.unwrap();
6621 let lsp_document_version = fake_server
6622 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6623 .await
6624 .text_document
6625 .version;
6626
6627 // Simulate editing the buffer after the language server computes some edits.
6628 buffer.update(cx, |buffer, cx| {
6629 buffer.edit(
6630 [(
6631 Point::new(0, 0)..Point::new(0, 0),
6632 "// above first function\n",
6633 )],
6634 cx,
6635 );
6636 buffer.edit(
6637 [(
6638 Point::new(2, 0)..Point::new(2, 0),
6639 " // inside first function\n",
6640 )],
6641 cx,
6642 );
6643 buffer.edit(
6644 [(
6645 Point::new(6, 4)..Point::new(6, 4),
6646 "// inside second function ",
6647 )],
6648 cx,
6649 );
6650
6651 assert_eq!(
6652 buffer.text(),
6653 "
6654 // above first function
6655 fn a() {
6656 // inside first function
6657 f1();
6658 }
6659 fn b() {
6660 // inside second function f2();
6661 }
6662 fn c() {
6663 f3();
6664 }
6665 "
6666 .unindent()
6667 );
6668 });
6669
6670 let edits = project
6671 .update(cx, |project, cx| {
6672 project.edits_from_lsp(
6673 &buffer,
6674 vec![
6675 // replace body of first function
6676 lsp::TextEdit {
6677 range: lsp::Range::new(
6678 lsp::Position::new(0, 0),
6679 lsp::Position::new(3, 0),
6680 ),
6681 new_text: "
6682 fn a() {
6683 f10();
6684 }
6685 "
6686 .unindent(),
6687 },
6688 // edit inside second function
6689 lsp::TextEdit {
6690 range: lsp::Range::new(
6691 lsp::Position::new(4, 6),
6692 lsp::Position::new(4, 6),
6693 ),
6694 new_text: "00".into(),
6695 },
6696 // edit inside third function via two distinct edits
6697 lsp::TextEdit {
6698 range: lsp::Range::new(
6699 lsp::Position::new(7, 5),
6700 lsp::Position::new(7, 5),
6701 ),
6702 new_text: "4000".into(),
6703 },
6704 lsp::TextEdit {
6705 range: lsp::Range::new(
6706 lsp::Position::new(7, 5),
6707 lsp::Position::new(7, 6),
6708 ),
6709 new_text: "".into(),
6710 },
6711 ],
6712 Some(lsp_document_version),
6713 cx,
6714 )
6715 })
6716 .await
6717 .unwrap();
6718
6719 buffer.update(cx, |buffer, cx| {
6720 for (range, new_text) in edits {
6721 buffer.edit([(range, new_text)], cx);
6722 }
6723 assert_eq!(
6724 buffer.text(),
6725 "
6726 // above first function
6727 fn a() {
6728 // inside first function
6729 f10();
6730 }
6731 fn b() {
6732 // inside second function f200();
6733 }
6734 fn c() {
6735 f4000();
6736 }
6737 "
6738 .unindent()
6739 );
6740 });
6741 }
6742
6743 #[gpui::test]
6744 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6745 cx.foreground().forbid_parking();
6746
6747 let text = "
6748 use a::b;
6749 use a::c;
6750
6751 fn f() {
6752 b();
6753 c();
6754 }
6755 "
6756 .unindent();
6757
6758 let fs = FakeFs::new(cx.background());
6759 fs.insert_tree(
6760 "/dir",
6761 json!({
6762 "a.rs": text.clone(),
6763 }),
6764 )
6765 .await;
6766
6767 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6768 let buffer = project
6769 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6770 .await
6771 .unwrap();
6772
6773 // Simulate the language server sending us a small edit in the form of a very large diff.
6774 // Rust-analyzer does this when performing a merge-imports code action.
6775 let edits = project
6776 .update(cx, |project, cx| {
6777 project.edits_from_lsp(
6778 &buffer,
6779 [
6780 // Replace the first use statement without editing the semicolon.
6781 lsp::TextEdit {
6782 range: lsp::Range::new(
6783 lsp::Position::new(0, 4),
6784 lsp::Position::new(0, 8),
6785 ),
6786 new_text: "a::{b, c}".into(),
6787 },
6788 // Reinsert the remainder of the file between the semicolon and the final
6789 // newline of the file.
6790 lsp::TextEdit {
6791 range: lsp::Range::new(
6792 lsp::Position::new(0, 9),
6793 lsp::Position::new(0, 9),
6794 ),
6795 new_text: "\n\n".into(),
6796 },
6797 lsp::TextEdit {
6798 range: lsp::Range::new(
6799 lsp::Position::new(0, 9),
6800 lsp::Position::new(0, 9),
6801 ),
6802 new_text: "
6803 fn f() {
6804 b();
6805 c();
6806 }"
6807 .unindent(),
6808 },
6809 // Delete everything after the first newline of the file.
6810 lsp::TextEdit {
6811 range: lsp::Range::new(
6812 lsp::Position::new(1, 0),
6813 lsp::Position::new(7, 0),
6814 ),
6815 new_text: "".into(),
6816 },
6817 ],
6818 None,
6819 cx,
6820 )
6821 })
6822 .await
6823 .unwrap();
6824
6825 buffer.update(cx, |buffer, cx| {
6826 let edits = edits
6827 .into_iter()
6828 .map(|(range, text)| {
6829 (
6830 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6831 text,
6832 )
6833 })
6834 .collect::<Vec<_>>();
6835
6836 assert_eq!(
6837 edits,
6838 [
6839 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6840 (Point::new(1, 0)..Point::new(2, 0), "".into())
6841 ]
6842 );
6843
6844 for (range, new_text) in edits {
6845 buffer.edit([(range, new_text)], cx);
6846 }
6847 assert_eq!(
6848 buffer.text(),
6849 "
6850 use a::{b, c};
6851
6852 fn f() {
6853 b();
6854 c();
6855 }
6856 "
6857 .unindent()
6858 );
6859 });
6860 }
6861
6862 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6863 buffer: &Buffer,
6864 range: Range<T>,
6865 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6866 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6867 for chunk in buffer.snapshot().chunks(range, true) {
6868 if chunks.last().map_or(false, |prev_chunk| {
6869 prev_chunk.1 == chunk.diagnostic_severity
6870 }) {
6871 chunks.last_mut().unwrap().0.push_str(chunk.text);
6872 } else {
6873 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6874 }
6875 }
6876 chunks
6877 }
6878
6879 #[gpui::test]
6880 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6881 let dir = temp_tree(json!({
6882 "root": {
6883 "dir1": {},
6884 "dir2": {
6885 "dir3": {}
6886 }
6887 }
6888 }));
6889
6890 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6891 let cancel_flag = Default::default();
6892 let results = project
6893 .read_with(cx, |project, cx| {
6894 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6895 })
6896 .await;
6897
6898 assert!(results.is_empty());
6899 }
6900
6901 #[gpui::test(iterations = 10)]
6902 async fn test_definition(cx: &mut gpui::TestAppContext) {
6903 let mut language = Language::new(
6904 LanguageConfig {
6905 name: "Rust".into(),
6906 path_suffixes: vec!["rs".to_string()],
6907 ..Default::default()
6908 },
6909 Some(tree_sitter_rust::language()),
6910 );
6911 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6912
6913 let fs = FakeFs::new(cx.background());
6914 fs.insert_tree(
6915 "/dir",
6916 json!({
6917 "a.rs": "const fn a() { A }",
6918 "b.rs": "const y: i32 = crate::a()",
6919 }),
6920 )
6921 .await;
6922
6923 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6924 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6925
6926 let buffer = project
6927 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6928 .await
6929 .unwrap();
6930
6931 let fake_server = fake_servers.next().await.unwrap();
6932 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6933 let params = params.text_document_position_params;
6934 assert_eq!(
6935 params.text_document.uri.to_file_path().unwrap(),
6936 Path::new("/dir/b.rs"),
6937 );
6938 assert_eq!(params.position, lsp::Position::new(0, 22));
6939
6940 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6941 lsp::Location::new(
6942 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6943 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6944 ),
6945 )))
6946 });
6947
6948 let mut definitions = project
6949 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6950 .await
6951 .unwrap();
6952
6953 assert_eq!(definitions.len(), 1);
6954 let definition = definitions.pop().unwrap();
6955 cx.update(|cx| {
6956 let target_buffer = definition.buffer.read(cx);
6957 assert_eq!(
6958 target_buffer
6959 .file()
6960 .unwrap()
6961 .as_local()
6962 .unwrap()
6963 .abs_path(cx),
6964 Path::new("/dir/a.rs"),
6965 );
6966 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6967 assert_eq!(
6968 list_worktrees(&project, cx),
6969 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6970 );
6971
6972 drop(definition);
6973 });
6974 cx.read(|cx| {
6975 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6976 });
6977
6978 fn list_worktrees<'a>(
6979 project: &'a ModelHandle<Project>,
6980 cx: &'a AppContext,
6981 ) -> Vec<(&'a Path, bool)> {
6982 project
6983 .read(cx)
6984 .worktrees(cx)
6985 .map(|worktree| {
6986 let worktree = worktree.read(cx);
6987 (
6988 worktree.as_local().unwrap().abs_path().as_ref(),
6989 worktree.is_visible(),
6990 )
6991 })
6992 .collect::<Vec<_>>()
6993 }
6994 }
6995
6996 #[gpui::test]
6997 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6998 let mut language = Language::new(
6999 LanguageConfig {
7000 name: "TypeScript".into(),
7001 path_suffixes: vec!["ts".to_string()],
7002 ..Default::default()
7003 },
7004 Some(tree_sitter_typescript::language_typescript()),
7005 );
7006 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7007
7008 let fs = FakeFs::new(cx.background());
7009 fs.insert_tree(
7010 "/dir",
7011 json!({
7012 "a.ts": "",
7013 }),
7014 )
7015 .await;
7016
7017 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7018 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7019 let buffer = project
7020 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7021 .await
7022 .unwrap();
7023
7024 let fake_server = fake_language_servers.next().await.unwrap();
7025
7026 let text = "let a = b.fqn";
7027 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7028 let completions = project.update(cx, |project, cx| {
7029 project.completions(&buffer, text.len(), cx)
7030 });
7031
7032 fake_server
7033 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7034 Ok(Some(lsp::CompletionResponse::Array(vec![
7035 lsp::CompletionItem {
7036 label: "fullyQualifiedName?".into(),
7037 insert_text: Some("fullyQualifiedName".into()),
7038 ..Default::default()
7039 },
7040 ])))
7041 })
7042 .next()
7043 .await;
7044 let completions = completions.await.unwrap();
7045 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7046 assert_eq!(completions.len(), 1);
7047 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7048 assert_eq!(
7049 completions[0].old_range.to_offset(&snapshot),
7050 text.len() - 3..text.len()
7051 );
7052 }
7053
7054 #[gpui::test(iterations = 10)]
7055 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7056 let mut language = Language::new(
7057 LanguageConfig {
7058 name: "TypeScript".into(),
7059 path_suffixes: vec!["ts".to_string()],
7060 ..Default::default()
7061 },
7062 None,
7063 );
7064 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7065
7066 let fs = FakeFs::new(cx.background());
7067 fs.insert_tree(
7068 "/dir",
7069 json!({
7070 "a.ts": "a",
7071 }),
7072 )
7073 .await;
7074
7075 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7076 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7077 let buffer = project
7078 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7079 .await
7080 .unwrap();
7081
7082 let fake_server = fake_language_servers.next().await.unwrap();
7083
7084 // Language server returns code actions that contain commands, and not edits.
7085 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7086 fake_server
7087 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7088 Ok(Some(vec![
7089 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7090 title: "The code action".into(),
7091 command: Some(lsp::Command {
7092 title: "The command".into(),
7093 command: "_the/command".into(),
7094 arguments: Some(vec![json!("the-argument")]),
7095 }),
7096 ..Default::default()
7097 }),
7098 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7099 title: "two".into(),
7100 ..Default::default()
7101 }),
7102 ]))
7103 })
7104 .next()
7105 .await;
7106
7107 let action = actions.await.unwrap()[0].clone();
7108 let apply = project.update(cx, |project, cx| {
7109 project.apply_code_action(buffer.clone(), action, true, cx)
7110 });
7111
7112 // Resolving the code action does not populate its edits. In absence of
7113 // edits, we must execute the given command.
7114 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7115 |action, _| async move { Ok(action) },
7116 );
7117
7118 // While executing the command, the language server sends the editor
7119 // a `workspaceEdit` request.
7120 fake_server
7121 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7122 let fake = fake_server.clone();
7123 move |params, _| {
7124 assert_eq!(params.command, "_the/command");
7125 let fake = fake.clone();
7126 async move {
7127 fake.server
7128 .request::<lsp::request::ApplyWorkspaceEdit>(
7129 lsp::ApplyWorkspaceEditParams {
7130 label: None,
7131 edit: lsp::WorkspaceEdit {
7132 changes: Some(
7133 [(
7134 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7135 vec![lsp::TextEdit {
7136 range: lsp::Range::new(
7137 lsp::Position::new(0, 0),
7138 lsp::Position::new(0, 0),
7139 ),
7140 new_text: "X".into(),
7141 }],
7142 )]
7143 .into_iter()
7144 .collect(),
7145 ),
7146 ..Default::default()
7147 },
7148 },
7149 )
7150 .await
7151 .unwrap();
7152 Ok(Some(json!(null)))
7153 }
7154 }
7155 })
7156 .next()
7157 .await;
7158
7159 // Applying the code action returns a project transaction containing the edits
7160 // sent by the language server in its `workspaceEdit` request.
7161 let transaction = apply.await.unwrap();
7162 assert!(transaction.0.contains_key(&buffer));
7163 buffer.update(cx, |buffer, cx| {
7164 assert_eq!(buffer.text(), "Xa");
7165 buffer.undo(cx);
7166 assert_eq!(buffer.text(), "a");
7167 });
7168 }
7169
7170 #[gpui::test]
7171 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7172 let fs = FakeFs::new(cx.background());
7173 fs.insert_tree(
7174 "/dir",
7175 json!({
7176 "file1": "the old contents",
7177 }),
7178 )
7179 .await;
7180
7181 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7182 let buffer = project
7183 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7184 .await
7185 .unwrap();
7186 buffer
7187 .update(cx, |buffer, cx| {
7188 assert_eq!(buffer.text(), "the old contents");
7189 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7190 buffer.save(cx)
7191 })
7192 .await
7193 .unwrap();
7194
7195 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7196 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7197 }
7198
7199 #[gpui::test]
7200 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7201 let fs = FakeFs::new(cx.background());
7202 fs.insert_tree(
7203 "/dir",
7204 json!({
7205 "file1": "the old contents",
7206 }),
7207 )
7208 .await;
7209
7210 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7211 let buffer = project
7212 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7213 .await
7214 .unwrap();
7215 buffer
7216 .update(cx, |buffer, cx| {
7217 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7218 buffer.save(cx)
7219 })
7220 .await
7221 .unwrap();
7222
7223 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7224 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7225 }
7226
7227 #[gpui::test]
7228 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7229 let fs = FakeFs::new(cx.background());
7230 fs.insert_tree("/dir", json!({})).await;
7231
7232 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7233 let buffer = project.update(cx, |project, cx| {
7234 project.create_buffer("", None, cx).unwrap()
7235 });
7236 buffer.update(cx, |buffer, cx| {
7237 buffer.edit([(0..0, "abc")], cx);
7238 assert!(buffer.is_dirty());
7239 assert!(!buffer.has_conflict());
7240 });
7241 project
7242 .update(cx, |project, cx| {
7243 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7244 })
7245 .await
7246 .unwrap();
7247 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7248 buffer.read_with(cx, |buffer, cx| {
7249 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7250 assert!(!buffer.is_dirty());
7251 assert!(!buffer.has_conflict());
7252 });
7253
7254 let opened_buffer = project
7255 .update(cx, |project, cx| {
7256 project.open_local_buffer("/dir/file1", cx)
7257 })
7258 .await
7259 .unwrap();
7260 assert_eq!(opened_buffer, buffer);
7261 }
7262
7263 #[gpui::test(retries = 5)]
7264 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7265 let dir = temp_tree(json!({
7266 "a": {
7267 "file1": "",
7268 "file2": "",
7269 "file3": "",
7270 },
7271 "b": {
7272 "c": {
7273 "file4": "",
7274 "file5": "",
7275 }
7276 }
7277 }));
7278
7279 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7280 let rpc = project.read_with(cx, |p, _| p.client.clone());
7281
7282 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7283 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7284 async move { buffer.await.unwrap() }
7285 };
7286 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7287 project.read_with(cx, |project, cx| {
7288 let tree = project.worktrees(cx).next().unwrap();
7289 tree.read(cx)
7290 .entry_for_path(path)
7291 .expect(&format!("no entry for path {}", path))
7292 .id
7293 })
7294 };
7295
7296 let buffer2 = buffer_for_path("a/file2", cx).await;
7297 let buffer3 = buffer_for_path("a/file3", cx).await;
7298 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7299 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7300
7301 let file2_id = id_for_path("a/file2", &cx);
7302 let file3_id = id_for_path("a/file3", &cx);
7303 let file4_id = id_for_path("b/c/file4", &cx);
7304
7305 // Create a remote copy of this worktree.
7306 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7307 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7308 let (remote, load_task) = cx.update(|cx| {
7309 Worktree::remote(
7310 1,
7311 1,
7312 initial_snapshot.to_proto(&Default::default(), true),
7313 rpc.clone(),
7314 cx,
7315 )
7316 });
7317 // tree
7318 load_task.await;
7319
7320 cx.read(|cx| {
7321 assert!(!buffer2.read(cx).is_dirty());
7322 assert!(!buffer3.read(cx).is_dirty());
7323 assert!(!buffer4.read(cx).is_dirty());
7324 assert!(!buffer5.read(cx).is_dirty());
7325 });
7326
7327 // Rename and delete files and directories.
7328 tree.flush_fs_events(&cx).await;
7329 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7330 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7331 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7332 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7333 tree.flush_fs_events(&cx).await;
7334
7335 let expected_paths = vec![
7336 "a",
7337 "a/file1",
7338 "a/file2.new",
7339 "b",
7340 "d",
7341 "d/file3",
7342 "d/file4",
7343 ];
7344
7345 cx.read(|app| {
7346 assert_eq!(
7347 tree.read(app)
7348 .paths()
7349 .map(|p| p.to_str().unwrap())
7350 .collect::<Vec<_>>(),
7351 expected_paths
7352 );
7353
7354 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7355 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7356 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7357
7358 assert_eq!(
7359 buffer2.read(app).file().unwrap().path().as_ref(),
7360 Path::new("a/file2.new")
7361 );
7362 assert_eq!(
7363 buffer3.read(app).file().unwrap().path().as_ref(),
7364 Path::new("d/file3")
7365 );
7366 assert_eq!(
7367 buffer4.read(app).file().unwrap().path().as_ref(),
7368 Path::new("d/file4")
7369 );
7370 assert_eq!(
7371 buffer5.read(app).file().unwrap().path().as_ref(),
7372 Path::new("b/c/file5")
7373 );
7374
7375 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7376 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7377 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7378 assert!(buffer5.read(app).file().unwrap().is_deleted());
7379 });
7380
7381 // Update the remote worktree. Check that it becomes consistent with the
7382 // local worktree.
7383 remote.update(cx, |remote, cx| {
7384 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7385 &initial_snapshot,
7386 1,
7387 1,
7388 true,
7389 );
7390 remote
7391 .as_remote_mut()
7392 .unwrap()
7393 .snapshot
7394 .apply_remote_update(update_message)
7395 .unwrap();
7396
7397 assert_eq!(
7398 remote
7399 .paths()
7400 .map(|p| p.to_str().unwrap())
7401 .collect::<Vec<_>>(),
7402 expected_paths
7403 );
7404 });
7405 }
7406
7407 #[gpui::test]
7408 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7409 let fs = FakeFs::new(cx.background());
7410 fs.insert_tree(
7411 "/dir",
7412 json!({
7413 "a.txt": "a-contents",
7414 "b.txt": "b-contents",
7415 }),
7416 )
7417 .await;
7418
7419 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7420
7421 // Spawn multiple tasks to open paths, repeating some paths.
7422 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7423 (
7424 p.open_local_buffer("/dir/a.txt", cx),
7425 p.open_local_buffer("/dir/b.txt", cx),
7426 p.open_local_buffer("/dir/a.txt", cx),
7427 )
7428 });
7429
7430 let buffer_a_1 = buffer_a_1.await.unwrap();
7431 let buffer_a_2 = buffer_a_2.await.unwrap();
7432 let buffer_b = buffer_b.await.unwrap();
7433 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7434 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7435
7436 // There is only one buffer per path.
7437 let buffer_a_id = buffer_a_1.id();
7438 assert_eq!(buffer_a_2.id(), buffer_a_id);
7439
7440 // Open the same path again while it is still open.
7441 drop(buffer_a_1);
7442 let buffer_a_3 = project
7443 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7444 .await
7445 .unwrap();
7446
7447 // There's still only one buffer per path.
7448 assert_eq!(buffer_a_3.id(), buffer_a_id);
7449 }
7450
7451 #[gpui::test]
7452 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7453 let fs = FakeFs::new(cx.background());
7454 fs.insert_tree(
7455 "/dir",
7456 json!({
7457 "file1": "abc",
7458 "file2": "def",
7459 "file3": "ghi",
7460 }),
7461 )
7462 .await;
7463
7464 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7465
7466 let buffer1 = project
7467 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7468 .await
7469 .unwrap();
7470 let events = Rc::new(RefCell::new(Vec::new()));
7471
7472 // initially, the buffer isn't dirty.
7473 buffer1.update(cx, |buffer, cx| {
7474 cx.subscribe(&buffer1, {
7475 let events = events.clone();
7476 move |_, _, event, _| match event {
7477 BufferEvent::Operation(_) => {}
7478 _ => events.borrow_mut().push(event.clone()),
7479 }
7480 })
7481 .detach();
7482
7483 assert!(!buffer.is_dirty());
7484 assert!(events.borrow().is_empty());
7485
7486 buffer.edit([(1..2, "")], cx);
7487 });
7488
7489 // after the first edit, the buffer is dirty, and emits a dirtied event.
7490 buffer1.update(cx, |buffer, cx| {
7491 assert!(buffer.text() == "ac");
7492 assert!(buffer.is_dirty());
7493 assert_eq!(
7494 *events.borrow(),
7495 &[language::Event::Edited, language::Event::Dirtied]
7496 );
7497 events.borrow_mut().clear();
7498 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7499 });
7500
7501 // after saving, the buffer is not dirty, and emits a saved event.
7502 buffer1.update(cx, |buffer, cx| {
7503 assert!(!buffer.is_dirty());
7504 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7505 events.borrow_mut().clear();
7506
7507 buffer.edit([(1..1, "B")], cx);
7508 buffer.edit([(2..2, "D")], cx);
7509 });
7510
7511 // after editing again, the buffer is dirty, and emits another dirty event.
7512 buffer1.update(cx, |buffer, cx| {
7513 assert!(buffer.text() == "aBDc");
7514 assert!(buffer.is_dirty());
7515 assert_eq!(
7516 *events.borrow(),
7517 &[
7518 language::Event::Edited,
7519 language::Event::Dirtied,
7520 language::Event::Edited,
7521 ],
7522 );
7523 events.borrow_mut().clear();
7524
7525 // TODO - currently, after restoring the buffer to its
7526 // previously-saved state, the is still considered dirty.
7527 buffer.edit([(1..3, "")], cx);
7528 assert!(buffer.text() == "ac");
7529 assert!(buffer.is_dirty());
7530 });
7531
7532 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7533
7534 // When a file is deleted, the buffer is considered dirty.
7535 let events = Rc::new(RefCell::new(Vec::new()));
7536 let buffer2 = project
7537 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7538 .await
7539 .unwrap();
7540 buffer2.update(cx, |_, cx| {
7541 cx.subscribe(&buffer2, {
7542 let events = events.clone();
7543 move |_, _, event, _| events.borrow_mut().push(event.clone())
7544 })
7545 .detach();
7546 });
7547
7548 fs.remove_file("/dir/file2".as_ref(), Default::default())
7549 .await
7550 .unwrap();
7551 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7552 assert_eq!(
7553 *events.borrow(),
7554 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7555 );
7556
7557 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7558 let events = Rc::new(RefCell::new(Vec::new()));
7559 let buffer3 = project
7560 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7561 .await
7562 .unwrap();
7563 buffer3.update(cx, |_, cx| {
7564 cx.subscribe(&buffer3, {
7565 let events = events.clone();
7566 move |_, _, event, _| events.borrow_mut().push(event.clone())
7567 })
7568 .detach();
7569 });
7570
7571 buffer3.update(cx, |buffer, cx| {
7572 buffer.edit([(0..0, "x")], cx);
7573 });
7574 events.borrow_mut().clear();
7575 fs.remove_file("/dir/file3".as_ref(), Default::default())
7576 .await
7577 .unwrap();
7578 buffer3
7579 .condition(&cx, |_, _| !events.borrow().is_empty())
7580 .await;
7581 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7582 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7583 }
7584
7585 #[gpui::test]
7586 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7587 let initial_contents = "aaa\nbbbbb\nc\n";
7588 let fs = FakeFs::new(cx.background());
7589 fs.insert_tree(
7590 "/dir",
7591 json!({
7592 "the-file": initial_contents,
7593 }),
7594 )
7595 .await;
7596 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7597 let buffer = project
7598 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7599 .await
7600 .unwrap();
7601
7602 let anchors = (0..3)
7603 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7604 .collect::<Vec<_>>();
7605
7606 // Change the file on disk, adding two new lines of text, and removing
7607 // one line.
7608 buffer.read_with(cx, |buffer, _| {
7609 assert!(!buffer.is_dirty());
7610 assert!(!buffer.has_conflict());
7611 });
7612 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7613 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7614 .await
7615 .unwrap();
7616
7617 // Because the buffer was not modified, it is reloaded from disk. Its
7618 // contents are edited according to the diff between the old and new
7619 // file contents.
7620 buffer
7621 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7622 .await;
7623
7624 buffer.update(cx, |buffer, _| {
7625 assert_eq!(buffer.text(), new_contents);
7626 assert!(!buffer.is_dirty());
7627 assert!(!buffer.has_conflict());
7628
7629 let anchor_positions = anchors
7630 .iter()
7631 .map(|anchor| anchor.to_point(&*buffer))
7632 .collect::<Vec<_>>();
7633 assert_eq!(
7634 anchor_positions,
7635 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7636 );
7637 });
7638
7639 // Modify the buffer
7640 buffer.update(cx, |buffer, cx| {
7641 buffer.edit([(0..0, " ")], cx);
7642 assert!(buffer.is_dirty());
7643 assert!(!buffer.has_conflict());
7644 });
7645
7646 // Change the file on disk again, adding blank lines to the beginning.
7647 fs.save(
7648 "/dir/the-file".as_ref(),
7649 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7650 )
7651 .await
7652 .unwrap();
7653
7654 // Because the buffer is modified, it doesn't reload from disk, but is
7655 // marked as having a conflict.
7656 buffer
7657 .condition(&cx, |buffer, _| buffer.has_conflict())
7658 .await;
7659 }
7660
7661 #[gpui::test]
7662 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7663 cx.foreground().forbid_parking();
7664
7665 let fs = FakeFs::new(cx.background());
7666 fs.insert_tree(
7667 "/the-dir",
7668 json!({
7669 "a.rs": "
7670 fn foo(mut v: Vec<usize>) {
7671 for x in &v {
7672 v.push(1);
7673 }
7674 }
7675 "
7676 .unindent(),
7677 }),
7678 )
7679 .await;
7680
7681 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7682 let buffer = project
7683 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7684 .await
7685 .unwrap();
7686
7687 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7688 let message = lsp::PublishDiagnosticsParams {
7689 uri: buffer_uri.clone(),
7690 diagnostics: vec![
7691 lsp::Diagnostic {
7692 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7693 severity: Some(DiagnosticSeverity::WARNING),
7694 message: "error 1".to_string(),
7695 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7696 location: lsp::Location {
7697 uri: buffer_uri.clone(),
7698 range: lsp::Range::new(
7699 lsp::Position::new(1, 8),
7700 lsp::Position::new(1, 9),
7701 ),
7702 },
7703 message: "error 1 hint 1".to_string(),
7704 }]),
7705 ..Default::default()
7706 },
7707 lsp::Diagnostic {
7708 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7709 severity: Some(DiagnosticSeverity::HINT),
7710 message: "error 1 hint 1".to_string(),
7711 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7712 location: lsp::Location {
7713 uri: buffer_uri.clone(),
7714 range: lsp::Range::new(
7715 lsp::Position::new(1, 8),
7716 lsp::Position::new(1, 9),
7717 ),
7718 },
7719 message: "original diagnostic".to_string(),
7720 }]),
7721 ..Default::default()
7722 },
7723 lsp::Diagnostic {
7724 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7725 severity: Some(DiagnosticSeverity::ERROR),
7726 message: "error 2".to_string(),
7727 related_information: Some(vec![
7728 lsp::DiagnosticRelatedInformation {
7729 location: lsp::Location {
7730 uri: buffer_uri.clone(),
7731 range: lsp::Range::new(
7732 lsp::Position::new(1, 13),
7733 lsp::Position::new(1, 15),
7734 ),
7735 },
7736 message: "error 2 hint 1".to_string(),
7737 },
7738 lsp::DiagnosticRelatedInformation {
7739 location: lsp::Location {
7740 uri: buffer_uri.clone(),
7741 range: lsp::Range::new(
7742 lsp::Position::new(1, 13),
7743 lsp::Position::new(1, 15),
7744 ),
7745 },
7746 message: "error 2 hint 2".to_string(),
7747 },
7748 ]),
7749 ..Default::default()
7750 },
7751 lsp::Diagnostic {
7752 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7753 severity: Some(DiagnosticSeverity::HINT),
7754 message: "error 2 hint 1".to_string(),
7755 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7756 location: lsp::Location {
7757 uri: buffer_uri.clone(),
7758 range: lsp::Range::new(
7759 lsp::Position::new(2, 8),
7760 lsp::Position::new(2, 17),
7761 ),
7762 },
7763 message: "original diagnostic".to_string(),
7764 }]),
7765 ..Default::default()
7766 },
7767 lsp::Diagnostic {
7768 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7769 severity: Some(DiagnosticSeverity::HINT),
7770 message: "error 2 hint 2".to_string(),
7771 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7772 location: lsp::Location {
7773 uri: buffer_uri.clone(),
7774 range: lsp::Range::new(
7775 lsp::Position::new(2, 8),
7776 lsp::Position::new(2, 17),
7777 ),
7778 },
7779 message: "original diagnostic".to_string(),
7780 }]),
7781 ..Default::default()
7782 },
7783 ],
7784 version: None,
7785 };
7786
7787 project
7788 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7789 .unwrap();
7790 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7791
7792 assert_eq!(
7793 buffer
7794 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7795 .collect::<Vec<_>>(),
7796 &[
7797 DiagnosticEntry {
7798 range: Point::new(1, 8)..Point::new(1, 9),
7799 diagnostic: Diagnostic {
7800 severity: DiagnosticSeverity::WARNING,
7801 message: "error 1".to_string(),
7802 group_id: 0,
7803 is_primary: true,
7804 ..Default::default()
7805 }
7806 },
7807 DiagnosticEntry {
7808 range: Point::new(1, 8)..Point::new(1, 9),
7809 diagnostic: Diagnostic {
7810 severity: DiagnosticSeverity::HINT,
7811 message: "error 1 hint 1".to_string(),
7812 group_id: 0,
7813 is_primary: false,
7814 ..Default::default()
7815 }
7816 },
7817 DiagnosticEntry {
7818 range: Point::new(1, 13)..Point::new(1, 15),
7819 diagnostic: Diagnostic {
7820 severity: DiagnosticSeverity::HINT,
7821 message: "error 2 hint 1".to_string(),
7822 group_id: 1,
7823 is_primary: false,
7824 ..Default::default()
7825 }
7826 },
7827 DiagnosticEntry {
7828 range: Point::new(1, 13)..Point::new(1, 15),
7829 diagnostic: Diagnostic {
7830 severity: DiagnosticSeverity::HINT,
7831 message: "error 2 hint 2".to_string(),
7832 group_id: 1,
7833 is_primary: false,
7834 ..Default::default()
7835 }
7836 },
7837 DiagnosticEntry {
7838 range: Point::new(2, 8)..Point::new(2, 17),
7839 diagnostic: Diagnostic {
7840 severity: DiagnosticSeverity::ERROR,
7841 message: "error 2".to_string(),
7842 group_id: 1,
7843 is_primary: true,
7844 ..Default::default()
7845 }
7846 }
7847 ]
7848 );
7849
7850 assert_eq!(
7851 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7852 &[
7853 DiagnosticEntry {
7854 range: Point::new(1, 8)..Point::new(1, 9),
7855 diagnostic: Diagnostic {
7856 severity: DiagnosticSeverity::WARNING,
7857 message: "error 1".to_string(),
7858 group_id: 0,
7859 is_primary: true,
7860 ..Default::default()
7861 }
7862 },
7863 DiagnosticEntry {
7864 range: Point::new(1, 8)..Point::new(1, 9),
7865 diagnostic: Diagnostic {
7866 severity: DiagnosticSeverity::HINT,
7867 message: "error 1 hint 1".to_string(),
7868 group_id: 0,
7869 is_primary: false,
7870 ..Default::default()
7871 }
7872 },
7873 ]
7874 );
7875 assert_eq!(
7876 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7877 &[
7878 DiagnosticEntry {
7879 range: Point::new(1, 13)..Point::new(1, 15),
7880 diagnostic: Diagnostic {
7881 severity: DiagnosticSeverity::HINT,
7882 message: "error 2 hint 1".to_string(),
7883 group_id: 1,
7884 is_primary: false,
7885 ..Default::default()
7886 }
7887 },
7888 DiagnosticEntry {
7889 range: Point::new(1, 13)..Point::new(1, 15),
7890 diagnostic: Diagnostic {
7891 severity: DiagnosticSeverity::HINT,
7892 message: "error 2 hint 2".to_string(),
7893 group_id: 1,
7894 is_primary: false,
7895 ..Default::default()
7896 }
7897 },
7898 DiagnosticEntry {
7899 range: Point::new(2, 8)..Point::new(2, 17),
7900 diagnostic: Diagnostic {
7901 severity: DiagnosticSeverity::ERROR,
7902 message: "error 2".to_string(),
7903 group_id: 1,
7904 is_primary: true,
7905 ..Default::default()
7906 }
7907 }
7908 ]
7909 );
7910 }
7911
7912 #[gpui::test]
7913 async fn test_rename(cx: &mut gpui::TestAppContext) {
7914 cx.foreground().forbid_parking();
7915
7916 let mut language = Language::new(
7917 LanguageConfig {
7918 name: "Rust".into(),
7919 path_suffixes: vec!["rs".to_string()],
7920 ..Default::default()
7921 },
7922 Some(tree_sitter_rust::language()),
7923 );
7924 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7925 capabilities: lsp::ServerCapabilities {
7926 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7927 prepare_provider: Some(true),
7928 work_done_progress_options: Default::default(),
7929 })),
7930 ..Default::default()
7931 },
7932 ..Default::default()
7933 });
7934
7935 let fs = FakeFs::new(cx.background());
7936 fs.insert_tree(
7937 "/dir",
7938 json!({
7939 "one.rs": "const ONE: usize = 1;",
7940 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7941 }),
7942 )
7943 .await;
7944
7945 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7946 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7947 let buffer = project
7948 .update(cx, |project, cx| {
7949 project.open_local_buffer("/dir/one.rs", cx)
7950 })
7951 .await
7952 .unwrap();
7953
7954 let fake_server = fake_servers.next().await.unwrap();
7955
7956 let response = project.update(cx, |project, cx| {
7957 project.prepare_rename(buffer.clone(), 7, cx)
7958 });
7959 fake_server
7960 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7961 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7962 assert_eq!(params.position, lsp::Position::new(0, 7));
7963 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7964 lsp::Position::new(0, 6),
7965 lsp::Position::new(0, 9),
7966 ))))
7967 })
7968 .next()
7969 .await
7970 .unwrap();
7971 let range = response.await.unwrap().unwrap();
7972 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7973 assert_eq!(range, 6..9);
7974
7975 let response = project.update(cx, |project, cx| {
7976 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7977 });
7978 fake_server
7979 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7980 assert_eq!(
7981 params.text_document_position.text_document.uri.as_str(),
7982 "file:///dir/one.rs"
7983 );
7984 assert_eq!(
7985 params.text_document_position.position,
7986 lsp::Position::new(0, 7)
7987 );
7988 assert_eq!(params.new_name, "THREE");
7989 Ok(Some(lsp::WorkspaceEdit {
7990 changes: Some(
7991 [
7992 (
7993 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7994 vec![lsp::TextEdit::new(
7995 lsp::Range::new(
7996 lsp::Position::new(0, 6),
7997 lsp::Position::new(0, 9),
7998 ),
7999 "THREE".to_string(),
8000 )],
8001 ),
8002 (
8003 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8004 vec![
8005 lsp::TextEdit::new(
8006 lsp::Range::new(
8007 lsp::Position::new(0, 24),
8008 lsp::Position::new(0, 27),
8009 ),
8010 "THREE".to_string(),
8011 ),
8012 lsp::TextEdit::new(
8013 lsp::Range::new(
8014 lsp::Position::new(0, 35),
8015 lsp::Position::new(0, 38),
8016 ),
8017 "THREE".to_string(),
8018 ),
8019 ],
8020 ),
8021 ]
8022 .into_iter()
8023 .collect(),
8024 ),
8025 ..Default::default()
8026 }))
8027 })
8028 .next()
8029 .await
8030 .unwrap();
8031 let mut transaction = response.await.unwrap().0;
8032 assert_eq!(transaction.len(), 2);
8033 assert_eq!(
8034 transaction
8035 .remove_entry(&buffer)
8036 .unwrap()
8037 .0
8038 .read_with(cx, |buffer, _| buffer.text()),
8039 "const THREE: usize = 1;"
8040 );
8041 assert_eq!(
8042 transaction
8043 .into_keys()
8044 .next()
8045 .unwrap()
8046 .read_with(cx, |buffer, _| buffer.text()),
8047 "const TWO: usize = one::THREE + one::THREE;"
8048 );
8049 }
8050
8051 #[gpui::test]
8052 async fn test_search(cx: &mut gpui::TestAppContext) {
8053 let fs = FakeFs::new(cx.background());
8054 fs.insert_tree(
8055 "/dir",
8056 json!({
8057 "one.rs": "const ONE: usize = 1;",
8058 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8059 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8060 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8061 }),
8062 )
8063 .await;
8064 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8065 assert_eq!(
8066 search(&project, SearchQuery::text("TWO", false, true), cx)
8067 .await
8068 .unwrap(),
8069 HashMap::from_iter([
8070 ("two.rs".to_string(), vec![6..9]),
8071 ("three.rs".to_string(), vec![37..40])
8072 ])
8073 );
8074
8075 let buffer_4 = project
8076 .update(cx, |project, cx| {
8077 project.open_local_buffer("/dir/four.rs", cx)
8078 })
8079 .await
8080 .unwrap();
8081 buffer_4.update(cx, |buffer, cx| {
8082 let text = "two::TWO";
8083 buffer.edit([(20..28, text), (31..43, text)], cx);
8084 });
8085
8086 assert_eq!(
8087 search(&project, SearchQuery::text("TWO", false, true), cx)
8088 .await
8089 .unwrap(),
8090 HashMap::from_iter([
8091 ("two.rs".to_string(), vec![6..9]),
8092 ("three.rs".to_string(), vec![37..40]),
8093 ("four.rs".to_string(), vec![25..28, 36..39])
8094 ])
8095 );
8096
8097 async fn search(
8098 project: &ModelHandle<Project>,
8099 query: SearchQuery,
8100 cx: &mut gpui::TestAppContext,
8101 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8102 let results = project
8103 .update(cx, |project, cx| project.search(query, cx))
8104 .await?;
8105
8106 Ok(results
8107 .into_iter()
8108 .map(|(buffer, ranges)| {
8109 buffer.read_with(cx, |buffer, _| {
8110 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8111 let ranges = ranges
8112 .into_iter()
8113 .map(|range| range.to_offset(buffer))
8114 .collect::<Vec<_>>();
8115 (path, ranges)
8116 })
8117 })
8118 .collect())
8119 }
8120 }
8121}