1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, select_biased, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
/// An entity that can be associated with a project entry (e.g. an item shown
/// in a workspace pane that is backed by a file).
pub trait Item: Entity {
    /// The project entry this item represents, if it corresponds to one.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

/// A registry of all projects in the application, held via weak handles so it
/// does not keep projects alive on its own.
#[derive(Default)]
pub struct ProjectStore {
    projects: Vec<WeakModelHandle<Project>>,
}
66
/// A collection of worktrees (root directories) plus everything needed to work
/// on and collaborate over them: open buffers, language servers, diagnostics,
/// collaborators, and the client state used for sharing or joining.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The most recently activated project entry, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Fully started language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // Language servers whose startup is still in flight; the task resolves to
    // `None` if startup fails.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Status (name, pending work, pending diagnostics) per language server id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    // The last workspace edit applied on behalf of each language server,
    // keyed by server id.
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Counter used to allocate `ProjectEntryId`s for local worktree entries.
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    // Host-side (`Local`) vs. guest-side (`Remote`) state; see `ProjectClientState`.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Signalled whenever a buffer is opened; the sender is shared so multiple
    // code paths can notify.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids known per peer — presumably the buffers already sent to each
    // guest while sharing; TODO confirm against the sharing code paths.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // In-flight buffer loads keyed by project path, so concurrent opens of the
    // same path share one load.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // In-flight local worktree creations keyed by absolute path, shared so
    // concurrent requests for the same path resolve together.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All buffers that have been opened, keyed by remote buffer id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Per-buffer list of (version id, snapshot) pairs — presumably retained so
    // LSP requests can be interpreted against the version they were sent for;
    // TODO confirm.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value generated at construction; NOTE(review): purpose not visible
    // in this chunk — confirm before documenting further.
    nonce: u128,
}
100
/// Reasons joining a remote project can fail, surfaced to the caller of
/// `Project::remote`.
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

/// The project's hold on an opened buffer.
enum OpenBuffer {
    // Kept alive by the project (used while the project is shared).
    Strong(ModelHandle<Buffer>),
    // Not kept alive by the project; may have been dropped elsewhere.
    Weak(WeakModelHandle<Buffer>),
    // Still loading; holds operations received in the meantime — presumably
    // applied once the buffer finishes loading (TODO confirm).
    Loading(Vec<Operation>),
}

/// The project's hold on a worktree; visible worktrees are kept strong while
/// sharing (see `share`/`unshared`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

/// Whether this instance is the host's own project or a guest's replica.
enum ProjectClientState {
    Local {
        // True while the project is actively shared with guests.
        is_shared: bool,
        // Server-assigned project id; `None` until registered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Whether the project is publicly joinable; watched by the
        // registration task spawned in `Project::local`.
        public_tx: watch::Sender<bool>,
        public_rx: watch::Receiver<bool>,
        // Registers/unregisters the project as connection status and
        // visibility change.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        // Set once the host stops sharing or the connection drops.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Flips `sharing_has_stopped` when the connection status changes.
        _detect_unshare_task: Task<Option<()>>,
    },
}

/// Another participant collaborating on this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
147
/// Events emitted by a `Project` for observers (e.g. workspace UI).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

/// Serializable status of one language server, keyed in the project by
/// server id.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // In-progress work items keyed by progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}

/// A single in-progress work item reported by a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // Instants aren't serializable; used only for in-process bookkeeping.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

/// A path to a file or directory, relative to a specific worktree root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

/// Counts of primary error and warning diagnostics (see `DiagnosticSummary::new`).
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

/// A range within a specific buffer, e.g. a definition or reference target.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

/// A highlighted range within a document, as reported by an LSP
/// `documentHighlight` request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

/// A project-wide symbol (e.g. from an LSP workspace-symbol search).
#[derive(Clone, Debug)]
pub struct Symbol {
    // The worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    // The worktree containing the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte digest — presumably a SHA-256 used to validate the symbol when
    // acting on it later (the file imports `sha2`); TODO confirm.
    pub signature: [u8; 32],
}

/// A set of buffer transactions produced by one logical operation (e.g. a
/// workspace edit), keyed by the buffer they apply to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
217
218impl DiagnosticSummary {
219 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
220 let mut this = Self {
221 error_count: 0,
222 warning_count: 0,
223 };
224
225 for entry in diagnostics {
226 if entry.diagnostic.is_primary {
227 match entry.diagnostic.severity {
228 DiagnosticSeverity::ERROR => this.error_count += 1,
229 DiagnosticSeverity::WARNING => this.warning_count += 1,
230 _ => {}
231 }
232 }
233 }
234
235 this
236 }
237
238 pub fn is_empty(&self) -> bool {
239 self.error_count == 0 && self.warning_count == 0
240 }
241
242 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
243 proto::DiagnosticSummary {
244 path: path.to_string_lossy().to_string(),
245 error_count: self.error_count as u32,
246 warning_count: self.warning_count as u32,
247 }
248 }
249}
250
/// A unique identifier for a file-system entry within a project.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel id holding the largest representable value.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id from a shared atomic counter.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id from its wire (protobuf) representation.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts this id into its wire (protobuf) representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Exposes the raw numeric value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
273
274impl Project {
    /// Registers all of `Project`'s RPC handlers with the client so incoming
    /// messages and requests get routed to the right project model. Must be
    /// called once at startup, before any project traffic arrives.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages.
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Requests that expect a response.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP requests, one handler instantiation per command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
314
    /// Creates a new local (host-side) project.
    ///
    /// Spawns a background task that keeps the project's server registration
    /// in sync: it registers the project whenever the client is connected and
    /// the project is public, and unregisters it otherwise. The project is
    /// also added to the global `ProjectStore`.
    pub fn local(
        public: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (public_tx, public_rx) = watch::channel_with(public);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let mut status_rx = client.clone().status();
                let mut public_rx = public_rx.clone();
                move |this, mut cx| async move {
                    loop {
                        // Wake on either a connection-status change or a
                        // public/private toggle; `?` exits when a channel
                        // closes.
                        select_biased! {
                            value = status_rx.next().fuse() => { value?; }
                            value = public_rx.next().fuse() => { value?; }
                        };
                        // Stop if the project was dropped.
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *public_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx));
                        }
                    }
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    public_tx,
                    public_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
388
    /// Joins a project hosted by another user, returning a guest-side replica.
    ///
    /// Authenticates and connects, sends a `JoinProject` request, and maps a
    /// declined response onto the matching `JoinProjectError` variant. On
    /// accept, it constructs remote worktrees from the response, builds the
    /// project model, then resolves and records the current collaborators.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The host either accepts (sending initial project state) or declines
        // with a reason.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Build a remote worktree per worktree in the response, detaching the
        // tasks that stream in their contents.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                // Route messages addressed to this remote project id to this model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Marks the project read-only once the connection is lost.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Resolve the collaborators' user records before recording them on the
        // project.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
524
    /// Test helper: builds a public local project with a fake HTTP client and
    /// a test language registry, adds a worktree for each of `root_paths`, and
    /// waits for each worktree's initial scan to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        // All HTTP requests made through this client get a 404 response.
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::default());
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has fully scanned its directory.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
551
    /// Looks up an open buffer by its remote id, returning it only if its
    /// model handle is still alive.
    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    /// The registry of languages available to this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    /// A clone of the RPC client used for collaboration.
    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    /// The store of known users.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }
569
    /// Test-only sanity checks. For local projects: no two worktrees may share
    /// the same absolute root path. For remote projects: no open buffer may
    /// have deferred (not-yet-applied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
603
604 #[cfg(any(test, feature = "test-support"))]
605 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
606 let path = path.into();
607 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
608 self.opened_buffers.iter().any(|(_, buffer)| {
609 if let Some(buffer) = buffer.upgrade(cx) {
610 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
611 if file.worktree == worktree && file.path() == &path.path {
612 return true;
613 }
614 }
615 }
616 false
617 })
618 } else {
619 false
620 }
621 }
622
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    /// Sets whether this local project is publicly joinable. Writing the watch
    /// wakes the registration task spawned in `Project::local`. No-op for
    /// remote projects.
    pub fn set_public(&mut self, is_public: bool) {
        if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
            *public_tx.borrow_mut() = is_public;
        }
    }

    /// Whether the project is public. Remote projects always report true.
    pub fn is_public(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
            ProjectClientState::Remote { .. } => true,
        }
    }
639
640 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
641 self.unshared(cx);
642 for worktree in &self.worktrees {
643 if let Some(worktree) = worktree.upgrade(cx) {
644 worktree.update(cx, |worktree, _| {
645 worktree.as_local_mut().unwrap().unregister();
646 });
647 }
648 }
649
650 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
651 *remote_id_tx.borrow_mut() = None;
652 }
653
654 self.subscriptions.clear();
655 }
656
    /// Registers this local project with the server, if not already
    /// registered: requests a project id, stores it, emits
    /// `Event::RemoteIdChanged`, subscribes the model to messages for that id,
    /// and registers each live local worktree under the new id.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        // Already registered — nothing to do.
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            // Fail the whole registration if any worktree fails to register.
            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
693
694 pub fn remote_id(&self) -> Option<u64> {
695 match &self.client_state {
696 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
697 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
698 }
699 }
700
    /// Returns a future that resolves to the project's remote id, waiting (for
    /// local projects) until the server has assigned one.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            // Local: watch the id channel until it holds `Some`.
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            // Remote: the id is already known.
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Exactly one of `id`/`watch` is set above, so this is safe.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
723
724 pub fn shared_remote_id(&self) -> Option<u64> {
725 match &self.client_state {
726 ProjectClientState::Local {
727 remote_id_rx,
728 is_shared,
729 ..
730 } => {
731 if *is_shared {
732 *remote_id_rx.borrow()
733 } else {
734 None
735 }
736 }
737 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
738 }
739 }
740
    /// This peer's replica id: 0 for the host (local project), the
    /// server-assigned id for guests.
    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    /// The current collaborators on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    /// Iterates over the worktrees whose model handles are still alive,
    /// silently skipping dropped weak handles.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
760
761 pub fn visible_worktrees<'a>(
762 &'a self,
763 cx: &'a AppContext,
764 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
765 self.worktrees.iter().filter_map(|worktree| {
766 worktree.upgrade(cx).and_then(|worktree| {
767 if worktree.read(cx).is_visible() {
768 Some(worktree)
769 } else {
770 None
771 }
772 })
773 })
774 }
775
776 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
777 self.visible_worktrees(cx)
778 .map(|tree| tree.read(cx).root_name())
779 }
780
    /// Finds the live worktree with the given id, if any.
    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    /// Finds the live worktree containing the given project entry, if any.
    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    /// The id of the worktree containing the given project entry, if any.
    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }
807
808 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
809 paths.iter().all(|path| self.contains_path(&path, cx))
810 }
811
812 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
813 for worktree in self.worktrees(cx) {
814 let worktree = worktree.read(cx).as_local();
815 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
816 return true;
817 }
818 }
819 false
820 }
821
    /// Creates a file or directory at `project_path`.
    ///
    /// Returns `None` if the path's worktree no longer exists. On a local
    /// project this delegates to the local worktree; on a remote project it
    /// sends a `CreateProjectEntry` request and inserts the entry returned by
    /// the host into the remote worktree, tagged with the host's scan id.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so this can't panic.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        // Paths travel over the wire as raw OS-string bytes.
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
864
    /// Copies the entry `entry_id` to `new_path` within its worktree.
    ///
    /// Returns `None` if no worktree contains the entry (or, locally, if the
    /// worktree declines the copy). Remote projects send a `CopyProjectEntry`
    /// request and insert the host's returned entry into the remote worktree.
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so this can't panic.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        // Paths travel over the wire as raw OS-string bytes.
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
907
    /// Renames (moves) the entry `entry_id` to `new_path` within its worktree.
    ///
    /// Returns `None` if no worktree contains the entry (or, locally, if the
    /// worktree declines the rename). Remote projects send a
    /// `RenameProjectEntry` request and insert the host's returned entry into
    /// the remote worktree.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so this can't panic.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        // Paths travel over the wire as raw OS-string bytes.
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
950
    /// Deletes the entry `entry_id` from its worktree.
    ///
    /// Returns `None` if no worktree contains the entry (or, locally, if the
    /// worktree declines the deletion). Remote projects send a
    /// `DeleteProjectEntry` request and then remove the entry from the remote
    /// worktree at the host-reported scan id.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so this can't panic.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
983
    /// Begins sharing a registered local project with guests.
    ///
    /// Marks the project shared, promotes weak buffer and worktree handles to
    /// strong ones (so they stay alive for the duration of sharing), and then
    /// shares each worktree under the project's remote id. Errors if the
    /// project is remote or hasn't been registered yet; a no-op if already
    /// shared.
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        // Keep every live buffer alive while the project is shared.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // No buffer should be mid-load when sharing begins.
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        // Likewise keep every live worktree alive while shared.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
1044
1045 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1046 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1047 if !*is_shared {
1048 return;
1049 }
1050
1051 *is_shared = false;
1052 self.collaborators.clear();
1053 self.shared_buffers.clear();
1054 for worktree_handle in self.worktrees.iter_mut() {
1055 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1056 let is_visible = worktree.update(cx, |worktree, _| {
1057 worktree.as_local_mut().unwrap().unshare();
1058 worktree.is_visible()
1059 });
1060 if !is_visible {
1061 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1062 }
1063 }
1064 }
1065
1066 for open_buffer in self.opened_buffers.values_mut() {
1067 match open_buffer {
1068 OpenBuffer::Strong(buffer) => {
1069 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1070 }
1071 _ => {}
1072 }
1073 }
1074
1075 cx.notify();
1076 } else {
1077 log::error!("attempted to unshare a remote project");
1078 }
1079 }
1080
1081 pub fn respond_to_join_request(
1082 &mut self,
1083 requester_id: u64,
1084 allow: bool,
1085 cx: &mut ModelContext<Self>,
1086 ) {
1087 if let Some(project_id) = self.remote_id() {
1088 let share = self.share(cx);
1089 let client = self.client.clone();
1090 cx.foreground()
1091 .spawn(async move {
1092 share.await?;
1093 client.send(proto::RespondToJoinProjectRequest {
1094 requester_id,
1095 project_id,
1096 allow,
1097 })
1098 })
1099 .detach_and_log_err(cx);
1100 }
1101 }
1102
1103 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1104 if let ProjectClientState::Remote {
1105 sharing_has_stopped,
1106 ..
1107 } = &mut self.client_state
1108 {
1109 *sharing_has_stopped = true;
1110 self.collaborators.clear();
1111 cx.notify();
1112 }
1113 }
1114
1115 pub fn is_read_only(&self) -> bool {
1116 match &self.client_state {
1117 ProjectClientState::Local { .. } => false,
1118 ProjectClientState::Remote {
1119 sharing_has_stopped,
1120 ..
1121 } => *sharing_has_stopped,
1122 }
1123 }
1124
1125 pub fn is_local(&self) -> bool {
1126 match &self.client_state {
1127 ProjectClientState::Local { .. } => true,
1128 ProjectClientState::Remote { .. } => false,
1129 }
1130 }
1131
1132 pub fn is_remote(&self) -> bool {
1133 !self.is_local()
1134 }
1135
1136 pub fn create_buffer(
1137 &mut self,
1138 text: &str,
1139 language: Option<Arc<Language>>,
1140 cx: &mut ModelContext<Self>,
1141 ) -> Result<ModelHandle<Buffer>> {
1142 if self.is_remote() {
1143 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1144 }
1145
1146 let buffer = cx.add_model(|cx| {
1147 Buffer::new(self.replica_id(), text, cx)
1148 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1149 });
1150 self.register_buffer(&buffer, cx)?;
1151 Ok(buffer)
1152 }
1153
1154 pub fn open_path(
1155 &mut self,
1156 path: impl Into<ProjectPath>,
1157 cx: &mut ModelContext<Self>,
1158 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1159 let task = self.open_buffer(path, cx);
1160 cx.spawn_weak(|_, cx| async move {
1161 let buffer = task.await?;
1162 let project_entry_id = buffer
1163 .read_with(&cx, |buffer, cx| {
1164 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1165 })
1166 .ok_or_else(|| anyhow!("no project entry"))?;
1167 Ok((project_entry_id, buffer.into()))
1168 })
1169 }
1170
1171 pub fn open_local_buffer(
1172 &mut self,
1173 abs_path: impl AsRef<Path>,
1174 cx: &mut ModelContext<Self>,
1175 ) -> Task<Result<ModelHandle<Buffer>>> {
1176 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1177 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1178 } else {
1179 Task::ready(Err(anyhow!("no such path")))
1180 }
1181 }
1182
    /// Opens (or returns an already-open) buffer for the given project path.
    ///
    /// Concurrent opens of the same path are coalesced: the first caller
    /// kicks off the load (local or remote depending on the worktree's
    /// kind) and later callers wait on a shared watch channel for the same
    /// result.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait for the load (whether started by this call or an earlier one)
        // to publish its result on the watch channel.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        // The shared error is an `Arc`; re-wrap it so every
                        // waiter gets an owned error.
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1243
1244 fn open_local_buffer_internal(
1245 &mut self,
1246 path: &Arc<Path>,
1247 worktree: &ModelHandle<Worktree>,
1248 cx: &mut ModelContext<Self>,
1249 ) -> Task<Result<ModelHandle<Buffer>>> {
1250 let load_buffer = worktree.update(cx, |worktree, cx| {
1251 let worktree = worktree.as_local_mut().unwrap();
1252 worktree.load_buffer(path, cx)
1253 });
1254 cx.spawn(|this, mut cx| async move {
1255 let buffer = load_buffer.await?;
1256 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1257 Ok(buffer)
1258 })
1259 }
1260
1261 fn open_remote_buffer_internal(
1262 &mut self,
1263 path: &Arc<Path>,
1264 worktree: &ModelHandle<Worktree>,
1265 cx: &mut ModelContext<Self>,
1266 ) -> Task<Result<ModelHandle<Buffer>>> {
1267 let rpc = self.client.clone();
1268 let project_id = self.remote_id().unwrap();
1269 let remote_worktree_id = worktree.read(cx).id();
1270 let path = path.clone();
1271 let path_string = path.to_string_lossy().to_string();
1272 cx.spawn(|this, mut cx| async move {
1273 let response = rpc
1274 .request(proto::OpenBufferByPath {
1275 project_id,
1276 worktree_id: remote_worktree_id.to_proto(),
1277 path: path_string,
1278 })
1279 .await?;
1280 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1281 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1282 .await
1283 })
1284 }
1285
    /// Opens a buffer for a file URI supplied by a language server (for
    /// example, a go-to-definition target outside every open worktree).
    ///
    /// If no local worktree contains the path, an invisible worktree is
    /// created for it and the originating server is associated with that
    /// new worktree so follow-up LSP requests for the buffer keep working.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                // The new worktree is rooted at the file itself, so the
                // worktree-relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1324
1325 pub fn open_buffer_by_id(
1326 &mut self,
1327 id: u64,
1328 cx: &mut ModelContext<Self>,
1329 ) -> Task<Result<ModelHandle<Buffer>>> {
1330 if let Some(buffer) = self.buffer_for_id(id, cx) {
1331 Task::ready(Ok(buffer))
1332 } else if self.is_local() {
1333 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1334 } else if let Some(project_id) = self.remote_id() {
1335 let request = self
1336 .client
1337 .request(proto::OpenBufferById { project_id, id });
1338 cx.spawn(|this, mut cx| async move {
1339 let buffer = request
1340 .await?
1341 .buffer
1342 .ok_or_else(|| anyhow!("invalid buffer"))?;
1343 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1344 .await
1345 })
1346 } else {
1347 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1348 }
1349 }
1350
    /// Saves `buffer` under a new absolute path, re-homing it into a
    /// (possibly newly created) local worktree.
    ///
    /// The buffer is first unregistered from its language server under its
    /// old path, then saved at the new location, and finally has a language
    /// reassigned and is re-registered — the new path may imply a different
    /// language and server entirely.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1382
1383 pub fn get_open_buffer(
1384 &mut self,
1385 path: &ProjectPath,
1386 cx: &mut ModelContext<Self>,
1387 ) -> Option<ModelHandle<Buffer>> {
1388 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1389 self.opened_buffers.values().find_map(|buffer| {
1390 let buffer = buffer.upgrade(cx)?;
1391 let file = File::from_dyn(buffer.read(cx).file())?;
1392 if file.worktree == worktree && file.path() == &path.path {
1393 Some(buffer)
1394 } else {
1395 None
1396 }
1397 })
1398 }
1399
    /// Records `buffer` in the open-buffer table and wires it into
    /// language services and collaboration.
    ///
    /// Shared or remote projects keep a strong handle (collaborators may
    /// still need the buffer); otherwise only a weak one is kept.
    /// Operations that were queued while the buffer was loading are
    /// applied now.
    ///
    /// # Errors
    /// Fails if a live buffer with the same remote id is already
    /// registered, or if applying queued operations fails.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                // A dead weak handle is fine to replace; a live one means a
                // duplicate registration.
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer is released, tell its language server that the
        // document was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1457
1458 fn register_buffer_with_language_server(
1459 &mut self,
1460 buffer_handle: &ModelHandle<Buffer>,
1461 cx: &mut ModelContext<Self>,
1462 ) {
1463 let buffer = buffer_handle.read(cx);
1464 let buffer_id = buffer.remote_id();
1465 if let Some(file) = File::from_dyn(buffer.file()) {
1466 if file.is_local() {
1467 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1468 let initial_snapshot = buffer.text_snapshot();
1469
1470 let mut language_server = None;
1471 let mut language_id = None;
1472 if let Some(language) = buffer.language() {
1473 let worktree_id = file.worktree_id(cx);
1474 if let Some(adapter) = language.lsp_adapter() {
1475 language_id = adapter.id_for_language(language.name().as_ref());
1476 language_server = self
1477 .language_servers
1478 .get(&(worktree_id, adapter.name()))
1479 .cloned();
1480 }
1481 }
1482
1483 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1484 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1485 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1486 .log_err();
1487 }
1488 }
1489
1490 if let Some((_, server)) = language_server {
1491 server
1492 .notify::<lsp::notification::DidOpenTextDocument>(
1493 lsp::DidOpenTextDocumentParams {
1494 text_document: lsp::TextDocumentItem::new(
1495 uri,
1496 language_id.unwrap_or_default(),
1497 0,
1498 initial_snapshot.text(),
1499 ),
1500 }
1501 .clone(),
1502 )
1503 .log_err();
1504 buffer_handle.update(cx, |buffer, cx| {
1505 buffer.set_completion_triggers(
1506 server
1507 .capabilities()
1508 .completion_provider
1509 .as_ref()
1510 .and_then(|provider| provider.trigger_characters.clone())
1511 .unwrap_or(Vec::new()),
1512 cx,
1513 )
1514 });
1515 self.buffer_snapshots
1516 .insert(buffer_id, vec![(0, initial_snapshot)]);
1517 }
1518 }
1519 }
1520 }
1521
    /// Detaches `buffer` from language services before it moves to a new
    /// path.
    ///
    /// Clears the buffer's diagnostics, discards the snapshot history used
    /// for incremental LSP sync, and notifies the server that the document
    /// at `old_path` was closed.
    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }
1544
    /// Reacts to buffer events, keeping collaborators and language servers
    /// in sync.
    ///
    /// * `Operation` — forwards the operation over RPC when the project is
    ///   shared.
    /// * `Edited` — sends an incremental `textDocument/didChange` to the
    ///   buffer's language server based on the last snapshot sent.
    /// * `Saved` — sends `textDocument/didSave` to every server in the
    ///   buffer's worktree.
    ///
    /// The `Option<()>` return carries no meaning; it exists only to allow
    /// `?`-style early exits when a prerequisite (server, file, snapshot
    /// history) is missing.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Convert the edits since the last reported snapshot into
                // LSP content changes. Each change's range starts at the
                // edit's position in the new text and spans the length of
                // the replaced (old) text.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Remember the snapshot we just reported so the next edit
                // diffs against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1632
1633 fn language_servers_for_worktree(
1634 &self,
1635 worktree_id: WorktreeId,
1636 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1637 self.language_servers.iter().filter_map(
1638 move |((language_server_worktree_id, _), server)| {
1639 if *language_server_worktree_id == worktree_id {
1640 Some(server)
1641 } else {
1642 None
1643 }
1644 },
1645 )
1646 }
1647
1648 fn assign_language_to_buffer(
1649 &mut self,
1650 buffer: &ModelHandle<Buffer>,
1651 cx: &mut ModelContext<Self>,
1652 ) -> Option<()> {
1653 // If the buffer has a language, set it and start the language server if we haven't already.
1654 let full_path = buffer.read(cx).file()?.full_path(cx);
1655 let language = self.languages.select_language(&full_path)?;
1656 buffer.update(cx, |buffer, cx| {
1657 buffer.set_language(Some(language.clone()), cx);
1658 });
1659
1660 let file = File::from_dyn(buffer.read(cx).file())?;
1661 let worktree = file.worktree.read(cx).as_local()?;
1662 let worktree_id = worktree.id();
1663 let worktree_abs_path = worktree.abs_path().clone();
1664 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1665
1666 None
1667 }
1668
    /// Ensures a language server is running for `language` in `worktree_id`.
    ///
    /// If a server was already started for this `(worktree, adapter)` key,
    /// this is a no-op. Otherwise the server is spawned and initialized
    /// asynchronously; once ready, its notification/request handlers are
    /// wired into the project, its status is tracked, guests are informed
    /// (when the project is shared), and every already-open buffer in the
    /// worktree with a matching language is opened on the server.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                // Startup and initialization run asynchronously; the weak
                // handle upgrade below bails out if the project is dropped
                // in the meantime.
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer workspace-configuration requests from the
                    // project's shared language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits to buffers.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Surface work-progress notifications.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.shared_remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1881
1882 pub fn restart_language_servers_for_buffers(
1883 &mut self,
1884 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1885 cx: &mut ModelContext<Self>,
1886 ) -> Option<()> {
1887 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1888 .into_iter()
1889 .filter_map(|buffer| {
1890 let file = File::from_dyn(buffer.read(cx).file())?;
1891 let worktree = file.worktree.read(cx).as_local()?;
1892 let worktree_id = worktree.id();
1893 let worktree_abs_path = worktree.abs_path().clone();
1894 let full_path = file.full_path(cx);
1895 Some((worktree_id, worktree_abs_path, full_path))
1896 })
1897 .collect();
1898 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1899 let language = self.languages.select_language(&full_path)?;
1900 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1901 }
1902
1903 None
1904 }
1905
1906 fn restart_language_server(
1907 &mut self,
1908 worktree_id: WorktreeId,
1909 worktree_path: Arc<Path>,
1910 language: Arc<Language>,
1911 cx: &mut ModelContext<Self>,
1912 ) {
1913 let adapter = if let Some(adapter) = language.lsp_adapter() {
1914 adapter
1915 } else {
1916 return;
1917 };
1918 let key = (worktree_id, adapter.name());
1919 let server_to_shutdown = self.language_servers.remove(&key);
1920 self.started_language_servers.remove(&key);
1921 server_to_shutdown
1922 .as_ref()
1923 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1924 cx.spawn_weak(|this, mut cx| async move {
1925 if let Some(this) = this.upgrade(&cx) {
1926 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1927 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1928 shutdown_task.await;
1929 }
1930 }
1931
1932 this.update(&mut cx, |this, cx| {
1933 this.start_language_server(worktree_id, worktree_path, language, cx);
1934 });
1935 }
1936 })
1937 .detach();
1938 }
1939
    /// Handles a `textDocument/publishDiagnostics` notification.
    ///
    /// The adapter may preprocess the raw params first. Servers whose
    /// adapter exposes no disk-based-diagnostics progress token give no
    /// other signal that a diagnostics pass happened, so for those we emit
    /// started/finished events (and broadcast them to guests) around each
    /// publish.
    fn on_lsp_diagnostics_published(
        &mut self,
        server_id: usize,
        mut params: lsp::PublishDiagnosticsParams,
        adapter: &Arc<dyn LspAdapter>,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        adapter.process_diagnostics(&mut params);
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_started(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                    proto::LspDiskBasedDiagnosticsUpdating {},
                ),
            );
        }
        self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
            .log_err();
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_finished(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                    proto::LspDiskBasedDiagnosticsUpdated {},
                ),
            );
        }
    }
1970
    /// Routes a `$/progress` notification from a language server.
    ///
    /// Progress carrying the adapter's disk-based-diagnostics token is
    /// folded into a counter so only the outermost Begin/End pair emits
    /// started/finished events; every other token is reported as generic
    /// work status. Both kinds are mirrored to guests when the project is
    /// shared. Numeric progress tokens are not supported and are skipped.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        // Ignore progress from servers we no longer track (e.g. after a
        // restart removed the status entry).
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // NOTE(review): an `End` without a matching `Begin` would
                    // underflow this counter — confirm servers always pair them.
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2065
2066 fn on_lsp_work_start(
2067 &mut self,
2068 language_server_id: usize,
2069 token: String,
2070 cx: &mut ModelContext<Self>,
2071 ) {
2072 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2073 status.pending_work.insert(
2074 token,
2075 LanguageServerProgress {
2076 message: None,
2077 percentage: None,
2078 last_update_at: Instant::now(),
2079 },
2080 );
2081 cx.notify();
2082 }
2083 }
2084
2085 fn on_lsp_work_progress(
2086 &mut self,
2087 language_server_id: usize,
2088 token: String,
2089 progress: LanguageServerProgress,
2090 cx: &mut ModelContext<Self>,
2091 ) {
2092 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2093 status.pending_work.insert(token, progress);
2094 cx.notify();
2095 }
2096 }
2097
2098 fn on_lsp_work_end(
2099 &mut self,
2100 language_server_id: usize,
2101 token: String,
2102 cx: &mut ModelContext<Self>,
2103 ) {
2104 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2105 status.pending_work.remove(&token);
2106 cx.notify();
2107 }
2108 }
2109
2110 async fn on_lsp_workspace_edit(
2111 this: WeakModelHandle<Self>,
2112 params: lsp::ApplyWorkspaceEditParams,
2113 server_id: usize,
2114 adapter: Arc<dyn LspAdapter>,
2115 language_server: Arc<LanguageServer>,
2116 mut cx: AsyncAppContext,
2117 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2118 let this = this
2119 .upgrade(&cx)
2120 .ok_or_else(|| anyhow!("project project closed"))?;
2121 let transaction = Self::deserialize_workspace_edit(
2122 this.clone(),
2123 params.edit,
2124 true,
2125 adapter.clone(),
2126 language_server.clone(),
2127 &mut cx,
2128 )
2129 .await
2130 .log_err();
2131 this.update(&mut cx, |this, _| {
2132 if let Some(transaction) = transaction {
2133 this.last_workspace_edits_by_language_server
2134 .insert(server_id, transaction);
2135 }
2136 });
2137 Ok(lsp::ApplyWorkspaceEditResponse {
2138 applied: true,
2139 failed_change: None,
2140 failure_reason: None,
2141 })
2142 }
2143
2144 fn broadcast_language_server_update(
2145 &self,
2146 language_server_id: usize,
2147 event: proto::update_language_server::Variant,
2148 ) {
2149 if let Some(project_id) = self.shared_remote_id() {
2150 self.client
2151 .send(proto::UpdateLanguageServer {
2152 project_id,
2153 language_server_id: language_server_id as u64,
2154 variant: Some(event),
2155 })
2156 .log_err();
2157 }
2158 }
2159
2160 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2161 for (_, server) in self.language_servers.values() {
2162 server
2163 .notify::<lsp::notification::DidChangeConfiguration>(
2164 lsp::DidChangeConfigurationParams {
2165 settings: settings.clone(),
2166 },
2167 )
2168 .ok();
2169 }
2170 *self.language_server_settings.lock() = settings;
2171 }
2172
    /// Returns an iterator over the status of every tracked language server.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
2178
2179 pub fn update_diagnostics(
2180 &mut self,
2181 params: lsp::PublishDiagnosticsParams,
2182 disk_based_sources: &[&str],
2183 cx: &mut ModelContext<Self>,
2184 ) -> Result<()> {
2185 let abs_path = params
2186 .uri
2187 .to_file_path()
2188 .map_err(|_| anyhow!("URI is not a file"))?;
2189 let mut diagnostics = Vec::default();
2190 let mut primary_diagnostic_group_ids = HashMap::default();
2191 let mut sources_by_group_id = HashMap::default();
2192 let mut supporting_diagnostics = HashMap::default();
2193 for diagnostic in ¶ms.diagnostics {
2194 let source = diagnostic.source.as_ref();
2195 let code = diagnostic.code.as_ref().map(|code| match code {
2196 lsp::NumberOrString::Number(code) => code.to_string(),
2197 lsp::NumberOrString::String(code) => code.clone(),
2198 });
2199 let range = range_from_lsp(diagnostic.range);
2200 let is_supporting = diagnostic
2201 .related_information
2202 .as_ref()
2203 .map_or(false, |infos| {
2204 infos.iter().any(|info| {
2205 primary_diagnostic_group_ids.contains_key(&(
2206 source,
2207 code.clone(),
2208 range_from_lsp(info.location.range),
2209 ))
2210 })
2211 });
2212
2213 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2214 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2215 });
2216
2217 if is_supporting {
2218 supporting_diagnostics.insert(
2219 (source, code.clone(), range),
2220 (diagnostic.severity, is_unnecessary),
2221 );
2222 } else {
2223 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2224 let is_disk_based = source.map_or(false, |source| {
2225 disk_based_sources.contains(&source.as_str())
2226 });
2227
2228 sources_by_group_id.insert(group_id, source);
2229 primary_diagnostic_group_ids
2230 .insert((source, code.clone(), range.clone()), group_id);
2231
2232 diagnostics.push(DiagnosticEntry {
2233 range,
2234 diagnostic: Diagnostic {
2235 code: code.clone(),
2236 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2237 message: diagnostic.message.clone(),
2238 group_id,
2239 is_primary: true,
2240 is_valid: true,
2241 is_disk_based,
2242 is_unnecessary,
2243 },
2244 });
2245 if let Some(infos) = &diagnostic.related_information {
2246 for info in infos {
2247 if info.location.uri == params.uri && !info.message.is_empty() {
2248 let range = range_from_lsp(info.location.range);
2249 diagnostics.push(DiagnosticEntry {
2250 range,
2251 diagnostic: Diagnostic {
2252 code: code.clone(),
2253 severity: DiagnosticSeverity::INFORMATION,
2254 message: info.message.clone(),
2255 group_id,
2256 is_primary: false,
2257 is_valid: true,
2258 is_disk_based,
2259 is_unnecessary: false,
2260 },
2261 });
2262 }
2263 }
2264 }
2265 }
2266 }
2267
2268 for entry in &mut diagnostics {
2269 let diagnostic = &mut entry.diagnostic;
2270 if !diagnostic.is_primary {
2271 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2272 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2273 source,
2274 diagnostic.code.clone(),
2275 entry.range.clone(),
2276 )) {
2277 if let Some(severity) = severity {
2278 diagnostic.severity = severity;
2279 }
2280 diagnostic.is_unnecessary = is_unnecessary;
2281 }
2282 }
2283 }
2284
2285 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2286 Ok(())
2287 }
2288
2289 pub fn update_diagnostic_entries(
2290 &mut self,
2291 abs_path: PathBuf,
2292 version: Option<i32>,
2293 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2294 cx: &mut ModelContext<Project>,
2295 ) -> Result<(), anyhow::Error> {
2296 let (worktree, relative_path) = self
2297 .find_local_worktree(&abs_path, cx)
2298 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2299 if !worktree.read(cx).is_visible() {
2300 return Ok(());
2301 }
2302
2303 let project_path = ProjectPath {
2304 worktree_id: worktree.read(cx).id(),
2305 path: relative_path.into(),
2306 };
2307 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2308 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2309 }
2310
2311 let updated = worktree.update(cx, |worktree, cx| {
2312 worktree
2313 .as_local_mut()
2314 .ok_or_else(|| anyhow!("not a local worktree"))?
2315 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2316 })?;
2317 if updated {
2318 cx.emit(Event::DiagnosticsUpdated(project_path));
2319 }
2320 Ok(())
2321 }
2322
    /// Applies a new set of diagnostics to an open buffer, translating and
    /// clipping their ranges so they are valid against the buffer's current
    /// contents before installing them as a `DiagnosticSet`.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break ordering for diagnostics that share a range: primaries
        // first, then by disk-based flag, severity, and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // Resolve the buffer snapshot matching the LSP document version these
        // diagnostics were computed against.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending and end descending so that enclosing
        // ranges precede the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            // Clip to valid positions within the snapshot.
            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character, preferring to extend
            // forward and falling back to extending backward at line ends.
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2391
2392 pub fn reload_buffers(
2393 &self,
2394 buffers: HashSet<ModelHandle<Buffer>>,
2395 push_to_history: bool,
2396 cx: &mut ModelContext<Self>,
2397 ) -> Task<Result<ProjectTransaction>> {
2398 let mut local_buffers = Vec::new();
2399 let mut remote_buffers = None;
2400 for buffer_handle in buffers {
2401 let buffer = buffer_handle.read(cx);
2402 if buffer.is_dirty() {
2403 if let Some(file) = File::from_dyn(buffer.file()) {
2404 if file.is_local() {
2405 local_buffers.push(buffer_handle);
2406 } else {
2407 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2408 }
2409 }
2410 }
2411 }
2412
2413 let remote_buffers = self.remote_id().zip(remote_buffers);
2414 let client = self.client.clone();
2415
2416 cx.spawn(|this, mut cx| async move {
2417 let mut project_transaction = ProjectTransaction::default();
2418
2419 if let Some((project_id, remote_buffers)) = remote_buffers {
2420 let response = client
2421 .request(proto::ReloadBuffers {
2422 project_id,
2423 buffer_ids: remote_buffers
2424 .iter()
2425 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2426 .collect(),
2427 })
2428 .await?
2429 .transaction
2430 .ok_or_else(|| anyhow!("missing transaction"))?;
2431 project_transaction = this
2432 .update(&mut cx, |this, cx| {
2433 this.deserialize_project_transaction(response, push_to_history, cx)
2434 })
2435 .await?;
2436 }
2437
2438 for buffer in local_buffers {
2439 let transaction = buffer
2440 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2441 .await?;
2442 buffer.update(&mut cx, |buffer, cx| {
2443 if let Some(transaction) = transaction {
2444 if !push_to_history {
2445 buffer.forget_transaction(transaction.id);
2446 }
2447 project_transaction.0.insert(cx.handle(), transaction);
2448 }
2449 });
2450 }
2451
2452 Ok(project_transaction)
2453 })
2454 }
2455
2456 pub fn format(
2457 &self,
2458 buffers: HashSet<ModelHandle<Buffer>>,
2459 push_to_history: bool,
2460 cx: &mut ModelContext<Project>,
2461 ) -> Task<Result<ProjectTransaction>> {
2462 let mut local_buffers = Vec::new();
2463 let mut remote_buffers = None;
2464 for buffer_handle in buffers {
2465 let buffer = buffer_handle.read(cx);
2466 if let Some(file) = File::from_dyn(buffer.file()) {
2467 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2468 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2469 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2470 }
2471 } else {
2472 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2473 }
2474 } else {
2475 return Task::ready(Ok(Default::default()));
2476 }
2477 }
2478
2479 let remote_buffers = self.remote_id().zip(remote_buffers);
2480 let client = self.client.clone();
2481
2482 cx.spawn(|this, mut cx| async move {
2483 let mut project_transaction = ProjectTransaction::default();
2484
2485 if let Some((project_id, remote_buffers)) = remote_buffers {
2486 let response = client
2487 .request(proto::FormatBuffers {
2488 project_id,
2489 buffer_ids: remote_buffers
2490 .iter()
2491 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2492 .collect(),
2493 })
2494 .await?
2495 .transaction
2496 .ok_or_else(|| anyhow!("missing transaction"))?;
2497 project_transaction = this
2498 .update(&mut cx, |this, cx| {
2499 this.deserialize_project_transaction(response, push_to_history, cx)
2500 })
2501 .await?;
2502 }
2503
2504 for (buffer, buffer_abs_path, language_server) in local_buffers {
2505 let text_document = lsp::TextDocumentIdentifier::new(
2506 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2507 );
2508 let capabilities = &language_server.capabilities();
2509 let tab_size = cx.update(|cx| {
2510 let language_name = buffer.read(cx).language().map(|language| language.name());
2511 cx.global::<Settings>().tab_size(language_name.as_deref())
2512 });
2513 let lsp_edits = if capabilities
2514 .document_formatting_provider
2515 .as_ref()
2516 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2517 {
2518 language_server
2519 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2520 text_document,
2521 options: lsp::FormattingOptions {
2522 tab_size,
2523 insert_spaces: true,
2524 insert_final_newline: Some(true),
2525 ..Default::default()
2526 },
2527 work_done_progress_params: Default::default(),
2528 })
2529 .await?
2530 } else if capabilities
2531 .document_range_formatting_provider
2532 .as_ref()
2533 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2534 {
2535 let buffer_start = lsp::Position::new(0, 0);
2536 let buffer_end =
2537 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2538 language_server
2539 .request::<lsp::request::RangeFormatting>(
2540 lsp::DocumentRangeFormattingParams {
2541 text_document,
2542 range: lsp::Range::new(buffer_start, buffer_end),
2543 options: lsp::FormattingOptions {
2544 tab_size: 4,
2545 insert_spaces: true,
2546 insert_final_newline: Some(true),
2547 ..Default::default()
2548 },
2549 work_done_progress_params: Default::default(),
2550 },
2551 )
2552 .await?
2553 } else {
2554 continue;
2555 };
2556
2557 if let Some(lsp_edits) = lsp_edits {
2558 let edits = this
2559 .update(&mut cx, |this, cx| {
2560 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2561 })
2562 .await?;
2563 buffer.update(&mut cx, |buffer, cx| {
2564 buffer.finalize_last_transaction();
2565 buffer.start_transaction();
2566 for (range, text) in edits {
2567 buffer.edit([(range, text)], cx);
2568 }
2569 if buffer.end_transaction(cx).is_some() {
2570 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2571 if !push_to_history {
2572 buffer.forget_transaction(transaction.id);
2573 }
2574 project_transaction.0.insert(cx.handle(), transaction);
2575 }
2576 });
2577 }
2578 }
2579
2580 Ok(project_transaction)
2581 })
2582 }
2583
2584 pub fn definition<T: ToPointUtf16>(
2585 &self,
2586 buffer: &ModelHandle<Buffer>,
2587 position: T,
2588 cx: &mut ModelContext<Self>,
2589 ) -> Task<Result<Vec<Location>>> {
2590 let position = position.to_point_utf16(buffer.read(cx));
2591 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2592 }
2593
2594 pub fn references<T: ToPointUtf16>(
2595 &self,
2596 buffer: &ModelHandle<Buffer>,
2597 position: T,
2598 cx: &mut ModelContext<Self>,
2599 ) -> Task<Result<Vec<Location>>> {
2600 let position = position.to_point_utf16(buffer.read(cx));
2601 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2602 }
2603
2604 pub fn document_highlights<T: ToPointUtf16>(
2605 &self,
2606 buffer: &ModelHandle<Buffer>,
2607 position: T,
2608 cx: &mut ModelContext<Self>,
2609 ) -> Task<Result<Vec<DocumentHighlight>>> {
2610 let position = position.to_point_utf16(buffer.read(cx));
2611
2612 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2613 }
2614
    /// Searches for workspace symbols matching `query`.
    ///
    /// Locally, this fans a `workspace/symbol` request out to every running
    /// language server and merges the responses; for a remote project the
    /// query is proxied to the host. Returns an empty list if the project is
    /// neither local nor connected to a remote host.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    // Errors from individual servers are logged and treated
                    // as empty responses so one failure doesn't sink the
                    // whole search.
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                // Await all servers' responses concurrently.
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer resolving the symbol's path to one of
                            // this project's worktrees; otherwise keep the
                            // server's worktree and a path relative to its
                            // root.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Symbols that fail to deserialize are logged and
                        // skipped.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2717
2718 pub fn open_buffer_for_symbol(
2719 &mut self,
2720 symbol: &Symbol,
2721 cx: &mut ModelContext<Self>,
2722 ) -> Task<Result<ModelHandle<Buffer>>> {
2723 if self.is_local() {
2724 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2725 symbol.source_worktree_id,
2726 symbol.language_server_name.clone(),
2727 )) {
2728 server.clone()
2729 } else {
2730 return Task::ready(Err(anyhow!(
2731 "language server for worktree and language not found"
2732 )));
2733 };
2734
2735 let worktree_abs_path = if let Some(worktree_abs_path) = self
2736 .worktree_for_id(symbol.worktree_id, cx)
2737 .and_then(|worktree| worktree.read(cx).as_local())
2738 .map(|local_worktree| local_worktree.abs_path())
2739 {
2740 worktree_abs_path
2741 } else {
2742 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2743 };
2744 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2745 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2746 uri
2747 } else {
2748 return Task::ready(Err(anyhow!("invalid symbol path")));
2749 };
2750
2751 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2752 } else if let Some(project_id) = self.remote_id() {
2753 let request = self.client.request(proto::OpenBufferForSymbol {
2754 project_id,
2755 symbol: Some(serialize_symbol(symbol)),
2756 });
2757 cx.spawn(|this, mut cx| async move {
2758 let response = request.await?;
2759 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2760 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2761 .await
2762 })
2763 } else {
2764 Task::ready(Err(anyhow!("project does not have a remote id")))
2765 }
2766 }
2767
    /// Requests completions at `position` in the given buffer.
    ///
    /// For local buffers this sends a `textDocument/completion` request to
    /// the buffer's language server and converts the items into `Completion`
    /// values anchored in the buffer; for remote buffers the request is
    /// proxied to the host. Returns an empty list for buffers without a file
    /// or language server.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The response may be a flat array or a (possibly incomplete)
                // list; either way we only need the items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily-computed word range at the cursor, shared by all
                    // items that don't supply their own edit range.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has caught up to the host's buffer
                // version before resolving the returned anchors.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2925
    /// Resolves a completion item and applies any `additionalTextEdits` the
    /// language server attaches to it (e.g. auto-imports).
    ///
    /// Returns the transaction containing those edits, or `None` when the
    /// resolved completion carries no additional edits. When
    /// `push_to_history` is false the transaction is removed from the
    /// buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Ask the server to fill in the completion's lazily-computed
                // fields, including additional_text_edits.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        // Only yield a transaction if the edits actually
                        // changed the buffer.
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to replicate to this buffer
                    // before surfacing the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3007
3008 pub fn code_actions<T: Clone + ToOffset>(
3009 &self,
3010 buffer_handle: &ModelHandle<Buffer>,
3011 range: Range<T>,
3012 cx: &mut ModelContext<Self>,
3013 ) -> Task<Result<Vec<CodeAction>>> {
3014 let buffer_handle = buffer_handle.clone();
3015 let buffer = buffer_handle.read(cx);
3016 let snapshot = buffer.snapshot();
3017 let relevant_diagnostics = snapshot
3018 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3019 .map(|entry| entry.to_lsp_diagnostic_stub())
3020 .collect();
3021 let buffer_id = buffer.remote_id();
3022 let worktree;
3023 let buffer_abs_path;
3024 if let Some(file) = File::from_dyn(buffer.file()) {
3025 worktree = file.worktree.clone();
3026 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3027 } else {
3028 return Task::ready(Ok(Default::default()));
3029 };
3030 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3031
3032 if worktree.read(cx).as_local().is_some() {
3033 let buffer_abs_path = buffer_abs_path.unwrap();
3034 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3035 {
3036 server.clone()
3037 } else {
3038 return Task::ready(Ok(Default::default()));
3039 };
3040
3041 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3042 cx.foreground().spawn(async move {
3043 if !lang_server.capabilities().code_action_provider.is_some() {
3044 return Ok(Default::default());
3045 }
3046
3047 Ok(lang_server
3048 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3049 text_document: lsp::TextDocumentIdentifier::new(
3050 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3051 ),
3052 range: lsp_range,
3053 work_done_progress_params: Default::default(),
3054 partial_result_params: Default::default(),
3055 context: lsp::CodeActionContext {
3056 diagnostics: relevant_diagnostics,
3057 only: Some(vec![
3058 lsp::CodeActionKind::QUICKFIX,
3059 lsp::CodeActionKind::REFACTOR,
3060 lsp::CodeActionKind::REFACTOR_EXTRACT,
3061 lsp::CodeActionKind::SOURCE,
3062 ]),
3063 },
3064 })
3065 .await?
3066 .unwrap_or_default()
3067 .into_iter()
3068 .filter_map(|entry| {
3069 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3070 Some(CodeAction {
3071 range: range.clone(),
3072 lsp_action,
3073 })
3074 } else {
3075 None
3076 }
3077 })
3078 .collect())
3079 })
3080 } else if let Some(project_id) = self.remote_id() {
3081 let rpc = self.client.clone();
3082 let version = buffer.version();
3083 cx.spawn_weak(|_, mut cx| async move {
3084 let response = rpc
3085 .request(proto::GetCodeActions {
3086 project_id,
3087 buffer_id,
3088 start: Some(language::proto::serialize_anchor(&range.start)),
3089 end: Some(language::proto::serialize_anchor(&range.end)),
3090 version: serialize_version(&version),
3091 })
3092 .await?;
3093
3094 buffer_handle
3095 .update(&mut cx, |buffer, _| {
3096 buffer.wait_for_version(deserialize_version(response.version))
3097 })
3098 .await;
3099
3100 response
3101 .actions
3102 .into_iter()
3103 .map(language::proto::deserialize_code_action)
3104 .collect()
3105 })
3106 } else {
3107 Task::ready(Ok(Default::default()))
3108 }
3109 }
3110
    /// Applies a code action to the project.
    ///
    /// Locally this first ensures the action's workspace edit is available —
    /// either by resolving the action through the server (when it carried
    /// lazily-resolved `data`) or by re-requesting and matching it by title —
    /// and then applies the edit, or executes the action's command and
    /// collects whatever workspace edits the server sent while the command
    /// ran. Remotely the action is sent to the host for application.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports lazy resolution: refresh its range
                    // and ask the server to resolve the full action.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve support: re-request code actions and match
                    // the stale action by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any previously-recorded edits so we only pick up
                    // the ones produced while this command runs.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    // Return whatever workspace edits the server applied via
                    // `workspace/applyEdit` during the command.
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3206
    /// Applies an LSP `WorkspaceEdit` to the local project, returning the
    /// per-buffer transactions it produced.
    ///
    /// Resource operations (create/rename/delete) are applied through the
    /// project's `fs`. Text edits are applied by opening the target buffer via
    /// the server-provided URI and editing it within a single buffer
    /// transaction. When `push_to_history` is false, each transaction is
    /// forgotten right after being captured, so it does not remain on the
    /// buffer's undo stack.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<dyn LspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        let mut operations = Vec::new();
        // Per the LSP spec, `document_changes` is preferred over the plain
        // `changes` map when both are present.
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            // Normalize the legacy `changes` map into the same operation form.
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, so a check against "/" looks like it may
                    // never match — confirm how directory URIs are detected.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same `ends_with("/")` caveat as above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Resolve the edited document's URI to a buffer, opening
                    // it through the language server that produced the edit.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                lsp_adapter.clone(),
                                language_server.clone(),
                                cx,
                            )
                        })
                        .await?;

                    // Convert LSP edits (plain or annotated) into buffer edits,
                    // validating against the document version the server saw.
                    let edits = this
                        .update(cx, |this, cx| {
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    // Group all edits for this document into one transaction so
                    // they undo together; drop it from history if requested.
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
3337
3338 pub fn prepare_rename<T: ToPointUtf16>(
3339 &self,
3340 buffer: ModelHandle<Buffer>,
3341 position: T,
3342 cx: &mut ModelContext<Self>,
3343 ) -> Task<Result<Option<Range<Anchor>>>> {
3344 let position = position.to_point_utf16(buffer.read(cx));
3345 self.request_lsp(buffer, PrepareRename { position }, cx)
3346 }
3347
3348 pub fn perform_rename<T: ToPointUtf16>(
3349 &self,
3350 buffer: ModelHandle<Buffer>,
3351 position: T,
3352 new_name: String,
3353 push_to_history: bool,
3354 cx: &mut ModelContext<Self>,
3355 ) -> Task<Result<ProjectTransaction>> {
3356 let position = position.to_point_utf16(buffer.read(cx));
3357 self.request_lsp(
3358 buffer,
3359 PerformRename {
3360 position,
3361 new_name,
3362 push_to_history,
3363 },
3364 cx,
3365 )
3366 }
3367
    /// Runs `query` across the project, returning the matching anchor ranges
    /// grouped by buffer.
    ///
    /// Local projects use a three-stage pipeline:
    /// 1. background workers scan the visible files of each local worktree and
    ///    forward paths whose on-disk contents might match (`query.detect`);
    /// 2. a foreground task searches already-open buffers and opens a buffer
    ///    for each candidate path, deduplicating through a shared set;
    /// 3. background workers search each buffer snapshot and collect ranges.
    ///
    /// Remote projects forward the query over RPC and deserialize the returned
    /// locations into local buffer handles.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            // One worker per CPU, but never more workers than paths.
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        // Split the flattened list of all visible paths into
                        // contiguous per-worker slices.
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        // Reused buffer for each file's absolute path.
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            // Skip snapshots entirely outside this
                                            // worker's slice of the path range.
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped: stop scanning.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    // Cheap on-disk pre-filter; unreadable
                                                    // files are treated as non-matching.
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            // Buffers already open are searched in-memory, which also covers
            // dirty buffers whose contents differ from disk.
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                // Shared set prevents re-searching a buffer that was already
                // open when a path match arrives for it.
                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                // One result map per worker; merged at the end to avoid
                // contention on a shared collection.
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: delegate the search to the host over RPC.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3570
    /// Dispatches an `LspCommand` for `buffer`: to the buffer's language
    /// server when the project is local, or over RPC when remote.
    ///
    /// Returns a ready task holding `R::Response::default()` when the buffer
    /// has no local file/language server, when the server lacks the required
    /// capability, or when the project is neither local nor remote.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                // Build the LSP params synchronously, while the buffer and
                // file are still borrowed from `cx`.
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Silently no-op when the server doesn't support this request.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3613
3614 pub fn find_or_create_local_worktree(
3615 &mut self,
3616 abs_path: impl AsRef<Path>,
3617 visible: bool,
3618 cx: &mut ModelContext<Self>,
3619 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3620 let abs_path = abs_path.as_ref();
3621 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3622 Task::ready(Ok((tree.clone(), relative_path.into())))
3623 } else {
3624 let worktree = self.create_local_worktree(abs_path, visible, cx);
3625 cx.foreground()
3626 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3627 }
3628 }
3629
3630 pub fn find_local_worktree(
3631 &self,
3632 abs_path: &Path,
3633 cx: &AppContext,
3634 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3635 for tree in self.worktrees(cx) {
3636 if let Some(relative_path) = tree
3637 .read(cx)
3638 .as_local()
3639 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3640 {
3641 return Some((tree.clone(), relative_path.into()));
3642 }
3643 }
3644 None
3645 }
3646
3647 pub fn is_shared(&self) -> bool {
3648 match &self.client_state {
3649 ProjectClientState::Local { is_shared, .. } => *is_shared,
3650 ProjectClientState::Remote { .. } => false,
3651 }
3652 }
3653
    /// Creates (or joins an in-flight creation of) a local worktree rooted at
    /// `abs_path`.
    ///
    /// Concurrent calls for the same path are deduplicated through
    /// `loading_local_worktrees`, which holds a shared future per path. After
    /// the worktree is added, it is shared or registered with the server
    /// depending on whether the project is currently shared — retrying until
    /// one of the two succeeds, since sharing state can change concurrently.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Remove the in-flight entry before inspecting the
                        // result, so failures don't get cached forever.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let remote_project_id = project.update(&mut cx, |project, cx| {
                            project.add_worktree(&worktree, cx);
                            project.remote_id()
                        });

                        if let Some(project_id) = remote_project_id {
                            // Because sharing is async, we may have *unshared* the project by the time it completes,
                            // in which case we need to register the worktree instead.
                            loop {
                                if project.read_with(&cx, |project, _| project.is_shared()) {
                                    // Share failure falls through and re-checks
                                    // the shared state on the next iteration.
                                    if worktree
                                        .update(&mut cx, |worktree, cx| {
                                            worktree.as_local_mut().unwrap().share(project_id, cx)
                                        })
                                        .await
                                        .is_ok()
                                    {
                                        break;
                                    }
                                } else {
                                    worktree
                                        .update(&mut cx, |worktree, cx| {
                                            worktree
                                                .as_local_mut()
                                                .unwrap()
                                                .register(project_id, cx)
                                        })
                                        .await?;
                                    break;
                                }
                            }
                        }

                        Ok(worktree)
                    }
                    // The error is wrapped in `Arc` so the shared future's
                    // output can be cloned by every waiter.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3731
3732 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3733 self.worktrees.retain(|worktree| {
3734 if let Some(worktree) = worktree.upgrade(cx) {
3735 let id = worktree.read(cx).id();
3736 if id == id_to_remove {
3737 cx.emit(Event::WorktreeRemoved(id));
3738 false
3739 } else {
3740 true
3741 }
3742 } else {
3743 false
3744 }
3745 });
3746 cx.notify();
3747 }
3748
    /// Registers `worktree` with the project: wires up change notifications,
    /// and stores either a strong or weak handle to it.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        // Propagate any worktree change as a project notification.
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Local worktree events can rename or delete files that back open
            // buffers, so keep those buffers in sync.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // Shared, visible, or remote worktrees must be kept alive by the
        // project; otherwise hold a weak handle and prune it when the
        // worktree is released.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            cx.observe_release(&worktree, |this, _, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }
        cx.emit(Event::WorktreeAdded);
        cx.notify();
    }
3778
    /// Reconciles open buffers with the latest snapshot of a local worktree
    /// after it changed on disk.
    ///
    /// For each open buffer in this worktree, a fresh `File` is computed:
    /// by entry id when the entry still exists, else by path (the file was
    /// replaced), else a deleted placeholder with no entry id. Buffers whose
    /// absolute path changed are re-registered with the language server, and
    /// collaborators are told about the new file metadata.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only buffers backed by this worktree are affected.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer matching by stable entry id (survives renames).
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            // Entry id is gone but the path exists again: the
                            // file was recreated/replaced at the same path.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // File no longer exists on disk; keep the old
                            // path/mtime but clear the entry id.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Keep collaborators' view of the file up to date.
                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // Buffer was dropped; clean up its weak entry afterwards.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // A renamed buffer may now belong to a different language/server:
        // unregister under the old path, re-detect, and re-register.
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
3858
3859 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3860 let new_active_entry = entry.and_then(|project_path| {
3861 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3862 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3863 Some(entry.id)
3864 });
3865 if new_active_entry != self.active_entry {
3866 self.active_entry = new_active_entry;
3867 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3868 }
3869 }
3870
3871 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3872 self.language_server_statuses
3873 .values()
3874 .any(|status| status.pending_diagnostic_updates > 0)
3875 }
3876
3877 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3878 let mut summary = DiagnosticSummary::default();
3879 for (_, path_summary) in self.diagnostic_summaries(cx) {
3880 summary.error_count += path_summary.error_count;
3881 summary.warning_count += path_summary.warning_count;
3882 }
3883 summary
3884 }
3885
3886 pub fn diagnostic_summaries<'a>(
3887 &'a self,
3888 cx: &'a AppContext,
3889 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3890 self.worktrees(cx).flat_map(move |worktree| {
3891 let worktree = worktree.read(cx);
3892 let worktree_id = worktree.id();
3893 worktree
3894 .diagnostic_summaries()
3895 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3896 })
3897 }
3898
3899 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3900 if self
3901 .language_server_statuses
3902 .values()
3903 .map(|status| status.pending_diagnostic_updates)
3904 .sum::<isize>()
3905 == 1
3906 {
3907 cx.emit(Event::DiskBasedDiagnosticsStarted);
3908 }
3909 }
3910
3911 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3912 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3913 if self
3914 .language_server_statuses
3915 .values()
3916 .map(|status| status.pending_diagnostic_updates)
3917 .sum::<isize>()
3918 == 0
3919 {
3920 cx.emit(Event::DiskBasedDiagnosticsFinished);
3921 }
3922 }
3923
    /// The entry most recently activated via `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3927
3928 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3929 self.worktree_for_id(path.worktree_id, cx)?
3930 .read(cx)
3931 .entry_for_path(&path.path)
3932 .map(|entry| entry.id)
3933 }
3934
3935 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3936 let worktree = self.worktree_for_entry(entry_id, cx)?;
3937 let worktree = worktree.read(cx);
3938 let worktree_id = worktree.id();
3939 let path = worktree.entry_for_id(entry_id)?.path.clone();
3940 Some(ProjectPath { worktree_id, path })
3941 }
3942
3943 // RPC message handlers
3944
3945 async fn handle_request_join_project(
3946 this: ModelHandle<Self>,
3947 message: TypedEnvelope<proto::RequestJoinProject>,
3948 _: Arc<Client>,
3949 mut cx: AsyncAppContext,
3950 ) -> Result<()> {
3951 let user_id = message.payload.requester_id;
3952 if this.read_with(&cx, |project, _| {
3953 project.collaborators.values().any(|c| c.user.id == user_id)
3954 }) {
3955 this.update(&mut cx, |this, cx| {
3956 this.respond_to_join_request(user_id, true, cx)
3957 });
3958 } else {
3959 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3960 let user = user_store
3961 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3962 .await?;
3963 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3964 }
3965 Ok(())
3966 }
3967
3968 async fn handle_unregister_project(
3969 this: ModelHandle<Self>,
3970 _: TypedEnvelope<proto::UnregisterProject>,
3971 _: Arc<Client>,
3972 mut cx: AsyncAppContext,
3973 ) -> Result<()> {
3974 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3975 Ok(())
3976 }
3977
3978 async fn handle_project_unshared(
3979 this: ModelHandle<Self>,
3980 _: TypedEnvelope<proto::ProjectUnshared>,
3981 _: Arc<Client>,
3982 mut cx: AsyncAppContext,
3983 ) -> Result<()> {
3984 this.update(&mut cx, |this, cx| this.unshared(cx));
3985 Ok(())
3986 }
3987
3988 async fn handle_add_collaborator(
3989 this: ModelHandle<Self>,
3990 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3991 _: Arc<Client>,
3992 mut cx: AsyncAppContext,
3993 ) -> Result<()> {
3994 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3995 let collaborator = envelope
3996 .payload
3997 .collaborator
3998 .take()
3999 .ok_or_else(|| anyhow!("empty collaborator"))?;
4000
4001 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4002 this.update(&mut cx, |this, cx| {
4003 this.collaborators
4004 .insert(collaborator.peer_id, collaborator);
4005 cx.notify();
4006 });
4007
4008 Ok(())
4009 }
4010
4011 async fn handle_remove_collaborator(
4012 this: ModelHandle<Self>,
4013 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4014 _: Arc<Client>,
4015 mut cx: AsyncAppContext,
4016 ) -> Result<()> {
4017 this.update(&mut cx, |this, cx| {
4018 let peer_id = PeerId(envelope.payload.peer_id);
4019 let replica_id = this
4020 .collaborators
4021 .remove(&peer_id)
4022 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4023 .replica_id;
4024 for (_, buffer) in &this.opened_buffers {
4025 if let Some(buffer) = buffer.upgrade(cx) {
4026 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4027 }
4028 }
4029
4030 cx.emit(Event::CollaboratorLeft(peer_id));
4031 cx.notify();
4032 Ok(())
4033 })
4034 }
4035
4036 async fn handle_join_project_request_cancelled(
4037 this: ModelHandle<Self>,
4038 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4039 _: Arc<Client>,
4040 mut cx: AsyncAppContext,
4041 ) -> Result<()> {
4042 let user = this
4043 .update(&mut cx, |this, cx| {
4044 this.user_store.update(cx, |user_store, cx| {
4045 user_store.fetch_user(envelope.payload.requester_id, cx)
4046 })
4047 })
4048 .await?;
4049
4050 this.update(&mut cx, |_, cx| {
4051 cx.emit(Event::ContactCancelledJoinRequest(user));
4052 });
4053
4054 Ok(())
4055 }
4056
4057 async fn handle_register_worktree(
4058 this: ModelHandle<Self>,
4059 envelope: TypedEnvelope<proto::RegisterWorktree>,
4060 client: Arc<Client>,
4061 mut cx: AsyncAppContext,
4062 ) -> Result<()> {
4063 this.update(&mut cx, |this, cx| {
4064 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4065 let replica_id = this.replica_id();
4066 let worktree = proto::Worktree {
4067 id: envelope.payload.worktree_id,
4068 root_name: envelope.payload.root_name,
4069 entries: Default::default(),
4070 diagnostic_summaries: Default::default(),
4071 visible: envelope.payload.visible,
4072 scan_id: 0,
4073 };
4074 let (worktree, load_task) =
4075 Worktree::remote(remote_id, replica_id, worktree, client, cx);
4076 this.add_worktree(&worktree, cx);
4077 load_task.detach();
4078 Ok(())
4079 })
4080 }
4081
4082 async fn handle_unregister_worktree(
4083 this: ModelHandle<Self>,
4084 envelope: TypedEnvelope<proto::UnregisterWorktree>,
4085 _: Arc<Client>,
4086 mut cx: AsyncAppContext,
4087 ) -> Result<()> {
4088 this.update(&mut cx, |this, cx| {
4089 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4090 this.remove_worktree(worktree_id, cx);
4091 Ok(())
4092 })
4093 }
4094
4095 async fn handle_update_worktree(
4096 this: ModelHandle<Self>,
4097 envelope: TypedEnvelope<proto::UpdateWorktree>,
4098 _: Arc<Client>,
4099 mut cx: AsyncAppContext,
4100 ) -> Result<()> {
4101 this.update(&mut cx, |this, cx| {
4102 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4103 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4104 worktree.update(cx, |worktree, _| {
4105 let worktree = worktree.as_remote_mut().unwrap();
4106 worktree.update_from_remote(envelope)
4107 })?;
4108 }
4109 Ok(())
4110 })
4111 }
4112
4113 async fn handle_create_project_entry(
4114 this: ModelHandle<Self>,
4115 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4116 _: Arc<Client>,
4117 mut cx: AsyncAppContext,
4118 ) -> Result<proto::ProjectEntryResponse> {
4119 let worktree = this.update(&mut cx, |this, cx| {
4120 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4121 this.worktree_for_id(worktree_id, cx)
4122 .ok_or_else(|| anyhow!("worktree not found"))
4123 })?;
4124 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4125 let entry = worktree
4126 .update(&mut cx, |worktree, cx| {
4127 let worktree = worktree.as_local_mut().unwrap();
4128 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4129 worktree.create_entry(path, envelope.payload.is_directory, cx)
4130 })
4131 .await?;
4132 Ok(proto::ProjectEntryResponse {
4133 entry: Some((&entry).into()),
4134 worktree_scan_id: worktree_scan_id as u64,
4135 })
4136 }
4137
4138 async fn handle_rename_project_entry(
4139 this: ModelHandle<Self>,
4140 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4141 _: Arc<Client>,
4142 mut cx: AsyncAppContext,
4143 ) -> Result<proto::ProjectEntryResponse> {
4144 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4145 let worktree = this.read_with(&cx, |this, cx| {
4146 this.worktree_for_entry(entry_id, cx)
4147 .ok_or_else(|| anyhow!("worktree not found"))
4148 })?;
4149 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4150 let entry = worktree
4151 .update(&mut cx, |worktree, cx| {
4152 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4153 worktree
4154 .as_local_mut()
4155 .unwrap()
4156 .rename_entry(entry_id, new_path, cx)
4157 .ok_or_else(|| anyhow!("invalid entry"))
4158 })?
4159 .await?;
4160 Ok(proto::ProjectEntryResponse {
4161 entry: Some((&entry).into()),
4162 worktree_scan_id: worktree_scan_id as u64,
4163 })
4164 }
4165
4166 async fn handle_copy_project_entry(
4167 this: ModelHandle<Self>,
4168 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4169 _: Arc<Client>,
4170 mut cx: AsyncAppContext,
4171 ) -> Result<proto::ProjectEntryResponse> {
4172 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4173 let worktree = this.read_with(&cx, |this, cx| {
4174 this.worktree_for_entry(entry_id, cx)
4175 .ok_or_else(|| anyhow!("worktree not found"))
4176 })?;
4177 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4178 let entry = worktree
4179 .update(&mut cx, |worktree, cx| {
4180 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4181 worktree
4182 .as_local_mut()
4183 .unwrap()
4184 .copy_entry(entry_id, new_path, cx)
4185 .ok_or_else(|| anyhow!("invalid entry"))
4186 })?
4187 .await?;
4188 Ok(proto::ProjectEntryResponse {
4189 entry: Some((&entry).into()),
4190 worktree_scan_id: worktree_scan_id as u64,
4191 })
4192 }
4193
4194 async fn handle_delete_project_entry(
4195 this: ModelHandle<Self>,
4196 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4197 _: Arc<Client>,
4198 mut cx: AsyncAppContext,
4199 ) -> Result<proto::ProjectEntryResponse> {
4200 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4201 let worktree = this.read_with(&cx, |this, cx| {
4202 this.worktree_for_entry(entry_id, cx)
4203 .ok_or_else(|| anyhow!("worktree not found"))
4204 })?;
4205 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4206 worktree
4207 .update(&mut cx, |worktree, cx| {
4208 worktree
4209 .as_local_mut()
4210 .unwrap()
4211 .delete_entry(entry_id, cx)
4212 .ok_or_else(|| anyhow!("invalid entry"))
4213 })?
4214 .await?;
4215 Ok(proto::ProjectEntryResponse {
4216 entry: None,
4217 worktree_scan_id: worktree_scan_id as u64,
4218 })
4219 }
4220
4221 async fn handle_update_diagnostic_summary(
4222 this: ModelHandle<Self>,
4223 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4224 _: Arc<Client>,
4225 mut cx: AsyncAppContext,
4226 ) -> Result<()> {
4227 this.update(&mut cx, |this, cx| {
4228 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4229 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4230 if let Some(summary) = envelope.payload.summary {
4231 let project_path = ProjectPath {
4232 worktree_id,
4233 path: Path::new(&summary.path).into(),
4234 };
4235 worktree.update(cx, |worktree, _| {
4236 worktree
4237 .as_remote_mut()
4238 .unwrap()
4239 .update_diagnostic_summary(project_path.path.clone(), &summary);
4240 });
4241 cx.emit(Event::DiagnosticsUpdated(project_path));
4242 }
4243 }
4244 Ok(())
4245 })
4246 }
4247
4248 async fn handle_start_language_server(
4249 this: ModelHandle<Self>,
4250 envelope: TypedEnvelope<proto::StartLanguageServer>,
4251 _: Arc<Client>,
4252 mut cx: AsyncAppContext,
4253 ) -> Result<()> {
4254 let server = envelope
4255 .payload
4256 .server
4257 .ok_or_else(|| anyhow!("invalid server"))?;
4258 this.update(&mut cx, |this, cx| {
4259 this.language_server_statuses.insert(
4260 server.id as usize,
4261 LanguageServerStatus {
4262 name: server.name,
4263 pending_work: Default::default(),
4264 pending_diagnostic_updates: 0,
4265 },
4266 );
4267 cx.notify();
4268 });
4269 Ok(())
4270 }
4271
    /// Handles progress/diagnostic notifications forwarded from a host-side
    /// language server, dispatching on the message variant to the matching
    /// local bookkeeping method.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamped locally on receipt, not by the host.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
4320
4321 async fn handle_update_buffer(
4322 this: ModelHandle<Self>,
4323 envelope: TypedEnvelope<proto::UpdateBuffer>,
4324 _: Arc<Client>,
4325 mut cx: AsyncAppContext,
4326 ) -> Result<()> {
4327 this.update(&mut cx, |this, cx| {
4328 let payload = envelope.payload.clone();
4329 let buffer_id = payload.buffer_id;
4330 let ops = payload
4331 .operations
4332 .into_iter()
4333 .map(|op| language::proto::deserialize_operation(op))
4334 .collect::<Result<Vec<_>, _>>()?;
4335 let is_remote = this.is_remote();
4336 match this.opened_buffers.entry(buffer_id) {
4337 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4338 OpenBuffer::Strong(buffer) => {
4339 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4340 }
4341 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4342 OpenBuffer::Weak(_) => {}
4343 },
4344 hash_map::Entry::Vacant(e) => {
4345 assert!(
4346 is_remote,
4347 "received buffer update from {:?}",
4348 envelope.original_sender_id
4349 );
4350 e.insert(OpenBuffer::Loading(ops));
4351 }
4352 }
4353 Ok(())
4354 })
4355 }
4356
4357 async fn handle_update_buffer_file(
4358 this: ModelHandle<Self>,
4359 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4360 _: Arc<Client>,
4361 mut cx: AsyncAppContext,
4362 ) -> Result<()> {
4363 this.update(&mut cx, |this, cx| {
4364 let payload = envelope.payload.clone();
4365 let buffer_id = payload.buffer_id;
4366 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4367 let worktree = this
4368 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4369 .ok_or_else(|| anyhow!("no such worktree"))?;
4370 let file = File::from_proto(file, worktree.clone(), cx)?;
4371 let buffer = this
4372 .opened_buffers
4373 .get_mut(&buffer_id)
4374 .and_then(|b| b.upgrade(cx))
4375 .ok_or_else(|| anyhow!("no such buffer"))?;
4376 buffer.update(cx, |buffer, cx| {
4377 buffer.file_updated(Box::new(file), cx).detach();
4378 });
4379 Ok(())
4380 })
4381 }
4382
    /// Message handler: a guest asked the host to save a buffer.
    ///
    /// Waits until the buffer has caught up to the version the guest was
    /// looking at before saving, so the save reflects at least the edits
    /// the requester has seen, then reports the saved version and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Block until all of the requester's edits have been applied locally.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
4415
4416 async fn handle_reload_buffers(
4417 this: ModelHandle<Self>,
4418 envelope: TypedEnvelope<proto::ReloadBuffers>,
4419 _: Arc<Client>,
4420 mut cx: AsyncAppContext,
4421 ) -> Result<proto::ReloadBuffersResponse> {
4422 let sender_id = envelope.original_sender_id()?;
4423 let reload = this.update(&mut cx, |this, cx| {
4424 let mut buffers = HashSet::default();
4425 for buffer_id in &envelope.payload.buffer_ids {
4426 buffers.insert(
4427 this.opened_buffers
4428 .get(buffer_id)
4429 .and_then(|buffer| buffer.upgrade(cx))
4430 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4431 );
4432 }
4433 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4434 })?;
4435
4436 let project_transaction = reload.await?;
4437 let project_transaction = this.update(&mut cx, |this, cx| {
4438 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4439 });
4440 Ok(proto::ReloadBuffersResponse {
4441 transaction: Some(project_transaction),
4442 })
4443 }
4444
4445 async fn handle_format_buffers(
4446 this: ModelHandle<Self>,
4447 envelope: TypedEnvelope<proto::FormatBuffers>,
4448 _: Arc<Client>,
4449 mut cx: AsyncAppContext,
4450 ) -> Result<proto::FormatBuffersResponse> {
4451 let sender_id = envelope.original_sender_id()?;
4452 let format = this.update(&mut cx, |this, cx| {
4453 let mut buffers = HashSet::default();
4454 for buffer_id in &envelope.payload.buffer_ids {
4455 buffers.insert(
4456 this.opened_buffers
4457 .get(buffer_id)
4458 .and_then(|buffer| buffer.upgrade(cx))
4459 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4460 );
4461 }
4462 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4463 })?;
4464
4465 let project_transaction = format.await?;
4466 let project_transaction = this.update(&mut cx, |this, cx| {
4467 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4468 });
4469 Ok(proto::FormatBuffersResponse {
4470 transaction: Some(project_transaction),
4471 })
4472 }
4473
    /// Message handler: computes completions at an anchor position on behalf
    /// of a guest.
    ///
    /// Waits for the buffer to reach the guest's version first, and reports
    /// back the version the completions were actually computed against so
    /// the guest can interpret the anchors correctly.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting: the buffer may now be ahead of
        // the requested version, and completions are relative to this one.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4508
    /// Message handler: applies a completion's additional text edits (e.g.
    /// auto-imports) on behalf of a guest, returning the resulting
    /// transaction, if any, so the guest can mirror it.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to deserialize the completion's
            // label with proper syntax highlighting.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                // `false`: don't push the transaction to the host's undo
                // history; it belongs to the requesting guest.
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4541
    /// Message handler: computes code actions for an anchor range on behalf
    /// of a guest, waiting for the buffer to reach the guest's version first
    /// and reporting the version the actions were computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Ensure the guest's edits have arrived before resolving anchors.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Capture the version after waiting; the response anchors are
        // relative to this snapshot.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4584
    /// Message handler: applies a code action on behalf of a guest and
    /// returns the resulting multi-buffer transaction, serialized for the
    /// requesting peer so it can replay the edits.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `false`: the transaction belongs to the guest's undo history,
            // not the host's.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4615
    /// Generic message handler for buffer-scoped LSP requests (hover,
    /// definition, rename, ...). `T` defines how to decode the proto request,
    /// run it against the language server, and encode the response.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // Decoding may await (e.g. waiting for the buffer to reach the
        // requester's version), so it happens outside of `read_with`.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Snapshot the version before running the request so the response
        // can report which buffer state it corresponds to.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4656
4657 async fn handle_get_project_symbols(
4658 this: ModelHandle<Self>,
4659 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4660 _: Arc<Client>,
4661 mut cx: AsyncAppContext,
4662 ) -> Result<proto::GetProjectSymbolsResponse> {
4663 let symbols = this
4664 .update(&mut cx, |this, cx| {
4665 this.symbols(&envelope.payload.query, cx)
4666 })
4667 .await?;
4668
4669 Ok(proto::GetProjectSymbolsResponse {
4670 symbols: symbols.iter().map(serialize_symbol).collect(),
4671 })
4672 }
4673
    /// Message handler: runs a project-wide search on behalf of a guest and
    /// returns the matches as buffer/anchor-range locations, serializing each
    /// matched buffer for the requesting peer.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            // Flatten the per-buffer range lists into a single location list.
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Sends full buffer state the first time this peer sees
                    // the buffer, and just its id afterwards.
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
4703
    /// Message handler: opens the buffer containing a project symbol on
    /// behalf of a guest.
    ///
    /// The symbol's signature (a keyed hash over worktree id and path — see
    /// `symbol_signature`) is verified first, so a peer can only open paths
    /// that this host previously advertised as symbols.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4734
4735 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4736 let mut hasher = Sha256::new();
4737 hasher.update(worktree_id.to_proto().to_be_bytes());
4738 hasher.update(path.to_string_lossy().as_bytes());
4739 hasher.update(self.nonce.to_be_bytes());
4740 hasher.finalize().as_slice().try_into().unwrap()
4741 }
4742
4743 async fn handle_open_buffer_by_id(
4744 this: ModelHandle<Self>,
4745 envelope: TypedEnvelope<proto::OpenBufferById>,
4746 _: Arc<Client>,
4747 mut cx: AsyncAppContext,
4748 ) -> Result<proto::OpenBufferResponse> {
4749 let peer_id = envelope.original_sender_id()?;
4750 let buffer = this
4751 .update(&mut cx, |this, cx| {
4752 this.open_buffer_by_id(envelope.payload.id, cx)
4753 })
4754 .await?;
4755 this.update(&mut cx, |this, cx| {
4756 Ok(proto::OpenBufferResponse {
4757 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4758 })
4759 })
4760 }
4761
4762 async fn handle_open_buffer_by_path(
4763 this: ModelHandle<Self>,
4764 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4765 _: Arc<Client>,
4766 mut cx: AsyncAppContext,
4767 ) -> Result<proto::OpenBufferResponse> {
4768 let peer_id = envelope.original_sender_id()?;
4769 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4770 let open_buffer = this.update(&mut cx, |this, cx| {
4771 this.open_buffer(
4772 ProjectPath {
4773 worktree_id,
4774 path: PathBuf::from(envelope.payload.path).into(),
4775 },
4776 cx,
4777 )
4778 });
4779
4780 let buffer = open_buffer.await?;
4781 this.update(&mut cx, |this, cx| {
4782 Ok(proto::OpenBufferResponse {
4783 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4784 })
4785 })
4786 }
4787
4788 fn serialize_project_transaction_for_peer(
4789 &mut self,
4790 project_transaction: ProjectTransaction,
4791 peer_id: PeerId,
4792 cx: &AppContext,
4793 ) -> proto::ProjectTransaction {
4794 let mut serialized_transaction = proto::ProjectTransaction {
4795 buffers: Default::default(),
4796 transactions: Default::default(),
4797 };
4798 for (buffer, transaction) in project_transaction.0 {
4799 serialized_transaction
4800 .buffers
4801 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4802 serialized_transaction
4803 .transactions
4804 .push(language::proto::serialize_transaction(&transaction));
4805 }
4806 serialized_transaction
4807 }
4808
    /// Reconstructs a multi-buffer transaction received from a peer.
    ///
    /// First resolves every buffer (possibly waiting for its state to
    /// arrive), then waits for each transaction's edits to be applied before
    /// optionally pushing the transaction onto the local undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // The proto message carries buffers and transactions as two
            // index-aligned vectors; zip them back into pairs.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The edits may still be in flight; wait until they have all
                // been applied to this replica before exposing the
                // transaction to callers (or to undo history).
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4842
4843 fn serialize_buffer_for_peer(
4844 &mut self,
4845 buffer: &ModelHandle<Buffer>,
4846 peer_id: PeerId,
4847 cx: &AppContext,
4848 ) -> proto::Buffer {
4849 let buffer_id = buffer.read(cx).remote_id();
4850 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4851 if shared_buffers.insert(buffer_id) {
4852 proto::Buffer {
4853 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4854 }
4855 } else {
4856 proto::Buffer {
4857 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4858 }
4859 }
4860 }
4861
    /// Resolves a wire-format buffer into a live buffer model.
    ///
    /// An `Id` variant refers to a buffer whose state was (or will be) sent
    /// separately, so this may have to wait until that buffer is registered.
    /// A `State` variant carries the full buffer contents and creates a new
    /// model.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // Watch channel used to wake waiters whenever any buffer finishes
        // opening (see the `State` arm below).
        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, sleeping on the watch channel
                    // between attempts until it appears or the project drops.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Reattach the buffer's file to its worktree, if it has one.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Signal the watch channel so any task blocked in the
                    // `Id` arm above re-checks `opened_buffers`.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4920
    /// Reconstructs a `Symbol` from its wire representation, re-deriving the
    /// display label from the path's language when one is available.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // SAFETY: reinterprets the raw wire integer as an LSP symbol kind.
        // NOTE(review): the value comes from a remote peer and is not range-
        // checked here, so an out-of-range discriminant would be undefined
        // behavior — consider validating before transmuting.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Prefer a language-aware label; fall back to the plain name.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4950
4951 async fn handle_buffer_saved(
4952 this: ModelHandle<Self>,
4953 envelope: TypedEnvelope<proto::BufferSaved>,
4954 _: Arc<Client>,
4955 mut cx: AsyncAppContext,
4956 ) -> Result<()> {
4957 let version = deserialize_version(envelope.payload.version);
4958 let mtime = envelope
4959 .payload
4960 .mtime
4961 .ok_or_else(|| anyhow!("missing mtime"))?
4962 .into();
4963
4964 this.update(&mut cx, |this, cx| {
4965 let buffer = this
4966 .opened_buffers
4967 .get(&envelope.payload.buffer_id)
4968 .and_then(|buffer| buffer.upgrade(cx));
4969 if let Some(buffer) = buffer {
4970 buffer.update(cx, |buffer, cx| {
4971 buffer.did_save(version, mtime, None, cx);
4972 });
4973 }
4974 Ok(())
4975 })
4976 }
4977
4978 async fn handle_buffer_reloaded(
4979 this: ModelHandle<Self>,
4980 envelope: TypedEnvelope<proto::BufferReloaded>,
4981 _: Arc<Client>,
4982 mut cx: AsyncAppContext,
4983 ) -> Result<()> {
4984 let payload = envelope.payload.clone();
4985 let version = deserialize_version(payload.version);
4986 let mtime = payload
4987 .mtime
4988 .ok_or_else(|| anyhow!("missing mtime"))?
4989 .into();
4990 this.update(&mut cx, |this, cx| {
4991 let buffer = this
4992 .opened_buffers
4993 .get(&payload.buffer_id)
4994 .and_then(|buffer| buffer.upgrade(cx));
4995 if let Some(buffer) = buffer {
4996 buffer.update(cx, |buffer, cx| {
4997 buffer.did_reload(version, mtime, cx);
4998 });
4999 }
5000 Ok(())
5001 })
5002 }
5003
    /// Fuzzy-matches `query` against the paths of all visible worktrees.
    ///
    /// Snapshots the worktrees on the foreground thread, then performs the
    /// matching on the background executor; `cancel_flag` lets the caller
    /// abort a search that has been superseded.
    pub fn match_paths<'a>(
        &self,
        query: &'a str,
        include_ignored: bool,
        smart_case: bool,
        max_results: usize,
        cancel_flag: &'a AtomicBool,
        cx: &AppContext,
    ) -> impl 'a + Future<Output = Vec<PathMatch>> {
        let worktrees = self
            .worktrees(cx)
            .filter(|worktree| worktree.read(cx).is_visible())
            .collect::<Vec<_>>();
        // Prefix results with the worktree's root name only when there is
        // more than one worktree, so single-root results stay short.
        let include_root_name = worktrees.len() > 1;
        let candidate_sets = worktrees
            .into_iter()
            .map(|worktree| CandidateSet {
                snapshot: worktree.read(cx).snapshot(),
                include_ignored,
                include_root_name,
            })
            .collect::<Vec<_>>();

        let background = cx.background().clone();
        async move {
            fuzzy::match_paths(
                candidate_sets.as_slice(),
                query,
                smart_case,
                max_results,
                cancel_flag,
                background,
            )
            .await
        }
    }
5040
    /// Converts a batch of LSP text edits into anchor-range edits against the
    /// buffer snapshot that corresponds to `version` (or the current text if
    /// `version` is `None`).
    ///
    /// Adjacent/newline-separated edits are coalesced, and multiline edits
    /// are re-diffed line by line so anchors in unchanged regions survive.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        // The conversion is pure text work; run it off the main thread.
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop merging unless the gap is exactly one newline:
                        // the next edit must start at column 0 of the line
                        // after `range.end`, and `range.end` must be at the
                        // end of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose endpoints don't lie on valid positions
                // in the snapshot (a misbehaving server).
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the previous diff hunk was separated from
                    // this one by unchanged text; consecutive hunks are merged
                    // into a single edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement: no diffing needed.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5143
    /// Returns the text snapshot a language server's edits refer to.
    ///
    /// `version` is the LSP document version the server reported; `None`
    /// means "current text". As a side effect, snapshots more than
    /// `OLD_VERSIONS_TO_RETAIN` versions older than the requested one are
    /// pruned from the history.
    fn buffer_snapshot_for_lsp_version(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        version: Option<i32>,
        cx: &AppContext,
    ) -> Result<TextBufferSnapshot> {
        // How many versions behind the requested one we keep around.
        const OLD_VERSIONS_TO_RETAIN: i32 = 10;

        if let Some(version) = version {
            let buffer_id = buffer.read(cx).remote_id();
            let snapshots = self
                .buffer_snapshots
                .get_mut(&buffer_id)
                .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
            let mut found_snapshot = None;
            // Single pass: locate the requested snapshot while discarding
            // ones that are now too old to ever be requested again.
            snapshots.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    false
                } else {
                    if *snapshot_version == version {
                        found_snapshot = Some(snapshot.clone());
                    }
                    true
                }
            });

            found_snapshot.ok_or_else(|| {
                anyhow!(
                    "snapshot not found for buffer {} at version {}",
                    buffer_id,
                    version
                )
            })
        } else {
            Ok((buffer.read(cx)).text_snapshot())
        }
    }
5181
5182 fn language_server_for_buffer(
5183 &self,
5184 buffer: &Buffer,
5185 cx: &AppContext,
5186 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5187 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5188 let worktree_id = file.worktree_id(cx);
5189 self.language_servers
5190 .get(&(worktree_id, language.lsp_adapter()?.name()))
5191 } else {
5192 None
5193 }
5194 }
5195}
5196
5197impl ProjectStore {
5198 pub fn projects<'a>(
5199 &'a self,
5200 cx: &'a AppContext,
5201 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5202 self.projects
5203 .iter()
5204 .filter_map(|project| project.upgrade(cx))
5205 }
5206
5207 fn add(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5208 if let Err(ix) = self
5209 .projects
5210 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5211 {
5212 self.projects.insert(ix, project);
5213 }
5214 cx.notify();
5215 }
5216
5217 fn prune(&mut self, cx: &mut ModelContext<Self>) {
5218 let mut did_change = false;
5219 self.projects.retain(|project| {
5220 if project.is_upgradable(cx) {
5221 true
5222 } else {
5223 did_change = true;
5224 false
5225 }
5226 });
5227 if did_change {
5228 cx.notify();
5229 }
5230 }
5231}
5232
5233impl WorktreeHandle {
5234 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5235 match self {
5236 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5237 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5238 }
5239 }
5240}
5241
5242impl OpenBuffer {
5243 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5244 match self {
5245 OpenBuffer::Strong(handle) => Some(handle.clone()),
5246 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5247 OpenBuffer::Loading(_) => None,
5248 }
5249 }
5250}
5251
/// One worktree's worth of fuzzy-match candidates for `Project::match_paths`.
struct CandidateSet {
    // Immutable snapshot of the worktree's file list.
    snapshot: Snapshot,
    // Whether gitignored files participate in matching.
    include_ignored: bool,
    // Whether match paths are displayed with the worktree root name prefixed
    // (used when the project has multiple worktrees).
    include_root_name: bool,
}
5257
5258impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5259 type Candidates = CandidateSetIter<'a>;
5260
5261 fn id(&self) -> usize {
5262 self.snapshot.id().to_usize()
5263 }
5264
5265 fn len(&self) -> usize {
5266 if self.include_ignored {
5267 self.snapshot.file_count()
5268 } else {
5269 self.snapshot.visible_file_count()
5270 }
5271 }
5272
5273 fn prefix(&self) -> Arc<str> {
5274 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5275 self.snapshot.root_name().into()
5276 } else if self.include_root_name {
5277 format!("{}/", self.snapshot.root_name()).into()
5278 } else {
5279 "".into()
5280 }
5281 }
5282
5283 fn candidates(&'a self, start: usize) -> Self::Candidates {
5284 CandidateSetIter {
5285 traversal: self.snapshot.files(self.include_ignored, start),
5286 }
5287 }
5288}
5289
/// Iterator adapter turning a worktree file traversal into fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5293
5294impl<'a> Iterator for CandidateSetIter<'a> {
5295 type Item = PathMatchCandidate<'a>;
5296
5297 fn next(&mut self) -> Option<Self::Item> {
5298 self.traversal.next().map(|entry| {
5299 if let EntryKind::File(char_bag) = entry.kind {
5300 PathMatchCandidate {
5301 path: &entry.path,
5302 char_bag,
5303 }
5304 } else {
5305 unreachable!()
5306 }
5307 })
5308 }
5309}
5310
impl Entity for ProjectStore {
    // The store has no typed events; observers rely on `cx.notify()`.
    type Event = ();
}
5314
impl Entity for Project {
    type Event = Event;

    /// Called when the project model is dropped: prunes it from the global
    /// store and tells the server we're no longer hosting/joining it.
    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        self.project_store.update(cx, ProjectStore::prune);

        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared
                // (i.e. it had been assigned a remote id).
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// On app quit, shut down every language server and wait for all of the
    /// shutdowns to complete before allowing the app to exit.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, (_, server))| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
5356
5357impl Collaborator {
5358 fn from_proto(
5359 message: proto::Collaborator,
5360 user_store: &ModelHandle<UserStore>,
5361 cx: &mut AsyncAppContext,
5362 ) -> impl Future<Output = Result<Self>> {
5363 let user = user_store.update(cx, |user_store, cx| {
5364 user_store.fetch_user(message.user_id, cx)
5365 });
5366
5367 async move {
5368 Ok(Self {
5369 peer_id: PeerId(message.peer_id),
5370 user: user.await?,
5371 replica_id: message.replica_id as ReplicaId,
5372 })
5373 }
5374 }
5375}
5376
5377impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5378 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5379 Self {
5380 worktree_id,
5381 path: path.as_ref().into(),
5382 }
5383 }
5384}
5385
5386impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5387 fn from(options: lsp::CreateFileOptions) -> Self {
5388 Self {
5389 overwrite: options.overwrite.unwrap_or(false),
5390 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5391 }
5392 }
5393}
5394
5395impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5396 fn from(options: lsp::RenameFileOptions) -> Self {
5397 Self {
5398 overwrite: options.overwrite.unwrap_or(false),
5399 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5400 }
5401 }
5402}
5403
5404impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5405 fn from(options: lsp::DeleteFileOptions) -> Self {
5406 Self {
5407 recursive: options.recursive.unwrap_or(false),
5408 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5409 }
5410 }
5411}
5412
/// Converts a `Symbol` into its protobuf wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY(review): relies on `symbol.kind` and the proto `kind` field
        // having identical in-memory representations; a plain numeric
        // conversion would be safer if the types allow it — TODO confirm.
        kind: unsafe { mem::transmute(symbol.kind) },
        // The path is serialized as a lossy UTF-8 string.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5432
/// Computes a relative path from `base` to `path`.
///
/// Both paths are walked component-by-component: shared leading components
/// are dropped, then every remaining component of `base` becomes a `..`
/// segment before the remainder of `path` is appended. When `base` contains
/// a `.` component, the paired component of `path` is kept and no `..` is
/// emitted for it. Returns an empty path when the two paths are identical.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_iter = path.components();
    let mut base_iter = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (path_iter.next(), base_iter.next()) {
            // Both exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of `path` is appended verbatim.
            (Some(component), None) => {
                result.push(component);
                result.extend(path_iter.by_ref());
                break;
            }
            // Path exhausted: each remaining base component becomes `..`.
            (None, _) => result.push(Component::ParentDir),
            // Still within the shared prefix: drop the component.
            (Some(a), Some(b)) if result.is_empty() && a == b => {}
            // A `.` in base pairs with a kept path component.
            (Some(component), Some(b)) if b == Component::CurDir => result.push(component),
            // Paths diverged: back out of the rest of `base`, then append
            // the rest of `path`.
            (Some(component), Some(_)) => {
                result.push(Component::ParentDir);
                result.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                result.push(component);
                result.extend(path_iter.by_ref());
                break;
            }
        }
    }
    result.into_iter().map(|c| c.as_os_str()).collect()
}
5461
impl Item for Buffer {
    /// The project entry backing this buffer, if the buffer's file is a
    /// worktree file with an associated project entry.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }
}
5467
5468#[cfg(test)]
5469mod tests {
5470 use crate::worktree::WorktreeHandle;
5471
5472 use super::{Event, *};
5473 use fs::RealFs;
5474 use futures::{future, StreamExt};
5475 use gpui::test::subscribe;
5476 use language::{
5477 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5478 OffsetRangeExt, Point, ToPoint,
5479 };
5480 use lsp::Url;
5481 use serde_json::json;
5482 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5483 use unindent::Unindent as _;
5484 use util::{assert_set_eq, test::temp_tree};
5485
    /// End-to-end check, on the real filesystem, that a project opened
    /// through a symlinked root scans its worktree, follows directory
    /// symlinks inside it, and that fuzzy path matching returns the
    /// expected files.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the project through a symlink to the root, and add a
        // directory symlink ("finnochio" -> "fennel") inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same inode as its target.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-search the worktree; only the paths under "banana" match "bna".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5539
    /// Exercises the full lifecycle of language servers across two languages:
    /// server startup when a matching buffer is opened, routing of
    /// change/save/close notifications to the right server, file renames that
    /// switch a buffer's language (and language server), and restarting
    /// servers for open buffers.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Two languages, each with its own fake language server whose
        // capabilities advertise different completion trigger characters.
        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        // A project containing Rust, TOML, and JSON files on a fake filesystem.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Seed the renamed buffer with a diagnostic so we can verify below
        // that it gets cleared when the buffer's language changes.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers receive a shutdown request before being replaced.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
5916
    /// Verifies that diagnostics published for two single-file worktrees are
    /// routed to the correct buffer, each highlighted with its own severity.
    #[gpui::test]
    async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "let a = 1;",
                "b.rs": "let b = 2;"
            }),
        )
        .await;

        // Each file is opened as its own single-file worktree.
        let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

        let buffer_a = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        // Publish one diagnostic per file: an error in a.rs and a warning in b.rs.
        project.update(cx, |project, cx| {
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/a.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::ERROR),
                            message: "error 1".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/b.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::WARNING),
                            message: "error 2".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
        });

        // Each buffer shows only its own diagnostic, with the right severity.
        buffer_a.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("a", Some(DiagnosticSeverity::ERROR)),
                    (" = 1;", None),
                ]
            );
        });
        buffer_b.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("b", Some(DiagnosticSeverity::WARNING)),
                    (" = 2;", None),
                ]
            );
        });
    }
6012
    /// Verifies that LSP progress notifications carrying the disk-based
    /// diagnostics token produce started/updated/finished events, that nested
    /// (overlapping) progress is handled, and that publishing identical empty
    /// diagnostics twice emits only one update event.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let worktree_id =
            project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Start two more overlapping progress reports; no additional
        // "started" event is expected while one is already in flight.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Only once all outstanding progress reports end do the
        // updated/finished events fire.
        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });

        // Ensure publishing empty diagnostics twice only results in one update event.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        cx.foreground().run_until_parked();
        assert_eq!(futures::poll!(events.next()), Poll::Pending);
    }
6142
    /// Verifies that restarting a language server while its disk-based
    /// diagnostics progress is still open doesn't leave the project stuck in
    /// the "running diagnostics" state.
    #[gpui::test]
    async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            disk_based_diagnostics_progress_token: Some(progress_token),
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate diagnostics starting to update.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;

        // Restart the server before the diagnostics finish updating.
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers([buffer], cx);
        });
        let mut events = subscribe(&project, cx);

        // Simulate the newly started server sending more diagnostics.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // All diagnostics are considered done, despite the old server's diagnostic
        // task never completing.
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );
        project.read_with(cx, |project, _| {
            assert!(!project.is_running_disk_based_diagnostics());
        });
    }
6205
6206 #[gpui::test]
6207 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6208 cx.foreground().forbid_parking();
6209
6210 let mut language = Language::new(
6211 LanguageConfig {
6212 name: "Rust".into(),
6213 path_suffixes: vec!["rs".to_string()],
6214 ..Default::default()
6215 },
6216 Some(tree_sitter_rust::language()),
6217 );
6218 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6219 disk_based_diagnostics_sources: &["disk"],
6220 ..Default::default()
6221 });
6222
6223 let text = "
6224 fn a() { A }
6225 fn b() { BB }
6226 fn c() { CCC }
6227 "
6228 .unindent();
6229
6230 let fs = FakeFs::new(cx.background());
6231 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6232
6233 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6234 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6235
6236 let buffer = project
6237 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6238 .await
6239 .unwrap();
6240
6241 let mut fake_server = fake_servers.next().await.unwrap();
6242 let open_notification = fake_server
6243 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6244 .await;
6245
6246 // Edit the buffer, moving the content down
6247 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6248 let change_notification_1 = fake_server
6249 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6250 .await;
6251 assert!(
6252 change_notification_1.text_document.version > open_notification.text_document.version
6253 );
6254
6255 // Report some diagnostics for the initial version of the buffer
6256 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6257 lsp::PublishDiagnosticsParams {
6258 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6259 version: Some(open_notification.text_document.version),
6260 diagnostics: vec![
6261 lsp::Diagnostic {
6262 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6263 severity: Some(DiagnosticSeverity::ERROR),
6264 message: "undefined variable 'A'".to_string(),
6265 source: Some("disk".to_string()),
6266 ..Default::default()
6267 },
6268 lsp::Diagnostic {
6269 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6270 severity: Some(DiagnosticSeverity::ERROR),
6271 message: "undefined variable 'BB'".to_string(),
6272 source: Some("disk".to_string()),
6273 ..Default::default()
6274 },
6275 lsp::Diagnostic {
6276 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6277 severity: Some(DiagnosticSeverity::ERROR),
6278 source: Some("disk".to_string()),
6279 message: "undefined variable 'CCC'".to_string(),
6280 ..Default::default()
6281 },
6282 ],
6283 },
6284 );
6285
6286 // The diagnostics have moved down since they were created.
6287 buffer.next_notification(cx).await;
6288 buffer.read_with(cx, |buffer, _| {
6289 assert_eq!(
6290 buffer
6291 .snapshot()
6292 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6293 .collect::<Vec<_>>(),
6294 &[
6295 DiagnosticEntry {
6296 range: Point::new(3, 9)..Point::new(3, 11),
6297 diagnostic: Diagnostic {
6298 severity: DiagnosticSeverity::ERROR,
6299 message: "undefined variable 'BB'".to_string(),
6300 is_disk_based: true,
6301 group_id: 1,
6302 is_primary: true,
6303 ..Default::default()
6304 },
6305 },
6306 DiagnosticEntry {
6307 range: Point::new(4, 9)..Point::new(4, 12),
6308 diagnostic: Diagnostic {
6309 severity: DiagnosticSeverity::ERROR,
6310 message: "undefined variable 'CCC'".to_string(),
6311 is_disk_based: true,
6312 group_id: 2,
6313 is_primary: true,
6314 ..Default::default()
6315 }
6316 }
6317 ]
6318 );
6319 assert_eq!(
6320 chunks_with_diagnostics(buffer, 0..buffer.len()),
6321 [
6322 ("\n\nfn a() { ".to_string(), None),
6323 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6324 (" }\nfn b() { ".to_string(), None),
6325 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6326 (" }\nfn c() { ".to_string(), None),
6327 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6328 (" }\n".to_string(), None),
6329 ]
6330 );
6331 assert_eq!(
6332 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6333 [
6334 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6335 (" }\nfn c() { ".to_string(), None),
6336 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6337 ]
6338 );
6339 });
6340
6341 // Ensure overlapping diagnostics are highlighted correctly.
6342 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6343 lsp::PublishDiagnosticsParams {
6344 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6345 version: Some(open_notification.text_document.version),
6346 diagnostics: vec![
6347 lsp::Diagnostic {
6348 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6349 severity: Some(DiagnosticSeverity::ERROR),
6350 message: "undefined variable 'A'".to_string(),
6351 source: Some("disk".to_string()),
6352 ..Default::default()
6353 },
6354 lsp::Diagnostic {
6355 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6356 severity: Some(DiagnosticSeverity::WARNING),
6357 message: "unreachable statement".to_string(),
6358 source: Some("disk".to_string()),
6359 ..Default::default()
6360 },
6361 ],
6362 },
6363 );
6364
6365 buffer.next_notification(cx).await;
6366 buffer.read_with(cx, |buffer, _| {
6367 assert_eq!(
6368 buffer
6369 .snapshot()
6370 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6371 .collect::<Vec<_>>(),
6372 &[
6373 DiagnosticEntry {
6374 range: Point::new(2, 9)..Point::new(2, 12),
6375 diagnostic: Diagnostic {
6376 severity: DiagnosticSeverity::WARNING,
6377 message: "unreachable statement".to_string(),
6378 is_disk_based: true,
6379 group_id: 4,
6380 is_primary: true,
6381 ..Default::default()
6382 }
6383 },
6384 DiagnosticEntry {
6385 range: Point::new(2, 9)..Point::new(2, 10),
6386 diagnostic: Diagnostic {
6387 severity: DiagnosticSeverity::ERROR,
6388 message: "undefined variable 'A'".to_string(),
6389 is_disk_based: true,
6390 group_id: 3,
6391 is_primary: true,
6392 ..Default::default()
6393 },
6394 }
6395 ]
6396 );
6397 assert_eq!(
6398 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6399 [
6400 ("fn a() { ".to_string(), None),
6401 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6402 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6403 ("\n".to_string(), None),
6404 ]
6405 );
6406 assert_eq!(
6407 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6408 [
6409 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6410 ("\n".to_string(), None),
6411 ]
6412 );
6413 });
6414
6415 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6416 // changes since the last save.
6417 buffer.update(cx, |buffer, cx| {
6418 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6419 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6420 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6421 });
6422 let change_notification_2 = fake_server
6423 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6424 .await;
6425 assert!(
6426 change_notification_2.text_document.version
6427 > change_notification_1.text_document.version
6428 );
6429
6430 // Handle out-of-order diagnostics
6431 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6432 lsp::PublishDiagnosticsParams {
6433 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6434 version: Some(change_notification_2.text_document.version),
6435 diagnostics: vec![
6436 lsp::Diagnostic {
6437 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6438 severity: Some(DiagnosticSeverity::ERROR),
6439 message: "undefined variable 'BB'".to_string(),
6440 source: Some("disk".to_string()),
6441 ..Default::default()
6442 },
6443 lsp::Diagnostic {
6444 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6445 severity: Some(DiagnosticSeverity::WARNING),
6446 message: "undefined variable 'A'".to_string(),
6447 source: Some("disk".to_string()),
6448 ..Default::default()
6449 },
6450 ],
6451 },
6452 );
6453
6454 buffer.next_notification(cx).await;
6455 buffer.read_with(cx, |buffer, _| {
6456 assert_eq!(
6457 buffer
6458 .snapshot()
6459 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6460 .collect::<Vec<_>>(),
6461 &[
6462 DiagnosticEntry {
6463 range: Point::new(2, 21)..Point::new(2, 22),
6464 diagnostic: Diagnostic {
6465 severity: DiagnosticSeverity::WARNING,
6466 message: "undefined variable 'A'".to_string(),
6467 is_disk_based: true,
6468 group_id: 6,
6469 is_primary: true,
6470 ..Default::default()
6471 }
6472 },
6473 DiagnosticEntry {
6474 range: Point::new(3, 9)..Point::new(3, 14),
6475 diagnostic: Diagnostic {
6476 severity: DiagnosticSeverity::ERROR,
6477 message: "undefined variable 'BB'".to_string(),
6478 is_disk_based: true,
6479 group_id: 5,
6480 is_primary: true,
6481 ..Default::default()
6482 },
6483 }
6484 ]
6485 );
6486 });
6487 }
6488
6489 #[gpui::test]
6490 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6491 cx.foreground().forbid_parking();
6492
6493 let text = concat!(
6494 "let one = ;\n", //
6495 "let two = \n",
6496 "let three = 3;\n",
6497 );
6498
6499 let fs = FakeFs::new(cx.background());
6500 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6501
6502 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6503 let buffer = project
6504 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6505 .await
6506 .unwrap();
6507
6508 project.update(cx, |project, cx| {
6509 project
6510 .update_buffer_diagnostics(
6511 &buffer,
6512 vec![
6513 DiagnosticEntry {
6514 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6515 diagnostic: Diagnostic {
6516 severity: DiagnosticSeverity::ERROR,
6517 message: "syntax error 1".to_string(),
6518 ..Default::default()
6519 },
6520 },
6521 DiagnosticEntry {
6522 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6523 diagnostic: Diagnostic {
6524 severity: DiagnosticSeverity::ERROR,
6525 message: "syntax error 2".to_string(),
6526 ..Default::default()
6527 },
6528 },
6529 ],
6530 None,
6531 cx,
6532 )
6533 .unwrap();
6534 });
6535
6536 // An empty range is extended forward to include the following character.
6537 // At the end of a line, an empty range is extended backward to include
6538 // the preceding character.
6539 buffer.read_with(cx, |buffer, _| {
6540 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6541 assert_eq!(
6542 chunks
6543 .iter()
6544 .map(|(s, d)| (s.as_str(), *d))
6545 .collect::<Vec<_>>(),
6546 &[
6547 ("let one = ", None),
6548 (";", Some(DiagnosticSeverity::ERROR)),
6549 ("\nlet two =", None),
6550 (" ", Some(DiagnosticSeverity::ERROR)),
6551 ("\nlet three = 3;\n", None)
6552 ]
6553 );
6554 });
6555 }
6556
    // Edits computed by the language server against a stale document version
    // must be transformed through the buffer edits made since that version
    // before they can be applied to the current buffer contents.
    #[gpui::test]
    async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let text = "
            fn a() {
                f1();
            }
            fn b() {
                f2();
            }
            fn c() {
                f3();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        // Remember the version the server observed when the file was opened;
        // the edits below are sent against this (soon-to-be-stale) version.
        let lsp_document_version = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .version;

        // Simulate editing the buffer after the language server computes some edits.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [(
                    Point::new(0, 0)..Point::new(0, 0),
                    "// above first function\n",
                )],
                cx,
            );
            buffer.edit(
                [(
                    Point::new(2, 0)..Point::new(2, 0),
                    "    // inside first function\n",
                )],
                cx,
            );
            buffer.edit(
                [(
                    Point::new(6, 4)..Point::new(6, 4),
                    "// inside second function ",
                )],
                cx,
            );

            assert_eq!(
                buffer.text(),
                "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
                "
                .unindent()
            );
        });

        // The edits below use positions in the *old* document version's
        // coordinate space.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    vec![
                        // replace body of first function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 0),
                                lsp::Position::new(3, 0),
                            ),
                            new_text: "
                                fn a() {
                                    f10();
                                }
                            "
                            .unindent(),
                        },
                        // edit inside second function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(4, 6),
                                lsp::Position::new(4, 6),
                            ),
                            new_text: "00".into(),
                        },
                        // edit inside third function via two distinct edits
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 5),
                            ),
                            new_text: "4000".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 6),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    Some(lsp_document_version),
                    cx,
                )
            })
            .await
            .unwrap();

        // Applying the translated edits produces the intended result while
        // preserving the interim edits made after the version was recorded.
        buffer.update(cx, |buffer, cx| {
            for (range, new_text) in edits {
                buffer.edit([(range, new_text)], cx);
            }
            assert_eq!(
                buffer.text(),
                "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
                "
                .unindent()
            );
        });
    }
6722
    // A language server may express a small change as a very large diff
    // (rewrite + delete of most of the file). `edits_from_lsp` should collapse
    // such a diff back down to the minimal set of buffer edits.
    #[gpui::test]
    async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = "
            use a::b;
            use a::c;

            fn f() {
                b();
                c();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate the language server sending us a small edit in the form of a very large diff.
        // Rust-analyzer does this when performing a merge-imports code action.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    [
                        // Replace the first use statement without editing the semicolon.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 8),
                            ),
                            new_text: "a::{b, c}".into(),
                        },
                        // Reinsert the remainder of the file between the semicolon and the final
                        // newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "\n\n".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "
                                fn f() {
                                    b();
                                    c();
                                }"
                            .unindent(),
                        },
                        // Delete everything after the first newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(1, 0),
                                lsp::Position::new(7, 0),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();

        buffer.update(cx, |buffer, cx| {
            let edits = edits
                .into_iter()
                .map(|(range, text)| {
                    (
                        range.start.to_point(&buffer)..range.end.to_point(&buffer),
                        text,
                    )
                })
                .collect::<Vec<_>>();

            // Only the minimal changes remain: the rewritten import, and the
            // deletion of the region the server deleted-and-reinserted.
            assert_eq!(
                edits,
                [
                    (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                    (Point::new(1, 0)..Point::new(2, 0), "".into())
                ]
            );

            for (range, new_text) in edits {
                buffer.edit([(range, new_text)], cx);
            }
            assert_eq!(
                buffer.text(),
                "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
                "
                .unindent()
            );
        });
    }
6841
6842 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6843 buffer: &Buffer,
6844 range: Range<T>,
6845 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6846 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6847 for chunk in buffer.snapshot().chunks(range, true) {
6848 if chunks.last().map_or(false, |prev_chunk| {
6849 prev_chunk.1 == chunk.diagnostic_severity
6850 }) {
6851 chunks.last_mut().unwrap().0.push_str(chunk.text);
6852 } else {
6853 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6854 }
6855 }
6856 chunks
6857 }
6858
6859 #[gpui::test]
6860 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6861 let dir = temp_tree(json!({
6862 "root": {
6863 "dir1": {},
6864 "dir2": {
6865 "dir3": {}
6866 }
6867 }
6868 }));
6869
6870 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6871 let cancel_flag = Default::default();
6872 let results = project
6873 .read_with(cx, |project, cx| {
6874 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6875 })
6876 .await;
6877
6878 assert!(results.is_empty());
6879 }
6880
    // Go-to-definition into a file outside the project's worktrees: the target
    // file is opened via an invisible, on-the-fly worktree, which is released
    // once the last handle to the definition is dropped.
    #[gpui::test(iterations = 10)]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        // Only `b.rs` becomes a (visible) worktree; `a.rs` merely exists on
        // disk next to it.
        let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        // The fake server resolves the symbol at offset 22 of `b.rs` to a
        // location inside `a.rs`, which is outside the project.
        let fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                lsp::Location::new(
                    lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                    lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                ),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            // The definition target was added as a second, non-visible worktree.
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        // With the definition dropped, the invisible worktree is gone.
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        // Lists each worktree's absolute path alongside its visibility flag.
        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
6975
    // When a completion item carries no explicit text edit, the replaced range
    // must be inferred from the word preceding the cursor.
    #[gpui::test]
    async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
        let mut language = Language::new(
            LanguageConfig {
                name: "TypeScript".into(),
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_typescript::language_typescript()),
        );
        let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.ts": "",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
            .await
            .unwrap();

        let fake_server = fake_language_servers.next().await.unwrap();

        // Request completions at the end of "let a = b.fqn".
        let text = "let a = b.fqn";
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), cx)
        });

        // The server replies with an item that has `insert_text` but no text
        // edit, so it supplies no range of its own.
        fake_server
            .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
                Ok(Some(lsp::CompletionResponse::Array(vec![
                    lsp::CompletionItem {
                        label: "fullyQualifiedName?".into(),
                        insert_text: Some("fullyQualifiedName".into()),
                        ..Default::default()
                    },
                ])))
            })
            .next()
            .await;
        let completions = completions.await.unwrap();
        let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "fullyQualifiedName");
        // The inferred old range covers the partial word "fqn" before the cursor.
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
7033
    // Applying a code action that carries a command (and no edits) requires
    // resolving the action, executing its command, and capturing the workspace
    // edit the server sends back during command execution.
    #[gpui::test(iterations = 10)]
    async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
        let mut language = Language::new(
            LanguageConfig {
                name: "TypeScript".into(),
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.ts": "a",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
            .await
            .unwrap();

        let fake_server = fake_language_servers.next().await.unwrap();

        // Language server returns code actions that contain commands, and not edits.
        let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
        fake_server
            .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                Ok(Some(vec![
                    lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                        title: "The code action".into(),
                        command: Some(lsp::Command {
                            title: "The command".into(),
                            command: "_the/command".into(),
                            arguments: Some(vec![json!("the-argument")]),
                        }),
                        ..Default::default()
                    }),
                    lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                        title: "two".into(),
                        ..Default::default()
                    }),
                ]))
            })
            .next()
            .await;

        // Apply the first action — the one carrying a command.
        let action = actions.await.unwrap()[0].clone();
        let apply = project.update(cx, |project, cx| {
            project.apply_code_action(buffer.clone(), action, true, cx)
        });

        // Resolving the code action does not populate its edits. In absence of
        // edits, we must execute the given command.
        fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
            |action, _| async move { Ok(action) },
        );

        // While executing the command, the language server sends the editor
        // a `workspaceEdit` request.
        fake_server
            .handle_request::<lsp::request::ExecuteCommand, _, _>({
                let fake = fake_server.clone();
                move |params, _| {
                    assert_eq!(params.command, "_the/command");
                    let fake = fake.clone();
                    async move {
                        fake.server
                            .request::<lsp::request::ApplyWorkspaceEdit>(
                                lsp::ApplyWorkspaceEditParams {
                                    label: None,
                                    edit: lsp::WorkspaceEdit {
                                        changes: Some(
                                            [(
                                                lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                                vec![lsp::TextEdit {
                                                    range: lsp::Range::new(
                                                        lsp::Position::new(0, 0),
                                                        lsp::Position::new(0, 0),
                                                    ),
                                                    new_text: "X".into(),
                                                }],
                                            )]
                                            .into_iter()
                                            .collect(),
                                        ),
                                        ..Default::default()
                                    },
                                },
                            )
                            .await
                            .unwrap();
                        Ok(Some(json!(null)))
                    }
                }
            })
            .next()
            .await;

        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspaceEdit` request.
        let transaction = apply.await.unwrap();
        assert!(transaction.0.contains_key(&buffer));
        buffer.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "Xa");
            buffer.undo(cx);
            assert_eq!(buffer.text(), "a");
        });
    }
7149
7150 #[gpui::test]
7151 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7152 let fs = FakeFs::new(cx.background());
7153 fs.insert_tree(
7154 "/dir",
7155 json!({
7156 "file1": "the old contents",
7157 }),
7158 )
7159 .await;
7160
7161 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7162 let buffer = project
7163 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7164 .await
7165 .unwrap();
7166 buffer
7167 .update(cx, |buffer, cx| {
7168 assert_eq!(buffer.text(), "the old contents");
7169 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7170 buffer.save(cx)
7171 })
7172 .await
7173 .unwrap();
7174
7175 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7176 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7177 }
7178
7179 #[gpui::test]
7180 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7181 let fs = FakeFs::new(cx.background());
7182 fs.insert_tree(
7183 "/dir",
7184 json!({
7185 "file1": "the old contents",
7186 }),
7187 )
7188 .await;
7189
7190 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7191 let buffer = project
7192 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7193 .await
7194 .unwrap();
7195 buffer
7196 .update(cx, |buffer, cx| {
7197 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7198 buffer.save(cx)
7199 })
7200 .await
7201 .unwrap();
7202
7203 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7204 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7205 }
7206
7207 #[gpui::test]
7208 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7209 let fs = FakeFs::new(cx.background());
7210 fs.insert_tree("/dir", json!({})).await;
7211
7212 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7213 let buffer = project.update(cx, |project, cx| {
7214 project.create_buffer("", None, cx).unwrap()
7215 });
7216 buffer.update(cx, |buffer, cx| {
7217 buffer.edit([(0..0, "abc")], cx);
7218 assert!(buffer.is_dirty());
7219 assert!(!buffer.has_conflict());
7220 });
7221 project
7222 .update(cx, |project, cx| {
7223 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7224 })
7225 .await
7226 .unwrap();
7227 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7228 buffer.read_with(cx, |buffer, cx| {
7229 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7230 assert!(!buffer.is_dirty());
7231 assert!(!buffer.has_conflict());
7232 });
7233
7234 let opened_buffer = project
7235 .update(cx, |project, cx| {
7236 project.open_local_buffer("/dir/file1", cx)
7237 })
7238 .await
7239 .unwrap();
7240 assert_eq!(opened_buffer, buffer);
7241 }
7242
7243 #[gpui::test(retries = 5)]
7244 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7245 let dir = temp_tree(json!({
7246 "a": {
7247 "file1": "",
7248 "file2": "",
7249 "file3": "",
7250 },
7251 "b": {
7252 "c": {
7253 "file4": "",
7254 "file5": "",
7255 }
7256 }
7257 }));
7258
7259 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7260 let rpc = project.read_with(cx, |p, _| p.client.clone());
7261
7262 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7263 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7264 async move { buffer.await.unwrap() }
7265 };
7266 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7267 project.read_with(cx, |project, cx| {
7268 let tree = project.worktrees(cx).next().unwrap();
7269 tree.read(cx)
7270 .entry_for_path(path)
7271 .expect(&format!("no entry for path {}", path))
7272 .id
7273 })
7274 };
7275
7276 let buffer2 = buffer_for_path("a/file2", cx).await;
7277 let buffer3 = buffer_for_path("a/file3", cx).await;
7278 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7279 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7280
7281 let file2_id = id_for_path("a/file2", &cx);
7282 let file3_id = id_for_path("a/file3", &cx);
7283 let file4_id = id_for_path("b/c/file4", &cx);
7284
7285 // Create a remote copy of this worktree.
7286 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7287 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7288 let (remote, load_task) = cx.update(|cx| {
7289 Worktree::remote(
7290 1,
7291 1,
7292 initial_snapshot.to_proto(&Default::default(), true),
7293 rpc.clone(),
7294 cx,
7295 )
7296 });
7297 // tree
7298 load_task.await;
7299
7300 cx.read(|cx| {
7301 assert!(!buffer2.read(cx).is_dirty());
7302 assert!(!buffer3.read(cx).is_dirty());
7303 assert!(!buffer4.read(cx).is_dirty());
7304 assert!(!buffer5.read(cx).is_dirty());
7305 });
7306
7307 // Rename and delete files and directories.
7308 tree.flush_fs_events(&cx).await;
7309 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7310 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7311 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7312 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7313 tree.flush_fs_events(&cx).await;
7314
7315 let expected_paths = vec![
7316 "a",
7317 "a/file1",
7318 "a/file2.new",
7319 "b",
7320 "d",
7321 "d/file3",
7322 "d/file4",
7323 ];
7324
7325 cx.read(|app| {
7326 assert_eq!(
7327 tree.read(app)
7328 .paths()
7329 .map(|p| p.to_str().unwrap())
7330 .collect::<Vec<_>>(),
7331 expected_paths
7332 );
7333
7334 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7335 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7336 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7337
7338 assert_eq!(
7339 buffer2.read(app).file().unwrap().path().as_ref(),
7340 Path::new("a/file2.new")
7341 );
7342 assert_eq!(
7343 buffer3.read(app).file().unwrap().path().as_ref(),
7344 Path::new("d/file3")
7345 );
7346 assert_eq!(
7347 buffer4.read(app).file().unwrap().path().as_ref(),
7348 Path::new("d/file4")
7349 );
7350 assert_eq!(
7351 buffer5.read(app).file().unwrap().path().as_ref(),
7352 Path::new("b/c/file5")
7353 );
7354
7355 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7356 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7357 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7358 assert!(buffer5.read(app).file().unwrap().is_deleted());
7359 });
7360
7361 // Update the remote worktree. Check that it becomes consistent with the
7362 // local worktree.
7363 remote.update(cx, |remote, cx| {
7364 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7365 &initial_snapshot,
7366 1,
7367 1,
7368 true,
7369 );
7370 remote
7371 .as_remote_mut()
7372 .unwrap()
7373 .snapshot
7374 .apply_remote_update(update_message)
7375 .unwrap();
7376
7377 assert_eq!(
7378 remote
7379 .paths()
7380 .map(|p| p.to_str().unwrap())
7381 .collect::<Vec<_>>(),
7382 expected_paths
7383 );
7384 });
7385 }
7386
7387 #[gpui::test]
7388 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7389 let fs = FakeFs::new(cx.background());
7390 fs.insert_tree(
7391 "/dir",
7392 json!({
7393 "a.txt": "a-contents",
7394 "b.txt": "b-contents",
7395 }),
7396 )
7397 .await;
7398
7399 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7400
7401 // Spawn multiple tasks to open paths, repeating some paths.
7402 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7403 (
7404 p.open_local_buffer("/dir/a.txt", cx),
7405 p.open_local_buffer("/dir/b.txt", cx),
7406 p.open_local_buffer("/dir/a.txt", cx),
7407 )
7408 });
7409
7410 let buffer_a_1 = buffer_a_1.await.unwrap();
7411 let buffer_a_2 = buffer_a_2.await.unwrap();
7412 let buffer_b = buffer_b.await.unwrap();
7413 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7414 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7415
7416 // There is only one buffer per path.
7417 let buffer_a_id = buffer_a_1.id();
7418 assert_eq!(buffer_a_2.id(), buffer_a_id);
7419
7420 // Open the same path again while it is still open.
7421 drop(buffer_a_1);
7422 let buffer_a_3 = project
7423 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7424 .await
7425 .unwrap();
7426
7427 // There's still only one buffer per path.
7428 assert_eq!(buffer_a_3.id(), buffer_a_id);
7429 }
7430
    // Exercises dirty-state tracking and the events a buffer emits across
    // edits, saves, and on-disk file deletions.
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "abc",
                "file2": "def",
                "file3": "ghi",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                // Record every event except `Operation`, which is not relevant
                // to dirty-state tracking here.
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit([(1..2, "")], cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a successful save by reporting the current version as
            // saved.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit([(1..1, "B")], cx);
            buffer.edit([(2..2, "D")], cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs.remove_file("/dir/file2".as_ref(), Default::default())
            .await
            .unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        buffer3.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "x")], cx);
        });
        events.borrow_mut().clear();
        fs.remove_file("/dir/file3".as_ref(), Default::default())
            .await
            .unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
7564
7565 #[gpui::test]
7566 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7567 let initial_contents = "aaa\nbbbbb\nc\n";
7568 let fs = FakeFs::new(cx.background());
7569 fs.insert_tree(
7570 "/dir",
7571 json!({
7572 "the-file": initial_contents,
7573 }),
7574 )
7575 .await;
7576 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7577 let buffer = project
7578 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7579 .await
7580 .unwrap();
7581
7582 let anchors = (0..3)
7583 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7584 .collect::<Vec<_>>();
7585
7586 // Change the file on disk, adding two new lines of text, and removing
7587 // one line.
7588 buffer.read_with(cx, |buffer, _| {
7589 assert!(!buffer.is_dirty());
7590 assert!(!buffer.has_conflict());
7591 });
7592 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7593 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7594 .await
7595 .unwrap();
7596
7597 // Because the buffer was not modified, it is reloaded from disk. Its
7598 // contents are edited according to the diff between the old and new
7599 // file contents.
7600 buffer
7601 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7602 .await;
7603
7604 buffer.update(cx, |buffer, _| {
7605 assert_eq!(buffer.text(), new_contents);
7606 assert!(!buffer.is_dirty());
7607 assert!(!buffer.has_conflict());
7608
7609 let anchor_positions = anchors
7610 .iter()
7611 .map(|anchor| anchor.to_point(&*buffer))
7612 .collect::<Vec<_>>();
7613 assert_eq!(
7614 anchor_positions,
7615 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7616 );
7617 });
7618
7619 // Modify the buffer
7620 buffer.update(cx, |buffer, cx| {
7621 buffer.edit([(0..0, " ")], cx);
7622 assert!(buffer.is_dirty());
7623 assert!(!buffer.has_conflict());
7624 });
7625
7626 // Change the file on disk again, adding blank lines to the beginning.
7627 fs.save(
7628 "/dir/the-file".as_ref(),
7629 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7630 )
7631 .await
7632 .unwrap();
7633
7634 // Because the buffer is modified, it doesn't reload from disk, but is
7635 // marked as having a conflict.
7636 buffer
7637 .condition(&cx, |buffer, _| buffer.has_conflict())
7638 .await;
7639 }
7640
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Build a single publishDiagnostics message containing two logical
        // diagnostic groups:
        // - "error 1" (WARNING) with one related hint. The server also reports
        //   that hint as a standalone HINT diagnostic whose related_information
        //   ("original diagnostic") points back at the primary's range.
        // - "error 2" (ERROR) with two related hints, likewise duplicated as
        //   standalone HINT diagnostics pointing back at the primary.
        // The assertions below verify that `update_diagnostics` collapses these
        // into group 0 (warning + 1 hint) and group 1 (error + 2 hints).
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                // Primary diagnostic of group 0.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                // Standalone copy of "error 1 hint 1"; its related information
                // points back at the primary warning's range.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Primary diagnostic of group 1, referencing both of its hints.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                // Standalone copy of "error 2 hint 1", pointing back at the
                // primary error's range.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Standalone copy of "error 2 hint 2", pointing back at the
                // primary error's range.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All five diagnostics survive, each tagged with a group id and an
        // is_primary flag derived from the related-information links above.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Group 0 can be retrieved on its own: the primary warning plus its
        // single hint.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        // Group 1 contains the primary error and both of its hints.
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
7891
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Rust language whose fake LSP server advertises rename support with
        // prepareRename enabled.
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        // `one.rs` defines ONE; `two.rs` references it twice, so a rename of
        // ONE must edit both files.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        // Opening the buffer starts the fake language server.
        let fake_server = fake_servers.next().await.unwrap();

        // Issue prepareRename at offset 7 (inside `ONE`), then answer it from
        // the fake server with the range covering `ONE` (columns 6..9).
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        // The project converts the LSP range into buffer offsets.
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Perform the actual rename to THREE. The fake server responds with a
        // workspace edit touching both files: one edit in one.rs, two in
        // two.rs.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The rename returns a transaction covering both edited buffers: the
        // already-open one.rs buffer, plus a newly-opened buffer for two.rs.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        // The remaining entry is the two.rs buffer, with both references
        // renamed.
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
8030
8031 #[gpui::test]
8032 async fn test_search(cx: &mut gpui::TestAppContext) {
8033 let fs = FakeFs::new(cx.background());
8034 fs.insert_tree(
8035 "/dir",
8036 json!({
8037 "one.rs": "const ONE: usize = 1;",
8038 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8039 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8040 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8041 }),
8042 )
8043 .await;
8044 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8045 assert_eq!(
8046 search(&project, SearchQuery::text("TWO", false, true), cx)
8047 .await
8048 .unwrap(),
8049 HashMap::from_iter([
8050 ("two.rs".to_string(), vec![6..9]),
8051 ("three.rs".to_string(), vec![37..40])
8052 ])
8053 );
8054
8055 let buffer_4 = project
8056 .update(cx, |project, cx| {
8057 project.open_local_buffer("/dir/four.rs", cx)
8058 })
8059 .await
8060 .unwrap();
8061 buffer_4.update(cx, |buffer, cx| {
8062 let text = "two::TWO";
8063 buffer.edit([(20..28, text), (31..43, text)], cx);
8064 });
8065
8066 assert_eq!(
8067 search(&project, SearchQuery::text("TWO", false, true), cx)
8068 .await
8069 .unwrap(),
8070 HashMap::from_iter([
8071 ("two.rs".to_string(), vec![6..9]),
8072 ("three.rs".to_string(), vec![37..40]),
8073 ("four.rs".to_string(), vec![25..28, 36..39])
8074 ])
8075 );
8076
8077 async fn search(
8078 project: &ModelHandle<Project>,
8079 query: SearchQuery,
8080 cx: &mut gpui::TestAppContext,
8081 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8082 let results = project
8083 .update(cx, |project, cx| project.search(query, cx))
8084 .await?;
8085
8086 Ok(results
8087 .into_iter()
8088 .map(|(buffer, ranges)| {
8089 buffer.read_with(cx, |buffer, _| {
8090 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8091 let ranges = ranges
8092 .into_iter()
8093 .map(|range| range.to_offset(buffer))
8094 .collect::<Vec<_>>();
8095 (path, ranges)
8096 })
8097 })
8098 .collect())
8099 }
8100 }
8101}