1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
/// An entity that may correspond to an entry in a project's worktree.
pub trait Item: Entity {
    /// Returns the id of the project entry this item represents, if it has one.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
64
/// Application-wide registry of projects, backed by the local database.
pub struct ProjectStore {
    db: Arc<Db>,
    // Weak handles so that the store does not keep dropped projects alive.
    projects: Vec<WeakModelHandle<Project>>,
}
69
/// A collection of worktrees plus the collaboration and language-server
/// state associated with them. A project is either local (hosted by this
/// client) or remote (joined from another host) — see `ProjectClientState`.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The entry most recently activated in the UI, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Fully-initialized language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight startup tasks for servers that have been requested but may
    // not be running yet.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Sender/receiver pair used to signal whenever a buffer finishes opening.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffers this host has sent to each guest, keyed by peer id.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Buffers currently being loaded, so concurrent opens of the same path
    // can await a single load.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All buffers that have been opened, keyed by remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Historical snapshots of each buffer, keyed by buffer id, used when
    // translating positions in LSP requests/responses.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value identifying this project instance; initialized from entropy.
    nonce: u128,
}
103
/// Errors that can occur while attempting to join a remote project.
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    // Any other failure (network, protocol, etc.) is wrapped transparently.
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}
115
/// The state of a buffer tracked in `Project::opened_buffers`.
enum OpenBuffer {
    /// Kept alive by the project (e.g. while the project is shared).
    Strong(ModelHandle<Buffer>),
    /// Allowed to drop when no other handle remains.
    Weak(WeakModelHandle<Buffer>),
    /// Still loading; operations received meanwhile are buffered here.
    Loading(Vec<Operation>),
}

/// A worktree handle that is strong while shared/visible and weak otherwise.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
126
/// Whether this project is hosted locally or joined from a remote host,
/// along with the per-role bookkeeping.
enum ProjectClientState {
    Local {
        // True once the project has been shared with guests.
        is_shared: bool,
        // The server-assigned project id, if registered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Whether the project is visible to contacts.
        public_tx: watch::Sender<bool>,
        public_rx: watch::Receiver<bool>,
        // Background task that registers/unregisters the project as
        // connection status and publicity change.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        // Set once the host stops sharing or goes away.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Background task that detects disconnection from the host.
        _detect_unshare_task: Task<Option<()>>,
    },
}
143
/// Another participant collaborating on this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
150
/// Events emitted by a `Project` for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    // Lifecycle of a disk-based diagnostics pass (e.g. cargo check).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    // Join-request flow for public projects.
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}
165
/// Externally observable status of one running language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // Progress tokens currently reported by the server, keyed by token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    // Counter of outstanding disk-based diagnostic updates; signed so that
    // out-of-order start/finish messages don't underflow.
    pub pending_diagnostic_updates: isize,
}

/// One unit of in-progress work reported by a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // Not serializable; only used to order/throttle progress updates.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
180
/// A path to a file or directory, relative to a specific worktree root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

/// Aggregate error/warning counts for the diagnostics of one path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

/// A range within a specific buffer, anchored so it survives edits.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

/// A highlighted range produced by an LSP `documentHighlight` request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

/// A workspace symbol returned from a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // The worktree whose language server produced this symbol; may differ
    // from `worktree_id` when the symbol lives outside that worktree.
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // SHA-256 digest used to validate the symbol when it is re-requested.
    pub signature: [u8; 32],
}

/// A set of edits spanning multiple buffers, grouped as one undoable unit.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
220
221impl DiagnosticSummary {
222 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
223 let mut this = Self {
224 error_count: 0,
225 warning_count: 0,
226 };
227
228 for entry in diagnostics {
229 if entry.diagnostic.is_primary {
230 match entry.diagnostic.severity {
231 DiagnosticSeverity::ERROR => this.error_count += 1,
232 DiagnosticSeverity::WARNING => this.warning_count += 1,
233 _ => {}
234 }
235 }
236 }
237
238 this
239 }
240
241 pub fn is_empty(&self) -> bool {
242 self.error_count == 0 && self.warning_count == 0
243 }
244
245 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
246 proto::DiagnosticSummary {
247 path: path.to_string_lossy().to_string(),
248 error_count: self.error_count as u32,
249 warning_count: self.warning_count as u32,
250 }
251 }
252}
253
/// A unique identifier for an entry (file or directory) within a project.
///
/// Ids are handed out from a shared atomic counter on the local side, and are
/// round-tripped through `u64` when sent over the wire.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// The greatest representable entry id.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let next = counter.fetch_add(1, SeqCst);
        Self(next)
    }

    /// Reconstructs an id received in a protobuf message.
    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    /// Converts this id into the form used in protobuf messages.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw numeric value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
276
277impl Project {
    /// Registers all of the RPC message and request handlers that a project
    /// responds to. Called once at startup, before any project exists.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages (no response expected).
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single parameterized handler.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
316
    /// Creates a project hosted by this client, optionally `public` (visible
    /// to contacts). Spawns a background task that registers the project with
    /// the server while connected and public, and unregisters it otherwise.
    pub fn local(
        public: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (public_tx, public_rx) = watch::channel_with(public);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let public_rx = public_rx.clone();
                move |this, mut cx| async move {
                    // Re-evaluate registration whenever either the connection
                    // status or the publicity flag changes.
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(public_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        // Stop the task when the project has been dropped.
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *public_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    public_tx,
                    public_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
391
    /// Joins the project with the given server-side id as a guest.
    ///
    /// Connects and authenticates, sends a `JoinProject` request, and — if the
    /// host accepts — builds a `Project` in the `Remote` state, mirroring the
    /// host's worktrees, language-server statuses, and collaborators.
    ///
    /// # Errors
    /// Returns the specific `JoinProjectError` variant when the host declines,
    /// has closed the project, or went offline; any transport/protocol failure
    /// is wrapped in `JoinProjectError::Other`.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // Unpack the accept/decline variant, mapping each decline reason to
        // its dedicated error.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Instantiate remote worktrees; their load tasks run in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch the user records for all collaborators before building the
        // collaborator map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
527
    /// Test-only constructor: builds a local project over `fs` with a fake
    /// HTTP client and database, adds a worktree for each of `root_paths`,
    /// and waits for the initial scans to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has finished its initial scan so tests
            // see a fully-populated tree.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
554
555 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
556 self.opened_buffers
557 .get(&remote_id)
558 .and_then(|buffer| buffer.upgrade(cx))
559 }
560
    /// The registry of languages available to this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    /// The RPC client used to talk to the collaboration server.
    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    /// The store of known users and contacts.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    /// The application-wide store that tracks all projects.
    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }
576
577 #[cfg(any(test, feature = "test-support"))]
578 pub fn check_invariants(&self, cx: &AppContext) {
579 if self.is_local() {
580 let mut worktree_root_paths = HashMap::default();
581 for worktree in self.worktrees(cx) {
582 let worktree = worktree.read(cx);
583 let abs_path = worktree.as_local().unwrap().abs_path().clone();
584 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
585 assert_eq!(
586 prev_worktree_id,
587 None,
588 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
589 abs_path,
590 worktree.id(),
591 prev_worktree_id
592 )
593 }
594 } else {
595 let replica_id = self.replica_id();
596 for buffer in self.opened_buffers.values() {
597 if let Some(buffer) = buffer.upgrade(cx) {
598 let buffer = buffer.read(cx);
599 assert_eq!(
600 buffer.deferred_ops_len(),
601 0,
602 "replica {}, buffer {} has deferred operations",
603 replica_id,
604 buffer.remote_id()
605 );
606 }
607 }
608 }
609 }
610
611 #[cfg(any(test, feature = "test-support"))]
612 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
613 let path = path.into();
614 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
615 self.opened_buffers.iter().any(|(_, buffer)| {
616 if let Some(buffer) = buffer.upgrade(cx) {
617 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
618 if file.worktree == worktree && file.path() == &path.path {
619 return true;
620 }
621 }
622 }
623 false
624 })
625 } else {
626 false
627 }
628 }
629
    /// The file system abstraction this project reads and writes through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
633
    /// Changes whether this project is visible to contacts. Only meaningful
    /// for local projects; a no-op for remote ones.
    pub fn set_public(&mut self, is_public: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
            *public_tx.borrow_mut() = is_public;
            // Propagate the new publicity to the server and observers.
            self.metadata_changed(cx);
        }
    }
640
    /// Whether the project is visible to contacts. Remote projects are
    /// considered public by definition (a guest already joined them).
    pub fn is_public(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
            ProjectClientState::Remote { .. } => true,
        }
    }
647
    /// Stops sharing and removes this local project's registration from the
    /// server, clearing its remote id once the server has acknowledged.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of public projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.subscriptions.clear();
                        this.metadata_changed(cx);
                    });
                    response.map(drop)
                });
            }
        }
        // Remote projects and unregistered local projects: nothing to do.
        Task::ready(Ok(()))
    }
685
    /// Registers this local project with the server, obtaining a remote id.
    /// No-op when the project already has one.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                // Start routing RPC messages addressed to this project id.
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }
709
    /// The server-assigned project id, if one has been assigned yet.
    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }
716
    /// Resolves with the project's remote id, waiting for registration to
    /// complete if a local project doesn't have one yet. Remote projects
    /// resolve immediately.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: poll the watch until an id is assigned.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
739
740 pub fn shared_remote_id(&self) -> Option<u64> {
741 match &self.client_state {
742 ProjectClientState::Local {
743 remote_id_rx,
744 is_shared,
745 ..
746 } => {
747 if *is_shared {
748 *remote_id_rx.borrow()
749 } else {
750 None
751 }
752 }
753 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
754 }
755 }
756
    /// This participant's replica id. The host is always replica 0.
    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }
763
    /// Called whenever project metadata (worktrees, publicity, remote id)
    /// changes. Pushes an `UpdateProject` message to the server for
    /// registered public local projects and notifies observers.
    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            public_rx,
            ..
        } = &self.client_state
        {
            // Only registered *and* public projects are advertised.
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *public_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            cx.notify();
        }
    }
792
    /// The current collaborators on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
796
    /// Iterates over all worktrees that are still alive, skipping any whose
    /// weak handles can no longer be upgraded.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
805
806 pub fn visible_worktrees<'a>(
807 &'a self,
808 cx: &'a AppContext,
809 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
810 self.worktrees.iter().filter_map(|worktree| {
811 worktree.upgrade(cx).and_then(|worktree| {
812 if worktree.read(cx).is_visible() {
813 Some(worktree)
814 } else {
815 None
816 }
817 })
818 })
819 }
820
    /// The root directory names of all visible worktrees.
    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }
825
826 pub fn worktree_for_id(
827 &self,
828 id: WorktreeId,
829 cx: &AppContext,
830 ) -> Option<ModelHandle<Worktree>> {
831 self.worktrees(cx)
832 .find(|worktree| worktree.read(cx).id() == id)
833 }
834
    /// Finds the worktree containing the given project entry, if any.
    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }
843
    /// Returns the id of the worktree containing the given entry, if any.
    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }
852
853 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
854 paths.iter().all(|path| self.contains_path(&path, cx))
855 }
856
857 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
858 for worktree in self.worktrees(cx) {
859 let worktree = worktree.read(cx).as_local();
860 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
861 return true;
862 }
863 }
864 false
865 }
866
    /// Creates a file or directory at the given project path.
    ///
    /// Local projects mutate the worktree directly; remote projects send a
    /// `CreateProjectEntry` request to the host and then splice the returned
    /// entry into the local replica of the worktree. Returns `None` when the
    /// target worktree no longer exists.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        // The scan id orders this insertion relative to the
                        // host's own worktree scans.
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
909
    /// Copies an existing entry to `new_path` within its worktree.
    ///
    /// Local projects perform the copy directly; remote projects delegate to
    /// the host via `CopyProjectEntry`. Returns `None` when the entry's
    /// worktree no longer exists.
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
952
    /// Renames (moves) an existing entry to `new_path` within its worktree.
    ///
    /// Local projects perform the rename directly; remote projects delegate
    /// to the host via `RenameProjectEntry`. Returns `None` when the entry's
    /// worktree no longer exists.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
995
    /// Deletes an existing entry from its worktree.
    ///
    /// Local projects delete directly; remote projects delegate to the host
    /// via `DeleteProjectEntry`. Returns `None` when the entry's worktree no
    /// longer exists.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1028
    /// Starts sharing this local project with guests.
    ///
    /// Upgrades all buffer and worktree handles to strong ones so they stay
    /// alive for the duration of the share, then shares each worktree with
    /// the server. Fails if the project has no remote id yet, or if called
    /// on a remote project.
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        // Pin all still-alive buffers so guests can rely on them existing.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // Loading buffers only exist on guests; a host never has them.
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        // Likewise pin all still-alive worktrees.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
1089
1090 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1091 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1092 if !*is_shared {
1093 return;
1094 }
1095
1096 *is_shared = false;
1097 self.collaborators.clear();
1098 self.shared_buffers.clear();
1099 for worktree_handle in self.worktrees.iter_mut() {
1100 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1101 let is_visible = worktree.update(cx, |worktree, _| {
1102 worktree.as_local_mut().unwrap().unshare();
1103 worktree.is_visible()
1104 });
1105 if !is_visible {
1106 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1107 }
1108 }
1109 }
1110
1111 for open_buffer in self.opened_buffers.values_mut() {
1112 match open_buffer {
1113 OpenBuffer::Strong(buffer) => {
1114 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1115 }
1116 _ => {}
1117 }
1118 }
1119
1120 cx.notify();
1121 } else {
1122 log::error!("attempted to unshare a remote project");
1123 }
1124 }
1125
1126 pub fn respond_to_join_request(
1127 &mut self,
1128 requester_id: u64,
1129 allow: bool,
1130 cx: &mut ModelContext<Self>,
1131 ) {
1132 if let Some(project_id) = self.remote_id() {
1133 let share = self.share(cx);
1134 let client = self.client.clone();
1135 cx.foreground()
1136 .spawn(async move {
1137 share.await?;
1138 client.send(proto::RespondToJoinProjectRequest {
1139 requester_id,
1140 project_id,
1141 allow,
1142 })
1143 })
1144 .detach_and_log_err(cx);
1145 }
1146 }
1147
1148 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1149 if let ProjectClientState::Remote {
1150 sharing_has_stopped,
1151 ..
1152 } = &mut self.client_state
1153 {
1154 *sharing_has_stopped = true;
1155 self.collaborators.clear();
1156 cx.notify();
1157 }
1158 }
1159
1160 pub fn is_read_only(&self) -> bool {
1161 match &self.client_state {
1162 ProjectClientState::Local { .. } => false,
1163 ProjectClientState::Remote {
1164 sharing_has_stopped,
1165 ..
1166 } => *sharing_has_stopped,
1167 }
1168 }
1169
1170 pub fn is_local(&self) -> bool {
1171 match &self.client_state {
1172 ProjectClientState::Local { .. } => true,
1173 ProjectClientState::Remote { .. } => false,
1174 }
1175 }
1176
1177 pub fn is_remote(&self) -> bool {
1178 !self.is_local()
1179 }
1180
1181 pub fn create_buffer(
1182 &mut self,
1183 text: &str,
1184 language: Option<Arc<Language>>,
1185 cx: &mut ModelContext<Self>,
1186 ) -> Result<ModelHandle<Buffer>> {
1187 if self.is_remote() {
1188 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1189 }
1190
1191 let buffer = cx.add_model(|cx| {
1192 Buffer::new(self.replica_id(), text, cx)
1193 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1194 });
1195 self.register_buffer(&buffer, cx)?;
1196 Ok(buffer)
1197 }
1198
1199 pub fn open_path(
1200 &mut self,
1201 path: impl Into<ProjectPath>,
1202 cx: &mut ModelContext<Self>,
1203 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1204 let task = self.open_buffer(path, cx);
1205 cx.spawn_weak(|_, cx| async move {
1206 let buffer = task.await?;
1207 let project_entry_id = buffer
1208 .read_with(&cx, |buffer, cx| {
1209 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1210 })
1211 .ok_or_else(|| anyhow!("no project entry"))?;
1212 Ok((project_entry_id, buffer.into()))
1213 })
1214 }
1215
1216 pub fn open_local_buffer(
1217 &mut self,
1218 abs_path: impl AsRef<Path>,
1219 cx: &mut ModelContext<Self>,
1220 ) -> Task<Result<ModelHandle<Buffer>>> {
1221 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1222 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1223 } else {
1224 Task::ready(Err(anyhow!("no such path")))
1225 }
1226 }
1227
    /// Opens the buffer at `path`, deduplicating concurrent requests.
    ///
    /// Resolution order: an already-open buffer is returned immediately; a
    /// load currently in flight for the same path is awaited; otherwise a
    /// fresh load is started (locally or over RPC depending on the worktree
    /// kind) and its outcome is broadcast to all waiters through a watch
    /// channel.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to every waiter.
                    // The error is wrapped in `Arc` so it can be cloned into
                    // each waiting task.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load task publishes a result; the watch starts out
        // holding `None`.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1288
1289 fn open_local_buffer_internal(
1290 &mut self,
1291 path: &Arc<Path>,
1292 worktree: &ModelHandle<Worktree>,
1293 cx: &mut ModelContext<Self>,
1294 ) -> Task<Result<ModelHandle<Buffer>>> {
1295 let load_buffer = worktree.update(cx, |worktree, cx| {
1296 let worktree = worktree.as_local_mut().unwrap();
1297 worktree.load_buffer(path, cx)
1298 });
1299 cx.spawn(|this, mut cx| async move {
1300 let buffer = load_buffer.await?;
1301 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1302 Ok(buffer)
1303 })
1304 }
1305
1306 fn open_remote_buffer_internal(
1307 &mut self,
1308 path: &Arc<Path>,
1309 worktree: &ModelHandle<Worktree>,
1310 cx: &mut ModelContext<Self>,
1311 ) -> Task<Result<ModelHandle<Buffer>>> {
1312 let rpc = self.client.clone();
1313 let project_id = self.remote_id().unwrap();
1314 let remote_worktree_id = worktree.read(cx).id();
1315 let path = path.clone();
1316 let path_string = path.to_string_lossy().to_string();
1317 cx.spawn(|this, mut cx| async move {
1318 let response = rpc
1319 .request(proto::OpenBufferByPath {
1320 project_id,
1321 worktree_id: remote_worktree_id.to_proto(),
1322 path: path_string,
1323 })
1324 .await?;
1325 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1326 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1327 .await
1328 })
1329 }
1330
    /// Opens a buffer for a file URI supplied by a language server (e.g. the
    /// target of a go-to-definition that lies outside the project).
    ///
    /// If no existing local worktree covers the path, an invisible worktree
    /// rooted at the file is created and the originating server is registered
    /// for it, so the buffer keeps its LSP features.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // `false`: the new worktree should not be visible in the UI.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                // The worktree is rooted at the file itself, so the relative
                // path within it is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1369
1370 pub fn open_buffer_by_id(
1371 &mut self,
1372 id: u64,
1373 cx: &mut ModelContext<Self>,
1374 ) -> Task<Result<ModelHandle<Buffer>>> {
1375 if let Some(buffer) = self.buffer_for_id(id, cx) {
1376 Task::ready(Ok(buffer))
1377 } else if self.is_local() {
1378 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1379 } else if let Some(project_id) = self.remote_id() {
1380 let request = self
1381 .client
1382 .request(proto::OpenBufferById { project_id, id });
1383 cx.spawn(|this, mut cx| async move {
1384 let buffer = request
1385 .await?
1386 .buffer
1387 .ok_or_else(|| anyhow!("invalid buffer"))?;
1388 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1389 .await
1390 })
1391 } else {
1392 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1393 }
1394 }
1395
1396 pub fn save_buffer_as(
1397 &mut self,
1398 buffer: ModelHandle<Buffer>,
1399 abs_path: PathBuf,
1400 cx: &mut ModelContext<Project>,
1401 ) -> Task<Result<()>> {
1402 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1403 let old_path =
1404 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1405 cx.spawn(|this, mut cx| async move {
1406 if let Some(old_path) = old_path {
1407 this.update(&mut cx, |this, cx| {
1408 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1409 });
1410 }
1411 let (worktree, path) = worktree_task.await?;
1412 worktree
1413 .update(&mut cx, |worktree, cx| {
1414 worktree
1415 .as_local_mut()
1416 .unwrap()
1417 .save_buffer_as(buffer.clone(), path, cx)
1418 })
1419 .await?;
1420 this.update(&mut cx, |this, cx| {
1421 this.assign_language_to_buffer(&buffer, cx);
1422 this.register_buffer_with_language_server(&buffer, cx);
1423 });
1424 Ok(())
1425 })
1426 }
1427
1428 pub fn get_open_buffer(
1429 &mut self,
1430 path: &ProjectPath,
1431 cx: &mut ModelContext<Self>,
1432 ) -> Option<ModelHandle<Buffer>> {
1433 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1434 self.opened_buffers.values().find_map(|buffer| {
1435 let buffer = buffer.upgrade(cx)?;
1436 let file = File::from_dyn(buffer.read(cx).file())?;
1437 if file.worktree == worktree && file.path() == &path.path {
1438 Some(buffer)
1439 } else {
1440 None
1441 }
1442 })
1443 }
1444
    /// Records a newly opened buffer in `opened_buffers` and hooks it into
    /// the project: event subscription, language detection, language-server
    /// registration, and a release observer that closes the LSP document.
    ///
    /// The handle is held strongly while the project is shared or remote
    /// (collaborators may reference the buffer) and weakly otherwise.
    ///
    /// # Errors
    /// Fails if a live buffer with the same remote id is already registered,
    /// or if queued operations cannot be applied.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived from collaborators while the buffer was
            // still loading are applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak handle may be replaced; a live one is a duplicate.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer is finally dropped, tell its language server the
        // document was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1502
1503 fn register_buffer_with_language_server(
1504 &mut self,
1505 buffer_handle: &ModelHandle<Buffer>,
1506 cx: &mut ModelContext<Self>,
1507 ) {
1508 let buffer = buffer_handle.read(cx);
1509 let buffer_id = buffer.remote_id();
1510 if let Some(file) = File::from_dyn(buffer.file()) {
1511 if file.is_local() {
1512 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1513 let initial_snapshot = buffer.text_snapshot();
1514
1515 let mut language_server = None;
1516 let mut language_id = None;
1517 if let Some(language) = buffer.language() {
1518 let worktree_id = file.worktree_id(cx);
1519 if let Some(adapter) = language.lsp_adapter() {
1520 language_id = adapter.id_for_language(language.name().as_ref());
1521 language_server = self
1522 .language_servers
1523 .get(&(worktree_id, adapter.name()))
1524 .cloned();
1525 }
1526 }
1527
1528 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1529 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1530 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1531 .log_err();
1532 }
1533 }
1534
1535 if let Some((_, server)) = language_server {
1536 server
1537 .notify::<lsp::notification::DidOpenTextDocument>(
1538 lsp::DidOpenTextDocumentParams {
1539 text_document: lsp::TextDocumentItem::new(
1540 uri,
1541 language_id.unwrap_or_default(),
1542 0,
1543 initial_snapshot.text(),
1544 ),
1545 }
1546 .clone(),
1547 )
1548 .log_err();
1549 buffer_handle.update(cx, |buffer, cx| {
1550 buffer.set_completion_triggers(
1551 server
1552 .capabilities()
1553 .completion_provider
1554 .as_ref()
1555 .and_then(|provider| provider.trigger_characters.clone())
1556 .unwrap_or(Vec::new()),
1557 cx,
1558 )
1559 });
1560 self.buffer_snapshots
1561 .insert(buffer_id, vec![(0, initial_snapshot)]);
1562 }
1563 }
1564 }
1565 }
1566
    /// Detaches `buffer` from its language server before it is saved under a
    /// new path: clears its diagnostics, drops the snapshot history used for
    /// incremental sync, and notifies the server that the old document was
    /// closed.
    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }
1589
    /// Reacts to buffer events: forwards CRDT operations to collaborators
    /// when the project is shared, and keeps the buffer's language server in
    /// sync on edit and save.
    ///
    /// Returns `Option<()>` purely so `?` can short-circuit when the buffer
    /// has no file, no local path, or no language server; the value carries
    /// no meaning for callers.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Translate the edits made since the last snapshot we sent
                // into LSP incremental content changes. The replaced range is
                // computed from the old lengths so it refers to the document
                // the server currently has.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Remember this snapshot so the next edit diffs against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // Notify every server for the worktree, not just the one for
                // this buffer's language.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1677
1678 fn language_servers_for_worktree(
1679 &self,
1680 worktree_id: WorktreeId,
1681 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1682 self.language_servers.iter().filter_map(
1683 move |((language_server_worktree_id, _), server)| {
1684 if *language_server_worktree_id == worktree_id {
1685 Some(server)
1686 } else {
1687 None
1688 }
1689 },
1690 )
1691 }
1692
    /// Detects a language for `buffer` from its full path, assigns it, and
    /// ensures a language server is running for that language in the
    /// buffer's (local) worktree.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exit; `None`
    /// carries no meaning for callers.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }
1713
    /// Ensures a language server is running for `language` in the given
    /// worktree, starting one if necessary.
    ///
    /// Startup is asynchronous; once the server is initialized this wires up
    /// its notification/request handlers (published diagnostics, progress,
    /// workspace configuration, workspace edits), records its status,
    /// broadcasts the start to collaborators, and opens every already-open
    /// buffer that matches the worktree and language.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        // `started_language_servers` also tracks servers that are still
        // initializing, so a second call with the same key is a no-op.
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` requests from the
                    // project's current language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits to our buffers.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Track work-done progress for status display.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.shared_remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Record (or reuse) the snapshot history used
                                // for incremental didChange sync.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1926
    /// Restarts the language servers backing the given buffers, deduplicating
    /// by worktree so each affected server restarts at most once.
    ///
    /// Returns `Option<()>` purely to enable `?`; `None` carries no meaning
    /// for callers. NOTE(review): the `?` on `select_language` aborts the
    /// loop at the first path with no detectable language, skipping any
    /// remaining restarts — confirm whether `continue` was intended instead.
    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }
1950
1951 fn restart_language_server(
1952 &mut self,
1953 worktree_id: WorktreeId,
1954 worktree_path: Arc<Path>,
1955 language: Arc<Language>,
1956 cx: &mut ModelContext<Self>,
1957 ) {
1958 let adapter = if let Some(adapter) = language.lsp_adapter() {
1959 adapter
1960 } else {
1961 return;
1962 };
1963 let key = (worktree_id, adapter.name());
1964 let server_to_shutdown = self.language_servers.remove(&key);
1965 self.started_language_servers.remove(&key);
1966 server_to_shutdown
1967 .as_ref()
1968 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1969 cx.spawn_weak(|this, mut cx| async move {
1970 if let Some(this) = this.upgrade(&cx) {
1971 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1972 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1973 shutdown_task.await;
1974 }
1975 }
1976
1977 this.update(&mut cx, |this, cx| {
1978 this.start_language_server(worktree_id, worktree_path, language, cx);
1979 });
1980 }
1981 })
1982 .detach();
1983 }
1984
    /// Handles `textDocument/publishDiagnostics` from a language server.
    ///
    /// Lets the adapter preprocess the params, applies them to the project,
    /// and — for servers that don't report disk-based diagnostic progress via
    /// a work-done token — brackets the update with started/finished events
    /// (broadcast to collaborators) so progress is still surfaced.
    fn on_lsp_diagnostics_published(
        &mut self,
        server_id: usize,
        mut params: lsp::PublishDiagnosticsParams,
        adapter: &Arc<dyn LspAdapter>,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        adapter.process_diagnostics(&mut params);
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_started(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                    proto::LspDiskBasedDiagnosticsUpdating {},
                ),
            );
        }
        self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
            .log_err();
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_finished(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                    proto::LspDiskBasedDiagnosticsUpdated {},
                ),
            );
        }
    }
2015
    /// Handles `$/progress` notifications from a language server.
    ///
    /// Progress whose token equals the adapter's disk-based-diagnostics token
    /// is counted as a diagnostics pass (begin/end bracket the started and
    /// finished events); all other tokens are tracked as generic pending work
    /// for status display. Both kinds are broadcast to collaborators.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are supported; numeric tokens are skipped.
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // Nested begins are counted; only the first one emits the
                    // started events.
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates -= 1;
                    // Only the last matching end emits the finished events.
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2110
2111 fn on_lsp_work_start(
2112 &mut self,
2113 language_server_id: usize,
2114 token: String,
2115 cx: &mut ModelContext<Self>,
2116 ) {
2117 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2118 status.pending_work.insert(
2119 token,
2120 LanguageServerProgress {
2121 message: None,
2122 percentage: None,
2123 last_update_at: Instant::now(),
2124 },
2125 );
2126 cx.notify();
2127 }
2128 }
2129
2130 fn on_lsp_work_progress(
2131 &mut self,
2132 language_server_id: usize,
2133 token: String,
2134 progress: LanguageServerProgress,
2135 cx: &mut ModelContext<Self>,
2136 ) {
2137 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2138 status.pending_work.insert(token, progress);
2139 cx.notify();
2140 }
2141 }
2142
2143 fn on_lsp_work_end(
2144 &mut self,
2145 language_server_id: usize,
2146 token: String,
2147 cx: &mut ModelContext<Self>,
2148 ) {
2149 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2150 status.pending_work.remove(&token);
2151 cx.notify();
2152 }
2153 }
2154
2155 async fn on_lsp_workspace_edit(
2156 this: WeakModelHandle<Self>,
2157 params: lsp::ApplyWorkspaceEditParams,
2158 server_id: usize,
2159 adapter: Arc<dyn LspAdapter>,
2160 language_server: Arc<LanguageServer>,
2161 mut cx: AsyncAppContext,
2162 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2163 let this = this
2164 .upgrade(&cx)
2165 .ok_or_else(|| anyhow!("project project closed"))?;
2166 let transaction = Self::deserialize_workspace_edit(
2167 this.clone(),
2168 params.edit,
2169 true,
2170 adapter.clone(),
2171 language_server.clone(),
2172 &mut cx,
2173 )
2174 .await
2175 .log_err();
2176 this.update(&mut cx, |this, _| {
2177 if let Some(transaction) = transaction {
2178 this.last_workspace_edits_by_language_server
2179 .insert(server_id, transaction);
2180 }
2181 });
2182 Ok(lsp::ApplyWorkspaceEditResponse {
2183 applied: true,
2184 failed_change: None,
2185 failure_reason: None,
2186 })
2187 }
2188
2189 fn broadcast_language_server_update(
2190 &self,
2191 language_server_id: usize,
2192 event: proto::update_language_server::Variant,
2193 ) {
2194 if let Some(project_id) = self.shared_remote_id() {
2195 self.client
2196 .send(proto::UpdateLanguageServer {
2197 project_id,
2198 language_server_id: language_server_id as u64,
2199 variant: Some(event),
2200 })
2201 .log_err();
2202 }
2203 }
2204
2205 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2206 for (_, server) in self.language_servers.values() {
2207 server
2208 .notify::<lsp::notification::DidChangeConfiguration>(
2209 lsp::DidChangeConfigurationParams {
2210 settings: settings.clone(),
2211 },
2212 )
2213 .ok();
2214 }
2215 *self.language_server_settings.lock() = settings;
2216 }
2217
    /// Iterate over the status (pending progress work, etc.) of every
    /// language server known to this project.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
2223
2224 pub fn update_diagnostics(
2225 &mut self,
2226 params: lsp::PublishDiagnosticsParams,
2227 disk_based_sources: &[&str],
2228 cx: &mut ModelContext<Self>,
2229 ) -> Result<()> {
2230 let abs_path = params
2231 .uri
2232 .to_file_path()
2233 .map_err(|_| anyhow!("URI is not a file"))?;
2234 let mut diagnostics = Vec::default();
2235 let mut primary_diagnostic_group_ids = HashMap::default();
2236 let mut sources_by_group_id = HashMap::default();
2237 let mut supporting_diagnostics = HashMap::default();
2238 for diagnostic in ¶ms.diagnostics {
2239 let source = diagnostic.source.as_ref();
2240 let code = diagnostic.code.as_ref().map(|code| match code {
2241 lsp::NumberOrString::Number(code) => code.to_string(),
2242 lsp::NumberOrString::String(code) => code.clone(),
2243 });
2244 let range = range_from_lsp(diagnostic.range);
2245 let is_supporting = diagnostic
2246 .related_information
2247 .as_ref()
2248 .map_or(false, |infos| {
2249 infos.iter().any(|info| {
2250 primary_diagnostic_group_ids.contains_key(&(
2251 source,
2252 code.clone(),
2253 range_from_lsp(info.location.range),
2254 ))
2255 })
2256 });
2257
2258 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2259 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2260 });
2261
2262 if is_supporting {
2263 supporting_diagnostics.insert(
2264 (source, code.clone(), range),
2265 (diagnostic.severity, is_unnecessary),
2266 );
2267 } else {
2268 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2269 let is_disk_based = source.map_or(false, |source| {
2270 disk_based_sources.contains(&source.as_str())
2271 });
2272
2273 sources_by_group_id.insert(group_id, source);
2274 primary_diagnostic_group_ids
2275 .insert((source, code.clone(), range.clone()), group_id);
2276
2277 diagnostics.push(DiagnosticEntry {
2278 range,
2279 diagnostic: Diagnostic {
2280 code: code.clone(),
2281 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2282 message: diagnostic.message.clone(),
2283 group_id,
2284 is_primary: true,
2285 is_valid: true,
2286 is_disk_based,
2287 is_unnecessary,
2288 },
2289 });
2290 if let Some(infos) = &diagnostic.related_information {
2291 for info in infos {
2292 if info.location.uri == params.uri && !info.message.is_empty() {
2293 let range = range_from_lsp(info.location.range);
2294 diagnostics.push(DiagnosticEntry {
2295 range,
2296 diagnostic: Diagnostic {
2297 code: code.clone(),
2298 severity: DiagnosticSeverity::INFORMATION,
2299 message: info.message.clone(),
2300 group_id,
2301 is_primary: false,
2302 is_valid: true,
2303 is_disk_based,
2304 is_unnecessary: false,
2305 },
2306 });
2307 }
2308 }
2309 }
2310 }
2311 }
2312
2313 for entry in &mut diagnostics {
2314 let diagnostic = &mut entry.diagnostic;
2315 if !diagnostic.is_primary {
2316 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2317 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2318 source,
2319 diagnostic.code.clone(),
2320 entry.range.clone(),
2321 )) {
2322 if let Some(severity) = severity {
2323 diagnostic.severity = severity;
2324 }
2325 diagnostic.is_unnecessary = is_unnecessary;
2326 }
2327 }
2328 }
2329
2330 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2331 Ok(())
2332 }
2333
2334 pub fn update_diagnostic_entries(
2335 &mut self,
2336 abs_path: PathBuf,
2337 version: Option<i32>,
2338 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2339 cx: &mut ModelContext<Project>,
2340 ) -> Result<(), anyhow::Error> {
2341 let (worktree, relative_path) = self
2342 .find_local_worktree(&abs_path, cx)
2343 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2344 if !worktree.read(cx).is_visible() {
2345 return Ok(());
2346 }
2347
2348 let project_path = ProjectPath {
2349 worktree_id: worktree.read(cx).id(),
2350 path: relative_path.into(),
2351 };
2352 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2353 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2354 }
2355
2356 let updated = worktree.update(cx, |worktree, cx| {
2357 worktree
2358 .as_local_mut()
2359 .ok_or_else(|| anyhow!("not a local worktree"))?
2360 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2361 })?;
2362 if updated {
2363 cx.emit(Event::DiagnosticsUpdated(project_path));
2364 }
2365 Ok(())
2366 }
2367
    /// Install a new set of diagnostics on an open buffer.
    ///
    /// `version` is the LSP document version the diagnostics were computed
    /// against; it selects the matching buffer snapshot. Entries are sorted,
    /// translated to account for unsaved edits (for disk-based diagnostics),
    /// clipped to the buffer's bounds, and installed as a `DiagnosticSet`.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break for diagnostics sharing a range: primary entries first,
        // then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, end descending, so containing ranges sort
        // before the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        // Edits made since the last save, used to remap disk-based ranges.
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            // Clamp the (possibly out-of-bounds) range to the buffer.
            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                // Prefer extending forward; if clipping collapses it again
                // (e.g. at end of line), extend backward instead.
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2436
    /// Reload the given buffers from disk, discarding unsaved changes.
    ///
    /// Only dirty buffers are considered. Remote buffers are reloaded by the
    /// host over RPC; local buffers re-read their files. All resulting edits
    /// are collected into a single `ProjectTransaction`. When
    /// `push_to_history` is false the transactions are removed from the
    /// buffers' undo history.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the dirty buffers into local ones and ones owned by the
        // remote host.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote reloads are only possible when this project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to reload its buffers, then replay the resulting
            // transaction locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Reload local buffers one at a time, merging each transaction
            // into the combined result.
            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2500
2501 pub fn format(
2502 &self,
2503 buffers: HashSet<ModelHandle<Buffer>>,
2504 push_to_history: bool,
2505 cx: &mut ModelContext<Project>,
2506 ) -> Task<Result<ProjectTransaction>> {
2507 let mut local_buffers = Vec::new();
2508 let mut remote_buffers = None;
2509 for buffer_handle in buffers {
2510 let buffer = buffer_handle.read(cx);
2511 if let Some(file) = File::from_dyn(buffer.file()) {
2512 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2513 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2514 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2515 }
2516 } else {
2517 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2518 }
2519 } else {
2520 return Task::ready(Ok(Default::default()));
2521 }
2522 }
2523
2524 let remote_buffers = self.remote_id().zip(remote_buffers);
2525 let client = self.client.clone();
2526
2527 cx.spawn(|this, mut cx| async move {
2528 let mut project_transaction = ProjectTransaction::default();
2529
2530 if let Some((project_id, remote_buffers)) = remote_buffers {
2531 let response = client
2532 .request(proto::FormatBuffers {
2533 project_id,
2534 buffer_ids: remote_buffers
2535 .iter()
2536 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2537 .collect(),
2538 })
2539 .await?
2540 .transaction
2541 .ok_or_else(|| anyhow!("missing transaction"))?;
2542 project_transaction = this
2543 .update(&mut cx, |this, cx| {
2544 this.deserialize_project_transaction(response, push_to_history, cx)
2545 })
2546 .await?;
2547 }
2548
2549 for (buffer, buffer_abs_path, language_server) in local_buffers {
2550 let text_document = lsp::TextDocumentIdentifier::new(
2551 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2552 );
2553 let capabilities = &language_server.capabilities();
2554 let tab_size = cx.update(|cx| {
2555 let language_name = buffer.read(cx).language().map(|language| language.name());
2556 cx.global::<Settings>().tab_size(language_name.as_deref())
2557 });
2558 let lsp_edits = if capabilities
2559 .document_formatting_provider
2560 .as_ref()
2561 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2562 {
2563 language_server
2564 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2565 text_document,
2566 options: lsp::FormattingOptions {
2567 tab_size,
2568 insert_spaces: true,
2569 insert_final_newline: Some(true),
2570 ..Default::default()
2571 },
2572 work_done_progress_params: Default::default(),
2573 })
2574 .await?
2575 } else if capabilities
2576 .document_range_formatting_provider
2577 .as_ref()
2578 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2579 {
2580 let buffer_start = lsp::Position::new(0, 0);
2581 let buffer_end =
2582 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2583 language_server
2584 .request::<lsp::request::RangeFormatting>(
2585 lsp::DocumentRangeFormattingParams {
2586 text_document,
2587 range: lsp::Range::new(buffer_start, buffer_end),
2588 options: lsp::FormattingOptions {
2589 tab_size: 4,
2590 insert_spaces: true,
2591 insert_final_newline: Some(true),
2592 ..Default::default()
2593 },
2594 work_done_progress_params: Default::default(),
2595 },
2596 )
2597 .await?
2598 } else {
2599 continue;
2600 };
2601
2602 if let Some(lsp_edits) = lsp_edits {
2603 let edits = this
2604 .update(&mut cx, |this, cx| {
2605 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2606 })
2607 .await?;
2608 buffer.update(&mut cx, |buffer, cx| {
2609 buffer.finalize_last_transaction();
2610 buffer.start_transaction();
2611 for (range, text) in edits {
2612 buffer.edit([(range, text)], cx);
2613 }
2614 if buffer.end_transaction(cx).is_some() {
2615 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2616 if !push_to_history {
2617 buffer.forget_transaction(transaction.id);
2618 }
2619 project_transaction.0.insert(cx.handle(), transaction);
2620 }
2621 });
2622 }
2623 }
2624
2625 Ok(project_transaction)
2626 })
2627 }
2628
2629 pub fn definition<T: ToPointUtf16>(
2630 &self,
2631 buffer: &ModelHandle<Buffer>,
2632 position: T,
2633 cx: &mut ModelContext<Self>,
2634 ) -> Task<Result<Vec<Location>>> {
2635 let position = position.to_point_utf16(buffer.read(cx));
2636 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2637 }
2638
2639 pub fn references<T: ToPointUtf16>(
2640 &self,
2641 buffer: &ModelHandle<Buffer>,
2642 position: T,
2643 cx: &mut ModelContext<Self>,
2644 ) -> Task<Result<Vec<Location>>> {
2645 let position = position.to_point_utf16(buffer.read(cx));
2646 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2647 }
2648
2649 pub fn document_highlights<T: ToPointUtf16>(
2650 &self,
2651 buffer: &ModelHandle<Buffer>,
2652 position: T,
2653 cx: &mut ModelContext<Self>,
2654 ) -> Task<Result<Vec<DocumentHighlight>>> {
2655 let position = position.to_point_utf16(buffer.read(cx));
2656
2657 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2658 }
2659
    /// Search the whole project for symbols matching `query`.
    ///
    /// Locally this issues a `workspace/symbol` request to every running
    /// language server and converts the responses into [`Symbol`] values;
    /// remotely it forwards the query to the host. Servers that fail are
    /// logged and skipped rather than failing the whole search.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Fan the query out to every (worktree, language) server pair.
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // A server may report a symbol in a different
                            // worktree than the one it was started for; prefer
                            // the worktree that actually contains the file.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            // Render a syntax-highlighted label when the
                            // language is known; otherwise fall back to plain
                            // text.
                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward the query to the host and deserialize
            // whatever symbols it sends back.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2762
2763 pub fn open_buffer_for_symbol(
2764 &mut self,
2765 symbol: &Symbol,
2766 cx: &mut ModelContext<Self>,
2767 ) -> Task<Result<ModelHandle<Buffer>>> {
2768 if self.is_local() {
2769 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2770 symbol.source_worktree_id,
2771 symbol.language_server_name.clone(),
2772 )) {
2773 server.clone()
2774 } else {
2775 return Task::ready(Err(anyhow!(
2776 "language server for worktree and language not found"
2777 )));
2778 };
2779
2780 let worktree_abs_path = if let Some(worktree_abs_path) = self
2781 .worktree_for_id(symbol.worktree_id, cx)
2782 .and_then(|worktree| worktree.read(cx).as_local())
2783 .map(|local_worktree| local_worktree.abs_path())
2784 {
2785 worktree_abs_path
2786 } else {
2787 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2788 };
2789 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2790 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2791 uri
2792 } else {
2793 return Task::ready(Err(anyhow!("invalid symbol path")));
2794 };
2795
2796 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2797 } else if let Some(project_id) = self.remote_id() {
2798 let request = self.client.request(proto::OpenBufferForSymbol {
2799 project_id,
2800 symbol: Some(serialize_symbol(symbol)),
2801 });
2802 cx.spawn(|this, mut cx| async move {
2803 let response = request.await?;
2804 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2805 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2806 .await
2807 })
2808 } else {
2809 Task::ready(Err(anyhow!("project does not have a remote id")))
2810 }
2811 }
2812
2813 pub fn completions<T: ToPointUtf16>(
2814 &self,
2815 source_buffer_handle: &ModelHandle<Buffer>,
2816 position: T,
2817 cx: &mut ModelContext<Self>,
2818 ) -> Task<Result<Vec<Completion>>> {
2819 let source_buffer_handle = source_buffer_handle.clone();
2820 let source_buffer = source_buffer_handle.read(cx);
2821 let buffer_id = source_buffer.remote_id();
2822 let language = source_buffer.language().cloned();
2823 let worktree;
2824 let buffer_abs_path;
2825 if let Some(file) = File::from_dyn(source_buffer.file()) {
2826 worktree = file.worktree.clone();
2827 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2828 } else {
2829 return Task::ready(Ok(Default::default()));
2830 };
2831
2832 let position = position.to_point_utf16(source_buffer);
2833 let anchor = source_buffer.anchor_after(position);
2834
2835 if worktree.read(cx).as_local().is_some() {
2836 let buffer_abs_path = buffer_abs_path.unwrap();
2837 let (_, lang_server) =
2838 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2839 server.clone()
2840 } else {
2841 return Task::ready(Ok(Default::default()));
2842 };
2843
2844 cx.spawn(|_, cx| async move {
2845 let completions = lang_server
2846 .request::<lsp::request::Completion>(lsp::CompletionParams {
2847 text_document_position: lsp::TextDocumentPositionParams::new(
2848 lsp::TextDocumentIdentifier::new(
2849 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2850 ),
2851 point_to_lsp(position),
2852 ),
2853 context: Default::default(),
2854 work_done_progress_params: Default::default(),
2855 partial_result_params: Default::default(),
2856 })
2857 .await
2858 .context("lsp completion request failed")?;
2859
2860 let completions = if let Some(completions) = completions {
2861 match completions {
2862 lsp::CompletionResponse::Array(completions) => completions,
2863 lsp::CompletionResponse::List(list) => list.items,
2864 }
2865 } else {
2866 Default::default()
2867 };
2868
2869 source_buffer_handle.read_with(&cx, |this, _| {
2870 let snapshot = this.snapshot();
2871 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2872 let mut range_for_token = None;
2873 Ok(completions
2874 .into_iter()
2875 .filter_map(|lsp_completion| {
2876 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2877 // If the language server provides a range to overwrite, then
2878 // check that the range is valid.
2879 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2880 let range = range_from_lsp(edit.range);
2881 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2882 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2883 if start != range.start || end != range.end {
2884 log::info!("completion out of expected range");
2885 return None;
2886 }
2887 (
2888 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2889 edit.new_text.clone(),
2890 )
2891 }
2892 // If the language server does not provide a range, then infer
2893 // the range based on the syntax tree.
2894 None => {
2895 if position != clipped_position {
2896 log::info!("completion out of expected range");
2897 return None;
2898 }
2899 let Range { start, end } = range_for_token
2900 .get_or_insert_with(|| {
2901 let offset = position.to_offset(&snapshot);
2902 snapshot
2903 .range_for_word_token_at(offset)
2904 .unwrap_or_else(|| offset..offset)
2905 })
2906 .clone();
2907 let text = lsp_completion
2908 .insert_text
2909 .as_ref()
2910 .unwrap_or(&lsp_completion.label)
2911 .clone();
2912 (
2913 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2914 text.clone(),
2915 )
2916 }
2917 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2918 log::info!("unsupported insert/replace completion");
2919 return None;
2920 }
2921 };
2922
2923 Some(Completion {
2924 old_range,
2925 new_text,
2926 label: language
2927 .as_ref()
2928 .and_then(|l| l.label_for_completion(&lsp_completion))
2929 .unwrap_or_else(|| {
2930 CodeLabel::plain(
2931 lsp_completion.label.clone(),
2932 lsp_completion.filter_text.as_deref(),
2933 )
2934 }),
2935 lsp_completion,
2936 })
2937 })
2938 .collect())
2939 })
2940 })
2941 } else if let Some(project_id) = self.remote_id() {
2942 let rpc = self.client.clone();
2943 let message = proto::GetCompletions {
2944 project_id,
2945 buffer_id,
2946 position: Some(language::proto::serialize_anchor(&anchor)),
2947 version: serialize_version(&source_buffer.version()),
2948 };
2949 cx.spawn_weak(|_, mut cx| async move {
2950 let response = rpc.request(message).await?;
2951
2952 source_buffer_handle
2953 .update(&mut cx, |buffer, _| {
2954 buffer.wait_for_version(deserialize_version(response.version))
2955 })
2956 .await;
2957
2958 response
2959 .completions
2960 .into_iter()
2961 .map(|completion| {
2962 language::proto::deserialize_completion(completion, language.as_ref())
2963 })
2964 .collect()
2965 })
2966 } else {
2967 Task::ready(Ok(Default::default()))
2968 }
2969 }
2970
    /// Resolve a completion with its language server and apply any
    /// `additionalTextEdits` it carries (e.g. an auto-inserted import).
    ///
    /// Returns the transaction containing those edits, or `None` when the
    /// resolved completion has no additional edits. When `push_to_history`
    /// is false the transaction is removed from the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Resolve the completion to obtain its additional edits,
                // which servers often omit from the initial response.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all of the additional edits into a single
                        // transaction, separate from any prior edits.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host applies the edits and sends back the
            // transaction, which we wait for and optionally record locally.
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3052
3053 pub fn code_actions<T: Clone + ToOffset>(
3054 &self,
3055 buffer_handle: &ModelHandle<Buffer>,
3056 range: Range<T>,
3057 cx: &mut ModelContext<Self>,
3058 ) -> Task<Result<Vec<CodeAction>>> {
3059 let buffer_handle = buffer_handle.clone();
3060 let buffer = buffer_handle.read(cx);
3061 let snapshot = buffer.snapshot();
3062 let relevant_diagnostics = snapshot
3063 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3064 .map(|entry| entry.to_lsp_diagnostic_stub())
3065 .collect();
3066 let buffer_id = buffer.remote_id();
3067 let worktree;
3068 let buffer_abs_path;
3069 if let Some(file) = File::from_dyn(buffer.file()) {
3070 worktree = file.worktree.clone();
3071 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3072 } else {
3073 return Task::ready(Ok(Default::default()));
3074 };
3075 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3076
3077 if worktree.read(cx).as_local().is_some() {
3078 let buffer_abs_path = buffer_abs_path.unwrap();
3079 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3080 {
3081 server.clone()
3082 } else {
3083 return Task::ready(Ok(Default::default()));
3084 };
3085
3086 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3087 cx.foreground().spawn(async move {
3088 if !lang_server.capabilities().code_action_provider.is_some() {
3089 return Ok(Default::default());
3090 }
3091
3092 Ok(lang_server
3093 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3094 text_document: lsp::TextDocumentIdentifier::new(
3095 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3096 ),
3097 range: lsp_range,
3098 work_done_progress_params: Default::default(),
3099 partial_result_params: Default::default(),
3100 context: lsp::CodeActionContext {
3101 diagnostics: relevant_diagnostics,
3102 only: Some(vec![
3103 lsp::CodeActionKind::QUICKFIX,
3104 lsp::CodeActionKind::REFACTOR,
3105 lsp::CodeActionKind::REFACTOR_EXTRACT,
3106 lsp::CodeActionKind::SOURCE,
3107 ]),
3108 },
3109 })
3110 .await?
3111 .unwrap_or_default()
3112 .into_iter()
3113 .filter_map(|entry| {
3114 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3115 Some(CodeAction {
3116 range: range.clone(),
3117 lsp_action,
3118 })
3119 } else {
3120 None
3121 }
3122 })
3123 .collect())
3124 })
3125 } else if let Some(project_id) = self.remote_id() {
3126 let rpc = self.client.clone();
3127 let version = buffer.version();
3128 cx.spawn_weak(|_, mut cx| async move {
3129 let response = rpc
3130 .request(proto::GetCodeActions {
3131 project_id,
3132 buffer_id,
3133 start: Some(language::proto::serialize_anchor(&range.start)),
3134 end: Some(language::proto::serialize_anchor(&range.end)),
3135 version: serialize_version(&version),
3136 })
3137 .await?;
3138
3139 buffer_handle
3140 .update(&mut cx, |buffer, _| {
3141 buffer.wait_for_version(deserialize_version(response.version))
3142 })
3143 .await;
3144
3145 response
3146 .actions
3147 .into_iter()
3148 .map(language::proto::deserialize_code_action)
3149 .collect()
3150 })
3151 } else {
3152 Task::ready(Ok(Default::default()))
3153 }
3154 }
3155
    /// Apply a previously-fetched code action to the project, returning the
    /// buffer edits it produced as a [`ProjectTransaction`].
    ///
    /// Locally the action is first refreshed — resolved via
    /// `codeAction/resolve` when it carries resolve `data`, otherwise
    /// re-queried and matched by title — then its workspace edit is applied
    /// or its command executed. Remotely the action is sent to the host and
    /// the resulting transaction is replayed here.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports lazy resolution: refresh its range
                    // (the buffer may have changed since it was fetched) and
                    // ask the server to fill in the edit.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-fetch the actions for the range and
                    // find the matching one by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    // The action carries an inline workspace edit: apply it
                    // directly.
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // The action is a server-side command. Clear any stale
                    // recorded edit for this server, run the command, and
                    // pick up whatever workspace edit the server sent back
                    // via `workspace/applyEdit` while it executed.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host applies the action and returns the
            // transaction for us to replay.
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3251
3252 async fn deserialize_workspace_edit(
3253 this: ModelHandle<Self>,
3254 edit: lsp::WorkspaceEdit,
3255 push_to_history: bool,
3256 lsp_adapter: Arc<dyn LspAdapter>,
3257 language_server: Arc<LanguageServer>,
3258 cx: &mut AsyncAppContext,
3259 ) -> Result<ProjectTransaction> {
3260 let fs = this.read_with(cx, |this, _| this.fs.clone());
3261 let mut operations = Vec::new();
3262 if let Some(document_changes) = edit.document_changes {
3263 match document_changes {
3264 lsp::DocumentChanges::Edits(edits) => {
3265 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3266 }
3267 lsp::DocumentChanges::Operations(ops) => operations = ops,
3268 }
3269 } else if let Some(changes) = edit.changes {
3270 operations.extend(changes.into_iter().map(|(uri, edits)| {
3271 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3272 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3273 uri,
3274 version: None,
3275 },
3276 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3277 })
3278 }));
3279 }
3280
3281 let mut project_transaction = ProjectTransaction::default();
3282 for operation in operations {
3283 match operation {
3284 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3285 let abs_path = op
3286 .uri
3287 .to_file_path()
3288 .map_err(|_| anyhow!("can't convert URI to path"))?;
3289
3290 if let Some(parent_path) = abs_path.parent() {
3291 fs.create_dir(parent_path).await?;
3292 }
3293 if abs_path.ends_with("/") {
3294 fs.create_dir(&abs_path).await?;
3295 } else {
3296 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3297 .await?;
3298 }
3299 }
3300 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3301 let source_abs_path = op
3302 .old_uri
3303 .to_file_path()
3304 .map_err(|_| anyhow!("can't convert URI to path"))?;
3305 let target_abs_path = op
3306 .new_uri
3307 .to_file_path()
3308 .map_err(|_| anyhow!("can't convert URI to path"))?;
3309 fs.rename(
3310 &source_abs_path,
3311 &target_abs_path,
3312 op.options.map(Into::into).unwrap_or_default(),
3313 )
3314 .await?;
3315 }
3316 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3317 let abs_path = op
3318 .uri
3319 .to_file_path()
3320 .map_err(|_| anyhow!("can't convert URI to path"))?;
3321 let options = op.options.map(Into::into).unwrap_or_default();
3322 if abs_path.ends_with("/") {
3323 fs.remove_dir(&abs_path, options).await?;
3324 } else {
3325 fs.remove_file(&abs_path, options).await?;
3326 }
3327 }
3328 lsp::DocumentChangeOperation::Edit(op) => {
3329 let buffer_to_edit = this
3330 .update(cx, |this, cx| {
3331 this.open_local_buffer_via_lsp(
3332 op.text_document.uri,
3333 lsp_adapter.clone(),
3334 language_server.clone(),
3335 cx,
3336 )
3337 })
3338 .await?;
3339
3340 let edits = this
3341 .update(cx, |this, cx| {
3342 let edits = op.edits.into_iter().map(|edit| match edit {
3343 lsp::OneOf::Left(edit) => edit,
3344 lsp::OneOf::Right(edit) => edit.text_edit,
3345 });
3346 this.edits_from_lsp(
3347 &buffer_to_edit,
3348 edits,
3349 op.text_document.version,
3350 cx,
3351 )
3352 })
3353 .await?;
3354
3355 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3356 buffer.finalize_last_transaction();
3357 buffer.start_transaction();
3358 for (range, text) in edits {
3359 buffer.edit([(range, text)], cx);
3360 }
3361 let transaction = if buffer.end_transaction(cx).is_some() {
3362 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3363 if !push_to_history {
3364 buffer.forget_transaction(transaction.id);
3365 }
3366 Some(transaction)
3367 } else {
3368 None
3369 };
3370
3371 transaction
3372 });
3373 if let Some(transaction) = transaction {
3374 project_transaction.0.insert(buffer_to_edit, transaction);
3375 }
3376 }
3377 }
3378 }
3379
3380 Ok(project_transaction)
3381 }
3382
3383 pub fn prepare_rename<T: ToPointUtf16>(
3384 &self,
3385 buffer: ModelHandle<Buffer>,
3386 position: T,
3387 cx: &mut ModelContext<Self>,
3388 ) -> Task<Result<Option<Range<Anchor>>>> {
3389 let position = position.to_point_utf16(buffer.read(cx));
3390 self.request_lsp(buffer, PrepareRename { position }, cx)
3391 }
3392
3393 pub fn perform_rename<T: ToPointUtf16>(
3394 &self,
3395 buffer: ModelHandle<Buffer>,
3396 position: T,
3397 new_name: String,
3398 push_to_history: bool,
3399 cx: &mut ModelContext<Self>,
3400 ) -> Task<Result<ProjectTransaction>> {
3401 let position = position.to_point_utf16(buffer.read(cx));
3402 self.request_lsp(
3403 buffer,
3404 PerformRename {
3405 position,
3406 new_name,
3407 push_to_history,
3408 },
3409 cx,
3410 )
3411 }
3412
    /// Searches the project for `query`, returning the matching ranges
    /// grouped by buffer.
    ///
    /// Locally this runs as a three-stage pipeline: background workers scan
    /// worktree files on disk for candidate paths, candidates are opened as
    /// buffers on the foreground (already-open buffers are pre-seeded), and a
    /// second worker pool searches each buffer snapshot for the exact match
    /// ranges. On a remote project the query is forwarded to the host.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan file contents on disk, fanning the visible paths
            // out evenly across `workers` background tasks.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        // Each worker owns the global path index range
                        // [worker_start_ix, worker_end_ix).
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // This worker's range overlaps
                                                // the current snapshot.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped: stop early.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf per worker.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: open each candidate path as a buffer, seeding the
            // pipeline with buffers that are already open.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Skip buffers that were pre-seeded above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search each buffer snapshot, producing one result map
            // per worker, then merge the maps.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Guest: the host performs the search and streams back locations.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3615
    /// Sends `request` on behalf of `buffer_handle` either to the buffer's
    /// language server (when local) or to the host over RPC (when remote),
    /// translating the response back into project-level types.
    ///
    /// Yields `Default::default()` when there is no server for the buffer,
    /// when the server lacks the required capability, or when the project is
    /// neither local nor connected to a host.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Unsupported capability: respond with an empty default
                    // rather than an error.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            // Guest: forward the request to the host.
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3658
3659 pub fn find_or_create_local_worktree(
3660 &mut self,
3661 abs_path: impl AsRef<Path>,
3662 visible: bool,
3663 cx: &mut ModelContext<Self>,
3664 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3665 let abs_path = abs_path.as_ref();
3666 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3667 Task::ready(Ok((tree.clone(), relative_path.into())))
3668 } else {
3669 let worktree = self.create_local_worktree(abs_path, visible, cx);
3670 cx.foreground()
3671 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3672 }
3673 }
3674
3675 pub fn find_local_worktree(
3676 &self,
3677 abs_path: &Path,
3678 cx: &AppContext,
3679 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3680 for tree in self.worktrees(cx) {
3681 if let Some(relative_path) = tree
3682 .read(cx)
3683 .as_local()
3684 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3685 {
3686 return Some((tree.clone(), relative_path.into()));
3687 }
3688 }
3689 None
3690 }
3691
3692 pub fn is_shared(&self) -> bool {
3693 match &self.client_state {
3694 ProjectClientState::Local { is_shared, .. } => *is_shared,
3695 ProjectClientState::Remote { .. } => false,
3696 }
3697 }
3698
    /// Starts loading a local worktree rooted at `abs_path`, deduplicating
    /// concurrent requests for the same path via `loading_local_worktrees`.
    ///
    /// Once loaded, the worktree is added to the project, and if the project
    /// is currently shared it is shared with collaborators as well. Errors
    /// are wrapped in `Arc` so the shared future can be awaited by multiple
    /// callers.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Clear the pending entry before inspecting the
                        // result, so that a failed load can be retried.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let project_id = project.update(&mut cx, |project, cx| {
                            project.add_worktree(&worktree, cx);
                            project.shared_remote_id()
                        });

                        // If the project is already shared, share the new
                        // worktree with collaborators too.
                        if let Some(project_id) = project_id {
                            worktree
                                .update(&mut cx, |worktree, cx| {
                                    worktree.as_local_mut().unwrap().share(project_id, cx)
                                })
                                .await
                                .log_err();
                        }

                        Ok(worktree)
                    }
                    // The error type must be `Clone` for `.shared()`.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        // Unwrap the shared `Arc` error back into a plain `anyhow` error.
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3757
3758 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3759 self.worktrees.retain(|worktree| {
3760 if let Some(worktree) = worktree.upgrade(cx) {
3761 let id = worktree.read(cx).id();
3762 if id == id_to_remove {
3763 cx.emit(Event::WorktreeRemoved(id));
3764 false
3765 } else {
3766 true
3767 }
3768 } else {
3769 false
3770 }
3771 });
3772 self.metadata_changed(cx);
3773 cx.notify();
3774 }
3775
3776 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3777 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3778 if worktree.read(cx).is_local() {
3779 cx.subscribe(&worktree, |this, worktree, _, cx| {
3780 this.update_local_worktree_buffers(worktree, cx);
3781 })
3782 .detach();
3783 }
3784
3785 let push_strong_handle = {
3786 let worktree = worktree.read(cx);
3787 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3788 };
3789 if push_strong_handle {
3790 self.worktrees
3791 .push(WorktreeHandle::Strong(worktree.clone()));
3792 } else {
3793 cx.observe_release(&worktree, |this, _, cx| {
3794 this.worktrees
3795 .retain(|worktree| worktree.upgrade(cx).is_some());
3796 cx.notify();
3797 })
3798 .detach();
3799 self.worktrees
3800 .push(WorktreeHandle::Weak(worktree.downgrade()));
3801 }
3802 self.metadata_changed(cx);
3803 cx.emit(Event::WorktreeAdded);
3804 cx.notify();
3805 }
3806
    /// Reconciles every open buffer that belongs to `worktree_handle` with
    /// the worktree's latest snapshot after a filesystem change.
    ///
    /// Each buffer's `File` is refreshed (resolved by entry id when possible,
    /// then by path, finally as an entry-less file), collaborators are
    /// notified of the new file metadata, and buffers whose absolute path
    /// changed are re-registered with their language server. Dead buffer
    /// handles are pruned from `opened_buffers`.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only reconcile buffers from this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolve by entry id first, then by path, then fall
                        // back to an entry-less file (the entry is gone).
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Keep collaborators' view of the file up to date.
                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer was dropped; clean it up below.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // Renamed buffers must be re-registered with their language server
        // (and may be assigned a different language) under the new path.
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
3886
3887 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3888 let new_active_entry = entry.and_then(|project_path| {
3889 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3890 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3891 Some(entry.id)
3892 });
3893 if new_active_entry != self.active_entry {
3894 self.active_entry = new_active_entry;
3895 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3896 }
3897 }
3898
3899 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3900 self.language_server_statuses
3901 .values()
3902 .any(|status| status.pending_diagnostic_updates > 0)
3903 }
3904
3905 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3906 let mut summary = DiagnosticSummary::default();
3907 for (_, path_summary) in self.diagnostic_summaries(cx) {
3908 summary.error_count += path_summary.error_count;
3909 summary.warning_count += path_summary.warning_count;
3910 }
3911 summary
3912 }
3913
3914 pub fn diagnostic_summaries<'a>(
3915 &'a self,
3916 cx: &'a AppContext,
3917 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3918 self.worktrees(cx).flat_map(move |worktree| {
3919 let worktree = worktree.read(cx);
3920 let worktree_id = worktree.id();
3921 worktree
3922 .diagnostic_summaries()
3923 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3924 })
3925 }
3926
3927 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3928 if self
3929 .language_server_statuses
3930 .values()
3931 .map(|status| status.pending_diagnostic_updates)
3932 .sum::<isize>()
3933 == 1
3934 {
3935 cx.emit(Event::DiskBasedDiagnosticsStarted);
3936 }
3937 }
3938
3939 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3940 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3941 if self
3942 .language_server_statuses
3943 .values()
3944 .map(|status| status.pending_diagnostic_updates)
3945 .sum::<isize>()
3946 == 0
3947 {
3948 cx.emit(Event::DiskBasedDiagnosticsFinished);
3949 }
3950 }
3951
    /// The project entry most recently marked active via
    /// [`Self::set_active_path`], if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3955
3956 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3957 self.worktree_for_id(path.worktree_id, cx)?
3958 .read(cx)
3959 .entry_for_path(&path.path)
3960 .map(|entry| entry.id)
3961 }
3962
3963 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3964 let worktree = self.worktree_for_entry(entry_id, cx)?;
3965 let worktree = worktree.read(cx);
3966 let worktree_id = worktree.id();
3967 let path = worktree.entry_for_id(entry_id)?.path.clone();
3968 Some(ProjectPath { worktree_id, path })
3969 }
3970
3971 // RPC message handlers
3972
3973 async fn handle_request_join_project(
3974 this: ModelHandle<Self>,
3975 message: TypedEnvelope<proto::RequestJoinProject>,
3976 _: Arc<Client>,
3977 mut cx: AsyncAppContext,
3978 ) -> Result<()> {
3979 let user_id = message.payload.requester_id;
3980 if this.read_with(&cx, |project, _| {
3981 project.collaborators.values().any(|c| c.user.id == user_id)
3982 }) {
3983 this.update(&mut cx, |this, cx| {
3984 this.respond_to_join_request(user_id, true, cx)
3985 });
3986 } else {
3987 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3988 let user = user_store
3989 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3990 .await?;
3991 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3992 }
3993 Ok(())
3994 }
3995
    /// Handles the host unregistering the project by tearing down this
    /// replica's participation in it.
    async fn handle_unregister_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnregisterProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.removed_from_project(cx));
        Ok(())
    }
4005
    /// Handles the host unsharing the project by transitioning this replica
    /// into its unshared state.
    async fn handle_project_unshared(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::ProjectUnshared>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.unshared(cx));
        Ok(())
    }
4015
4016 async fn handle_add_collaborator(
4017 this: ModelHandle<Self>,
4018 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4019 _: Arc<Client>,
4020 mut cx: AsyncAppContext,
4021 ) -> Result<()> {
4022 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4023 let collaborator = envelope
4024 .payload
4025 .collaborator
4026 .take()
4027 .ok_or_else(|| anyhow!("empty collaborator"))?;
4028
4029 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4030 this.update(&mut cx, |this, cx| {
4031 this.collaborators
4032 .insert(collaborator.peer_id, collaborator);
4033 cx.notify();
4034 });
4035
4036 Ok(())
4037 }
4038
4039 async fn handle_remove_collaborator(
4040 this: ModelHandle<Self>,
4041 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4042 _: Arc<Client>,
4043 mut cx: AsyncAppContext,
4044 ) -> Result<()> {
4045 this.update(&mut cx, |this, cx| {
4046 let peer_id = PeerId(envelope.payload.peer_id);
4047 let replica_id = this
4048 .collaborators
4049 .remove(&peer_id)
4050 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4051 .replica_id;
4052 for (_, buffer) in &this.opened_buffers {
4053 if let Some(buffer) = buffer.upgrade(cx) {
4054 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4055 }
4056 }
4057
4058 cx.emit(Event::CollaboratorLeft(peer_id));
4059 cx.notify();
4060 Ok(())
4061 })
4062 }
4063
4064 async fn handle_join_project_request_cancelled(
4065 this: ModelHandle<Self>,
4066 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4067 _: Arc<Client>,
4068 mut cx: AsyncAppContext,
4069 ) -> Result<()> {
4070 let user = this
4071 .update(&mut cx, |this, cx| {
4072 this.user_store.update(cx, |user_store, cx| {
4073 user_store.fetch_user(envelope.payload.requester_id, cx)
4074 })
4075 })
4076 .await?;
4077
4078 this.update(&mut cx, |_, cx| {
4079 cx.emit(Event::ContactCancelledJoinRequest(user));
4080 });
4081
4082 Ok(())
4083 }
4084
    /// Handles an `UpdateProject` message from the host by reconciling this
    /// replica's worktrees with the set described in the payload: surviving
    /// worktrees are retained, new ones are created (and begin loading their
    /// entries asynchronously), and worktrees missing from the payload are
    /// dropped with a `WorktreeRemoved` event.
    async fn handle_update_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProject>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let replica_id = this.replica_id();
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

            // Take ownership of all live worktrees, keyed by id, so the ones
            // that still exist can be pushed back below.
            let mut old_worktrees_by_id = this
                .worktrees
                .drain(..)
                .filter_map(|worktree| {
                    let worktree = worktree.upgrade(cx)?;
                    Some((worktree.read(cx).id(), worktree))
                })
                .collect::<HashMap<_, _>>();

            for worktree in envelope.payload.worktrees {
                if let Some(old_worktree) =
                    old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
                {
                    this.worktrees.push(WorktreeHandle::Strong(old_worktree));
                } else {
                    // New worktree: create a remote replica with empty
                    // entries; `load_task` fills them in asynchronously.
                    let worktree = proto::Worktree {
                        id: worktree.id,
                        root_name: worktree.root_name,
                        entries: Default::default(),
                        diagnostic_summaries: Default::default(),
                        visible: worktree.visible,
                        scan_id: 0,
                    };
                    let (worktree, load_task) =
                        Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                    this.add_worktree(&worktree, cx);
                    load_task.detach();
                }
            }

            this.metadata_changed(cx);
            // Anything left in the map no longer exists on the host.
            for (id, _) in old_worktrees_by_id {
                cx.emit(Event::WorktreeRemoved(id));
            }

            Ok(())
        })
    }
4133
4134 async fn handle_update_worktree(
4135 this: ModelHandle<Self>,
4136 envelope: TypedEnvelope<proto::UpdateWorktree>,
4137 _: Arc<Client>,
4138 mut cx: AsyncAppContext,
4139 ) -> Result<()> {
4140 this.update(&mut cx, |this, cx| {
4141 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4142 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4143 worktree.update(cx, |worktree, _| {
4144 let worktree = worktree.as_remote_mut().unwrap();
4145 worktree.update_from_remote(envelope)
4146 })?;
4147 }
4148 Ok(())
4149 })
4150 }
4151
4152 async fn handle_create_project_entry(
4153 this: ModelHandle<Self>,
4154 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4155 _: Arc<Client>,
4156 mut cx: AsyncAppContext,
4157 ) -> Result<proto::ProjectEntryResponse> {
4158 let worktree = this.update(&mut cx, |this, cx| {
4159 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4160 this.worktree_for_id(worktree_id, cx)
4161 .ok_or_else(|| anyhow!("worktree not found"))
4162 })?;
4163 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4164 let entry = worktree
4165 .update(&mut cx, |worktree, cx| {
4166 let worktree = worktree.as_local_mut().unwrap();
4167 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4168 worktree.create_entry(path, envelope.payload.is_directory, cx)
4169 })
4170 .await?;
4171 Ok(proto::ProjectEntryResponse {
4172 entry: Some((&entry).into()),
4173 worktree_scan_id: worktree_scan_id as u64,
4174 })
4175 }
4176
4177 async fn handle_rename_project_entry(
4178 this: ModelHandle<Self>,
4179 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4180 _: Arc<Client>,
4181 mut cx: AsyncAppContext,
4182 ) -> Result<proto::ProjectEntryResponse> {
4183 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4184 let worktree = this.read_with(&cx, |this, cx| {
4185 this.worktree_for_entry(entry_id, cx)
4186 .ok_or_else(|| anyhow!("worktree not found"))
4187 })?;
4188 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4189 let entry = worktree
4190 .update(&mut cx, |worktree, cx| {
4191 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4192 worktree
4193 .as_local_mut()
4194 .unwrap()
4195 .rename_entry(entry_id, new_path, cx)
4196 .ok_or_else(|| anyhow!("invalid entry"))
4197 })?
4198 .await?;
4199 Ok(proto::ProjectEntryResponse {
4200 entry: Some((&entry).into()),
4201 worktree_scan_id: worktree_scan_id as u64,
4202 })
4203 }
4204
4205 async fn handle_copy_project_entry(
4206 this: ModelHandle<Self>,
4207 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4208 _: Arc<Client>,
4209 mut cx: AsyncAppContext,
4210 ) -> Result<proto::ProjectEntryResponse> {
4211 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4212 let worktree = this.read_with(&cx, |this, cx| {
4213 this.worktree_for_entry(entry_id, cx)
4214 .ok_or_else(|| anyhow!("worktree not found"))
4215 })?;
4216 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4217 let entry = worktree
4218 .update(&mut cx, |worktree, cx| {
4219 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4220 worktree
4221 .as_local_mut()
4222 .unwrap()
4223 .copy_entry(entry_id, new_path, cx)
4224 .ok_or_else(|| anyhow!("invalid entry"))
4225 })?
4226 .await?;
4227 Ok(proto::ProjectEntryResponse {
4228 entry: Some((&entry).into()),
4229 worktree_scan_id: worktree_scan_id as u64,
4230 })
4231 }
4232
4233 async fn handle_delete_project_entry(
4234 this: ModelHandle<Self>,
4235 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4236 _: Arc<Client>,
4237 mut cx: AsyncAppContext,
4238 ) -> Result<proto::ProjectEntryResponse> {
4239 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4240 let worktree = this.read_with(&cx, |this, cx| {
4241 this.worktree_for_entry(entry_id, cx)
4242 .ok_or_else(|| anyhow!("worktree not found"))
4243 })?;
4244 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4245 worktree
4246 .update(&mut cx, |worktree, cx| {
4247 worktree
4248 .as_local_mut()
4249 .unwrap()
4250 .delete_entry(entry_id, cx)
4251 .ok_or_else(|| anyhow!("invalid entry"))
4252 })?
4253 .await?;
4254 Ok(proto::ProjectEntryResponse {
4255 entry: None,
4256 worktree_scan_id: worktree_scan_id as u64,
4257 })
4258 }
4259
4260 async fn handle_update_diagnostic_summary(
4261 this: ModelHandle<Self>,
4262 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4263 _: Arc<Client>,
4264 mut cx: AsyncAppContext,
4265 ) -> Result<()> {
4266 this.update(&mut cx, |this, cx| {
4267 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4268 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4269 if let Some(summary) = envelope.payload.summary {
4270 let project_path = ProjectPath {
4271 worktree_id,
4272 path: Path::new(&summary.path).into(),
4273 };
4274 worktree.update(cx, |worktree, _| {
4275 worktree
4276 .as_remote_mut()
4277 .unwrap()
4278 .update_diagnostic_summary(project_path.path.clone(), &summary);
4279 });
4280 cx.emit(Event::DiagnosticsUpdated(project_path));
4281 }
4282 }
4283 Ok(())
4284 })
4285 }
4286
4287 async fn handle_start_language_server(
4288 this: ModelHandle<Self>,
4289 envelope: TypedEnvelope<proto::StartLanguageServer>,
4290 _: Arc<Client>,
4291 mut cx: AsyncAppContext,
4292 ) -> Result<()> {
4293 let server = envelope
4294 .payload
4295 .server
4296 .ok_or_else(|| anyhow!("invalid server"))?;
4297 this.update(&mut cx, |this, cx| {
4298 this.language_server_statuses.insert(
4299 server.id as usize,
4300 LanguageServerStatus {
4301 name: server.name,
4302 pending_work: Default::default(),
4303 pending_diagnostic_updates: 0,
4304 },
4305 );
4306 cx.notify();
4307 });
4308 Ok(())
4309 }
4310
4311 async fn handle_update_language_server(
4312 this: ModelHandle<Self>,
4313 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4314 _: Arc<Client>,
4315 mut cx: AsyncAppContext,
4316 ) -> Result<()> {
4317 let language_server_id = envelope.payload.language_server_id as usize;
4318 match envelope
4319 .payload
4320 .variant
4321 .ok_or_else(|| anyhow!("invalid variant"))?
4322 {
4323 proto::update_language_server::Variant::WorkStart(payload) => {
4324 this.update(&mut cx, |this, cx| {
4325 this.on_lsp_work_start(language_server_id, payload.token, cx);
4326 })
4327 }
4328 proto::update_language_server::Variant::WorkProgress(payload) => {
4329 this.update(&mut cx, |this, cx| {
4330 this.on_lsp_work_progress(
4331 language_server_id,
4332 payload.token,
4333 LanguageServerProgress {
4334 message: payload.message,
4335 percentage: payload.percentage.map(|p| p as usize),
4336 last_update_at: Instant::now(),
4337 },
4338 cx,
4339 );
4340 })
4341 }
4342 proto::update_language_server::Variant::WorkEnd(payload) => {
4343 this.update(&mut cx, |this, cx| {
4344 this.on_lsp_work_end(language_server_id, payload.token, cx);
4345 })
4346 }
4347 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4348 this.update(&mut cx, |this, cx| {
4349 this.disk_based_diagnostics_started(cx);
4350 })
4351 }
4352 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4353 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4354 }
4355 }
4356
4357 Ok(())
4358 }
4359
4360 async fn handle_update_buffer(
4361 this: ModelHandle<Self>,
4362 envelope: TypedEnvelope<proto::UpdateBuffer>,
4363 _: Arc<Client>,
4364 mut cx: AsyncAppContext,
4365 ) -> Result<()> {
4366 this.update(&mut cx, |this, cx| {
4367 let payload = envelope.payload.clone();
4368 let buffer_id = payload.buffer_id;
4369 let ops = payload
4370 .operations
4371 .into_iter()
4372 .map(|op| language::proto::deserialize_operation(op))
4373 .collect::<Result<Vec<_>, _>>()?;
4374 let is_remote = this.is_remote();
4375 match this.opened_buffers.entry(buffer_id) {
4376 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4377 OpenBuffer::Strong(buffer) => {
4378 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4379 }
4380 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4381 OpenBuffer::Weak(_) => {}
4382 },
4383 hash_map::Entry::Vacant(e) => {
4384 assert!(
4385 is_remote,
4386 "received buffer update from {:?}",
4387 envelope.original_sender_id
4388 );
4389 e.insert(OpenBuffer::Loading(ops));
4390 }
4391 }
4392 Ok(())
4393 })
4394 }
4395
4396 async fn handle_update_buffer_file(
4397 this: ModelHandle<Self>,
4398 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4399 _: Arc<Client>,
4400 mut cx: AsyncAppContext,
4401 ) -> Result<()> {
4402 this.update(&mut cx, |this, cx| {
4403 let payload = envelope.payload.clone();
4404 let buffer_id = payload.buffer_id;
4405 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4406 let worktree = this
4407 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4408 .ok_or_else(|| anyhow!("no such worktree"))?;
4409 let file = File::from_proto(file, worktree.clone(), cx)?;
4410 let buffer = this
4411 .opened_buffers
4412 .get_mut(&buffer_id)
4413 .and_then(|b| b.upgrade(cx))
4414 .ok_or_else(|| anyhow!("no such buffer"))?;
4415 buffer.update(cx, |buffer, cx| {
4416 buffer.file_updated(Box::new(file), cx).detach();
4417 });
4418 Ok(())
4419 })
4420 }
4421
4422 async fn handle_save_buffer(
4423 this: ModelHandle<Self>,
4424 envelope: TypedEnvelope<proto::SaveBuffer>,
4425 _: Arc<Client>,
4426 mut cx: AsyncAppContext,
4427 ) -> Result<proto::BufferSaved> {
4428 let buffer_id = envelope.payload.buffer_id;
4429 let requested_version = deserialize_version(envelope.payload.version);
4430
4431 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4432 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4433 let buffer = this
4434 .opened_buffers
4435 .get(&buffer_id)
4436 .and_then(|buffer| buffer.upgrade(cx))
4437 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4438 Ok::<_, anyhow::Error>((project_id, buffer))
4439 })?;
4440 buffer
4441 .update(&mut cx, |buffer, _| {
4442 buffer.wait_for_version(requested_version)
4443 })
4444 .await;
4445
4446 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4447 Ok(proto::BufferSaved {
4448 project_id,
4449 buffer_id,
4450 version: serialize_version(&saved_version),
4451 mtime: Some(mtime.into()),
4452 })
4453 }
4454
4455 async fn handle_reload_buffers(
4456 this: ModelHandle<Self>,
4457 envelope: TypedEnvelope<proto::ReloadBuffers>,
4458 _: Arc<Client>,
4459 mut cx: AsyncAppContext,
4460 ) -> Result<proto::ReloadBuffersResponse> {
4461 let sender_id = envelope.original_sender_id()?;
4462 let reload = this.update(&mut cx, |this, cx| {
4463 let mut buffers = HashSet::default();
4464 for buffer_id in &envelope.payload.buffer_ids {
4465 buffers.insert(
4466 this.opened_buffers
4467 .get(buffer_id)
4468 .and_then(|buffer| buffer.upgrade(cx))
4469 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4470 );
4471 }
4472 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4473 })?;
4474
4475 let project_transaction = reload.await?;
4476 let project_transaction = this.update(&mut cx, |this, cx| {
4477 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4478 });
4479 Ok(proto::ReloadBuffersResponse {
4480 transaction: Some(project_transaction),
4481 })
4482 }
4483
4484 async fn handle_format_buffers(
4485 this: ModelHandle<Self>,
4486 envelope: TypedEnvelope<proto::FormatBuffers>,
4487 _: Arc<Client>,
4488 mut cx: AsyncAppContext,
4489 ) -> Result<proto::FormatBuffersResponse> {
4490 let sender_id = envelope.original_sender_id()?;
4491 let format = this.update(&mut cx, |this, cx| {
4492 let mut buffers = HashSet::default();
4493 for buffer_id in &envelope.payload.buffer_ids {
4494 buffers.insert(
4495 this.opened_buffers
4496 .get(buffer_id)
4497 .and_then(|buffer| buffer.upgrade(cx))
4498 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4499 );
4500 }
4501 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4502 })?;
4503
4504 let project_transaction = format.await?;
4505 let project_transaction = this.update(&mut cx, |this, cx| {
4506 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4507 });
4508 Ok(proto::FormatBuffersResponse {
4509 transaction: Some(project_transaction),
4510 })
4511 }
4512
4513 async fn handle_get_completions(
4514 this: ModelHandle<Self>,
4515 envelope: TypedEnvelope<proto::GetCompletions>,
4516 _: Arc<Client>,
4517 mut cx: AsyncAppContext,
4518 ) -> Result<proto::GetCompletionsResponse> {
4519 let position = envelope
4520 .payload
4521 .position
4522 .and_then(language::proto::deserialize_anchor)
4523 .ok_or_else(|| anyhow!("invalid position"))?;
4524 let version = deserialize_version(envelope.payload.version);
4525 let buffer = this.read_with(&cx, |this, cx| {
4526 this.opened_buffers
4527 .get(&envelope.payload.buffer_id)
4528 .and_then(|buffer| buffer.upgrade(cx))
4529 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4530 })?;
4531 buffer
4532 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4533 .await;
4534 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4535 let completions = this
4536 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4537 .await?;
4538
4539 Ok(proto::GetCompletionsResponse {
4540 completions: completions
4541 .iter()
4542 .map(language::proto::serialize_completion)
4543 .collect(),
4544 version: serialize_version(&version),
4545 })
4546 }
4547
4548 async fn handle_apply_additional_edits_for_completion(
4549 this: ModelHandle<Self>,
4550 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4551 _: Arc<Client>,
4552 mut cx: AsyncAppContext,
4553 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4554 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4555 let buffer = this
4556 .opened_buffers
4557 .get(&envelope.payload.buffer_id)
4558 .and_then(|buffer| buffer.upgrade(cx))
4559 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4560 let language = buffer.read(cx).language();
4561 let completion = language::proto::deserialize_completion(
4562 envelope
4563 .payload
4564 .completion
4565 .ok_or_else(|| anyhow!("invalid completion"))?,
4566 language,
4567 )?;
4568 Ok::<_, anyhow::Error>(
4569 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4570 )
4571 })?;
4572
4573 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4574 transaction: apply_additional_edits
4575 .await?
4576 .as_ref()
4577 .map(language::proto::serialize_transaction),
4578 })
4579 }
4580
4581 async fn handle_get_code_actions(
4582 this: ModelHandle<Self>,
4583 envelope: TypedEnvelope<proto::GetCodeActions>,
4584 _: Arc<Client>,
4585 mut cx: AsyncAppContext,
4586 ) -> Result<proto::GetCodeActionsResponse> {
4587 let start = envelope
4588 .payload
4589 .start
4590 .and_then(language::proto::deserialize_anchor)
4591 .ok_or_else(|| anyhow!("invalid start"))?;
4592 let end = envelope
4593 .payload
4594 .end
4595 .and_then(language::proto::deserialize_anchor)
4596 .ok_or_else(|| anyhow!("invalid end"))?;
4597 let buffer = this.update(&mut cx, |this, cx| {
4598 this.opened_buffers
4599 .get(&envelope.payload.buffer_id)
4600 .and_then(|buffer| buffer.upgrade(cx))
4601 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4602 })?;
4603 buffer
4604 .update(&mut cx, |buffer, _| {
4605 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4606 })
4607 .await;
4608
4609 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4610 let code_actions = this.update(&mut cx, |this, cx| {
4611 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4612 })?;
4613
4614 Ok(proto::GetCodeActionsResponse {
4615 actions: code_actions
4616 .await?
4617 .iter()
4618 .map(language::proto::serialize_code_action)
4619 .collect(),
4620 version: serialize_version(&version),
4621 })
4622 }
4623
4624 async fn handle_apply_code_action(
4625 this: ModelHandle<Self>,
4626 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4627 _: Arc<Client>,
4628 mut cx: AsyncAppContext,
4629 ) -> Result<proto::ApplyCodeActionResponse> {
4630 let sender_id = envelope.original_sender_id()?;
4631 let action = language::proto::deserialize_code_action(
4632 envelope
4633 .payload
4634 .action
4635 .ok_or_else(|| anyhow!("invalid action"))?,
4636 )?;
4637 let apply_code_action = this.update(&mut cx, |this, cx| {
4638 let buffer = this
4639 .opened_buffers
4640 .get(&envelope.payload.buffer_id)
4641 .and_then(|buffer| buffer.upgrade(cx))
4642 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4643 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4644 })?;
4645
4646 let project_transaction = apply_code_action.await?;
4647 let project_transaction = this.update(&mut cx, |this, cx| {
4648 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4649 });
4650 Ok(proto::ApplyCodeActionResponse {
4651 transaction: Some(project_transaction),
4652 })
4653 }
4654
    /// Generic handler for LSP-backed requests forwarded by a guest.
    ///
    /// Looks up the buffer the request targets, deserializes the request via
    /// `T::from_proto`, performs it with `request_lsp`, and serializes the
    /// response for the original sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the buffer version before performing the request so the
        // response can report which buffer state it corresponds to.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4695
4696 async fn handle_get_project_symbols(
4697 this: ModelHandle<Self>,
4698 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4699 _: Arc<Client>,
4700 mut cx: AsyncAppContext,
4701 ) -> Result<proto::GetProjectSymbolsResponse> {
4702 let symbols = this
4703 .update(&mut cx, |this, cx| {
4704 this.symbols(&envelope.payload.query, cx)
4705 })
4706 .await?;
4707
4708 Ok(proto::GetProjectSymbolsResponse {
4709 symbols: symbols.iter().map(serialize_symbol).collect(),
4710 })
4711 }
4712
4713 async fn handle_search_project(
4714 this: ModelHandle<Self>,
4715 envelope: TypedEnvelope<proto::SearchProject>,
4716 _: Arc<Client>,
4717 mut cx: AsyncAppContext,
4718 ) -> Result<proto::SearchProjectResponse> {
4719 let peer_id = envelope.original_sender_id()?;
4720 let query = SearchQuery::from_proto(envelope.payload)?;
4721 let result = this
4722 .update(&mut cx, |this, cx| this.search(query, cx))
4723 .await?;
4724
4725 this.update(&mut cx, |this, cx| {
4726 let mut locations = Vec::new();
4727 for (buffer, ranges) in result {
4728 for range in ranges {
4729 let start = serialize_anchor(&range.start);
4730 let end = serialize_anchor(&range.end);
4731 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4732 locations.push(proto::Location {
4733 buffer: Some(buffer),
4734 start: Some(start),
4735 end: Some(end),
4736 });
4737 }
4738 }
4739 Ok(proto::SearchProjectResponse { locations })
4740 })
4741 }
4742
4743 async fn handle_open_buffer_for_symbol(
4744 this: ModelHandle<Self>,
4745 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4746 _: Arc<Client>,
4747 mut cx: AsyncAppContext,
4748 ) -> Result<proto::OpenBufferForSymbolResponse> {
4749 let peer_id = envelope.original_sender_id()?;
4750 let symbol = envelope
4751 .payload
4752 .symbol
4753 .ok_or_else(|| anyhow!("invalid symbol"))?;
4754 let symbol = this.read_with(&cx, |this, _| {
4755 let symbol = this.deserialize_symbol(symbol)?;
4756 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4757 if signature == symbol.signature {
4758 Ok(symbol)
4759 } else {
4760 Err(anyhow!("invalid symbol signature"))
4761 }
4762 })?;
4763 let buffer = this
4764 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4765 .await?;
4766
4767 Ok(proto::OpenBufferForSymbolResponse {
4768 buffer: Some(this.update(&mut cx, |this, cx| {
4769 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4770 })),
4771 })
4772 }
4773
4774 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4775 let mut hasher = Sha256::new();
4776 hasher.update(worktree_id.to_proto().to_be_bytes());
4777 hasher.update(path.to_string_lossy().as_bytes());
4778 hasher.update(self.nonce.to_be_bytes());
4779 hasher.finalize().as_slice().try_into().unwrap()
4780 }
4781
4782 async fn handle_open_buffer_by_id(
4783 this: ModelHandle<Self>,
4784 envelope: TypedEnvelope<proto::OpenBufferById>,
4785 _: Arc<Client>,
4786 mut cx: AsyncAppContext,
4787 ) -> Result<proto::OpenBufferResponse> {
4788 let peer_id = envelope.original_sender_id()?;
4789 let buffer = this
4790 .update(&mut cx, |this, cx| {
4791 this.open_buffer_by_id(envelope.payload.id, cx)
4792 })
4793 .await?;
4794 this.update(&mut cx, |this, cx| {
4795 Ok(proto::OpenBufferResponse {
4796 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4797 })
4798 })
4799 }
4800
4801 async fn handle_open_buffer_by_path(
4802 this: ModelHandle<Self>,
4803 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4804 _: Arc<Client>,
4805 mut cx: AsyncAppContext,
4806 ) -> Result<proto::OpenBufferResponse> {
4807 let peer_id = envelope.original_sender_id()?;
4808 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4809 let open_buffer = this.update(&mut cx, |this, cx| {
4810 this.open_buffer(
4811 ProjectPath {
4812 worktree_id,
4813 path: PathBuf::from(envelope.payload.path).into(),
4814 },
4815 cx,
4816 )
4817 });
4818
4819 let buffer = open_buffer.await?;
4820 this.update(&mut cx, |this, cx| {
4821 Ok(proto::OpenBufferResponse {
4822 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4823 })
4824 })
4825 }
4826
4827 fn serialize_project_transaction_for_peer(
4828 &mut self,
4829 project_transaction: ProjectTransaction,
4830 peer_id: PeerId,
4831 cx: &AppContext,
4832 ) -> proto::ProjectTransaction {
4833 let mut serialized_transaction = proto::ProjectTransaction {
4834 buffers: Default::default(),
4835 transactions: Default::default(),
4836 };
4837 for (buffer, transaction) in project_transaction.0 {
4838 serialized_transaction
4839 .buffers
4840 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4841 serialized_transaction
4842 .transactions
4843 .push(language::proto::serialize_transaction(&transaction));
4844 }
4845 serialized_transaction
4846 }
4847
    /// Reconstructs a `ProjectTransaction` from its wire representation.
    ///
    /// Buffers and transactions are paired positionally (`zip`). After all
    /// buffers are resolved, this waits until each buffer has actually
    /// applied the edits its transaction references, and optionally pushes
    /// each transaction onto the buffer's history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Ensure the edits referenced by this transaction have been
                // applied before exposing the transaction to callers.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4881
4882 fn serialize_buffer_for_peer(
4883 &mut self,
4884 buffer: &ModelHandle<Buffer>,
4885 peer_id: PeerId,
4886 cx: &AppContext,
4887 ) -> proto::Buffer {
4888 let buffer_id = buffer.read(cx).remote_id();
4889 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4890 if shared_buffers.insert(buffer_id) {
4891 proto::Buffer {
4892 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4893 }
4894 } else {
4895 proto::Buffer {
4896 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4897 }
4898 }
4899 }
4900
    /// Resolves a `proto::Buffer` into a live buffer model.
    ///
    /// An `Id` variant refers to a buffer whose state was sent earlier; if it
    /// hasn't arrived yet, this waits on the `opened_buffer` watch channel
    /// until it shows up. A `State` variant carries the full buffer contents
    /// and creates and registers a new model.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Re-check the open-buffer map each time any buffer is
                    // opened, until the one with this id appears.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Reattach the buffer's file to its worktree, if any.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting in the `Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4959
    /// Rebuilds a `Symbol` from its protobuf representation, resolving a
    /// language from the symbol's path to produce a styled label.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): this transmutes the raw wire integer into the symbol
        // kind enum without validating it; an out-of-range value from a buggy
        // or malicious peer would be undefined behavior. Consider a checked
        // conversion instead.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to an unstyled label when no language claims the path.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4989
4990 async fn handle_buffer_saved(
4991 this: ModelHandle<Self>,
4992 envelope: TypedEnvelope<proto::BufferSaved>,
4993 _: Arc<Client>,
4994 mut cx: AsyncAppContext,
4995 ) -> Result<()> {
4996 let version = deserialize_version(envelope.payload.version);
4997 let mtime = envelope
4998 .payload
4999 .mtime
5000 .ok_or_else(|| anyhow!("missing mtime"))?
5001 .into();
5002
5003 this.update(&mut cx, |this, cx| {
5004 let buffer = this
5005 .opened_buffers
5006 .get(&envelope.payload.buffer_id)
5007 .and_then(|buffer| buffer.upgrade(cx));
5008 if let Some(buffer) = buffer {
5009 buffer.update(cx, |buffer, cx| {
5010 buffer.did_save(version, mtime, None, cx);
5011 });
5012 }
5013 Ok(())
5014 })
5015 }
5016
5017 async fn handle_buffer_reloaded(
5018 this: ModelHandle<Self>,
5019 envelope: TypedEnvelope<proto::BufferReloaded>,
5020 _: Arc<Client>,
5021 mut cx: AsyncAppContext,
5022 ) -> Result<()> {
5023 let payload = envelope.payload.clone();
5024 let version = deserialize_version(payload.version);
5025 let mtime = payload
5026 .mtime
5027 .ok_or_else(|| anyhow!("missing mtime"))?
5028 .into();
5029 this.update(&mut cx, |this, cx| {
5030 let buffer = this
5031 .opened_buffers
5032 .get(&payload.buffer_id)
5033 .and_then(|buffer| buffer.upgrade(cx));
5034 if let Some(buffer) = buffer {
5035 buffer.update(cx, |buffer, cx| {
5036 buffer.did_reload(version, mtime, cx);
5037 });
5038 }
5039 Ok(())
5040 })
5041 }
5042
5043 pub fn match_paths<'a>(
5044 &self,
5045 query: &'a str,
5046 include_ignored: bool,
5047 smart_case: bool,
5048 max_results: usize,
5049 cancel_flag: &'a AtomicBool,
5050 cx: &AppContext,
5051 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5052 let worktrees = self
5053 .worktrees(cx)
5054 .filter(|worktree| worktree.read(cx).is_visible())
5055 .collect::<Vec<_>>();
5056 let include_root_name = worktrees.len() > 1;
5057 let candidate_sets = worktrees
5058 .into_iter()
5059 .map(|worktree| CandidateSet {
5060 snapshot: worktree.read(cx).snapshot(),
5061 include_ignored,
5062 include_root_name,
5063 })
5064 .collect::<Vec<_>>();
5065
5066 let background = cx.background().clone();
5067 async move {
5068 fuzzy::match_paths(
5069 candidate_sets.as_slice(),
5070 query,
5071 smart_case,
5072 max_results,
5073 cancel_flag,
5074 background,
5075 )
5076 .await
5077 }
5078 }
5079
    /// Converts a batch of LSP `TextEdit`s into anchor-ranged edits on the
    /// given buffer, evaluated against the snapshot corresponding to
    /// `version` (or the buffer's current text when `version` is `None`).
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Only merge across a gap when it is exactly one
                        // newline: the next edit must start at column 0 of
                        // the following row, and this edit must already end
                        // at the end of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates don't lie on valid positions
                // in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether an unchanged region separates us from
                    // the previous emitted edit; consecutive delete/insert
                    // runs are merged into one edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5182
    /// Returns the text snapshot matching the LSP document `version`, or the
    /// buffer's current snapshot when no version is given.
    ///
    /// As a side effect, prunes snapshots more than `OLD_VERSIONS_TO_RETAIN`
    /// versions older than the requested one.
    fn buffer_snapshot_for_lsp_version(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        version: Option<i32>,
        cx: &AppContext,
    ) -> Result<TextBufferSnapshot> {
        // How many versions behind the requested one we keep around.
        const OLD_VERSIONS_TO_RETAIN: i32 = 10;

        if let Some(version) = version {
            let buffer_id = buffer.read(cx).remote_id();
            let snapshots = self
                .buffer_snapshots
                .get_mut(&buffer_id)
                .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
            let mut found_snapshot = None;
            // Single pass: drop stale snapshots while picking out the match.
            snapshots.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    false
                } else {
                    if *snapshot_version == version {
                        found_snapshot = Some(snapshot.clone());
                    }
                    true
                }
            });

            found_snapshot.ok_or_else(|| {
                anyhow!(
                    "snapshot not found for buffer {} at version {}",
                    buffer_id,
                    version
                )
            })
        } else {
            Ok((buffer.read(cx)).text_snapshot())
        }
    }
5220
5221 fn language_server_for_buffer(
5222 &self,
5223 buffer: &Buffer,
5224 cx: &AppContext,
5225 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5226 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5227 let worktree_id = file.worktree_id(cx);
5228 self.language_servers
5229 .get(&(worktree_id, language.lsp_adapter()?.name()))
5230 } else {
5231 None
5232 }
5233 }
5234}
5235
5236impl ProjectStore {
5237 pub fn new(db: Arc<Db>) -> Self {
5238 Self {
5239 db,
5240 projects: Default::default(),
5241 }
5242 }
5243
5244 pub fn projects<'a>(
5245 &'a self,
5246 cx: &'a AppContext,
5247 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5248 self.projects
5249 .iter()
5250 .filter_map(|project| project.upgrade(cx))
5251 }
5252
5253 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5254 if let Err(ix) = self
5255 .projects
5256 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5257 {
5258 self.projects.insert(ix, project);
5259 }
5260 cx.notify();
5261 }
5262
5263 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5264 let mut did_change = false;
5265 self.projects.retain(|project| {
5266 if project.is_upgradable(cx) {
5267 true
5268 } else {
5269 did_change = true;
5270 false
5271 }
5272 });
5273 if did_change {
5274 cx.notify();
5275 }
5276 }
5277
5278 pub fn are_all_project_paths_public(
5279 &self,
5280 project: &Project,
5281 cx: &AppContext,
5282 ) -> Task<Result<bool>> {
5283 let project_path_keys = self.project_path_keys(project, cx);
5284 let db = self.db.clone();
5285 cx.background().spawn(async move {
5286 let values = db.read(project_path_keys)?;
5287 Ok(values.into_iter().all(|e| e.is_some()))
5288 })
5289 }
5290
5291 pub fn set_project_paths_public(
5292 &self,
5293 project: &Project,
5294 public: bool,
5295 cx: &AppContext,
5296 ) -> Task<Result<()>> {
5297 let project_path_keys = self.project_path_keys(project, cx);
5298 let db = self.db.clone();
5299 cx.background().spawn(async move {
5300 if public {
5301 db.write(project_path_keys.into_iter().map(|key| (key, &[])))
5302 } else {
5303 db.delete(project_path_keys)
5304 }
5305 })
5306 }
5307
5308 fn project_path_keys(&self, project: &Project, cx: &AppContext) -> Vec<String> {
5309 project
5310 .worktrees
5311 .iter()
5312 .filter_map(|worktree| {
5313 worktree.upgrade(&cx).map(|worktree| {
5314 format!(
5315 "public-project-path:{}",
5316 worktree
5317 .read(cx)
5318 .as_local()
5319 .unwrap()
5320 .abs_path()
5321 .to_string_lossy()
5322 )
5323 })
5324 })
5325 .collect::<Vec<_>>()
5326 }
5327}
5328
5329impl WorktreeHandle {
5330 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5331 match self {
5332 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5333 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5334 }
5335 }
5336}
5337
5338impl OpenBuffer {
5339 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5340 match self {
5341 OpenBuffer::Strong(handle) => Some(handle.clone()),
5342 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5343 OpenBuffer::Loading(_) => None,
5344 }
5345 }
5346}
5347
/// A set of file-path candidates for fuzzy matching, backed by a single
/// worktree snapshot.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files are offered as candidates.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
5353
5354impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5355 type Candidates = CandidateSetIter<'a>;
5356
5357 fn id(&self) -> usize {
5358 self.snapshot.id().to_usize()
5359 }
5360
5361 fn len(&self) -> usize {
5362 if self.include_ignored {
5363 self.snapshot.file_count()
5364 } else {
5365 self.snapshot.visible_file_count()
5366 }
5367 }
5368
5369 fn prefix(&self) -> Arc<str> {
5370 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5371 self.snapshot.root_name().into()
5372 } else if self.include_root_name {
5373 format!("{}/", self.snapshot.root_name()).into()
5374 } else {
5375 "".into()
5376 }
5377 }
5378
5379 fn candidates(&'a self, start: usize) -> Self::Candidates {
5380 CandidateSetIter {
5381 traversal: self.snapshot.files(self.include_ignored, start),
5382 }
5383 }
5384}
5385
/// Iterator adapter turning a worktree file traversal into fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5389
5390impl<'a> Iterator for CandidateSetIter<'a> {
5391 type Item = PathMatchCandidate<'a>;
5392
5393 fn next(&mut self) -> Option<Self::Item> {
5394 self.traversal.next().map(|entry| {
5395 if let EntryKind::File(char_bag) = entry.kind {
5396 PathMatchCandidate {
5397 path: &entry.path,
5398 char_bag,
5399 }
5400 } else {
5401 unreachable!()
5402 }
5403 })
5404 }
5405}
5406
impl Entity for ProjectStore {
    // The store never emits events; observers rely on `cx.notify()` alone.
    type Event = ();
}
5410
impl Entity for Project {
    type Event = Event;

    /// Tears the project down when its last strong handle is dropped.
    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        // Remove this project's stale weak handle from the global store.
        self.project_store.update(cx, ProjectStore::prune_projects);

        // Notify the collaboration server: a locally-shared project is
        // unregistered, a remote project is left. Send failures are only
        // logged, since the connection may already be gone.
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Delays app shutdown until every language server has acknowledged a
    /// shutdown request.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, (_, server))| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
5452
5453impl Collaborator {
5454 fn from_proto(
5455 message: proto::Collaborator,
5456 user_store: &ModelHandle<UserStore>,
5457 cx: &mut AsyncAppContext,
5458 ) -> impl Future<Output = Result<Self>> {
5459 let user = user_store.update(cx, |user_store, cx| {
5460 user_store.fetch_user(message.user_id, cx)
5461 });
5462
5463 async move {
5464 Ok(Self {
5465 peer_id: PeerId(message.peer_id),
5466 user: user.await?,
5467 replica_id: message.replica_id as ReplicaId,
5468 })
5469 }
5470 }
5471}
5472
5473impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5474 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5475 Self {
5476 worktree_id,
5477 path: path.as_ref().into(),
5478 }
5479 }
5480}
5481
5482impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5483 fn from(options: lsp::CreateFileOptions) -> Self {
5484 Self {
5485 overwrite: options.overwrite.unwrap_or(false),
5486 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5487 }
5488 }
5489}
5490
5491impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5492 fn from(options: lsp::RenameFileOptions) -> Self {
5493 Self {
5494 overwrite: options.overwrite.unwrap_or(false),
5495 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5496 }
5497 }
5498}
5499
5500impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5501 fn from(options: lsp::DeleteFileOptions) -> Self {
5502 Self {
5503 recursive: options.recursive.unwrap_or(false),
5504 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5505 }
5506 }
5507}
5508
/// Converts a `Symbol` into its protobuf wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY(review): reinterprets the symbol-kind enum's bits as the
        // proto field's integer type. This assumes both representations
        // have identical size and value mapping — TODO confirm and prefer
        // a checked conversion if one is available.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Lossy conversion: non-UTF-8 path bytes become U+FFFD.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5528
/// Expresses `path` relative to `base`, emitting `..` components to climb
/// out of `base` where the two diverge. Identical paths yield an empty
/// `PathBuf`.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut anchor = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target.next(), anchor.next()) {
            // Both exhausted: the paths were identical.
            (None, None) => break,
            // Base exhausted: the remainder of `path` is the answer.
            (Some(t), None) => {
                result.push(t);
                result.extend(target.by_ref());
                break;
            }
            // Path exhausted: climb up once per leftover base component.
            (None, _) => result.push(Component::ParentDir),
            // Shared prefix: skip matching components until divergence.
            (Some(t), Some(a)) if result.is_empty() && t == a => {}
            // A `.` in base consumes nothing; keep the path component.
            (Some(t), Some(a)) if a == Component::CurDir => result.push(t),
            // Divergence: climb out of the rest of base, then descend.
            (Some(t), Some(_)) => {
                result.push(Component::ParentDir);
                result.extend(anchor.by_ref().map(|_| Component::ParentDir));
                result.push(t);
                result.extend(target.by_ref());
                break;
            }
        }
    }
    result.into_iter().map(|c| c.as_os_str()).collect()
}
5557
5558impl Item for Buffer {
5559 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5560 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5561 }
5562}
5563
5564#[cfg(test)]
5565mod tests {
5566 use crate::worktree::WorktreeHandle;
5567
5568 use super::{Event, *};
5569 use fs::RealFs;
5570 use futures::{future, StreamExt};
5571 use gpui::test::subscribe;
5572 use language::{
5573 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5574 OffsetRangeExt, Point, ToPoint,
5575 };
5576 use lsp::Url;
5577 use serde_json::json;
5578 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5579 use unindent::Unindent as _;
5580 use util::{assert_set_eq, test::temp_tree};
5581
    // Verifies that a project rooted at a symlink scans the linked
    // directory's real contents and that fuzzy path matching works on it.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // The project root itself is a symlink, and one subdirectory is a
        // symlink to a sibling.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same inodes as its
            // target.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        // "bna" fuzzy-matches only the files under "banana".
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5635
    // Exercises the full lifecycle of language-server management: starting
    // servers lazily per language, routing open/change/save/close
    // notifications, handling renames across languages, and restarting
    // servers while preserving document versions.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Seed the buffer with a diagnostic so we can check it is cleared
        // when the buffer's language changes below.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers receive a shutdown request before the new ones
        // come up.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
6012
    // Verifies that diagnostics published for two single-file worktrees are
    // routed to the correct buffer and surfaced through highlighted chunks.
    #[gpui::test]
    async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "let a = 1;",
                "b.rs": "let b = 2;"
            }),
        )
        .await;

        // Each file is opened as its own single-file worktree.
        let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

        let buffer_a = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        // Publish one diagnostic per file, with different severities.
        project.update(cx, |project, cx| {
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/a.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::ERROR),
                            message: "error 1".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/b.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::WARNING),
                            message: "error 2".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
        });

        // Each buffer shows only its own diagnostic, at the right range.
        buffer_a.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("a", Some(DiagnosticSeverity::ERROR)),
                    (" = 1;", None),
                ]
            );
        });
        buffer_b.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("b", Some(DiagnosticSeverity::WARNING)),
                    (" = 2;", None),
                ]
            );
        });
    }
6108
    // Verifies the project's disk-based-diagnostics event stream: nested
    // progress tokens produce a single started/finished pair, published
    // diagnostics emit update events, and republishing empty diagnostics is
    // deduplicated.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let worktree_id =
            project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested start/end pairs for the same token must not emit extra
        // started/finished events.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Closing the remaining open progress tokens completes the cycle.
        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // The diagnostic published while the buffer was closed is present
        // once the buffer is opened.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });

        // Ensure publishing empty diagnostics twice only results in one update event.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        cx.foreground().run_until_parked();
        assert_eq!(futures::poll!(events.next()), Poll::Pending);
    }
6238
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress doesn't leave the project stuck in
    // the "running diagnostics" state: the old server's unfinished progress
    // token is abandoned and the new server's cycle completes normally.
    #[gpui::test]
    async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            disk_based_diagnostics_progress_token: Some(progress_token),
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate diagnostics starting to update.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;

        // Restart the server before the diagnostics finish updating.
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers([buffer], cx);
        });
        let mut events = subscribe(&project, cx);

        // Simulate the newly started server sending more diagnostics.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // All diagnostics are considered done, despite the old server's diagnostic
        // task never completing.
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );
        project.read_with(cx, |project, _| {
            assert!(!project.is_running_disk_based_diagnostics());
        });
    }
6301
6302 #[gpui::test]
6303 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6304 cx.foreground().forbid_parking();
6305
6306 let mut language = Language::new(
6307 LanguageConfig {
6308 name: "Rust".into(),
6309 path_suffixes: vec!["rs".to_string()],
6310 ..Default::default()
6311 },
6312 Some(tree_sitter_rust::language()),
6313 );
6314 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6315 disk_based_diagnostics_sources: &["disk"],
6316 ..Default::default()
6317 });
6318
6319 let text = "
6320 fn a() { A }
6321 fn b() { BB }
6322 fn c() { CCC }
6323 "
6324 .unindent();
6325
6326 let fs = FakeFs::new(cx.background());
6327 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6328
6329 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6330 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6331
6332 let buffer = project
6333 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6334 .await
6335 .unwrap();
6336
6337 let mut fake_server = fake_servers.next().await.unwrap();
6338 let open_notification = fake_server
6339 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6340 .await;
6341
6342 // Edit the buffer, moving the content down
6343 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6344 let change_notification_1 = fake_server
6345 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6346 .await;
6347 assert!(
6348 change_notification_1.text_document.version > open_notification.text_document.version
6349 );
6350
6351 // Report some diagnostics for the initial version of the buffer
6352 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6353 lsp::PublishDiagnosticsParams {
6354 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6355 version: Some(open_notification.text_document.version),
6356 diagnostics: vec![
6357 lsp::Diagnostic {
6358 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6359 severity: Some(DiagnosticSeverity::ERROR),
6360 message: "undefined variable 'A'".to_string(),
6361 source: Some("disk".to_string()),
6362 ..Default::default()
6363 },
6364 lsp::Diagnostic {
6365 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6366 severity: Some(DiagnosticSeverity::ERROR),
6367 message: "undefined variable 'BB'".to_string(),
6368 source: Some("disk".to_string()),
6369 ..Default::default()
6370 },
6371 lsp::Diagnostic {
6372 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6373 severity: Some(DiagnosticSeverity::ERROR),
6374 source: Some("disk".to_string()),
6375 message: "undefined variable 'CCC'".to_string(),
6376 ..Default::default()
6377 },
6378 ],
6379 },
6380 );
6381
6382 // The diagnostics have moved down since they were created.
6383 buffer.next_notification(cx).await;
6384 buffer.read_with(cx, |buffer, _| {
6385 assert_eq!(
6386 buffer
6387 .snapshot()
6388 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6389 .collect::<Vec<_>>(),
6390 &[
6391 DiagnosticEntry {
6392 range: Point::new(3, 9)..Point::new(3, 11),
6393 diagnostic: Diagnostic {
6394 severity: DiagnosticSeverity::ERROR,
6395 message: "undefined variable 'BB'".to_string(),
6396 is_disk_based: true,
6397 group_id: 1,
6398 is_primary: true,
6399 ..Default::default()
6400 },
6401 },
6402 DiagnosticEntry {
6403 range: Point::new(4, 9)..Point::new(4, 12),
6404 diagnostic: Diagnostic {
6405 severity: DiagnosticSeverity::ERROR,
6406 message: "undefined variable 'CCC'".to_string(),
6407 is_disk_based: true,
6408 group_id: 2,
6409 is_primary: true,
6410 ..Default::default()
6411 }
6412 }
6413 ]
6414 );
6415 assert_eq!(
6416 chunks_with_diagnostics(buffer, 0..buffer.len()),
6417 [
6418 ("\n\nfn a() { ".to_string(), None),
6419 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6420 (" }\nfn b() { ".to_string(), None),
6421 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6422 (" }\nfn c() { ".to_string(), None),
6423 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6424 (" }\n".to_string(), None),
6425 ]
6426 );
6427 assert_eq!(
6428 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6429 [
6430 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6431 (" }\nfn c() { ".to_string(), None),
6432 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6433 ]
6434 );
6435 });
6436
6437 // Ensure overlapping diagnostics are highlighted correctly.
6438 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6439 lsp::PublishDiagnosticsParams {
6440 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6441 version: Some(open_notification.text_document.version),
6442 diagnostics: vec![
6443 lsp::Diagnostic {
6444 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6445 severity: Some(DiagnosticSeverity::ERROR),
6446 message: "undefined variable 'A'".to_string(),
6447 source: Some("disk".to_string()),
6448 ..Default::default()
6449 },
6450 lsp::Diagnostic {
6451 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6452 severity: Some(DiagnosticSeverity::WARNING),
6453 message: "unreachable statement".to_string(),
6454 source: Some("disk".to_string()),
6455 ..Default::default()
6456 },
6457 ],
6458 },
6459 );
6460
6461 buffer.next_notification(cx).await;
6462 buffer.read_with(cx, |buffer, _| {
6463 assert_eq!(
6464 buffer
6465 .snapshot()
6466 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6467 .collect::<Vec<_>>(),
6468 &[
6469 DiagnosticEntry {
6470 range: Point::new(2, 9)..Point::new(2, 12),
6471 diagnostic: Diagnostic {
6472 severity: DiagnosticSeverity::WARNING,
6473 message: "unreachable statement".to_string(),
6474 is_disk_based: true,
6475 group_id: 4,
6476 is_primary: true,
6477 ..Default::default()
6478 }
6479 },
6480 DiagnosticEntry {
6481 range: Point::new(2, 9)..Point::new(2, 10),
6482 diagnostic: Diagnostic {
6483 severity: DiagnosticSeverity::ERROR,
6484 message: "undefined variable 'A'".to_string(),
6485 is_disk_based: true,
6486 group_id: 3,
6487 is_primary: true,
6488 ..Default::default()
6489 },
6490 }
6491 ]
6492 );
6493 assert_eq!(
6494 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6495 [
6496 ("fn a() { ".to_string(), None),
6497 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6498 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6499 ("\n".to_string(), None),
6500 ]
6501 );
6502 assert_eq!(
6503 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6504 [
6505 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6506 ("\n".to_string(), None),
6507 ]
6508 );
6509 });
6510
6511 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6512 // changes since the last save.
6513 buffer.update(cx, |buffer, cx| {
6514 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6515 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6516 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6517 });
6518 let change_notification_2 = fake_server
6519 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6520 .await;
6521 assert!(
6522 change_notification_2.text_document.version
6523 > change_notification_1.text_document.version
6524 );
6525
6526 // Handle out-of-order diagnostics
6527 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6528 lsp::PublishDiagnosticsParams {
6529 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6530 version: Some(change_notification_2.text_document.version),
6531 diagnostics: vec![
6532 lsp::Diagnostic {
6533 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6534 severity: Some(DiagnosticSeverity::ERROR),
6535 message: "undefined variable 'BB'".to_string(),
6536 source: Some("disk".to_string()),
6537 ..Default::default()
6538 },
6539 lsp::Diagnostic {
6540 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6541 severity: Some(DiagnosticSeverity::WARNING),
6542 message: "undefined variable 'A'".to_string(),
6543 source: Some("disk".to_string()),
6544 ..Default::default()
6545 },
6546 ],
6547 },
6548 );
6549
6550 buffer.next_notification(cx).await;
6551 buffer.read_with(cx, |buffer, _| {
6552 assert_eq!(
6553 buffer
6554 .snapshot()
6555 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6556 .collect::<Vec<_>>(),
6557 &[
6558 DiagnosticEntry {
6559 range: Point::new(2, 21)..Point::new(2, 22),
6560 diagnostic: Diagnostic {
6561 severity: DiagnosticSeverity::WARNING,
6562 message: "undefined variable 'A'".to_string(),
6563 is_disk_based: true,
6564 group_id: 6,
6565 is_primary: true,
6566 ..Default::default()
6567 }
6568 },
6569 DiagnosticEntry {
6570 range: Point::new(3, 9)..Point::new(3, 14),
6571 diagnostic: Diagnostic {
6572 severity: DiagnosticSeverity::ERROR,
6573 message: "undefined variable 'BB'".to_string(),
6574 is_disk_based: true,
6575 group_id: 5,
6576 is_primary: true,
6577 ..Default::default()
6578 },
6579 }
6580 ]
6581 );
6582 });
6583 }
6584
6585 #[gpui::test]
6586 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6587 cx.foreground().forbid_parking();
6588
6589 let text = concat!(
6590 "let one = ;\n", //
6591 "let two = \n",
6592 "let three = 3;\n",
6593 );
6594
6595 let fs = FakeFs::new(cx.background());
6596 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6597
6598 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6599 let buffer = project
6600 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6601 .await
6602 .unwrap();
6603
6604 project.update(cx, |project, cx| {
6605 project
6606 .update_buffer_diagnostics(
6607 &buffer,
6608 vec![
6609 DiagnosticEntry {
6610 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6611 diagnostic: Diagnostic {
6612 severity: DiagnosticSeverity::ERROR,
6613 message: "syntax error 1".to_string(),
6614 ..Default::default()
6615 },
6616 },
6617 DiagnosticEntry {
6618 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6619 diagnostic: Diagnostic {
6620 severity: DiagnosticSeverity::ERROR,
6621 message: "syntax error 2".to_string(),
6622 ..Default::default()
6623 },
6624 },
6625 ],
6626 None,
6627 cx,
6628 )
6629 .unwrap();
6630 });
6631
6632 // An empty range is extended forward to include the following character.
6633 // At the end of a line, an empty range is extended backward to include
6634 // the preceding character.
6635 buffer.read_with(cx, |buffer, _| {
6636 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6637 assert_eq!(
6638 chunks
6639 .iter()
6640 .map(|(s, d)| (s.as_str(), *d))
6641 .collect::<Vec<_>>(),
6642 &[
6643 ("let one = ", None),
6644 (";", Some(DiagnosticSeverity::ERROR)),
6645 ("\nlet two =", None),
6646 (" ", Some(DiagnosticSeverity::ERROR)),
6647 ("\nlet three = 3;\n", None)
6648 ]
6649 );
6650 });
6651 }
6652
#[gpui::test]
// Verifies that edits computed by a language server against an OLDER version
// of a document are correctly translated forward through the buffer edits
// that happened since that version was sent to the server.
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened.
    // The LSP edits below will be anchored to this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret LSP edits expressed in coordinates of the OLD document version;
    // `edits_from_lsp` must map them into the buffer's current coordinates.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's interleaved edits
    // (the comments) while landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
6818
#[gpui::test]
// Verifies that a semantically-small change expressed as a large,
// overlapping set of LSP text edits is minimized into a compact set of
// buffer edits rather than rewriting the whole file.
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits above collapse into just two buffer
        // edits: merging the import and deleting the now-redundant line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
6937
6938 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6939 buffer: &Buffer,
6940 range: Range<T>,
6941 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6942 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6943 for chunk in buffer.snapshot().chunks(range, true) {
6944 if chunks.last().map_or(false, |prev_chunk| {
6945 prev_chunk.1 == chunk.diagnostic_severity
6946 }) {
6947 chunks.last_mut().unwrap().0.push_str(chunk.text);
6948 } else {
6949 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6950 }
6951 }
6952 chunks
6953 }
6954
6955 #[gpui::test]
6956 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6957 let dir = temp_tree(json!({
6958 "root": {
6959 "dir1": {},
6960 "dir2": {
6961 "dir3": {}
6962 }
6963 }
6964 }));
6965
6966 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6967 let cancel_flag = Default::default();
6968 let results = project
6969 .read_with(cx, |project, cx| {
6970 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6971 })
6972 .await;
6973
6974 assert!(results.is_empty());
6975 }
6976
#[gpui::test(iterations = 10)]
// Verifies go-to-definition across files: the target file (outside the
// project's visible worktree) is opened in a temporary, invisible worktree
// that is released once the last handle to the definition is dropped.
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // The project only contains b.rs; a.rs is deliberately outside of it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server responds to the definition request by pointing into
    // a.rs, a file that is not part of any existing worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // a.rs was opened via a second worktree that is NOT visible
        // (the `false` flag) — it exists only to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition released the last handle to the target buffer,
    // so the temporary invisible worktree is gone.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
7071
#[gpui::test]
// Verifies that when a completion item provides no `text_edit`, the editor
// falls back to `insert_text` and infers the range to replace from the
// word fragment preceding the cursor.
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_language_server = fake_language_servers.next().await.unwrap();

    // Request completions with the cursor right after the "fqn" fragment.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The fake server's completion item carries only a label and
    // `insert_text` — deliberately no edit range.
    fake_language_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range is the 3-character "fqn" fragment before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
7129
#[gpui::test(iterations = 10)]
// Verifies the command-based code-action flow: when resolving an action
// yields no edits, the editor executes the action's command instead, and
// the edits arrive via a server-initiated `workspace/applyEdit` request.
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-carrying) action.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
7245
7246 #[gpui::test]
7247 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7248 let fs = FakeFs::new(cx.background());
7249 fs.insert_tree(
7250 "/dir",
7251 json!({
7252 "file1": "the old contents",
7253 }),
7254 )
7255 .await;
7256
7257 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7258 let buffer = project
7259 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7260 .await
7261 .unwrap();
7262 buffer
7263 .update(cx, |buffer, cx| {
7264 assert_eq!(buffer.text(), "the old contents");
7265 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7266 buffer.save(cx)
7267 })
7268 .await
7269 .unwrap();
7270
7271 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7272 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7273 }
7274
7275 #[gpui::test]
7276 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7277 let fs = FakeFs::new(cx.background());
7278 fs.insert_tree(
7279 "/dir",
7280 json!({
7281 "file1": "the old contents",
7282 }),
7283 )
7284 .await;
7285
7286 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7287 let buffer = project
7288 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7289 .await
7290 .unwrap();
7291 buffer
7292 .update(cx, |buffer, cx| {
7293 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7294 buffer.save(cx)
7295 })
7296 .await
7297 .unwrap();
7298
7299 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7300 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7301 }
7302
7303 #[gpui::test]
7304 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7305 let fs = FakeFs::new(cx.background());
7306 fs.insert_tree("/dir", json!({})).await;
7307
7308 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7309 let buffer = project.update(cx, |project, cx| {
7310 project.create_buffer("", None, cx).unwrap()
7311 });
7312 buffer.update(cx, |buffer, cx| {
7313 buffer.edit([(0..0, "abc")], cx);
7314 assert!(buffer.is_dirty());
7315 assert!(!buffer.has_conflict());
7316 });
7317 project
7318 .update(cx, |project, cx| {
7319 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7320 })
7321 .await
7322 .unwrap();
7323 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7324 buffer.read_with(cx, |buffer, cx| {
7325 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7326 assert!(!buffer.is_dirty());
7327 assert!(!buffer.has_conflict());
7328 });
7329
7330 let opened_buffer = project
7331 .update(cx, |project, cx| {
7332 project.open_local_buffer("/dir/file1", cx)
7333 })
7334 .await
7335 .unwrap();
7336 assert_eq!(opened_buffer, buffer);
7337 }
7338
7339 #[gpui::test(retries = 5)]
7340 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7341 let dir = temp_tree(json!({
7342 "a": {
7343 "file1": "",
7344 "file2": "",
7345 "file3": "",
7346 },
7347 "b": {
7348 "c": {
7349 "file4": "",
7350 "file5": "",
7351 }
7352 }
7353 }));
7354
7355 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7356 let rpc = project.read_with(cx, |p, _| p.client.clone());
7357
7358 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7359 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7360 async move { buffer.await.unwrap() }
7361 };
7362 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7363 project.read_with(cx, |project, cx| {
7364 let tree = project.worktrees(cx).next().unwrap();
7365 tree.read(cx)
7366 .entry_for_path(path)
7367 .expect(&format!("no entry for path {}", path))
7368 .id
7369 })
7370 };
7371
7372 let buffer2 = buffer_for_path("a/file2", cx).await;
7373 let buffer3 = buffer_for_path("a/file3", cx).await;
7374 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7375 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7376
7377 let file2_id = id_for_path("a/file2", &cx);
7378 let file3_id = id_for_path("a/file3", &cx);
7379 let file4_id = id_for_path("b/c/file4", &cx);
7380
7381 // Create a remote copy of this worktree.
7382 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7383 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7384 let (remote, load_task) = cx.update(|cx| {
7385 Worktree::remote(
7386 1,
7387 1,
7388 initial_snapshot.to_proto(&Default::default(), true),
7389 rpc.clone(),
7390 cx,
7391 )
7392 });
7393 // tree
7394 load_task.await;
7395
7396 cx.read(|cx| {
7397 assert!(!buffer2.read(cx).is_dirty());
7398 assert!(!buffer3.read(cx).is_dirty());
7399 assert!(!buffer4.read(cx).is_dirty());
7400 assert!(!buffer5.read(cx).is_dirty());
7401 });
7402
7403 // Rename and delete files and directories.
7404 tree.flush_fs_events(&cx).await;
7405 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7406 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7407 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7408 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7409 tree.flush_fs_events(&cx).await;
7410
7411 let expected_paths = vec![
7412 "a",
7413 "a/file1",
7414 "a/file2.new",
7415 "b",
7416 "d",
7417 "d/file3",
7418 "d/file4",
7419 ];
7420
7421 cx.read(|app| {
7422 assert_eq!(
7423 tree.read(app)
7424 .paths()
7425 .map(|p| p.to_str().unwrap())
7426 .collect::<Vec<_>>(),
7427 expected_paths
7428 );
7429
7430 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7431 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7432 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7433
7434 assert_eq!(
7435 buffer2.read(app).file().unwrap().path().as_ref(),
7436 Path::new("a/file2.new")
7437 );
7438 assert_eq!(
7439 buffer3.read(app).file().unwrap().path().as_ref(),
7440 Path::new("d/file3")
7441 );
7442 assert_eq!(
7443 buffer4.read(app).file().unwrap().path().as_ref(),
7444 Path::new("d/file4")
7445 );
7446 assert_eq!(
7447 buffer5.read(app).file().unwrap().path().as_ref(),
7448 Path::new("b/c/file5")
7449 );
7450
7451 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7452 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7453 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7454 assert!(buffer5.read(app).file().unwrap().is_deleted());
7455 });
7456
7457 // Update the remote worktree. Check that it becomes consistent with the
7458 // local worktree.
7459 remote.update(cx, |remote, cx| {
7460 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7461 &initial_snapshot,
7462 1,
7463 1,
7464 true,
7465 );
7466 remote
7467 .as_remote_mut()
7468 .unwrap()
7469 .snapshot
7470 .apply_remote_update(update_message)
7471 .unwrap();
7472
7473 assert_eq!(
7474 remote
7475 .paths()
7476 .map(|p| p.to_str().unwrap())
7477 .collect::<Vec<_>>(),
7478 expected_paths
7479 );
7480 });
7481 }
7482
7483 #[gpui::test]
7484 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7485 let fs = FakeFs::new(cx.background());
7486 fs.insert_tree(
7487 "/dir",
7488 json!({
7489 "a.txt": "a-contents",
7490 "b.txt": "b-contents",
7491 }),
7492 )
7493 .await;
7494
7495 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7496
7497 // Spawn multiple tasks to open paths, repeating some paths.
7498 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7499 (
7500 p.open_local_buffer("/dir/a.txt", cx),
7501 p.open_local_buffer("/dir/b.txt", cx),
7502 p.open_local_buffer("/dir/a.txt", cx),
7503 )
7504 });
7505
7506 let buffer_a_1 = buffer_a_1.await.unwrap();
7507 let buffer_a_2 = buffer_a_2.await.unwrap();
7508 let buffer_b = buffer_b.await.unwrap();
7509 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7510 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7511
7512 // There is only one buffer per path.
7513 let buffer_a_id = buffer_a_1.id();
7514 assert_eq!(buffer_a_2.id(), buffer_a_id);
7515
7516 // Open the same path again while it is still open.
7517 drop(buffer_a_1);
7518 let buffer_a_3 = project
7519 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7520 .await
7521 .unwrap();
7522
7523 // There's still only one buffer per path.
7524 assert_eq!(buffer_a_3.id(), buffer_a_id);
7525 }
7526
7527 #[gpui::test]
7528 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7529 let fs = FakeFs::new(cx.background());
7530 fs.insert_tree(
7531 "/dir",
7532 json!({
7533 "file1": "abc",
7534 "file2": "def",
7535 "file3": "ghi",
7536 }),
7537 )
7538 .await;
7539
7540 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7541
7542 let buffer1 = project
7543 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7544 .await
7545 .unwrap();
7546 let events = Rc::new(RefCell::new(Vec::new()));
7547
7548 // initially, the buffer isn't dirty.
7549 buffer1.update(cx, |buffer, cx| {
7550 cx.subscribe(&buffer1, {
7551 let events = events.clone();
7552 move |_, _, event, _| match event {
7553 BufferEvent::Operation(_) => {}
7554 _ => events.borrow_mut().push(event.clone()),
7555 }
7556 })
7557 .detach();
7558
7559 assert!(!buffer.is_dirty());
7560 assert!(events.borrow().is_empty());
7561
7562 buffer.edit([(1..2, "")], cx);
7563 });
7564
7565 // after the first edit, the buffer is dirty, and emits a dirtied event.
7566 buffer1.update(cx, |buffer, cx| {
7567 assert!(buffer.text() == "ac");
7568 assert!(buffer.is_dirty());
7569 assert_eq!(
7570 *events.borrow(),
7571 &[language::Event::Edited, language::Event::Dirtied]
7572 );
7573 events.borrow_mut().clear();
7574 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7575 });
7576
7577 // after saving, the buffer is not dirty, and emits a saved event.
7578 buffer1.update(cx, |buffer, cx| {
7579 assert!(!buffer.is_dirty());
7580 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7581 events.borrow_mut().clear();
7582
7583 buffer.edit([(1..1, "B")], cx);
7584 buffer.edit([(2..2, "D")], cx);
7585 });
7586
7587 // after editing again, the buffer is dirty, and emits another dirty event.
7588 buffer1.update(cx, |buffer, cx| {
7589 assert!(buffer.text() == "aBDc");
7590 assert!(buffer.is_dirty());
7591 assert_eq!(
7592 *events.borrow(),
7593 &[
7594 language::Event::Edited,
7595 language::Event::Dirtied,
7596 language::Event::Edited,
7597 ],
7598 );
7599 events.borrow_mut().clear();
7600
7601 // TODO - currently, after restoring the buffer to its
7602 // previously-saved state, the is still considered dirty.
7603 buffer.edit([(1..3, "")], cx);
7604 assert!(buffer.text() == "ac");
7605 assert!(buffer.is_dirty());
7606 });
7607
7608 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7609
7610 // When a file is deleted, the buffer is considered dirty.
7611 let events = Rc::new(RefCell::new(Vec::new()));
7612 let buffer2 = project
7613 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7614 .await
7615 .unwrap();
7616 buffer2.update(cx, |_, cx| {
7617 cx.subscribe(&buffer2, {
7618 let events = events.clone();
7619 move |_, _, event, _| events.borrow_mut().push(event.clone())
7620 })
7621 .detach();
7622 });
7623
7624 fs.remove_file("/dir/file2".as_ref(), Default::default())
7625 .await
7626 .unwrap();
7627 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7628 assert_eq!(
7629 *events.borrow(),
7630 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7631 );
7632
7633 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7634 let events = Rc::new(RefCell::new(Vec::new()));
7635 let buffer3 = project
7636 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7637 .await
7638 .unwrap();
7639 buffer3.update(cx, |_, cx| {
7640 cx.subscribe(&buffer3, {
7641 let events = events.clone();
7642 move |_, _, event, _| events.borrow_mut().push(event.clone())
7643 })
7644 .detach();
7645 });
7646
7647 buffer3.update(cx, |buffer, cx| {
7648 buffer.edit([(0..0, "x")], cx);
7649 });
7650 events.borrow_mut().clear();
7651 fs.remove_file("/dir/file3".as_ref(), Default::default())
7652 .await
7653 .unwrap();
7654 buffer3
7655 .condition(&cx, |_, _| !events.borrow().is_empty())
7656 .await;
7657 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7658 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7659 }
7660
    // Verifies how an open buffer reacts to its file changing on disk: a
    // clean buffer is reloaded in place (with anchors surviving the diff),
    // while a dirty buffer keeps its edits and is flagged as conflicted.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        let initial_contents = "aaa\nbbbbb\nc\n";
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "the-file": initial_contents,
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
            .await
            .unwrap();

        // Place an anchor one column into each of the first three lines, so we
        // can observe where they land after the file is reloaded.
        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // The anchors moved with the diffed text: "aaa" is now row 1,
            // "bbbbb" is row 3, and the anchor on the removed "c" line
            // resolves to the start of row 4.
            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
7736
    // Verifies that diagnostics published by a language server are grouped:
    // each primary diagnostic and the HINT entries linked to it through
    // `related_information` share a single `group_id`, and
    // `diagnostic_group` returns exactly the members of one group.
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Simulated `textDocument/publishDiagnostics` payload containing two
        // primary diagnostics (a WARNING with one related hint, and an ERROR
        // with two related hints). The HINT entries point back at their
        // primary via an "original diagnostic" related-information entry.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All entries, ordered by buffer position. The warning and its hint
        // form group 0; the error and its two hints form group 1, with
        // `is_primary` set only on the warning/error entries.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Requesting a single group returns only that group's members.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
7987
    // Exercises `Project::prepare_rename` and `Project::perform_rename`
    // against a fake LSP server: the prepare step returns the symbol range,
    // and the rename step applies a multi-file `WorkspaceEdit` as a project
    // transaction.
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        // Advertise rename support (with prepare) so the project issues the
        // prepare/rename requests to the fake server.
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // Start the prepare-rename request first, then let the fake server
        // answer it with the range of the symbol under the cursor.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        // The returned anchor range covers "ONE" (offsets 6..9).
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Perform the rename; the fake server responds with a workspace edit
        // touching both files, including one ("two.rs") that isn't open yet.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The resulting transaction maps each edited buffer to its
        // transaction; both files received the edits.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
8126
8127 #[gpui::test]
8128 async fn test_search(cx: &mut gpui::TestAppContext) {
8129 let fs = FakeFs::new(cx.background());
8130 fs.insert_tree(
8131 "/dir",
8132 json!({
8133 "one.rs": "const ONE: usize = 1;",
8134 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8135 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8136 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8137 }),
8138 )
8139 .await;
8140 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8141 assert_eq!(
8142 search(&project, SearchQuery::text("TWO", false, true), cx)
8143 .await
8144 .unwrap(),
8145 HashMap::from_iter([
8146 ("two.rs".to_string(), vec![6..9]),
8147 ("three.rs".to_string(), vec![37..40])
8148 ])
8149 );
8150
8151 let buffer_4 = project
8152 .update(cx, |project, cx| {
8153 project.open_local_buffer("/dir/four.rs", cx)
8154 })
8155 .await
8156 .unwrap();
8157 buffer_4.update(cx, |buffer, cx| {
8158 let text = "two::TWO";
8159 buffer.edit([(20..28, text), (31..43, text)], cx);
8160 });
8161
8162 assert_eq!(
8163 search(&project, SearchQuery::text("TWO", false, true), cx)
8164 .await
8165 .unwrap(),
8166 HashMap::from_iter([
8167 ("two.rs".to_string(), vec![6..9]),
8168 ("three.rs".to_string(), vec![37..40]),
8169 ("four.rs".to_string(), vec![25..28, 36..39])
8170 ])
8171 );
8172
8173 async fn search(
8174 project: &ModelHandle<Project>,
8175 query: SearchQuery,
8176 cx: &mut gpui::TestAppContext,
8177 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8178 let results = project
8179 .update(cx, |project, cx| project.search(query, cx))
8180 .await?;
8181
8182 Ok(results
8183 .into_iter()
8184 .map(|(buffer, ranges)| {
8185 buffer.read_with(cx, |buffer, _| {
8186 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8187 let ranges = ranges
8188 .into_iter()
8189 .map(|range| range.to_offset(buffer))
8190 .collect::<Vec<_>>();
8191 (path, ranges)
8192 })
8193 })
8194 .collect())
8195 }
8196 }
8197}