1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102 initialized_persistent_state: bool,
103}
104
105#[derive(Error, Debug)]
106pub enum JoinProjectError {
107 #[error("host declined join request")]
108 HostDeclined,
109 #[error("host closed the project")]
110 HostClosedProject,
111 #[error("host went offline")]
112 HostWentOffline,
113 #[error("{0}")]
114 Other(#[from] anyhow::Error),
115}
116
117enum OpenBuffer {
118 Strong(ModelHandle<Buffer>),
119 Weak(WeakModelHandle<Buffer>),
120 Loading(Vec<Operation>),
121}
122
123enum WorktreeHandle {
124 Strong(ModelHandle<Worktree>),
125 Weak(WeakModelHandle<Worktree>),
126}
127
128enum ProjectClientState {
129 Local {
130 is_shared: bool,
131 remote_id_tx: watch::Sender<Option<u64>>,
132 remote_id_rx: watch::Receiver<Option<u64>>,
133 online_tx: watch::Sender<bool>,
134 online_rx: watch::Receiver<bool>,
135 _maintain_remote_id_task: Task<Option<()>>,
136 },
137 Remote {
138 sharing_has_stopped: bool,
139 remote_id: u64,
140 replica_id: ReplicaId,
141 _detect_unshare_task: Task<Option<()>>,
142 },
143}
144
145#[derive(Clone, Debug)]
146pub struct Collaborator {
147 pub user: Arc<User>,
148 pub peer_id: PeerId,
149 pub replica_id: ReplicaId,
150}
151
152#[derive(Clone, Debug, PartialEq, Eq)]
153pub enum Event {
154 ActiveEntryChanged(Option<ProjectEntryId>),
155 WorktreeAdded,
156 WorktreeRemoved(WorktreeId),
157 DiskBasedDiagnosticsStarted,
158 DiskBasedDiagnosticsUpdated,
159 DiskBasedDiagnosticsFinished,
160 DiagnosticsUpdated(ProjectPath),
161 RemoteIdChanged(Option<u64>),
162 CollaboratorLeft(PeerId),
163 ContactRequestedJoin(Arc<User>),
164 ContactCancelledJoinRequest(Arc<User>),
165}
166
167#[derive(Serialize)]
168pub struct LanguageServerStatus {
169 pub name: String,
170 pub pending_work: BTreeMap<String, LanguageServerProgress>,
171 pub pending_diagnostic_updates: isize,
172}
173
174#[derive(Clone, Debug, Serialize)]
175pub struct LanguageServerProgress {
176 pub message: Option<String>,
177 pub percentage: Option<usize>,
178 #[serde(skip_serializing)]
179 pub last_update_at: Instant,
180}
181
182#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
183pub struct ProjectPath {
184 pub worktree_id: WorktreeId,
185 pub path: Arc<Path>,
186}
187
188#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
189pub struct DiagnosticSummary {
190 pub error_count: usize,
191 pub warning_count: usize,
192}
193
194#[derive(Debug)]
195pub struct Location {
196 pub buffer: ModelHandle<Buffer>,
197 pub range: Range<language::Anchor>,
198}
199
200#[derive(Debug)]
201pub struct DocumentHighlight {
202 pub range: Range<language::Anchor>,
203 pub kind: DocumentHighlightKind,
204}
205
206#[derive(Clone, Debug)]
207pub struct Symbol {
208 pub source_worktree_id: WorktreeId,
209 pub worktree_id: WorktreeId,
210 pub language_server_name: LanguageServerName,
211 pub path: PathBuf,
212 pub label: CodeLabel,
213 pub name: String,
214 pub kind: lsp::SymbolKind,
215 pub range: Range<PointUtf16>,
216 pub signature: [u8; 32],
217}
218
219#[derive(Default)]
220pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
221
222impl DiagnosticSummary {
223 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
224 let mut this = Self {
225 error_count: 0,
226 warning_count: 0,
227 };
228
229 for entry in diagnostics {
230 if entry.diagnostic.is_primary {
231 match entry.diagnostic.severity {
232 DiagnosticSeverity::ERROR => this.error_count += 1,
233 DiagnosticSeverity::WARNING => this.warning_count += 1,
234 _ => {}
235 }
236 }
237 }
238
239 this
240 }
241
242 pub fn is_empty(&self) -> bool {
243 self.error_count == 0 && self.warning_count == 0
244 }
245
246 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
247 proto::DiagnosticSummary {
248 path: path.to_string_lossy().to_string(),
249 error_count: self.error_count as u32,
250 warning_count: self.warning_count as u32,
251 }
252 }
253}
254
255#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
256pub struct ProjectEntryId(usize);
257
258impl ProjectEntryId {
259 pub const MAX: Self = Self(usize::MAX);
260
261 pub fn new(counter: &AtomicUsize) -> Self {
262 Self(counter.fetch_add(1, SeqCst))
263 }
264
265 pub fn from_proto(id: u64) -> Self {
266 Self(id as usize)
267 }
268
269 pub fn to_proto(&self) -> u64 {
270 self.0 as u64
271 }
272
273 pub fn to_usize(&self) -> usize {
274 self.0
275 }
276}
277
278impl Project {
279 pub fn init(client: &Arc<Client>) {
280 client.add_model_message_handler(Self::handle_request_join_project);
281 client.add_model_message_handler(Self::handle_add_collaborator);
282 client.add_model_message_handler(Self::handle_buffer_reloaded);
283 client.add_model_message_handler(Self::handle_buffer_saved);
284 client.add_model_message_handler(Self::handle_start_language_server);
285 client.add_model_message_handler(Self::handle_update_language_server);
286 client.add_model_message_handler(Self::handle_remove_collaborator);
287 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
288 client.add_model_message_handler(Self::handle_update_project);
289 client.add_model_message_handler(Self::handle_unregister_project);
290 client.add_model_message_handler(Self::handle_project_unshared);
291 client.add_model_message_handler(Self::handle_update_buffer_file);
292 client.add_model_message_handler(Self::handle_update_buffer);
293 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
294 client.add_model_message_handler(Self::handle_update_worktree);
295 client.add_model_request_handler(Self::handle_create_project_entry);
296 client.add_model_request_handler(Self::handle_rename_project_entry);
297 client.add_model_request_handler(Self::handle_copy_project_entry);
298 client.add_model_request_handler(Self::handle_delete_project_entry);
299 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
300 client.add_model_request_handler(Self::handle_apply_code_action);
301 client.add_model_request_handler(Self::handle_reload_buffers);
302 client.add_model_request_handler(Self::handle_format_buffers);
303 client.add_model_request_handler(Self::handle_get_code_actions);
304 client.add_model_request_handler(Self::handle_get_completions);
305 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
306 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
307 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
308 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
309 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
310 client.add_model_request_handler(Self::handle_search_project);
311 client.add_model_request_handler(Self::handle_get_project_symbols);
312 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
313 client.add_model_request_handler(Self::handle_open_buffer_by_id);
314 client.add_model_request_handler(Self::handle_open_buffer_by_path);
315 client.add_model_request_handler(Self::handle_save_buffer);
316 }
317
318 pub fn local(
319 online: bool,
320 client: Arc<Client>,
321 user_store: ModelHandle<UserStore>,
322 project_store: ModelHandle<ProjectStore>,
323 languages: Arc<LanguageRegistry>,
324 fs: Arc<dyn Fs>,
325 cx: &mut MutableAppContext,
326 ) -> ModelHandle<Self> {
327 cx.add_model(|cx: &mut ModelContext<Self>| {
328 let (online_tx, online_rx) = watch::channel_with(online);
329 let (remote_id_tx, remote_id_rx) = watch::channel();
330 let _maintain_remote_id_task = cx.spawn_weak({
331 let status_rx = client.clone().status();
332 let online_rx = online_rx.clone();
333 move |this, mut cx| async move {
334 let mut stream = Stream::map(status_rx.clone(), drop)
335 .merge(Stream::map(online_rx.clone(), drop));
336 while stream.recv().await.is_some() {
337 let this = this.upgrade(&cx)?;
338 if status_rx.borrow().is_connected() && *online_rx.borrow() {
339 this.update(&mut cx, |this, cx| this.register(cx))
340 .await
341 .log_err()?;
342 } else {
343 this.update(&mut cx, |this, cx| this.unregister(cx))
344 .await
345 .log_err();
346 }
347 }
348 None
349 }
350 });
351
352 let handle = cx.weak_handle();
353 project_store.update(cx, |store, cx| store.add_project(handle, cx));
354
355 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
356 Self {
357 worktrees: Default::default(),
358 collaborators: Default::default(),
359 opened_buffers: Default::default(),
360 shared_buffers: Default::default(),
361 loading_buffers: Default::default(),
362 loading_local_worktrees: Default::default(),
363 buffer_snapshots: Default::default(),
364 client_state: ProjectClientState::Local {
365 is_shared: false,
366 remote_id_tx,
367 remote_id_rx,
368 online_tx,
369 online_rx,
370 _maintain_remote_id_task,
371 },
372 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
373 subscriptions: Vec::new(),
374 active_entry: None,
375 languages,
376 client,
377 user_store,
378 project_store,
379 fs,
380 next_entry_id: Default::default(),
381 next_diagnostic_group_id: Default::default(),
382 language_servers: Default::default(),
383 started_language_servers: Default::default(),
384 language_server_statuses: Default::default(),
385 last_workspace_edits_by_language_server: Default::default(),
386 language_server_settings: Default::default(),
387 next_language_server_id: 0,
388 nonce: StdRng::from_entropy().gen(),
389 initialized_persistent_state: false,
390 }
391 })
392 }
393
394 pub async fn remote(
395 remote_id: u64,
396 client: Arc<Client>,
397 user_store: ModelHandle<UserStore>,
398 project_store: ModelHandle<ProjectStore>,
399 languages: Arc<LanguageRegistry>,
400 fs: Arc<dyn Fs>,
401 mut cx: AsyncAppContext,
402 ) -> Result<ModelHandle<Self>, JoinProjectError> {
403 client.authenticate_and_connect(true, &cx).await?;
404
405 let response = client
406 .request(proto::JoinProject {
407 project_id: remote_id,
408 })
409 .await?;
410
411 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
412 proto::join_project_response::Variant::Accept(response) => response,
413 proto::join_project_response::Variant::Decline(decline) => {
414 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
415 Some(proto::join_project_response::decline::Reason::Declined) => {
416 Err(JoinProjectError::HostDeclined)?
417 }
418 Some(proto::join_project_response::decline::Reason::Closed) => {
419 Err(JoinProjectError::HostClosedProject)?
420 }
421 Some(proto::join_project_response::decline::Reason::WentOffline) => {
422 Err(JoinProjectError::HostWentOffline)?
423 }
424 None => Err(anyhow!("missing decline reason"))?,
425 }
426 }
427 };
428
429 let replica_id = response.replica_id as ReplicaId;
430
431 let mut worktrees = Vec::new();
432 for worktree in response.worktrees {
433 let (worktree, load_task) = cx
434 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
435 worktrees.push(worktree);
436 load_task.detach();
437 }
438
439 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
440 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
441 let handle = cx.weak_handle();
442 project_store.update(cx, |store, cx| store.add_project(handle, cx));
443
444 let mut this = Self {
445 worktrees: Vec::new(),
446 loading_buffers: Default::default(),
447 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
448 shared_buffers: Default::default(),
449 loading_local_worktrees: Default::default(),
450 active_entry: None,
451 collaborators: Default::default(),
452 languages,
453 user_store: user_store.clone(),
454 project_store,
455 fs,
456 next_entry_id: Default::default(),
457 next_diagnostic_group_id: Default::default(),
458 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
459 client: client.clone(),
460 client_state: ProjectClientState::Remote {
461 sharing_has_stopped: false,
462 remote_id,
463 replica_id,
464 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
465 async move {
466 let mut status = client.status();
467 let is_connected =
468 status.next().await.map_or(false, |s| s.is_connected());
469 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
470 if !is_connected || status.next().await.is_some() {
471 if let Some(this) = this.upgrade(&cx) {
472 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
473 }
474 }
475 Ok(())
476 }
477 .log_err()
478 }),
479 },
480 language_servers: Default::default(),
481 started_language_servers: Default::default(),
482 language_server_settings: Default::default(),
483 language_server_statuses: response
484 .language_servers
485 .into_iter()
486 .map(|server| {
487 (
488 server.id as usize,
489 LanguageServerStatus {
490 name: server.name,
491 pending_work: Default::default(),
492 pending_diagnostic_updates: 0,
493 },
494 )
495 })
496 .collect(),
497 last_workspace_edits_by_language_server: Default::default(),
498 next_language_server_id: 0,
499 opened_buffers: Default::default(),
500 buffer_snapshots: Default::default(),
501 nonce: StdRng::from_entropy().gen(),
502 initialized_persistent_state: false,
503 };
504 for worktree in worktrees {
505 this.add_worktree(&worktree, cx);
506 }
507 this
508 });
509
510 let user_ids = response
511 .collaborators
512 .iter()
513 .map(|peer| peer.user_id)
514 .collect();
515 user_store
516 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
517 .await?;
518 let mut collaborators = HashMap::default();
519 for message in response.collaborators {
520 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
521 collaborators.insert(collaborator.peer_id, collaborator);
522 }
523
524 this.update(&mut cx, |this, _| {
525 this.collaborators = collaborators;
526 });
527
528 Ok(this)
529 }
530
531 #[cfg(any(test, feature = "test-support"))]
532 pub async fn test(
533 fs: Arc<dyn Fs>,
534 root_paths: impl IntoIterator<Item = &Path>,
535 cx: &mut gpui::TestAppContext,
536 ) -> ModelHandle<Project> {
537 let languages = Arc::new(LanguageRegistry::test());
538 let http_client = client::test::FakeHttpClient::with_404_response();
539 let client = client::Client::new(http_client.clone());
540 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
541 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
542 let project = cx.update(|cx| {
543 Project::local(true, client, user_store, project_store, languages, fs, cx)
544 });
545 for path in root_paths {
546 let (tree, _) = project
547 .update(cx, |project, cx| {
548 project.find_or_create_local_worktree(path, true, cx)
549 })
550 .await
551 .unwrap();
552 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
553 .await;
554 }
555 project
556 }
557
558 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
559 if self.is_remote() {
560 return Task::ready(Ok(()));
561 }
562
563 let db = self.project_store.read(cx).db.clone();
564 let keys = self.db_keys_for_online_state(cx);
565 let online_by_default = cx.global::<Settings>().projects_online_by_default;
566 let read_online = cx.background().spawn(async move {
567 let values = db.read(keys)?;
568 anyhow::Ok(
569 values
570 .into_iter()
571 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
572 )
573 });
574 cx.spawn(|this, mut cx| async move {
575 let online = read_online.await.log_err().unwrap_or(false);
576 this.update(&mut cx, |this, cx| {
577 this.initialized_persistent_state = true;
578 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
579 let mut online_tx = online_tx.borrow_mut();
580 if *online_tx != online {
581 *online_tx = online;
582 drop(online_tx);
583 this.metadata_changed(false, cx);
584 }
585 }
586 });
587 Ok(())
588 })
589 }
590
591 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
592 if self.is_remote() || !self.initialized_persistent_state {
593 return Task::ready(Ok(()));
594 }
595
596 let db = self.project_store.read(cx).db.clone();
597 let keys = self.db_keys_for_online_state(cx);
598 let is_online = self.is_online();
599 cx.background().spawn(async move {
600 let value = &[is_online as u8];
601 db.write(keys.into_iter().map(|key| (key, value)))
602 })
603 }
604
605 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
606 self.opened_buffers
607 .get(&remote_id)
608 .and_then(|buffer| buffer.upgrade(cx))
609 }
610
611 pub fn languages(&self) -> &Arc<LanguageRegistry> {
612 &self.languages
613 }
614
615 pub fn client(&self) -> Arc<Client> {
616 self.client.clone()
617 }
618
619 pub fn user_store(&self) -> ModelHandle<UserStore> {
620 self.user_store.clone()
621 }
622
623 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
624 self.project_store.clone()
625 }
626
627 #[cfg(any(test, feature = "test-support"))]
628 pub fn check_invariants(&self, cx: &AppContext) {
629 if self.is_local() {
630 let mut worktree_root_paths = HashMap::default();
631 for worktree in self.worktrees(cx) {
632 let worktree = worktree.read(cx);
633 let abs_path = worktree.as_local().unwrap().abs_path().clone();
634 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
635 assert_eq!(
636 prev_worktree_id,
637 None,
638 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
639 abs_path,
640 worktree.id(),
641 prev_worktree_id
642 )
643 }
644 } else {
645 let replica_id = self.replica_id();
646 for buffer in self.opened_buffers.values() {
647 if let Some(buffer) = buffer.upgrade(cx) {
648 let buffer = buffer.read(cx);
649 assert_eq!(
650 buffer.deferred_ops_len(),
651 0,
652 "replica {}, buffer {} has deferred operations",
653 replica_id,
654 buffer.remote_id()
655 );
656 }
657 }
658 }
659 }
660
661 #[cfg(any(test, feature = "test-support"))]
662 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
663 let path = path.into();
664 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
665 self.opened_buffers.iter().any(|(_, buffer)| {
666 if let Some(buffer) = buffer.upgrade(cx) {
667 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
668 if file.worktree == worktree && file.path() == &path.path {
669 return true;
670 }
671 }
672 }
673 false
674 })
675 } else {
676 false
677 }
678 }
679
680 pub fn fs(&self) -> &Arc<dyn Fs> {
681 &self.fs
682 }
683
684 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
685 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
686 let mut online_tx = online_tx.borrow_mut();
687 if *online_tx != online {
688 *online_tx = online;
689 drop(online_tx);
690 self.metadata_changed(true, cx);
691 }
692 }
693 }
694
695 pub fn is_online(&self) -> bool {
696 match &self.client_state {
697 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
698 ProjectClientState::Remote { .. } => true,
699 }
700 }
701
702 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
703 self.unshared(cx);
704 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
705 if let Some(remote_id) = *remote_id_rx.borrow() {
706 let request = self.client.request(proto::UnregisterProject {
707 project_id: remote_id,
708 });
709 return cx.spawn(|this, mut cx| async move {
710 let response = request.await;
711
712 // Unregistering the project causes the server to send out a
713 // contact update removing this project from the host's list
714 // of online projects. Wait until this contact update has been
715 // processed before clearing out this project's remote id, so
716 // that there is no moment where this project appears in the
717 // contact metadata and *also* has no remote id.
718 this.update(&mut cx, |this, cx| {
719 this.user_store()
720 .update(cx, |store, _| store.contact_updates_done())
721 })
722 .await;
723
724 this.update(&mut cx, |this, cx| {
725 if let ProjectClientState::Local { remote_id_tx, .. } =
726 &mut this.client_state
727 {
728 *remote_id_tx.borrow_mut() = None;
729 }
730 this.subscriptions.clear();
731 this.metadata_changed(false, cx);
732 });
733 response.map(drop)
734 });
735 }
736 }
737 Task::ready(Ok(()))
738 }
739
740 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
741 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
742 if remote_id_rx.borrow().is_some() {
743 return Task::ready(Ok(()));
744 }
745 }
746
747 let response = self.client.request(proto::RegisterProject {});
748 cx.spawn(|this, mut cx| async move {
749 let remote_id = response.await?.project_id;
750 this.update(&mut cx, |this, cx| {
751 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
752 *remote_id_tx.borrow_mut() = Some(remote_id);
753 }
754
755 this.metadata_changed(false, cx);
756 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
757 this.subscriptions
758 .push(this.client.add_model_for_remote_entity(remote_id, cx));
759 Ok(())
760 })
761 })
762 }
763
764 pub fn remote_id(&self) -> Option<u64> {
765 match &self.client_state {
766 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
767 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
768 }
769 }
770
771 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
772 let mut id = None;
773 let mut watch = None;
774 match &self.client_state {
775 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
776 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
777 }
778
779 async move {
780 if let Some(id) = id {
781 return id;
782 }
783 let mut watch = watch.unwrap();
784 loop {
785 let id = *watch.borrow();
786 if let Some(id) = id {
787 return id;
788 }
789 watch.next().await;
790 }
791 }
792 }
793
794 pub fn shared_remote_id(&self) -> Option<u64> {
795 match &self.client_state {
796 ProjectClientState::Local {
797 remote_id_rx,
798 is_shared,
799 ..
800 } => {
801 if *is_shared {
802 *remote_id_rx.borrow()
803 } else {
804 None
805 }
806 }
807 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
808 }
809 }
810
811 pub fn replica_id(&self) -> ReplicaId {
812 match &self.client_state {
813 ProjectClientState::Local { .. } => 0,
814 ProjectClientState::Remote { replica_id, .. } => *replica_id,
815 }
816 }
817
818 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
819 if let ProjectClientState::Local {
820 remote_id_rx,
821 online_rx,
822 ..
823 } = &self.client_state
824 {
825 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
826 self.client
827 .send(proto::UpdateProject {
828 project_id,
829 worktrees: self
830 .worktrees
831 .iter()
832 .filter_map(|worktree| {
833 worktree.upgrade(&cx).map(|worktree| {
834 worktree.read(cx).as_local().unwrap().metadata_proto()
835 })
836 })
837 .collect(),
838 })
839 .log_err();
840 }
841
842 self.project_store.update(cx, |_, cx| cx.notify());
843 if persist {
844 self.persist_state(cx).detach_and_log_err(cx);
845 }
846 cx.notify();
847 }
848 }
849
850 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
851 &self.collaborators
852 }
853
854 pub fn worktrees<'a>(
855 &'a self,
856 cx: &'a AppContext,
857 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
858 self.worktrees
859 .iter()
860 .filter_map(move |worktree| worktree.upgrade(cx))
861 }
862
863 pub fn visible_worktrees<'a>(
864 &'a self,
865 cx: &'a AppContext,
866 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
867 self.worktrees.iter().filter_map(|worktree| {
868 worktree.upgrade(cx).and_then(|worktree| {
869 if worktree.read(cx).is_visible() {
870 Some(worktree)
871 } else {
872 None
873 }
874 })
875 })
876 }
877
878 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
879 self.visible_worktrees(cx)
880 .map(|tree| tree.read(cx).root_name())
881 }
882
883 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
884 self.worktrees
885 .iter()
886 .filter_map(|worktree| {
887 let worktree = worktree.upgrade(&cx)?.read(cx);
888 if worktree.is_visible() {
889 Some(format!(
890 "project-path-online:{}",
891 worktree.as_local().unwrap().abs_path().to_string_lossy()
892 ))
893 } else {
894 None
895 }
896 })
897 .collect::<Vec<_>>()
898 }
899
900 pub fn worktree_for_id(
901 &self,
902 id: WorktreeId,
903 cx: &AppContext,
904 ) -> Option<ModelHandle<Worktree>> {
905 self.worktrees(cx)
906 .find(|worktree| worktree.read(cx).id() == id)
907 }
908
909 pub fn worktree_for_entry(
910 &self,
911 entry_id: ProjectEntryId,
912 cx: &AppContext,
913 ) -> Option<ModelHandle<Worktree>> {
914 self.worktrees(cx)
915 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
916 }
917
918 pub fn worktree_id_for_entry(
919 &self,
920 entry_id: ProjectEntryId,
921 cx: &AppContext,
922 ) -> Option<WorktreeId> {
923 self.worktree_for_entry(entry_id, cx)
924 .map(|worktree| worktree.read(cx).id())
925 }
926
927 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
928 paths.iter().all(|path| self.contains_path(&path, cx))
929 }
930
931 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
932 for worktree in self.worktrees(cx) {
933 let worktree = worktree.read(cx).as_local();
934 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
935 return true;
936 }
937 }
938 false
939 }
940
941 pub fn create_entry(
942 &mut self,
943 project_path: impl Into<ProjectPath>,
944 is_directory: bool,
945 cx: &mut ModelContext<Self>,
946 ) -> Option<Task<Result<Entry>>> {
947 let project_path = project_path.into();
948 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
949 if self.is_local() {
950 Some(worktree.update(cx, |worktree, cx| {
951 worktree
952 .as_local_mut()
953 .unwrap()
954 .create_entry(project_path.path, is_directory, cx)
955 }))
956 } else {
957 let client = self.client.clone();
958 let project_id = self.remote_id().unwrap();
959 Some(cx.spawn_weak(|_, mut cx| async move {
960 let response = client
961 .request(proto::CreateProjectEntry {
962 worktree_id: project_path.worktree_id.to_proto(),
963 project_id,
964 path: project_path.path.as_os_str().as_bytes().to_vec(),
965 is_directory,
966 })
967 .await?;
968 let entry = response
969 .entry
970 .ok_or_else(|| anyhow!("missing entry in response"))?;
971 worktree
972 .update(&mut cx, |worktree, cx| {
973 worktree.as_remote().unwrap().insert_entry(
974 entry,
975 response.worktree_scan_id as usize,
976 cx,
977 )
978 })
979 .await
980 }))
981 }
982 }
983
984 pub fn copy_entry(
985 &mut self,
986 entry_id: ProjectEntryId,
987 new_path: impl Into<Arc<Path>>,
988 cx: &mut ModelContext<Self>,
989 ) -> Option<Task<Result<Entry>>> {
990 let worktree = self.worktree_for_entry(entry_id, cx)?;
991 let new_path = new_path.into();
992 if self.is_local() {
993 worktree.update(cx, |worktree, cx| {
994 worktree
995 .as_local_mut()
996 .unwrap()
997 .copy_entry(entry_id, new_path, cx)
998 })
999 } else {
1000 let client = self.client.clone();
1001 let project_id = self.remote_id().unwrap();
1002
1003 Some(cx.spawn_weak(|_, mut cx| async move {
1004 let response = client
1005 .request(proto::CopyProjectEntry {
1006 project_id,
1007 entry_id: entry_id.to_proto(),
1008 new_path: new_path.as_os_str().as_bytes().to_vec(),
1009 })
1010 .await?;
1011 let entry = response
1012 .entry
1013 .ok_or_else(|| anyhow!("missing entry in response"))?;
1014 worktree
1015 .update(&mut cx, |worktree, cx| {
1016 worktree.as_remote().unwrap().insert_entry(
1017 entry,
1018 response.worktree_scan_id as usize,
1019 cx,
1020 )
1021 })
1022 .await
1023 }))
1024 }
1025 }
1026
1027 pub fn rename_entry(
1028 &mut self,
1029 entry_id: ProjectEntryId,
1030 new_path: impl Into<Arc<Path>>,
1031 cx: &mut ModelContext<Self>,
1032 ) -> Option<Task<Result<Entry>>> {
1033 let worktree = self.worktree_for_entry(entry_id, cx)?;
1034 let new_path = new_path.into();
1035 if self.is_local() {
1036 worktree.update(cx, |worktree, cx| {
1037 worktree
1038 .as_local_mut()
1039 .unwrap()
1040 .rename_entry(entry_id, new_path, cx)
1041 })
1042 } else {
1043 let client = self.client.clone();
1044 let project_id = self.remote_id().unwrap();
1045
1046 Some(cx.spawn_weak(|_, mut cx| async move {
1047 let response = client
1048 .request(proto::RenameProjectEntry {
1049 project_id,
1050 entry_id: entry_id.to_proto(),
1051 new_path: new_path.as_os_str().as_bytes().to_vec(),
1052 })
1053 .await?;
1054 let entry = response
1055 .entry
1056 .ok_or_else(|| anyhow!("missing entry in response"))?;
1057 worktree
1058 .update(&mut cx, |worktree, cx| {
1059 worktree.as_remote().unwrap().insert_entry(
1060 entry,
1061 response.worktree_scan_id as usize,
1062 cx,
1063 )
1064 })
1065 .await
1066 }))
1067 }
1068 }
1069
1070 pub fn delete_entry(
1071 &mut self,
1072 entry_id: ProjectEntryId,
1073 cx: &mut ModelContext<Self>,
1074 ) -> Option<Task<Result<()>>> {
1075 let worktree = self.worktree_for_entry(entry_id, cx)?;
1076 if self.is_local() {
1077 worktree.update(cx, |worktree, cx| {
1078 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1079 })
1080 } else {
1081 let client = self.client.clone();
1082 let project_id = self.remote_id().unwrap();
1083 Some(cx.spawn_weak(|_, mut cx| async move {
1084 let response = client
1085 .request(proto::DeleteProjectEntry {
1086 project_id,
1087 entry_id: entry_id.to_proto(),
1088 })
1089 .await?;
1090 worktree
1091 .update(&mut cx, move |worktree, cx| {
1092 worktree.as_remote().unwrap().delete_entry(
1093 entry_id,
1094 response.worktree_scan_id as usize,
1095 cx,
1096 )
1097 })
1098 .await
1099 }))
1100 }
1101 }
1102
1103 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1104 let project_id;
1105 if let ProjectClientState::Local {
1106 remote_id_rx,
1107 is_shared,
1108 ..
1109 } = &mut self.client_state
1110 {
1111 if *is_shared {
1112 return Task::ready(Ok(()));
1113 }
1114 *is_shared = true;
1115 if let Some(id) = *remote_id_rx.borrow() {
1116 project_id = id;
1117 } else {
1118 return Task::ready(Err(anyhow!("project hasn't been registered")));
1119 }
1120 } else {
1121 return Task::ready(Err(anyhow!("can't share a remote project")));
1122 };
1123
1124 for open_buffer in self.opened_buffers.values_mut() {
1125 match open_buffer {
1126 OpenBuffer::Strong(_) => {}
1127 OpenBuffer::Weak(buffer) => {
1128 if let Some(buffer) = buffer.upgrade(cx) {
1129 *open_buffer = OpenBuffer::Strong(buffer);
1130 }
1131 }
1132 OpenBuffer::Loading(_) => unreachable!(),
1133 }
1134 }
1135
1136 for worktree_handle in self.worktrees.iter_mut() {
1137 match worktree_handle {
1138 WorktreeHandle::Strong(_) => {}
1139 WorktreeHandle::Weak(worktree) => {
1140 if let Some(worktree) = worktree.upgrade(cx) {
1141 *worktree_handle = WorktreeHandle::Strong(worktree);
1142 }
1143 }
1144 }
1145 }
1146
1147 let mut tasks = Vec::new();
1148 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1149 worktree.update(cx, |worktree, cx| {
1150 let worktree = worktree.as_local_mut().unwrap();
1151 tasks.push(worktree.share(project_id, cx));
1152 });
1153 }
1154
1155 cx.spawn(|this, mut cx| async move {
1156 for task in tasks {
1157 task.await?;
1158 }
1159 this.update(&mut cx, |_, cx| cx.notify());
1160 Ok(())
1161 })
1162 }
1163
1164 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1165 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1166 if !*is_shared {
1167 return;
1168 }
1169
1170 *is_shared = false;
1171 self.collaborators.clear();
1172 self.shared_buffers.clear();
1173 for worktree_handle in self.worktrees.iter_mut() {
1174 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1175 let is_visible = worktree.update(cx, |worktree, _| {
1176 worktree.as_local_mut().unwrap().unshare();
1177 worktree.is_visible()
1178 });
1179 if !is_visible {
1180 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1181 }
1182 }
1183 }
1184
1185 for open_buffer in self.opened_buffers.values_mut() {
1186 match open_buffer {
1187 OpenBuffer::Strong(buffer) => {
1188 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1189 }
1190 _ => {}
1191 }
1192 }
1193
1194 cx.notify();
1195 } else {
1196 log::error!("attempted to unshare a remote project");
1197 }
1198 }
1199
1200 pub fn respond_to_join_request(
1201 &mut self,
1202 requester_id: u64,
1203 allow: bool,
1204 cx: &mut ModelContext<Self>,
1205 ) {
1206 if let Some(project_id) = self.remote_id() {
1207 let share = self.share(cx);
1208 let client = self.client.clone();
1209 cx.foreground()
1210 .spawn(async move {
1211 share.await?;
1212 client.send(proto::RespondToJoinProjectRequest {
1213 requester_id,
1214 project_id,
1215 allow,
1216 })
1217 })
1218 .detach_and_log_err(cx);
1219 }
1220 }
1221
1222 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1223 if let ProjectClientState::Remote {
1224 sharing_has_stopped,
1225 ..
1226 } = &mut self.client_state
1227 {
1228 *sharing_has_stopped = true;
1229 self.collaborators.clear();
1230 cx.notify();
1231 }
1232 }
1233
1234 pub fn is_read_only(&self) -> bool {
1235 match &self.client_state {
1236 ProjectClientState::Local { .. } => false,
1237 ProjectClientState::Remote {
1238 sharing_has_stopped,
1239 ..
1240 } => *sharing_has_stopped,
1241 }
1242 }
1243
1244 pub fn is_local(&self) -> bool {
1245 match &self.client_state {
1246 ProjectClientState::Local { .. } => true,
1247 ProjectClientState::Remote { .. } => false,
1248 }
1249 }
1250
1251 pub fn is_remote(&self) -> bool {
1252 !self.is_local()
1253 }
1254
1255 pub fn create_buffer(
1256 &mut self,
1257 text: &str,
1258 language: Option<Arc<Language>>,
1259 cx: &mut ModelContext<Self>,
1260 ) -> Result<ModelHandle<Buffer>> {
1261 if self.is_remote() {
1262 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1263 }
1264
1265 let buffer = cx.add_model(|cx| {
1266 Buffer::new(self.replica_id(), text, cx)
1267 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1268 });
1269 self.register_buffer(&buffer, cx)?;
1270 Ok(buffer)
1271 }
1272
1273 pub fn open_path(
1274 &mut self,
1275 path: impl Into<ProjectPath>,
1276 cx: &mut ModelContext<Self>,
1277 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1278 let task = self.open_buffer(path, cx);
1279 cx.spawn_weak(|_, cx| async move {
1280 let buffer = task.await?;
1281 let project_entry_id = buffer
1282 .read_with(&cx, |buffer, cx| {
1283 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1284 })
1285 .ok_or_else(|| anyhow!("no project entry"))?;
1286 Ok((project_entry_id, buffer.into()))
1287 })
1288 }
1289
1290 pub fn open_local_buffer(
1291 &mut self,
1292 abs_path: impl AsRef<Path>,
1293 cx: &mut ModelContext<Self>,
1294 ) -> Task<Result<ModelHandle<Buffer>>> {
1295 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1296 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1297 } else {
1298 Task::ready(Err(anyhow!("no such path")))
1299 }
1300 }
1301
1302 pub fn open_buffer(
1303 &mut self,
1304 path: impl Into<ProjectPath>,
1305 cx: &mut ModelContext<Self>,
1306 ) -> Task<Result<ModelHandle<Buffer>>> {
1307 let project_path = path.into();
1308 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1309 worktree
1310 } else {
1311 return Task::ready(Err(anyhow!("no such worktree")));
1312 };
1313
1314 // If there is already a buffer for the given path, then return it.
1315 let existing_buffer = self.get_open_buffer(&project_path, cx);
1316 if let Some(existing_buffer) = existing_buffer {
1317 return Task::ready(Ok(existing_buffer));
1318 }
1319
1320 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1321 // If the given path is already being loaded, then wait for that existing
1322 // task to complete and return the same buffer.
1323 hash_map::Entry::Occupied(e) => e.get().clone(),
1324
1325 // Otherwise, record the fact that this path is now being loaded.
1326 hash_map::Entry::Vacant(entry) => {
1327 let (mut tx, rx) = postage::watch::channel();
1328 entry.insert(rx.clone());
1329
1330 let load_buffer = if worktree.read(cx).is_local() {
1331 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1332 } else {
1333 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1334 };
1335
1336 cx.spawn(move |this, mut cx| async move {
1337 let load_result = load_buffer.await;
1338 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1339 // Record the fact that the buffer is no longer loading.
1340 this.loading_buffers.remove(&project_path);
1341 let buffer = load_result.map_err(Arc::new)?;
1342 Ok(buffer)
1343 }));
1344 })
1345 .detach();
1346 rx
1347 }
1348 };
1349
1350 cx.foreground().spawn(async move {
1351 loop {
1352 if let Some(result) = loading_watch.borrow().as_ref() {
1353 match result {
1354 Ok(buffer) => return Ok(buffer.clone()),
1355 Err(error) => return Err(anyhow!("{}", error)),
1356 }
1357 }
1358 loading_watch.next().await;
1359 }
1360 })
1361 }
1362
1363 fn open_local_buffer_internal(
1364 &mut self,
1365 path: &Arc<Path>,
1366 worktree: &ModelHandle<Worktree>,
1367 cx: &mut ModelContext<Self>,
1368 ) -> Task<Result<ModelHandle<Buffer>>> {
1369 let load_buffer = worktree.update(cx, |worktree, cx| {
1370 let worktree = worktree.as_local_mut().unwrap();
1371 worktree.load_buffer(path, cx)
1372 });
1373 cx.spawn(|this, mut cx| async move {
1374 let buffer = load_buffer.await?;
1375 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1376 Ok(buffer)
1377 })
1378 }
1379
1380 fn open_remote_buffer_internal(
1381 &mut self,
1382 path: &Arc<Path>,
1383 worktree: &ModelHandle<Worktree>,
1384 cx: &mut ModelContext<Self>,
1385 ) -> Task<Result<ModelHandle<Buffer>>> {
1386 let rpc = self.client.clone();
1387 let project_id = self.remote_id().unwrap();
1388 let remote_worktree_id = worktree.read(cx).id();
1389 let path = path.clone();
1390 let path_string = path.to_string_lossy().to_string();
1391 cx.spawn(|this, mut cx| async move {
1392 let response = rpc
1393 .request(proto::OpenBufferByPath {
1394 project_id,
1395 worktree_id: remote_worktree_id.to_proto(),
1396 path: path_string,
1397 })
1398 .await?;
1399 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1400 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1401 .await
1402 })
1403 }
1404
1405 fn open_local_buffer_via_lsp(
1406 &mut self,
1407 abs_path: lsp::Url,
1408 lsp_adapter: Arc<dyn LspAdapter>,
1409 lsp_server: Arc<LanguageServer>,
1410 cx: &mut ModelContext<Self>,
1411 ) -> Task<Result<ModelHandle<Buffer>>> {
1412 cx.spawn(|this, mut cx| async move {
1413 let abs_path = abs_path
1414 .to_file_path()
1415 .map_err(|_| anyhow!("can't convert URI to path"))?;
1416 let (worktree, relative_path) = if let Some(result) =
1417 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1418 {
1419 result
1420 } else {
1421 let worktree = this
1422 .update(&mut cx, |this, cx| {
1423 this.create_local_worktree(&abs_path, false, cx)
1424 })
1425 .await?;
1426 this.update(&mut cx, |this, cx| {
1427 this.language_servers.insert(
1428 (worktree.read(cx).id(), lsp_adapter.name()),
1429 (lsp_adapter, lsp_server),
1430 );
1431 });
1432 (worktree, PathBuf::new())
1433 };
1434
1435 let project_path = ProjectPath {
1436 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1437 path: relative_path.into(),
1438 };
1439 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1440 .await
1441 })
1442 }
1443
1444 pub fn open_buffer_by_id(
1445 &mut self,
1446 id: u64,
1447 cx: &mut ModelContext<Self>,
1448 ) -> Task<Result<ModelHandle<Buffer>>> {
1449 if let Some(buffer) = self.buffer_for_id(id, cx) {
1450 Task::ready(Ok(buffer))
1451 } else if self.is_local() {
1452 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1453 } else if let Some(project_id) = self.remote_id() {
1454 let request = self
1455 .client
1456 .request(proto::OpenBufferById { project_id, id });
1457 cx.spawn(|this, mut cx| async move {
1458 let buffer = request
1459 .await?
1460 .buffer
1461 .ok_or_else(|| anyhow!("invalid buffer"))?;
1462 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1463 .await
1464 })
1465 } else {
1466 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1467 }
1468 }
1469
1470 pub fn save_buffer_as(
1471 &mut self,
1472 buffer: ModelHandle<Buffer>,
1473 abs_path: PathBuf,
1474 cx: &mut ModelContext<Project>,
1475 ) -> Task<Result<()>> {
1476 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1477 let old_path =
1478 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1479 cx.spawn(|this, mut cx| async move {
1480 if let Some(old_path) = old_path {
1481 this.update(&mut cx, |this, cx| {
1482 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1483 });
1484 }
1485 let (worktree, path) = worktree_task.await?;
1486 worktree
1487 .update(&mut cx, |worktree, cx| {
1488 worktree
1489 .as_local_mut()
1490 .unwrap()
1491 .save_buffer_as(buffer.clone(), path, cx)
1492 })
1493 .await?;
1494 this.update(&mut cx, |this, cx| {
1495 this.assign_language_to_buffer(&buffer, cx);
1496 this.register_buffer_with_language_server(&buffer, cx);
1497 });
1498 Ok(())
1499 })
1500 }
1501
1502 pub fn get_open_buffer(
1503 &mut self,
1504 path: &ProjectPath,
1505 cx: &mut ModelContext<Self>,
1506 ) -> Option<ModelHandle<Buffer>> {
1507 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1508 self.opened_buffers.values().find_map(|buffer| {
1509 let buffer = buffer.upgrade(cx)?;
1510 let file = File::from_dyn(buffer.read(cx).file())?;
1511 if file.worktree == worktree && file.path() == &path.path {
1512 Some(buffer)
1513 } else {
1514 None
1515 }
1516 })
1517 }
1518
1519 fn register_buffer(
1520 &mut self,
1521 buffer: &ModelHandle<Buffer>,
1522 cx: &mut ModelContext<Self>,
1523 ) -> Result<()> {
1524 let remote_id = buffer.read(cx).remote_id();
1525 let open_buffer = if self.is_remote() || self.is_shared() {
1526 OpenBuffer::Strong(buffer.clone())
1527 } else {
1528 OpenBuffer::Weak(buffer.downgrade())
1529 };
1530
1531 match self.opened_buffers.insert(remote_id, open_buffer) {
1532 None => {}
1533 Some(OpenBuffer::Loading(operations)) => {
1534 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1535 }
1536 Some(OpenBuffer::Weak(existing_handle)) => {
1537 if existing_handle.upgrade(cx).is_some() {
1538 Err(anyhow!(
1539 "already registered buffer with remote id {}",
1540 remote_id
1541 ))?
1542 }
1543 }
1544 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1545 "already registered buffer with remote id {}",
1546 remote_id
1547 ))?,
1548 }
1549 cx.subscribe(buffer, |this, buffer, event, cx| {
1550 this.on_buffer_event(buffer, event, cx);
1551 })
1552 .detach();
1553
1554 self.assign_language_to_buffer(buffer, cx);
1555 self.register_buffer_with_language_server(buffer, cx);
1556 cx.observe_release(buffer, |this, buffer, cx| {
1557 if let Some(file) = File::from_dyn(buffer.file()) {
1558 if file.is_local() {
1559 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1560 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1561 server
1562 .notify::<lsp::notification::DidCloseTextDocument>(
1563 lsp::DidCloseTextDocumentParams {
1564 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1565 },
1566 )
1567 .log_err();
1568 }
1569 }
1570 }
1571 })
1572 .detach();
1573
1574 Ok(())
1575 }
1576
1577 fn register_buffer_with_language_server(
1578 &mut self,
1579 buffer_handle: &ModelHandle<Buffer>,
1580 cx: &mut ModelContext<Self>,
1581 ) {
1582 let buffer = buffer_handle.read(cx);
1583 let buffer_id = buffer.remote_id();
1584 if let Some(file) = File::from_dyn(buffer.file()) {
1585 if file.is_local() {
1586 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1587 let initial_snapshot = buffer.text_snapshot();
1588
1589 let mut language_server = None;
1590 let mut language_id = None;
1591 if let Some(language) = buffer.language() {
1592 let worktree_id = file.worktree_id(cx);
1593 if let Some(adapter) = language.lsp_adapter() {
1594 language_id = adapter.id_for_language(language.name().as_ref());
1595 language_server = self
1596 .language_servers
1597 .get(&(worktree_id, adapter.name()))
1598 .cloned();
1599 }
1600 }
1601
1602 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1603 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1604 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1605 .log_err();
1606 }
1607 }
1608
1609 if let Some((_, server)) = language_server {
1610 server
1611 .notify::<lsp::notification::DidOpenTextDocument>(
1612 lsp::DidOpenTextDocumentParams {
1613 text_document: lsp::TextDocumentItem::new(
1614 uri,
1615 language_id.unwrap_or_default(),
1616 0,
1617 initial_snapshot.text(),
1618 ),
1619 }
1620 .clone(),
1621 )
1622 .log_err();
1623 buffer_handle.update(cx, |buffer, cx| {
1624 buffer.set_completion_triggers(
1625 server
1626 .capabilities()
1627 .completion_provider
1628 .as_ref()
1629 .and_then(|provider| provider.trigger_characters.clone())
1630 .unwrap_or(Vec::new()),
1631 cx,
1632 )
1633 });
1634 self.buffer_snapshots
1635 .insert(buffer_id, vec![(0, initial_snapshot)]);
1636 }
1637 }
1638 }
1639 }
1640
1641 fn unregister_buffer_from_language_server(
1642 &mut self,
1643 buffer: &ModelHandle<Buffer>,
1644 old_path: PathBuf,
1645 cx: &mut ModelContext<Self>,
1646 ) {
1647 buffer.update(cx, |buffer, cx| {
1648 buffer.update_diagnostics(Default::default(), cx);
1649 self.buffer_snapshots.remove(&buffer.remote_id());
1650 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1651 language_server
1652 .notify::<lsp::notification::DidCloseTextDocument>(
1653 lsp::DidCloseTextDocumentParams {
1654 text_document: lsp::TextDocumentIdentifier::new(
1655 lsp::Url::from_file_path(old_path).unwrap(),
1656 ),
1657 },
1658 )
1659 .log_err();
1660 }
1661 });
1662 }
1663
1664 fn on_buffer_event(
1665 &mut self,
1666 buffer: ModelHandle<Buffer>,
1667 event: &BufferEvent,
1668 cx: &mut ModelContext<Self>,
1669 ) -> Option<()> {
1670 match event {
1671 BufferEvent::Operation(operation) => {
1672 if let Some(project_id) = self.shared_remote_id() {
1673 let request = self.client.request(proto::UpdateBuffer {
1674 project_id,
1675 buffer_id: buffer.read(cx).remote_id(),
1676 operations: vec![language::proto::serialize_operation(&operation)],
1677 });
1678 cx.background().spawn(request).detach_and_log_err(cx);
1679 }
1680 }
1681 BufferEvent::Edited { .. } => {
1682 let (_, language_server) = self
1683 .language_server_for_buffer(buffer.read(cx), cx)?
1684 .clone();
1685 let buffer = buffer.read(cx);
1686 let file = File::from_dyn(buffer.file())?;
1687 let abs_path = file.as_local()?.abs_path(cx);
1688 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1689 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1690 let (version, prev_snapshot) = buffer_snapshots.last()?;
1691 let next_snapshot = buffer.text_snapshot();
1692 let next_version = version + 1;
1693
1694 let content_changes = buffer
1695 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1696 .map(|edit| {
1697 let edit_start = edit.new.start.0;
1698 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1699 let new_text = next_snapshot
1700 .text_for_range(edit.new.start.1..edit.new.end.1)
1701 .collect();
1702 lsp::TextDocumentContentChangeEvent {
1703 range: Some(lsp::Range::new(
1704 point_to_lsp(edit_start),
1705 point_to_lsp(edit_end),
1706 )),
1707 range_length: None,
1708 text: new_text,
1709 }
1710 })
1711 .collect();
1712
1713 buffer_snapshots.push((next_version, next_snapshot));
1714
1715 language_server
1716 .notify::<lsp::notification::DidChangeTextDocument>(
1717 lsp::DidChangeTextDocumentParams {
1718 text_document: lsp::VersionedTextDocumentIdentifier::new(
1719 uri,
1720 next_version,
1721 ),
1722 content_changes,
1723 },
1724 )
1725 .log_err();
1726 }
1727 BufferEvent::Saved => {
1728 let file = File::from_dyn(buffer.read(cx).file())?;
1729 let worktree_id = file.worktree_id(cx);
1730 let abs_path = file.as_local()?.abs_path(cx);
1731 let text_document = lsp::TextDocumentIdentifier {
1732 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1733 };
1734
1735 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1736 server
1737 .notify::<lsp::notification::DidSaveTextDocument>(
1738 lsp::DidSaveTextDocumentParams {
1739 text_document: text_document.clone(),
1740 text: None,
1741 },
1742 )
1743 .log_err();
1744 }
1745 }
1746 _ => {}
1747 }
1748
1749 None
1750 }
1751
1752 fn language_servers_for_worktree(
1753 &self,
1754 worktree_id: WorktreeId,
1755 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1756 self.language_servers.iter().filter_map(
1757 move |((language_server_worktree_id, _), server)| {
1758 if *language_server_worktree_id == worktree_id {
1759 Some(server)
1760 } else {
1761 None
1762 }
1763 },
1764 )
1765 }
1766
1767 fn assign_language_to_buffer(
1768 &mut self,
1769 buffer: &ModelHandle<Buffer>,
1770 cx: &mut ModelContext<Self>,
1771 ) -> Option<()> {
1772 // If the buffer has a language, set it and start the language server if we haven't already.
1773 let full_path = buffer.read(cx).file()?.full_path(cx);
1774 let language = self.languages.select_language(&full_path)?;
1775 buffer.update(cx, |buffer, cx| {
1776 buffer.set_language(Some(language.clone()), cx);
1777 });
1778
1779 let file = File::from_dyn(buffer.read(cx).file())?;
1780 let worktree = file.worktree.read(cx).as_local()?;
1781 let worktree_id = worktree.id();
1782 let worktree_abs_path = worktree.abs_path().clone();
1783 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1784
1785 None
1786 }
1787
1788 fn start_language_server(
1789 &mut self,
1790 worktree_id: WorktreeId,
1791 worktree_path: Arc<Path>,
1792 language: Arc<Language>,
1793 cx: &mut ModelContext<Self>,
1794 ) {
1795 let adapter = if let Some(adapter) = language.lsp_adapter() {
1796 adapter
1797 } else {
1798 return;
1799 };
1800 let key = (worktree_id, adapter.name());
1801 self.started_language_servers
1802 .entry(key.clone())
1803 .or_insert_with(|| {
1804 let server_id = post_inc(&mut self.next_language_server_id);
1805 let language_server = self.languages.start_language_server(
1806 server_id,
1807 language.clone(),
1808 worktree_path,
1809 self.client.http_client(),
1810 cx,
1811 );
1812 cx.spawn_weak(|this, mut cx| async move {
1813 let language_server = language_server?.await.log_err()?;
1814 let language_server = language_server
1815 .initialize(adapter.initialization_options())
1816 .await
1817 .log_err()?;
1818 let this = this.upgrade(&cx)?;
1819 let disk_based_diagnostics_progress_token =
1820 adapter.disk_based_diagnostics_progress_token();
1821
1822 language_server
1823 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1824 let this = this.downgrade();
1825 let adapter = adapter.clone();
1826 move |params, mut cx| {
1827 if let Some(this) = this.upgrade(&cx) {
1828 this.update(&mut cx, |this, cx| {
1829 this.on_lsp_diagnostics_published(
1830 server_id,
1831 params,
1832 &adapter,
1833 disk_based_diagnostics_progress_token,
1834 cx,
1835 );
1836 });
1837 }
1838 }
1839 })
1840 .detach();
1841
1842 language_server
1843 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1844 let settings = this
1845 .read_with(&cx, |this, _| this.language_server_settings.clone());
1846 move |params, _| {
1847 let settings = settings.lock().clone();
1848 async move {
1849 Ok(params
1850 .items
1851 .into_iter()
1852 .map(|item| {
1853 if let Some(section) = &item.section {
1854 settings
1855 .get(section)
1856 .cloned()
1857 .unwrap_or(serde_json::Value::Null)
1858 } else {
1859 settings.clone()
1860 }
1861 })
1862 .collect())
1863 }
1864 }
1865 })
1866 .detach();
1867
1868 language_server
1869 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1870 let this = this.downgrade();
1871 let adapter = adapter.clone();
1872 let language_server = language_server.clone();
1873 move |params, cx| {
1874 Self::on_lsp_workspace_edit(
1875 this,
1876 params,
1877 server_id,
1878 adapter.clone(),
1879 language_server.clone(),
1880 cx,
1881 )
1882 }
1883 })
1884 .detach();
1885
1886 language_server
1887 .on_notification::<lsp::notification::Progress, _>({
1888 let this = this.downgrade();
1889 move |params, mut cx| {
1890 if let Some(this) = this.upgrade(&cx) {
1891 this.update(&mut cx, |this, cx| {
1892 this.on_lsp_progress(
1893 params,
1894 server_id,
1895 disk_based_diagnostics_progress_token,
1896 cx,
1897 );
1898 });
1899 }
1900 }
1901 })
1902 .detach();
1903
1904 this.update(&mut cx, |this, cx| {
1905 this.language_servers
1906 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1907 this.language_server_statuses.insert(
1908 server_id,
1909 LanguageServerStatus {
1910 name: language_server.name().to_string(),
1911 pending_work: Default::default(),
1912 pending_diagnostic_updates: 0,
1913 },
1914 );
1915 language_server
1916 .notify::<lsp::notification::DidChangeConfiguration>(
1917 lsp::DidChangeConfigurationParams {
1918 settings: this.language_server_settings.lock().clone(),
1919 },
1920 )
1921 .ok();
1922
1923 if let Some(project_id) = this.shared_remote_id() {
1924 this.client
1925 .send(proto::StartLanguageServer {
1926 project_id,
1927 server: Some(proto::LanguageServer {
1928 id: server_id as u64,
1929 name: language_server.name().to_string(),
1930 }),
1931 })
1932 .log_err();
1933 }
1934
1935 // Tell the language server about every open buffer in the worktree that matches the language.
1936 for buffer in this.opened_buffers.values() {
1937 if let Some(buffer_handle) = buffer.upgrade(cx) {
1938 let buffer = buffer_handle.read(cx);
1939 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1940 file
1941 } else {
1942 continue;
1943 };
1944 let language = if let Some(language) = buffer.language() {
1945 language
1946 } else {
1947 continue;
1948 };
1949 if file.worktree.read(cx).id() != key.0
1950 || language.lsp_adapter().map(|a| a.name())
1951 != Some(key.1.clone())
1952 {
1953 continue;
1954 }
1955
1956 let file = file.as_local()?;
1957 let versions = this
1958 .buffer_snapshots
1959 .entry(buffer.remote_id())
1960 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1961 let (version, initial_snapshot) = versions.last().unwrap();
1962 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1963 let language_id = adapter.id_for_language(language.name().as_ref());
1964 language_server
1965 .notify::<lsp::notification::DidOpenTextDocument>(
1966 lsp::DidOpenTextDocumentParams {
1967 text_document: lsp::TextDocumentItem::new(
1968 uri,
1969 language_id.unwrap_or_default(),
1970 *version,
1971 initial_snapshot.text(),
1972 ),
1973 },
1974 )
1975 .log_err()?;
1976 buffer_handle.update(cx, |buffer, cx| {
1977 buffer.set_completion_triggers(
1978 language_server
1979 .capabilities()
1980 .completion_provider
1981 .as_ref()
1982 .and_then(|provider| {
1983 provider.trigger_characters.clone()
1984 })
1985 .unwrap_or(Vec::new()),
1986 cx,
1987 )
1988 });
1989 }
1990 }
1991
1992 cx.notify();
1993 Some(())
1994 });
1995
1996 Some(language_server)
1997 })
1998 });
1999 }
2000
2001 pub fn restart_language_servers_for_buffers(
2002 &mut self,
2003 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2004 cx: &mut ModelContext<Self>,
2005 ) -> Option<()> {
2006 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2007 .into_iter()
2008 .filter_map(|buffer| {
2009 let file = File::from_dyn(buffer.read(cx).file())?;
2010 let worktree = file.worktree.read(cx).as_local()?;
2011 let worktree_id = worktree.id();
2012 let worktree_abs_path = worktree.abs_path().clone();
2013 let full_path = file.full_path(cx);
2014 Some((worktree_id, worktree_abs_path, full_path))
2015 })
2016 .collect();
2017 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2018 let language = self.languages.select_language(&full_path)?;
2019 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2020 }
2021
2022 None
2023 }
2024
2025 fn restart_language_server(
2026 &mut self,
2027 worktree_id: WorktreeId,
2028 worktree_path: Arc<Path>,
2029 language: Arc<Language>,
2030 cx: &mut ModelContext<Self>,
2031 ) {
2032 let adapter = if let Some(adapter) = language.lsp_adapter() {
2033 adapter
2034 } else {
2035 return;
2036 };
2037 let key = (worktree_id, adapter.name());
2038 let server_to_shutdown = self.language_servers.remove(&key);
2039 self.started_language_servers.remove(&key);
2040 server_to_shutdown
2041 .as_ref()
2042 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
2043 cx.spawn_weak(|this, mut cx| async move {
2044 if let Some(this) = this.upgrade(&cx) {
2045 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2046 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2047 shutdown_task.await;
2048 }
2049 }
2050
2051 this.update(&mut cx, |this, cx| {
2052 this.start_language_server(worktree_id, worktree_path, language, cx);
2053 });
2054 }
2055 })
2056 .detach();
2057 }
2058
2059 fn on_lsp_diagnostics_published(
2060 &mut self,
2061 server_id: usize,
2062 mut params: lsp::PublishDiagnosticsParams,
2063 adapter: &Arc<dyn LspAdapter>,
2064 disk_based_diagnostics_progress_token: Option<&str>,
2065 cx: &mut ModelContext<Self>,
2066 ) {
2067 adapter.process_diagnostics(&mut params);
2068 if disk_based_diagnostics_progress_token.is_none() {
2069 self.disk_based_diagnostics_started(cx);
2070 self.broadcast_language_server_update(
2071 server_id,
2072 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2073 proto::LspDiskBasedDiagnosticsUpdating {},
2074 ),
2075 );
2076 }
2077 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2078 .log_err();
2079 if disk_based_diagnostics_progress_token.is_none() {
2080 self.disk_based_diagnostics_finished(cx);
2081 self.broadcast_language_server_update(
2082 server_id,
2083 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2084 proto::LspDiskBasedDiagnosticsUpdated {},
2085 ),
2086 );
2087 }
2088 }
2089
2090 fn on_lsp_progress(
2091 &mut self,
2092 progress: lsp::ProgressParams,
2093 server_id: usize,
2094 disk_based_diagnostics_progress_token: Option<&str>,
2095 cx: &mut ModelContext<Self>,
2096 ) {
2097 let token = match progress.token {
2098 lsp::NumberOrString::String(token) => token,
2099 lsp::NumberOrString::Number(token) => {
2100 log::info!("skipping numeric progress token {}", token);
2101 return;
2102 }
2103 };
2104 let progress = match progress.value {
2105 lsp::ProgressParamsValue::WorkDone(value) => value,
2106 };
2107 let language_server_status =
2108 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2109 status
2110 } else {
2111 return;
2112 };
2113 match progress {
2114 lsp::WorkDoneProgress::Begin(_) => {
2115 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2116 language_server_status.pending_diagnostic_updates += 1;
2117 if language_server_status.pending_diagnostic_updates == 1 {
2118 self.disk_based_diagnostics_started(cx);
2119 self.broadcast_language_server_update(
2120 server_id,
2121 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2122 proto::LspDiskBasedDiagnosticsUpdating {},
2123 ),
2124 );
2125 }
2126 } else {
2127 self.on_lsp_work_start(server_id, token.clone(), cx);
2128 self.broadcast_language_server_update(
2129 server_id,
2130 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2131 token,
2132 }),
2133 );
2134 }
2135 }
2136 lsp::WorkDoneProgress::Report(report) => {
2137 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2138 self.on_lsp_work_progress(
2139 server_id,
2140 token.clone(),
2141 LanguageServerProgress {
2142 message: report.message.clone(),
2143 percentage: report.percentage.map(|p| p as usize),
2144 last_update_at: Instant::now(),
2145 },
2146 cx,
2147 );
2148 self.broadcast_language_server_update(
2149 server_id,
2150 proto::update_language_server::Variant::WorkProgress(
2151 proto::LspWorkProgress {
2152 token,
2153 message: report.message,
2154 percentage: report.percentage.map(|p| p as u32),
2155 },
2156 ),
2157 );
2158 }
2159 }
2160 lsp::WorkDoneProgress::End(_) => {
2161 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2162 language_server_status.pending_diagnostic_updates -= 1;
2163 if language_server_status.pending_diagnostic_updates == 0 {
2164 self.disk_based_diagnostics_finished(cx);
2165 self.broadcast_language_server_update(
2166 server_id,
2167 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2168 proto::LspDiskBasedDiagnosticsUpdated {},
2169 ),
2170 );
2171 }
2172 } else {
2173 self.on_lsp_work_end(server_id, token.clone(), cx);
2174 self.broadcast_language_server_update(
2175 server_id,
2176 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2177 token,
2178 }),
2179 );
2180 }
2181 }
2182 }
2183 }
2184
2185 fn on_lsp_work_start(
2186 &mut self,
2187 language_server_id: usize,
2188 token: String,
2189 cx: &mut ModelContext<Self>,
2190 ) {
2191 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2192 status.pending_work.insert(
2193 token,
2194 LanguageServerProgress {
2195 message: None,
2196 percentage: None,
2197 last_update_at: Instant::now(),
2198 },
2199 );
2200 cx.notify();
2201 }
2202 }
2203
2204 fn on_lsp_work_progress(
2205 &mut self,
2206 language_server_id: usize,
2207 token: String,
2208 progress: LanguageServerProgress,
2209 cx: &mut ModelContext<Self>,
2210 ) {
2211 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2212 status.pending_work.insert(token, progress);
2213 cx.notify();
2214 }
2215 }
2216
2217 fn on_lsp_work_end(
2218 &mut self,
2219 language_server_id: usize,
2220 token: String,
2221 cx: &mut ModelContext<Self>,
2222 ) {
2223 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2224 status.pending_work.remove(&token);
2225 cx.notify();
2226 }
2227 }
2228
2229 async fn on_lsp_workspace_edit(
2230 this: WeakModelHandle<Self>,
2231 params: lsp::ApplyWorkspaceEditParams,
2232 server_id: usize,
2233 adapter: Arc<dyn LspAdapter>,
2234 language_server: Arc<LanguageServer>,
2235 mut cx: AsyncAppContext,
2236 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2237 let this = this
2238 .upgrade(&cx)
2239 .ok_or_else(|| anyhow!("project project closed"))?;
2240 let transaction = Self::deserialize_workspace_edit(
2241 this.clone(),
2242 params.edit,
2243 true,
2244 adapter.clone(),
2245 language_server.clone(),
2246 &mut cx,
2247 )
2248 .await
2249 .log_err();
2250 this.update(&mut cx, |this, _| {
2251 if let Some(transaction) = transaction {
2252 this.last_workspace_edits_by_language_server
2253 .insert(server_id, transaction);
2254 }
2255 });
2256 Ok(lsp::ApplyWorkspaceEditResponse {
2257 applied: true,
2258 failed_change: None,
2259 failure_reason: None,
2260 })
2261 }
2262
2263 fn broadcast_language_server_update(
2264 &self,
2265 language_server_id: usize,
2266 event: proto::update_language_server::Variant,
2267 ) {
2268 if let Some(project_id) = self.shared_remote_id() {
2269 self.client
2270 .send(proto::UpdateLanguageServer {
2271 project_id,
2272 language_server_id: language_server_id as u64,
2273 variant: Some(event),
2274 })
2275 .log_err();
2276 }
2277 }
2278
2279 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2280 for (_, server) in self.language_servers.values() {
2281 server
2282 .notify::<lsp::notification::DidChangeConfiguration>(
2283 lsp::DidChangeConfigurationParams {
2284 settings: settings.clone(),
2285 },
2286 )
2287 .ok();
2288 }
2289 *self.language_server_settings.lock() = settings;
2290 }
2291
2292 pub fn language_server_statuses(
2293 &self,
2294 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2295 self.language_server_statuses.values()
2296 }
2297
2298 pub fn update_diagnostics(
2299 &mut self,
2300 params: lsp::PublishDiagnosticsParams,
2301 disk_based_sources: &[&str],
2302 cx: &mut ModelContext<Self>,
2303 ) -> Result<()> {
2304 let abs_path = params
2305 .uri
2306 .to_file_path()
2307 .map_err(|_| anyhow!("URI is not a file"))?;
2308 let mut diagnostics = Vec::default();
2309 let mut primary_diagnostic_group_ids = HashMap::default();
2310 let mut sources_by_group_id = HashMap::default();
2311 let mut supporting_diagnostics = HashMap::default();
2312 for diagnostic in ¶ms.diagnostics {
2313 let source = diagnostic.source.as_ref();
2314 let code = diagnostic.code.as_ref().map(|code| match code {
2315 lsp::NumberOrString::Number(code) => code.to_string(),
2316 lsp::NumberOrString::String(code) => code.clone(),
2317 });
2318 let range = range_from_lsp(diagnostic.range);
2319 let is_supporting = diagnostic
2320 .related_information
2321 .as_ref()
2322 .map_or(false, |infos| {
2323 infos.iter().any(|info| {
2324 primary_diagnostic_group_ids.contains_key(&(
2325 source,
2326 code.clone(),
2327 range_from_lsp(info.location.range),
2328 ))
2329 })
2330 });
2331
2332 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2333 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2334 });
2335
2336 if is_supporting {
2337 supporting_diagnostics.insert(
2338 (source, code.clone(), range),
2339 (diagnostic.severity, is_unnecessary),
2340 );
2341 } else {
2342 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2343 let is_disk_based = source.map_or(false, |source| {
2344 disk_based_sources.contains(&source.as_str())
2345 });
2346
2347 sources_by_group_id.insert(group_id, source);
2348 primary_diagnostic_group_ids
2349 .insert((source, code.clone(), range.clone()), group_id);
2350
2351 diagnostics.push(DiagnosticEntry {
2352 range,
2353 diagnostic: Diagnostic {
2354 code: code.clone(),
2355 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2356 message: diagnostic.message.clone(),
2357 group_id,
2358 is_primary: true,
2359 is_valid: true,
2360 is_disk_based,
2361 is_unnecessary,
2362 },
2363 });
2364 if let Some(infos) = &diagnostic.related_information {
2365 for info in infos {
2366 if info.location.uri == params.uri && !info.message.is_empty() {
2367 let range = range_from_lsp(info.location.range);
2368 diagnostics.push(DiagnosticEntry {
2369 range,
2370 diagnostic: Diagnostic {
2371 code: code.clone(),
2372 severity: DiagnosticSeverity::INFORMATION,
2373 message: info.message.clone(),
2374 group_id,
2375 is_primary: false,
2376 is_valid: true,
2377 is_disk_based,
2378 is_unnecessary: false,
2379 },
2380 });
2381 }
2382 }
2383 }
2384 }
2385 }
2386
2387 for entry in &mut diagnostics {
2388 let diagnostic = &mut entry.diagnostic;
2389 if !diagnostic.is_primary {
2390 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2391 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2392 source,
2393 diagnostic.code.clone(),
2394 entry.range.clone(),
2395 )) {
2396 if let Some(severity) = severity {
2397 diagnostic.severity = severity;
2398 }
2399 diagnostic.is_unnecessary = is_unnecessary;
2400 }
2401 }
2402 }
2403
2404 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2405 Ok(())
2406 }
2407
2408 pub fn update_diagnostic_entries(
2409 &mut self,
2410 abs_path: PathBuf,
2411 version: Option<i32>,
2412 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2413 cx: &mut ModelContext<Project>,
2414 ) -> Result<(), anyhow::Error> {
2415 let (worktree, relative_path) = self
2416 .find_local_worktree(&abs_path, cx)
2417 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2418 if !worktree.read(cx).is_visible() {
2419 return Ok(());
2420 }
2421
2422 let project_path = ProjectPath {
2423 worktree_id: worktree.read(cx).id(),
2424 path: relative_path.into(),
2425 };
2426 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2427 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2428 }
2429
2430 let updated = worktree.update(cx, |worktree, cx| {
2431 worktree
2432 .as_local_mut()
2433 .ok_or_else(|| anyhow!("not a local worktree"))?
2434 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2435 })?;
2436 if updated {
2437 cx.emit(Event::DiagnosticsUpdated(project_path));
2438 }
2439 Ok(())
2440 }
2441
2442 fn update_buffer_diagnostics(
2443 &mut self,
2444 buffer: &ModelHandle<Buffer>,
2445 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2446 version: Option<i32>,
2447 cx: &mut ModelContext<Self>,
2448 ) -> Result<()> {
2449 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2450 Ordering::Equal
2451 .then_with(|| b.is_primary.cmp(&a.is_primary))
2452 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2453 .then_with(|| a.severity.cmp(&b.severity))
2454 .then_with(|| a.message.cmp(&b.message))
2455 }
2456
2457 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2458
2459 diagnostics.sort_unstable_by(|a, b| {
2460 Ordering::Equal
2461 .then_with(|| a.range.start.cmp(&b.range.start))
2462 .then_with(|| b.range.end.cmp(&a.range.end))
2463 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2464 });
2465
2466 let mut sanitized_diagnostics = Vec::new();
2467 let edits_since_save = Patch::new(
2468 snapshot
2469 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2470 .collect(),
2471 );
2472 for entry in diagnostics {
2473 let start;
2474 let end;
2475 if entry.diagnostic.is_disk_based {
2476 // Some diagnostics are based on files on disk instead of buffers'
2477 // current contents. Adjust these diagnostics' ranges to reflect
2478 // any unsaved edits.
2479 start = edits_since_save.old_to_new(entry.range.start);
2480 end = edits_since_save.old_to_new(entry.range.end);
2481 } else {
2482 start = entry.range.start;
2483 end = entry.range.end;
2484 }
2485
2486 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2487 ..snapshot.clip_point_utf16(end, Bias::Right);
2488
2489 // Expand empty ranges by one character
2490 if range.start == range.end {
2491 range.end.column += 1;
2492 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2493 if range.start == range.end && range.end.column > 0 {
2494 range.start.column -= 1;
2495 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2496 }
2497 }
2498
2499 sanitized_diagnostics.push(DiagnosticEntry {
2500 range,
2501 diagnostic: entry.diagnostic,
2502 });
2503 }
2504 drop(edits_since_save);
2505
2506 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2507 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2508 Ok(())
2509 }
2510
2511 pub fn reload_buffers(
2512 &self,
2513 buffers: HashSet<ModelHandle<Buffer>>,
2514 push_to_history: bool,
2515 cx: &mut ModelContext<Self>,
2516 ) -> Task<Result<ProjectTransaction>> {
2517 let mut local_buffers = Vec::new();
2518 let mut remote_buffers = None;
2519 for buffer_handle in buffers {
2520 let buffer = buffer_handle.read(cx);
2521 if buffer.is_dirty() {
2522 if let Some(file) = File::from_dyn(buffer.file()) {
2523 if file.is_local() {
2524 local_buffers.push(buffer_handle);
2525 } else {
2526 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2527 }
2528 }
2529 }
2530 }
2531
2532 let remote_buffers = self.remote_id().zip(remote_buffers);
2533 let client = self.client.clone();
2534
2535 cx.spawn(|this, mut cx| async move {
2536 let mut project_transaction = ProjectTransaction::default();
2537
2538 if let Some((project_id, remote_buffers)) = remote_buffers {
2539 let response = client
2540 .request(proto::ReloadBuffers {
2541 project_id,
2542 buffer_ids: remote_buffers
2543 .iter()
2544 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2545 .collect(),
2546 })
2547 .await?
2548 .transaction
2549 .ok_or_else(|| anyhow!("missing transaction"))?;
2550 project_transaction = this
2551 .update(&mut cx, |this, cx| {
2552 this.deserialize_project_transaction(response, push_to_history, cx)
2553 })
2554 .await?;
2555 }
2556
2557 for buffer in local_buffers {
2558 let transaction = buffer
2559 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2560 .await?;
2561 buffer.update(&mut cx, |buffer, cx| {
2562 if let Some(transaction) = transaction {
2563 if !push_to_history {
2564 buffer.forget_transaction(transaction.id);
2565 }
2566 project_transaction.0.insert(cx.handle(), transaction);
2567 }
2568 });
2569 }
2570
2571 Ok(project_transaction)
2572 })
2573 }
2574
2575 pub fn format(
2576 &self,
2577 buffers: HashSet<ModelHandle<Buffer>>,
2578 push_to_history: bool,
2579 cx: &mut ModelContext<Project>,
2580 ) -> Task<Result<ProjectTransaction>> {
2581 let mut local_buffers = Vec::new();
2582 let mut remote_buffers = None;
2583 for buffer_handle in buffers {
2584 let buffer = buffer_handle.read(cx);
2585 if let Some(file) = File::from_dyn(buffer.file()) {
2586 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2587 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2588 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2589 }
2590 } else {
2591 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2592 }
2593 } else {
2594 return Task::ready(Ok(Default::default()));
2595 }
2596 }
2597
2598 let remote_buffers = self.remote_id().zip(remote_buffers);
2599 let client = self.client.clone();
2600
2601 cx.spawn(|this, mut cx| async move {
2602 let mut project_transaction = ProjectTransaction::default();
2603
2604 if let Some((project_id, remote_buffers)) = remote_buffers {
2605 let response = client
2606 .request(proto::FormatBuffers {
2607 project_id,
2608 buffer_ids: remote_buffers
2609 .iter()
2610 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2611 .collect(),
2612 })
2613 .await?
2614 .transaction
2615 .ok_or_else(|| anyhow!("missing transaction"))?;
2616 project_transaction = this
2617 .update(&mut cx, |this, cx| {
2618 this.deserialize_project_transaction(response, push_to_history, cx)
2619 })
2620 .await?;
2621 }
2622
2623 for (buffer, buffer_abs_path, language_server) in local_buffers {
2624 let text_document = lsp::TextDocumentIdentifier::new(
2625 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2626 );
2627 let capabilities = &language_server.capabilities();
2628 let tab_size = cx.update(|cx| {
2629 let language_name = buffer.read(cx).language().map(|language| language.name());
2630 cx.global::<Settings>().tab_size(language_name.as_deref())
2631 });
2632 let lsp_edits = if capabilities
2633 .document_formatting_provider
2634 .as_ref()
2635 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2636 {
2637 language_server
2638 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2639 text_document,
2640 options: lsp::FormattingOptions {
2641 tab_size,
2642 insert_spaces: true,
2643 insert_final_newline: Some(true),
2644 ..Default::default()
2645 },
2646 work_done_progress_params: Default::default(),
2647 })
2648 .await?
2649 } else if capabilities
2650 .document_range_formatting_provider
2651 .as_ref()
2652 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2653 {
2654 let buffer_start = lsp::Position::new(0, 0);
2655 let buffer_end =
2656 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2657 language_server
2658 .request::<lsp::request::RangeFormatting>(
2659 lsp::DocumentRangeFormattingParams {
2660 text_document,
2661 range: lsp::Range::new(buffer_start, buffer_end),
2662 options: lsp::FormattingOptions {
2663 tab_size: 4,
2664 insert_spaces: true,
2665 insert_final_newline: Some(true),
2666 ..Default::default()
2667 },
2668 work_done_progress_params: Default::default(),
2669 },
2670 )
2671 .await?
2672 } else {
2673 continue;
2674 };
2675
2676 if let Some(lsp_edits) = lsp_edits {
2677 let edits = this
2678 .update(&mut cx, |this, cx| {
2679 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2680 })
2681 .await?;
2682 buffer.update(&mut cx, |buffer, cx| {
2683 buffer.finalize_last_transaction();
2684 buffer.start_transaction();
2685 for (range, text) in edits {
2686 buffer.edit([(range, text)], cx);
2687 }
2688 if buffer.end_transaction(cx).is_some() {
2689 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2690 if !push_to_history {
2691 buffer.forget_transaction(transaction.id);
2692 }
2693 project_transaction.0.insert(cx.handle(), transaction);
2694 }
2695 });
2696 }
2697 }
2698
2699 Ok(project_transaction)
2700 })
2701 }
2702
2703 pub fn definition<T: ToPointUtf16>(
2704 &self,
2705 buffer: &ModelHandle<Buffer>,
2706 position: T,
2707 cx: &mut ModelContext<Self>,
2708 ) -> Task<Result<Vec<Location>>> {
2709 let position = position.to_point_utf16(buffer.read(cx));
2710 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2711 }
2712
2713 pub fn references<T: ToPointUtf16>(
2714 &self,
2715 buffer: &ModelHandle<Buffer>,
2716 position: T,
2717 cx: &mut ModelContext<Self>,
2718 ) -> Task<Result<Vec<Location>>> {
2719 let position = position.to_point_utf16(buffer.read(cx));
2720 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2721 }
2722
2723 pub fn document_highlights<T: ToPointUtf16>(
2724 &self,
2725 buffer: &ModelHandle<Buffer>,
2726 position: T,
2727 cx: &mut ModelContext<Self>,
2728 ) -> Task<Result<Vec<DocumentHighlight>>> {
2729 let position = position.to_point_utf16(buffer.read(cx));
2730
2731 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2732 }
2733
2734 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2735 if self.is_local() {
2736 let mut requests = Vec::new();
2737 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2738 let worktree_id = *worktree_id;
2739 if let Some(worktree) = self
2740 .worktree_for_id(worktree_id, cx)
2741 .and_then(|worktree| worktree.read(cx).as_local())
2742 {
2743 let lsp_adapter = lsp_adapter.clone();
2744 let worktree_abs_path = worktree.abs_path().clone();
2745 requests.push(
2746 language_server
2747 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2748 query: query.to_string(),
2749 ..Default::default()
2750 })
2751 .log_err()
2752 .map(move |response| {
2753 (
2754 lsp_adapter,
2755 worktree_id,
2756 worktree_abs_path,
2757 response.unwrap_or_default(),
2758 )
2759 }),
2760 );
2761 }
2762 }
2763
2764 cx.spawn_weak(|this, cx| async move {
2765 let responses = futures::future::join_all(requests).await;
2766 let this = if let Some(this) = this.upgrade(&cx) {
2767 this
2768 } else {
2769 return Ok(Default::default());
2770 };
2771 this.read_with(&cx, |this, cx| {
2772 let mut symbols = Vec::new();
2773 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2774 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2775 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2776 let mut worktree_id = source_worktree_id;
2777 let path;
2778 if let Some((worktree, rel_path)) =
2779 this.find_local_worktree(&abs_path, cx)
2780 {
2781 worktree_id = worktree.read(cx).id();
2782 path = rel_path;
2783 } else {
2784 path = relativize_path(&worktree_abs_path, &abs_path);
2785 }
2786
2787 let label = this
2788 .languages
2789 .select_language(&path)
2790 .and_then(|language| {
2791 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2792 })
2793 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2794 let signature = this.symbol_signature(worktree_id, &path);
2795
2796 Some(Symbol {
2797 source_worktree_id,
2798 worktree_id,
2799 language_server_name: adapter.name(),
2800 name: lsp_symbol.name,
2801 kind: lsp_symbol.kind,
2802 label,
2803 path,
2804 range: range_from_lsp(lsp_symbol.location.range),
2805 signature,
2806 })
2807 }));
2808 }
2809 Ok(symbols)
2810 })
2811 })
2812 } else if let Some(project_id) = self.remote_id() {
2813 let request = self.client.request(proto::GetProjectSymbols {
2814 project_id,
2815 query: query.to_string(),
2816 });
2817 cx.spawn_weak(|this, cx| async move {
2818 let response = request.await?;
2819 let mut symbols = Vec::new();
2820 if let Some(this) = this.upgrade(&cx) {
2821 this.read_with(&cx, |this, _| {
2822 symbols.extend(
2823 response
2824 .symbols
2825 .into_iter()
2826 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2827 );
2828 })
2829 }
2830 Ok(symbols)
2831 })
2832 } else {
2833 Task::ready(Ok(Default::default()))
2834 }
2835 }
2836
2837 pub fn open_buffer_for_symbol(
2838 &mut self,
2839 symbol: &Symbol,
2840 cx: &mut ModelContext<Self>,
2841 ) -> Task<Result<ModelHandle<Buffer>>> {
2842 if self.is_local() {
2843 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2844 symbol.source_worktree_id,
2845 symbol.language_server_name.clone(),
2846 )) {
2847 server.clone()
2848 } else {
2849 return Task::ready(Err(anyhow!(
2850 "language server for worktree and language not found"
2851 )));
2852 };
2853
2854 let worktree_abs_path = if let Some(worktree_abs_path) = self
2855 .worktree_for_id(symbol.worktree_id, cx)
2856 .and_then(|worktree| worktree.read(cx).as_local())
2857 .map(|local_worktree| local_worktree.abs_path())
2858 {
2859 worktree_abs_path
2860 } else {
2861 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2862 };
2863 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2864 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2865 uri
2866 } else {
2867 return Task::ready(Err(anyhow!("invalid symbol path")));
2868 };
2869
2870 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2871 } else if let Some(project_id) = self.remote_id() {
2872 let request = self.client.request(proto::OpenBufferForSymbol {
2873 project_id,
2874 symbol: Some(serialize_symbol(symbol)),
2875 });
2876 cx.spawn(|this, mut cx| async move {
2877 let response = request.await?;
2878 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2879 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2880 .await
2881 })
2882 } else {
2883 Task::ready(Err(anyhow!("project does not have a remote id")))
2884 }
2885 }
2886
2887 pub fn completions<T: ToPointUtf16>(
2888 &self,
2889 source_buffer_handle: &ModelHandle<Buffer>,
2890 position: T,
2891 cx: &mut ModelContext<Self>,
2892 ) -> Task<Result<Vec<Completion>>> {
2893 let source_buffer_handle = source_buffer_handle.clone();
2894 let source_buffer = source_buffer_handle.read(cx);
2895 let buffer_id = source_buffer.remote_id();
2896 let language = source_buffer.language().cloned();
2897 let worktree;
2898 let buffer_abs_path;
2899 if let Some(file) = File::from_dyn(source_buffer.file()) {
2900 worktree = file.worktree.clone();
2901 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2902 } else {
2903 return Task::ready(Ok(Default::default()));
2904 };
2905
2906 let position = position.to_point_utf16(source_buffer);
2907 let anchor = source_buffer.anchor_after(position);
2908
2909 if worktree.read(cx).as_local().is_some() {
2910 let buffer_abs_path = buffer_abs_path.unwrap();
2911 let (_, lang_server) =
2912 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2913 server.clone()
2914 } else {
2915 return Task::ready(Ok(Default::default()));
2916 };
2917
2918 cx.spawn(|_, cx| async move {
2919 let completions = lang_server
2920 .request::<lsp::request::Completion>(lsp::CompletionParams {
2921 text_document_position: lsp::TextDocumentPositionParams::new(
2922 lsp::TextDocumentIdentifier::new(
2923 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2924 ),
2925 point_to_lsp(position),
2926 ),
2927 context: Default::default(),
2928 work_done_progress_params: Default::default(),
2929 partial_result_params: Default::default(),
2930 })
2931 .await
2932 .context("lsp completion request failed")?;
2933
2934 let completions = if let Some(completions) = completions {
2935 match completions {
2936 lsp::CompletionResponse::Array(completions) => completions,
2937 lsp::CompletionResponse::List(list) => list.items,
2938 }
2939 } else {
2940 Default::default()
2941 };
2942
2943 source_buffer_handle.read_with(&cx, |this, _| {
2944 let snapshot = this.snapshot();
2945 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2946 let mut range_for_token = None;
2947 Ok(completions
2948 .into_iter()
2949 .filter_map(|lsp_completion| {
2950 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2951 // If the language server provides a range to overwrite, then
2952 // check that the range is valid.
2953 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2954 let range = range_from_lsp(edit.range);
2955 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2956 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2957 if start != range.start || end != range.end {
2958 log::info!("completion out of expected range");
2959 return None;
2960 }
2961 (
2962 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2963 edit.new_text.clone(),
2964 )
2965 }
2966 // If the language server does not provide a range, then infer
2967 // the range based on the syntax tree.
2968 None => {
2969 if position != clipped_position {
2970 log::info!("completion out of expected range");
2971 return None;
2972 }
2973 let Range { start, end } = range_for_token
2974 .get_or_insert_with(|| {
2975 let offset = position.to_offset(&snapshot);
2976 snapshot
2977 .range_for_word_token_at(offset)
2978 .unwrap_or_else(|| offset..offset)
2979 })
2980 .clone();
2981 let text = lsp_completion
2982 .insert_text
2983 .as_ref()
2984 .unwrap_or(&lsp_completion.label)
2985 .clone();
2986 (
2987 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2988 text.clone(),
2989 )
2990 }
2991 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2992 log::info!("unsupported insert/replace completion");
2993 return None;
2994 }
2995 };
2996
2997 Some(Completion {
2998 old_range,
2999 new_text,
3000 label: language
3001 .as_ref()
3002 .and_then(|l| l.label_for_completion(&lsp_completion))
3003 .unwrap_or_else(|| {
3004 CodeLabel::plain(
3005 lsp_completion.label.clone(),
3006 lsp_completion.filter_text.as_deref(),
3007 )
3008 }),
3009 lsp_completion,
3010 })
3011 })
3012 .collect())
3013 })
3014 })
3015 } else if let Some(project_id) = self.remote_id() {
3016 let rpc = self.client.clone();
3017 let message = proto::GetCompletions {
3018 project_id,
3019 buffer_id,
3020 position: Some(language::proto::serialize_anchor(&anchor)),
3021 version: serialize_version(&source_buffer.version()),
3022 };
3023 cx.spawn_weak(|_, mut cx| async move {
3024 let response = rpc.request(message).await?;
3025
3026 source_buffer_handle
3027 .update(&mut cx, |buffer, _| {
3028 buffer.wait_for_version(deserialize_version(response.version))
3029 })
3030 .await;
3031
3032 response
3033 .completions
3034 .into_iter()
3035 .map(|completion| {
3036 language::proto::deserialize_completion(completion, language.as_ref())
3037 })
3038 .collect()
3039 })
3040 } else {
3041 Task::ready(Ok(Default::default()))
3042 }
3043 }
3044
3045 pub fn apply_additional_edits_for_completion(
3046 &self,
3047 buffer_handle: ModelHandle<Buffer>,
3048 completion: Completion,
3049 push_to_history: bool,
3050 cx: &mut ModelContext<Self>,
3051 ) -> Task<Result<Option<Transaction>>> {
3052 let buffer = buffer_handle.read(cx);
3053 let buffer_id = buffer.remote_id();
3054
3055 if self.is_local() {
3056 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3057 {
3058 server.clone()
3059 } else {
3060 return Task::ready(Ok(Default::default()));
3061 };
3062
3063 cx.spawn(|this, mut cx| async move {
3064 let resolved_completion = lang_server
3065 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3066 .await?;
3067 if let Some(edits) = resolved_completion.additional_text_edits {
3068 let edits = this
3069 .update(&mut cx, |this, cx| {
3070 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3071 })
3072 .await?;
3073 buffer_handle.update(&mut cx, |buffer, cx| {
3074 buffer.finalize_last_transaction();
3075 buffer.start_transaction();
3076 for (range, text) in edits {
3077 buffer.edit([(range, text)], cx);
3078 }
3079 let transaction = if buffer.end_transaction(cx).is_some() {
3080 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3081 if !push_to_history {
3082 buffer.forget_transaction(transaction.id);
3083 }
3084 Some(transaction)
3085 } else {
3086 None
3087 };
3088 Ok(transaction)
3089 })
3090 } else {
3091 Ok(None)
3092 }
3093 })
3094 } else if let Some(project_id) = self.remote_id() {
3095 let client = self.client.clone();
3096 cx.spawn(|_, mut cx| async move {
3097 let response = client
3098 .request(proto::ApplyCompletionAdditionalEdits {
3099 project_id,
3100 buffer_id,
3101 completion: Some(language::proto::serialize_completion(&completion)),
3102 })
3103 .await?;
3104
3105 if let Some(transaction) = response.transaction {
3106 let transaction = language::proto::deserialize_transaction(transaction)?;
3107 buffer_handle
3108 .update(&mut cx, |buffer, _| {
3109 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3110 })
3111 .await;
3112 if push_to_history {
3113 buffer_handle.update(&mut cx, |buffer, _| {
3114 buffer.push_transaction(transaction.clone(), Instant::now());
3115 });
3116 }
3117 Ok(Some(transaction))
3118 } else {
3119 Ok(None)
3120 }
3121 })
3122 } else {
3123 Task::ready(Err(anyhow!("project does not have a remote id")))
3124 }
3125 }
3126
3127 pub fn code_actions<T: Clone + ToOffset>(
3128 &self,
3129 buffer_handle: &ModelHandle<Buffer>,
3130 range: Range<T>,
3131 cx: &mut ModelContext<Self>,
3132 ) -> Task<Result<Vec<CodeAction>>> {
3133 let buffer_handle = buffer_handle.clone();
3134 let buffer = buffer_handle.read(cx);
3135 let snapshot = buffer.snapshot();
3136 let relevant_diagnostics = snapshot
3137 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3138 .map(|entry| entry.to_lsp_diagnostic_stub())
3139 .collect();
3140 let buffer_id = buffer.remote_id();
3141 let worktree;
3142 let buffer_abs_path;
3143 if let Some(file) = File::from_dyn(buffer.file()) {
3144 worktree = file.worktree.clone();
3145 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3146 } else {
3147 return Task::ready(Ok(Default::default()));
3148 };
3149 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3150
3151 if worktree.read(cx).as_local().is_some() {
3152 let buffer_abs_path = buffer_abs_path.unwrap();
3153 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3154 {
3155 server.clone()
3156 } else {
3157 return Task::ready(Ok(Default::default()));
3158 };
3159
3160 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3161 cx.foreground().spawn(async move {
3162 if !lang_server.capabilities().code_action_provider.is_some() {
3163 return Ok(Default::default());
3164 }
3165
3166 Ok(lang_server
3167 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3168 text_document: lsp::TextDocumentIdentifier::new(
3169 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3170 ),
3171 range: lsp_range,
3172 work_done_progress_params: Default::default(),
3173 partial_result_params: Default::default(),
3174 context: lsp::CodeActionContext {
3175 diagnostics: relevant_diagnostics,
3176 only: Some(vec![
3177 lsp::CodeActionKind::QUICKFIX,
3178 lsp::CodeActionKind::REFACTOR,
3179 lsp::CodeActionKind::REFACTOR_EXTRACT,
3180 lsp::CodeActionKind::SOURCE,
3181 ]),
3182 },
3183 })
3184 .await?
3185 .unwrap_or_default()
3186 .into_iter()
3187 .filter_map(|entry| {
3188 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3189 Some(CodeAction {
3190 range: range.clone(),
3191 lsp_action,
3192 })
3193 } else {
3194 None
3195 }
3196 })
3197 .collect())
3198 })
3199 } else if let Some(project_id) = self.remote_id() {
3200 let rpc = self.client.clone();
3201 let version = buffer.version();
3202 cx.spawn_weak(|_, mut cx| async move {
3203 let response = rpc
3204 .request(proto::GetCodeActions {
3205 project_id,
3206 buffer_id,
3207 start: Some(language::proto::serialize_anchor(&range.start)),
3208 end: Some(language::proto::serialize_anchor(&range.end)),
3209 version: serialize_version(&version),
3210 })
3211 .await?;
3212
3213 buffer_handle
3214 .update(&mut cx, |buffer, _| {
3215 buffer.wait_for_version(deserialize_version(response.version))
3216 })
3217 .await;
3218
3219 response
3220 .actions
3221 .into_iter()
3222 .map(language::proto::deserialize_code_action)
3223 .collect()
3224 })
3225 } else {
3226 Task::ready(Ok(Default::default()))
3227 }
3228 }
3229
3230 pub fn apply_code_action(
3231 &self,
3232 buffer_handle: ModelHandle<Buffer>,
3233 mut action: CodeAction,
3234 push_to_history: bool,
3235 cx: &mut ModelContext<Self>,
3236 ) -> Task<Result<ProjectTransaction>> {
3237 if self.is_local() {
3238 let buffer = buffer_handle.read(cx);
3239 let (lsp_adapter, lang_server) =
3240 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3241 server.clone()
3242 } else {
3243 return Task::ready(Ok(Default::default()));
3244 };
3245 let range = action.range.to_point_utf16(buffer);
3246
3247 cx.spawn(|this, mut cx| async move {
3248 if let Some(lsp_range) = action
3249 .lsp_action
3250 .data
3251 .as_mut()
3252 .and_then(|d| d.get_mut("codeActionParams"))
3253 .and_then(|d| d.get_mut("range"))
3254 {
3255 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3256 action.lsp_action = lang_server
3257 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3258 .await?;
3259 } else {
3260 let actions = this
3261 .update(&mut cx, |this, cx| {
3262 this.code_actions(&buffer_handle, action.range, cx)
3263 })
3264 .await?;
3265 action.lsp_action = actions
3266 .into_iter()
3267 .find(|a| a.lsp_action.title == action.lsp_action.title)
3268 .ok_or_else(|| anyhow!("code action is outdated"))?
3269 .lsp_action;
3270 }
3271
3272 if let Some(edit) = action.lsp_action.edit {
3273 Self::deserialize_workspace_edit(
3274 this,
3275 edit,
3276 push_to_history,
3277 lsp_adapter,
3278 lang_server,
3279 &mut cx,
3280 )
3281 .await
3282 } else if let Some(command) = action.lsp_action.command {
3283 this.update(&mut cx, |this, _| {
3284 this.last_workspace_edits_by_language_server
3285 .remove(&lang_server.server_id());
3286 });
3287 lang_server
3288 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3289 command: command.command,
3290 arguments: command.arguments.unwrap_or_default(),
3291 ..Default::default()
3292 })
3293 .await?;
3294 Ok(this.update(&mut cx, |this, _| {
3295 this.last_workspace_edits_by_language_server
3296 .remove(&lang_server.server_id())
3297 .unwrap_or_default()
3298 }))
3299 } else {
3300 Ok(ProjectTransaction::default())
3301 }
3302 })
3303 } else if let Some(project_id) = self.remote_id() {
3304 let client = self.client.clone();
3305 let request = proto::ApplyCodeAction {
3306 project_id,
3307 buffer_id: buffer_handle.read(cx).remote_id(),
3308 action: Some(language::proto::serialize_code_action(&action)),
3309 };
3310 cx.spawn(|this, mut cx| async move {
3311 let response = client
3312 .request(request)
3313 .await?
3314 .transaction
3315 .ok_or_else(|| anyhow!("missing transaction"))?;
3316 this.update(&mut cx, |this, cx| {
3317 this.deserialize_project_transaction(response, push_to_history, cx)
3318 })
3319 .await
3320 })
3321 } else {
3322 Task::ready(Err(anyhow!("project does not have a remote id")))
3323 }
3324 }
3325
3326 async fn deserialize_workspace_edit(
3327 this: ModelHandle<Self>,
3328 edit: lsp::WorkspaceEdit,
3329 push_to_history: bool,
3330 lsp_adapter: Arc<dyn LspAdapter>,
3331 language_server: Arc<LanguageServer>,
3332 cx: &mut AsyncAppContext,
3333 ) -> Result<ProjectTransaction> {
3334 let fs = this.read_with(cx, |this, _| this.fs.clone());
3335 let mut operations = Vec::new();
3336 if let Some(document_changes) = edit.document_changes {
3337 match document_changes {
3338 lsp::DocumentChanges::Edits(edits) => {
3339 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3340 }
3341 lsp::DocumentChanges::Operations(ops) => operations = ops,
3342 }
3343 } else if let Some(changes) = edit.changes {
3344 operations.extend(changes.into_iter().map(|(uri, edits)| {
3345 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3346 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3347 uri,
3348 version: None,
3349 },
3350 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3351 })
3352 }));
3353 }
3354
3355 let mut project_transaction = ProjectTransaction::default();
3356 for operation in operations {
3357 match operation {
3358 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3359 let abs_path = op
3360 .uri
3361 .to_file_path()
3362 .map_err(|_| anyhow!("can't convert URI to path"))?;
3363
3364 if let Some(parent_path) = abs_path.parent() {
3365 fs.create_dir(parent_path).await?;
3366 }
3367 if abs_path.ends_with("/") {
3368 fs.create_dir(&abs_path).await?;
3369 } else {
3370 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3371 .await?;
3372 }
3373 }
3374 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3375 let source_abs_path = op
3376 .old_uri
3377 .to_file_path()
3378 .map_err(|_| anyhow!("can't convert URI to path"))?;
3379 let target_abs_path = op
3380 .new_uri
3381 .to_file_path()
3382 .map_err(|_| anyhow!("can't convert URI to path"))?;
3383 fs.rename(
3384 &source_abs_path,
3385 &target_abs_path,
3386 op.options.map(Into::into).unwrap_or_default(),
3387 )
3388 .await?;
3389 }
3390 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3391 let abs_path = op
3392 .uri
3393 .to_file_path()
3394 .map_err(|_| anyhow!("can't convert URI to path"))?;
3395 let options = op.options.map(Into::into).unwrap_or_default();
3396 if abs_path.ends_with("/") {
3397 fs.remove_dir(&abs_path, options).await?;
3398 } else {
3399 fs.remove_file(&abs_path, options).await?;
3400 }
3401 }
3402 lsp::DocumentChangeOperation::Edit(op) => {
3403 let buffer_to_edit = this
3404 .update(cx, |this, cx| {
3405 this.open_local_buffer_via_lsp(
3406 op.text_document.uri,
3407 lsp_adapter.clone(),
3408 language_server.clone(),
3409 cx,
3410 )
3411 })
3412 .await?;
3413
3414 let edits = this
3415 .update(cx, |this, cx| {
3416 let edits = op.edits.into_iter().map(|edit| match edit {
3417 lsp::OneOf::Left(edit) => edit,
3418 lsp::OneOf::Right(edit) => edit.text_edit,
3419 });
3420 this.edits_from_lsp(
3421 &buffer_to_edit,
3422 edits,
3423 op.text_document.version,
3424 cx,
3425 )
3426 })
3427 .await?;
3428
3429 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3430 buffer.finalize_last_transaction();
3431 buffer.start_transaction();
3432 for (range, text) in edits {
3433 buffer.edit([(range, text)], cx);
3434 }
3435 let transaction = if buffer.end_transaction(cx).is_some() {
3436 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3437 if !push_to_history {
3438 buffer.forget_transaction(transaction.id);
3439 }
3440 Some(transaction)
3441 } else {
3442 None
3443 };
3444
3445 transaction
3446 });
3447 if let Some(transaction) = transaction {
3448 project_transaction.0.insert(buffer_to_edit, transaction);
3449 }
3450 }
3451 }
3452 }
3453
3454 Ok(project_transaction)
3455 }
3456
3457 pub fn prepare_rename<T: ToPointUtf16>(
3458 &self,
3459 buffer: ModelHandle<Buffer>,
3460 position: T,
3461 cx: &mut ModelContext<Self>,
3462 ) -> Task<Result<Option<Range<Anchor>>>> {
3463 let position = position.to_point_utf16(buffer.read(cx));
3464 self.request_lsp(buffer, PrepareRename { position }, cx)
3465 }
3466
3467 pub fn perform_rename<T: ToPointUtf16>(
3468 &self,
3469 buffer: ModelHandle<Buffer>,
3470 position: T,
3471 new_name: String,
3472 push_to_history: bool,
3473 cx: &mut ModelContext<Self>,
3474 ) -> Task<Result<ProjectTransaction>> {
3475 let position = position.to_point_utf16(buffer.read(cx));
3476 self.request_lsp(
3477 buffer,
3478 PerformRename {
3479 position,
3480 new_name,
3481 push_to_history,
3482 },
3483 cx,
3484 )
3485 }
3486
3487 pub fn search(
3488 &self,
3489 query: SearchQuery,
3490 cx: &mut ModelContext<Self>,
3491 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3492 if self.is_local() {
3493 let snapshots = self
3494 .visible_worktrees(cx)
3495 .filter_map(|tree| {
3496 let tree = tree.read(cx).as_local()?;
3497 Some(tree.snapshot())
3498 })
3499 .collect::<Vec<_>>();
3500
3501 let background = cx.background().clone();
3502 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3503 if path_count == 0 {
3504 return Task::ready(Ok(Default::default()));
3505 }
3506 let workers = background.num_cpus().min(path_count);
3507 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3508 cx.background()
3509 .spawn({
3510 let fs = self.fs.clone();
3511 let background = cx.background().clone();
3512 let query = query.clone();
3513 async move {
3514 let fs = &fs;
3515 let query = &query;
3516 let matching_paths_tx = &matching_paths_tx;
3517 let paths_per_worker = (path_count + workers - 1) / workers;
3518 let snapshots = &snapshots;
3519 background
3520 .scoped(|scope| {
3521 for worker_ix in 0..workers {
3522 let worker_start_ix = worker_ix * paths_per_worker;
3523 let worker_end_ix = worker_start_ix + paths_per_worker;
3524 scope.spawn(async move {
3525 let mut snapshot_start_ix = 0;
3526 let mut abs_path = PathBuf::new();
3527 for snapshot in snapshots {
3528 let snapshot_end_ix =
3529 snapshot_start_ix + snapshot.visible_file_count();
3530 if worker_end_ix <= snapshot_start_ix {
3531 break;
3532 } else if worker_start_ix > snapshot_end_ix {
3533 snapshot_start_ix = snapshot_end_ix;
3534 continue;
3535 } else {
3536 let start_in_snapshot = worker_start_ix
3537 .saturating_sub(snapshot_start_ix);
3538 let end_in_snapshot =
3539 cmp::min(worker_end_ix, snapshot_end_ix)
3540 - snapshot_start_ix;
3541
3542 for entry in snapshot
3543 .files(false, start_in_snapshot)
3544 .take(end_in_snapshot - start_in_snapshot)
3545 {
3546 if matching_paths_tx.is_closed() {
3547 break;
3548 }
3549
3550 abs_path.clear();
3551 abs_path.push(&snapshot.abs_path());
3552 abs_path.push(&entry.path);
3553 let matches = if let Some(file) =
3554 fs.open_sync(&abs_path).await.log_err()
3555 {
3556 query.detect(file).unwrap_or(false)
3557 } else {
3558 false
3559 };
3560
3561 if matches {
3562 let project_path =
3563 (snapshot.id(), entry.path.clone());
3564 if matching_paths_tx
3565 .send(project_path)
3566 .await
3567 .is_err()
3568 {
3569 break;
3570 }
3571 }
3572 }
3573
3574 snapshot_start_ix = snapshot_end_ix;
3575 }
3576 }
3577 });
3578 }
3579 })
3580 .await;
3581 }
3582 })
3583 .detach();
3584
3585 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3586 let open_buffers = self
3587 .opened_buffers
3588 .values()
3589 .filter_map(|b| b.upgrade(cx))
3590 .collect::<HashSet<_>>();
3591 cx.spawn(|this, cx| async move {
3592 for buffer in &open_buffers {
3593 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3594 buffers_tx.send((buffer.clone(), snapshot)).await?;
3595 }
3596
3597 let open_buffers = Rc::new(RefCell::new(open_buffers));
3598 while let Some(project_path) = matching_paths_rx.next().await {
3599 if buffers_tx.is_closed() {
3600 break;
3601 }
3602
3603 let this = this.clone();
3604 let open_buffers = open_buffers.clone();
3605 let buffers_tx = buffers_tx.clone();
3606 cx.spawn(|mut cx| async move {
3607 if let Some(buffer) = this
3608 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3609 .await
3610 .log_err()
3611 {
3612 if open_buffers.borrow_mut().insert(buffer.clone()) {
3613 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3614 buffers_tx.send((buffer, snapshot)).await?;
3615 }
3616 }
3617
3618 Ok::<_, anyhow::Error>(())
3619 })
3620 .detach();
3621 }
3622
3623 Ok::<_, anyhow::Error>(())
3624 })
3625 .detach_and_log_err(cx);
3626
3627 let background = cx.background().clone();
3628 cx.background().spawn(async move {
3629 let query = &query;
3630 let mut matched_buffers = Vec::new();
3631 for _ in 0..workers {
3632 matched_buffers.push(HashMap::default());
3633 }
3634 background
3635 .scoped(|scope| {
3636 for worker_matched_buffers in matched_buffers.iter_mut() {
3637 let mut buffers_rx = buffers_rx.clone();
3638 scope.spawn(async move {
3639 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3640 let buffer_matches = query
3641 .search(snapshot.as_rope())
3642 .await
3643 .iter()
3644 .map(|range| {
3645 snapshot.anchor_before(range.start)
3646 ..snapshot.anchor_after(range.end)
3647 })
3648 .collect::<Vec<_>>();
3649 if !buffer_matches.is_empty() {
3650 worker_matched_buffers
3651 .insert(buffer.clone(), buffer_matches);
3652 }
3653 }
3654 });
3655 }
3656 })
3657 .await;
3658 Ok(matched_buffers.into_iter().flatten().collect())
3659 })
3660 } else if let Some(project_id) = self.remote_id() {
3661 let request = self.client.request(query.to_proto(project_id));
3662 cx.spawn(|this, mut cx| async move {
3663 let response = request.await?;
3664 let mut result = HashMap::default();
3665 for location in response.locations {
3666 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3667 let target_buffer = this
3668 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3669 .await?;
3670 let start = location
3671 .start
3672 .and_then(deserialize_anchor)
3673 .ok_or_else(|| anyhow!("missing target start"))?;
3674 let end = location
3675 .end
3676 .and_then(deserialize_anchor)
3677 .ok_or_else(|| anyhow!("missing target end"))?;
3678 result
3679 .entry(target_buffer)
3680 .or_insert(Vec::new())
3681 .push(start..end)
3682 }
3683 Ok(result)
3684 })
3685 } else {
3686 Task::ready(Ok(Default::default()))
3687 }
3688 }
3689
3690 fn request_lsp<R: LspCommand>(
3691 &self,
3692 buffer_handle: ModelHandle<Buffer>,
3693 request: R,
3694 cx: &mut ModelContext<Self>,
3695 ) -> Task<Result<R::Response>>
3696 where
3697 <R::LspRequest as lsp::request::Request>::Result: Send,
3698 {
3699 let buffer = buffer_handle.read(cx);
3700 if self.is_local() {
3701 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3702 if let Some((file, (_, language_server))) =
3703 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3704 {
3705 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3706 return cx.spawn(|this, cx| async move {
3707 if !request.check_capabilities(&language_server.capabilities()) {
3708 return Ok(Default::default());
3709 }
3710
3711 let response = language_server
3712 .request::<R::LspRequest>(lsp_params)
3713 .await
3714 .context("lsp request failed")?;
3715 request
3716 .response_from_lsp(response, this, buffer_handle, cx)
3717 .await
3718 });
3719 }
3720 } else if let Some(project_id) = self.remote_id() {
3721 let rpc = self.client.clone();
3722 let message = request.to_proto(project_id, buffer);
3723 return cx.spawn(|this, cx| async move {
3724 let response = rpc.request(message).await?;
3725 request
3726 .response_from_proto(response, this, buffer_handle, cx)
3727 .await
3728 });
3729 }
3730 Task::ready(Ok(Default::default()))
3731 }
3732
3733 pub fn find_or_create_local_worktree(
3734 &mut self,
3735 abs_path: impl AsRef<Path>,
3736 visible: bool,
3737 cx: &mut ModelContext<Self>,
3738 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3739 let abs_path = abs_path.as_ref();
3740 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3741 Task::ready(Ok((tree.clone(), relative_path.into())))
3742 } else {
3743 let worktree = self.create_local_worktree(abs_path, visible, cx);
3744 cx.foreground()
3745 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3746 }
3747 }
3748
3749 pub fn find_local_worktree(
3750 &self,
3751 abs_path: &Path,
3752 cx: &AppContext,
3753 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3754 for tree in self.worktrees(cx) {
3755 if let Some(relative_path) = tree
3756 .read(cx)
3757 .as_local()
3758 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3759 {
3760 return Some((tree.clone(), relative_path.into()));
3761 }
3762 }
3763 None
3764 }
3765
3766 pub fn is_shared(&self) -> bool {
3767 match &self.client_state {
3768 ProjectClientState::Local { is_shared, .. } => *is_shared,
3769 ProjectClientState::Remote { .. } => false,
3770 }
3771 }
3772
3773 fn create_local_worktree(
3774 &mut self,
3775 abs_path: impl AsRef<Path>,
3776 visible: bool,
3777 cx: &mut ModelContext<Self>,
3778 ) -> Task<Result<ModelHandle<Worktree>>> {
3779 let fs = self.fs.clone();
3780 let client = self.client.clone();
3781 let next_entry_id = self.next_entry_id.clone();
3782 let path: Arc<Path> = abs_path.as_ref().into();
3783 let task = self
3784 .loading_local_worktrees
3785 .entry(path.clone())
3786 .or_insert_with(|| {
3787 cx.spawn(|project, mut cx| {
3788 async move {
3789 let worktree = Worktree::local(
3790 client.clone(),
3791 path.clone(),
3792 visible,
3793 fs,
3794 next_entry_id,
3795 &mut cx,
3796 )
3797 .await;
3798 project.update(&mut cx, |project, _| {
3799 project.loading_local_worktrees.remove(&path);
3800 });
3801 let worktree = worktree?;
3802
3803 let project_id = project.update(&mut cx, |project, cx| {
3804 project.add_worktree(&worktree, cx);
3805 project.shared_remote_id()
3806 });
3807
3808 if let Some(project_id) = project_id {
3809 worktree
3810 .update(&mut cx, |worktree, cx| {
3811 worktree.as_local_mut().unwrap().share(project_id, cx)
3812 })
3813 .await
3814 .log_err();
3815 }
3816
3817 Ok(worktree)
3818 }
3819 .map_err(|err| Arc::new(err))
3820 })
3821 .shared()
3822 })
3823 .clone();
3824 cx.foreground().spawn(async move {
3825 match task.await {
3826 Ok(worktree) => Ok(worktree),
3827 Err(err) => Err(anyhow!("{}", err)),
3828 }
3829 })
3830 }
3831
3832 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3833 self.worktrees.retain(|worktree| {
3834 if let Some(worktree) = worktree.upgrade(cx) {
3835 let id = worktree.read(cx).id();
3836 if id == id_to_remove {
3837 cx.emit(Event::WorktreeRemoved(id));
3838 false
3839 } else {
3840 true
3841 }
3842 } else {
3843 false
3844 }
3845 });
3846 self.metadata_changed(true, cx);
3847 cx.notify();
3848 }
3849
3850 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3851 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3852 if worktree.read(cx).is_local() {
3853 cx.subscribe(&worktree, |this, worktree, _, cx| {
3854 this.update_local_worktree_buffers(worktree, cx);
3855 })
3856 .detach();
3857 }
3858
3859 let push_strong_handle = {
3860 let worktree = worktree.read(cx);
3861 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3862 };
3863 if push_strong_handle {
3864 self.worktrees
3865 .push(WorktreeHandle::Strong(worktree.clone()));
3866 } else {
3867 cx.observe_release(&worktree, |this, _, cx| {
3868 this.worktrees
3869 .retain(|worktree| worktree.upgrade(cx).is_some());
3870 cx.notify();
3871 })
3872 .detach();
3873 self.worktrees
3874 .push(WorktreeHandle::Weak(worktree.downgrade()));
3875 }
3876 self.metadata_changed(true, cx);
3877 cx.emit(Event::WorktreeAdded);
3878 cx.notify();
3879 }
3880
3881 fn update_local_worktree_buffers(
3882 &mut self,
3883 worktree_handle: ModelHandle<Worktree>,
3884 cx: &mut ModelContext<Self>,
3885 ) {
3886 let snapshot = worktree_handle.read(cx).snapshot();
3887 let mut buffers_to_delete = Vec::new();
3888 let mut renamed_buffers = Vec::new();
3889 for (buffer_id, buffer) in &self.opened_buffers {
3890 if let Some(buffer) = buffer.upgrade(cx) {
3891 buffer.update(cx, |buffer, cx| {
3892 if let Some(old_file) = File::from_dyn(buffer.file()) {
3893 if old_file.worktree != worktree_handle {
3894 return;
3895 }
3896
3897 let new_file = if let Some(entry) = old_file
3898 .entry_id
3899 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3900 {
3901 File {
3902 is_local: true,
3903 entry_id: Some(entry.id),
3904 mtime: entry.mtime,
3905 path: entry.path.clone(),
3906 worktree: worktree_handle.clone(),
3907 }
3908 } else if let Some(entry) =
3909 snapshot.entry_for_path(old_file.path().as_ref())
3910 {
3911 File {
3912 is_local: true,
3913 entry_id: Some(entry.id),
3914 mtime: entry.mtime,
3915 path: entry.path.clone(),
3916 worktree: worktree_handle.clone(),
3917 }
3918 } else {
3919 File {
3920 is_local: true,
3921 entry_id: None,
3922 path: old_file.path().clone(),
3923 mtime: old_file.mtime(),
3924 worktree: worktree_handle.clone(),
3925 }
3926 };
3927
3928 let old_path = old_file.abs_path(cx);
3929 if new_file.abs_path(cx) != old_path {
3930 renamed_buffers.push((cx.handle(), old_path));
3931 }
3932
3933 if let Some(project_id) = self.shared_remote_id() {
3934 self.client
3935 .send(proto::UpdateBufferFile {
3936 project_id,
3937 buffer_id: *buffer_id as u64,
3938 file: Some(new_file.to_proto()),
3939 })
3940 .log_err();
3941 }
3942 buffer.file_updated(Box::new(new_file), cx).detach();
3943 }
3944 });
3945 } else {
3946 buffers_to_delete.push(*buffer_id);
3947 }
3948 }
3949
3950 for buffer_id in buffers_to_delete {
3951 self.opened_buffers.remove(&buffer_id);
3952 }
3953
3954 for (buffer, old_path) in renamed_buffers {
3955 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3956 self.assign_language_to_buffer(&buffer, cx);
3957 self.register_buffer_with_language_server(&buffer, cx);
3958 }
3959 }
3960
3961 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3962 let new_active_entry = entry.and_then(|project_path| {
3963 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3964 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3965 Some(entry.id)
3966 });
3967 if new_active_entry != self.active_entry {
3968 self.active_entry = new_active_entry;
3969 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3970 }
3971 }
3972
3973 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3974 self.language_server_statuses
3975 .values()
3976 .any(|status| status.pending_diagnostic_updates > 0)
3977 }
3978
3979 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3980 let mut summary = DiagnosticSummary::default();
3981 for (_, path_summary) in self.diagnostic_summaries(cx) {
3982 summary.error_count += path_summary.error_count;
3983 summary.warning_count += path_summary.warning_count;
3984 }
3985 summary
3986 }
3987
3988 pub fn diagnostic_summaries<'a>(
3989 &'a self,
3990 cx: &'a AppContext,
3991 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3992 self.worktrees(cx).flat_map(move |worktree| {
3993 let worktree = worktree.read(cx);
3994 let worktree_id = worktree.id();
3995 worktree
3996 .diagnostic_summaries()
3997 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3998 })
3999 }
4000
4001 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4002 if self
4003 .language_server_statuses
4004 .values()
4005 .map(|status| status.pending_diagnostic_updates)
4006 .sum::<isize>()
4007 == 1
4008 {
4009 cx.emit(Event::DiskBasedDiagnosticsStarted);
4010 }
4011 }
4012
4013 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4014 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4015 if self
4016 .language_server_statuses
4017 .values()
4018 .map(|status| status.pending_diagnostic_updates)
4019 .sum::<isize>()
4020 == 0
4021 {
4022 cx.emit(Event::DiskBasedDiagnosticsFinished);
4023 }
4024 }
4025
4026 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4027 self.active_entry
4028 }
4029
4030 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4031 self.worktree_for_id(path.worktree_id, cx)?
4032 .read(cx)
4033 .entry_for_path(&path.path)
4034 .map(|entry| entry.id)
4035 }
4036
4037 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4038 let worktree = self.worktree_for_entry(entry_id, cx)?;
4039 let worktree = worktree.read(cx);
4040 let worktree_id = worktree.id();
4041 let path = worktree.entry_for_id(entry_id)?.path.clone();
4042 Some(ProjectPath { worktree_id, path })
4043 }
4044
4045 // RPC message handlers
4046
4047 async fn handle_request_join_project(
4048 this: ModelHandle<Self>,
4049 message: TypedEnvelope<proto::RequestJoinProject>,
4050 _: Arc<Client>,
4051 mut cx: AsyncAppContext,
4052 ) -> Result<()> {
4053 let user_id = message.payload.requester_id;
4054 if this.read_with(&cx, |project, _| {
4055 project.collaborators.values().any(|c| c.user.id == user_id)
4056 }) {
4057 this.update(&mut cx, |this, cx| {
4058 this.respond_to_join_request(user_id, true, cx)
4059 });
4060 } else {
4061 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4062 let user = user_store
4063 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4064 .await?;
4065 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4066 }
4067 Ok(())
4068 }
4069
4070 async fn handle_unregister_project(
4071 this: ModelHandle<Self>,
4072 _: TypedEnvelope<proto::UnregisterProject>,
4073 _: Arc<Client>,
4074 mut cx: AsyncAppContext,
4075 ) -> Result<()> {
4076 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4077 Ok(())
4078 }
4079
4080 async fn handle_project_unshared(
4081 this: ModelHandle<Self>,
4082 _: TypedEnvelope<proto::ProjectUnshared>,
4083 _: Arc<Client>,
4084 mut cx: AsyncAppContext,
4085 ) -> Result<()> {
4086 this.update(&mut cx, |this, cx| this.unshared(cx));
4087 Ok(())
4088 }
4089
4090 async fn handle_add_collaborator(
4091 this: ModelHandle<Self>,
4092 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4093 _: Arc<Client>,
4094 mut cx: AsyncAppContext,
4095 ) -> Result<()> {
4096 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4097 let collaborator = envelope
4098 .payload
4099 .collaborator
4100 .take()
4101 .ok_or_else(|| anyhow!("empty collaborator"))?;
4102
4103 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4104 this.update(&mut cx, |this, cx| {
4105 this.collaborators
4106 .insert(collaborator.peer_id, collaborator);
4107 cx.notify();
4108 });
4109
4110 Ok(())
4111 }
4112
4113 async fn handle_remove_collaborator(
4114 this: ModelHandle<Self>,
4115 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4116 _: Arc<Client>,
4117 mut cx: AsyncAppContext,
4118 ) -> Result<()> {
4119 this.update(&mut cx, |this, cx| {
4120 let peer_id = PeerId(envelope.payload.peer_id);
4121 let replica_id = this
4122 .collaborators
4123 .remove(&peer_id)
4124 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4125 .replica_id;
4126 for (_, buffer) in &this.opened_buffers {
4127 if let Some(buffer) = buffer.upgrade(cx) {
4128 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4129 }
4130 }
4131
4132 cx.emit(Event::CollaboratorLeft(peer_id));
4133 cx.notify();
4134 Ok(())
4135 })
4136 }
4137
4138 async fn handle_join_project_request_cancelled(
4139 this: ModelHandle<Self>,
4140 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4141 _: Arc<Client>,
4142 mut cx: AsyncAppContext,
4143 ) -> Result<()> {
4144 let user = this
4145 .update(&mut cx, |this, cx| {
4146 this.user_store.update(cx, |user_store, cx| {
4147 user_store.fetch_user(envelope.payload.requester_id, cx)
4148 })
4149 })
4150 .await?;
4151
4152 this.update(&mut cx, |_, cx| {
4153 cx.emit(Event::ContactCancelledJoinRequest(user));
4154 });
4155
4156 Ok(())
4157 }
4158
4159 async fn handle_update_project(
4160 this: ModelHandle<Self>,
4161 envelope: TypedEnvelope<proto::UpdateProject>,
4162 client: Arc<Client>,
4163 mut cx: AsyncAppContext,
4164 ) -> Result<()> {
4165 this.update(&mut cx, |this, cx| {
4166 let replica_id = this.replica_id();
4167 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4168
4169 let mut old_worktrees_by_id = this
4170 .worktrees
4171 .drain(..)
4172 .filter_map(|worktree| {
4173 let worktree = worktree.upgrade(cx)?;
4174 Some((worktree.read(cx).id(), worktree))
4175 })
4176 .collect::<HashMap<_, _>>();
4177
4178 for worktree in envelope.payload.worktrees {
4179 if let Some(old_worktree) =
4180 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4181 {
4182 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4183 } else {
4184 let worktree = proto::Worktree {
4185 id: worktree.id,
4186 root_name: worktree.root_name,
4187 entries: Default::default(),
4188 diagnostic_summaries: Default::default(),
4189 visible: worktree.visible,
4190 scan_id: 0,
4191 };
4192 let (worktree, load_task) =
4193 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4194 this.add_worktree(&worktree, cx);
4195 load_task.detach();
4196 }
4197 }
4198
4199 this.metadata_changed(true, cx);
4200 for (id, _) in old_worktrees_by_id {
4201 cx.emit(Event::WorktreeRemoved(id));
4202 }
4203
4204 Ok(())
4205 })
4206 }
4207
4208 async fn handle_update_worktree(
4209 this: ModelHandle<Self>,
4210 envelope: TypedEnvelope<proto::UpdateWorktree>,
4211 _: Arc<Client>,
4212 mut cx: AsyncAppContext,
4213 ) -> Result<()> {
4214 this.update(&mut cx, |this, cx| {
4215 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4216 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4217 worktree.update(cx, |worktree, _| {
4218 let worktree = worktree.as_remote_mut().unwrap();
4219 worktree.update_from_remote(envelope)
4220 })?;
4221 }
4222 Ok(())
4223 })
4224 }
4225
4226 async fn handle_create_project_entry(
4227 this: ModelHandle<Self>,
4228 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4229 _: Arc<Client>,
4230 mut cx: AsyncAppContext,
4231 ) -> Result<proto::ProjectEntryResponse> {
4232 let worktree = this.update(&mut cx, |this, cx| {
4233 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4234 this.worktree_for_id(worktree_id, cx)
4235 .ok_or_else(|| anyhow!("worktree not found"))
4236 })?;
4237 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4238 let entry = worktree
4239 .update(&mut cx, |worktree, cx| {
4240 let worktree = worktree.as_local_mut().unwrap();
4241 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4242 worktree.create_entry(path, envelope.payload.is_directory, cx)
4243 })
4244 .await?;
4245 Ok(proto::ProjectEntryResponse {
4246 entry: Some((&entry).into()),
4247 worktree_scan_id: worktree_scan_id as u64,
4248 })
4249 }
4250
4251 async fn handle_rename_project_entry(
4252 this: ModelHandle<Self>,
4253 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4254 _: Arc<Client>,
4255 mut cx: AsyncAppContext,
4256 ) -> Result<proto::ProjectEntryResponse> {
4257 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4258 let worktree = this.read_with(&cx, |this, cx| {
4259 this.worktree_for_entry(entry_id, cx)
4260 .ok_or_else(|| anyhow!("worktree not found"))
4261 })?;
4262 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4263 let entry = worktree
4264 .update(&mut cx, |worktree, cx| {
4265 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4266 worktree
4267 .as_local_mut()
4268 .unwrap()
4269 .rename_entry(entry_id, new_path, cx)
4270 .ok_or_else(|| anyhow!("invalid entry"))
4271 })?
4272 .await?;
4273 Ok(proto::ProjectEntryResponse {
4274 entry: Some((&entry).into()),
4275 worktree_scan_id: worktree_scan_id as u64,
4276 })
4277 }
4278
4279 async fn handle_copy_project_entry(
4280 this: ModelHandle<Self>,
4281 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4282 _: Arc<Client>,
4283 mut cx: AsyncAppContext,
4284 ) -> Result<proto::ProjectEntryResponse> {
4285 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4286 let worktree = this.read_with(&cx, |this, cx| {
4287 this.worktree_for_entry(entry_id, cx)
4288 .ok_or_else(|| anyhow!("worktree not found"))
4289 })?;
4290 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4291 let entry = worktree
4292 .update(&mut cx, |worktree, cx| {
4293 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4294 worktree
4295 .as_local_mut()
4296 .unwrap()
4297 .copy_entry(entry_id, new_path, cx)
4298 .ok_or_else(|| anyhow!("invalid entry"))
4299 })?
4300 .await?;
4301 Ok(proto::ProjectEntryResponse {
4302 entry: Some((&entry).into()),
4303 worktree_scan_id: worktree_scan_id as u64,
4304 })
4305 }
4306
4307 async fn handle_delete_project_entry(
4308 this: ModelHandle<Self>,
4309 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4310 _: Arc<Client>,
4311 mut cx: AsyncAppContext,
4312 ) -> Result<proto::ProjectEntryResponse> {
4313 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4314 let worktree = this.read_with(&cx, |this, cx| {
4315 this.worktree_for_entry(entry_id, cx)
4316 .ok_or_else(|| anyhow!("worktree not found"))
4317 })?;
4318 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4319 worktree
4320 .update(&mut cx, |worktree, cx| {
4321 worktree
4322 .as_local_mut()
4323 .unwrap()
4324 .delete_entry(entry_id, cx)
4325 .ok_or_else(|| anyhow!("invalid entry"))
4326 })?
4327 .await?;
4328 Ok(proto::ProjectEntryResponse {
4329 entry: None,
4330 worktree_scan_id: worktree_scan_id as u64,
4331 })
4332 }
4333
4334 async fn handle_update_diagnostic_summary(
4335 this: ModelHandle<Self>,
4336 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4337 _: Arc<Client>,
4338 mut cx: AsyncAppContext,
4339 ) -> Result<()> {
4340 this.update(&mut cx, |this, cx| {
4341 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4342 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4343 if let Some(summary) = envelope.payload.summary {
4344 let project_path = ProjectPath {
4345 worktree_id,
4346 path: Path::new(&summary.path).into(),
4347 };
4348 worktree.update(cx, |worktree, _| {
4349 worktree
4350 .as_remote_mut()
4351 .unwrap()
4352 .update_diagnostic_summary(project_path.path.clone(), &summary);
4353 });
4354 cx.emit(Event::DiagnosticsUpdated(project_path));
4355 }
4356 }
4357 Ok(())
4358 })
4359 }
4360
4361 async fn handle_start_language_server(
4362 this: ModelHandle<Self>,
4363 envelope: TypedEnvelope<proto::StartLanguageServer>,
4364 _: Arc<Client>,
4365 mut cx: AsyncAppContext,
4366 ) -> Result<()> {
4367 let server = envelope
4368 .payload
4369 .server
4370 .ok_or_else(|| anyhow!("invalid server"))?;
4371 this.update(&mut cx, |this, cx| {
4372 this.language_server_statuses.insert(
4373 server.id as usize,
4374 LanguageServerStatus {
4375 name: server.name,
4376 pending_work: Default::default(),
4377 pending_diagnostic_updates: 0,
4378 },
4379 );
4380 cx.notify();
4381 });
4382 Ok(())
4383 }
4384
4385 async fn handle_update_language_server(
4386 this: ModelHandle<Self>,
4387 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4388 _: Arc<Client>,
4389 mut cx: AsyncAppContext,
4390 ) -> Result<()> {
4391 let language_server_id = envelope.payload.language_server_id as usize;
4392 match envelope
4393 .payload
4394 .variant
4395 .ok_or_else(|| anyhow!("invalid variant"))?
4396 {
4397 proto::update_language_server::Variant::WorkStart(payload) => {
4398 this.update(&mut cx, |this, cx| {
4399 this.on_lsp_work_start(language_server_id, payload.token, cx);
4400 })
4401 }
4402 proto::update_language_server::Variant::WorkProgress(payload) => {
4403 this.update(&mut cx, |this, cx| {
4404 this.on_lsp_work_progress(
4405 language_server_id,
4406 payload.token,
4407 LanguageServerProgress {
4408 message: payload.message,
4409 percentage: payload.percentage.map(|p| p as usize),
4410 last_update_at: Instant::now(),
4411 },
4412 cx,
4413 );
4414 })
4415 }
4416 proto::update_language_server::Variant::WorkEnd(payload) => {
4417 this.update(&mut cx, |this, cx| {
4418 this.on_lsp_work_end(language_server_id, payload.token, cx);
4419 })
4420 }
4421 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4422 this.update(&mut cx, |this, cx| {
4423 this.disk_based_diagnostics_started(cx);
4424 })
4425 }
4426 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4427 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4428 }
4429 }
4430
4431 Ok(())
4432 }
4433
4434 async fn handle_update_buffer(
4435 this: ModelHandle<Self>,
4436 envelope: TypedEnvelope<proto::UpdateBuffer>,
4437 _: Arc<Client>,
4438 mut cx: AsyncAppContext,
4439 ) -> Result<()> {
4440 this.update(&mut cx, |this, cx| {
4441 let payload = envelope.payload.clone();
4442 let buffer_id = payload.buffer_id;
4443 let ops = payload
4444 .operations
4445 .into_iter()
4446 .map(|op| language::proto::deserialize_operation(op))
4447 .collect::<Result<Vec<_>, _>>()?;
4448 let is_remote = this.is_remote();
4449 match this.opened_buffers.entry(buffer_id) {
4450 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4451 OpenBuffer::Strong(buffer) => {
4452 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4453 }
4454 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4455 OpenBuffer::Weak(_) => {}
4456 },
4457 hash_map::Entry::Vacant(e) => {
4458 assert!(
4459 is_remote,
4460 "received buffer update from {:?}",
4461 envelope.original_sender_id
4462 );
4463 e.insert(OpenBuffer::Loading(ops));
4464 }
4465 }
4466 Ok(())
4467 })
4468 }
4469
4470 async fn handle_update_buffer_file(
4471 this: ModelHandle<Self>,
4472 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4473 _: Arc<Client>,
4474 mut cx: AsyncAppContext,
4475 ) -> Result<()> {
4476 this.update(&mut cx, |this, cx| {
4477 let payload = envelope.payload.clone();
4478 let buffer_id = payload.buffer_id;
4479 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4480 let worktree = this
4481 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4482 .ok_or_else(|| anyhow!("no such worktree"))?;
4483 let file = File::from_proto(file, worktree.clone(), cx)?;
4484 let buffer = this
4485 .opened_buffers
4486 .get_mut(&buffer_id)
4487 .and_then(|b| b.upgrade(cx))
4488 .ok_or_else(|| anyhow!("no such buffer"))?;
4489 buffer.update(cx, |buffer, cx| {
4490 buffer.file_updated(Box::new(file), cx).detach();
4491 });
4492 Ok(())
4493 })
4494 }
4495
4496 async fn handle_save_buffer(
4497 this: ModelHandle<Self>,
4498 envelope: TypedEnvelope<proto::SaveBuffer>,
4499 _: Arc<Client>,
4500 mut cx: AsyncAppContext,
4501 ) -> Result<proto::BufferSaved> {
4502 let buffer_id = envelope.payload.buffer_id;
4503 let requested_version = deserialize_version(envelope.payload.version);
4504
4505 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4506 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4507 let buffer = this
4508 .opened_buffers
4509 .get(&buffer_id)
4510 .and_then(|buffer| buffer.upgrade(cx))
4511 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4512 Ok::<_, anyhow::Error>((project_id, buffer))
4513 })?;
4514 buffer
4515 .update(&mut cx, |buffer, _| {
4516 buffer.wait_for_version(requested_version)
4517 })
4518 .await;
4519
4520 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4521 Ok(proto::BufferSaved {
4522 project_id,
4523 buffer_id,
4524 version: serialize_version(&saved_version),
4525 mtime: Some(mtime.into()),
4526 })
4527 }
4528
4529 async fn handle_reload_buffers(
4530 this: ModelHandle<Self>,
4531 envelope: TypedEnvelope<proto::ReloadBuffers>,
4532 _: Arc<Client>,
4533 mut cx: AsyncAppContext,
4534 ) -> Result<proto::ReloadBuffersResponse> {
4535 let sender_id = envelope.original_sender_id()?;
4536 let reload = this.update(&mut cx, |this, cx| {
4537 let mut buffers = HashSet::default();
4538 for buffer_id in &envelope.payload.buffer_ids {
4539 buffers.insert(
4540 this.opened_buffers
4541 .get(buffer_id)
4542 .and_then(|buffer| buffer.upgrade(cx))
4543 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4544 );
4545 }
4546 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4547 })?;
4548
4549 let project_transaction = reload.await?;
4550 let project_transaction = this.update(&mut cx, |this, cx| {
4551 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4552 });
4553 Ok(proto::ReloadBuffersResponse {
4554 transaction: Some(project_transaction),
4555 })
4556 }
4557
4558 async fn handle_format_buffers(
4559 this: ModelHandle<Self>,
4560 envelope: TypedEnvelope<proto::FormatBuffers>,
4561 _: Arc<Client>,
4562 mut cx: AsyncAppContext,
4563 ) -> Result<proto::FormatBuffersResponse> {
4564 let sender_id = envelope.original_sender_id()?;
4565 let format = this.update(&mut cx, |this, cx| {
4566 let mut buffers = HashSet::default();
4567 for buffer_id in &envelope.payload.buffer_ids {
4568 buffers.insert(
4569 this.opened_buffers
4570 .get(buffer_id)
4571 .and_then(|buffer| buffer.upgrade(cx))
4572 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4573 );
4574 }
4575 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4576 })?;
4577
4578 let project_transaction = format.await?;
4579 let project_transaction = this.update(&mut cx, |this, cx| {
4580 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4581 });
4582 Ok(proto::FormatBuffersResponse {
4583 transaction: Some(project_transaction),
4584 })
4585 }
4586
4587 async fn handle_get_completions(
4588 this: ModelHandle<Self>,
4589 envelope: TypedEnvelope<proto::GetCompletions>,
4590 _: Arc<Client>,
4591 mut cx: AsyncAppContext,
4592 ) -> Result<proto::GetCompletionsResponse> {
4593 let position = envelope
4594 .payload
4595 .position
4596 .and_then(language::proto::deserialize_anchor)
4597 .ok_or_else(|| anyhow!("invalid position"))?;
4598 let version = deserialize_version(envelope.payload.version);
4599 let buffer = this.read_with(&cx, |this, cx| {
4600 this.opened_buffers
4601 .get(&envelope.payload.buffer_id)
4602 .and_then(|buffer| buffer.upgrade(cx))
4603 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4604 })?;
4605 buffer
4606 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4607 .await;
4608 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4609 let completions = this
4610 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4611 .await?;
4612
4613 Ok(proto::GetCompletionsResponse {
4614 completions: completions
4615 .iter()
4616 .map(language::proto::serialize_completion)
4617 .collect(),
4618 version: serialize_version(&version),
4619 })
4620 }
4621
4622 async fn handle_apply_additional_edits_for_completion(
4623 this: ModelHandle<Self>,
4624 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4625 _: Arc<Client>,
4626 mut cx: AsyncAppContext,
4627 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4628 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4629 let buffer = this
4630 .opened_buffers
4631 .get(&envelope.payload.buffer_id)
4632 .and_then(|buffer| buffer.upgrade(cx))
4633 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4634 let language = buffer.read(cx).language();
4635 let completion = language::proto::deserialize_completion(
4636 envelope
4637 .payload
4638 .completion
4639 .ok_or_else(|| anyhow!("invalid completion"))?,
4640 language,
4641 )?;
4642 Ok::<_, anyhow::Error>(
4643 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4644 )
4645 })?;
4646
4647 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4648 transaction: apply_additional_edits
4649 .await?
4650 .as_ref()
4651 .map(language::proto::serialize_transaction),
4652 })
4653 }
4654
4655 async fn handle_get_code_actions(
4656 this: ModelHandle<Self>,
4657 envelope: TypedEnvelope<proto::GetCodeActions>,
4658 _: Arc<Client>,
4659 mut cx: AsyncAppContext,
4660 ) -> Result<proto::GetCodeActionsResponse> {
4661 let start = envelope
4662 .payload
4663 .start
4664 .and_then(language::proto::deserialize_anchor)
4665 .ok_or_else(|| anyhow!("invalid start"))?;
4666 let end = envelope
4667 .payload
4668 .end
4669 .and_then(language::proto::deserialize_anchor)
4670 .ok_or_else(|| anyhow!("invalid end"))?;
4671 let buffer = this.update(&mut cx, |this, cx| {
4672 this.opened_buffers
4673 .get(&envelope.payload.buffer_id)
4674 .and_then(|buffer| buffer.upgrade(cx))
4675 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4676 })?;
4677 buffer
4678 .update(&mut cx, |buffer, _| {
4679 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4680 })
4681 .await;
4682
4683 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4684 let code_actions = this.update(&mut cx, |this, cx| {
4685 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4686 })?;
4687
4688 Ok(proto::GetCodeActionsResponse {
4689 actions: code_actions
4690 .await?
4691 .iter()
4692 .map(language::proto::serialize_code_action)
4693 .collect(),
4694 version: serialize_version(&version),
4695 })
4696 }
4697
4698 async fn handle_apply_code_action(
4699 this: ModelHandle<Self>,
4700 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4701 _: Arc<Client>,
4702 mut cx: AsyncAppContext,
4703 ) -> Result<proto::ApplyCodeActionResponse> {
4704 let sender_id = envelope.original_sender_id()?;
4705 let action = language::proto::deserialize_code_action(
4706 envelope
4707 .payload
4708 .action
4709 .ok_or_else(|| anyhow!("invalid action"))?,
4710 )?;
4711 let apply_code_action = this.update(&mut cx, |this, cx| {
4712 let buffer = this
4713 .opened_buffers
4714 .get(&envelope.payload.buffer_id)
4715 .and_then(|buffer| buffer.upgrade(cx))
4716 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4717 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4718 })?;
4719
4720 let project_transaction = apply_code_action.await?;
4721 let project_transaction = this.update(&mut cx, |this, cx| {
4722 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4723 });
4724 Ok(proto::ApplyCodeActionResponse {
4725 transaction: Some(project_transaction),
4726 })
4727 }
4728
4729 async fn handle_lsp_command<T: LspCommand>(
4730 this: ModelHandle<Self>,
4731 envelope: TypedEnvelope<T::ProtoRequest>,
4732 _: Arc<Client>,
4733 mut cx: AsyncAppContext,
4734 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4735 where
4736 <T::LspRequest as lsp::request::Request>::Result: Send,
4737 {
4738 let sender_id = envelope.original_sender_id()?;
4739 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4740 let buffer_handle = this.read_with(&cx, |this, _| {
4741 this.opened_buffers
4742 .get(&buffer_id)
4743 .and_then(|buffer| buffer.upgrade(&cx))
4744 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4745 })?;
4746 let request = T::from_proto(
4747 envelope.payload,
4748 this.clone(),
4749 buffer_handle.clone(),
4750 cx.clone(),
4751 )
4752 .await?;
4753 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4754 let response = this
4755 .update(&mut cx, |this, cx| {
4756 this.request_lsp(buffer_handle, request, cx)
4757 })
4758 .await?;
4759 this.update(&mut cx, |this, cx| {
4760 Ok(T::response_to_proto(
4761 response,
4762 this,
4763 sender_id,
4764 &buffer_version,
4765 cx,
4766 ))
4767 })
4768 }
4769
4770 async fn handle_get_project_symbols(
4771 this: ModelHandle<Self>,
4772 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4773 _: Arc<Client>,
4774 mut cx: AsyncAppContext,
4775 ) -> Result<proto::GetProjectSymbolsResponse> {
4776 let symbols = this
4777 .update(&mut cx, |this, cx| {
4778 this.symbols(&envelope.payload.query, cx)
4779 })
4780 .await?;
4781
4782 Ok(proto::GetProjectSymbolsResponse {
4783 symbols: symbols.iter().map(serialize_symbol).collect(),
4784 })
4785 }
4786
4787 async fn handle_search_project(
4788 this: ModelHandle<Self>,
4789 envelope: TypedEnvelope<proto::SearchProject>,
4790 _: Arc<Client>,
4791 mut cx: AsyncAppContext,
4792 ) -> Result<proto::SearchProjectResponse> {
4793 let peer_id = envelope.original_sender_id()?;
4794 let query = SearchQuery::from_proto(envelope.payload)?;
4795 let result = this
4796 .update(&mut cx, |this, cx| this.search(query, cx))
4797 .await?;
4798
4799 this.update(&mut cx, |this, cx| {
4800 let mut locations = Vec::new();
4801 for (buffer, ranges) in result {
4802 for range in ranges {
4803 let start = serialize_anchor(&range.start);
4804 let end = serialize_anchor(&range.end);
4805 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4806 locations.push(proto::Location {
4807 buffer: Some(buffer),
4808 start: Some(start),
4809 end: Some(end),
4810 });
4811 }
4812 }
4813 Ok(proto::SearchProjectResponse { locations })
4814 })
4815 }
4816
4817 async fn handle_open_buffer_for_symbol(
4818 this: ModelHandle<Self>,
4819 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4820 _: Arc<Client>,
4821 mut cx: AsyncAppContext,
4822 ) -> Result<proto::OpenBufferForSymbolResponse> {
4823 let peer_id = envelope.original_sender_id()?;
4824 let symbol = envelope
4825 .payload
4826 .symbol
4827 .ok_or_else(|| anyhow!("invalid symbol"))?;
4828 let symbol = this.read_with(&cx, |this, _| {
4829 let symbol = this.deserialize_symbol(symbol)?;
4830 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4831 if signature == symbol.signature {
4832 Ok(symbol)
4833 } else {
4834 Err(anyhow!("invalid symbol signature"))
4835 }
4836 })?;
4837 let buffer = this
4838 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4839 .await?;
4840
4841 Ok(proto::OpenBufferForSymbolResponse {
4842 buffer: Some(this.update(&mut cx, |this, cx| {
4843 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4844 })),
4845 })
4846 }
4847
4848 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4849 let mut hasher = Sha256::new();
4850 hasher.update(worktree_id.to_proto().to_be_bytes());
4851 hasher.update(path.to_string_lossy().as_bytes());
4852 hasher.update(self.nonce.to_be_bytes());
4853 hasher.finalize().as_slice().try_into().unwrap()
4854 }
4855
4856 async fn handle_open_buffer_by_id(
4857 this: ModelHandle<Self>,
4858 envelope: TypedEnvelope<proto::OpenBufferById>,
4859 _: Arc<Client>,
4860 mut cx: AsyncAppContext,
4861 ) -> Result<proto::OpenBufferResponse> {
4862 let peer_id = envelope.original_sender_id()?;
4863 let buffer = this
4864 .update(&mut cx, |this, cx| {
4865 this.open_buffer_by_id(envelope.payload.id, cx)
4866 })
4867 .await?;
4868 this.update(&mut cx, |this, cx| {
4869 Ok(proto::OpenBufferResponse {
4870 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4871 })
4872 })
4873 }
4874
4875 async fn handle_open_buffer_by_path(
4876 this: ModelHandle<Self>,
4877 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4878 _: Arc<Client>,
4879 mut cx: AsyncAppContext,
4880 ) -> Result<proto::OpenBufferResponse> {
4881 let peer_id = envelope.original_sender_id()?;
4882 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4883 let open_buffer = this.update(&mut cx, |this, cx| {
4884 this.open_buffer(
4885 ProjectPath {
4886 worktree_id,
4887 path: PathBuf::from(envelope.payload.path).into(),
4888 },
4889 cx,
4890 )
4891 });
4892
4893 let buffer = open_buffer.await?;
4894 this.update(&mut cx, |this, cx| {
4895 Ok(proto::OpenBufferResponse {
4896 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4897 })
4898 })
4899 }
4900
4901 fn serialize_project_transaction_for_peer(
4902 &mut self,
4903 project_transaction: ProjectTransaction,
4904 peer_id: PeerId,
4905 cx: &AppContext,
4906 ) -> proto::ProjectTransaction {
4907 let mut serialized_transaction = proto::ProjectTransaction {
4908 buffers: Default::default(),
4909 transactions: Default::default(),
4910 };
4911 for (buffer, transaction) in project_transaction.0 {
4912 serialized_transaction
4913 .buffers
4914 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4915 serialized_transaction
4916 .transactions
4917 .push(language::proto::serialize_transaction(&transaction));
4918 }
4919 serialized_transaction
4920 }
4921
4922 fn deserialize_project_transaction(
4923 &mut self,
4924 message: proto::ProjectTransaction,
4925 push_to_history: bool,
4926 cx: &mut ModelContext<Self>,
4927 ) -> Task<Result<ProjectTransaction>> {
4928 cx.spawn(|this, mut cx| async move {
4929 let mut project_transaction = ProjectTransaction::default();
4930 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4931 let buffer = this
4932 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4933 .await?;
4934 let transaction = language::proto::deserialize_transaction(transaction)?;
4935 project_transaction.0.insert(buffer, transaction);
4936 }
4937
4938 for (buffer, transaction) in &project_transaction.0 {
4939 buffer
4940 .update(&mut cx, |buffer, _| {
4941 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4942 })
4943 .await;
4944
4945 if push_to_history {
4946 buffer.update(&mut cx, |buffer, _| {
4947 buffer.push_transaction(transaction.clone(), Instant::now());
4948 });
4949 }
4950 }
4951
4952 Ok(project_transaction)
4953 })
4954 }
4955
4956 fn serialize_buffer_for_peer(
4957 &mut self,
4958 buffer: &ModelHandle<Buffer>,
4959 peer_id: PeerId,
4960 cx: &AppContext,
4961 ) -> proto::Buffer {
4962 let buffer_id = buffer.read(cx).remote_id();
4963 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4964 if shared_buffers.insert(buffer_id) {
4965 proto::Buffer {
4966 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4967 }
4968 } else {
4969 proto::Buffer {
4970 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4971 }
4972 }
4973 }
4974
4975 fn deserialize_buffer(
4976 &mut self,
4977 buffer: proto::Buffer,
4978 cx: &mut ModelContext<Self>,
4979 ) -> Task<Result<ModelHandle<Buffer>>> {
4980 let replica_id = self.replica_id();
4981
4982 let opened_buffer_tx = self.opened_buffer.0.clone();
4983 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4984 cx.spawn(|this, mut cx| async move {
4985 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4986 proto::buffer::Variant::Id(id) => {
4987 let buffer = loop {
4988 let buffer = this.read_with(&cx, |this, cx| {
4989 this.opened_buffers
4990 .get(&id)
4991 .and_then(|buffer| buffer.upgrade(cx))
4992 });
4993 if let Some(buffer) = buffer {
4994 break buffer;
4995 }
4996 opened_buffer_rx
4997 .next()
4998 .await
4999 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5000 };
5001 Ok(buffer)
5002 }
5003 proto::buffer::Variant::State(mut buffer) => {
5004 let mut buffer_worktree = None;
5005 let mut buffer_file = None;
5006 if let Some(file) = buffer.file.take() {
5007 this.read_with(&cx, |this, cx| {
5008 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5009 let worktree =
5010 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5011 anyhow!("no worktree found for id {}", file.worktree_id)
5012 })?;
5013 buffer_file =
5014 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5015 as Box<dyn language::File>);
5016 buffer_worktree = Some(worktree);
5017 Ok::<_, anyhow::Error>(())
5018 })?;
5019 }
5020
5021 let buffer = cx.add_model(|cx| {
5022 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5023 });
5024
5025 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5026
5027 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5028 Ok(buffer)
5029 }
5030 }
5031 })
5032 }
5033
5034 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5035 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5036 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5037 let start = serialized_symbol
5038 .start
5039 .ok_or_else(|| anyhow!("invalid start"))?;
5040 let end = serialized_symbol
5041 .end
5042 .ok_or_else(|| anyhow!("invalid end"))?;
5043 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5044 let path = PathBuf::from(serialized_symbol.path);
5045 let language = self.languages.select_language(&path);
5046 Ok(Symbol {
5047 source_worktree_id,
5048 worktree_id,
5049 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5050 label: language
5051 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5052 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5053 name: serialized_symbol.name,
5054 path,
5055 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5056 kind,
5057 signature: serialized_symbol
5058 .signature
5059 .try_into()
5060 .map_err(|_| anyhow!("invalid signature"))?,
5061 })
5062 }
5063
5064 async fn handle_buffer_saved(
5065 this: ModelHandle<Self>,
5066 envelope: TypedEnvelope<proto::BufferSaved>,
5067 _: Arc<Client>,
5068 mut cx: AsyncAppContext,
5069 ) -> Result<()> {
5070 let version = deserialize_version(envelope.payload.version);
5071 let mtime = envelope
5072 .payload
5073 .mtime
5074 .ok_or_else(|| anyhow!("missing mtime"))?
5075 .into();
5076
5077 this.update(&mut cx, |this, cx| {
5078 let buffer = this
5079 .opened_buffers
5080 .get(&envelope.payload.buffer_id)
5081 .and_then(|buffer| buffer.upgrade(cx));
5082 if let Some(buffer) = buffer {
5083 buffer.update(cx, |buffer, cx| {
5084 buffer.did_save(version, mtime, None, cx);
5085 });
5086 }
5087 Ok(())
5088 })
5089 }
5090
5091 async fn handle_buffer_reloaded(
5092 this: ModelHandle<Self>,
5093 envelope: TypedEnvelope<proto::BufferReloaded>,
5094 _: Arc<Client>,
5095 mut cx: AsyncAppContext,
5096 ) -> Result<()> {
5097 let payload = envelope.payload.clone();
5098 let version = deserialize_version(payload.version);
5099 let mtime = payload
5100 .mtime
5101 .ok_or_else(|| anyhow!("missing mtime"))?
5102 .into();
5103 this.update(&mut cx, |this, cx| {
5104 let buffer = this
5105 .opened_buffers
5106 .get(&payload.buffer_id)
5107 .and_then(|buffer| buffer.upgrade(cx));
5108 if let Some(buffer) = buffer {
5109 buffer.update(cx, |buffer, cx| {
5110 buffer.did_reload(version, mtime, cx);
5111 });
5112 }
5113 Ok(())
5114 })
5115 }
5116
5117 pub fn match_paths<'a>(
5118 &self,
5119 query: &'a str,
5120 include_ignored: bool,
5121 smart_case: bool,
5122 max_results: usize,
5123 cancel_flag: &'a AtomicBool,
5124 cx: &AppContext,
5125 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5126 let worktrees = self
5127 .worktrees(cx)
5128 .filter(|worktree| worktree.read(cx).is_visible())
5129 .collect::<Vec<_>>();
5130 let include_root_name = worktrees.len() > 1;
5131 let candidate_sets = worktrees
5132 .into_iter()
5133 .map(|worktree| CandidateSet {
5134 snapshot: worktree.read(cx).snapshot(),
5135 include_ignored,
5136 include_root_name,
5137 })
5138 .collect::<Vec<_>>();
5139
5140 let background = cx.background().clone();
5141 async move {
5142 fuzzy::match_paths(
5143 candidate_sets.as_slice(),
5144 query,
5145 smart_case,
5146 max_results,
5147 cancel_flag,
5148 background,
5149 )
5150 .await
5151 }
5152 }
5153
5154 fn edits_from_lsp(
5155 &mut self,
5156 buffer: &ModelHandle<Buffer>,
5157 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5158 version: Option<i32>,
5159 cx: &mut ModelContext<Self>,
5160 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5161 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5162 cx.background().spawn(async move {
5163 let snapshot = snapshot?;
5164 let mut lsp_edits = lsp_edits
5165 .into_iter()
5166 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5167 .collect::<Vec<_>>();
5168 lsp_edits.sort_by_key(|(range, _)| range.start);
5169
5170 let mut lsp_edits = lsp_edits.into_iter().peekable();
5171 let mut edits = Vec::new();
5172 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5173 // Combine any LSP edits that are adjacent.
5174 //
5175 // Also, combine LSP edits that are separated from each other by only
5176 // a newline. This is important because for some code actions,
5177 // Rust-analyzer rewrites the entire buffer via a series of edits that
5178 // are separated by unchanged newline characters.
5179 //
5180 // In order for the diffing logic below to work properly, any edits that
5181 // cancel each other out must be combined into one.
5182 while let Some((next_range, next_text)) = lsp_edits.peek() {
5183 if next_range.start > range.end {
5184 if next_range.start.row > range.end.row + 1
5185 || next_range.start.column > 0
5186 || snapshot.clip_point_utf16(
5187 PointUtf16::new(range.end.row, u32::MAX),
5188 Bias::Left,
5189 ) > range.end
5190 {
5191 break;
5192 }
5193 new_text.push('\n');
5194 }
5195 range.end = next_range.end;
5196 new_text.push_str(&next_text);
5197 lsp_edits.next();
5198 }
5199
5200 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5201 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5202 {
5203 return Err(anyhow!("invalid edits received from language server"));
5204 }
5205
5206 // For multiline edits, perform a diff of the old and new text so that
5207 // we can identify the changes more precisely, preserving the locations
5208 // of any anchors positioned in the unchanged regions.
5209 if range.end.row > range.start.row {
5210 let mut offset = range.start.to_offset(&snapshot);
5211 let old_text = snapshot.text_for_range(range).collect::<String>();
5212
5213 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5214 let mut moved_since_edit = true;
5215 for change in diff.iter_all_changes() {
5216 let tag = change.tag();
5217 let value = change.value();
5218 match tag {
5219 ChangeTag::Equal => {
5220 offset += value.len();
5221 moved_since_edit = true;
5222 }
5223 ChangeTag::Delete => {
5224 let start = snapshot.anchor_after(offset);
5225 let end = snapshot.anchor_before(offset + value.len());
5226 if moved_since_edit {
5227 edits.push((start..end, String::new()));
5228 } else {
5229 edits.last_mut().unwrap().0.end = end;
5230 }
5231 offset += value.len();
5232 moved_since_edit = false;
5233 }
5234 ChangeTag::Insert => {
5235 if moved_since_edit {
5236 let anchor = snapshot.anchor_after(offset);
5237 edits.push((anchor.clone()..anchor, value.to_string()));
5238 } else {
5239 edits.last_mut().unwrap().1.push_str(value);
5240 }
5241 moved_since_edit = false;
5242 }
5243 }
5244 }
5245 } else if range.end == range.start {
5246 let anchor = snapshot.anchor_after(range.start);
5247 edits.push((anchor.clone()..anchor, new_text));
5248 } else {
5249 let edit_start = snapshot.anchor_after(range.start);
5250 let edit_end = snapshot.anchor_before(range.end);
5251 edits.push((edit_start..edit_end, new_text));
5252 }
5253 }
5254
5255 Ok(edits)
5256 })
5257 }
5258
5259 fn buffer_snapshot_for_lsp_version(
5260 &mut self,
5261 buffer: &ModelHandle<Buffer>,
5262 version: Option<i32>,
5263 cx: &AppContext,
5264 ) -> Result<TextBufferSnapshot> {
5265 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5266
5267 if let Some(version) = version {
5268 let buffer_id = buffer.read(cx).remote_id();
5269 let snapshots = self
5270 .buffer_snapshots
5271 .get_mut(&buffer_id)
5272 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5273 let mut found_snapshot = None;
5274 snapshots.retain(|(snapshot_version, snapshot)| {
5275 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5276 false
5277 } else {
5278 if *snapshot_version == version {
5279 found_snapshot = Some(snapshot.clone());
5280 }
5281 true
5282 }
5283 });
5284
5285 found_snapshot.ok_or_else(|| {
5286 anyhow!(
5287 "snapshot not found for buffer {} at version {}",
5288 buffer_id,
5289 version
5290 )
5291 })
5292 } else {
5293 Ok((buffer.read(cx)).text_snapshot())
5294 }
5295 }
5296
5297 fn language_server_for_buffer(
5298 &self,
5299 buffer: &Buffer,
5300 cx: &AppContext,
5301 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5302 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5303 let worktree_id = file.worktree_id(cx);
5304 self.language_servers
5305 .get(&(worktree_id, language.lsp_adapter()?.name()))
5306 } else {
5307 None
5308 }
5309 }
5310}
5311
5312impl ProjectStore {
5313 pub fn new(db: Arc<Db>) -> Self {
5314 Self {
5315 db,
5316 projects: Default::default(),
5317 }
5318 }
5319
5320 pub fn projects<'a>(
5321 &'a self,
5322 cx: &'a AppContext,
5323 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5324 self.projects
5325 .iter()
5326 .filter_map(|project| project.upgrade(cx))
5327 }
5328
5329 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5330 if let Err(ix) = self
5331 .projects
5332 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5333 {
5334 self.projects.insert(ix, project);
5335 }
5336 cx.notify();
5337 }
5338
5339 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5340 let mut did_change = false;
5341 self.projects.retain(|project| {
5342 if project.is_upgradable(cx) {
5343 true
5344 } else {
5345 did_change = true;
5346 false
5347 }
5348 });
5349 if did_change {
5350 cx.notify();
5351 }
5352 }
5353}
5354
5355impl WorktreeHandle {
5356 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5357 match self {
5358 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5359 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5360 }
5361 }
5362}
5363
5364impl OpenBuffer {
5365 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5366 match self {
5367 OpenBuffer::Strong(handle) => Some(handle.clone()),
5368 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5369 OpenBuffer::Loading(_) => None,
5370 }
5371 }
5372}
5373
5374struct CandidateSet {
5375 snapshot: Snapshot,
5376 include_ignored: bool,
5377 include_root_name: bool,
5378}
5379
5380impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5381 type Candidates = CandidateSetIter<'a>;
5382
5383 fn id(&self) -> usize {
5384 self.snapshot.id().to_usize()
5385 }
5386
5387 fn len(&self) -> usize {
5388 if self.include_ignored {
5389 self.snapshot.file_count()
5390 } else {
5391 self.snapshot.visible_file_count()
5392 }
5393 }
5394
5395 fn prefix(&self) -> Arc<str> {
5396 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5397 self.snapshot.root_name().into()
5398 } else if self.include_root_name {
5399 format!("{}/", self.snapshot.root_name()).into()
5400 } else {
5401 "".into()
5402 }
5403 }
5404
5405 fn candidates(&'a self, start: usize) -> Self::Candidates {
5406 CandidateSetIter {
5407 traversal: self.snapshot.files(self.include_ignored, start),
5408 }
5409 }
5410}
5411
5412struct CandidateSetIter<'a> {
5413 traversal: Traversal<'a>,
5414}
5415
5416impl<'a> Iterator for CandidateSetIter<'a> {
5417 type Item = PathMatchCandidate<'a>;
5418
5419 fn next(&mut self) -> Option<Self::Item> {
5420 self.traversal.next().map(|entry| {
5421 if let EntryKind::File(char_bag) = entry.kind {
5422 PathMatchCandidate {
5423 path: &entry.path,
5424 char_bag,
5425 }
5426 } else {
5427 unreachable!()
5428 }
5429 })
5430 }
5431}
5432
5433impl Entity for ProjectStore {
5434 type Event = ();
5435}
5436
5437impl Entity for Project {
5438 type Event = Event;
5439
5440 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5441 self.project_store.update(cx, ProjectStore::prune_projects);
5442
5443 match &self.client_state {
5444 ProjectClientState::Local { remote_id_rx, .. } => {
5445 if let Some(project_id) = *remote_id_rx.borrow() {
5446 self.client
5447 .send(proto::UnregisterProject { project_id })
5448 .log_err();
5449 }
5450 }
5451 ProjectClientState::Remote { remote_id, .. } => {
5452 self.client
5453 .send(proto::LeaveProject {
5454 project_id: *remote_id,
5455 })
5456 .log_err();
5457 }
5458 }
5459 }
5460
5461 fn app_will_quit(
5462 &mut self,
5463 _: &mut MutableAppContext,
5464 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5465 let shutdown_futures = self
5466 .language_servers
5467 .drain()
5468 .filter_map(|(_, (_, server))| server.shutdown())
5469 .collect::<Vec<_>>();
5470 Some(
5471 async move {
5472 futures::future::join_all(shutdown_futures).await;
5473 }
5474 .boxed(),
5475 )
5476 }
5477}
5478
5479impl Collaborator {
5480 fn from_proto(
5481 message: proto::Collaborator,
5482 user_store: &ModelHandle<UserStore>,
5483 cx: &mut AsyncAppContext,
5484 ) -> impl Future<Output = Result<Self>> {
5485 let user = user_store.update(cx, |user_store, cx| {
5486 user_store.fetch_user(message.user_id, cx)
5487 });
5488
5489 async move {
5490 Ok(Self {
5491 peer_id: PeerId(message.peer_id),
5492 user: user.await?,
5493 replica_id: message.replica_id as ReplicaId,
5494 })
5495 }
5496 }
5497}
5498
5499impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5500 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5501 Self {
5502 worktree_id,
5503 path: path.as_ref().into(),
5504 }
5505 }
5506}
5507
5508impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5509 fn from(options: lsp::CreateFileOptions) -> Self {
5510 Self {
5511 overwrite: options.overwrite.unwrap_or(false),
5512 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5513 }
5514 }
5515}
5516
5517impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5518 fn from(options: lsp::RenameFileOptions) -> Self {
5519 Self {
5520 overwrite: options.overwrite.unwrap_or(false),
5521 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5522 }
5523 }
5524}
5525
5526impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5527 fn from(options: lsp::DeleteFileOptions) -> Self {
5528 Self {
5529 recursive: options.recursive.unwrap_or(false),
5530 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5531 }
5532 }
5533}
5534
5535fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5536 proto::Symbol {
5537 source_worktree_id: symbol.source_worktree_id.to_proto(),
5538 worktree_id: symbol.worktree_id.to_proto(),
5539 language_server_name: symbol.language_server_name.0.to_string(),
5540 name: symbol.name.clone(),
5541 kind: unsafe { mem::transmute(symbol.kind) },
5542 path: symbol.path.to_string_lossy().to_string(),
5543 start: Some(proto::Point {
5544 row: symbol.range.start.row,
5545 column: symbol.range.start.column,
5546 }),
5547 end: Some(proto::Point {
5548 row: symbol.range.end.row,
5549 column: symbol.range.end.column,
5550 }),
5551 signature: symbol.signature.to_vec(),
5552 }
5553}
5554
5555fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5556 let mut path_components = path.components();
5557 let mut base_components = base.components();
5558 let mut components: Vec<Component> = Vec::new();
5559 loop {
5560 match (path_components.next(), base_components.next()) {
5561 (None, None) => break,
5562 (Some(a), None) => {
5563 components.push(a);
5564 components.extend(path_components.by_ref());
5565 break;
5566 }
5567 (None, _) => components.push(Component::ParentDir),
5568 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5569 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5570 (Some(a), Some(_)) => {
5571 components.push(Component::ParentDir);
5572 for _ in base_components {
5573 components.push(Component::ParentDir);
5574 }
5575 components.push(a);
5576 components.extend(path_components.by_ref());
5577 break;
5578 }
5579 }
5580 }
5581 components.iter().map(|c| c.as_os_str()).collect()
5582}
5583
5584impl Item for Buffer {
5585 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5586 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5587 }
5588}
5589
5590#[cfg(test)]
5591mod tests {
5592 use crate::worktree::WorktreeHandle;
5593
5594 use super::{Event, *};
5595 use fs::RealFs;
5596 use futures::{future, StreamExt};
5597 use gpui::test::subscribe;
5598 use language::{
5599 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5600 OffsetRangeExt, Point, ToPoint,
5601 };
5602 use lsp::Url;
5603 use serde_json::json;
5604 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5605 use unindent::Unindent as _;
5606 use util::{assert_set_eq, test::temp_tree};
5607
5608 #[gpui::test]
5609 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5610 let dir = temp_tree(json!({
5611 "root": {
5612 "apple": "",
5613 "banana": {
5614 "carrot": {
5615 "date": "",
5616 "endive": "",
5617 }
5618 },
5619 "fennel": {
5620 "grape": "",
5621 }
5622 }
5623 }));
5624
5625 let root_link_path = dir.path().join("root_link");
5626 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5627 unix::fs::symlink(
5628 &dir.path().join("root/fennel"),
5629 &dir.path().join("root/finnochio"),
5630 )
5631 .unwrap();
5632
5633 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5634
5635 project.read_with(cx, |project, cx| {
5636 let tree = project.worktrees(cx).next().unwrap().read(cx);
5637 assert_eq!(tree.file_count(), 5);
5638 assert_eq!(
5639 tree.inode_for_path("fennel/grape"),
5640 tree.inode_for_path("finnochio/grape")
5641 );
5642 });
5643
5644 let cancel_flag = Default::default();
5645 let results = project
5646 .read_with(cx, |project, cx| {
5647 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5648 })
5649 .await;
5650 assert_eq!(
5651 results
5652 .into_iter()
5653 .map(|result| result.path)
5654 .collect::<Vec<Arc<Path>>>(),
5655 vec![
5656 PathBuf::from("banana/carrot/date").into(),
5657 PathBuf::from("banana/carrot/endive").into(),
5658 ]
5659 );
5660 }
5661
5662 #[gpui::test]
5663 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5664 cx.foreground().forbid_parking();
5665
5666 let mut rust_language = Language::new(
5667 LanguageConfig {
5668 name: "Rust".into(),
5669 path_suffixes: vec!["rs".to_string()],
5670 ..Default::default()
5671 },
5672 Some(tree_sitter_rust::language()),
5673 );
5674 let mut json_language = Language::new(
5675 LanguageConfig {
5676 name: "JSON".into(),
5677 path_suffixes: vec!["json".to_string()],
5678 ..Default::default()
5679 },
5680 None,
5681 );
5682 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5683 name: "the-rust-language-server",
5684 capabilities: lsp::ServerCapabilities {
5685 completion_provider: Some(lsp::CompletionOptions {
5686 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5687 ..Default::default()
5688 }),
5689 ..Default::default()
5690 },
5691 ..Default::default()
5692 });
5693 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5694 name: "the-json-language-server",
5695 capabilities: lsp::ServerCapabilities {
5696 completion_provider: Some(lsp::CompletionOptions {
5697 trigger_characters: Some(vec![":".to_string()]),
5698 ..Default::default()
5699 }),
5700 ..Default::default()
5701 },
5702 ..Default::default()
5703 });
5704
5705 let fs = FakeFs::new(cx.background());
5706 fs.insert_tree(
5707 "/the-root",
5708 json!({
5709 "test.rs": "const A: i32 = 1;",
5710 "test2.rs": "",
5711 "Cargo.toml": "a = 1",
5712 "package.json": "{\"a\": 1}",
5713 }),
5714 )
5715 .await;
5716
5717 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5718 project.update(cx, |project, _| {
5719 project.languages.add(Arc::new(rust_language));
5720 project.languages.add(Arc::new(json_language));
5721 });
5722
5723 // Open a buffer without an associated language server.
5724 let toml_buffer = project
5725 .update(cx, |project, cx| {
5726 project.open_local_buffer("/the-root/Cargo.toml", cx)
5727 })
5728 .await
5729 .unwrap();
5730
5731 // Open a buffer with an associated language server.
5732 let rust_buffer = project
5733 .update(cx, |project, cx| {
5734 project.open_local_buffer("/the-root/test.rs", cx)
5735 })
5736 .await
5737 .unwrap();
5738
5739 // A server is started up, and it is notified about Rust files.
5740 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5741 assert_eq!(
5742 fake_rust_server
5743 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5744 .await
5745 .text_document,
5746 lsp::TextDocumentItem {
5747 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5748 version: 0,
5749 text: "const A: i32 = 1;".to_string(),
5750 language_id: Default::default()
5751 }
5752 );
5753
5754 // The buffer is configured based on the language server's capabilities.
5755 rust_buffer.read_with(cx, |buffer, _| {
5756 assert_eq!(
5757 buffer.completion_triggers(),
5758 &[".".to_string(), "::".to_string()]
5759 );
5760 });
5761 toml_buffer.read_with(cx, |buffer, _| {
5762 assert!(buffer.completion_triggers().is_empty());
5763 });
5764
5765 // Edit a buffer. The changes are reported to the language server.
5766 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5767 assert_eq!(
5768 fake_rust_server
5769 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5770 .await
5771 .text_document,
5772 lsp::VersionedTextDocumentIdentifier::new(
5773 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5774 1
5775 )
5776 );
5777
5778 // Open a third buffer with a different associated language server.
5779 let json_buffer = project
5780 .update(cx, |project, cx| {
5781 project.open_local_buffer("/the-root/package.json", cx)
5782 })
5783 .await
5784 .unwrap();
5785
5786 // A json language server is started up and is only notified about the json buffer.
5787 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5788 assert_eq!(
5789 fake_json_server
5790 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5791 .await
5792 .text_document,
5793 lsp::TextDocumentItem {
5794 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5795 version: 0,
5796 text: "{\"a\": 1}".to_string(),
5797 language_id: Default::default()
5798 }
5799 );
5800
5801 // This buffer is configured based on the second language server's
5802 // capabilities.
5803 json_buffer.read_with(cx, |buffer, _| {
5804 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5805 });
5806
5807 // When opening another buffer whose language server is already running,
5808 // it is also configured based on the existing language server's capabilities.
5809 let rust_buffer2 = project
5810 .update(cx, |project, cx| {
5811 project.open_local_buffer("/the-root/test2.rs", cx)
5812 })
5813 .await
5814 .unwrap();
5815 rust_buffer2.read_with(cx, |buffer, _| {
5816 assert_eq!(
5817 buffer.completion_triggers(),
5818 &[".".to_string(), "::".to_string()]
5819 );
5820 });
5821
5822 // Changes are reported only to servers matching the buffer's language.
5823 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5824 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5825 assert_eq!(
5826 fake_rust_server
5827 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5828 .await
5829 .text_document,
5830 lsp::VersionedTextDocumentIdentifier::new(
5831 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5832 1
5833 )
5834 );
5835
5836 // Save notifications are reported to all servers.
5837 toml_buffer
5838 .update(cx, |buffer, cx| buffer.save(cx))
5839 .await
5840 .unwrap();
5841 assert_eq!(
5842 fake_rust_server
5843 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5844 .await
5845 .text_document,
5846 lsp::TextDocumentIdentifier::new(
5847 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5848 )
5849 );
5850 assert_eq!(
5851 fake_json_server
5852 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5853 .await
5854 .text_document,
5855 lsp::TextDocumentIdentifier::new(
5856 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5857 )
5858 );
5859
5860 // Renames are reported only to servers matching the buffer's language.
5861 fs.rename(
5862 Path::new("/the-root/test2.rs"),
5863 Path::new("/the-root/test3.rs"),
5864 Default::default(),
5865 )
5866 .await
5867 .unwrap();
5868 assert_eq!(
5869 fake_rust_server
5870 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5871 .await
5872 .text_document,
5873 lsp::TextDocumentIdentifier::new(
5874 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5875 ),
5876 );
5877 assert_eq!(
5878 fake_rust_server
5879 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5880 .await
5881 .text_document,
5882 lsp::TextDocumentItem {
5883 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5884 version: 0,
5885 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5886 language_id: Default::default()
5887 },
5888 );
5889
5890 rust_buffer2.update(cx, |buffer, cx| {
5891 buffer.update_diagnostics(
5892 DiagnosticSet::from_sorted_entries(
5893 vec![DiagnosticEntry {
5894 diagnostic: Default::default(),
5895 range: Anchor::MIN..Anchor::MAX,
5896 }],
5897 &buffer.snapshot(),
5898 ),
5899 cx,
5900 );
5901 assert_eq!(
5902 buffer
5903 .snapshot()
5904 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5905 .count(),
5906 1
5907 );
5908 });
5909
5910 // When the rename changes the extension of the file, the buffer gets closed on the old
5911 // language server and gets opened on the new one.
5912 fs.rename(
5913 Path::new("/the-root/test3.rs"),
5914 Path::new("/the-root/test3.json"),
5915 Default::default(),
5916 )
5917 .await
5918 .unwrap();
5919 assert_eq!(
5920 fake_rust_server
5921 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5922 .await
5923 .text_document,
5924 lsp::TextDocumentIdentifier::new(
5925 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5926 ),
5927 );
5928 assert_eq!(
5929 fake_json_server
5930 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5931 .await
5932 .text_document,
5933 lsp::TextDocumentItem {
5934 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5935 version: 0,
5936 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5937 language_id: Default::default()
5938 },
5939 );
5940
5941 // We clear the diagnostics, since the language has changed.
5942 rust_buffer2.read_with(cx, |buffer, _| {
5943 assert_eq!(
5944 buffer
5945 .snapshot()
5946 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5947 .count(),
5948 0
5949 );
5950 });
5951
5952 // The renamed file's version resets after changing language server.
5953 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5954 assert_eq!(
5955 fake_json_server
5956 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5957 .await
5958 .text_document,
5959 lsp::VersionedTextDocumentIdentifier::new(
5960 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5961 1
5962 )
5963 );
5964
5965 // Restart language servers
5966 project.update(cx, |project, cx| {
5967 project.restart_language_servers_for_buffers(
5968 vec![rust_buffer.clone(), json_buffer.clone()],
5969 cx,
5970 );
5971 });
5972
5973 let mut rust_shutdown_requests = fake_rust_server
5974 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5975 let mut json_shutdown_requests = fake_json_server
5976 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5977 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5978
5979 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5980 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5981
5982 // Ensure rust document is reopened in new rust language server
5983 assert_eq!(
5984 fake_rust_server
5985 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5986 .await
5987 .text_document,
5988 lsp::TextDocumentItem {
5989 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5990 version: 1,
5991 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5992 language_id: Default::default()
5993 }
5994 );
5995
5996 // Ensure json documents are reopened in new json language server
5997 assert_set_eq!(
5998 [
5999 fake_json_server
6000 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6001 .await
6002 .text_document,
6003 fake_json_server
6004 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6005 .await
6006 .text_document,
6007 ],
6008 [
6009 lsp::TextDocumentItem {
6010 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6011 version: 0,
6012 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6013 language_id: Default::default()
6014 },
6015 lsp::TextDocumentItem {
6016 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6017 version: 1,
6018 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6019 language_id: Default::default()
6020 }
6021 ]
6022 );
6023
6024 // Close notifications are reported only to servers matching the buffer's language.
6025 cx.update(|_| drop(json_buffer));
6026 let close_message = lsp::DidCloseTextDocumentParams {
6027 text_document: lsp::TextDocumentIdentifier::new(
6028 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6029 ),
6030 };
6031 assert_eq!(
6032 fake_json_server
6033 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6034 .await,
6035 close_message,
6036 );
6037 }
6038
6039 #[gpui::test]
6040 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6041 cx.foreground().forbid_parking();
6042
6043 let fs = FakeFs::new(cx.background());
6044 fs.insert_tree(
6045 "/dir",
6046 json!({
6047 "a.rs": "let a = 1;",
6048 "b.rs": "let b = 2;"
6049 }),
6050 )
6051 .await;
6052
6053 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6054
6055 let buffer_a = project
6056 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6057 .await
6058 .unwrap();
6059 let buffer_b = project
6060 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6061 .await
6062 .unwrap();
6063
6064 project.update(cx, |project, cx| {
6065 project
6066 .update_diagnostics(
6067 lsp::PublishDiagnosticsParams {
6068 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6069 version: None,
6070 diagnostics: vec![lsp::Diagnostic {
6071 range: lsp::Range::new(
6072 lsp::Position::new(0, 4),
6073 lsp::Position::new(0, 5),
6074 ),
6075 severity: Some(lsp::DiagnosticSeverity::ERROR),
6076 message: "error 1".to_string(),
6077 ..Default::default()
6078 }],
6079 },
6080 &[],
6081 cx,
6082 )
6083 .unwrap();
6084 project
6085 .update_diagnostics(
6086 lsp::PublishDiagnosticsParams {
6087 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6088 version: None,
6089 diagnostics: vec![lsp::Diagnostic {
6090 range: lsp::Range::new(
6091 lsp::Position::new(0, 4),
6092 lsp::Position::new(0, 5),
6093 ),
6094 severity: Some(lsp::DiagnosticSeverity::WARNING),
6095 message: "error 2".to_string(),
6096 ..Default::default()
6097 }],
6098 },
6099 &[],
6100 cx,
6101 )
6102 .unwrap();
6103 });
6104
6105 buffer_a.read_with(cx, |buffer, _| {
6106 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6107 assert_eq!(
6108 chunks
6109 .iter()
6110 .map(|(s, d)| (s.as_str(), *d))
6111 .collect::<Vec<_>>(),
6112 &[
6113 ("let ", None),
6114 ("a", Some(DiagnosticSeverity::ERROR)),
6115 (" = 1;", None),
6116 ]
6117 );
6118 });
6119 buffer_b.read_with(cx, |buffer, _| {
6120 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6121 assert_eq!(
6122 chunks
6123 .iter()
6124 .map(|(s, d)| (s.as_str(), *d))
6125 .collect::<Vec<_>>(),
6126 &[
6127 ("let ", None),
6128 ("b", Some(DiagnosticSeverity::WARNING)),
6129 (" = 2;", None),
6130 ]
6131 );
6132 });
6133 }
6134
6135 #[gpui::test]
6136 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6137 cx.foreground().forbid_parking();
6138
6139 let progress_token = "the-progress-token";
6140 let mut language = Language::new(
6141 LanguageConfig {
6142 name: "Rust".into(),
6143 path_suffixes: vec!["rs".to_string()],
6144 ..Default::default()
6145 },
6146 Some(tree_sitter_rust::language()),
6147 );
6148 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6149 disk_based_diagnostics_progress_token: Some(progress_token),
6150 disk_based_diagnostics_sources: &["disk"],
6151 ..Default::default()
6152 });
6153
6154 let fs = FakeFs::new(cx.background());
6155 fs.insert_tree(
6156 "/dir",
6157 json!({
6158 "a.rs": "fn a() { A }",
6159 "b.rs": "const y: i32 = 1",
6160 }),
6161 )
6162 .await;
6163
6164 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6165 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6166 let worktree_id =
6167 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6168
6169 // Cause worktree to start the fake language server
6170 let _buffer = project
6171 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6172 .await
6173 .unwrap();
6174
6175 let mut events = subscribe(&project, cx);
6176
6177 let mut fake_server = fake_servers.next().await.unwrap();
6178 fake_server.start_progress(progress_token).await;
6179 assert_eq!(
6180 events.next().await.unwrap(),
6181 Event::DiskBasedDiagnosticsStarted
6182 );
6183
6184 fake_server.start_progress(progress_token).await;
6185 fake_server.end_progress(progress_token).await;
6186 fake_server.start_progress(progress_token).await;
6187
6188 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6189 lsp::PublishDiagnosticsParams {
6190 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6191 version: None,
6192 diagnostics: vec![lsp::Diagnostic {
6193 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6194 severity: Some(lsp::DiagnosticSeverity::ERROR),
6195 message: "undefined variable 'A'".to_string(),
6196 ..Default::default()
6197 }],
6198 },
6199 );
6200 assert_eq!(
6201 events.next().await.unwrap(),
6202 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6203 );
6204
6205 fake_server.end_progress(progress_token).await;
6206 fake_server.end_progress(progress_token).await;
6207 assert_eq!(
6208 events.next().await.unwrap(),
6209 Event::DiskBasedDiagnosticsUpdated
6210 );
6211 assert_eq!(
6212 events.next().await.unwrap(),
6213 Event::DiskBasedDiagnosticsFinished
6214 );
6215
6216 let buffer = project
6217 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6218 .await
6219 .unwrap();
6220
6221 buffer.read_with(cx, |buffer, _| {
6222 let snapshot = buffer.snapshot();
6223 let diagnostics = snapshot
6224 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6225 .collect::<Vec<_>>();
6226 assert_eq!(
6227 diagnostics,
6228 &[DiagnosticEntry {
6229 range: Point::new(0, 9)..Point::new(0, 10),
6230 diagnostic: Diagnostic {
6231 severity: lsp::DiagnosticSeverity::ERROR,
6232 message: "undefined variable 'A'".to_string(),
6233 group_id: 0,
6234 is_primary: true,
6235 ..Default::default()
6236 }
6237 }]
6238 )
6239 });
6240
6241 // Ensure publishing empty diagnostics twice only results in one update event.
6242 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6243 lsp::PublishDiagnosticsParams {
6244 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6245 version: None,
6246 diagnostics: Default::default(),
6247 },
6248 );
6249 assert_eq!(
6250 events.next().await.unwrap(),
6251 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6252 );
6253
6254 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6255 lsp::PublishDiagnosticsParams {
6256 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6257 version: None,
6258 diagnostics: Default::default(),
6259 },
6260 );
6261 cx.foreground().run_until_parked();
6262 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6263 }
6264
6265 #[gpui::test]
6266 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6267 cx.foreground().forbid_parking();
6268
6269 let progress_token = "the-progress-token";
6270 let mut language = Language::new(
6271 LanguageConfig {
6272 path_suffixes: vec!["rs".to_string()],
6273 ..Default::default()
6274 },
6275 None,
6276 );
6277 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6278 disk_based_diagnostics_sources: &["disk"],
6279 disk_based_diagnostics_progress_token: Some(progress_token),
6280 ..Default::default()
6281 });
6282
6283 let fs = FakeFs::new(cx.background());
6284 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6285
6286 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6287 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6288
6289 let buffer = project
6290 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6291 .await
6292 .unwrap();
6293
6294 // Simulate diagnostics starting to update.
6295 let mut fake_server = fake_servers.next().await.unwrap();
6296 fake_server.start_progress(progress_token).await;
6297
6298 // Restart the server before the diagnostics finish updating.
6299 project.update(cx, |project, cx| {
6300 project.restart_language_servers_for_buffers([buffer], cx);
6301 });
6302 let mut events = subscribe(&project, cx);
6303
6304 // Simulate the newly started server sending more diagnostics.
6305 let mut fake_server = fake_servers.next().await.unwrap();
6306 fake_server.start_progress(progress_token).await;
6307 assert_eq!(
6308 events.next().await.unwrap(),
6309 Event::DiskBasedDiagnosticsStarted
6310 );
6311
6312 // All diagnostics are considered done, despite the old server's diagnostic
6313 // task never completing.
6314 fake_server.end_progress(progress_token).await;
6315 assert_eq!(
6316 events.next().await.unwrap(),
6317 Event::DiskBasedDiagnosticsUpdated
6318 );
6319 assert_eq!(
6320 events.next().await.unwrap(),
6321 Event::DiskBasedDiagnosticsFinished
6322 );
6323 project.read_with(cx, |project, _| {
6324 assert!(!project.is_running_disk_based_diagnostics());
6325 });
6326 }
6327
6328 #[gpui::test]
6329 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6330 cx.foreground().forbid_parking();
6331
6332 let mut language = Language::new(
6333 LanguageConfig {
6334 name: "Rust".into(),
6335 path_suffixes: vec!["rs".to_string()],
6336 ..Default::default()
6337 },
6338 Some(tree_sitter_rust::language()),
6339 );
6340 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6341 disk_based_diagnostics_sources: &["disk"],
6342 ..Default::default()
6343 });
6344
6345 let text = "
6346 fn a() { A }
6347 fn b() { BB }
6348 fn c() { CCC }
6349 "
6350 .unindent();
6351
6352 let fs = FakeFs::new(cx.background());
6353 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6354
6355 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6356 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6357
6358 let buffer = project
6359 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6360 .await
6361 .unwrap();
6362
6363 let mut fake_server = fake_servers.next().await.unwrap();
6364 let open_notification = fake_server
6365 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6366 .await;
6367
6368 // Edit the buffer, moving the content down
6369 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6370 let change_notification_1 = fake_server
6371 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6372 .await;
6373 assert!(
6374 change_notification_1.text_document.version > open_notification.text_document.version
6375 );
6376
6377 // Report some diagnostics for the initial version of the buffer
6378 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6379 lsp::PublishDiagnosticsParams {
6380 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6381 version: Some(open_notification.text_document.version),
6382 diagnostics: vec![
6383 lsp::Diagnostic {
6384 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6385 severity: Some(DiagnosticSeverity::ERROR),
6386 message: "undefined variable 'A'".to_string(),
6387 source: Some("disk".to_string()),
6388 ..Default::default()
6389 },
6390 lsp::Diagnostic {
6391 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6392 severity: Some(DiagnosticSeverity::ERROR),
6393 message: "undefined variable 'BB'".to_string(),
6394 source: Some("disk".to_string()),
6395 ..Default::default()
6396 },
6397 lsp::Diagnostic {
6398 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6399 severity: Some(DiagnosticSeverity::ERROR),
6400 source: Some("disk".to_string()),
6401 message: "undefined variable 'CCC'".to_string(),
6402 ..Default::default()
6403 },
6404 ],
6405 },
6406 );
6407
6408 // The diagnostics have moved down since they were created.
6409 buffer.next_notification(cx).await;
6410 buffer.read_with(cx, |buffer, _| {
6411 assert_eq!(
6412 buffer
6413 .snapshot()
6414 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6415 .collect::<Vec<_>>(),
6416 &[
6417 DiagnosticEntry {
6418 range: Point::new(3, 9)..Point::new(3, 11),
6419 diagnostic: Diagnostic {
6420 severity: DiagnosticSeverity::ERROR,
6421 message: "undefined variable 'BB'".to_string(),
6422 is_disk_based: true,
6423 group_id: 1,
6424 is_primary: true,
6425 ..Default::default()
6426 },
6427 },
6428 DiagnosticEntry {
6429 range: Point::new(4, 9)..Point::new(4, 12),
6430 diagnostic: Diagnostic {
6431 severity: DiagnosticSeverity::ERROR,
6432 message: "undefined variable 'CCC'".to_string(),
6433 is_disk_based: true,
6434 group_id: 2,
6435 is_primary: true,
6436 ..Default::default()
6437 }
6438 }
6439 ]
6440 );
6441 assert_eq!(
6442 chunks_with_diagnostics(buffer, 0..buffer.len()),
6443 [
6444 ("\n\nfn a() { ".to_string(), None),
6445 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6446 (" }\nfn b() { ".to_string(), None),
6447 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6448 (" }\nfn c() { ".to_string(), None),
6449 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6450 (" }\n".to_string(), None),
6451 ]
6452 );
6453 assert_eq!(
6454 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6455 [
6456 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6457 (" }\nfn c() { ".to_string(), None),
6458 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6459 ]
6460 );
6461 });
6462
6463 // Ensure overlapping diagnostics are highlighted correctly.
6464 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6465 lsp::PublishDiagnosticsParams {
6466 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6467 version: Some(open_notification.text_document.version),
6468 diagnostics: vec![
6469 lsp::Diagnostic {
6470 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6471 severity: Some(DiagnosticSeverity::ERROR),
6472 message: "undefined variable 'A'".to_string(),
6473 source: Some("disk".to_string()),
6474 ..Default::default()
6475 },
6476 lsp::Diagnostic {
6477 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6478 severity: Some(DiagnosticSeverity::WARNING),
6479 message: "unreachable statement".to_string(),
6480 source: Some("disk".to_string()),
6481 ..Default::default()
6482 },
6483 ],
6484 },
6485 );
6486
6487 buffer.next_notification(cx).await;
6488 buffer.read_with(cx, |buffer, _| {
6489 assert_eq!(
6490 buffer
6491 .snapshot()
6492 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6493 .collect::<Vec<_>>(),
6494 &[
6495 DiagnosticEntry {
6496 range: Point::new(2, 9)..Point::new(2, 12),
6497 diagnostic: Diagnostic {
6498 severity: DiagnosticSeverity::WARNING,
6499 message: "unreachable statement".to_string(),
6500 is_disk_based: true,
6501 group_id: 4,
6502 is_primary: true,
6503 ..Default::default()
6504 }
6505 },
6506 DiagnosticEntry {
6507 range: Point::new(2, 9)..Point::new(2, 10),
6508 diagnostic: Diagnostic {
6509 severity: DiagnosticSeverity::ERROR,
6510 message: "undefined variable 'A'".to_string(),
6511 is_disk_based: true,
6512 group_id: 3,
6513 is_primary: true,
6514 ..Default::default()
6515 },
6516 }
6517 ]
6518 );
6519 assert_eq!(
6520 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6521 [
6522 ("fn a() { ".to_string(), None),
6523 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6524 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6525 ("\n".to_string(), None),
6526 ]
6527 );
6528 assert_eq!(
6529 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6530 [
6531 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6532 ("\n".to_string(), None),
6533 ]
6534 );
6535 });
6536
6537 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6538 // changes since the last save.
6539 buffer.update(cx, |buffer, cx| {
6540 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6541 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6542 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6543 });
6544 let change_notification_2 = fake_server
6545 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6546 .await;
6547 assert!(
6548 change_notification_2.text_document.version
6549 > change_notification_1.text_document.version
6550 );
6551
6552 // Handle out-of-order diagnostics
6553 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6554 lsp::PublishDiagnosticsParams {
6555 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6556 version: Some(change_notification_2.text_document.version),
6557 diagnostics: vec![
6558 lsp::Diagnostic {
6559 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6560 severity: Some(DiagnosticSeverity::ERROR),
6561 message: "undefined variable 'BB'".to_string(),
6562 source: Some("disk".to_string()),
6563 ..Default::default()
6564 },
6565 lsp::Diagnostic {
6566 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6567 severity: Some(DiagnosticSeverity::WARNING),
6568 message: "undefined variable 'A'".to_string(),
6569 source: Some("disk".to_string()),
6570 ..Default::default()
6571 },
6572 ],
6573 },
6574 );
6575
6576 buffer.next_notification(cx).await;
6577 buffer.read_with(cx, |buffer, _| {
6578 assert_eq!(
6579 buffer
6580 .snapshot()
6581 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6582 .collect::<Vec<_>>(),
6583 &[
6584 DiagnosticEntry {
6585 range: Point::new(2, 21)..Point::new(2, 22),
6586 diagnostic: Diagnostic {
6587 severity: DiagnosticSeverity::WARNING,
6588 message: "undefined variable 'A'".to_string(),
6589 is_disk_based: true,
6590 group_id: 6,
6591 is_primary: true,
6592 ..Default::default()
6593 }
6594 },
6595 DiagnosticEntry {
6596 range: Point::new(3, 9)..Point::new(3, 14),
6597 diagnostic: Diagnostic {
6598 severity: DiagnosticSeverity::ERROR,
6599 message: "undefined variable 'BB'".to_string(),
6600 is_disk_based: true,
6601 group_id: 5,
6602 is_primary: true,
6603 ..Default::default()
6604 },
6605 }
6606 ]
6607 );
6608 });
6609 }
6610
6611 #[gpui::test]
6612 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6613 cx.foreground().forbid_parking();
6614
6615 let text = concat!(
6616 "let one = ;\n", //
6617 "let two = \n",
6618 "let three = 3;\n",
6619 );
6620
6621 let fs = FakeFs::new(cx.background());
6622 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6623
6624 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6625 let buffer = project
6626 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6627 .await
6628 .unwrap();
6629
6630 project.update(cx, |project, cx| {
6631 project
6632 .update_buffer_diagnostics(
6633 &buffer,
6634 vec![
6635 DiagnosticEntry {
6636 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6637 diagnostic: Diagnostic {
6638 severity: DiagnosticSeverity::ERROR,
6639 message: "syntax error 1".to_string(),
6640 ..Default::default()
6641 },
6642 },
6643 DiagnosticEntry {
6644 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6645 diagnostic: Diagnostic {
6646 severity: DiagnosticSeverity::ERROR,
6647 message: "syntax error 2".to_string(),
6648 ..Default::default()
6649 },
6650 },
6651 ],
6652 None,
6653 cx,
6654 )
6655 .unwrap();
6656 });
6657
6658 // An empty range is extended forward to include the following character.
6659 // At the end of a line, an empty range is extended backward to include
6660 // the preceding character.
6661 buffer.read_with(cx, |buffer, _| {
6662 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6663 assert_eq!(
6664 chunks
6665 .iter()
6666 .map(|(s, d)| (s.as_str(), *d))
6667 .collect::<Vec<_>>(),
6668 &[
6669 ("let one = ", None),
6670 (";", Some(DiagnosticSeverity::ERROR)),
6671 ("\nlet two =", None),
6672 (" ", Some(DiagnosticSeverity::ERROR)),
6673 ("\nlet three = 3;\n", None)
6674 ]
6675 );
6676 });
6677 }
6678
6679 #[gpui::test]
6680 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6681 cx.foreground().forbid_parking();
6682
6683 let mut language = Language::new(
6684 LanguageConfig {
6685 name: "Rust".into(),
6686 path_suffixes: vec!["rs".to_string()],
6687 ..Default::default()
6688 },
6689 Some(tree_sitter_rust::language()),
6690 );
6691 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6692
6693 let text = "
6694 fn a() {
6695 f1();
6696 }
6697 fn b() {
6698 f2();
6699 }
6700 fn c() {
6701 f3();
6702 }
6703 "
6704 .unindent();
6705
6706 let fs = FakeFs::new(cx.background());
6707 fs.insert_tree(
6708 "/dir",
6709 json!({
6710 "a.rs": text.clone(),
6711 }),
6712 )
6713 .await;
6714
6715 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6716 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6717 let buffer = project
6718 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6719 .await
6720 .unwrap();
6721
6722 let mut fake_server = fake_servers.next().await.unwrap();
6723 let lsp_document_version = fake_server
6724 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6725 .await
6726 .text_document
6727 .version;
6728
6729 // Simulate editing the buffer after the language server computes some edits.
6730 buffer.update(cx, |buffer, cx| {
6731 buffer.edit(
6732 [(
6733 Point::new(0, 0)..Point::new(0, 0),
6734 "// above first function\n",
6735 )],
6736 cx,
6737 );
6738 buffer.edit(
6739 [(
6740 Point::new(2, 0)..Point::new(2, 0),
6741 " // inside first function\n",
6742 )],
6743 cx,
6744 );
6745 buffer.edit(
6746 [(
6747 Point::new(6, 4)..Point::new(6, 4),
6748 "// inside second function ",
6749 )],
6750 cx,
6751 );
6752
6753 assert_eq!(
6754 buffer.text(),
6755 "
6756 // above first function
6757 fn a() {
6758 // inside first function
6759 f1();
6760 }
6761 fn b() {
6762 // inside second function f2();
6763 }
6764 fn c() {
6765 f3();
6766 }
6767 "
6768 .unindent()
6769 );
6770 });
6771
6772 let edits = project
6773 .update(cx, |project, cx| {
6774 project.edits_from_lsp(
6775 &buffer,
6776 vec![
6777 // replace body of first function
6778 lsp::TextEdit {
6779 range: lsp::Range::new(
6780 lsp::Position::new(0, 0),
6781 lsp::Position::new(3, 0),
6782 ),
6783 new_text: "
6784 fn a() {
6785 f10();
6786 }
6787 "
6788 .unindent(),
6789 },
6790 // edit inside second function
6791 lsp::TextEdit {
6792 range: lsp::Range::new(
6793 lsp::Position::new(4, 6),
6794 lsp::Position::new(4, 6),
6795 ),
6796 new_text: "00".into(),
6797 },
6798 // edit inside third function via two distinct edits
6799 lsp::TextEdit {
6800 range: lsp::Range::new(
6801 lsp::Position::new(7, 5),
6802 lsp::Position::new(7, 5),
6803 ),
6804 new_text: "4000".into(),
6805 },
6806 lsp::TextEdit {
6807 range: lsp::Range::new(
6808 lsp::Position::new(7, 5),
6809 lsp::Position::new(7, 6),
6810 ),
6811 new_text: "".into(),
6812 },
6813 ],
6814 Some(lsp_document_version),
6815 cx,
6816 )
6817 })
6818 .await
6819 .unwrap();
6820
6821 buffer.update(cx, |buffer, cx| {
6822 for (range, new_text) in edits {
6823 buffer.edit([(range, new_text)], cx);
6824 }
6825 assert_eq!(
6826 buffer.text(),
6827 "
6828 // above first function
6829 fn a() {
6830 // inside first function
6831 f10();
6832 }
6833 fn b() {
6834 // inside second function f200();
6835 }
6836 fn c() {
6837 f4000();
6838 }
6839 "
6840 .unindent()
6841 );
6842 });
6843 }
6844
6845 #[gpui::test]
6846 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6847 cx.foreground().forbid_parking();
6848
6849 let text = "
6850 use a::b;
6851 use a::c;
6852
6853 fn f() {
6854 b();
6855 c();
6856 }
6857 "
6858 .unindent();
6859
6860 let fs = FakeFs::new(cx.background());
6861 fs.insert_tree(
6862 "/dir",
6863 json!({
6864 "a.rs": text.clone(),
6865 }),
6866 )
6867 .await;
6868
6869 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6870 let buffer = project
6871 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6872 .await
6873 .unwrap();
6874
6875 // Simulate the language server sending us a small edit in the form of a very large diff.
6876 // Rust-analyzer does this when performing a merge-imports code action.
6877 let edits = project
6878 .update(cx, |project, cx| {
6879 project.edits_from_lsp(
6880 &buffer,
6881 [
6882 // Replace the first use statement without editing the semicolon.
6883 lsp::TextEdit {
6884 range: lsp::Range::new(
6885 lsp::Position::new(0, 4),
6886 lsp::Position::new(0, 8),
6887 ),
6888 new_text: "a::{b, c}".into(),
6889 },
6890 // Reinsert the remainder of the file between the semicolon and the final
6891 // newline of the file.
6892 lsp::TextEdit {
6893 range: lsp::Range::new(
6894 lsp::Position::new(0, 9),
6895 lsp::Position::new(0, 9),
6896 ),
6897 new_text: "\n\n".into(),
6898 },
6899 lsp::TextEdit {
6900 range: lsp::Range::new(
6901 lsp::Position::new(0, 9),
6902 lsp::Position::new(0, 9),
6903 ),
6904 new_text: "
6905 fn f() {
6906 b();
6907 c();
6908 }"
6909 .unindent(),
6910 },
6911 // Delete everything after the first newline of the file.
6912 lsp::TextEdit {
6913 range: lsp::Range::new(
6914 lsp::Position::new(1, 0),
6915 lsp::Position::new(7, 0),
6916 ),
6917 new_text: "".into(),
6918 },
6919 ],
6920 None,
6921 cx,
6922 )
6923 })
6924 .await
6925 .unwrap();
6926
6927 buffer.update(cx, |buffer, cx| {
6928 let edits = edits
6929 .into_iter()
6930 .map(|(range, text)| {
6931 (
6932 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6933 text,
6934 )
6935 })
6936 .collect::<Vec<_>>();
6937
6938 assert_eq!(
6939 edits,
6940 [
6941 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6942 (Point::new(1, 0)..Point::new(2, 0), "".into())
6943 ]
6944 );
6945
6946 for (range, new_text) in edits {
6947 buffer.edit([(range, new_text)], cx);
6948 }
6949 assert_eq!(
6950 buffer.text(),
6951 "
6952 use a::{b, c};
6953
6954 fn f() {
6955 b();
6956 c();
6957 }
6958 "
6959 .unindent()
6960 );
6961 });
6962 }
6963
6964 #[gpui::test]
6965 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
6966 cx.foreground().forbid_parking();
6967
6968 let text = "
6969 use a::b;
6970 use a::c;
6971
6972 fn f() {
6973 b();
6974 c();
6975 }
6976 "
6977 .unindent();
6978
6979 let fs = FakeFs::new(cx.background());
6980 fs.insert_tree(
6981 "/dir",
6982 json!({
6983 "a.rs": text.clone(),
6984 }),
6985 )
6986 .await;
6987
6988 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6989 let buffer = project
6990 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6991 .await
6992 .unwrap();
6993
6994 // Simulate the language server sending us edits in a non-ordered fashion,
6995 // with ranges sometimes being inverted.
6996 let edits = project
6997 .update(cx, |project, cx| {
6998 project.edits_from_lsp(
6999 &buffer,
7000 [
7001 lsp::TextEdit {
7002 range: lsp::Range::new(
7003 lsp::Position::new(0, 9),
7004 lsp::Position::new(0, 9),
7005 ),
7006 new_text: "\n\n".into(),
7007 },
7008 lsp::TextEdit {
7009 range: lsp::Range::new(
7010 lsp::Position::new(0, 8),
7011 lsp::Position::new(0, 4),
7012 ),
7013 new_text: "a::{b, c}".into(),
7014 },
7015 lsp::TextEdit {
7016 range: lsp::Range::new(
7017 lsp::Position::new(1, 0),
7018 lsp::Position::new(7, 0),
7019 ),
7020 new_text: "".into(),
7021 },
7022 lsp::TextEdit {
7023 range: lsp::Range::new(
7024 lsp::Position::new(0, 9),
7025 lsp::Position::new(0, 9),
7026 ),
7027 new_text: "
7028 fn f() {
7029 b();
7030 c();
7031 }"
7032 .unindent(),
7033 },
7034 ],
7035 None,
7036 cx,
7037 )
7038 })
7039 .await
7040 .unwrap();
7041
7042 buffer.update(cx, |buffer, cx| {
7043 let edits = edits
7044 .into_iter()
7045 .map(|(range, text)| {
7046 (
7047 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7048 text,
7049 )
7050 })
7051 .collect::<Vec<_>>();
7052
7053 assert_eq!(
7054 edits,
7055 [
7056 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7057 (Point::new(1, 0)..Point::new(2, 0), "".into())
7058 ]
7059 );
7060
7061 for (range, new_text) in edits {
7062 buffer.edit([(range, new_text)], cx);
7063 }
7064 assert_eq!(
7065 buffer.text(),
7066 "
7067 use a::{b, c};
7068
7069 fn f() {
7070 b();
7071 c();
7072 }
7073 "
7074 .unindent()
7075 );
7076 });
7077 }
7078
7079 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7080 buffer: &Buffer,
7081 range: Range<T>,
7082 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7083 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7084 for chunk in buffer.snapshot().chunks(range, true) {
7085 if chunks.last().map_or(false, |prev_chunk| {
7086 prev_chunk.1 == chunk.diagnostic_severity
7087 }) {
7088 chunks.last_mut().unwrap().0.push_str(chunk.text);
7089 } else {
7090 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7091 }
7092 }
7093 chunks
7094 }
7095
7096 #[gpui::test]
7097 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7098 let dir = temp_tree(json!({
7099 "root": {
7100 "dir1": {},
7101 "dir2": {
7102 "dir3": {}
7103 }
7104 }
7105 }));
7106
7107 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7108 let cancel_flag = Default::default();
7109 let results = project
7110 .read_with(cx, |project, cx| {
7111 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7112 })
7113 .await;
7114
7115 assert!(results.is_empty());
7116 }
7117
7118 #[gpui::test(iterations = 10)]
7119 async fn test_definition(cx: &mut gpui::TestAppContext) {
7120 let mut language = Language::new(
7121 LanguageConfig {
7122 name: "Rust".into(),
7123 path_suffixes: vec!["rs".to_string()],
7124 ..Default::default()
7125 },
7126 Some(tree_sitter_rust::language()),
7127 );
7128 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7129
7130 let fs = FakeFs::new(cx.background());
7131 fs.insert_tree(
7132 "/dir",
7133 json!({
7134 "a.rs": "const fn a() { A }",
7135 "b.rs": "const y: i32 = crate::a()",
7136 }),
7137 )
7138 .await;
7139
7140 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7141 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7142
7143 let buffer = project
7144 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7145 .await
7146 .unwrap();
7147
7148 let fake_server = fake_servers.next().await.unwrap();
7149 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7150 let params = params.text_document_position_params;
7151 assert_eq!(
7152 params.text_document.uri.to_file_path().unwrap(),
7153 Path::new("/dir/b.rs"),
7154 );
7155 assert_eq!(params.position, lsp::Position::new(0, 22));
7156
7157 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7158 lsp::Location::new(
7159 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7160 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7161 ),
7162 )))
7163 });
7164
7165 let mut definitions = project
7166 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7167 .await
7168 .unwrap();
7169
7170 assert_eq!(definitions.len(), 1);
7171 let definition = definitions.pop().unwrap();
7172 cx.update(|cx| {
7173 let target_buffer = definition.buffer.read(cx);
7174 assert_eq!(
7175 target_buffer
7176 .file()
7177 .unwrap()
7178 .as_local()
7179 .unwrap()
7180 .abs_path(cx),
7181 Path::new("/dir/a.rs"),
7182 );
7183 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7184 assert_eq!(
7185 list_worktrees(&project, cx),
7186 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7187 );
7188
7189 drop(definition);
7190 });
7191 cx.read(|cx| {
7192 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7193 });
7194
7195 fn list_worktrees<'a>(
7196 project: &'a ModelHandle<Project>,
7197 cx: &'a AppContext,
7198 ) -> Vec<(&'a Path, bool)> {
7199 project
7200 .read(cx)
7201 .worktrees(cx)
7202 .map(|worktree| {
7203 let worktree = worktree.read(cx);
7204 (
7205 worktree.as_local().unwrap().abs_path().as_ref(),
7206 worktree.is_visible(),
7207 )
7208 })
7209 .collect::<Vec<_>>()
7210 }
7211 }
7212
7213 #[gpui::test]
7214 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7215 let mut language = Language::new(
7216 LanguageConfig {
7217 name: "TypeScript".into(),
7218 path_suffixes: vec!["ts".to_string()],
7219 ..Default::default()
7220 },
7221 Some(tree_sitter_typescript::language_typescript()),
7222 );
7223 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7224
7225 let fs = FakeFs::new(cx.background());
7226 fs.insert_tree(
7227 "/dir",
7228 json!({
7229 "a.ts": "",
7230 }),
7231 )
7232 .await;
7233
7234 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7235 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7236 let buffer = project
7237 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7238 .await
7239 .unwrap();
7240
7241 let fake_server = fake_language_servers.next().await.unwrap();
7242
7243 let text = "let a = b.fqn";
7244 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7245 let completions = project.update(cx, |project, cx| {
7246 project.completions(&buffer, text.len(), cx)
7247 });
7248
7249 fake_server
7250 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7251 Ok(Some(lsp::CompletionResponse::Array(vec![
7252 lsp::CompletionItem {
7253 label: "fullyQualifiedName?".into(),
7254 insert_text: Some("fullyQualifiedName".into()),
7255 ..Default::default()
7256 },
7257 ])))
7258 })
7259 .next()
7260 .await;
7261 let completions = completions.await.unwrap();
7262 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7263 assert_eq!(completions.len(), 1);
7264 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7265 assert_eq!(
7266 completions[0].old_range.to_offset(&snapshot),
7267 text.len() - 3..text.len()
7268 );
7269 }
7270
7271 #[gpui::test(iterations = 10)]
7272 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7273 let mut language = Language::new(
7274 LanguageConfig {
7275 name: "TypeScript".into(),
7276 path_suffixes: vec!["ts".to_string()],
7277 ..Default::default()
7278 },
7279 None,
7280 );
7281 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7282
7283 let fs = FakeFs::new(cx.background());
7284 fs.insert_tree(
7285 "/dir",
7286 json!({
7287 "a.ts": "a",
7288 }),
7289 )
7290 .await;
7291
7292 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7293 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7294 let buffer = project
7295 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7296 .await
7297 .unwrap();
7298
7299 let fake_server = fake_language_servers.next().await.unwrap();
7300
7301 // Language server returns code actions that contain commands, and not edits.
7302 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7303 fake_server
7304 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7305 Ok(Some(vec![
7306 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7307 title: "The code action".into(),
7308 command: Some(lsp::Command {
7309 title: "The command".into(),
7310 command: "_the/command".into(),
7311 arguments: Some(vec![json!("the-argument")]),
7312 }),
7313 ..Default::default()
7314 }),
7315 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7316 title: "two".into(),
7317 ..Default::default()
7318 }),
7319 ]))
7320 })
7321 .next()
7322 .await;
7323
7324 let action = actions.await.unwrap()[0].clone();
7325 let apply = project.update(cx, |project, cx| {
7326 project.apply_code_action(buffer.clone(), action, true, cx)
7327 });
7328
7329 // Resolving the code action does not populate its edits. In absence of
7330 // edits, we must execute the given command.
7331 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7332 |action, _| async move { Ok(action) },
7333 );
7334
7335 // While executing the command, the language server sends the editor
7336 // a `workspaceEdit` request.
7337 fake_server
7338 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7339 let fake = fake_server.clone();
7340 move |params, _| {
7341 assert_eq!(params.command, "_the/command");
7342 let fake = fake.clone();
7343 async move {
7344 fake.server
7345 .request::<lsp::request::ApplyWorkspaceEdit>(
7346 lsp::ApplyWorkspaceEditParams {
7347 label: None,
7348 edit: lsp::WorkspaceEdit {
7349 changes: Some(
7350 [(
7351 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7352 vec![lsp::TextEdit {
7353 range: lsp::Range::new(
7354 lsp::Position::new(0, 0),
7355 lsp::Position::new(0, 0),
7356 ),
7357 new_text: "X".into(),
7358 }],
7359 )]
7360 .into_iter()
7361 .collect(),
7362 ),
7363 ..Default::default()
7364 },
7365 },
7366 )
7367 .await
7368 .unwrap();
7369 Ok(Some(json!(null)))
7370 }
7371 }
7372 })
7373 .next()
7374 .await;
7375
7376 // Applying the code action returns a project transaction containing the edits
7377 // sent by the language server in its `workspaceEdit` request.
7378 let transaction = apply.await.unwrap();
7379 assert!(transaction.0.contains_key(&buffer));
7380 buffer.update(cx, |buffer, cx| {
7381 assert_eq!(buffer.text(), "Xa");
7382 buffer.undo(cx);
7383 assert_eq!(buffer.text(), "a");
7384 });
7385 }
7386
7387 #[gpui::test]
7388 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7389 let fs = FakeFs::new(cx.background());
7390 fs.insert_tree(
7391 "/dir",
7392 json!({
7393 "file1": "the old contents",
7394 }),
7395 )
7396 .await;
7397
7398 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7399 let buffer = project
7400 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7401 .await
7402 .unwrap();
7403 buffer
7404 .update(cx, |buffer, cx| {
7405 assert_eq!(buffer.text(), "the old contents");
7406 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7407 buffer.save(cx)
7408 })
7409 .await
7410 .unwrap();
7411
7412 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7413 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7414 }
7415
7416 #[gpui::test]
7417 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7418 let fs = FakeFs::new(cx.background());
7419 fs.insert_tree(
7420 "/dir",
7421 json!({
7422 "file1": "the old contents",
7423 }),
7424 )
7425 .await;
7426
7427 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7428 let buffer = project
7429 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7430 .await
7431 .unwrap();
7432 buffer
7433 .update(cx, |buffer, cx| {
7434 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7435 buffer.save(cx)
7436 })
7437 .await
7438 .unwrap();
7439
7440 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7441 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7442 }
7443
7444 #[gpui::test]
7445 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7446 let fs = FakeFs::new(cx.background());
7447 fs.insert_tree("/dir", json!({})).await;
7448
7449 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7450 let buffer = project.update(cx, |project, cx| {
7451 project.create_buffer("", None, cx).unwrap()
7452 });
7453 buffer.update(cx, |buffer, cx| {
7454 buffer.edit([(0..0, "abc")], cx);
7455 assert!(buffer.is_dirty());
7456 assert!(!buffer.has_conflict());
7457 });
7458 project
7459 .update(cx, |project, cx| {
7460 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7461 })
7462 .await
7463 .unwrap();
7464 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7465 buffer.read_with(cx, |buffer, cx| {
7466 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7467 assert!(!buffer.is_dirty());
7468 assert!(!buffer.has_conflict());
7469 });
7470
7471 let opened_buffer = project
7472 .update(cx, |project, cx| {
7473 project.open_local_buffer("/dir/file1", cx)
7474 })
7475 .await
7476 .unwrap();
7477 assert_eq!(opened_buffer, buffer);
7478 }
7479
7480 #[gpui::test(retries = 5)]
7481 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7482 let dir = temp_tree(json!({
7483 "a": {
7484 "file1": "",
7485 "file2": "",
7486 "file3": "",
7487 },
7488 "b": {
7489 "c": {
7490 "file4": "",
7491 "file5": "",
7492 }
7493 }
7494 }));
7495
7496 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7497 let rpc = project.read_with(cx, |p, _| p.client.clone());
7498
7499 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7500 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7501 async move { buffer.await.unwrap() }
7502 };
7503 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7504 project.read_with(cx, |project, cx| {
7505 let tree = project.worktrees(cx).next().unwrap();
7506 tree.read(cx)
7507 .entry_for_path(path)
7508 .expect(&format!("no entry for path {}", path))
7509 .id
7510 })
7511 };
7512
7513 let buffer2 = buffer_for_path("a/file2", cx).await;
7514 let buffer3 = buffer_for_path("a/file3", cx).await;
7515 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7516 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7517
7518 let file2_id = id_for_path("a/file2", &cx);
7519 let file3_id = id_for_path("a/file3", &cx);
7520 let file4_id = id_for_path("b/c/file4", &cx);
7521
7522 // Create a remote copy of this worktree.
7523 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7524 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7525 let (remote, load_task) = cx.update(|cx| {
7526 Worktree::remote(
7527 1,
7528 1,
7529 initial_snapshot.to_proto(&Default::default(), true),
7530 rpc.clone(),
7531 cx,
7532 )
7533 });
7534 // tree
7535 load_task.await;
7536
7537 cx.read(|cx| {
7538 assert!(!buffer2.read(cx).is_dirty());
7539 assert!(!buffer3.read(cx).is_dirty());
7540 assert!(!buffer4.read(cx).is_dirty());
7541 assert!(!buffer5.read(cx).is_dirty());
7542 });
7543
7544 // Rename and delete files and directories.
7545 tree.flush_fs_events(&cx).await;
7546 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7547 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7548 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7549 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7550 tree.flush_fs_events(&cx).await;
7551
7552 let expected_paths = vec![
7553 "a",
7554 "a/file1",
7555 "a/file2.new",
7556 "b",
7557 "d",
7558 "d/file3",
7559 "d/file4",
7560 ];
7561
7562 cx.read(|app| {
7563 assert_eq!(
7564 tree.read(app)
7565 .paths()
7566 .map(|p| p.to_str().unwrap())
7567 .collect::<Vec<_>>(),
7568 expected_paths
7569 );
7570
7571 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7572 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7573 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7574
7575 assert_eq!(
7576 buffer2.read(app).file().unwrap().path().as_ref(),
7577 Path::new("a/file2.new")
7578 );
7579 assert_eq!(
7580 buffer3.read(app).file().unwrap().path().as_ref(),
7581 Path::new("d/file3")
7582 );
7583 assert_eq!(
7584 buffer4.read(app).file().unwrap().path().as_ref(),
7585 Path::new("d/file4")
7586 );
7587 assert_eq!(
7588 buffer5.read(app).file().unwrap().path().as_ref(),
7589 Path::new("b/c/file5")
7590 );
7591
7592 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7593 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7594 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7595 assert!(buffer5.read(app).file().unwrap().is_deleted());
7596 });
7597
7598 // Update the remote worktree. Check that it becomes consistent with the
7599 // local worktree.
7600 remote.update(cx, |remote, cx| {
7601 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7602 &initial_snapshot,
7603 1,
7604 1,
7605 true,
7606 );
7607 remote
7608 .as_remote_mut()
7609 .unwrap()
7610 .snapshot
7611 .apply_remote_update(update_message)
7612 .unwrap();
7613
7614 assert_eq!(
7615 remote
7616 .paths()
7617 .map(|p| p.to_str().unwrap())
7618 .collect::<Vec<_>>(),
7619 expected_paths
7620 );
7621 });
7622 }
7623
7624 #[gpui::test]
7625 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7626 let fs = FakeFs::new(cx.background());
7627 fs.insert_tree(
7628 "/dir",
7629 json!({
7630 "a.txt": "a-contents",
7631 "b.txt": "b-contents",
7632 }),
7633 )
7634 .await;
7635
7636 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7637
7638 // Spawn multiple tasks to open paths, repeating some paths.
7639 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7640 (
7641 p.open_local_buffer("/dir/a.txt", cx),
7642 p.open_local_buffer("/dir/b.txt", cx),
7643 p.open_local_buffer("/dir/a.txt", cx),
7644 )
7645 });
7646
7647 let buffer_a_1 = buffer_a_1.await.unwrap();
7648 let buffer_a_2 = buffer_a_2.await.unwrap();
7649 let buffer_b = buffer_b.await.unwrap();
7650 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7651 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7652
7653 // There is only one buffer per path.
7654 let buffer_a_id = buffer_a_1.id();
7655 assert_eq!(buffer_a_2.id(), buffer_a_id);
7656
7657 // Open the same path again while it is still open.
7658 drop(buffer_a_1);
7659 let buffer_a_3 = project
7660 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7661 .await
7662 .unwrap();
7663
7664 // There's still only one buffer per path.
7665 assert_eq!(buffer_a_3.id(), buffer_a_id);
7666 }
7667
7668 #[gpui::test]
7669 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7670 let fs = FakeFs::new(cx.background());
7671 fs.insert_tree(
7672 "/dir",
7673 json!({
7674 "file1": "abc",
7675 "file2": "def",
7676 "file3": "ghi",
7677 }),
7678 )
7679 .await;
7680
7681 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7682
7683 let buffer1 = project
7684 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7685 .await
7686 .unwrap();
7687 let events = Rc::new(RefCell::new(Vec::new()));
7688
7689 // initially, the buffer isn't dirty.
7690 buffer1.update(cx, |buffer, cx| {
7691 cx.subscribe(&buffer1, {
7692 let events = events.clone();
7693 move |_, _, event, _| match event {
7694 BufferEvent::Operation(_) => {}
7695 _ => events.borrow_mut().push(event.clone()),
7696 }
7697 })
7698 .detach();
7699
7700 assert!(!buffer.is_dirty());
7701 assert!(events.borrow().is_empty());
7702
7703 buffer.edit([(1..2, "")], cx);
7704 });
7705
7706 // after the first edit, the buffer is dirty, and emits a dirtied event.
7707 buffer1.update(cx, |buffer, cx| {
7708 assert!(buffer.text() == "ac");
7709 assert!(buffer.is_dirty());
7710 assert_eq!(
7711 *events.borrow(),
7712 &[language::Event::Edited, language::Event::Dirtied]
7713 );
7714 events.borrow_mut().clear();
7715 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7716 });
7717
7718 // after saving, the buffer is not dirty, and emits a saved event.
7719 buffer1.update(cx, |buffer, cx| {
7720 assert!(!buffer.is_dirty());
7721 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7722 events.borrow_mut().clear();
7723
7724 buffer.edit([(1..1, "B")], cx);
7725 buffer.edit([(2..2, "D")], cx);
7726 });
7727
7728 // after editing again, the buffer is dirty, and emits another dirty event.
7729 buffer1.update(cx, |buffer, cx| {
7730 assert!(buffer.text() == "aBDc");
7731 assert!(buffer.is_dirty());
7732 assert_eq!(
7733 *events.borrow(),
7734 &[
7735 language::Event::Edited,
7736 language::Event::Dirtied,
7737 language::Event::Edited,
7738 ],
7739 );
7740 events.borrow_mut().clear();
7741
7742 // TODO - currently, after restoring the buffer to its
7743 // previously-saved state, the is still considered dirty.
7744 buffer.edit([(1..3, "")], cx);
7745 assert!(buffer.text() == "ac");
7746 assert!(buffer.is_dirty());
7747 });
7748
7749 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7750
7751 // When a file is deleted, the buffer is considered dirty.
7752 let events = Rc::new(RefCell::new(Vec::new()));
7753 let buffer2 = project
7754 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7755 .await
7756 .unwrap();
7757 buffer2.update(cx, |_, cx| {
7758 cx.subscribe(&buffer2, {
7759 let events = events.clone();
7760 move |_, _, event, _| events.borrow_mut().push(event.clone())
7761 })
7762 .detach();
7763 });
7764
7765 fs.remove_file("/dir/file2".as_ref(), Default::default())
7766 .await
7767 .unwrap();
7768 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7769 assert_eq!(
7770 *events.borrow(),
7771 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7772 );
7773
7774 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7775 let events = Rc::new(RefCell::new(Vec::new()));
7776 let buffer3 = project
7777 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7778 .await
7779 .unwrap();
7780 buffer3.update(cx, |_, cx| {
7781 cx.subscribe(&buffer3, {
7782 let events = events.clone();
7783 move |_, _, event, _| events.borrow_mut().push(event.clone())
7784 })
7785 .detach();
7786 });
7787
7788 buffer3.update(cx, |buffer, cx| {
7789 buffer.edit([(0..0, "x")], cx);
7790 });
7791 events.borrow_mut().clear();
7792 fs.remove_file("/dir/file3".as_ref(), Default::default())
7793 .await
7794 .unwrap();
7795 buffer3
7796 .condition(&cx, |_, _| !events.borrow().is_empty())
7797 .await;
7798 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7799 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7800 }
7801
7802 #[gpui::test]
7803 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7804 let initial_contents = "aaa\nbbbbb\nc\n";
7805 let fs = FakeFs::new(cx.background());
7806 fs.insert_tree(
7807 "/dir",
7808 json!({
7809 "the-file": initial_contents,
7810 }),
7811 )
7812 .await;
7813 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7814 let buffer = project
7815 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7816 .await
7817 .unwrap();
7818
7819 let anchors = (0..3)
7820 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7821 .collect::<Vec<_>>();
7822
7823 // Change the file on disk, adding two new lines of text, and removing
7824 // one line.
7825 buffer.read_with(cx, |buffer, _| {
7826 assert!(!buffer.is_dirty());
7827 assert!(!buffer.has_conflict());
7828 });
7829 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7830 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7831 .await
7832 .unwrap();
7833
7834 // Because the buffer was not modified, it is reloaded from disk. Its
7835 // contents are edited according to the diff between the old and new
7836 // file contents.
7837 buffer
7838 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7839 .await;
7840
7841 buffer.update(cx, |buffer, _| {
7842 assert_eq!(buffer.text(), new_contents);
7843 assert!(!buffer.is_dirty());
7844 assert!(!buffer.has_conflict());
7845
7846 let anchor_positions = anchors
7847 .iter()
7848 .map(|anchor| anchor.to_point(&*buffer))
7849 .collect::<Vec<_>>();
7850 assert_eq!(
7851 anchor_positions,
7852 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7853 );
7854 });
7855
7856 // Modify the buffer
7857 buffer.update(cx, |buffer, cx| {
7858 buffer.edit([(0..0, " ")], cx);
7859 assert!(buffer.is_dirty());
7860 assert!(!buffer.has_conflict());
7861 });
7862
7863 // Change the file on disk again, adding blank lines to the beginning.
7864 fs.save(
7865 "/dir/the-file".as_ref(),
7866 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7867 )
7868 .await
7869 .unwrap();
7870
7871 // Because the buffer is modified, it doesn't reload from disk, but is
7872 // marked as having a conflict.
7873 buffer
7874 .condition(&cx, |buffer, _| buffer.has_conflict())
7875 .await;
7876 }
7877
7878 #[gpui::test]
7879 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7880 cx.foreground().forbid_parking();
7881
7882 let fs = FakeFs::new(cx.background());
7883 fs.insert_tree(
7884 "/the-dir",
7885 json!({
7886 "a.rs": "
7887 fn foo(mut v: Vec<usize>) {
7888 for x in &v {
7889 v.push(1);
7890 }
7891 }
7892 "
7893 .unindent(),
7894 }),
7895 )
7896 .await;
7897
7898 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7899 let buffer = project
7900 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7901 .await
7902 .unwrap();
7903
7904 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7905 let message = lsp::PublishDiagnosticsParams {
7906 uri: buffer_uri.clone(),
7907 diagnostics: vec![
7908 lsp::Diagnostic {
7909 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7910 severity: Some(DiagnosticSeverity::WARNING),
7911 message: "error 1".to_string(),
7912 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7913 location: lsp::Location {
7914 uri: buffer_uri.clone(),
7915 range: lsp::Range::new(
7916 lsp::Position::new(1, 8),
7917 lsp::Position::new(1, 9),
7918 ),
7919 },
7920 message: "error 1 hint 1".to_string(),
7921 }]),
7922 ..Default::default()
7923 },
7924 lsp::Diagnostic {
7925 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7926 severity: Some(DiagnosticSeverity::HINT),
7927 message: "error 1 hint 1".to_string(),
7928 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7929 location: lsp::Location {
7930 uri: buffer_uri.clone(),
7931 range: lsp::Range::new(
7932 lsp::Position::new(1, 8),
7933 lsp::Position::new(1, 9),
7934 ),
7935 },
7936 message: "original diagnostic".to_string(),
7937 }]),
7938 ..Default::default()
7939 },
7940 lsp::Diagnostic {
7941 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7942 severity: Some(DiagnosticSeverity::ERROR),
7943 message: "error 2".to_string(),
7944 related_information: Some(vec![
7945 lsp::DiagnosticRelatedInformation {
7946 location: lsp::Location {
7947 uri: buffer_uri.clone(),
7948 range: lsp::Range::new(
7949 lsp::Position::new(1, 13),
7950 lsp::Position::new(1, 15),
7951 ),
7952 },
7953 message: "error 2 hint 1".to_string(),
7954 },
7955 lsp::DiagnosticRelatedInformation {
7956 location: lsp::Location {
7957 uri: buffer_uri.clone(),
7958 range: lsp::Range::new(
7959 lsp::Position::new(1, 13),
7960 lsp::Position::new(1, 15),
7961 ),
7962 },
7963 message: "error 2 hint 2".to_string(),
7964 },
7965 ]),
7966 ..Default::default()
7967 },
7968 lsp::Diagnostic {
7969 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7970 severity: Some(DiagnosticSeverity::HINT),
7971 message: "error 2 hint 1".to_string(),
7972 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7973 location: lsp::Location {
7974 uri: buffer_uri.clone(),
7975 range: lsp::Range::new(
7976 lsp::Position::new(2, 8),
7977 lsp::Position::new(2, 17),
7978 ),
7979 },
7980 message: "original diagnostic".to_string(),
7981 }]),
7982 ..Default::default()
7983 },
7984 lsp::Diagnostic {
7985 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7986 severity: Some(DiagnosticSeverity::HINT),
7987 message: "error 2 hint 2".to_string(),
7988 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7989 location: lsp::Location {
7990 uri: buffer_uri.clone(),
7991 range: lsp::Range::new(
7992 lsp::Position::new(2, 8),
7993 lsp::Position::new(2, 17),
7994 ),
7995 },
7996 message: "original diagnostic".to_string(),
7997 }]),
7998 ..Default::default()
7999 },
8000 ],
8001 version: None,
8002 };
8003
8004 project
8005 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
8006 .unwrap();
8007 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8008
8009 assert_eq!(
8010 buffer
8011 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8012 .collect::<Vec<_>>(),
8013 &[
8014 DiagnosticEntry {
8015 range: Point::new(1, 8)..Point::new(1, 9),
8016 diagnostic: Diagnostic {
8017 severity: DiagnosticSeverity::WARNING,
8018 message: "error 1".to_string(),
8019 group_id: 0,
8020 is_primary: true,
8021 ..Default::default()
8022 }
8023 },
8024 DiagnosticEntry {
8025 range: Point::new(1, 8)..Point::new(1, 9),
8026 diagnostic: Diagnostic {
8027 severity: DiagnosticSeverity::HINT,
8028 message: "error 1 hint 1".to_string(),
8029 group_id: 0,
8030 is_primary: false,
8031 ..Default::default()
8032 }
8033 },
8034 DiagnosticEntry {
8035 range: Point::new(1, 13)..Point::new(1, 15),
8036 diagnostic: Diagnostic {
8037 severity: DiagnosticSeverity::HINT,
8038 message: "error 2 hint 1".to_string(),
8039 group_id: 1,
8040 is_primary: false,
8041 ..Default::default()
8042 }
8043 },
8044 DiagnosticEntry {
8045 range: Point::new(1, 13)..Point::new(1, 15),
8046 diagnostic: Diagnostic {
8047 severity: DiagnosticSeverity::HINT,
8048 message: "error 2 hint 2".to_string(),
8049 group_id: 1,
8050 is_primary: false,
8051 ..Default::default()
8052 }
8053 },
8054 DiagnosticEntry {
8055 range: Point::new(2, 8)..Point::new(2, 17),
8056 diagnostic: Diagnostic {
8057 severity: DiagnosticSeverity::ERROR,
8058 message: "error 2".to_string(),
8059 group_id: 1,
8060 is_primary: true,
8061 ..Default::default()
8062 }
8063 }
8064 ]
8065 );
8066
8067 assert_eq!(
8068 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8069 &[
8070 DiagnosticEntry {
8071 range: Point::new(1, 8)..Point::new(1, 9),
8072 diagnostic: Diagnostic {
8073 severity: DiagnosticSeverity::WARNING,
8074 message: "error 1".to_string(),
8075 group_id: 0,
8076 is_primary: true,
8077 ..Default::default()
8078 }
8079 },
8080 DiagnosticEntry {
8081 range: Point::new(1, 8)..Point::new(1, 9),
8082 diagnostic: Diagnostic {
8083 severity: DiagnosticSeverity::HINT,
8084 message: "error 1 hint 1".to_string(),
8085 group_id: 0,
8086 is_primary: false,
8087 ..Default::default()
8088 }
8089 },
8090 ]
8091 );
8092 assert_eq!(
8093 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8094 &[
8095 DiagnosticEntry {
8096 range: Point::new(1, 13)..Point::new(1, 15),
8097 diagnostic: Diagnostic {
8098 severity: DiagnosticSeverity::HINT,
8099 message: "error 2 hint 1".to_string(),
8100 group_id: 1,
8101 is_primary: false,
8102 ..Default::default()
8103 }
8104 },
8105 DiagnosticEntry {
8106 range: Point::new(1, 13)..Point::new(1, 15),
8107 diagnostic: Diagnostic {
8108 severity: DiagnosticSeverity::HINT,
8109 message: "error 2 hint 2".to_string(),
8110 group_id: 1,
8111 is_primary: false,
8112 ..Default::default()
8113 }
8114 },
8115 DiagnosticEntry {
8116 range: Point::new(2, 8)..Point::new(2, 17),
8117 diagnostic: Diagnostic {
8118 severity: DiagnosticSeverity::ERROR,
8119 message: "error 2".to_string(),
8120 group_id: 1,
8121 is_primary: true,
8122 ..Default::default()
8123 }
8124 }
8125 ]
8126 );
8127 }
8128
8129 #[gpui::test]
8130 async fn test_rename(cx: &mut gpui::TestAppContext) {
8131 cx.foreground().forbid_parking();
8132
8133 let mut language = Language::new(
8134 LanguageConfig {
8135 name: "Rust".into(),
8136 path_suffixes: vec!["rs".to_string()],
8137 ..Default::default()
8138 },
8139 Some(tree_sitter_rust::language()),
8140 );
8141 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8142 capabilities: lsp::ServerCapabilities {
8143 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8144 prepare_provider: Some(true),
8145 work_done_progress_options: Default::default(),
8146 })),
8147 ..Default::default()
8148 },
8149 ..Default::default()
8150 });
8151
8152 let fs = FakeFs::new(cx.background());
8153 fs.insert_tree(
8154 "/dir",
8155 json!({
8156 "one.rs": "const ONE: usize = 1;",
8157 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8158 }),
8159 )
8160 .await;
8161
8162 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8163 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8164 let buffer = project
8165 .update(cx, |project, cx| {
8166 project.open_local_buffer("/dir/one.rs", cx)
8167 })
8168 .await
8169 .unwrap();
8170
8171 let fake_server = fake_servers.next().await.unwrap();
8172
8173 let response = project.update(cx, |project, cx| {
8174 project.prepare_rename(buffer.clone(), 7, cx)
8175 });
8176 fake_server
8177 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8178 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8179 assert_eq!(params.position, lsp::Position::new(0, 7));
8180 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8181 lsp::Position::new(0, 6),
8182 lsp::Position::new(0, 9),
8183 ))))
8184 })
8185 .next()
8186 .await
8187 .unwrap();
8188 let range = response.await.unwrap().unwrap();
8189 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8190 assert_eq!(range, 6..9);
8191
8192 let response = project.update(cx, |project, cx| {
8193 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8194 });
8195 fake_server
8196 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8197 assert_eq!(
8198 params.text_document_position.text_document.uri.as_str(),
8199 "file:///dir/one.rs"
8200 );
8201 assert_eq!(
8202 params.text_document_position.position,
8203 lsp::Position::new(0, 7)
8204 );
8205 assert_eq!(params.new_name, "THREE");
8206 Ok(Some(lsp::WorkspaceEdit {
8207 changes: Some(
8208 [
8209 (
8210 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8211 vec![lsp::TextEdit::new(
8212 lsp::Range::new(
8213 lsp::Position::new(0, 6),
8214 lsp::Position::new(0, 9),
8215 ),
8216 "THREE".to_string(),
8217 )],
8218 ),
8219 (
8220 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8221 vec![
8222 lsp::TextEdit::new(
8223 lsp::Range::new(
8224 lsp::Position::new(0, 24),
8225 lsp::Position::new(0, 27),
8226 ),
8227 "THREE".to_string(),
8228 ),
8229 lsp::TextEdit::new(
8230 lsp::Range::new(
8231 lsp::Position::new(0, 35),
8232 lsp::Position::new(0, 38),
8233 ),
8234 "THREE".to_string(),
8235 ),
8236 ],
8237 ),
8238 ]
8239 .into_iter()
8240 .collect(),
8241 ),
8242 ..Default::default()
8243 }))
8244 })
8245 .next()
8246 .await
8247 .unwrap();
8248 let mut transaction = response.await.unwrap().0;
8249 assert_eq!(transaction.len(), 2);
8250 assert_eq!(
8251 transaction
8252 .remove_entry(&buffer)
8253 .unwrap()
8254 .0
8255 .read_with(cx, |buffer, _| buffer.text()),
8256 "const THREE: usize = 1;"
8257 );
8258 assert_eq!(
8259 transaction
8260 .into_keys()
8261 .next()
8262 .unwrap()
8263 .read_with(cx, |buffer, _| buffer.text()),
8264 "const TWO: usize = one::THREE + one::THREE;"
8265 );
8266 }
8267
8268 #[gpui::test]
8269 async fn test_search(cx: &mut gpui::TestAppContext) {
8270 let fs = FakeFs::new(cx.background());
8271 fs.insert_tree(
8272 "/dir",
8273 json!({
8274 "one.rs": "const ONE: usize = 1;",
8275 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8276 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8277 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8278 }),
8279 )
8280 .await;
8281 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8282 assert_eq!(
8283 search(&project, SearchQuery::text("TWO", false, true), cx)
8284 .await
8285 .unwrap(),
8286 HashMap::from_iter([
8287 ("two.rs".to_string(), vec![6..9]),
8288 ("three.rs".to_string(), vec![37..40])
8289 ])
8290 );
8291
8292 let buffer_4 = project
8293 .update(cx, |project, cx| {
8294 project.open_local_buffer("/dir/four.rs", cx)
8295 })
8296 .await
8297 .unwrap();
8298 buffer_4.update(cx, |buffer, cx| {
8299 let text = "two::TWO";
8300 buffer.edit([(20..28, text), (31..43, text)], cx);
8301 });
8302
8303 assert_eq!(
8304 search(&project, SearchQuery::text("TWO", false, true), cx)
8305 .await
8306 .unwrap(),
8307 HashMap::from_iter([
8308 ("two.rs".to_string(), vec![6..9]),
8309 ("three.rs".to_string(), vec![37..40]),
8310 ("four.rs".to_string(), vec![25..28, 36..39])
8311 ])
8312 );
8313
8314 async fn search(
8315 project: &ModelHandle<Project>,
8316 query: SearchQuery,
8317 cx: &mut gpui::TestAppContext,
8318 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8319 let results = project
8320 .update(cx, |project, cx| project.search(query, cx))
8321 .await?;
8322
8323 Ok(results
8324 .into_iter()
8325 .map(|(buffer, ranges)| {
8326 buffer.read_with(cx, |buffer, _| {
8327 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8328 let ranges = ranges
8329 .into_iter()
8330 .map(|range| range.to_offset(buffer))
8331 .collect::<Vec<_>>();
8332 (path, ranges)
8333 })
8334 })
8335 .collect())
8336 }
8337 }
8338}