1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 subscriptions: Vec<client::Subscription>,
94 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
95 shared_buffers: HashMap<PeerId, HashSet<u64>>,
96 loading_buffers: HashMap<
97 ProjectPath,
98 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
99 >,
100 loading_local_worktrees:
101 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
102 opened_buffers: HashMap<u64, OpenBuffer>,
103 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
104 nonce: u128,
105 initialized_persistent_state: bool,
106}
107
108#[derive(Error, Debug)]
109pub enum JoinProjectError {
110 #[error("host declined join request")]
111 HostDeclined,
112 #[error("host closed the project")]
113 HostClosedProject,
114 #[error("host went offline")]
115 HostWentOffline,
116 #[error("{0}")]
117 Other(#[from] anyhow::Error),
118}
119
120enum OpenBuffer {
121 Strong(ModelHandle<Buffer>),
122 Weak(WeakModelHandle<Buffer>),
123 Loading(Vec<Operation>),
124}
125
126enum WorktreeHandle {
127 Strong(ModelHandle<Worktree>),
128 Weak(WeakModelHandle<Worktree>),
129}
130
131enum ProjectClientState {
132 Local {
133 is_shared: bool,
134 remote_id_tx: watch::Sender<Option<u64>>,
135 remote_id_rx: watch::Receiver<Option<u64>>,
136 online_tx: watch::Sender<bool>,
137 online_rx: watch::Receiver<bool>,
138 _maintain_remote_id_task: Task<Option<()>>,
139 },
140 Remote {
141 sharing_has_stopped: bool,
142 remote_id: u64,
143 replica_id: ReplicaId,
144 _detect_unshare_task: Task<Option<()>>,
145 },
146}
147
148#[derive(Clone, Debug)]
149pub struct Collaborator {
150 pub user: Arc<User>,
151 pub peer_id: PeerId,
152 pub replica_id: ReplicaId,
153}
154
155#[derive(Clone, Debug, PartialEq, Eq)]
156pub enum Event {
157 ActiveEntryChanged(Option<ProjectEntryId>),
158 WorktreeAdded,
159 WorktreeRemoved(WorktreeId),
160 DiskBasedDiagnosticsStarted {
161 language_server_id: usize,
162 },
163 DiskBasedDiagnosticsFinished {
164 language_server_id: usize,
165 },
166 DiagnosticsUpdated {
167 path: ProjectPath,
168 language_server_id: usize,
169 },
170 RemoteIdChanged(Option<u64>),
171 CollaboratorLeft(PeerId),
172 ContactRequestedJoin(Arc<User>),
173 ContactCancelledJoinRequest(Arc<User>),
174}
175
176#[derive(Serialize)]
177pub struct LanguageServerStatus {
178 pub name: String,
179 pub pending_work: BTreeMap<String, LanguageServerProgress>,
180 pub pending_diagnostic_updates: isize,
181}
182
183#[derive(Clone, Debug, Serialize)]
184pub struct LanguageServerProgress {
185 pub message: Option<String>,
186 pub percentage: Option<usize>,
187 #[serde(skip_serializing)]
188 pub last_update_at: Instant,
189}
190
191#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
192pub struct ProjectPath {
193 pub worktree_id: WorktreeId,
194 pub path: Arc<Path>,
195}
196
197#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
198pub struct DiagnosticSummary {
199 pub language_server_id: usize,
200 pub error_count: usize,
201 pub warning_count: usize,
202}
203
204#[derive(Debug)]
205pub struct Location {
206 pub buffer: ModelHandle<Buffer>,
207 pub range: Range<language::Anchor>,
208}
209
210#[derive(Debug)]
211pub struct DocumentHighlight {
212 pub range: Range<language::Anchor>,
213 pub kind: DocumentHighlightKind,
214}
215
216#[derive(Clone, Debug)]
217pub struct Symbol {
218 pub source_worktree_id: WorktreeId,
219 pub worktree_id: WorktreeId,
220 pub language_server_name: LanguageServerName,
221 pub path: PathBuf,
222 pub label: CodeLabel,
223 pub name: String,
224 pub kind: lsp::SymbolKind,
225 pub range: Range<PointUtf16>,
226 pub signature: [u8; 32],
227}
228
229#[derive(Clone, Debug, PartialEq)]
230pub struct HoverBlock {
231 pub text: String,
232 pub language: Option<String>,
233}
234
235impl HoverBlock {
236 fn try_new(marked_string: MarkedString) -> Option<Self> {
237 let result = match marked_string {
238 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
239 text: value,
240 language: Some(language),
241 },
242 MarkedString::String(text) => HoverBlock {
243 text,
244 language: None,
245 },
246 };
247 if result.text.is_empty() {
248 None
249 } else {
250 Some(result)
251 }
252 }
253}
254
255#[derive(Debug)]
256pub struct Hover {
257 pub contents: Vec<HoverBlock>,
258 pub range: Option<Range<language::Anchor>>,
259}
260
261#[derive(Default)]
262pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
263
264impl DiagnosticSummary {
265 fn new<'a, T: 'a>(
266 language_server_id: usize,
267 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
268 ) -> Self {
269 let mut this = Self {
270 language_server_id,
271 error_count: 0,
272 warning_count: 0,
273 };
274
275 for entry in diagnostics {
276 if entry.diagnostic.is_primary {
277 match entry.diagnostic.severity {
278 DiagnosticSeverity::ERROR => this.error_count += 1,
279 DiagnosticSeverity::WARNING => this.warning_count += 1,
280 _ => {}
281 }
282 }
283 }
284
285 this
286 }
287
288 pub fn is_empty(&self) -> bool {
289 self.error_count == 0 && self.warning_count == 0
290 }
291
292 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
293 proto::DiagnosticSummary {
294 path: path.to_string_lossy().to_string(),
295 language_server_id: self.language_server_id as u64,
296 error_count: self.error_count as u32,
297 warning_count: self.warning_count as u32,
298 }
299 }
300}
301
302#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
303pub struct ProjectEntryId(usize);
304
305impl ProjectEntryId {
306 pub const MAX: Self = Self(usize::MAX);
307
308 pub fn new(counter: &AtomicUsize) -> Self {
309 Self(counter.fetch_add(1, SeqCst))
310 }
311
312 pub fn from_proto(id: u64) -> Self {
313 Self(id as usize)
314 }
315
316 pub fn to_proto(&self) -> u64 {
317 self.0 as u64
318 }
319
320 pub fn to_usize(&self) -> usize {
321 self.0
322 }
323}
324
325impl Project {
326 pub fn init(client: &Arc<Client>) {
327 client.add_model_message_handler(Self::handle_request_join_project);
328 client.add_model_message_handler(Self::handle_add_collaborator);
329 client.add_model_message_handler(Self::handle_buffer_reloaded);
330 client.add_model_message_handler(Self::handle_buffer_saved);
331 client.add_model_message_handler(Self::handle_start_language_server);
332 client.add_model_message_handler(Self::handle_update_language_server);
333 client.add_model_message_handler(Self::handle_remove_collaborator);
334 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
335 client.add_model_message_handler(Self::handle_update_project);
336 client.add_model_message_handler(Self::handle_unregister_project);
337 client.add_model_message_handler(Self::handle_project_unshared);
338 client.add_model_message_handler(Self::handle_update_buffer_file);
339 client.add_model_message_handler(Self::handle_update_buffer);
340 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
341 client.add_model_message_handler(Self::handle_update_worktree);
342 client.add_model_request_handler(Self::handle_create_project_entry);
343 client.add_model_request_handler(Self::handle_rename_project_entry);
344 client.add_model_request_handler(Self::handle_copy_project_entry);
345 client.add_model_request_handler(Self::handle_delete_project_entry);
346 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
347 client.add_model_request_handler(Self::handle_apply_code_action);
348 client.add_model_request_handler(Self::handle_reload_buffers);
349 client.add_model_request_handler(Self::handle_format_buffers);
350 client.add_model_request_handler(Self::handle_get_code_actions);
351 client.add_model_request_handler(Self::handle_get_completions);
352 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
356 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
358 client.add_model_request_handler(Self::handle_search_project);
359 client.add_model_request_handler(Self::handle_get_project_symbols);
360 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
361 client.add_model_request_handler(Self::handle_open_buffer_by_id);
362 client.add_model_request_handler(Self::handle_open_buffer_by_path);
363 client.add_model_request_handler(Self::handle_save_buffer);
364 }
365
366 pub fn local(
367 online: bool,
368 client: Arc<Client>,
369 user_store: ModelHandle<UserStore>,
370 project_store: ModelHandle<ProjectStore>,
371 languages: Arc<LanguageRegistry>,
372 fs: Arc<dyn Fs>,
373 cx: &mut MutableAppContext,
374 ) -> ModelHandle<Self> {
375 cx.add_model(|cx: &mut ModelContext<Self>| {
376 let (online_tx, online_rx) = watch::channel_with(online);
377 let (remote_id_tx, remote_id_rx) = watch::channel();
378 let _maintain_remote_id_task = cx.spawn_weak({
379 let status_rx = client.clone().status();
380 let online_rx = online_rx.clone();
381 move |this, mut cx| async move {
382 let mut stream = Stream::map(status_rx.clone(), drop)
383 .merge(Stream::map(online_rx.clone(), drop));
384 while stream.recv().await.is_some() {
385 let this = this.upgrade(&cx)?;
386 if status_rx.borrow().is_connected() && *online_rx.borrow() {
387 this.update(&mut cx, |this, cx| this.register(cx))
388 .await
389 .log_err()?;
390 } else {
391 this.update(&mut cx, |this, cx| this.unregister(cx))
392 .await
393 .log_err();
394 }
395 }
396 None
397 }
398 });
399
400 let handle = cx.weak_handle();
401 project_store.update(cx, |store, cx| store.add_project(handle, cx));
402
403 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
404 Self {
405 worktrees: Default::default(),
406 collaborators: Default::default(),
407 opened_buffers: Default::default(),
408 shared_buffers: Default::default(),
409 loading_buffers: Default::default(),
410 loading_local_worktrees: Default::default(),
411 buffer_snapshots: Default::default(),
412 client_state: ProjectClientState::Local {
413 is_shared: false,
414 remote_id_tx,
415 remote_id_rx,
416 online_tx,
417 online_rx,
418 _maintain_remote_id_task,
419 },
420 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
421 subscriptions: Vec::new(),
422 active_entry: None,
423 languages,
424 client,
425 user_store,
426 project_store,
427 fs,
428 next_entry_id: Default::default(),
429 next_diagnostic_group_id: Default::default(),
430 language_servers: Default::default(),
431 started_language_servers: Default::default(),
432 language_server_statuses: Default::default(),
433 last_workspace_edits_by_language_server: Default::default(),
434 language_server_settings: Default::default(),
435 next_language_server_id: 0,
436 nonce: StdRng::from_entropy().gen(),
437 initialized_persistent_state: false,
438 }
439 })
440 }
441
442 pub async fn remote(
443 remote_id: u64,
444 client: Arc<Client>,
445 user_store: ModelHandle<UserStore>,
446 project_store: ModelHandle<ProjectStore>,
447 languages: Arc<LanguageRegistry>,
448 fs: Arc<dyn Fs>,
449 mut cx: AsyncAppContext,
450 ) -> Result<ModelHandle<Self>, JoinProjectError> {
451 client.authenticate_and_connect(true, &cx).await?;
452
453 let response = client
454 .request(proto::JoinProject {
455 project_id: remote_id,
456 })
457 .await?;
458
459 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
460 proto::join_project_response::Variant::Accept(response) => response,
461 proto::join_project_response::Variant::Decline(decline) => {
462 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
463 Some(proto::join_project_response::decline::Reason::Declined) => {
464 Err(JoinProjectError::HostDeclined)?
465 }
466 Some(proto::join_project_response::decline::Reason::Closed) => {
467 Err(JoinProjectError::HostClosedProject)?
468 }
469 Some(proto::join_project_response::decline::Reason::WentOffline) => {
470 Err(JoinProjectError::HostWentOffline)?
471 }
472 None => Err(anyhow!("missing decline reason"))?,
473 }
474 }
475 };
476
477 let replica_id = response.replica_id as ReplicaId;
478
479 let mut worktrees = Vec::new();
480 for worktree in response.worktrees {
481 let (worktree, load_task) = cx
482 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
483 worktrees.push(worktree);
484 load_task.detach();
485 }
486
487 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
488 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
489 let handle = cx.weak_handle();
490 project_store.update(cx, |store, cx| store.add_project(handle, cx));
491
492 let mut this = Self {
493 worktrees: Vec::new(),
494 loading_buffers: Default::default(),
495 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
496 shared_buffers: Default::default(),
497 loading_local_worktrees: Default::default(),
498 active_entry: None,
499 collaborators: Default::default(),
500 languages,
501 user_store: user_store.clone(),
502 project_store,
503 fs,
504 next_entry_id: Default::default(),
505 next_diagnostic_group_id: Default::default(),
506 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
507 client: client.clone(),
508 client_state: ProjectClientState::Remote {
509 sharing_has_stopped: false,
510 remote_id,
511 replica_id,
512 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
513 async move {
514 let mut status = client.status();
515 let is_connected =
516 status.next().await.map_or(false, |s| s.is_connected());
517 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
518 if !is_connected || status.next().await.is_some() {
519 if let Some(this) = this.upgrade(&cx) {
520 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
521 }
522 }
523 Ok(())
524 }
525 .log_err()
526 }),
527 },
528 language_servers: Default::default(),
529 started_language_servers: Default::default(),
530 language_server_settings: Default::default(),
531 language_server_statuses: response
532 .language_servers
533 .into_iter()
534 .map(|server| {
535 (
536 server.id as usize,
537 LanguageServerStatus {
538 name: server.name,
539 pending_work: Default::default(),
540 pending_diagnostic_updates: 0,
541 },
542 )
543 })
544 .collect(),
545 last_workspace_edits_by_language_server: Default::default(),
546 next_language_server_id: 0,
547 opened_buffers: Default::default(),
548 buffer_snapshots: Default::default(),
549 nonce: StdRng::from_entropy().gen(),
550 initialized_persistent_state: false,
551 };
552 for worktree in worktrees {
553 this.add_worktree(&worktree, cx);
554 }
555 this
556 });
557
558 let user_ids = response
559 .collaborators
560 .iter()
561 .map(|peer| peer.user_id)
562 .collect();
563 user_store
564 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
565 .await?;
566 let mut collaborators = HashMap::default();
567 for message in response.collaborators {
568 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
569 collaborators.insert(collaborator.peer_id, collaborator);
570 }
571
572 this.update(&mut cx, |this, _| {
573 this.collaborators = collaborators;
574 });
575
576 Ok(this)
577 }
578
579 #[cfg(any(test, feature = "test-support"))]
580 pub async fn test(
581 fs: Arc<dyn Fs>,
582 root_paths: impl IntoIterator<Item = &Path>,
583 cx: &mut gpui::TestAppContext,
584 ) -> ModelHandle<Project> {
585 let languages = Arc::new(LanguageRegistry::test());
586 let http_client = client::test::FakeHttpClient::with_404_response();
587 let client = client::Client::new(http_client.clone());
588 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
589 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
590 let project = cx.update(|cx| {
591 Project::local(true, client, user_store, project_store, languages, fs, cx)
592 });
593 for path in root_paths {
594 let (tree, _) = project
595 .update(cx, |project, cx| {
596 project.find_or_create_local_worktree(path, true, cx)
597 })
598 .await
599 .unwrap();
600 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
601 .await;
602 }
603 project
604 }
605
606 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
607 if self.is_remote() {
608 return Task::ready(Ok(()));
609 }
610
611 let db = self.project_store.read(cx).db.clone();
612 let keys = self.db_keys_for_online_state(cx);
613 let online_by_default = cx.global::<Settings>().projects_online_by_default;
614 let read_online = cx.background().spawn(async move {
615 let values = db.read(keys)?;
616 anyhow::Ok(
617 values
618 .into_iter()
619 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
620 )
621 });
622 cx.spawn(|this, mut cx| async move {
623 let online = read_online.await.log_err().unwrap_or(false);
624 this.update(&mut cx, |this, cx| {
625 this.initialized_persistent_state = true;
626 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
627 let mut online_tx = online_tx.borrow_mut();
628 if *online_tx != online {
629 *online_tx = online;
630 drop(online_tx);
631 this.metadata_changed(false, cx);
632 }
633 }
634 });
635 Ok(())
636 })
637 }
638
639 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
640 if self.is_remote() || !self.initialized_persistent_state {
641 return Task::ready(Ok(()));
642 }
643
644 let db = self.project_store.read(cx).db.clone();
645 let keys = self.db_keys_for_online_state(cx);
646 let is_online = self.is_online();
647 cx.background().spawn(async move {
648 let value = &[is_online as u8];
649 db.write(keys.into_iter().map(|key| (key, value)))
650 })
651 }
652
653 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
654 self.opened_buffers
655 .get(&remote_id)
656 .and_then(|buffer| buffer.upgrade(cx))
657 }
658
659 pub fn languages(&self) -> &Arc<LanguageRegistry> {
660 &self.languages
661 }
662
663 pub fn client(&self) -> Arc<Client> {
664 self.client.clone()
665 }
666
667 pub fn user_store(&self) -> ModelHandle<UserStore> {
668 self.user_store.clone()
669 }
670
671 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
672 self.project_store.clone()
673 }
674
675 #[cfg(any(test, feature = "test-support"))]
676 pub fn check_invariants(&self, cx: &AppContext) {
677 if self.is_local() {
678 let mut worktree_root_paths = HashMap::default();
679 for worktree in self.worktrees(cx) {
680 let worktree = worktree.read(cx);
681 let abs_path = worktree.as_local().unwrap().abs_path().clone();
682 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
683 assert_eq!(
684 prev_worktree_id,
685 None,
686 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
687 abs_path,
688 worktree.id(),
689 prev_worktree_id
690 )
691 }
692 } else {
693 let replica_id = self.replica_id();
694 for buffer in self.opened_buffers.values() {
695 if let Some(buffer) = buffer.upgrade(cx) {
696 let buffer = buffer.read(cx);
697 assert_eq!(
698 buffer.deferred_ops_len(),
699 0,
700 "replica {}, buffer {} has deferred operations",
701 replica_id,
702 buffer.remote_id()
703 );
704 }
705 }
706 }
707 }
708
709 #[cfg(any(test, feature = "test-support"))]
710 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
711 let path = path.into();
712 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
713 self.opened_buffers.iter().any(|(_, buffer)| {
714 if let Some(buffer) = buffer.upgrade(cx) {
715 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
716 if file.worktree == worktree && file.path() == &path.path {
717 return true;
718 }
719 }
720 }
721 false
722 })
723 } else {
724 false
725 }
726 }
727
728 pub fn fs(&self) -> &Arc<dyn Fs> {
729 &self.fs
730 }
731
732 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
733 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
734 let mut online_tx = online_tx.borrow_mut();
735 if *online_tx != online {
736 *online_tx = online;
737 drop(online_tx);
738 self.metadata_changed(true, cx);
739 }
740 }
741 }
742
743 pub fn is_online(&self) -> bool {
744 match &self.client_state {
745 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
746 ProjectClientState::Remote { .. } => true,
747 }
748 }
749
750 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
751 self.unshared(cx);
752 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
753 if let Some(remote_id) = *remote_id_rx.borrow() {
754 let request = self.client.request(proto::UnregisterProject {
755 project_id: remote_id,
756 });
757 return cx.spawn(|this, mut cx| async move {
758 let response = request.await;
759
760 // Unregistering the project causes the server to send out a
761 // contact update removing this project from the host's list
762 // of online projects. Wait until this contact update has been
763 // processed before clearing out this project's remote id, so
764 // that there is no moment where this project appears in the
765 // contact metadata and *also* has no remote id.
766 this.update(&mut cx, |this, cx| {
767 this.user_store()
768 .update(cx, |store, _| store.contact_updates_done())
769 })
770 .await;
771
772 this.update(&mut cx, |this, cx| {
773 if let ProjectClientState::Local { remote_id_tx, .. } =
774 &mut this.client_state
775 {
776 *remote_id_tx.borrow_mut() = None;
777 }
778 this.subscriptions.clear();
779 this.metadata_changed(false, cx);
780 });
781 response.map(drop)
782 });
783 }
784 }
785 Task::ready(Ok(()))
786 }
787
788 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
789 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
790 if remote_id_rx.borrow().is_some() {
791 return Task::ready(Ok(()));
792 }
793 }
794
795 let response = self.client.request(proto::RegisterProject {});
796 cx.spawn(|this, mut cx| async move {
797 let remote_id = response.await?.project_id;
798 this.update(&mut cx, |this, cx| {
799 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
800 *remote_id_tx.borrow_mut() = Some(remote_id);
801 }
802
803 this.metadata_changed(false, cx);
804 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
805 this.subscriptions
806 .push(this.client.add_model_for_remote_entity(remote_id, cx));
807 Ok(())
808 })
809 })
810 }
811
812 pub fn remote_id(&self) -> Option<u64> {
813 match &self.client_state {
814 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
815 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
816 }
817 }
818
819 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
820 let mut id = None;
821 let mut watch = None;
822 match &self.client_state {
823 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
824 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
825 }
826
827 async move {
828 if let Some(id) = id {
829 return id;
830 }
831 let mut watch = watch.unwrap();
832 loop {
833 let id = *watch.borrow();
834 if let Some(id) = id {
835 return id;
836 }
837 watch.next().await;
838 }
839 }
840 }
841
842 pub fn shared_remote_id(&self) -> Option<u64> {
843 match &self.client_state {
844 ProjectClientState::Local {
845 remote_id_rx,
846 is_shared,
847 ..
848 } => {
849 if *is_shared {
850 *remote_id_rx.borrow()
851 } else {
852 None
853 }
854 }
855 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
856 }
857 }
858
859 pub fn replica_id(&self) -> ReplicaId {
860 match &self.client_state {
861 ProjectClientState::Local { .. } => 0,
862 ProjectClientState::Remote { replica_id, .. } => *replica_id,
863 }
864 }
865
866 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
867 if let ProjectClientState::Local {
868 remote_id_rx,
869 online_rx,
870 ..
871 } = &self.client_state
872 {
873 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
874 self.client
875 .send(proto::UpdateProject {
876 project_id,
877 worktrees: self
878 .worktrees
879 .iter()
880 .filter_map(|worktree| {
881 worktree.upgrade(&cx).map(|worktree| {
882 worktree.read(cx).as_local().unwrap().metadata_proto()
883 })
884 })
885 .collect(),
886 })
887 .log_err();
888 }
889
890 self.project_store.update(cx, |_, cx| cx.notify());
891 if persist {
892 self.persist_state(cx).detach_and_log_err(cx);
893 }
894 cx.notify();
895 }
896 }
897
898 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
899 &self.collaborators
900 }
901
902 pub fn worktrees<'a>(
903 &'a self,
904 cx: &'a AppContext,
905 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
906 self.worktrees
907 .iter()
908 .filter_map(move |worktree| worktree.upgrade(cx))
909 }
910
911 pub fn visible_worktrees<'a>(
912 &'a self,
913 cx: &'a AppContext,
914 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
915 self.worktrees.iter().filter_map(|worktree| {
916 worktree.upgrade(cx).and_then(|worktree| {
917 if worktree.read(cx).is_visible() {
918 Some(worktree)
919 } else {
920 None
921 }
922 })
923 })
924 }
925
926 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
927 self.visible_worktrees(cx)
928 .map(|tree| tree.read(cx).root_name())
929 }
930
931 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
932 self.worktrees
933 .iter()
934 .filter_map(|worktree| {
935 let worktree = worktree.upgrade(&cx)?.read(cx);
936 if worktree.is_visible() {
937 Some(format!(
938 "project-path-online:{}",
939 worktree.as_local().unwrap().abs_path().to_string_lossy()
940 ))
941 } else {
942 None
943 }
944 })
945 .collect::<Vec<_>>()
946 }
947
948 pub fn worktree_for_id(
949 &self,
950 id: WorktreeId,
951 cx: &AppContext,
952 ) -> Option<ModelHandle<Worktree>> {
953 self.worktrees(cx)
954 .find(|worktree| worktree.read(cx).id() == id)
955 }
956
957 pub fn worktree_for_entry(
958 &self,
959 entry_id: ProjectEntryId,
960 cx: &AppContext,
961 ) -> Option<ModelHandle<Worktree>> {
962 self.worktrees(cx)
963 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
964 }
965
966 pub fn worktree_id_for_entry(
967 &self,
968 entry_id: ProjectEntryId,
969 cx: &AppContext,
970 ) -> Option<WorktreeId> {
971 self.worktree_for_entry(entry_id, cx)
972 .map(|worktree| worktree.read(cx).id())
973 }
974
975 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
976 paths.iter().all(|path| self.contains_path(&path, cx))
977 }
978
979 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
980 for worktree in self.worktrees(cx) {
981 let worktree = worktree.read(cx).as_local();
982 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
983 return true;
984 }
985 }
986 false
987 }
988
989 pub fn create_entry(
990 &mut self,
991 project_path: impl Into<ProjectPath>,
992 is_directory: bool,
993 cx: &mut ModelContext<Self>,
994 ) -> Option<Task<Result<Entry>>> {
995 let project_path = project_path.into();
996 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
997 if self.is_local() {
998 Some(worktree.update(cx, |worktree, cx| {
999 worktree
1000 .as_local_mut()
1001 .unwrap()
1002 .create_entry(project_path.path, is_directory, cx)
1003 }))
1004 } else {
1005 let client = self.client.clone();
1006 let project_id = self.remote_id().unwrap();
1007 Some(cx.spawn_weak(|_, mut cx| async move {
1008 let response = client
1009 .request(proto::CreateProjectEntry {
1010 worktree_id: project_path.worktree_id.to_proto(),
1011 project_id,
1012 path: project_path.path.as_os_str().as_bytes().to_vec(),
1013 is_directory,
1014 })
1015 .await?;
1016 let entry = response
1017 .entry
1018 .ok_or_else(|| anyhow!("missing entry in response"))?;
1019 worktree
1020 .update(&mut cx, |worktree, cx| {
1021 worktree.as_remote().unwrap().insert_entry(
1022 entry,
1023 response.worktree_scan_id as usize,
1024 cx,
1025 )
1026 })
1027 .await
1028 }))
1029 }
1030 }
1031
1032 pub fn copy_entry(
1033 &mut self,
1034 entry_id: ProjectEntryId,
1035 new_path: impl Into<Arc<Path>>,
1036 cx: &mut ModelContext<Self>,
1037 ) -> Option<Task<Result<Entry>>> {
1038 let worktree = self.worktree_for_entry(entry_id, cx)?;
1039 let new_path = new_path.into();
1040 if self.is_local() {
1041 worktree.update(cx, |worktree, cx| {
1042 worktree
1043 .as_local_mut()
1044 .unwrap()
1045 .copy_entry(entry_id, new_path, cx)
1046 })
1047 } else {
1048 let client = self.client.clone();
1049 let project_id = self.remote_id().unwrap();
1050
1051 Some(cx.spawn_weak(|_, mut cx| async move {
1052 let response = client
1053 .request(proto::CopyProjectEntry {
1054 project_id,
1055 entry_id: entry_id.to_proto(),
1056 new_path: new_path.as_os_str().as_bytes().to_vec(),
1057 })
1058 .await?;
1059 let entry = response
1060 .entry
1061 .ok_or_else(|| anyhow!("missing entry in response"))?;
1062 worktree
1063 .update(&mut cx, |worktree, cx| {
1064 worktree.as_remote().unwrap().insert_entry(
1065 entry,
1066 response.worktree_scan_id as usize,
1067 cx,
1068 )
1069 })
1070 .await
1071 }))
1072 }
1073 }
1074
1075 pub fn rename_entry(
1076 &mut self,
1077 entry_id: ProjectEntryId,
1078 new_path: impl Into<Arc<Path>>,
1079 cx: &mut ModelContext<Self>,
1080 ) -> Option<Task<Result<Entry>>> {
1081 let worktree = self.worktree_for_entry(entry_id, cx)?;
1082 let new_path = new_path.into();
1083 if self.is_local() {
1084 worktree.update(cx, |worktree, cx| {
1085 worktree
1086 .as_local_mut()
1087 .unwrap()
1088 .rename_entry(entry_id, new_path, cx)
1089 })
1090 } else {
1091 let client = self.client.clone();
1092 let project_id = self.remote_id().unwrap();
1093
1094 Some(cx.spawn_weak(|_, mut cx| async move {
1095 let response = client
1096 .request(proto::RenameProjectEntry {
1097 project_id,
1098 entry_id: entry_id.to_proto(),
1099 new_path: new_path.as_os_str().as_bytes().to_vec(),
1100 })
1101 .await?;
1102 let entry = response
1103 .entry
1104 .ok_or_else(|| anyhow!("missing entry in response"))?;
1105 worktree
1106 .update(&mut cx, |worktree, cx| {
1107 worktree.as_remote().unwrap().insert_entry(
1108 entry,
1109 response.worktree_scan_id as usize,
1110 cx,
1111 )
1112 })
1113 .await
1114 }))
1115 }
1116 }
1117
1118 pub fn delete_entry(
1119 &mut self,
1120 entry_id: ProjectEntryId,
1121 cx: &mut ModelContext<Self>,
1122 ) -> Option<Task<Result<()>>> {
1123 let worktree = self.worktree_for_entry(entry_id, cx)?;
1124 if self.is_local() {
1125 worktree.update(cx, |worktree, cx| {
1126 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1127 })
1128 } else {
1129 let client = self.client.clone();
1130 let project_id = self.remote_id().unwrap();
1131 Some(cx.spawn_weak(|_, mut cx| async move {
1132 let response = client
1133 .request(proto::DeleteProjectEntry {
1134 project_id,
1135 entry_id: entry_id.to_proto(),
1136 })
1137 .await?;
1138 worktree
1139 .update(&mut cx, move |worktree, cx| {
1140 worktree.as_remote().unwrap().delete_entry(
1141 entry_id,
1142 response.worktree_scan_id as usize,
1143 cx,
1144 )
1145 })
1146 .await
1147 }))
1148 }
1149 }
1150
1151 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1152 let project_id;
1153 if let ProjectClientState::Local {
1154 remote_id_rx,
1155 is_shared,
1156 ..
1157 } = &mut self.client_state
1158 {
1159 if *is_shared {
1160 return Task::ready(Ok(()));
1161 }
1162 *is_shared = true;
1163 if let Some(id) = *remote_id_rx.borrow() {
1164 project_id = id;
1165 } else {
1166 return Task::ready(Err(anyhow!("project hasn't been registered")));
1167 }
1168 } else {
1169 return Task::ready(Err(anyhow!("can't share a remote project")));
1170 };
1171
1172 for open_buffer in self.opened_buffers.values_mut() {
1173 match open_buffer {
1174 OpenBuffer::Strong(_) => {}
1175 OpenBuffer::Weak(buffer) => {
1176 if let Some(buffer) = buffer.upgrade(cx) {
1177 *open_buffer = OpenBuffer::Strong(buffer);
1178 }
1179 }
1180 OpenBuffer::Loading(_) => unreachable!(),
1181 }
1182 }
1183
1184 for worktree_handle in self.worktrees.iter_mut() {
1185 match worktree_handle {
1186 WorktreeHandle::Strong(_) => {}
1187 WorktreeHandle::Weak(worktree) => {
1188 if let Some(worktree) = worktree.upgrade(cx) {
1189 *worktree_handle = WorktreeHandle::Strong(worktree);
1190 }
1191 }
1192 }
1193 }
1194
1195 let mut tasks = Vec::new();
1196 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1197 worktree.update(cx, |worktree, cx| {
1198 let worktree = worktree.as_local_mut().unwrap();
1199 tasks.push(worktree.share(project_id, cx));
1200 });
1201 }
1202
1203 cx.spawn(|this, mut cx| async move {
1204 for task in tasks {
1205 task.await?;
1206 }
1207 this.update(&mut cx, |_, cx| cx.notify());
1208 Ok(())
1209 })
1210 }
1211
1212 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1213 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1214 if !*is_shared {
1215 return;
1216 }
1217
1218 *is_shared = false;
1219 self.collaborators.clear();
1220 self.shared_buffers.clear();
1221 for worktree_handle in self.worktrees.iter_mut() {
1222 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1223 let is_visible = worktree.update(cx, |worktree, _| {
1224 worktree.as_local_mut().unwrap().unshare();
1225 worktree.is_visible()
1226 });
1227 if !is_visible {
1228 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1229 }
1230 }
1231 }
1232
1233 for open_buffer in self.opened_buffers.values_mut() {
1234 match open_buffer {
1235 OpenBuffer::Strong(buffer) => {
1236 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1237 }
1238 _ => {}
1239 }
1240 }
1241
1242 cx.notify();
1243 } else {
1244 log::error!("attempted to unshare a remote project");
1245 }
1246 }
1247
1248 pub fn respond_to_join_request(
1249 &mut self,
1250 requester_id: u64,
1251 allow: bool,
1252 cx: &mut ModelContext<Self>,
1253 ) {
1254 if let Some(project_id) = self.remote_id() {
1255 let share = self.share(cx);
1256 let client = self.client.clone();
1257 cx.foreground()
1258 .spawn(async move {
1259 share.await?;
1260 client.send(proto::RespondToJoinProjectRequest {
1261 requester_id,
1262 project_id,
1263 allow,
1264 })
1265 })
1266 .detach_and_log_err(cx);
1267 }
1268 }
1269
1270 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1271 if let ProjectClientState::Remote {
1272 sharing_has_stopped,
1273 ..
1274 } = &mut self.client_state
1275 {
1276 *sharing_has_stopped = true;
1277 self.collaborators.clear();
1278 cx.notify();
1279 }
1280 }
1281
1282 pub fn is_read_only(&self) -> bool {
1283 match &self.client_state {
1284 ProjectClientState::Local { .. } => false,
1285 ProjectClientState::Remote {
1286 sharing_has_stopped,
1287 ..
1288 } => *sharing_has_stopped,
1289 }
1290 }
1291
1292 pub fn is_local(&self) -> bool {
1293 match &self.client_state {
1294 ProjectClientState::Local { .. } => true,
1295 ProjectClientState::Remote { .. } => false,
1296 }
1297 }
1298
1299 pub fn is_remote(&self) -> bool {
1300 !self.is_local()
1301 }
1302
1303 pub fn create_buffer(
1304 &mut self,
1305 text: &str,
1306 language: Option<Arc<Language>>,
1307 cx: &mut ModelContext<Self>,
1308 ) -> Result<ModelHandle<Buffer>> {
1309 if self.is_remote() {
1310 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1311 }
1312
1313 let buffer = cx.add_model(|cx| {
1314 Buffer::new(self.replica_id(), text, cx)
1315 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1316 });
1317 self.register_buffer(&buffer, cx)?;
1318 Ok(buffer)
1319 }
1320
1321 pub fn open_path(
1322 &mut self,
1323 path: impl Into<ProjectPath>,
1324 cx: &mut ModelContext<Self>,
1325 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1326 let task = self.open_buffer(path, cx);
1327 cx.spawn_weak(|_, cx| async move {
1328 let buffer = task.await?;
1329 let project_entry_id = buffer
1330 .read_with(&cx, |buffer, cx| {
1331 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1332 })
1333 .ok_or_else(|| anyhow!("no project entry"))?;
1334 Ok((project_entry_id, buffer.into()))
1335 })
1336 }
1337
1338 pub fn open_local_buffer(
1339 &mut self,
1340 abs_path: impl AsRef<Path>,
1341 cx: &mut ModelContext<Self>,
1342 ) -> Task<Result<ModelHandle<Buffer>>> {
1343 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1344 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1345 } else {
1346 Task::ready(Err(anyhow!("no such path")))
1347 }
1348 }
1349
1350 pub fn open_buffer(
1351 &mut self,
1352 path: impl Into<ProjectPath>,
1353 cx: &mut ModelContext<Self>,
1354 ) -> Task<Result<ModelHandle<Buffer>>> {
1355 let project_path = path.into();
1356 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1357 worktree
1358 } else {
1359 return Task::ready(Err(anyhow!("no such worktree")));
1360 };
1361
1362 // If there is already a buffer for the given path, then return it.
1363 let existing_buffer = self.get_open_buffer(&project_path, cx);
1364 if let Some(existing_buffer) = existing_buffer {
1365 return Task::ready(Ok(existing_buffer));
1366 }
1367
1368 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1369 // If the given path is already being loaded, then wait for that existing
1370 // task to complete and return the same buffer.
1371 hash_map::Entry::Occupied(e) => e.get().clone(),
1372
1373 // Otherwise, record the fact that this path is now being loaded.
1374 hash_map::Entry::Vacant(entry) => {
1375 let (mut tx, rx) = postage::watch::channel();
1376 entry.insert(rx.clone());
1377
1378 let load_buffer = if worktree.read(cx).is_local() {
1379 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1380 } else {
1381 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1382 };
1383
1384 cx.spawn(move |this, mut cx| async move {
1385 let load_result = load_buffer.await;
1386 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1387 // Record the fact that the buffer is no longer loading.
1388 this.loading_buffers.remove(&project_path);
1389 let buffer = load_result.map_err(Arc::new)?;
1390 Ok(buffer)
1391 }));
1392 })
1393 .detach();
1394 rx
1395 }
1396 };
1397
1398 cx.foreground().spawn(async move {
1399 loop {
1400 if let Some(result) = loading_watch.borrow().as_ref() {
1401 match result {
1402 Ok(buffer) => return Ok(buffer.clone()),
1403 Err(error) => return Err(anyhow!("{}", error)),
1404 }
1405 }
1406 loading_watch.next().await;
1407 }
1408 })
1409 }
1410
1411 fn open_local_buffer_internal(
1412 &mut self,
1413 path: &Arc<Path>,
1414 worktree: &ModelHandle<Worktree>,
1415 cx: &mut ModelContext<Self>,
1416 ) -> Task<Result<ModelHandle<Buffer>>> {
1417 let load_buffer = worktree.update(cx, |worktree, cx| {
1418 let worktree = worktree.as_local_mut().unwrap();
1419 worktree.load_buffer(path, cx)
1420 });
1421 cx.spawn(|this, mut cx| async move {
1422 let buffer = load_buffer.await?;
1423 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1424 Ok(buffer)
1425 })
1426 }
1427
1428 fn open_remote_buffer_internal(
1429 &mut self,
1430 path: &Arc<Path>,
1431 worktree: &ModelHandle<Worktree>,
1432 cx: &mut ModelContext<Self>,
1433 ) -> Task<Result<ModelHandle<Buffer>>> {
1434 let rpc = self.client.clone();
1435 let project_id = self.remote_id().unwrap();
1436 let remote_worktree_id = worktree.read(cx).id();
1437 let path = path.clone();
1438 let path_string = path.to_string_lossy().to_string();
1439 cx.spawn(|this, mut cx| async move {
1440 let response = rpc
1441 .request(proto::OpenBufferByPath {
1442 project_id,
1443 worktree_id: remote_worktree_id.to_proto(),
1444 path: path_string,
1445 })
1446 .await?;
1447 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1448 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1449 .await
1450 })
1451 }
1452
1453 fn open_local_buffer_via_lsp(
1454 &mut self,
1455 abs_path: lsp::Url,
1456 lsp_adapter: Arc<dyn LspAdapter>,
1457 lsp_server: Arc<LanguageServer>,
1458 cx: &mut ModelContext<Self>,
1459 ) -> Task<Result<ModelHandle<Buffer>>> {
1460 cx.spawn(|this, mut cx| async move {
1461 let abs_path = abs_path
1462 .to_file_path()
1463 .map_err(|_| anyhow!("can't convert URI to path"))?;
1464 let (worktree, relative_path) = if let Some(result) =
1465 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1466 {
1467 result
1468 } else {
1469 let worktree = this
1470 .update(&mut cx, |this, cx| {
1471 this.create_local_worktree(&abs_path, false, cx)
1472 })
1473 .await?;
1474 this.update(&mut cx, |this, cx| {
1475 this.language_servers.insert(
1476 (worktree.read(cx).id(), lsp_adapter.name()),
1477 (lsp_adapter, lsp_server),
1478 );
1479 });
1480 (worktree, PathBuf::new())
1481 };
1482
1483 let project_path = ProjectPath {
1484 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1485 path: relative_path.into(),
1486 };
1487 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1488 .await
1489 })
1490 }
1491
1492 pub fn open_buffer_by_id(
1493 &mut self,
1494 id: u64,
1495 cx: &mut ModelContext<Self>,
1496 ) -> Task<Result<ModelHandle<Buffer>>> {
1497 if let Some(buffer) = self.buffer_for_id(id, cx) {
1498 Task::ready(Ok(buffer))
1499 } else if self.is_local() {
1500 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1501 } else if let Some(project_id) = self.remote_id() {
1502 let request = self
1503 .client
1504 .request(proto::OpenBufferById { project_id, id });
1505 cx.spawn(|this, mut cx| async move {
1506 let buffer = request
1507 .await?
1508 .buffer
1509 .ok_or_else(|| anyhow!("invalid buffer"))?;
1510 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1511 .await
1512 })
1513 } else {
1514 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1515 }
1516 }
1517
1518 pub fn save_buffer_as(
1519 &mut self,
1520 buffer: ModelHandle<Buffer>,
1521 abs_path: PathBuf,
1522 cx: &mut ModelContext<Project>,
1523 ) -> Task<Result<()>> {
1524 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1525 let old_path =
1526 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1527 cx.spawn(|this, mut cx| async move {
1528 if let Some(old_path) = old_path {
1529 this.update(&mut cx, |this, cx| {
1530 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1531 });
1532 }
1533 let (worktree, path) = worktree_task.await?;
1534 worktree
1535 .update(&mut cx, |worktree, cx| {
1536 worktree
1537 .as_local_mut()
1538 .unwrap()
1539 .save_buffer_as(buffer.clone(), path, cx)
1540 })
1541 .await?;
1542 this.update(&mut cx, |this, cx| {
1543 this.assign_language_to_buffer(&buffer, cx);
1544 this.register_buffer_with_language_server(&buffer, cx);
1545 });
1546 Ok(())
1547 })
1548 }
1549
1550 pub fn get_open_buffer(
1551 &mut self,
1552 path: &ProjectPath,
1553 cx: &mut ModelContext<Self>,
1554 ) -> Option<ModelHandle<Buffer>> {
1555 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1556 self.opened_buffers.values().find_map(|buffer| {
1557 let buffer = buffer.upgrade(cx)?;
1558 let file = File::from_dyn(buffer.read(cx).file())?;
1559 if file.worktree == worktree && file.path() == &path.path {
1560 Some(buffer)
1561 } else {
1562 None
1563 }
1564 })
1565 }
1566
1567 fn register_buffer(
1568 &mut self,
1569 buffer: &ModelHandle<Buffer>,
1570 cx: &mut ModelContext<Self>,
1571 ) -> Result<()> {
1572 let remote_id = buffer.read(cx).remote_id();
1573 let open_buffer = if self.is_remote() || self.is_shared() {
1574 OpenBuffer::Strong(buffer.clone())
1575 } else {
1576 OpenBuffer::Weak(buffer.downgrade())
1577 };
1578
1579 match self.opened_buffers.insert(remote_id, open_buffer) {
1580 None => {}
1581 Some(OpenBuffer::Loading(operations)) => {
1582 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1583 }
1584 Some(OpenBuffer::Weak(existing_handle)) => {
1585 if existing_handle.upgrade(cx).is_some() {
1586 Err(anyhow!(
1587 "already registered buffer with remote id {}",
1588 remote_id
1589 ))?
1590 }
1591 }
1592 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1593 "already registered buffer with remote id {}",
1594 remote_id
1595 ))?,
1596 }
1597 cx.subscribe(buffer, |this, buffer, event, cx| {
1598 this.on_buffer_event(buffer, event, cx);
1599 })
1600 .detach();
1601
1602 self.assign_language_to_buffer(buffer, cx);
1603 self.register_buffer_with_language_server(buffer, cx);
1604 cx.observe_release(buffer, |this, buffer, cx| {
1605 if let Some(file) = File::from_dyn(buffer.file()) {
1606 if file.is_local() {
1607 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1608 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1609 server
1610 .notify::<lsp::notification::DidCloseTextDocument>(
1611 lsp::DidCloseTextDocumentParams {
1612 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1613 },
1614 )
1615 .log_err();
1616 }
1617 }
1618 }
1619 })
1620 .detach();
1621
1622 Ok(())
1623 }
1624
1625 fn register_buffer_with_language_server(
1626 &mut self,
1627 buffer_handle: &ModelHandle<Buffer>,
1628 cx: &mut ModelContext<Self>,
1629 ) {
1630 let buffer = buffer_handle.read(cx);
1631 let buffer_id = buffer.remote_id();
1632 if let Some(file) = File::from_dyn(buffer.file()) {
1633 if file.is_local() {
1634 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1635 let initial_snapshot = buffer.text_snapshot();
1636
1637 let mut language_server = None;
1638 let mut language_id = None;
1639 if let Some(language) = buffer.language() {
1640 let worktree_id = file.worktree_id(cx);
1641 if let Some(adapter) = language.lsp_adapter() {
1642 language_id = adapter.id_for_language(language.name().as_ref());
1643 language_server = self
1644 .language_servers
1645 .get(&(worktree_id, adapter.name()))
1646 .cloned();
1647 }
1648 }
1649
1650 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1651 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1652 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1653 .log_err();
1654 }
1655 }
1656
1657 if let Some((_, server)) = language_server {
1658 server
1659 .notify::<lsp::notification::DidOpenTextDocument>(
1660 lsp::DidOpenTextDocumentParams {
1661 text_document: lsp::TextDocumentItem::new(
1662 uri,
1663 language_id.unwrap_or_default(),
1664 0,
1665 initial_snapshot.text(),
1666 ),
1667 }
1668 .clone(),
1669 )
1670 .log_err();
1671 buffer_handle.update(cx, |buffer, cx| {
1672 buffer.set_completion_triggers(
1673 server
1674 .capabilities()
1675 .completion_provider
1676 .as_ref()
1677 .and_then(|provider| provider.trigger_characters.clone())
1678 .unwrap_or(Vec::new()),
1679 cx,
1680 )
1681 });
1682 self.buffer_snapshots
1683 .insert(buffer_id, vec![(0, initial_snapshot)]);
1684 }
1685 }
1686 }
1687 }
1688
1689 fn unregister_buffer_from_language_server(
1690 &mut self,
1691 buffer: &ModelHandle<Buffer>,
1692 old_path: PathBuf,
1693 cx: &mut ModelContext<Self>,
1694 ) {
1695 buffer.update(cx, |buffer, cx| {
1696 buffer.update_diagnostics(Default::default(), cx);
1697 self.buffer_snapshots.remove(&buffer.remote_id());
1698 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1699 language_server
1700 .notify::<lsp::notification::DidCloseTextDocument>(
1701 lsp::DidCloseTextDocumentParams {
1702 text_document: lsp::TextDocumentIdentifier::new(
1703 lsp::Url::from_file_path(old_path).unwrap(),
1704 ),
1705 },
1706 )
1707 .log_err();
1708 }
1709 });
1710 }
1711
1712 fn on_buffer_event(
1713 &mut self,
1714 buffer: ModelHandle<Buffer>,
1715 event: &BufferEvent,
1716 cx: &mut ModelContext<Self>,
1717 ) -> Option<()> {
1718 match event {
1719 BufferEvent::Operation(operation) => {
1720 if let Some(project_id) = self.shared_remote_id() {
1721 let request = self.client.request(proto::UpdateBuffer {
1722 project_id,
1723 buffer_id: buffer.read(cx).remote_id(),
1724 operations: vec![language::proto::serialize_operation(&operation)],
1725 });
1726 cx.background().spawn(request).detach_and_log_err(cx);
1727 }
1728 }
1729 BufferEvent::Edited { .. } => {
1730 let (_, language_server) = self
1731 .language_server_for_buffer(buffer.read(cx), cx)?
1732 .clone();
1733 let buffer = buffer.read(cx);
1734 let file = File::from_dyn(buffer.file())?;
1735 let abs_path = file.as_local()?.abs_path(cx);
1736 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1737 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1738 let (version, prev_snapshot) = buffer_snapshots.last()?;
1739 let next_snapshot = buffer.text_snapshot();
1740 let next_version = version + 1;
1741
1742 let content_changes = buffer
1743 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1744 .map(|edit| {
1745 let edit_start = edit.new.start.0;
1746 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1747 let new_text = next_snapshot
1748 .text_for_range(edit.new.start.1..edit.new.end.1)
1749 .collect();
1750 lsp::TextDocumentContentChangeEvent {
1751 range: Some(lsp::Range::new(
1752 point_to_lsp(edit_start),
1753 point_to_lsp(edit_end),
1754 )),
1755 range_length: None,
1756 text: new_text,
1757 }
1758 })
1759 .collect();
1760
1761 buffer_snapshots.push((next_version, next_snapshot));
1762
1763 language_server
1764 .notify::<lsp::notification::DidChangeTextDocument>(
1765 lsp::DidChangeTextDocumentParams {
1766 text_document: lsp::VersionedTextDocumentIdentifier::new(
1767 uri,
1768 next_version,
1769 ),
1770 content_changes,
1771 },
1772 )
1773 .log_err();
1774 }
1775 BufferEvent::Saved => {
1776 let file = File::from_dyn(buffer.read(cx).file())?;
1777 let worktree_id = file.worktree_id(cx);
1778 let abs_path = file.as_local()?.abs_path(cx);
1779 let text_document = lsp::TextDocumentIdentifier {
1780 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1781 };
1782
1783 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1784 server
1785 .notify::<lsp::notification::DidSaveTextDocument>(
1786 lsp::DidSaveTextDocumentParams {
1787 text_document: text_document.clone(),
1788 text: None,
1789 },
1790 )
1791 .log_err();
1792 }
1793
1794 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1795 // that don't support a disk-based progress token.
1796 let (lsp_adapter, language_server) =
1797 self.language_server_for_buffer(buffer.read(cx), cx)?;
1798 if lsp_adapter
1799 .disk_based_diagnostics_progress_token()
1800 .is_none()
1801 {
1802 let server_id = language_server.server_id();
1803 self.disk_based_diagnostics_finished(server_id, cx);
1804 self.broadcast_language_server_update(
1805 server_id,
1806 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1807 proto::LspDiskBasedDiagnosticsUpdated {},
1808 ),
1809 );
1810 }
1811 }
1812 _ => {}
1813 }
1814
1815 None
1816 }
1817
1818 fn language_servers_for_worktree(
1819 &self,
1820 worktree_id: WorktreeId,
1821 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1822 self.language_servers.iter().filter_map(
1823 move |((language_server_worktree_id, _), server)| {
1824 if *language_server_worktree_id == worktree_id {
1825 Some(server)
1826 } else {
1827 None
1828 }
1829 },
1830 )
1831 }
1832
1833 fn assign_language_to_buffer(
1834 &mut self,
1835 buffer: &ModelHandle<Buffer>,
1836 cx: &mut ModelContext<Self>,
1837 ) -> Option<()> {
1838 // If the buffer has a language, set it and start the language server if we haven't already.
1839 let full_path = buffer.read(cx).file()?.full_path(cx);
1840 let language = self.languages.select_language(&full_path)?;
1841 buffer.update(cx, |buffer, cx| {
1842 buffer.set_language(Some(language.clone()), cx);
1843 });
1844
1845 let file = File::from_dyn(buffer.read(cx).file())?;
1846 let worktree = file.worktree.read(cx).as_local()?;
1847 let worktree_id = worktree.id();
1848 let worktree_abs_path = worktree.abs_path().clone();
1849 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1850
1851 None
1852 }
1853
1854 fn start_language_server(
1855 &mut self,
1856 worktree_id: WorktreeId,
1857 worktree_path: Arc<Path>,
1858 language: Arc<Language>,
1859 cx: &mut ModelContext<Self>,
1860 ) {
1861 let adapter = if let Some(adapter) = language.lsp_adapter() {
1862 adapter
1863 } else {
1864 return;
1865 };
1866 let key = (worktree_id, adapter.name());
1867 self.started_language_servers
1868 .entry(key.clone())
1869 .or_insert_with(|| {
1870 let server_id = post_inc(&mut self.next_language_server_id);
1871 let language_server = self.languages.start_language_server(
1872 server_id,
1873 language.clone(),
1874 worktree_path,
1875 self.client.http_client(),
1876 cx,
1877 );
1878 cx.spawn_weak(|this, mut cx| async move {
1879 let language_server = language_server?.await.log_err()?;
1880 let language_server = language_server
1881 .initialize(adapter.initialization_options())
1882 .await
1883 .log_err()?;
1884 let this = this.upgrade(&cx)?;
1885 let disk_based_diagnostics_progress_token =
1886 adapter.disk_based_diagnostics_progress_token();
1887
1888 language_server
1889 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1890 let this = this.downgrade();
1891 let adapter = adapter.clone();
1892 move |params, mut cx| {
1893 if let Some(this) = this.upgrade(&cx) {
1894 this.update(&mut cx, |this, cx| {
1895 this.on_lsp_diagnostics_published(
1896 server_id, params, &adapter, cx,
1897 );
1898 });
1899 }
1900 }
1901 })
1902 .detach();
1903
1904 language_server
1905 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1906 let settings = this
1907 .read_with(&cx, |this, _| this.language_server_settings.clone());
1908 move |params, _| {
1909 let settings = settings.lock().clone();
1910 async move {
1911 Ok(params
1912 .items
1913 .into_iter()
1914 .map(|item| {
1915 if let Some(section) = &item.section {
1916 settings
1917 .get(section)
1918 .cloned()
1919 .unwrap_or(serde_json::Value::Null)
1920 } else {
1921 settings.clone()
1922 }
1923 })
1924 .collect())
1925 }
1926 }
1927 })
1928 .detach();
1929
1930 // Even though we don't have handling for these requests, respond to them to
1931 // avoid stalling any language server like `gopls` which waits for a response
1932 // to these requests when initializing.
1933 language_server
1934 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
1935 Ok(())
1936 })
1937 .detach();
1938 language_server
1939 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
1940 Ok(())
1941 })
1942 .detach();
1943
1944 language_server
1945 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1946 let this = this.downgrade();
1947 let adapter = adapter.clone();
1948 let language_server = language_server.clone();
1949 move |params, cx| {
1950 Self::on_lsp_workspace_edit(
1951 this,
1952 params,
1953 server_id,
1954 adapter.clone(),
1955 language_server.clone(),
1956 cx,
1957 )
1958 }
1959 })
1960 .detach();
1961
1962 language_server
1963 .on_notification::<lsp::notification::Progress, _>({
1964 let this = this.downgrade();
1965 move |params, mut cx| {
1966 if let Some(this) = this.upgrade(&cx) {
1967 this.update(&mut cx, |this, cx| {
1968 this.on_lsp_progress(
1969 params,
1970 server_id,
1971 disk_based_diagnostics_progress_token,
1972 cx,
1973 );
1974 });
1975 }
1976 }
1977 })
1978 .detach();
1979
1980 this.update(&mut cx, |this, cx| {
1981 this.language_servers
1982 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1983 this.language_server_statuses.insert(
1984 server_id,
1985 LanguageServerStatus {
1986 name: language_server.name().to_string(),
1987 pending_work: Default::default(),
1988 pending_diagnostic_updates: 0,
1989 },
1990 );
1991 language_server
1992 .notify::<lsp::notification::DidChangeConfiguration>(
1993 lsp::DidChangeConfigurationParams {
1994 settings: this.language_server_settings.lock().clone(),
1995 },
1996 )
1997 .ok();
1998
1999 if let Some(project_id) = this.shared_remote_id() {
2000 this.client
2001 .send(proto::StartLanguageServer {
2002 project_id,
2003 server: Some(proto::LanguageServer {
2004 id: server_id as u64,
2005 name: language_server.name().to_string(),
2006 }),
2007 })
2008 .log_err();
2009 }
2010
2011 // Tell the language server about every open buffer in the worktree that matches the language.
2012 for buffer in this.opened_buffers.values() {
2013 if let Some(buffer_handle) = buffer.upgrade(cx) {
2014 let buffer = buffer_handle.read(cx);
2015 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2016 file
2017 } else {
2018 continue;
2019 };
2020 let language = if let Some(language) = buffer.language() {
2021 language
2022 } else {
2023 continue;
2024 };
2025 if file.worktree.read(cx).id() != key.0
2026 || language.lsp_adapter().map(|a| a.name())
2027 != Some(key.1.clone())
2028 {
2029 continue;
2030 }
2031
2032 let file = file.as_local()?;
2033 let versions = this
2034 .buffer_snapshots
2035 .entry(buffer.remote_id())
2036 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2037 let (version, initial_snapshot) = versions.last().unwrap();
2038 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2039 let language_id = adapter.id_for_language(language.name().as_ref());
2040 language_server
2041 .notify::<lsp::notification::DidOpenTextDocument>(
2042 lsp::DidOpenTextDocumentParams {
2043 text_document: lsp::TextDocumentItem::new(
2044 uri,
2045 language_id.unwrap_or_default(),
2046 *version,
2047 initial_snapshot.text(),
2048 ),
2049 },
2050 )
2051 .log_err()?;
2052 buffer_handle.update(cx, |buffer, cx| {
2053 buffer.set_completion_triggers(
2054 language_server
2055 .capabilities()
2056 .completion_provider
2057 .as_ref()
2058 .and_then(|provider| {
2059 provider.trigger_characters.clone()
2060 })
2061 .unwrap_or(Vec::new()),
2062 cx,
2063 )
2064 });
2065 }
2066 }
2067
2068 cx.notify();
2069 Some(())
2070 });
2071
2072 Some(language_server)
2073 })
2074 });
2075 }
2076
2077 pub fn restart_language_servers_for_buffers(
2078 &mut self,
2079 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2080 cx: &mut ModelContext<Self>,
2081 ) -> Option<()> {
2082 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2083 .into_iter()
2084 .filter_map(|buffer| {
2085 let file = File::from_dyn(buffer.read(cx).file())?;
2086 let worktree = file.worktree.read(cx).as_local()?;
2087 let worktree_id = worktree.id();
2088 let worktree_abs_path = worktree.abs_path().clone();
2089 let full_path = file.full_path(cx);
2090 Some((worktree_id, worktree_abs_path, full_path))
2091 })
2092 .collect();
2093 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2094 let language = self.languages.select_language(&full_path)?;
2095 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2096 }
2097
2098 None
2099 }
2100
2101 fn restart_language_server(
2102 &mut self,
2103 worktree_id: WorktreeId,
2104 worktree_path: Arc<Path>,
2105 language: Arc<Language>,
2106 cx: &mut ModelContext<Self>,
2107 ) {
2108 let adapter = if let Some(adapter) = language.lsp_adapter() {
2109 adapter
2110 } else {
2111 return;
2112 };
2113 let key = (worktree_id, adapter.name());
2114 let server_to_shutdown = self.language_servers.remove(&key);
2115 self.started_language_servers.remove(&key);
2116 server_to_shutdown
2117 .as_ref()
2118 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
2119 cx.spawn_weak(|this, mut cx| async move {
2120 if let Some(this) = this.upgrade(&cx) {
2121 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2122 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2123 shutdown_task.await;
2124 }
2125 }
2126
2127 this.update(&mut cx, |this, cx| {
2128 this.start_language_server(worktree_id, worktree_path, language, cx);
2129 });
2130 }
2131 })
2132 .detach();
2133 }
2134
2135 fn on_lsp_diagnostics_published(
2136 &mut self,
2137 server_id: usize,
2138 mut params: lsp::PublishDiagnosticsParams,
2139 adapter: &Arc<dyn LspAdapter>,
2140 cx: &mut ModelContext<Self>,
2141 ) {
2142 adapter.process_diagnostics(&mut params);
2143 self.update_diagnostics(
2144 server_id,
2145 params,
2146 adapter.disk_based_diagnostic_sources(),
2147 cx,
2148 )
2149 .log_err();
2150 }
2151
2152 fn on_lsp_progress(
2153 &mut self,
2154 progress: lsp::ProgressParams,
2155 server_id: usize,
2156 disk_based_diagnostics_progress_token: Option<&str>,
2157 cx: &mut ModelContext<Self>,
2158 ) {
2159 let token = match progress.token {
2160 lsp::NumberOrString::String(token) => token,
2161 lsp::NumberOrString::Number(token) => {
2162 log::info!("skipping numeric progress token {}", token);
2163 return;
2164 }
2165 };
2166 let progress = match progress.value {
2167 lsp::ProgressParamsValue::WorkDone(value) => value,
2168 };
2169 let language_server_status =
2170 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2171 status
2172 } else {
2173 return;
2174 };
2175 match progress {
2176 lsp::WorkDoneProgress::Begin(report) => {
2177 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2178 language_server_status.pending_diagnostic_updates += 1;
2179 if language_server_status.pending_diagnostic_updates == 1 {
2180 self.disk_based_diagnostics_started(server_id, cx);
2181 self.broadcast_language_server_update(
2182 server_id,
2183 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2184 proto::LspDiskBasedDiagnosticsUpdating {},
2185 ),
2186 );
2187 }
2188 } else {
2189 self.on_lsp_work_start(
2190 server_id,
2191 token.clone(),
2192 LanguageServerProgress {
2193 message: report.message.clone(),
2194 percentage: report.percentage.map(|p| p as usize),
2195 last_update_at: Instant::now(),
2196 },
2197 cx,
2198 );
2199 self.broadcast_language_server_update(
2200 server_id,
2201 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2202 token,
2203 message: report.message,
2204 percentage: report.percentage.map(|p| p as u32),
2205 }),
2206 );
2207 }
2208 }
2209 lsp::WorkDoneProgress::Report(report) => {
2210 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2211 self.on_lsp_work_progress(
2212 server_id,
2213 token.clone(),
2214 LanguageServerProgress {
2215 message: report.message.clone(),
2216 percentage: report.percentage.map(|p| p as usize),
2217 last_update_at: Instant::now(),
2218 },
2219 cx,
2220 );
2221 self.broadcast_language_server_update(
2222 server_id,
2223 proto::update_language_server::Variant::WorkProgress(
2224 proto::LspWorkProgress {
2225 token,
2226 message: report.message,
2227 percentage: report.percentage.map(|p| p as u32),
2228 },
2229 ),
2230 );
2231 }
2232 }
2233 lsp::WorkDoneProgress::End(_) => {
2234 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2235 language_server_status.pending_diagnostic_updates -= 1;
2236 if language_server_status.pending_diagnostic_updates == 0 {
2237 self.disk_based_diagnostics_finished(server_id, cx);
2238 self.broadcast_language_server_update(
2239 server_id,
2240 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2241 proto::LspDiskBasedDiagnosticsUpdated {},
2242 ),
2243 );
2244 }
2245 } else {
2246 self.on_lsp_work_end(server_id, token.clone(), cx);
2247 self.broadcast_language_server_update(
2248 server_id,
2249 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2250 token,
2251 }),
2252 );
2253 }
2254 }
2255 }
2256 }
2257
2258 fn on_lsp_work_start(
2259 &mut self,
2260 language_server_id: usize,
2261 token: String,
2262 progress: LanguageServerProgress,
2263 cx: &mut ModelContext<Self>,
2264 ) {
2265 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2266 status.pending_work.insert(token, progress);
2267 cx.notify();
2268 }
2269 }
2270
2271 fn on_lsp_work_progress(
2272 &mut self,
2273 language_server_id: usize,
2274 token: String,
2275 progress: LanguageServerProgress,
2276 cx: &mut ModelContext<Self>,
2277 ) {
2278 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2279 let entry = status
2280 .pending_work
2281 .entry(token)
2282 .or_insert(LanguageServerProgress {
2283 message: Default::default(),
2284 percentage: Default::default(),
2285 last_update_at: progress.last_update_at,
2286 });
2287 if progress.message.is_some() {
2288 entry.message = progress.message;
2289 }
2290 if progress.percentage.is_some() {
2291 entry.percentage = progress.percentage;
2292 }
2293 entry.last_update_at = progress.last_update_at;
2294 cx.notify();
2295 }
2296 }
2297
2298 fn on_lsp_work_end(
2299 &mut self,
2300 language_server_id: usize,
2301 token: String,
2302 cx: &mut ModelContext<Self>,
2303 ) {
2304 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2305 status.pending_work.remove(&token);
2306 cx.notify();
2307 }
2308 }
2309
2310 async fn on_lsp_workspace_edit(
2311 this: WeakModelHandle<Self>,
2312 params: lsp::ApplyWorkspaceEditParams,
2313 server_id: usize,
2314 adapter: Arc<dyn LspAdapter>,
2315 language_server: Arc<LanguageServer>,
2316 mut cx: AsyncAppContext,
2317 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2318 let this = this
2319 .upgrade(&cx)
2320 .ok_or_else(|| anyhow!("project project closed"))?;
2321 let transaction = Self::deserialize_workspace_edit(
2322 this.clone(),
2323 params.edit,
2324 true,
2325 adapter.clone(),
2326 language_server.clone(),
2327 &mut cx,
2328 )
2329 .await
2330 .log_err();
2331 this.update(&mut cx, |this, _| {
2332 if let Some(transaction) = transaction {
2333 this.last_workspace_edits_by_language_server
2334 .insert(server_id, transaction);
2335 }
2336 });
2337 Ok(lsp::ApplyWorkspaceEditResponse {
2338 applied: true,
2339 failed_change: None,
2340 failure_reason: None,
2341 })
2342 }
2343
2344 fn broadcast_language_server_update(
2345 &self,
2346 language_server_id: usize,
2347 event: proto::update_language_server::Variant,
2348 ) {
2349 if let Some(project_id) = self.shared_remote_id() {
2350 self.client
2351 .send(proto::UpdateLanguageServer {
2352 project_id,
2353 language_server_id: language_server_id as u64,
2354 variant: Some(event),
2355 })
2356 .log_err();
2357 }
2358 }
2359
2360 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2361 for (_, server) in self.language_servers.values() {
2362 server
2363 .notify::<lsp::notification::DidChangeConfiguration>(
2364 lsp::DidChangeConfigurationParams {
2365 settings: settings.clone(),
2366 },
2367 )
2368 .ok();
2369 }
2370 *self.language_server_settings.lock() = settings;
2371 }
2372
2373 pub fn language_server_statuses(
2374 &self,
2375 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2376 self.language_server_statuses.values()
2377 }
2378
2379 pub fn update_diagnostics(
2380 &mut self,
2381 language_server_id: usize,
2382 params: lsp::PublishDiagnosticsParams,
2383 disk_based_sources: &[&str],
2384 cx: &mut ModelContext<Self>,
2385 ) -> Result<()> {
2386 let abs_path = params
2387 .uri
2388 .to_file_path()
2389 .map_err(|_| anyhow!("URI is not a file"))?;
2390 let mut diagnostics = Vec::default();
2391 let mut primary_diagnostic_group_ids = HashMap::default();
2392 let mut sources_by_group_id = HashMap::default();
2393 let mut supporting_diagnostics = HashMap::default();
2394 for diagnostic in ¶ms.diagnostics {
2395 let source = diagnostic.source.as_ref();
2396 let code = diagnostic.code.as_ref().map(|code| match code {
2397 lsp::NumberOrString::Number(code) => code.to_string(),
2398 lsp::NumberOrString::String(code) => code.clone(),
2399 });
2400 let range = range_from_lsp(diagnostic.range);
2401 let is_supporting = diagnostic
2402 .related_information
2403 .as_ref()
2404 .map_or(false, |infos| {
2405 infos.iter().any(|info| {
2406 primary_diagnostic_group_ids.contains_key(&(
2407 source,
2408 code.clone(),
2409 range_from_lsp(info.location.range),
2410 ))
2411 })
2412 });
2413
2414 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2415 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2416 });
2417
2418 if is_supporting {
2419 supporting_diagnostics.insert(
2420 (source, code.clone(), range),
2421 (diagnostic.severity, is_unnecessary),
2422 );
2423 } else {
2424 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2425 let is_disk_based = source.map_or(false, |source| {
2426 disk_based_sources.contains(&source.as_str())
2427 });
2428
2429 sources_by_group_id.insert(group_id, source);
2430 primary_diagnostic_group_ids
2431 .insert((source, code.clone(), range.clone()), group_id);
2432
2433 diagnostics.push(DiagnosticEntry {
2434 range,
2435 diagnostic: Diagnostic {
2436 code: code.clone(),
2437 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2438 message: diagnostic.message.clone(),
2439 group_id,
2440 is_primary: true,
2441 is_valid: true,
2442 is_disk_based,
2443 is_unnecessary,
2444 },
2445 });
2446 if let Some(infos) = &diagnostic.related_information {
2447 for info in infos {
2448 if info.location.uri == params.uri && !info.message.is_empty() {
2449 let range = range_from_lsp(info.location.range);
2450 diagnostics.push(DiagnosticEntry {
2451 range,
2452 diagnostic: Diagnostic {
2453 code: code.clone(),
2454 severity: DiagnosticSeverity::INFORMATION,
2455 message: info.message.clone(),
2456 group_id,
2457 is_primary: false,
2458 is_valid: true,
2459 is_disk_based,
2460 is_unnecessary: false,
2461 },
2462 });
2463 }
2464 }
2465 }
2466 }
2467 }
2468
2469 for entry in &mut diagnostics {
2470 let diagnostic = &mut entry.diagnostic;
2471 if !diagnostic.is_primary {
2472 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2473 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2474 source,
2475 diagnostic.code.clone(),
2476 entry.range.clone(),
2477 )) {
2478 if let Some(severity) = severity {
2479 diagnostic.severity = severity;
2480 }
2481 diagnostic.is_unnecessary = is_unnecessary;
2482 }
2483 }
2484 }
2485
2486 self.update_diagnostic_entries(
2487 language_server_id,
2488 abs_path,
2489 params.version,
2490 diagnostics,
2491 cx,
2492 )?;
2493 Ok(())
2494 }
2495
2496 pub fn update_diagnostic_entries(
2497 &mut self,
2498 language_server_id: usize,
2499 abs_path: PathBuf,
2500 version: Option<i32>,
2501 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2502 cx: &mut ModelContext<Project>,
2503 ) -> Result<(), anyhow::Error> {
2504 let (worktree, relative_path) = self
2505 .find_local_worktree(&abs_path, cx)
2506 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2507 if !worktree.read(cx).is_visible() {
2508 return Ok(());
2509 }
2510
2511 let project_path = ProjectPath {
2512 worktree_id: worktree.read(cx).id(),
2513 path: relative_path.into(),
2514 };
2515 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2516 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2517 }
2518
2519 let updated = worktree.update(cx, |worktree, cx| {
2520 worktree
2521 .as_local_mut()
2522 .ok_or_else(|| anyhow!("not a local worktree"))?
2523 .update_diagnostics(
2524 language_server_id,
2525 project_path.path.clone(),
2526 diagnostics,
2527 cx,
2528 )
2529 })?;
2530 if updated {
2531 cx.emit(Event::DiagnosticsUpdated {
2532 language_server_id,
2533 path: project_path,
2534 });
2535 }
2536 Ok(())
2537 }
2538
2539 fn update_buffer_diagnostics(
2540 &mut self,
2541 buffer: &ModelHandle<Buffer>,
2542 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2543 version: Option<i32>,
2544 cx: &mut ModelContext<Self>,
2545 ) -> Result<()> {
2546 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2547 Ordering::Equal
2548 .then_with(|| b.is_primary.cmp(&a.is_primary))
2549 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2550 .then_with(|| a.severity.cmp(&b.severity))
2551 .then_with(|| a.message.cmp(&b.message))
2552 }
2553
2554 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2555
2556 diagnostics.sort_unstable_by(|a, b| {
2557 Ordering::Equal
2558 .then_with(|| a.range.start.cmp(&b.range.start))
2559 .then_with(|| b.range.end.cmp(&a.range.end))
2560 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2561 });
2562
2563 let mut sanitized_diagnostics = Vec::new();
2564 let edits_since_save = Patch::new(
2565 snapshot
2566 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2567 .collect(),
2568 );
2569 for entry in diagnostics {
2570 let start;
2571 let end;
2572 if entry.diagnostic.is_disk_based {
2573 // Some diagnostics are based on files on disk instead of buffers'
2574 // current contents. Adjust these diagnostics' ranges to reflect
2575 // any unsaved edits.
2576 start = edits_since_save.old_to_new(entry.range.start);
2577 end = edits_since_save.old_to_new(entry.range.end);
2578 } else {
2579 start = entry.range.start;
2580 end = entry.range.end;
2581 }
2582
2583 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2584 ..snapshot.clip_point_utf16(end, Bias::Right);
2585
2586 // Expand empty ranges by one character
2587 if range.start == range.end {
2588 range.end.column += 1;
2589 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2590 if range.start == range.end && range.end.column > 0 {
2591 range.start.column -= 1;
2592 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2593 }
2594 }
2595
2596 sanitized_diagnostics.push(DiagnosticEntry {
2597 range,
2598 diagnostic: entry.diagnostic,
2599 });
2600 }
2601 drop(edits_since_save);
2602
2603 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2604 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2605 Ok(())
2606 }
2607
2608 pub fn reload_buffers(
2609 &self,
2610 buffers: HashSet<ModelHandle<Buffer>>,
2611 push_to_history: bool,
2612 cx: &mut ModelContext<Self>,
2613 ) -> Task<Result<ProjectTransaction>> {
2614 let mut local_buffers = Vec::new();
2615 let mut remote_buffers = None;
2616 for buffer_handle in buffers {
2617 let buffer = buffer_handle.read(cx);
2618 if buffer.is_dirty() {
2619 if let Some(file) = File::from_dyn(buffer.file()) {
2620 if file.is_local() {
2621 local_buffers.push(buffer_handle);
2622 } else {
2623 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2624 }
2625 }
2626 }
2627 }
2628
2629 let remote_buffers = self.remote_id().zip(remote_buffers);
2630 let client = self.client.clone();
2631
2632 cx.spawn(|this, mut cx| async move {
2633 let mut project_transaction = ProjectTransaction::default();
2634
2635 if let Some((project_id, remote_buffers)) = remote_buffers {
2636 let response = client
2637 .request(proto::ReloadBuffers {
2638 project_id,
2639 buffer_ids: remote_buffers
2640 .iter()
2641 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2642 .collect(),
2643 })
2644 .await?
2645 .transaction
2646 .ok_or_else(|| anyhow!("missing transaction"))?;
2647 project_transaction = this
2648 .update(&mut cx, |this, cx| {
2649 this.deserialize_project_transaction(response, push_to_history, cx)
2650 })
2651 .await?;
2652 }
2653
2654 for buffer in local_buffers {
2655 let transaction = buffer
2656 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2657 .await?;
2658 buffer.update(&mut cx, |buffer, cx| {
2659 if let Some(transaction) = transaction {
2660 if !push_to_history {
2661 buffer.forget_transaction(transaction.id);
2662 }
2663 project_transaction.0.insert(cx.handle(), transaction);
2664 }
2665 });
2666 }
2667
2668 Ok(project_transaction)
2669 })
2670 }
2671
2672 pub fn format(
2673 &self,
2674 buffers: HashSet<ModelHandle<Buffer>>,
2675 push_to_history: bool,
2676 cx: &mut ModelContext<Project>,
2677 ) -> Task<Result<ProjectTransaction>> {
2678 let mut local_buffers = Vec::new();
2679 let mut remote_buffers = None;
2680 for buffer_handle in buffers {
2681 let buffer = buffer_handle.read(cx);
2682 if let Some(file) = File::from_dyn(buffer.file()) {
2683 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2684 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2685 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2686 }
2687 } else {
2688 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2689 }
2690 } else {
2691 return Task::ready(Ok(Default::default()));
2692 }
2693 }
2694
2695 let remote_buffers = self.remote_id().zip(remote_buffers);
2696 let client = self.client.clone();
2697
2698 cx.spawn(|this, mut cx| async move {
2699 let mut project_transaction = ProjectTransaction::default();
2700
2701 if let Some((project_id, remote_buffers)) = remote_buffers {
2702 let response = client
2703 .request(proto::FormatBuffers {
2704 project_id,
2705 buffer_ids: remote_buffers
2706 .iter()
2707 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2708 .collect(),
2709 })
2710 .await?
2711 .transaction
2712 .ok_or_else(|| anyhow!("missing transaction"))?;
2713 project_transaction = this
2714 .update(&mut cx, |this, cx| {
2715 this.deserialize_project_transaction(response, push_to_history, cx)
2716 })
2717 .await?;
2718 }
2719
2720 for (buffer, buffer_abs_path, language_server) in local_buffers {
2721 let text_document = lsp::TextDocumentIdentifier::new(
2722 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2723 );
2724 let capabilities = &language_server.capabilities();
2725 let tab_size = cx.update(|cx| {
2726 let language_name = buffer.read(cx).language().map(|language| language.name());
2727 cx.global::<Settings>().tab_size(language_name.as_deref())
2728 });
2729 let lsp_edits = if capabilities
2730 .document_formatting_provider
2731 .as_ref()
2732 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2733 {
2734 language_server
2735 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2736 text_document,
2737 options: lsp::FormattingOptions {
2738 tab_size,
2739 insert_spaces: true,
2740 insert_final_newline: Some(true),
2741 ..Default::default()
2742 },
2743 work_done_progress_params: Default::default(),
2744 })
2745 .await?
2746 } else if capabilities
2747 .document_range_formatting_provider
2748 .as_ref()
2749 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2750 {
2751 let buffer_start = lsp::Position::new(0, 0);
2752 let buffer_end =
2753 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2754 language_server
2755 .request::<lsp::request::RangeFormatting>(
2756 lsp::DocumentRangeFormattingParams {
2757 text_document,
2758 range: lsp::Range::new(buffer_start, buffer_end),
2759 options: lsp::FormattingOptions {
2760 tab_size: 4,
2761 insert_spaces: true,
2762 insert_final_newline: Some(true),
2763 ..Default::default()
2764 },
2765 work_done_progress_params: Default::default(),
2766 },
2767 )
2768 .await?
2769 } else {
2770 continue;
2771 };
2772
2773 if let Some(lsp_edits) = lsp_edits {
2774 let edits = this
2775 .update(&mut cx, |this, cx| {
2776 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2777 })
2778 .await?;
2779 buffer.update(&mut cx, |buffer, cx| {
2780 buffer.finalize_last_transaction();
2781 buffer.start_transaction();
2782 for (range, text) in edits {
2783 buffer.edit([(range, text)], cx);
2784 }
2785 if buffer.end_transaction(cx).is_some() {
2786 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2787 if !push_to_history {
2788 buffer.forget_transaction(transaction.id);
2789 }
2790 project_transaction.0.insert(cx.handle(), transaction);
2791 }
2792 });
2793 }
2794 }
2795
2796 Ok(project_transaction)
2797 })
2798 }
2799
2800 pub fn definition<T: ToPointUtf16>(
2801 &self,
2802 buffer: &ModelHandle<Buffer>,
2803 position: T,
2804 cx: &mut ModelContext<Self>,
2805 ) -> Task<Result<Vec<Location>>> {
2806 let position = position.to_point_utf16(buffer.read(cx));
2807 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2808 }
2809
2810 pub fn references<T: ToPointUtf16>(
2811 &self,
2812 buffer: &ModelHandle<Buffer>,
2813 position: T,
2814 cx: &mut ModelContext<Self>,
2815 ) -> Task<Result<Vec<Location>>> {
2816 let position = position.to_point_utf16(buffer.read(cx));
2817 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2818 }
2819
2820 pub fn document_highlights<T: ToPointUtf16>(
2821 &self,
2822 buffer: &ModelHandle<Buffer>,
2823 position: T,
2824 cx: &mut ModelContext<Self>,
2825 ) -> Task<Result<Vec<DocumentHighlight>>> {
2826 let position = position.to_point_utf16(buffer.read(cx));
2827
2828 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2829 }
2830
2831 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2832 if self.is_local() {
2833 let mut requests = Vec::new();
2834 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2835 let worktree_id = *worktree_id;
2836 if let Some(worktree) = self
2837 .worktree_for_id(worktree_id, cx)
2838 .and_then(|worktree| worktree.read(cx).as_local())
2839 {
2840 let lsp_adapter = lsp_adapter.clone();
2841 let worktree_abs_path = worktree.abs_path().clone();
2842 requests.push(
2843 language_server
2844 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2845 query: query.to_string(),
2846 ..Default::default()
2847 })
2848 .log_err()
2849 .map(move |response| {
2850 (
2851 lsp_adapter,
2852 worktree_id,
2853 worktree_abs_path,
2854 response.unwrap_or_default(),
2855 )
2856 }),
2857 );
2858 }
2859 }
2860
2861 cx.spawn_weak(|this, cx| async move {
2862 let responses = futures::future::join_all(requests).await;
2863 let this = if let Some(this) = this.upgrade(&cx) {
2864 this
2865 } else {
2866 return Ok(Default::default());
2867 };
2868 this.read_with(&cx, |this, cx| {
2869 let mut symbols = Vec::new();
2870 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2871 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2872 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2873 let mut worktree_id = source_worktree_id;
2874 let path;
2875 if let Some((worktree, rel_path)) =
2876 this.find_local_worktree(&abs_path, cx)
2877 {
2878 worktree_id = worktree.read(cx).id();
2879 path = rel_path;
2880 } else {
2881 path = relativize_path(&worktree_abs_path, &abs_path);
2882 }
2883
2884 let label = this
2885 .languages
2886 .select_language(&path)
2887 .and_then(|language| {
2888 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2889 })
2890 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2891 let signature = this.symbol_signature(worktree_id, &path);
2892
2893 Some(Symbol {
2894 source_worktree_id,
2895 worktree_id,
2896 language_server_name: adapter.name(),
2897 name: lsp_symbol.name,
2898 kind: lsp_symbol.kind,
2899 label,
2900 path,
2901 range: range_from_lsp(lsp_symbol.location.range),
2902 signature,
2903 })
2904 }));
2905 }
2906 Ok(symbols)
2907 })
2908 })
2909 } else if let Some(project_id) = self.remote_id() {
2910 let request = self.client.request(proto::GetProjectSymbols {
2911 project_id,
2912 query: query.to_string(),
2913 });
2914 cx.spawn_weak(|this, cx| async move {
2915 let response = request.await?;
2916 let mut symbols = Vec::new();
2917 if let Some(this) = this.upgrade(&cx) {
2918 this.read_with(&cx, |this, _| {
2919 symbols.extend(
2920 response
2921 .symbols
2922 .into_iter()
2923 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2924 );
2925 })
2926 }
2927 Ok(symbols)
2928 })
2929 } else {
2930 Task::ready(Ok(Default::default()))
2931 }
2932 }
2933
2934 pub fn open_buffer_for_symbol(
2935 &mut self,
2936 symbol: &Symbol,
2937 cx: &mut ModelContext<Self>,
2938 ) -> Task<Result<ModelHandle<Buffer>>> {
2939 if self.is_local() {
2940 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2941 symbol.source_worktree_id,
2942 symbol.language_server_name.clone(),
2943 )) {
2944 server.clone()
2945 } else {
2946 return Task::ready(Err(anyhow!(
2947 "language server for worktree and language not found"
2948 )));
2949 };
2950
2951 let worktree_abs_path = if let Some(worktree_abs_path) = self
2952 .worktree_for_id(symbol.worktree_id, cx)
2953 .and_then(|worktree| worktree.read(cx).as_local())
2954 .map(|local_worktree| local_worktree.abs_path())
2955 {
2956 worktree_abs_path
2957 } else {
2958 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2959 };
2960 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2961 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2962 uri
2963 } else {
2964 return Task::ready(Err(anyhow!("invalid symbol path")));
2965 };
2966
2967 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2968 } else if let Some(project_id) = self.remote_id() {
2969 let request = self.client.request(proto::OpenBufferForSymbol {
2970 project_id,
2971 symbol: Some(serialize_symbol(symbol)),
2972 });
2973 cx.spawn(|this, mut cx| async move {
2974 let response = request.await?;
2975 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2976 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2977 .await
2978 })
2979 } else {
2980 Task::ready(Err(anyhow!("project does not have a remote id")))
2981 }
2982 }
2983
2984 pub fn hover<T: ToPointUtf16>(
2985 &self,
2986 buffer: &ModelHandle<Buffer>,
2987 position: T,
2988 cx: &mut ModelContext<Self>,
2989 ) -> Task<Result<Option<Hover>>> {
2990 let position = position.to_point_utf16(buffer.read(cx));
2991 self.request_lsp(buffer.clone(), GetHover { position }, cx)
2992 }
2993
2994 pub fn completions<T: ToPointUtf16>(
2995 &self,
2996 source_buffer_handle: &ModelHandle<Buffer>,
2997 position: T,
2998 cx: &mut ModelContext<Self>,
2999 ) -> Task<Result<Vec<Completion>>> {
3000 let source_buffer_handle = source_buffer_handle.clone();
3001 let source_buffer = source_buffer_handle.read(cx);
3002 let buffer_id = source_buffer.remote_id();
3003 let language = source_buffer.language().cloned();
3004 let worktree;
3005 let buffer_abs_path;
3006 if let Some(file) = File::from_dyn(source_buffer.file()) {
3007 worktree = file.worktree.clone();
3008 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3009 } else {
3010 return Task::ready(Ok(Default::default()));
3011 };
3012
3013 let position = position.to_point_utf16(source_buffer);
3014 let anchor = source_buffer.anchor_after(position);
3015
3016 if worktree.read(cx).as_local().is_some() {
3017 let buffer_abs_path = buffer_abs_path.unwrap();
3018 let (_, lang_server) =
3019 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3020 server.clone()
3021 } else {
3022 return Task::ready(Ok(Default::default()));
3023 };
3024
3025 cx.spawn(|_, cx| async move {
3026 let completions = lang_server
3027 .request::<lsp::request::Completion>(lsp::CompletionParams {
3028 text_document_position: lsp::TextDocumentPositionParams::new(
3029 lsp::TextDocumentIdentifier::new(
3030 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3031 ),
3032 point_to_lsp(position),
3033 ),
3034 context: Default::default(),
3035 work_done_progress_params: Default::default(),
3036 partial_result_params: Default::default(),
3037 })
3038 .await
3039 .context("lsp completion request failed")?;
3040
3041 let completions = if let Some(completions) = completions {
3042 match completions {
3043 lsp::CompletionResponse::Array(completions) => completions,
3044 lsp::CompletionResponse::List(list) => list.items,
3045 }
3046 } else {
3047 Default::default()
3048 };
3049
3050 source_buffer_handle.read_with(&cx, |this, _| {
3051 let snapshot = this.snapshot();
3052 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3053 let mut range_for_token = None;
3054 Ok(completions
3055 .into_iter()
3056 .filter_map(|lsp_completion| {
3057 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3058 // If the language server provides a range to overwrite, then
3059 // check that the range is valid.
3060 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3061 let range = range_from_lsp(edit.range);
3062 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3063 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3064 if start != range.start || end != range.end {
3065 log::info!("completion out of expected range");
3066 return None;
3067 }
3068 (
3069 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3070 edit.new_text.clone(),
3071 )
3072 }
3073 // If the language server does not provide a range, then infer
3074 // the range based on the syntax tree.
3075 None => {
3076 if position != clipped_position {
3077 log::info!("completion out of expected range");
3078 return None;
3079 }
3080 let Range { start, end } = range_for_token
3081 .get_or_insert_with(|| {
3082 let offset = position.to_offset(&snapshot);
3083 snapshot
3084 .range_for_word_token_at(offset)
3085 .unwrap_or_else(|| offset..offset)
3086 })
3087 .clone();
3088 let text = lsp_completion
3089 .insert_text
3090 .as_ref()
3091 .unwrap_or(&lsp_completion.label)
3092 .clone();
3093 (
3094 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3095 text.clone(),
3096 )
3097 }
3098 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3099 log::info!("unsupported insert/replace completion");
3100 return None;
3101 }
3102 };
3103
3104 Some(Completion {
3105 old_range,
3106 new_text,
3107 label: language
3108 .as_ref()
3109 .and_then(|l| l.label_for_completion(&lsp_completion))
3110 .unwrap_or_else(|| {
3111 CodeLabel::plain(
3112 lsp_completion.label.clone(),
3113 lsp_completion.filter_text.as_deref(),
3114 )
3115 }),
3116 lsp_completion,
3117 })
3118 })
3119 .collect())
3120 })
3121 })
3122 } else if let Some(project_id) = self.remote_id() {
3123 let rpc = self.client.clone();
3124 let message = proto::GetCompletions {
3125 project_id,
3126 buffer_id,
3127 position: Some(language::proto::serialize_anchor(&anchor)),
3128 version: serialize_version(&source_buffer.version()),
3129 };
3130 cx.spawn_weak(|_, mut cx| async move {
3131 let response = rpc.request(message).await?;
3132
3133 source_buffer_handle
3134 .update(&mut cx, |buffer, _| {
3135 buffer.wait_for_version(deserialize_version(response.version))
3136 })
3137 .await;
3138
3139 response
3140 .completions
3141 .into_iter()
3142 .map(|completion| {
3143 language::proto::deserialize_completion(completion, language.as_ref())
3144 })
3145 .collect()
3146 })
3147 } else {
3148 Task::ready(Ok(Default::default()))
3149 }
3150 }
3151
3152 pub fn apply_additional_edits_for_completion(
3153 &self,
3154 buffer_handle: ModelHandle<Buffer>,
3155 completion: Completion,
3156 push_to_history: bool,
3157 cx: &mut ModelContext<Self>,
3158 ) -> Task<Result<Option<Transaction>>> {
3159 let buffer = buffer_handle.read(cx);
3160 let buffer_id = buffer.remote_id();
3161
3162 if self.is_local() {
3163 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3164 {
3165 server.clone()
3166 } else {
3167 return Task::ready(Ok(Default::default()));
3168 };
3169
3170 cx.spawn(|this, mut cx| async move {
3171 let resolved_completion = lang_server
3172 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3173 .await?;
3174 if let Some(edits) = resolved_completion.additional_text_edits {
3175 let edits = this
3176 .update(&mut cx, |this, cx| {
3177 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3178 })
3179 .await?;
3180 buffer_handle.update(&mut cx, |buffer, cx| {
3181 buffer.finalize_last_transaction();
3182 buffer.start_transaction();
3183 for (range, text) in edits {
3184 buffer.edit([(range, text)], cx);
3185 }
3186 let transaction = if buffer.end_transaction(cx).is_some() {
3187 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3188 if !push_to_history {
3189 buffer.forget_transaction(transaction.id);
3190 }
3191 Some(transaction)
3192 } else {
3193 None
3194 };
3195 Ok(transaction)
3196 })
3197 } else {
3198 Ok(None)
3199 }
3200 })
3201 } else if let Some(project_id) = self.remote_id() {
3202 let client = self.client.clone();
3203 cx.spawn(|_, mut cx| async move {
3204 let response = client
3205 .request(proto::ApplyCompletionAdditionalEdits {
3206 project_id,
3207 buffer_id,
3208 completion: Some(language::proto::serialize_completion(&completion)),
3209 })
3210 .await?;
3211
3212 if let Some(transaction) = response.transaction {
3213 let transaction = language::proto::deserialize_transaction(transaction)?;
3214 buffer_handle
3215 .update(&mut cx, |buffer, _| {
3216 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3217 })
3218 .await;
3219 if push_to_history {
3220 buffer_handle.update(&mut cx, |buffer, _| {
3221 buffer.push_transaction(transaction.clone(), Instant::now());
3222 });
3223 }
3224 Ok(Some(transaction))
3225 } else {
3226 Ok(None)
3227 }
3228 })
3229 } else {
3230 Task::ready(Err(anyhow!("project does not have a remote id")))
3231 }
3232 }
3233
3234 pub fn code_actions<T: Clone + ToOffset>(
3235 &self,
3236 buffer_handle: &ModelHandle<Buffer>,
3237 range: Range<T>,
3238 cx: &mut ModelContext<Self>,
3239 ) -> Task<Result<Vec<CodeAction>>> {
3240 let buffer_handle = buffer_handle.clone();
3241 let buffer = buffer_handle.read(cx);
3242 let snapshot = buffer.snapshot();
3243 let relevant_diagnostics = snapshot
3244 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3245 .map(|entry| entry.to_lsp_diagnostic_stub())
3246 .collect();
3247 let buffer_id = buffer.remote_id();
3248 let worktree;
3249 let buffer_abs_path;
3250 if let Some(file) = File::from_dyn(buffer.file()) {
3251 worktree = file.worktree.clone();
3252 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3253 } else {
3254 return Task::ready(Ok(Default::default()));
3255 };
3256 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3257
3258 if worktree.read(cx).as_local().is_some() {
3259 let buffer_abs_path = buffer_abs_path.unwrap();
3260 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3261 {
3262 server.clone()
3263 } else {
3264 return Task::ready(Ok(Default::default()));
3265 };
3266
3267 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3268 cx.foreground().spawn(async move {
3269 if !lang_server.capabilities().code_action_provider.is_some() {
3270 return Ok(Default::default());
3271 }
3272
3273 Ok(lang_server
3274 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3275 text_document: lsp::TextDocumentIdentifier::new(
3276 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3277 ),
3278 range: lsp_range,
3279 work_done_progress_params: Default::default(),
3280 partial_result_params: Default::default(),
3281 context: lsp::CodeActionContext {
3282 diagnostics: relevant_diagnostics,
3283 only: Some(vec![
3284 lsp::CodeActionKind::QUICKFIX,
3285 lsp::CodeActionKind::REFACTOR,
3286 lsp::CodeActionKind::REFACTOR_EXTRACT,
3287 lsp::CodeActionKind::SOURCE,
3288 ]),
3289 },
3290 })
3291 .await?
3292 .unwrap_or_default()
3293 .into_iter()
3294 .filter_map(|entry| {
3295 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3296 Some(CodeAction {
3297 range: range.clone(),
3298 lsp_action,
3299 })
3300 } else {
3301 None
3302 }
3303 })
3304 .collect())
3305 })
3306 } else if let Some(project_id) = self.remote_id() {
3307 let rpc = self.client.clone();
3308 let version = buffer.version();
3309 cx.spawn_weak(|_, mut cx| async move {
3310 let response = rpc
3311 .request(proto::GetCodeActions {
3312 project_id,
3313 buffer_id,
3314 start: Some(language::proto::serialize_anchor(&range.start)),
3315 end: Some(language::proto::serialize_anchor(&range.end)),
3316 version: serialize_version(&version),
3317 })
3318 .await?;
3319
3320 buffer_handle
3321 .update(&mut cx, |buffer, _| {
3322 buffer.wait_for_version(deserialize_version(response.version))
3323 })
3324 .await;
3325
3326 response
3327 .actions
3328 .into_iter()
3329 .map(language::proto::deserialize_code_action)
3330 .collect()
3331 })
3332 } else {
3333 Task::ready(Ok(Default::default()))
3334 }
3335 }
3336
3337 pub fn apply_code_action(
3338 &self,
3339 buffer_handle: ModelHandle<Buffer>,
3340 mut action: CodeAction,
3341 push_to_history: bool,
3342 cx: &mut ModelContext<Self>,
3343 ) -> Task<Result<ProjectTransaction>> {
3344 if self.is_local() {
3345 let buffer = buffer_handle.read(cx);
3346 let (lsp_adapter, lang_server) =
3347 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3348 server.clone()
3349 } else {
3350 return Task::ready(Ok(Default::default()));
3351 };
3352 let range = action.range.to_point_utf16(buffer);
3353
3354 cx.spawn(|this, mut cx| async move {
3355 if let Some(lsp_range) = action
3356 .lsp_action
3357 .data
3358 .as_mut()
3359 .and_then(|d| d.get_mut("codeActionParams"))
3360 .and_then(|d| d.get_mut("range"))
3361 {
3362 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3363 action.lsp_action = lang_server
3364 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3365 .await?;
3366 } else {
3367 let actions = this
3368 .update(&mut cx, |this, cx| {
3369 this.code_actions(&buffer_handle, action.range, cx)
3370 })
3371 .await?;
3372 action.lsp_action = actions
3373 .into_iter()
3374 .find(|a| a.lsp_action.title == action.lsp_action.title)
3375 .ok_or_else(|| anyhow!("code action is outdated"))?
3376 .lsp_action;
3377 }
3378
3379 if let Some(edit) = action.lsp_action.edit {
3380 Self::deserialize_workspace_edit(
3381 this,
3382 edit,
3383 push_to_history,
3384 lsp_adapter,
3385 lang_server,
3386 &mut cx,
3387 )
3388 .await
3389 } else if let Some(command) = action.lsp_action.command {
3390 this.update(&mut cx, |this, _| {
3391 this.last_workspace_edits_by_language_server
3392 .remove(&lang_server.server_id());
3393 });
3394 lang_server
3395 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3396 command: command.command,
3397 arguments: command.arguments.unwrap_or_default(),
3398 ..Default::default()
3399 })
3400 .await?;
3401 Ok(this.update(&mut cx, |this, _| {
3402 this.last_workspace_edits_by_language_server
3403 .remove(&lang_server.server_id())
3404 .unwrap_or_default()
3405 }))
3406 } else {
3407 Ok(ProjectTransaction::default())
3408 }
3409 })
3410 } else if let Some(project_id) = self.remote_id() {
3411 let client = self.client.clone();
3412 let request = proto::ApplyCodeAction {
3413 project_id,
3414 buffer_id: buffer_handle.read(cx).remote_id(),
3415 action: Some(language::proto::serialize_code_action(&action)),
3416 };
3417 cx.spawn(|this, mut cx| async move {
3418 let response = client
3419 .request(request)
3420 .await?
3421 .transaction
3422 .ok_or_else(|| anyhow!("missing transaction"))?;
3423 this.update(&mut cx, |this, cx| {
3424 this.deserialize_project_transaction(response, push_to_history, cx)
3425 })
3426 .await
3427 })
3428 } else {
3429 Task::ready(Err(anyhow!("project does not have a remote id")))
3430 }
3431 }
3432
3433 async fn deserialize_workspace_edit(
3434 this: ModelHandle<Self>,
3435 edit: lsp::WorkspaceEdit,
3436 push_to_history: bool,
3437 lsp_adapter: Arc<dyn LspAdapter>,
3438 language_server: Arc<LanguageServer>,
3439 cx: &mut AsyncAppContext,
3440 ) -> Result<ProjectTransaction> {
3441 let fs = this.read_with(cx, |this, _| this.fs.clone());
3442 let mut operations = Vec::new();
3443 if let Some(document_changes) = edit.document_changes {
3444 match document_changes {
3445 lsp::DocumentChanges::Edits(edits) => {
3446 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3447 }
3448 lsp::DocumentChanges::Operations(ops) => operations = ops,
3449 }
3450 } else if let Some(changes) = edit.changes {
3451 operations.extend(changes.into_iter().map(|(uri, edits)| {
3452 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3453 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3454 uri,
3455 version: None,
3456 },
3457 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3458 })
3459 }));
3460 }
3461
3462 let mut project_transaction = ProjectTransaction::default();
3463 for operation in operations {
3464 match operation {
3465 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3466 let abs_path = op
3467 .uri
3468 .to_file_path()
3469 .map_err(|_| anyhow!("can't convert URI to path"))?;
3470
3471 if let Some(parent_path) = abs_path.parent() {
3472 fs.create_dir(parent_path).await?;
3473 }
3474 if abs_path.ends_with("/") {
3475 fs.create_dir(&abs_path).await?;
3476 } else {
3477 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3478 .await?;
3479 }
3480 }
3481 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3482 let source_abs_path = op
3483 .old_uri
3484 .to_file_path()
3485 .map_err(|_| anyhow!("can't convert URI to path"))?;
3486 let target_abs_path = op
3487 .new_uri
3488 .to_file_path()
3489 .map_err(|_| anyhow!("can't convert URI to path"))?;
3490 fs.rename(
3491 &source_abs_path,
3492 &target_abs_path,
3493 op.options.map(Into::into).unwrap_or_default(),
3494 )
3495 .await?;
3496 }
3497 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3498 let abs_path = op
3499 .uri
3500 .to_file_path()
3501 .map_err(|_| anyhow!("can't convert URI to path"))?;
3502 let options = op.options.map(Into::into).unwrap_or_default();
3503 if abs_path.ends_with("/") {
3504 fs.remove_dir(&abs_path, options).await?;
3505 } else {
3506 fs.remove_file(&abs_path, options).await?;
3507 }
3508 }
3509 lsp::DocumentChangeOperation::Edit(op) => {
3510 let buffer_to_edit = this
3511 .update(cx, |this, cx| {
3512 this.open_local_buffer_via_lsp(
3513 op.text_document.uri,
3514 lsp_adapter.clone(),
3515 language_server.clone(),
3516 cx,
3517 )
3518 })
3519 .await?;
3520
3521 let edits = this
3522 .update(cx, |this, cx| {
3523 let edits = op.edits.into_iter().map(|edit| match edit {
3524 lsp::OneOf::Left(edit) => edit,
3525 lsp::OneOf::Right(edit) => edit.text_edit,
3526 });
3527 this.edits_from_lsp(
3528 &buffer_to_edit,
3529 edits,
3530 op.text_document.version,
3531 cx,
3532 )
3533 })
3534 .await?;
3535
3536 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3537 buffer.finalize_last_transaction();
3538 buffer.start_transaction();
3539 for (range, text) in edits {
3540 buffer.edit([(range, text)], cx);
3541 }
3542 let transaction = if buffer.end_transaction(cx).is_some() {
3543 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3544 if !push_to_history {
3545 buffer.forget_transaction(transaction.id);
3546 }
3547 Some(transaction)
3548 } else {
3549 None
3550 };
3551
3552 transaction
3553 });
3554 if let Some(transaction) = transaction {
3555 project_transaction.0.insert(buffer_to_edit, transaction);
3556 }
3557 }
3558 }
3559 }
3560
3561 Ok(project_transaction)
3562 }
3563
3564 pub fn prepare_rename<T: ToPointUtf16>(
3565 &self,
3566 buffer: ModelHandle<Buffer>,
3567 position: T,
3568 cx: &mut ModelContext<Self>,
3569 ) -> Task<Result<Option<Range<Anchor>>>> {
3570 let position = position.to_point_utf16(buffer.read(cx));
3571 self.request_lsp(buffer, PrepareRename { position }, cx)
3572 }
3573
3574 pub fn perform_rename<T: ToPointUtf16>(
3575 &self,
3576 buffer: ModelHandle<Buffer>,
3577 position: T,
3578 new_name: String,
3579 push_to_history: bool,
3580 cx: &mut ModelContext<Self>,
3581 ) -> Task<Result<ProjectTransaction>> {
3582 let position = position.to_point_utf16(buffer.read(cx));
3583 self.request_lsp(
3584 buffer,
3585 PerformRename {
3586 position,
3587 new_name,
3588 push_to_history,
3589 },
3590 cx,
3591 )
3592 }
3593
3594 pub fn search(
3595 &self,
3596 query: SearchQuery,
3597 cx: &mut ModelContext<Self>,
3598 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3599 if self.is_local() {
3600 let snapshots = self
3601 .visible_worktrees(cx)
3602 .filter_map(|tree| {
3603 let tree = tree.read(cx).as_local()?;
3604 Some(tree.snapshot())
3605 })
3606 .collect::<Vec<_>>();
3607
3608 let background = cx.background().clone();
3609 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3610 if path_count == 0 {
3611 return Task::ready(Ok(Default::default()));
3612 }
3613 let workers = background.num_cpus().min(path_count);
3614 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3615 cx.background()
3616 .spawn({
3617 let fs = self.fs.clone();
3618 let background = cx.background().clone();
3619 let query = query.clone();
3620 async move {
3621 let fs = &fs;
3622 let query = &query;
3623 let matching_paths_tx = &matching_paths_tx;
3624 let paths_per_worker = (path_count + workers - 1) / workers;
3625 let snapshots = &snapshots;
3626 background
3627 .scoped(|scope| {
3628 for worker_ix in 0..workers {
3629 let worker_start_ix = worker_ix * paths_per_worker;
3630 let worker_end_ix = worker_start_ix + paths_per_worker;
3631 scope.spawn(async move {
3632 let mut snapshot_start_ix = 0;
3633 let mut abs_path = PathBuf::new();
3634 for snapshot in snapshots {
3635 let snapshot_end_ix =
3636 snapshot_start_ix + snapshot.visible_file_count();
3637 if worker_end_ix <= snapshot_start_ix {
3638 break;
3639 } else if worker_start_ix > snapshot_end_ix {
3640 snapshot_start_ix = snapshot_end_ix;
3641 continue;
3642 } else {
3643 let start_in_snapshot = worker_start_ix
3644 .saturating_sub(snapshot_start_ix);
3645 let end_in_snapshot =
3646 cmp::min(worker_end_ix, snapshot_end_ix)
3647 - snapshot_start_ix;
3648
3649 for entry in snapshot
3650 .files(false, start_in_snapshot)
3651 .take(end_in_snapshot - start_in_snapshot)
3652 {
3653 if matching_paths_tx.is_closed() {
3654 break;
3655 }
3656
3657 abs_path.clear();
3658 abs_path.push(&snapshot.abs_path());
3659 abs_path.push(&entry.path);
3660 let matches = if let Some(file) =
3661 fs.open_sync(&abs_path).await.log_err()
3662 {
3663 query.detect(file).unwrap_or(false)
3664 } else {
3665 false
3666 };
3667
3668 if matches {
3669 let project_path =
3670 (snapshot.id(), entry.path.clone());
3671 if matching_paths_tx
3672 .send(project_path)
3673 .await
3674 .is_err()
3675 {
3676 break;
3677 }
3678 }
3679 }
3680
3681 snapshot_start_ix = snapshot_end_ix;
3682 }
3683 }
3684 });
3685 }
3686 })
3687 .await;
3688 }
3689 })
3690 .detach();
3691
3692 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3693 let open_buffers = self
3694 .opened_buffers
3695 .values()
3696 .filter_map(|b| b.upgrade(cx))
3697 .collect::<HashSet<_>>();
3698 cx.spawn(|this, cx| async move {
3699 for buffer in &open_buffers {
3700 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3701 buffers_tx.send((buffer.clone(), snapshot)).await?;
3702 }
3703
3704 let open_buffers = Rc::new(RefCell::new(open_buffers));
3705 while let Some(project_path) = matching_paths_rx.next().await {
3706 if buffers_tx.is_closed() {
3707 break;
3708 }
3709
3710 let this = this.clone();
3711 let open_buffers = open_buffers.clone();
3712 let buffers_tx = buffers_tx.clone();
3713 cx.spawn(|mut cx| async move {
3714 if let Some(buffer) = this
3715 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3716 .await
3717 .log_err()
3718 {
3719 if open_buffers.borrow_mut().insert(buffer.clone()) {
3720 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3721 buffers_tx.send((buffer, snapshot)).await?;
3722 }
3723 }
3724
3725 Ok::<_, anyhow::Error>(())
3726 })
3727 .detach();
3728 }
3729
3730 Ok::<_, anyhow::Error>(())
3731 })
3732 .detach_and_log_err(cx);
3733
3734 let background = cx.background().clone();
3735 cx.background().spawn(async move {
3736 let query = &query;
3737 let mut matched_buffers = Vec::new();
3738 for _ in 0..workers {
3739 matched_buffers.push(HashMap::default());
3740 }
3741 background
3742 .scoped(|scope| {
3743 for worker_matched_buffers in matched_buffers.iter_mut() {
3744 let mut buffers_rx = buffers_rx.clone();
3745 scope.spawn(async move {
3746 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3747 let buffer_matches = query
3748 .search(snapshot.as_rope())
3749 .await
3750 .iter()
3751 .map(|range| {
3752 snapshot.anchor_before(range.start)
3753 ..snapshot.anchor_after(range.end)
3754 })
3755 .collect::<Vec<_>>();
3756 if !buffer_matches.is_empty() {
3757 worker_matched_buffers
3758 .insert(buffer.clone(), buffer_matches);
3759 }
3760 }
3761 });
3762 }
3763 })
3764 .await;
3765 Ok(matched_buffers.into_iter().flatten().collect())
3766 })
3767 } else if let Some(project_id) = self.remote_id() {
3768 let request = self.client.request(query.to_proto(project_id));
3769 cx.spawn(|this, mut cx| async move {
3770 let response = request.await?;
3771 let mut result = HashMap::default();
3772 for location in response.locations {
3773 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3774 let target_buffer = this
3775 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3776 .await?;
3777 let start = location
3778 .start
3779 .and_then(deserialize_anchor)
3780 .ok_or_else(|| anyhow!("missing target start"))?;
3781 let end = location
3782 .end
3783 .and_then(deserialize_anchor)
3784 .ok_or_else(|| anyhow!("missing target end"))?;
3785 result
3786 .entry(target_buffer)
3787 .or_insert(Vec::new())
3788 .push(start..end)
3789 }
3790 Ok(result)
3791 })
3792 } else {
3793 Task::ready(Ok(Default::default()))
3794 }
3795 }
3796
3797 fn request_lsp<R: LspCommand>(
3798 &self,
3799 buffer_handle: ModelHandle<Buffer>,
3800 request: R,
3801 cx: &mut ModelContext<Self>,
3802 ) -> Task<Result<R::Response>>
3803 where
3804 <R::LspRequest as lsp::request::Request>::Result: Send,
3805 {
3806 let buffer = buffer_handle.read(cx);
3807 if self.is_local() {
3808 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3809 if let Some((file, (_, language_server))) =
3810 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3811 {
3812 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3813 return cx.spawn(|this, cx| async move {
3814 if !request.check_capabilities(&language_server.capabilities()) {
3815 return Ok(Default::default());
3816 }
3817
3818 let response = language_server
3819 .request::<R::LspRequest>(lsp_params)
3820 .await
3821 .context("lsp request failed")?;
3822 request
3823 .response_from_lsp(response, this, buffer_handle, cx)
3824 .await
3825 });
3826 }
3827 } else if let Some(project_id) = self.remote_id() {
3828 let rpc = self.client.clone();
3829 let message = request.to_proto(project_id, buffer);
3830 return cx.spawn(|this, cx| async move {
3831 let response = rpc.request(message).await?;
3832 request
3833 .response_from_proto(response, this, buffer_handle, cx)
3834 .await
3835 });
3836 }
3837 Task::ready(Ok(Default::default()))
3838 }
3839
3840 pub fn find_or_create_local_worktree(
3841 &mut self,
3842 abs_path: impl AsRef<Path>,
3843 visible: bool,
3844 cx: &mut ModelContext<Self>,
3845 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3846 let abs_path = abs_path.as_ref();
3847 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3848 Task::ready(Ok((tree.clone(), relative_path.into())))
3849 } else {
3850 let worktree = self.create_local_worktree(abs_path, visible, cx);
3851 cx.foreground()
3852 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3853 }
3854 }
3855
3856 pub fn find_local_worktree(
3857 &self,
3858 abs_path: &Path,
3859 cx: &AppContext,
3860 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3861 for tree in self.worktrees(cx) {
3862 if let Some(relative_path) = tree
3863 .read(cx)
3864 .as_local()
3865 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3866 {
3867 return Some((tree.clone(), relative_path.into()));
3868 }
3869 }
3870 None
3871 }
3872
3873 pub fn is_shared(&self) -> bool {
3874 match &self.client_state {
3875 ProjectClientState::Local { is_shared, .. } => *is_shared,
3876 ProjectClientState::Remote { .. } => false,
3877 }
3878 }
3879
3880 fn create_local_worktree(
3881 &mut self,
3882 abs_path: impl AsRef<Path>,
3883 visible: bool,
3884 cx: &mut ModelContext<Self>,
3885 ) -> Task<Result<ModelHandle<Worktree>>> {
3886 let fs = self.fs.clone();
3887 let client = self.client.clone();
3888 let next_entry_id = self.next_entry_id.clone();
3889 let path: Arc<Path> = abs_path.as_ref().into();
3890 let task = self
3891 .loading_local_worktrees
3892 .entry(path.clone())
3893 .or_insert_with(|| {
3894 cx.spawn(|project, mut cx| {
3895 async move {
3896 let worktree = Worktree::local(
3897 client.clone(),
3898 path.clone(),
3899 visible,
3900 fs,
3901 next_entry_id,
3902 &mut cx,
3903 )
3904 .await;
3905 project.update(&mut cx, |project, _| {
3906 project.loading_local_worktrees.remove(&path);
3907 });
3908 let worktree = worktree?;
3909
3910 let project_id = project.update(&mut cx, |project, cx| {
3911 project.add_worktree(&worktree, cx);
3912 project.shared_remote_id()
3913 });
3914
3915 if let Some(project_id) = project_id {
3916 worktree
3917 .update(&mut cx, |worktree, cx| {
3918 worktree.as_local_mut().unwrap().share(project_id, cx)
3919 })
3920 .await
3921 .log_err();
3922 }
3923
3924 Ok(worktree)
3925 }
3926 .map_err(|err| Arc::new(err))
3927 })
3928 .shared()
3929 })
3930 .clone();
3931 cx.foreground().spawn(async move {
3932 match task.await {
3933 Ok(worktree) => Ok(worktree),
3934 Err(err) => Err(anyhow!("{}", err)),
3935 }
3936 })
3937 }
3938
3939 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3940 self.worktrees.retain(|worktree| {
3941 if let Some(worktree) = worktree.upgrade(cx) {
3942 let id = worktree.read(cx).id();
3943 if id == id_to_remove {
3944 cx.emit(Event::WorktreeRemoved(id));
3945 false
3946 } else {
3947 true
3948 }
3949 } else {
3950 false
3951 }
3952 });
3953 self.metadata_changed(true, cx);
3954 cx.notify();
3955 }
3956
3957 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3958 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3959 if worktree.read(cx).is_local() {
3960 cx.subscribe(&worktree, |this, worktree, _, cx| {
3961 this.update_local_worktree_buffers(worktree, cx);
3962 })
3963 .detach();
3964 }
3965
3966 let push_strong_handle = {
3967 let worktree = worktree.read(cx);
3968 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3969 };
3970 if push_strong_handle {
3971 self.worktrees
3972 .push(WorktreeHandle::Strong(worktree.clone()));
3973 } else {
3974 cx.observe_release(&worktree, |this, _, cx| {
3975 this.worktrees
3976 .retain(|worktree| worktree.upgrade(cx).is_some());
3977 cx.notify();
3978 })
3979 .detach();
3980 self.worktrees
3981 .push(WorktreeHandle::Weak(worktree.downgrade()));
3982 }
3983 self.metadata_changed(true, cx);
3984 cx.emit(Event::WorktreeAdded);
3985 cx.notify();
3986 }
3987
3988 fn update_local_worktree_buffers(
3989 &mut self,
3990 worktree_handle: ModelHandle<Worktree>,
3991 cx: &mut ModelContext<Self>,
3992 ) {
3993 let snapshot = worktree_handle.read(cx).snapshot();
3994 let mut buffers_to_delete = Vec::new();
3995 let mut renamed_buffers = Vec::new();
3996 for (buffer_id, buffer) in &self.opened_buffers {
3997 if let Some(buffer) = buffer.upgrade(cx) {
3998 buffer.update(cx, |buffer, cx| {
3999 if let Some(old_file) = File::from_dyn(buffer.file()) {
4000 if old_file.worktree != worktree_handle {
4001 return;
4002 }
4003
4004 let new_file = if let Some(entry) = old_file
4005 .entry_id
4006 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4007 {
4008 File {
4009 is_local: true,
4010 entry_id: Some(entry.id),
4011 mtime: entry.mtime,
4012 path: entry.path.clone(),
4013 worktree: worktree_handle.clone(),
4014 }
4015 } else if let Some(entry) =
4016 snapshot.entry_for_path(old_file.path().as_ref())
4017 {
4018 File {
4019 is_local: true,
4020 entry_id: Some(entry.id),
4021 mtime: entry.mtime,
4022 path: entry.path.clone(),
4023 worktree: worktree_handle.clone(),
4024 }
4025 } else {
4026 File {
4027 is_local: true,
4028 entry_id: None,
4029 path: old_file.path().clone(),
4030 mtime: old_file.mtime(),
4031 worktree: worktree_handle.clone(),
4032 }
4033 };
4034
4035 let old_path = old_file.abs_path(cx);
4036 if new_file.abs_path(cx) != old_path {
4037 renamed_buffers.push((cx.handle(), old_path));
4038 }
4039
4040 if let Some(project_id) = self.shared_remote_id() {
4041 self.client
4042 .send(proto::UpdateBufferFile {
4043 project_id,
4044 buffer_id: *buffer_id as u64,
4045 file: Some(new_file.to_proto()),
4046 })
4047 .log_err();
4048 }
4049 buffer.file_updated(Box::new(new_file), cx).detach();
4050 }
4051 });
4052 } else {
4053 buffers_to_delete.push(*buffer_id);
4054 }
4055 }
4056
4057 for buffer_id in buffers_to_delete {
4058 self.opened_buffers.remove(&buffer_id);
4059 }
4060
4061 for (buffer, old_path) in renamed_buffers {
4062 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4063 self.assign_language_to_buffer(&buffer, cx);
4064 self.register_buffer_with_language_server(&buffer, cx);
4065 }
4066 }
4067
4068 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4069 let new_active_entry = entry.and_then(|project_path| {
4070 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4071 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4072 Some(entry.id)
4073 });
4074 if new_active_entry != self.active_entry {
4075 self.active_entry = new_active_entry;
4076 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4077 }
4078 }
4079
4080 pub fn language_servers_running_disk_based_diagnostics<'a>(
4081 &'a self,
4082 ) -> impl 'a + Iterator<Item = usize> {
4083 self.language_server_statuses
4084 .iter()
4085 .filter_map(|(id, status)| {
4086 if status.pending_diagnostic_updates > 0 {
4087 Some(*id)
4088 } else {
4089 None
4090 }
4091 })
4092 }
4093
4094 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4095 let mut summary = DiagnosticSummary::default();
4096 for (_, path_summary) in self.diagnostic_summaries(cx) {
4097 summary.error_count += path_summary.error_count;
4098 summary.warning_count += path_summary.warning_count;
4099 }
4100 summary
4101 }
4102
4103 pub fn diagnostic_summaries<'a>(
4104 &'a self,
4105 cx: &'a AppContext,
4106 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4107 self.worktrees(cx).flat_map(move |worktree| {
4108 let worktree = worktree.read(cx);
4109 let worktree_id = worktree.id();
4110 worktree
4111 .diagnostic_summaries()
4112 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4113 })
4114 }
4115
4116 pub fn disk_based_diagnostics_started(
4117 &mut self,
4118 language_server_id: usize,
4119 cx: &mut ModelContext<Self>,
4120 ) {
4121 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4122 }
4123
4124 pub fn disk_based_diagnostics_finished(
4125 &mut self,
4126 language_server_id: usize,
4127 cx: &mut ModelContext<Self>,
4128 ) {
4129 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4130 }
4131
4132 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4133 self.active_entry
4134 }
4135
4136 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4137 self.worktree_for_id(path.worktree_id, cx)?
4138 .read(cx)
4139 .entry_for_path(&path.path)
4140 .map(|entry| entry.id)
4141 }
4142
4143 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4144 let worktree = self.worktree_for_entry(entry_id, cx)?;
4145 let worktree = worktree.read(cx);
4146 let worktree_id = worktree.id();
4147 let path = worktree.entry_for_id(entry_id)?.path.clone();
4148 Some(ProjectPath { worktree_id, path })
4149 }
4150
4151 // RPC message handlers
4152
4153 async fn handle_request_join_project(
4154 this: ModelHandle<Self>,
4155 message: TypedEnvelope<proto::RequestJoinProject>,
4156 _: Arc<Client>,
4157 mut cx: AsyncAppContext,
4158 ) -> Result<()> {
4159 let user_id = message.payload.requester_id;
4160 if this.read_with(&cx, |project, _| {
4161 project.collaborators.values().any(|c| c.user.id == user_id)
4162 }) {
4163 this.update(&mut cx, |this, cx| {
4164 this.respond_to_join_request(user_id, true, cx)
4165 });
4166 } else {
4167 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4168 let user = user_store
4169 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4170 .await?;
4171 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4172 }
4173 Ok(())
4174 }
4175
4176 async fn handle_unregister_project(
4177 this: ModelHandle<Self>,
4178 _: TypedEnvelope<proto::UnregisterProject>,
4179 _: Arc<Client>,
4180 mut cx: AsyncAppContext,
4181 ) -> Result<()> {
4182 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4183 Ok(())
4184 }
4185
4186 async fn handle_project_unshared(
4187 this: ModelHandle<Self>,
4188 _: TypedEnvelope<proto::ProjectUnshared>,
4189 _: Arc<Client>,
4190 mut cx: AsyncAppContext,
4191 ) -> Result<()> {
4192 this.update(&mut cx, |this, cx| this.unshared(cx));
4193 Ok(())
4194 }
4195
4196 async fn handle_add_collaborator(
4197 this: ModelHandle<Self>,
4198 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4199 _: Arc<Client>,
4200 mut cx: AsyncAppContext,
4201 ) -> Result<()> {
4202 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4203 let collaborator = envelope
4204 .payload
4205 .collaborator
4206 .take()
4207 .ok_or_else(|| anyhow!("empty collaborator"))?;
4208
4209 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4210 this.update(&mut cx, |this, cx| {
4211 this.collaborators
4212 .insert(collaborator.peer_id, collaborator);
4213 cx.notify();
4214 });
4215
4216 Ok(())
4217 }
4218
4219 async fn handle_remove_collaborator(
4220 this: ModelHandle<Self>,
4221 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4222 _: Arc<Client>,
4223 mut cx: AsyncAppContext,
4224 ) -> Result<()> {
4225 this.update(&mut cx, |this, cx| {
4226 let peer_id = PeerId(envelope.payload.peer_id);
4227 let replica_id = this
4228 .collaborators
4229 .remove(&peer_id)
4230 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4231 .replica_id;
4232 for (_, buffer) in &this.opened_buffers {
4233 if let Some(buffer) = buffer.upgrade(cx) {
4234 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4235 }
4236 }
4237
4238 cx.emit(Event::CollaboratorLeft(peer_id));
4239 cx.notify();
4240 Ok(())
4241 })
4242 }
4243
4244 async fn handle_join_project_request_cancelled(
4245 this: ModelHandle<Self>,
4246 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4247 _: Arc<Client>,
4248 mut cx: AsyncAppContext,
4249 ) -> Result<()> {
4250 let user = this
4251 .update(&mut cx, |this, cx| {
4252 this.user_store.update(cx, |user_store, cx| {
4253 user_store.fetch_user(envelope.payload.requester_id, cx)
4254 })
4255 })
4256 .await?;
4257
4258 this.update(&mut cx, |_, cx| {
4259 cx.emit(Event::ContactCancelledJoinRequest(user));
4260 });
4261
4262 Ok(())
4263 }
4264
4265 async fn handle_update_project(
4266 this: ModelHandle<Self>,
4267 envelope: TypedEnvelope<proto::UpdateProject>,
4268 client: Arc<Client>,
4269 mut cx: AsyncAppContext,
4270 ) -> Result<()> {
4271 this.update(&mut cx, |this, cx| {
4272 let replica_id = this.replica_id();
4273 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4274
4275 let mut old_worktrees_by_id = this
4276 .worktrees
4277 .drain(..)
4278 .filter_map(|worktree| {
4279 let worktree = worktree.upgrade(cx)?;
4280 Some((worktree.read(cx).id(), worktree))
4281 })
4282 .collect::<HashMap<_, _>>();
4283
4284 for worktree in envelope.payload.worktrees {
4285 if let Some(old_worktree) =
4286 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4287 {
4288 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4289 } else {
4290 let worktree = proto::Worktree {
4291 id: worktree.id,
4292 root_name: worktree.root_name,
4293 entries: Default::default(),
4294 diagnostic_summaries: Default::default(),
4295 visible: worktree.visible,
4296 scan_id: 0,
4297 };
4298 let (worktree, load_task) =
4299 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4300 this.add_worktree(&worktree, cx);
4301 load_task.detach();
4302 }
4303 }
4304
4305 this.metadata_changed(true, cx);
4306 for (id, _) in old_worktrees_by_id {
4307 cx.emit(Event::WorktreeRemoved(id));
4308 }
4309
4310 Ok(())
4311 })
4312 }
4313
4314 async fn handle_update_worktree(
4315 this: ModelHandle<Self>,
4316 envelope: TypedEnvelope<proto::UpdateWorktree>,
4317 _: Arc<Client>,
4318 mut cx: AsyncAppContext,
4319 ) -> Result<()> {
4320 this.update(&mut cx, |this, cx| {
4321 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4322 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4323 worktree.update(cx, |worktree, _| {
4324 let worktree = worktree.as_remote_mut().unwrap();
4325 worktree.update_from_remote(envelope)
4326 })?;
4327 }
4328 Ok(())
4329 })
4330 }
4331
4332 async fn handle_create_project_entry(
4333 this: ModelHandle<Self>,
4334 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4335 _: Arc<Client>,
4336 mut cx: AsyncAppContext,
4337 ) -> Result<proto::ProjectEntryResponse> {
4338 let worktree = this.update(&mut cx, |this, cx| {
4339 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4340 this.worktree_for_id(worktree_id, cx)
4341 .ok_or_else(|| anyhow!("worktree not found"))
4342 })?;
4343 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4344 let entry = worktree
4345 .update(&mut cx, |worktree, cx| {
4346 let worktree = worktree.as_local_mut().unwrap();
4347 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4348 worktree.create_entry(path, envelope.payload.is_directory, cx)
4349 })
4350 .await?;
4351 Ok(proto::ProjectEntryResponse {
4352 entry: Some((&entry).into()),
4353 worktree_scan_id: worktree_scan_id as u64,
4354 })
4355 }
4356
4357 async fn handle_rename_project_entry(
4358 this: ModelHandle<Self>,
4359 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4360 _: Arc<Client>,
4361 mut cx: AsyncAppContext,
4362 ) -> Result<proto::ProjectEntryResponse> {
4363 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4364 let worktree = this.read_with(&cx, |this, cx| {
4365 this.worktree_for_entry(entry_id, cx)
4366 .ok_or_else(|| anyhow!("worktree not found"))
4367 })?;
4368 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4369 let entry = worktree
4370 .update(&mut cx, |worktree, cx| {
4371 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4372 worktree
4373 .as_local_mut()
4374 .unwrap()
4375 .rename_entry(entry_id, new_path, cx)
4376 .ok_or_else(|| anyhow!("invalid entry"))
4377 })?
4378 .await?;
4379 Ok(proto::ProjectEntryResponse {
4380 entry: Some((&entry).into()),
4381 worktree_scan_id: worktree_scan_id as u64,
4382 })
4383 }
4384
4385 async fn handle_copy_project_entry(
4386 this: ModelHandle<Self>,
4387 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4388 _: Arc<Client>,
4389 mut cx: AsyncAppContext,
4390 ) -> Result<proto::ProjectEntryResponse> {
4391 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4392 let worktree = this.read_with(&cx, |this, cx| {
4393 this.worktree_for_entry(entry_id, cx)
4394 .ok_or_else(|| anyhow!("worktree not found"))
4395 })?;
4396 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4397 let entry = worktree
4398 .update(&mut cx, |worktree, cx| {
4399 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4400 worktree
4401 .as_local_mut()
4402 .unwrap()
4403 .copy_entry(entry_id, new_path, cx)
4404 .ok_or_else(|| anyhow!("invalid entry"))
4405 })?
4406 .await?;
4407 Ok(proto::ProjectEntryResponse {
4408 entry: Some((&entry).into()),
4409 worktree_scan_id: worktree_scan_id as u64,
4410 })
4411 }
4412
4413 async fn handle_delete_project_entry(
4414 this: ModelHandle<Self>,
4415 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4416 _: Arc<Client>,
4417 mut cx: AsyncAppContext,
4418 ) -> Result<proto::ProjectEntryResponse> {
4419 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4420 let worktree = this.read_with(&cx, |this, cx| {
4421 this.worktree_for_entry(entry_id, cx)
4422 .ok_or_else(|| anyhow!("worktree not found"))
4423 })?;
4424 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4425 worktree
4426 .update(&mut cx, |worktree, cx| {
4427 worktree
4428 .as_local_mut()
4429 .unwrap()
4430 .delete_entry(entry_id, cx)
4431 .ok_or_else(|| anyhow!("invalid entry"))
4432 })?
4433 .await?;
4434 Ok(proto::ProjectEntryResponse {
4435 entry: None,
4436 worktree_scan_id: worktree_scan_id as u64,
4437 })
4438 }
4439
4440 async fn handle_update_diagnostic_summary(
4441 this: ModelHandle<Self>,
4442 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4443 _: Arc<Client>,
4444 mut cx: AsyncAppContext,
4445 ) -> Result<()> {
4446 this.update(&mut cx, |this, cx| {
4447 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4448 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4449 if let Some(summary) = envelope.payload.summary {
4450 let project_path = ProjectPath {
4451 worktree_id,
4452 path: Path::new(&summary.path).into(),
4453 };
4454 worktree.update(cx, |worktree, _| {
4455 worktree
4456 .as_remote_mut()
4457 .unwrap()
4458 .update_diagnostic_summary(project_path.path.clone(), &summary);
4459 });
4460 cx.emit(Event::DiagnosticsUpdated {
4461 language_server_id: summary.language_server_id as usize,
4462 path: project_path,
4463 });
4464 }
4465 }
4466 Ok(())
4467 })
4468 }
4469
4470 async fn handle_start_language_server(
4471 this: ModelHandle<Self>,
4472 envelope: TypedEnvelope<proto::StartLanguageServer>,
4473 _: Arc<Client>,
4474 mut cx: AsyncAppContext,
4475 ) -> Result<()> {
4476 let server = envelope
4477 .payload
4478 .server
4479 .ok_or_else(|| anyhow!("invalid server"))?;
4480 this.update(&mut cx, |this, cx| {
4481 this.language_server_statuses.insert(
4482 server.id as usize,
4483 LanguageServerStatus {
4484 name: server.name,
4485 pending_work: Default::default(),
4486 pending_diagnostic_updates: 0,
4487 },
4488 );
4489 cx.notify();
4490 });
4491 Ok(())
4492 }
4493
4494 async fn handle_update_language_server(
4495 this: ModelHandle<Self>,
4496 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4497 _: Arc<Client>,
4498 mut cx: AsyncAppContext,
4499 ) -> Result<()> {
4500 let language_server_id = envelope.payload.language_server_id as usize;
4501 match envelope
4502 .payload
4503 .variant
4504 .ok_or_else(|| anyhow!("invalid variant"))?
4505 {
4506 proto::update_language_server::Variant::WorkStart(payload) => {
4507 this.update(&mut cx, |this, cx| {
4508 this.on_lsp_work_start(
4509 language_server_id,
4510 payload.token,
4511 LanguageServerProgress {
4512 message: payload.message,
4513 percentage: payload.percentage.map(|p| p as usize),
4514 last_update_at: Instant::now(),
4515 },
4516 cx,
4517 );
4518 })
4519 }
4520 proto::update_language_server::Variant::WorkProgress(payload) => {
4521 this.update(&mut cx, |this, cx| {
4522 this.on_lsp_work_progress(
4523 language_server_id,
4524 payload.token,
4525 LanguageServerProgress {
4526 message: payload.message,
4527 percentage: payload.percentage.map(|p| p as usize),
4528 last_update_at: Instant::now(),
4529 },
4530 cx,
4531 );
4532 })
4533 }
4534 proto::update_language_server::Variant::WorkEnd(payload) => {
4535 this.update(&mut cx, |this, cx| {
4536 this.on_lsp_work_end(language_server_id, payload.token, cx);
4537 })
4538 }
4539 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4540 this.update(&mut cx, |this, cx| {
4541 this.disk_based_diagnostics_started(language_server_id, cx);
4542 })
4543 }
4544 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4545 this.update(&mut cx, |this, cx| {
4546 this.disk_based_diagnostics_finished(language_server_id, cx)
4547 });
4548 }
4549 }
4550
4551 Ok(())
4552 }
4553
4554 async fn handle_update_buffer(
4555 this: ModelHandle<Self>,
4556 envelope: TypedEnvelope<proto::UpdateBuffer>,
4557 _: Arc<Client>,
4558 mut cx: AsyncAppContext,
4559 ) -> Result<()> {
4560 this.update(&mut cx, |this, cx| {
4561 let payload = envelope.payload.clone();
4562 let buffer_id = payload.buffer_id;
4563 let ops = payload
4564 .operations
4565 .into_iter()
4566 .map(|op| language::proto::deserialize_operation(op))
4567 .collect::<Result<Vec<_>, _>>()?;
4568 let is_remote = this.is_remote();
4569 match this.opened_buffers.entry(buffer_id) {
4570 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4571 OpenBuffer::Strong(buffer) => {
4572 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4573 }
4574 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4575 OpenBuffer::Weak(_) => {}
4576 },
4577 hash_map::Entry::Vacant(e) => {
4578 assert!(
4579 is_remote,
4580 "received buffer update from {:?}",
4581 envelope.original_sender_id
4582 );
4583 e.insert(OpenBuffer::Loading(ops));
4584 }
4585 }
4586 Ok(())
4587 })
4588 }
4589
4590 async fn handle_update_buffer_file(
4591 this: ModelHandle<Self>,
4592 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4593 _: Arc<Client>,
4594 mut cx: AsyncAppContext,
4595 ) -> Result<()> {
4596 this.update(&mut cx, |this, cx| {
4597 let payload = envelope.payload.clone();
4598 let buffer_id = payload.buffer_id;
4599 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4600 let worktree = this
4601 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4602 .ok_or_else(|| anyhow!("no such worktree"))?;
4603 let file = File::from_proto(file, worktree.clone(), cx)?;
4604 let buffer = this
4605 .opened_buffers
4606 .get_mut(&buffer_id)
4607 .and_then(|b| b.upgrade(cx))
4608 .ok_or_else(|| anyhow!("no such buffer"))?;
4609 buffer.update(cx, |buffer, cx| {
4610 buffer.file_updated(Box::new(file), cx).detach();
4611 });
4612 Ok(())
4613 })
4614 }
4615
4616 async fn handle_save_buffer(
4617 this: ModelHandle<Self>,
4618 envelope: TypedEnvelope<proto::SaveBuffer>,
4619 _: Arc<Client>,
4620 mut cx: AsyncAppContext,
4621 ) -> Result<proto::BufferSaved> {
4622 let buffer_id = envelope.payload.buffer_id;
4623 let requested_version = deserialize_version(envelope.payload.version);
4624
4625 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4626 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4627 let buffer = this
4628 .opened_buffers
4629 .get(&buffer_id)
4630 .and_then(|buffer| buffer.upgrade(cx))
4631 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4632 Ok::<_, anyhow::Error>((project_id, buffer))
4633 })?;
4634 buffer
4635 .update(&mut cx, |buffer, _| {
4636 buffer.wait_for_version(requested_version)
4637 })
4638 .await;
4639
4640 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4641 Ok(proto::BufferSaved {
4642 project_id,
4643 buffer_id,
4644 version: serialize_version(&saved_version),
4645 mtime: Some(mtime.into()),
4646 })
4647 }
4648
4649 async fn handle_reload_buffers(
4650 this: ModelHandle<Self>,
4651 envelope: TypedEnvelope<proto::ReloadBuffers>,
4652 _: Arc<Client>,
4653 mut cx: AsyncAppContext,
4654 ) -> Result<proto::ReloadBuffersResponse> {
4655 let sender_id = envelope.original_sender_id()?;
4656 let reload = this.update(&mut cx, |this, cx| {
4657 let mut buffers = HashSet::default();
4658 for buffer_id in &envelope.payload.buffer_ids {
4659 buffers.insert(
4660 this.opened_buffers
4661 .get(buffer_id)
4662 .and_then(|buffer| buffer.upgrade(cx))
4663 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4664 );
4665 }
4666 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4667 })?;
4668
4669 let project_transaction = reload.await?;
4670 let project_transaction = this.update(&mut cx, |this, cx| {
4671 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4672 });
4673 Ok(proto::ReloadBuffersResponse {
4674 transaction: Some(project_transaction),
4675 })
4676 }
4677
4678 async fn handle_format_buffers(
4679 this: ModelHandle<Self>,
4680 envelope: TypedEnvelope<proto::FormatBuffers>,
4681 _: Arc<Client>,
4682 mut cx: AsyncAppContext,
4683 ) -> Result<proto::FormatBuffersResponse> {
4684 let sender_id = envelope.original_sender_id()?;
4685 let format = this.update(&mut cx, |this, cx| {
4686 let mut buffers = HashSet::default();
4687 for buffer_id in &envelope.payload.buffer_ids {
4688 buffers.insert(
4689 this.opened_buffers
4690 .get(buffer_id)
4691 .and_then(|buffer| buffer.upgrade(cx))
4692 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4693 );
4694 }
4695 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4696 })?;
4697
4698 let project_transaction = format.await?;
4699 let project_transaction = this.update(&mut cx, |this, cx| {
4700 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4701 });
4702 Ok(proto::FormatBuffersResponse {
4703 transaction: Some(project_transaction),
4704 })
4705 }
4706
4707 async fn handle_get_completions(
4708 this: ModelHandle<Self>,
4709 envelope: TypedEnvelope<proto::GetCompletions>,
4710 _: Arc<Client>,
4711 mut cx: AsyncAppContext,
4712 ) -> Result<proto::GetCompletionsResponse> {
4713 let position = envelope
4714 .payload
4715 .position
4716 .and_then(language::proto::deserialize_anchor)
4717 .ok_or_else(|| anyhow!("invalid position"))?;
4718 let version = deserialize_version(envelope.payload.version);
4719 let buffer = this.read_with(&cx, |this, cx| {
4720 this.opened_buffers
4721 .get(&envelope.payload.buffer_id)
4722 .and_then(|buffer| buffer.upgrade(cx))
4723 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4724 })?;
4725 buffer
4726 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4727 .await;
4728 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4729 let completions = this
4730 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4731 .await?;
4732
4733 Ok(proto::GetCompletionsResponse {
4734 completions: completions
4735 .iter()
4736 .map(language::proto::serialize_completion)
4737 .collect(),
4738 version: serialize_version(&version),
4739 })
4740 }
4741
4742 async fn handle_apply_additional_edits_for_completion(
4743 this: ModelHandle<Self>,
4744 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4745 _: Arc<Client>,
4746 mut cx: AsyncAppContext,
4747 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4748 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4749 let buffer = this
4750 .opened_buffers
4751 .get(&envelope.payload.buffer_id)
4752 .and_then(|buffer| buffer.upgrade(cx))
4753 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4754 let language = buffer.read(cx).language();
4755 let completion = language::proto::deserialize_completion(
4756 envelope
4757 .payload
4758 .completion
4759 .ok_or_else(|| anyhow!("invalid completion"))?,
4760 language,
4761 )?;
4762 Ok::<_, anyhow::Error>(
4763 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4764 )
4765 })?;
4766
4767 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4768 transaction: apply_additional_edits
4769 .await?
4770 .as_ref()
4771 .map(language::proto::serialize_transaction),
4772 })
4773 }
4774
4775 async fn handle_get_code_actions(
4776 this: ModelHandle<Self>,
4777 envelope: TypedEnvelope<proto::GetCodeActions>,
4778 _: Arc<Client>,
4779 mut cx: AsyncAppContext,
4780 ) -> Result<proto::GetCodeActionsResponse> {
4781 let start = envelope
4782 .payload
4783 .start
4784 .and_then(language::proto::deserialize_anchor)
4785 .ok_or_else(|| anyhow!("invalid start"))?;
4786 let end = envelope
4787 .payload
4788 .end
4789 .and_then(language::proto::deserialize_anchor)
4790 .ok_or_else(|| anyhow!("invalid end"))?;
4791 let buffer = this.update(&mut cx, |this, cx| {
4792 this.opened_buffers
4793 .get(&envelope.payload.buffer_id)
4794 .and_then(|buffer| buffer.upgrade(cx))
4795 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4796 })?;
4797 buffer
4798 .update(&mut cx, |buffer, _| {
4799 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4800 })
4801 .await;
4802
4803 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4804 let code_actions = this.update(&mut cx, |this, cx| {
4805 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4806 })?;
4807
4808 Ok(proto::GetCodeActionsResponse {
4809 actions: code_actions
4810 .await?
4811 .iter()
4812 .map(language::proto::serialize_code_action)
4813 .collect(),
4814 version: serialize_version(&version),
4815 })
4816 }
4817
4818 async fn handle_apply_code_action(
4819 this: ModelHandle<Self>,
4820 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4821 _: Arc<Client>,
4822 mut cx: AsyncAppContext,
4823 ) -> Result<proto::ApplyCodeActionResponse> {
4824 let sender_id = envelope.original_sender_id()?;
4825 let action = language::proto::deserialize_code_action(
4826 envelope
4827 .payload
4828 .action
4829 .ok_or_else(|| anyhow!("invalid action"))?,
4830 )?;
4831 let apply_code_action = this.update(&mut cx, |this, cx| {
4832 let buffer = this
4833 .opened_buffers
4834 .get(&envelope.payload.buffer_id)
4835 .and_then(|buffer| buffer.upgrade(cx))
4836 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4837 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4838 })?;
4839
4840 let project_transaction = apply_code_action.await?;
4841 let project_transaction = this.update(&mut cx, |this, cx| {
4842 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4843 });
4844 Ok(proto::ApplyCodeActionResponse {
4845 transaction: Some(project_transaction),
4846 })
4847 }
4848
4849 async fn handle_lsp_command<T: LspCommand>(
4850 this: ModelHandle<Self>,
4851 envelope: TypedEnvelope<T::ProtoRequest>,
4852 _: Arc<Client>,
4853 mut cx: AsyncAppContext,
4854 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4855 where
4856 <T::LspRequest as lsp::request::Request>::Result: Send,
4857 {
4858 let sender_id = envelope.original_sender_id()?;
4859 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4860 let buffer_handle = this.read_with(&cx, |this, _| {
4861 this.opened_buffers
4862 .get(&buffer_id)
4863 .and_then(|buffer| buffer.upgrade(&cx))
4864 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4865 })?;
4866 let request = T::from_proto(
4867 envelope.payload,
4868 this.clone(),
4869 buffer_handle.clone(),
4870 cx.clone(),
4871 )
4872 .await?;
4873 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4874 let response = this
4875 .update(&mut cx, |this, cx| {
4876 this.request_lsp(buffer_handle, request, cx)
4877 })
4878 .await?;
4879 this.update(&mut cx, |this, cx| {
4880 Ok(T::response_to_proto(
4881 response,
4882 this,
4883 sender_id,
4884 &buffer_version,
4885 cx,
4886 ))
4887 })
4888 }
4889
4890 async fn handle_get_project_symbols(
4891 this: ModelHandle<Self>,
4892 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4893 _: Arc<Client>,
4894 mut cx: AsyncAppContext,
4895 ) -> Result<proto::GetProjectSymbolsResponse> {
4896 let symbols = this
4897 .update(&mut cx, |this, cx| {
4898 this.symbols(&envelope.payload.query, cx)
4899 })
4900 .await?;
4901
4902 Ok(proto::GetProjectSymbolsResponse {
4903 symbols: symbols.iter().map(serialize_symbol).collect(),
4904 })
4905 }
4906
4907 async fn handle_search_project(
4908 this: ModelHandle<Self>,
4909 envelope: TypedEnvelope<proto::SearchProject>,
4910 _: Arc<Client>,
4911 mut cx: AsyncAppContext,
4912 ) -> Result<proto::SearchProjectResponse> {
4913 let peer_id = envelope.original_sender_id()?;
4914 let query = SearchQuery::from_proto(envelope.payload)?;
4915 let result = this
4916 .update(&mut cx, |this, cx| this.search(query, cx))
4917 .await?;
4918
4919 this.update(&mut cx, |this, cx| {
4920 let mut locations = Vec::new();
4921 for (buffer, ranges) in result {
4922 for range in ranges {
4923 let start = serialize_anchor(&range.start);
4924 let end = serialize_anchor(&range.end);
4925 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4926 locations.push(proto::Location {
4927 buffer: Some(buffer),
4928 start: Some(start),
4929 end: Some(end),
4930 });
4931 }
4932 }
4933 Ok(proto::SearchProjectResponse { locations })
4934 })
4935 }
4936
4937 async fn handle_open_buffer_for_symbol(
4938 this: ModelHandle<Self>,
4939 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4940 _: Arc<Client>,
4941 mut cx: AsyncAppContext,
4942 ) -> Result<proto::OpenBufferForSymbolResponse> {
4943 let peer_id = envelope.original_sender_id()?;
4944 let symbol = envelope
4945 .payload
4946 .symbol
4947 .ok_or_else(|| anyhow!("invalid symbol"))?;
4948 let symbol = this.read_with(&cx, |this, _| {
4949 let symbol = this.deserialize_symbol(symbol)?;
4950 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4951 if signature == symbol.signature {
4952 Ok(symbol)
4953 } else {
4954 Err(anyhow!("invalid symbol signature"))
4955 }
4956 })?;
4957 let buffer = this
4958 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4959 .await?;
4960
4961 Ok(proto::OpenBufferForSymbolResponse {
4962 buffer: Some(this.update(&mut cx, |this, cx| {
4963 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4964 })),
4965 })
4966 }
4967
4968 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4969 let mut hasher = Sha256::new();
4970 hasher.update(worktree_id.to_proto().to_be_bytes());
4971 hasher.update(path.to_string_lossy().as_bytes());
4972 hasher.update(self.nonce.to_be_bytes());
4973 hasher.finalize().as_slice().try_into().unwrap()
4974 }
4975
4976 async fn handle_open_buffer_by_id(
4977 this: ModelHandle<Self>,
4978 envelope: TypedEnvelope<proto::OpenBufferById>,
4979 _: Arc<Client>,
4980 mut cx: AsyncAppContext,
4981 ) -> Result<proto::OpenBufferResponse> {
4982 let peer_id = envelope.original_sender_id()?;
4983 let buffer = this
4984 .update(&mut cx, |this, cx| {
4985 this.open_buffer_by_id(envelope.payload.id, cx)
4986 })
4987 .await?;
4988 this.update(&mut cx, |this, cx| {
4989 Ok(proto::OpenBufferResponse {
4990 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4991 })
4992 })
4993 }
4994
4995 async fn handle_open_buffer_by_path(
4996 this: ModelHandle<Self>,
4997 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4998 _: Arc<Client>,
4999 mut cx: AsyncAppContext,
5000 ) -> Result<proto::OpenBufferResponse> {
5001 let peer_id = envelope.original_sender_id()?;
5002 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5003 let open_buffer = this.update(&mut cx, |this, cx| {
5004 this.open_buffer(
5005 ProjectPath {
5006 worktree_id,
5007 path: PathBuf::from(envelope.payload.path).into(),
5008 },
5009 cx,
5010 )
5011 });
5012
5013 let buffer = open_buffer.await?;
5014 this.update(&mut cx, |this, cx| {
5015 Ok(proto::OpenBufferResponse {
5016 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5017 })
5018 })
5019 }
5020
5021 fn serialize_project_transaction_for_peer(
5022 &mut self,
5023 project_transaction: ProjectTransaction,
5024 peer_id: PeerId,
5025 cx: &AppContext,
5026 ) -> proto::ProjectTransaction {
5027 let mut serialized_transaction = proto::ProjectTransaction {
5028 buffers: Default::default(),
5029 transactions: Default::default(),
5030 };
5031 for (buffer, transaction) in project_transaction.0 {
5032 serialized_transaction
5033 .buffers
5034 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5035 serialized_transaction
5036 .transactions
5037 .push(language::proto::serialize_transaction(&transaction));
5038 }
5039 serialized_transaction
5040 }
5041
5042 fn deserialize_project_transaction(
5043 &mut self,
5044 message: proto::ProjectTransaction,
5045 push_to_history: bool,
5046 cx: &mut ModelContext<Self>,
5047 ) -> Task<Result<ProjectTransaction>> {
5048 cx.spawn(|this, mut cx| async move {
5049 let mut project_transaction = ProjectTransaction::default();
5050 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5051 let buffer = this
5052 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5053 .await?;
5054 let transaction = language::proto::deserialize_transaction(transaction)?;
5055 project_transaction.0.insert(buffer, transaction);
5056 }
5057
5058 for (buffer, transaction) in &project_transaction.0 {
5059 buffer
5060 .update(&mut cx, |buffer, _| {
5061 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5062 })
5063 .await;
5064
5065 if push_to_history {
5066 buffer.update(&mut cx, |buffer, _| {
5067 buffer.push_transaction(transaction.clone(), Instant::now());
5068 });
5069 }
5070 }
5071
5072 Ok(project_transaction)
5073 })
5074 }
5075
5076 fn serialize_buffer_for_peer(
5077 &mut self,
5078 buffer: &ModelHandle<Buffer>,
5079 peer_id: PeerId,
5080 cx: &AppContext,
5081 ) -> proto::Buffer {
5082 let buffer_id = buffer.read(cx).remote_id();
5083 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5084 if shared_buffers.insert(buffer_id) {
5085 proto::Buffer {
5086 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5087 }
5088 } else {
5089 proto::Buffer {
5090 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5091 }
5092 }
5093 }
5094
5095 fn deserialize_buffer(
5096 &mut self,
5097 buffer: proto::Buffer,
5098 cx: &mut ModelContext<Self>,
5099 ) -> Task<Result<ModelHandle<Buffer>>> {
5100 let replica_id = self.replica_id();
5101
5102 let opened_buffer_tx = self.opened_buffer.0.clone();
5103 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5104 cx.spawn(|this, mut cx| async move {
5105 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5106 proto::buffer::Variant::Id(id) => {
5107 let buffer = loop {
5108 let buffer = this.read_with(&cx, |this, cx| {
5109 this.opened_buffers
5110 .get(&id)
5111 .and_then(|buffer| buffer.upgrade(cx))
5112 });
5113 if let Some(buffer) = buffer {
5114 break buffer;
5115 }
5116 opened_buffer_rx
5117 .next()
5118 .await
5119 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5120 };
5121 Ok(buffer)
5122 }
5123 proto::buffer::Variant::State(mut buffer) => {
5124 let mut buffer_worktree = None;
5125 let mut buffer_file = None;
5126 if let Some(file) = buffer.file.take() {
5127 this.read_with(&cx, |this, cx| {
5128 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5129 let worktree =
5130 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5131 anyhow!("no worktree found for id {}", file.worktree_id)
5132 })?;
5133 buffer_file =
5134 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5135 as Box<dyn language::File>);
5136 buffer_worktree = Some(worktree);
5137 Ok::<_, anyhow::Error>(())
5138 })?;
5139 }
5140
5141 let buffer = cx.add_model(|cx| {
5142 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5143 });
5144
5145 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5146
5147 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5148 Ok(buffer)
5149 }
5150 }
5151 })
5152 }
5153
5154 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5155 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5156 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5157 let start = serialized_symbol
5158 .start
5159 .ok_or_else(|| anyhow!("invalid start"))?;
5160 let end = serialized_symbol
5161 .end
5162 .ok_or_else(|| anyhow!("invalid end"))?;
5163 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5164 let path = PathBuf::from(serialized_symbol.path);
5165 let language = self.languages.select_language(&path);
5166 Ok(Symbol {
5167 source_worktree_id,
5168 worktree_id,
5169 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5170 label: language
5171 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5172 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5173 name: serialized_symbol.name,
5174 path,
5175 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5176 kind,
5177 signature: serialized_symbol
5178 .signature
5179 .try_into()
5180 .map_err(|_| anyhow!("invalid signature"))?,
5181 })
5182 }
5183
5184 async fn handle_buffer_saved(
5185 this: ModelHandle<Self>,
5186 envelope: TypedEnvelope<proto::BufferSaved>,
5187 _: Arc<Client>,
5188 mut cx: AsyncAppContext,
5189 ) -> Result<()> {
5190 let version = deserialize_version(envelope.payload.version);
5191 let mtime = envelope
5192 .payload
5193 .mtime
5194 .ok_or_else(|| anyhow!("missing mtime"))?
5195 .into();
5196
5197 this.update(&mut cx, |this, cx| {
5198 let buffer = this
5199 .opened_buffers
5200 .get(&envelope.payload.buffer_id)
5201 .and_then(|buffer| buffer.upgrade(cx));
5202 if let Some(buffer) = buffer {
5203 buffer.update(cx, |buffer, cx| {
5204 buffer.did_save(version, mtime, None, cx);
5205 });
5206 }
5207 Ok(())
5208 })
5209 }
5210
5211 async fn handle_buffer_reloaded(
5212 this: ModelHandle<Self>,
5213 envelope: TypedEnvelope<proto::BufferReloaded>,
5214 _: Arc<Client>,
5215 mut cx: AsyncAppContext,
5216 ) -> Result<()> {
5217 let payload = envelope.payload.clone();
5218 let version = deserialize_version(payload.version);
5219 let mtime = payload
5220 .mtime
5221 .ok_or_else(|| anyhow!("missing mtime"))?
5222 .into();
5223 this.update(&mut cx, |this, cx| {
5224 let buffer = this
5225 .opened_buffers
5226 .get(&payload.buffer_id)
5227 .and_then(|buffer| buffer.upgrade(cx));
5228 if let Some(buffer) = buffer {
5229 buffer.update(cx, |buffer, cx| {
5230 buffer.did_reload(version, mtime, cx);
5231 });
5232 }
5233 Ok(())
5234 })
5235 }
5236
5237 pub fn match_paths<'a>(
5238 &self,
5239 query: &'a str,
5240 include_ignored: bool,
5241 smart_case: bool,
5242 max_results: usize,
5243 cancel_flag: &'a AtomicBool,
5244 cx: &AppContext,
5245 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5246 let worktrees = self
5247 .worktrees(cx)
5248 .filter(|worktree| worktree.read(cx).is_visible())
5249 .collect::<Vec<_>>();
5250 let include_root_name = worktrees.len() > 1;
5251 let candidate_sets = worktrees
5252 .into_iter()
5253 .map(|worktree| CandidateSet {
5254 snapshot: worktree.read(cx).snapshot(),
5255 include_ignored,
5256 include_root_name,
5257 })
5258 .collect::<Vec<_>>();
5259
5260 let background = cx.background().clone();
5261 async move {
5262 fuzzy::match_paths(
5263 candidate_sets.as_slice(),
5264 query,
5265 smart_case,
5266 max_results,
5267 cancel_flag,
5268 background,
5269 )
5270 .await
5271 }
5272 }
5273
5274 fn edits_from_lsp(
5275 &mut self,
5276 buffer: &ModelHandle<Buffer>,
5277 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5278 version: Option<i32>,
5279 cx: &mut ModelContext<Self>,
5280 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5281 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5282 cx.background().spawn(async move {
5283 let snapshot = snapshot?;
5284 let mut lsp_edits = lsp_edits
5285 .into_iter()
5286 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5287 .collect::<Vec<_>>();
5288 lsp_edits.sort_by_key(|(range, _)| range.start);
5289
5290 let mut lsp_edits = lsp_edits.into_iter().peekable();
5291 let mut edits = Vec::new();
5292 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5293 // Combine any LSP edits that are adjacent.
5294 //
5295 // Also, combine LSP edits that are separated from each other by only
5296 // a newline. This is important because for some code actions,
5297 // Rust-analyzer rewrites the entire buffer via a series of edits that
5298 // are separated by unchanged newline characters.
5299 //
5300 // In order for the diffing logic below to work properly, any edits that
5301 // cancel each other out must be combined into one.
5302 while let Some((next_range, next_text)) = lsp_edits.peek() {
5303 if next_range.start > range.end {
5304 if next_range.start.row > range.end.row + 1
5305 || next_range.start.column > 0
5306 || snapshot.clip_point_utf16(
5307 PointUtf16::new(range.end.row, u32::MAX),
5308 Bias::Left,
5309 ) > range.end
5310 {
5311 break;
5312 }
5313 new_text.push('\n');
5314 }
5315 range.end = next_range.end;
5316 new_text.push_str(&next_text);
5317 lsp_edits.next();
5318 }
5319
5320 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5321 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5322 {
5323 return Err(anyhow!("invalid edits received from language server"));
5324 }
5325
5326 // For multiline edits, perform a diff of the old and new text so that
5327 // we can identify the changes more precisely, preserving the locations
5328 // of any anchors positioned in the unchanged regions.
5329 if range.end.row > range.start.row {
5330 let mut offset = range.start.to_offset(&snapshot);
5331 let old_text = snapshot.text_for_range(range).collect::<String>();
5332
5333 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5334 let mut moved_since_edit = true;
5335 for change in diff.iter_all_changes() {
5336 let tag = change.tag();
5337 let value = change.value();
5338 match tag {
5339 ChangeTag::Equal => {
5340 offset += value.len();
5341 moved_since_edit = true;
5342 }
5343 ChangeTag::Delete => {
5344 let start = snapshot.anchor_after(offset);
5345 let end = snapshot.anchor_before(offset + value.len());
5346 if moved_since_edit {
5347 edits.push((start..end, String::new()));
5348 } else {
5349 edits.last_mut().unwrap().0.end = end;
5350 }
5351 offset += value.len();
5352 moved_since_edit = false;
5353 }
5354 ChangeTag::Insert => {
5355 if moved_since_edit {
5356 let anchor = snapshot.anchor_after(offset);
5357 edits.push((anchor.clone()..anchor, value.to_string()));
5358 } else {
5359 edits.last_mut().unwrap().1.push_str(value);
5360 }
5361 moved_since_edit = false;
5362 }
5363 }
5364 }
5365 } else if range.end == range.start {
5366 let anchor = snapshot.anchor_after(range.start);
5367 edits.push((anchor.clone()..anchor, new_text));
5368 } else {
5369 let edit_start = snapshot.anchor_after(range.start);
5370 let edit_end = snapshot.anchor_before(range.end);
5371 edits.push((edit_start..edit_end, new_text));
5372 }
5373 }
5374
5375 Ok(edits)
5376 })
5377 }
5378
5379 fn buffer_snapshot_for_lsp_version(
5380 &mut self,
5381 buffer: &ModelHandle<Buffer>,
5382 version: Option<i32>,
5383 cx: &AppContext,
5384 ) -> Result<TextBufferSnapshot> {
5385 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5386
5387 if let Some(version) = version {
5388 let buffer_id = buffer.read(cx).remote_id();
5389 let snapshots = self
5390 .buffer_snapshots
5391 .get_mut(&buffer_id)
5392 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5393 let mut found_snapshot = None;
5394 snapshots.retain(|(snapshot_version, snapshot)| {
5395 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5396 false
5397 } else {
5398 if *snapshot_version == version {
5399 found_snapshot = Some(snapshot.clone());
5400 }
5401 true
5402 }
5403 });
5404
5405 found_snapshot.ok_or_else(|| {
5406 anyhow!(
5407 "snapshot not found for buffer {} at version {}",
5408 buffer_id,
5409 version
5410 )
5411 })
5412 } else {
5413 Ok((buffer.read(cx)).text_snapshot())
5414 }
5415 }
5416
5417 fn language_server_for_buffer(
5418 &self,
5419 buffer: &Buffer,
5420 cx: &AppContext,
5421 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5422 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5423 let worktree_id = file.worktree_id(cx);
5424 self.language_servers
5425 .get(&(worktree_id, language.lsp_adapter()?.name()))
5426 } else {
5427 None
5428 }
5429 }
5430}
5431
5432impl ProjectStore {
5433 pub fn new(db: Arc<Db>) -> Self {
5434 Self {
5435 db,
5436 projects: Default::default(),
5437 }
5438 }
5439
5440 pub fn projects<'a>(
5441 &'a self,
5442 cx: &'a AppContext,
5443 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5444 self.projects
5445 .iter()
5446 .filter_map(|project| project.upgrade(cx))
5447 }
5448
5449 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5450 if let Err(ix) = self
5451 .projects
5452 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5453 {
5454 self.projects.insert(ix, project);
5455 }
5456 cx.notify();
5457 }
5458
5459 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5460 let mut did_change = false;
5461 self.projects.retain(|project| {
5462 if project.is_upgradable(cx) {
5463 true
5464 } else {
5465 did_change = true;
5466 false
5467 }
5468 });
5469 if did_change {
5470 cx.notify();
5471 }
5472 }
5473}
5474
5475impl WorktreeHandle {
5476 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5477 match self {
5478 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5479 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5480 }
5481 }
5482}
5483
5484impl OpenBuffer {
5485 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5486 match self {
5487 OpenBuffer::Strong(handle) => Some(handle.clone()),
5488 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5489 OpenBuffer::Loading(_) => None,
5490 }
5491 }
5492}
5493
5494struct CandidateSet {
5495 snapshot: Snapshot,
5496 include_ignored: bool,
5497 include_root_name: bool,
5498}
5499
5500impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5501 type Candidates = CandidateSetIter<'a>;
5502
5503 fn id(&self) -> usize {
5504 self.snapshot.id().to_usize()
5505 }
5506
5507 fn len(&self) -> usize {
5508 if self.include_ignored {
5509 self.snapshot.file_count()
5510 } else {
5511 self.snapshot.visible_file_count()
5512 }
5513 }
5514
5515 fn prefix(&self) -> Arc<str> {
5516 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5517 self.snapshot.root_name().into()
5518 } else if self.include_root_name {
5519 format!("{}/", self.snapshot.root_name()).into()
5520 } else {
5521 "".into()
5522 }
5523 }
5524
5525 fn candidates(&'a self, start: usize) -> Self::Candidates {
5526 CandidateSetIter {
5527 traversal: self.snapshot.files(self.include_ignored, start),
5528 }
5529 }
5530}
5531
5532struct CandidateSetIter<'a> {
5533 traversal: Traversal<'a>,
5534}
5535
5536impl<'a> Iterator for CandidateSetIter<'a> {
5537 type Item = PathMatchCandidate<'a>;
5538
5539 fn next(&mut self) -> Option<Self::Item> {
5540 self.traversal.next().map(|entry| {
5541 if let EntryKind::File(char_bag) = entry.kind {
5542 PathMatchCandidate {
5543 path: &entry.path,
5544 char_bag,
5545 }
5546 } else {
5547 unreachable!()
5548 }
5549 })
5550 }
5551}
5552
5553impl Entity for ProjectStore {
5554 type Event = ();
5555}
5556
5557impl Entity for Project {
5558 type Event = Event;
5559
5560 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5561 self.project_store.update(cx, ProjectStore::prune_projects);
5562
5563 match &self.client_state {
5564 ProjectClientState::Local { remote_id_rx, .. } => {
5565 if let Some(project_id) = *remote_id_rx.borrow() {
5566 self.client
5567 .send(proto::UnregisterProject { project_id })
5568 .log_err();
5569 }
5570 }
5571 ProjectClientState::Remote { remote_id, .. } => {
5572 self.client
5573 .send(proto::LeaveProject {
5574 project_id: *remote_id,
5575 })
5576 .log_err();
5577 }
5578 }
5579 }
5580
5581 fn app_will_quit(
5582 &mut self,
5583 _: &mut MutableAppContext,
5584 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5585 let shutdown_futures = self
5586 .language_servers
5587 .drain()
5588 .filter_map(|(_, (_, server))| server.shutdown())
5589 .collect::<Vec<_>>();
5590 Some(
5591 async move {
5592 futures::future::join_all(shutdown_futures).await;
5593 }
5594 .boxed(),
5595 )
5596 }
5597}
5598
5599impl Collaborator {
5600 fn from_proto(
5601 message: proto::Collaborator,
5602 user_store: &ModelHandle<UserStore>,
5603 cx: &mut AsyncAppContext,
5604 ) -> impl Future<Output = Result<Self>> {
5605 let user = user_store.update(cx, |user_store, cx| {
5606 user_store.fetch_user(message.user_id, cx)
5607 });
5608
5609 async move {
5610 Ok(Self {
5611 peer_id: PeerId(message.peer_id),
5612 user: user.await?,
5613 replica_id: message.replica_id as ReplicaId,
5614 })
5615 }
5616 }
5617}
5618
5619impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5620 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5621 Self {
5622 worktree_id,
5623 path: path.as_ref().into(),
5624 }
5625 }
5626}
5627
5628impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5629 fn from(options: lsp::CreateFileOptions) -> Self {
5630 Self {
5631 overwrite: options.overwrite.unwrap_or(false),
5632 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5633 }
5634 }
5635}
5636
5637impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5638 fn from(options: lsp::RenameFileOptions) -> Self {
5639 Self {
5640 overwrite: options.overwrite.unwrap_or(false),
5641 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5642 }
5643 }
5644}
5645
5646impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5647 fn from(options: lsp::DeleteFileOptions) -> Self {
5648 Self {
5649 recursive: options.recursive.unwrap_or(false),
5650 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5651 }
5652 }
5653}
5654
5655fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5656 proto::Symbol {
5657 source_worktree_id: symbol.source_worktree_id.to_proto(),
5658 worktree_id: symbol.worktree_id.to_proto(),
5659 language_server_name: symbol.language_server_name.0.to_string(),
5660 name: symbol.name.clone(),
5661 kind: unsafe { mem::transmute(symbol.kind) },
5662 path: symbol.path.to_string_lossy().to_string(),
5663 start: Some(proto::Point {
5664 row: symbol.range.start.row,
5665 column: symbol.range.start.column,
5666 }),
5667 end: Some(proto::Point {
5668 row: symbol.range.end.row,
5669 column: symbol.range.end.column,
5670 }),
5671 signature: symbol.signature.to_vec(),
5672 }
5673}
5674
5675fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5676 let mut path_components = path.components();
5677 let mut base_components = base.components();
5678 let mut components: Vec<Component> = Vec::new();
5679 loop {
5680 match (path_components.next(), base_components.next()) {
5681 (None, None) => break,
5682 (Some(a), None) => {
5683 components.push(a);
5684 components.extend(path_components.by_ref());
5685 break;
5686 }
5687 (None, _) => components.push(Component::ParentDir),
5688 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5689 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5690 (Some(a), Some(_)) => {
5691 components.push(Component::ParentDir);
5692 for _ in base_components {
5693 components.push(Component::ParentDir);
5694 }
5695 components.push(a);
5696 components.extend(path_components.by_ref());
5697 break;
5698 }
5699 }
5700 }
5701 components.iter().map(|c| c.as_os_str()).collect()
5702}
5703
5704impl Item for Buffer {
5705 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5706 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5707 }
5708}
5709
5710#[cfg(test)]
5711mod tests {
5712 use crate::worktree::WorktreeHandle;
5713
5714 use super::{Event, *};
5715 use fs::RealFs;
5716 use futures::{future, StreamExt};
5717 use gpui::test::subscribe;
5718 use language::{
5719 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5720 OffsetRangeExt, Point, ToPoint,
5721 };
5722 use lsp::Url;
5723 use serde_json::json;
5724 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5725 use unindent::Unindent as _;
5726 use util::{assert_set_eq, test::temp_tree};
5727
5728 #[gpui::test]
5729 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5730 let dir = temp_tree(json!({
5731 "root": {
5732 "apple": "",
5733 "banana": {
5734 "carrot": {
5735 "date": "",
5736 "endive": "",
5737 }
5738 },
5739 "fennel": {
5740 "grape": "",
5741 }
5742 }
5743 }));
5744
5745 let root_link_path = dir.path().join("root_link");
5746 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5747 unix::fs::symlink(
5748 &dir.path().join("root/fennel"),
5749 &dir.path().join("root/finnochio"),
5750 )
5751 .unwrap();
5752
5753 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5754
5755 project.read_with(cx, |project, cx| {
5756 let tree = project.worktrees(cx).next().unwrap().read(cx);
5757 assert_eq!(tree.file_count(), 5);
5758 assert_eq!(
5759 tree.inode_for_path("fennel/grape"),
5760 tree.inode_for_path("finnochio/grape")
5761 );
5762 });
5763
5764 let cancel_flag = Default::default();
5765 let results = project
5766 .read_with(cx, |project, cx| {
5767 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5768 })
5769 .await;
5770 assert_eq!(
5771 results
5772 .into_iter()
5773 .map(|result| result.path)
5774 .collect::<Vec<Arc<Path>>>(),
5775 vec![
5776 PathBuf::from("banana/carrot/date").into(),
5777 PathBuf::from("banana/carrot/endive").into(),
5778 ]
5779 );
5780 }
5781
5782 #[gpui::test]
5783 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5784 cx.foreground().forbid_parking();
5785
5786 let mut rust_language = Language::new(
5787 LanguageConfig {
5788 name: "Rust".into(),
5789 path_suffixes: vec!["rs".to_string()],
5790 ..Default::default()
5791 },
5792 Some(tree_sitter_rust::language()),
5793 );
5794 let mut json_language = Language::new(
5795 LanguageConfig {
5796 name: "JSON".into(),
5797 path_suffixes: vec!["json".to_string()],
5798 ..Default::default()
5799 },
5800 None,
5801 );
5802 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5803 name: "the-rust-language-server",
5804 capabilities: lsp::ServerCapabilities {
5805 completion_provider: Some(lsp::CompletionOptions {
5806 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5807 ..Default::default()
5808 }),
5809 ..Default::default()
5810 },
5811 ..Default::default()
5812 });
5813 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5814 name: "the-json-language-server",
5815 capabilities: lsp::ServerCapabilities {
5816 completion_provider: Some(lsp::CompletionOptions {
5817 trigger_characters: Some(vec![":".to_string()]),
5818 ..Default::default()
5819 }),
5820 ..Default::default()
5821 },
5822 ..Default::default()
5823 });
5824
5825 let fs = FakeFs::new(cx.background());
5826 fs.insert_tree(
5827 "/the-root",
5828 json!({
5829 "test.rs": "const A: i32 = 1;",
5830 "test2.rs": "",
5831 "Cargo.toml": "a = 1",
5832 "package.json": "{\"a\": 1}",
5833 }),
5834 )
5835 .await;
5836
5837 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5838 project.update(cx, |project, _| {
5839 project.languages.add(Arc::new(rust_language));
5840 project.languages.add(Arc::new(json_language));
5841 });
5842
5843 // Open a buffer without an associated language server.
5844 let toml_buffer = project
5845 .update(cx, |project, cx| {
5846 project.open_local_buffer("/the-root/Cargo.toml", cx)
5847 })
5848 .await
5849 .unwrap();
5850
5851 // Open a buffer with an associated language server.
5852 let rust_buffer = project
5853 .update(cx, |project, cx| {
5854 project.open_local_buffer("/the-root/test.rs", cx)
5855 })
5856 .await
5857 .unwrap();
5858
5859 // A server is started up, and it is notified about Rust files.
5860 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5861 assert_eq!(
5862 fake_rust_server
5863 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5864 .await
5865 .text_document,
5866 lsp::TextDocumentItem {
5867 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5868 version: 0,
5869 text: "const A: i32 = 1;".to_string(),
5870 language_id: Default::default()
5871 }
5872 );
5873
5874 // The buffer is configured based on the language server's capabilities.
5875 rust_buffer.read_with(cx, |buffer, _| {
5876 assert_eq!(
5877 buffer.completion_triggers(),
5878 &[".".to_string(), "::".to_string()]
5879 );
5880 });
5881 toml_buffer.read_with(cx, |buffer, _| {
5882 assert!(buffer.completion_triggers().is_empty());
5883 });
5884
5885 // Edit a buffer. The changes are reported to the language server.
5886 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5887 assert_eq!(
5888 fake_rust_server
5889 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5890 .await
5891 .text_document,
5892 lsp::VersionedTextDocumentIdentifier::new(
5893 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5894 1
5895 )
5896 );
5897
5898 // Open a third buffer with a different associated language server.
5899 let json_buffer = project
5900 .update(cx, |project, cx| {
5901 project.open_local_buffer("/the-root/package.json", cx)
5902 })
5903 .await
5904 .unwrap();
5905
5906 // A json language server is started up and is only notified about the json buffer.
5907 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5908 assert_eq!(
5909 fake_json_server
5910 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5911 .await
5912 .text_document,
5913 lsp::TextDocumentItem {
5914 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5915 version: 0,
5916 text: "{\"a\": 1}".to_string(),
5917 language_id: Default::default()
5918 }
5919 );
5920
5921 // This buffer is configured based on the second language server's
5922 // capabilities.
5923 json_buffer.read_with(cx, |buffer, _| {
5924 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5925 });
5926
5927 // When opening another buffer whose language server is already running,
5928 // it is also configured based on the existing language server's capabilities.
5929 let rust_buffer2 = project
5930 .update(cx, |project, cx| {
5931 project.open_local_buffer("/the-root/test2.rs", cx)
5932 })
5933 .await
5934 .unwrap();
5935 rust_buffer2.read_with(cx, |buffer, _| {
5936 assert_eq!(
5937 buffer.completion_triggers(),
5938 &[".".to_string(), "::".to_string()]
5939 );
5940 });
5941
5942 // Changes are reported only to servers matching the buffer's language.
5943 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5944 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5945 assert_eq!(
5946 fake_rust_server
5947 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5948 .await
5949 .text_document,
5950 lsp::VersionedTextDocumentIdentifier::new(
5951 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5952 1
5953 )
5954 );
5955
5956 // Save notifications are reported to all servers.
5957 toml_buffer
5958 .update(cx, |buffer, cx| buffer.save(cx))
5959 .await
5960 .unwrap();
5961 assert_eq!(
5962 fake_rust_server
5963 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5964 .await
5965 .text_document,
5966 lsp::TextDocumentIdentifier::new(
5967 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5968 )
5969 );
5970 assert_eq!(
5971 fake_json_server
5972 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5973 .await
5974 .text_document,
5975 lsp::TextDocumentIdentifier::new(
5976 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5977 )
5978 );
5979
5980 // Renames are reported only to servers matching the buffer's language.
5981 fs.rename(
5982 Path::new("/the-root/test2.rs"),
5983 Path::new("/the-root/test3.rs"),
5984 Default::default(),
5985 )
5986 .await
5987 .unwrap();
5988 assert_eq!(
5989 fake_rust_server
5990 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5991 .await
5992 .text_document,
5993 lsp::TextDocumentIdentifier::new(
5994 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5995 ),
5996 );
5997 assert_eq!(
5998 fake_rust_server
5999 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6000 .await
6001 .text_document,
6002 lsp::TextDocumentItem {
6003 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6004 version: 0,
6005 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6006 language_id: Default::default()
6007 },
6008 );
6009
6010 rust_buffer2.update(cx, |buffer, cx| {
6011 buffer.update_diagnostics(
6012 DiagnosticSet::from_sorted_entries(
6013 vec![DiagnosticEntry {
6014 diagnostic: Default::default(),
6015 range: Anchor::MIN..Anchor::MAX,
6016 }],
6017 &buffer.snapshot(),
6018 ),
6019 cx,
6020 );
6021 assert_eq!(
6022 buffer
6023 .snapshot()
6024 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6025 .count(),
6026 1
6027 );
6028 });
6029
6030 // When the rename changes the extension of the file, the buffer gets closed on the old
6031 // language server and gets opened on the new one.
6032 fs.rename(
6033 Path::new("/the-root/test3.rs"),
6034 Path::new("/the-root/test3.json"),
6035 Default::default(),
6036 )
6037 .await
6038 .unwrap();
6039 assert_eq!(
6040 fake_rust_server
6041 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6042 .await
6043 .text_document,
6044 lsp::TextDocumentIdentifier::new(
6045 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6046 ),
6047 );
6048 assert_eq!(
6049 fake_json_server
6050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6051 .await
6052 .text_document,
6053 lsp::TextDocumentItem {
6054 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6055 version: 0,
6056 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6057 language_id: Default::default()
6058 },
6059 );
6060
6061 // We clear the diagnostics, since the language has changed.
6062 rust_buffer2.read_with(cx, |buffer, _| {
6063 assert_eq!(
6064 buffer
6065 .snapshot()
6066 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6067 .count(),
6068 0
6069 );
6070 });
6071
6072 // The renamed file's version resets after changing language server.
6073 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6074 assert_eq!(
6075 fake_json_server
6076 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6077 .await
6078 .text_document,
6079 lsp::VersionedTextDocumentIdentifier::new(
6080 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6081 1
6082 )
6083 );
6084
6085 // Restart language servers
6086 project.update(cx, |project, cx| {
6087 project.restart_language_servers_for_buffers(
6088 vec![rust_buffer.clone(), json_buffer.clone()],
6089 cx,
6090 );
6091 });
6092
6093 let mut rust_shutdown_requests = fake_rust_server
6094 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6095 let mut json_shutdown_requests = fake_json_server
6096 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6097 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6098
6099 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6100 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6101
6102 // Ensure rust document is reopened in new rust language server
6103 assert_eq!(
6104 fake_rust_server
6105 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6106 .await
6107 .text_document,
6108 lsp::TextDocumentItem {
6109 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6110 version: 1,
6111 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6112 language_id: Default::default()
6113 }
6114 );
6115
6116 // Ensure json documents are reopened in new json language server
6117 assert_set_eq!(
6118 [
6119 fake_json_server
6120 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6121 .await
6122 .text_document,
6123 fake_json_server
6124 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6125 .await
6126 .text_document,
6127 ],
6128 [
6129 lsp::TextDocumentItem {
6130 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6131 version: 0,
6132 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6133 language_id: Default::default()
6134 },
6135 lsp::TextDocumentItem {
6136 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6137 version: 1,
6138 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6139 language_id: Default::default()
6140 }
6141 ]
6142 );
6143
6144 // Close notifications are reported only to servers matching the buffer's language.
6145 cx.update(|_| drop(json_buffer));
6146 let close_message = lsp::DidCloseTextDocumentParams {
6147 text_document: lsp::TextDocumentIdentifier::new(
6148 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6149 ),
6150 };
6151 assert_eq!(
6152 fake_json_server
6153 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6154 .await,
6155 close_message,
6156 );
6157 }
6158
6159 #[gpui::test]
6160 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6161 cx.foreground().forbid_parking();
6162
6163 let fs = FakeFs::new(cx.background());
6164 fs.insert_tree(
6165 "/dir",
6166 json!({
6167 "a.rs": "let a = 1;",
6168 "b.rs": "let b = 2;"
6169 }),
6170 )
6171 .await;
6172
6173 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6174
6175 let buffer_a = project
6176 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6177 .await
6178 .unwrap();
6179 let buffer_b = project
6180 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6181 .await
6182 .unwrap();
6183
6184 project.update(cx, |project, cx| {
6185 project
6186 .update_diagnostics(
6187 0,
6188 lsp::PublishDiagnosticsParams {
6189 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6190 version: None,
6191 diagnostics: vec![lsp::Diagnostic {
6192 range: lsp::Range::new(
6193 lsp::Position::new(0, 4),
6194 lsp::Position::new(0, 5),
6195 ),
6196 severity: Some(lsp::DiagnosticSeverity::ERROR),
6197 message: "error 1".to_string(),
6198 ..Default::default()
6199 }],
6200 },
6201 &[],
6202 cx,
6203 )
6204 .unwrap();
6205 project
6206 .update_diagnostics(
6207 0,
6208 lsp::PublishDiagnosticsParams {
6209 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6210 version: None,
6211 diagnostics: vec![lsp::Diagnostic {
6212 range: lsp::Range::new(
6213 lsp::Position::new(0, 4),
6214 lsp::Position::new(0, 5),
6215 ),
6216 severity: Some(lsp::DiagnosticSeverity::WARNING),
6217 message: "error 2".to_string(),
6218 ..Default::default()
6219 }],
6220 },
6221 &[],
6222 cx,
6223 )
6224 .unwrap();
6225 });
6226
6227 buffer_a.read_with(cx, |buffer, _| {
6228 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6229 assert_eq!(
6230 chunks
6231 .iter()
6232 .map(|(s, d)| (s.as_str(), *d))
6233 .collect::<Vec<_>>(),
6234 &[
6235 ("let ", None),
6236 ("a", Some(DiagnosticSeverity::ERROR)),
6237 (" = 1;", None),
6238 ]
6239 );
6240 });
6241 buffer_b.read_with(cx, |buffer, _| {
6242 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6243 assert_eq!(
6244 chunks
6245 .iter()
6246 .map(|(s, d)| (s.as_str(), *d))
6247 .collect::<Vec<_>>(),
6248 &[
6249 ("let ", None),
6250 ("b", Some(DiagnosticSeverity::WARNING)),
6251 (" = 2;", None),
6252 ]
6253 );
6254 });
6255 }
6256
6257 #[gpui::test]
6258 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6259 cx.foreground().forbid_parking();
6260
6261 let progress_token = "the-progress-token";
6262 let mut language = Language::new(
6263 LanguageConfig {
6264 name: "Rust".into(),
6265 path_suffixes: vec!["rs".to_string()],
6266 ..Default::default()
6267 },
6268 Some(tree_sitter_rust::language()),
6269 );
6270 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6271 disk_based_diagnostics_progress_token: Some(progress_token),
6272 disk_based_diagnostics_sources: &["disk"],
6273 ..Default::default()
6274 });
6275
6276 let fs = FakeFs::new(cx.background());
6277 fs.insert_tree(
6278 "/dir",
6279 json!({
6280 "a.rs": "fn a() { A }",
6281 "b.rs": "const y: i32 = 1",
6282 }),
6283 )
6284 .await;
6285
6286 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6287 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6288 let worktree_id =
6289 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6290
6291 // Cause worktree to start the fake language server
6292 let _buffer = project
6293 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6294 .await
6295 .unwrap();
6296
6297 let mut events = subscribe(&project, cx);
6298
6299 let mut fake_server = fake_servers.next().await.unwrap();
6300 fake_server.start_progress(progress_token).await;
6301 assert_eq!(
6302 events.next().await.unwrap(),
6303 Event::DiskBasedDiagnosticsStarted {
6304 language_server_id: 0,
6305 }
6306 );
6307
6308 fake_server.start_progress(progress_token).await;
6309 fake_server.end_progress(progress_token).await;
6310 fake_server.start_progress(progress_token).await;
6311
6312 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6313 lsp::PublishDiagnosticsParams {
6314 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6315 version: None,
6316 diagnostics: vec![lsp::Diagnostic {
6317 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6318 severity: Some(lsp::DiagnosticSeverity::ERROR),
6319 message: "undefined variable 'A'".to_string(),
6320 ..Default::default()
6321 }],
6322 },
6323 );
6324 assert_eq!(
6325 events.next().await.unwrap(),
6326 Event::DiagnosticsUpdated {
6327 language_server_id: 0,
6328 path: (worktree_id, Path::new("a.rs")).into()
6329 }
6330 );
6331
6332 fake_server.end_progress(progress_token).await;
6333 fake_server.end_progress(progress_token).await;
6334 assert_eq!(
6335 events.next().await.unwrap(),
6336 Event::DiskBasedDiagnosticsFinished {
6337 language_server_id: 0
6338 }
6339 );
6340
6341 let buffer = project
6342 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6343 .await
6344 .unwrap();
6345
6346 buffer.read_with(cx, |buffer, _| {
6347 let snapshot = buffer.snapshot();
6348 let diagnostics = snapshot
6349 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6350 .collect::<Vec<_>>();
6351 assert_eq!(
6352 diagnostics,
6353 &[DiagnosticEntry {
6354 range: Point::new(0, 9)..Point::new(0, 10),
6355 diagnostic: Diagnostic {
6356 severity: lsp::DiagnosticSeverity::ERROR,
6357 message: "undefined variable 'A'".to_string(),
6358 group_id: 0,
6359 is_primary: true,
6360 ..Default::default()
6361 }
6362 }]
6363 )
6364 });
6365
6366 // Ensure publishing empty diagnostics twice only results in one update event.
6367 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6368 lsp::PublishDiagnosticsParams {
6369 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6370 version: None,
6371 diagnostics: Default::default(),
6372 },
6373 );
6374 assert_eq!(
6375 events.next().await.unwrap(),
6376 Event::DiagnosticsUpdated {
6377 language_server_id: 0,
6378 path: (worktree_id, Path::new("a.rs")).into()
6379 }
6380 );
6381
6382 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6383 lsp::PublishDiagnosticsParams {
6384 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6385 version: None,
6386 diagnostics: Default::default(),
6387 },
6388 );
6389 cx.foreground().run_until_parked();
6390 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6391 }
6392
6393 #[gpui::test]
6394 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6395 cx.foreground().forbid_parking();
6396
6397 let progress_token = "the-progress-token";
6398 let mut language = Language::new(
6399 LanguageConfig {
6400 path_suffixes: vec!["rs".to_string()],
6401 ..Default::default()
6402 },
6403 None,
6404 );
6405 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6406 disk_based_diagnostics_sources: &["disk"],
6407 disk_based_diagnostics_progress_token: Some(progress_token),
6408 ..Default::default()
6409 });
6410
6411 let fs = FakeFs::new(cx.background());
6412 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6413
6414 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6415 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6416
6417 let buffer = project
6418 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6419 .await
6420 .unwrap();
6421
6422 // Simulate diagnostics starting to update.
6423 let mut fake_server = fake_servers.next().await.unwrap();
6424 fake_server.start_progress(progress_token).await;
6425
6426 // Restart the server before the diagnostics finish updating.
6427 project.update(cx, |project, cx| {
6428 project.restart_language_servers_for_buffers([buffer], cx);
6429 });
6430 let mut events = subscribe(&project, cx);
6431
6432 // Simulate the newly started server sending more diagnostics.
6433 let mut fake_server = fake_servers.next().await.unwrap();
6434 fake_server.start_progress(progress_token).await;
6435 assert_eq!(
6436 events.next().await.unwrap(),
6437 Event::DiskBasedDiagnosticsStarted {
6438 language_server_id: 1
6439 }
6440 );
6441 project.read_with(cx, |project, _| {
6442 assert_eq!(
6443 project
6444 .language_servers_running_disk_based_diagnostics()
6445 .collect::<Vec<_>>(),
6446 [1]
6447 );
6448 });
6449
6450 // All diagnostics are considered done, despite the old server's diagnostic
6451 // task never completing.
6452 fake_server.end_progress(progress_token).await;
6453 assert_eq!(
6454 events.next().await.unwrap(),
6455 Event::DiskBasedDiagnosticsFinished {
6456 language_server_id: 1
6457 }
6458 );
6459 project.read_with(cx, |project, _| {
6460 assert_eq!(
6461 project
6462 .language_servers_running_disk_based_diagnostics()
6463 .collect::<Vec<_>>(),
6464 [0; 0]
6465 );
6466 });
6467 }
6468
6469 #[gpui::test]
6470 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6471 cx.foreground().forbid_parking();
6472
6473 let mut language = Language::new(
6474 LanguageConfig {
6475 name: "Rust".into(),
6476 path_suffixes: vec!["rs".to_string()],
6477 ..Default::default()
6478 },
6479 Some(tree_sitter_rust::language()),
6480 );
6481 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6482 disk_based_diagnostics_sources: &["disk"],
6483 ..Default::default()
6484 });
6485
6486 let text = "
6487 fn a() { A }
6488 fn b() { BB }
6489 fn c() { CCC }
6490 "
6491 .unindent();
6492
6493 let fs = FakeFs::new(cx.background());
6494 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6495
6496 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6497 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6498
6499 let buffer = project
6500 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6501 .await
6502 .unwrap();
6503
6504 let mut fake_server = fake_servers.next().await.unwrap();
6505 let open_notification = fake_server
6506 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6507 .await;
6508
6509 // Edit the buffer, moving the content down
6510 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6511 let change_notification_1 = fake_server
6512 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6513 .await;
6514 assert!(
6515 change_notification_1.text_document.version > open_notification.text_document.version
6516 );
6517
6518 // Report some diagnostics for the initial version of the buffer
6519 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6520 lsp::PublishDiagnosticsParams {
6521 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6522 version: Some(open_notification.text_document.version),
6523 diagnostics: vec![
6524 lsp::Diagnostic {
6525 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6526 severity: Some(DiagnosticSeverity::ERROR),
6527 message: "undefined variable 'A'".to_string(),
6528 source: Some("disk".to_string()),
6529 ..Default::default()
6530 },
6531 lsp::Diagnostic {
6532 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6533 severity: Some(DiagnosticSeverity::ERROR),
6534 message: "undefined variable 'BB'".to_string(),
6535 source: Some("disk".to_string()),
6536 ..Default::default()
6537 },
6538 lsp::Diagnostic {
6539 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6540 severity: Some(DiagnosticSeverity::ERROR),
6541 source: Some("disk".to_string()),
6542 message: "undefined variable 'CCC'".to_string(),
6543 ..Default::default()
6544 },
6545 ],
6546 },
6547 );
6548
6549 // The diagnostics have moved down since they were created.
6550 buffer.next_notification(cx).await;
6551 buffer.read_with(cx, |buffer, _| {
6552 assert_eq!(
6553 buffer
6554 .snapshot()
6555 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6556 .collect::<Vec<_>>(),
6557 &[
6558 DiagnosticEntry {
6559 range: Point::new(3, 9)..Point::new(3, 11),
6560 diagnostic: Diagnostic {
6561 severity: DiagnosticSeverity::ERROR,
6562 message: "undefined variable 'BB'".to_string(),
6563 is_disk_based: true,
6564 group_id: 1,
6565 is_primary: true,
6566 ..Default::default()
6567 },
6568 },
6569 DiagnosticEntry {
6570 range: Point::new(4, 9)..Point::new(4, 12),
6571 diagnostic: Diagnostic {
6572 severity: DiagnosticSeverity::ERROR,
6573 message: "undefined variable 'CCC'".to_string(),
6574 is_disk_based: true,
6575 group_id: 2,
6576 is_primary: true,
6577 ..Default::default()
6578 }
6579 }
6580 ]
6581 );
6582 assert_eq!(
6583 chunks_with_diagnostics(buffer, 0..buffer.len()),
6584 [
6585 ("\n\nfn a() { ".to_string(), None),
6586 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6587 (" }\nfn b() { ".to_string(), None),
6588 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6589 (" }\nfn c() { ".to_string(), None),
6590 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6591 (" }\n".to_string(), None),
6592 ]
6593 );
6594 assert_eq!(
6595 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6596 [
6597 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6598 (" }\nfn c() { ".to_string(), None),
6599 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6600 ]
6601 );
6602 });
6603
6604 // Ensure overlapping diagnostics are highlighted correctly.
6605 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6606 lsp::PublishDiagnosticsParams {
6607 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6608 version: Some(open_notification.text_document.version),
6609 diagnostics: vec![
6610 lsp::Diagnostic {
6611 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6612 severity: Some(DiagnosticSeverity::ERROR),
6613 message: "undefined variable 'A'".to_string(),
6614 source: Some("disk".to_string()),
6615 ..Default::default()
6616 },
6617 lsp::Diagnostic {
6618 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6619 severity: Some(DiagnosticSeverity::WARNING),
6620 message: "unreachable statement".to_string(),
6621 source: Some("disk".to_string()),
6622 ..Default::default()
6623 },
6624 ],
6625 },
6626 );
6627
6628 buffer.next_notification(cx).await;
6629 buffer.read_with(cx, |buffer, _| {
6630 assert_eq!(
6631 buffer
6632 .snapshot()
6633 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6634 .collect::<Vec<_>>(),
6635 &[
6636 DiagnosticEntry {
6637 range: Point::new(2, 9)..Point::new(2, 12),
6638 diagnostic: Diagnostic {
6639 severity: DiagnosticSeverity::WARNING,
6640 message: "unreachable statement".to_string(),
6641 is_disk_based: true,
6642 group_id: 4,
6643 is_primary: true,
6644 ..Default::default()
6645 }
6646 },
6647 DiagnosticEntry {
6648 range: Point::new(2, 9)..Point::new(2, 10),
6649 diagnostic: Diagnostic {
6650 severity: DiagnosticSeverity::ERROR,
6651 message: "undefined variable 'A'".to_string(),
6652 is_disk_based: true,
6653 group_id: 3,
6654 is_primary: true,
6655 ..Default::default()
6656 },
6657 }
6658 ]
6659 );
6660 assert_eq!(
6661 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6662 [
6663 ("fn a() { ".to_string(), None),
6664 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6665 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6666 ("\n".to_string(), None),
6667 ]
6668 );
6669 assert_eq!(
6670 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6671 [
6672 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6673 ("\n".to_string(), None),
6674 ]
6675 );
6676 });
6677
6678 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6679 // changes since the last save.
6680 buffer.update(cx, |buffer, cx| {
6681 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6682 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6683 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6684 });
6685 let change_notification_2 = fake_server
6686 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6687 .await;
6688 assert!(
6689 change_notification_2.text_document.version
6690 > change_notification_1.text_document.version
6691 );
6692
6693 // Handle out-of-order diagnostics
6694 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6695 lsp::PublishDiagnosticsParams {
6696 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6697 version: Some(change_notification_2.text_document.version),
6698 diagnostics: vec![
6699 lsp::Diagnostic {
6700 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6701 severity: Some(DiagnosticSeverity::ERROR),
6702 message: "undefined variable 'BB'".to_string(),
6703 source: Some("disk".to_string()),
6704 ..Default::default()
6705 },
6706 lsp::Diagnostic {
6707 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6708 severity: Some(DiagnosticSeverity::WARNING),
6709 message: "undefined variable 'A'".to_string(),
6710 source: Some("disk".to_string()),
6711 ..Default::default()
6712 },
6713 ],
6714 },
6715 );
6716
6717 buffer.next_notification(cx).await;
6718 buffer.read_with(cx, |buffer, _| {
6719 assert_eq!(
6720 buffer
6721 .snapshot()
6722 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6723 .collect::<Vec<_>>(),
6724 &[
6725 DiagnosticEntry {
6726 range: Point::new(2, 21)..Point::new(2, 22),
6727 diagnostic: Diagnostic {
6728 severity: DiagnosticSeverity::WARNING,
6729 message: "undefined variable 'A'".to_string(),
6730 is_disk_based: true,
6731 group_id: 6,
6732 is_primary: true,
6733 ..Default::default()
6734 }
6735 },
6736 DiagnosticEntry {
6737 range: Point::new(3, 9)..Point::new(3, 14),
6738 diagnostic: Diagnostic {
6739 severity: DiagnosticSeverity::ERROR,
6740 message: "undefined variable 'BB'".to_string(),
6741 is_disk_based: true,
6742 group_id: 5,
6743 is_primary: true,
6744 ..Default::default()
6745 },
6746 }
6747 ]
6748 );
6749 });
6750 }
6751
6752 #[gpui::test]
6753 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6754 cx.foreground().forbid_parking();
6755
6756 let text = concat!(
6757 "let one = ;\n", //
6758 "let two = \n",
6759 "let three = 3;\n",
6760 );
6761
6762 let fs = FakeFs::new(cx.background());
6763 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6764
6765 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6766 let buffer = project
6767 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6768 .await
6769 .unwrap();
6770
6771 project.update(cx, |project, cx| {
6772 project
6773 .update_buffer_diagnostics(
6774 &buffer,
6775 vec![
6776 DiagnosticEntry {
6777 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6778 diagnostic: Diagnostic {
6779 severity: DiagnosticSeverity::ERROR,
6780 message: "syntax error 1".to_string(),
6781 ..Default::default()
6782 },
6783 },
6784 DiagnosticEntry {
6785 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6786 diagnostic: Diagnostic {
6787 severity: DiagnosticSeverity::ERROR,
6788 message: "syntax error 2".to_string(),
6789 ..Default::default()
6790 },
6791 },
6792 ],
6793 None,
6794 cx,
6795 )
6796 .unwrap();
6797 });
6798
6799 // An empty range is extended forward to include the following character.
6800 // At the end of a line, an empty range is extended backward to include
6801 // the preceding character.
6802 buffer.read_with(cx, |buffer, _| {
6803 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6804 assert_eq!(
6805 chunks
6806 .iter()
6807 .map(|(s, d)| (s.as_str(), *d))
6808 .collect::<Vec<_>>(),
6809 &[
6810 ("let one = ", None),
6811 (";", Some(DiagnosticSeverity::ERROR)),
6812 ("\nlet two =", None),
6813 (" ", Some(DiagnosticSeverity::ERROR)),
6814 ("\nlet three = 3;\n", None)
6815 ]
6816 );
6817 });
6818 }
6819
6820 #[gpui::test]
6821 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6822 cx.foreground().forbid_parking();
6823
6824 let mut language = Language::new(
6825 LanguageConfig {
6826 name: "Rust".into(),
6827 path_suffixes: vec!["rs".to_string()],
6828 ..Default::default()
6829 },
6830 Some(tree_sitter_rust::language()),
6831 );
6832 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6833
6834 let text = "
6835 fn a() {
6836 f1();
6837 }
6838 fn b() {
6839 f2();
6840 }
6841 fn c() {
6842 f3();
6843 }
6844 "
6845 .unindent();
6846
6847 let fs = FakeFs::new(cx.background());
6848 fs.insert_tree(
6849 "/dir",
6850 json!({
6851 "a.rs": text.clone(),
6852 }),
6853 )
6854 .await;
6855
6856 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6857 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6858 let buffer = project
6859 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6860 .await
6861 .unwrap();
6862
6863 let mut fake_server = fake_servers.next().await.unwrap();
6864 let lsp_document_version = fake_server
6865 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6866 .await
6867 .text_document
6868 .version;
6869
6870 // Simulate editing the buffer after the language server computes some edits.
6871 buffer.update(cx, |buffer, cx| {
6872 buffer.edit(
6873 [(
6874 Point::new(0, 0)..Point::new(0, 0),
6875 "// above first function\n",
6876 )],
6877 cx,
6878 );
6879 buffer.edit(
6880 [(
6881 Point::new(2, 0)..Point::new(2, 0),
6882 " // inside first function\n",
6883 )],
6884 cx,
6885 );
6886 buffer.edit(
6887 [(
6888 Point::new(6, 4)..Point::new(6, 4),
6889 "// inside second function ",
6890 )],
6891 cx,
6892 );
6893
6894 assert_eq!(
6895 buffer.text(),
6896 "
6897 // above first function
6898 fn a() {
6899 // inside first function
6900 f1();
6901 }
6902 fn b() {
6903 // inside second function f2();
6904 }
6905 fn c() {
6906 f3();
6907 }
6908 "
6909 .unindent()
6910 );
6911 });
6912
6913 let edits = project
6914 .update(cx, |project, cx| {
6915 project.edits_from_lsp(
6916 &buffer,
6917 vec![
6918 // replace body of first function
6919 lsp::TextEdit {
6920 range: lsp::Range::new(
6921 lsp::Position::new(0, 0),
6922 lsp::Position::new(3, 0),
6923 ),
6924 new_text: "
6925 fn a() {
6926 f10();
6927 }
6928 "
6929 .unindent(),
6930 },
6931 // edit inside second function
6932 lsp::TextEdit {
6933 range: lsp::Range::new(
6934 lsp::Position::new(4, 6),
6935 lsp::Position::new(4, 6),
6936 ),
6937 new_text: "00".into(),
6938 },
6939 // edit inside third function via two distinct edits
6940 lsp::TextEdit {
6941 range: lsp::Range::new(
6942 lsp::Position::new(7, 5),
6943 lsp::Position::new(7, 5),
6944 ),
6945 new_text: "4000".into(),
6946 },
6947 lsp::TextEdit {
6948 range: lsp::Range::new(
6949 lsp::Position::new(7, 5),
6950 lsp::Position::new(7, 6),
6951 ),
6952 new_text: "".into(),
6953 },
6954 ],
6955 Some(lsp_document_version),
6956 cx,
6957 )
6958 })
6959 .await
6960 .unwrap();
6961
6962 buffer.update(cx, |buffer, cx| {
6963 for (range, new_text) in edits {
6964 buffer.edit([(range, new_text)], cx);
6965 }
6966 assert_eq!(
6967 buffer.text(),
6968 "
6969 // above first function
6970 fn a() {
6971 // inside first function
6972 f10();
6973 }
6974 fn b() {
6975 // inside second function f200();
6976 }
6977 fn c() {
6978 f4000();
6979 }
6980 "
6981 .unindent()
6982 );
6983 });
6984 }
6985
6986 #[gpui::test]
6987 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6988 cx.foreground().forbid_parking();
6989
6990 let text = "
6991 use a::b;
6992 use a::c;
6993
6994 fn f() {
6995 b();
6996 c();
6997 }
6998 "
6999 .unindent();
7000
7001 let fs = FakeFs::new(cx.background());
7002 fs.insert_tree(
7003 "/dir",
7004 json!({
7005 "a.rs": text.clone(),
7006 }),
7007 )
7008 .await;
7009
7010 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7011 let buffer = project
7012 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7013 .await
7014 .unwrap();
7015
7016 // Simulate the language server sending us a small edit in the form of a very large diff.
7017 // Rust-analyzer does this when performing a merge-imports code action.
7018 let edits = project
7019 .update(cx, |project, cx| {
7020 project.edits_from_lsp(
7021 &buffer,
7022 [
7023 // Replace the first use statement without editing the semicolon.
7024 lsp::TextEdit {
7025 range: lsp::Range::new(
7026 lsp::Position::new(0, 4),
7027 lsp::Position::new(0, 8),
7028 ),
7029 new_text: "a::{b, c}".into(),
7030 },
7031 // Reinsert the remainder of the file between the semicolon and the final
7032 // newline of the file.
7033 lsp::TextEdit {
7034 range: lsp::Range::new(
7035 lsp::Position::new(0, 9),
7036 lsp::Position::new(0, 9),
7037 ),
7038 new_text: "\n\n".into(),
7039 },
7040 lsp::TextEdit {
7041 range: lsp::Range::new(
7042 lsp::Position::new(0, 9),
7043 lsp::Position::new(0, 9),
7044 ),
7045 new_text: "
7046 fn f() {
7047 b();
7048 c();
7049 }"
7050 .unindent(),
7051 },
7052 // Delete everything after the first newline of the file.
7053 lsp::TextEdit {
7054 range: lsp::Range::new(
7055 lsp::Position::new(1, 0),
7056 lsp::Position::new(7, 0),
7057 ),
7058 new_text: "".into(),
7059 },
7060 ],
7061 None,
7062 cx,
7063 )
7064 })
7065 .await
7066 .unwrap();
7067
7068 buffer.update(cx, |buffer, cx| {
7069 let edits = edits
7070 .into_iter()
7071 .map(|(range, text)| {
7072 (
7073 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7074 text,
7075 )
7076 })
7077 .collect::<Vec<_>>();
7078
7079 assert_eq!(
7080 edits,
7081 [
7082 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7083 (Point::new(1, 0)..Point::new(2, 0), "".into())
7084 ]
7085 );
7086
7087 for (range, new_text) in edits {
7088 buffer.edit([(range, new_text)], cx);
7089 }
7090 assert_eq!(
7091 buffer.text(),
7092 "
7093 use a::{b, c};
7094
7095 fn f() {
7096 b();
7097 c();
7098 }
7099 "
7100 .unindent()
7101 );
7102 });
7103 }
7104
7105 #[gpui::test]
7106 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7107 cx.foreground().forbid_parking();
7108
7109 let text = "
7110 use a::b;
7111 use a::c;
7112
7113 fn f() {
7114 b();
7115 c();
7116 }
7117 "
7118 .unindent();
7119
7120 let fs = FakeFs::new(cx.background());
7121 fs.insert_tree(
7122 "/dir",
7123 json!({
7124 "a.rs": text.clone(),
7125 }),
7126 )
7127 .await;
7128
7129 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7130 let buffer = project
7131 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7132 .await
7133 .unwrap();
7134
7135 // Simulate the language server sending us edits in a non-ordered fashion,
7136 // with ranges sometimes being inverted.
7137 let edits = project
7138 .update(cx, |project, cx| {
7139 project.edits_from_lsp(
7140 &buffer,
7141 [
7142 lsp::TextEdit {
7143 range: lsp::Range::new(
7144 lsp::Position::new(0, 9),
7145 lsp::Position::new(0, 9),
7146 ),
7147 new_text: "\n\n".into(),
7148 },
7149 lsp::TextEdit {
7150 range: lsp::Range::new(
7151 lsp::Position::new(0, 8),
7152 lsp::Position::new(0, 4),
7153 ),
7154 new_text: "a::{b, c}".into(),
7155 },
7156 lsp::TextEdit {
7157 range: lsp::Range::new(
7158 lsp::Position::new(1, 0),
7159 lsp::Position::new(7, 0),
7160 ),
7161 new_text: "".into(),
7162 },
7163 lsp::TextEdit {
7164 range: lsp::Range::new(
7165 lsp::Position::new(0, 9),
7166 lsp::Position::new(0, 9),
7167 ),
7168 new_text: "
7169 fn f() {
7170 b();
7171 c();
7172 }"
7173 .unindent(),
7174 },
7175 ],
7176 None,
7177 cx,
7178 )
7179 })
7180 .await
7181 .unwrap();
7182
7183 buffer.update(cx, |buffer, cx| {
7184 let edits = edits
7185 .into_iter()
7186 .map(|(range, text)| {
7187 (
7188 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7189 text,
7190 )
7191 })
7192 .collect::<Vec<_>>();
7193
7194 assert_eq!(
7195 edits,
7196 [
7197 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7198 (Point::new(1, 0)..Point::new(2, 0), "".into())
7199 ]
7200 );
7201
7202 for (range, new_text) in edits {
7203 buffer.edit([(range, new_text)], cx);
7204 }
7205 assert_eq!(
7206 buffer.text(),
7207 "
7208 use a::{b, c};
7209
7210 fn f() {
7211 b();
7212 c();
7213 }
7214 "
7215 .unindent()
7216 );
7217 });
7218 }
7219
7220 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7221 buffer: &Buffer,
7222 range: Range<T>,
7223 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7224 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7225 for chunk in buffer.snapshot().chunks(range, true) {
7226 if chunks.last().map_or(false, |prev_chunk| {
7227 prev_chunk.1 == chunk.diagnostic_severity
7228 }) {
7229 chunks.last_mut().unwrap().0.push_str(chunk.text);
7230 } else {
7231 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7232 }
7233 }
7234 chunks
7235 }
7236
7237 #[gpui::test]
7238 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7239 let dir = temp_tree(json!({
7240 "root": {
7241 "dir1": {},
7242 "dir2": {
7243 "dir3": {}
7244 }
7245 }
7246 }));
7247
7248 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7249 let cancel_flag = Default::default();
7250 let results = project
7251 .read_with(cx, |project, cx| {
7252 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7253 })
7254 .await;
7255
7256 assert!(results.is_empty());
7257 }
7258
7259 #[gpui::test(iterations = 10)]
7260 async fn test_definition(cx: &mut gpui::TestAppContext) {
7261 let mut language = Language::new(
7262 LanguageConfig {
7263 name: "Rust".into(),
7264 path_suffixes: vec!["rs".to_string()],
7265 ..Default::default()
7266 },
7267 Some(tree_sitter_rust::language()),
7268 );
7269 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7270
7271 let fs = FakeFs::new(cx.background());
7272 fs.insert_tree(
7273 "/dir",
7274 json!({
7275 "a.rs": "const fn a() { A }",
7276 "b.rs": "const y: i32 = crate::a()",
7277 }),
7278 )
7279 .await;
7280
7281 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7282 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7283
7284 let buffer = project
7285 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7286 .await
7287 .unwrap();
7288
7289 let fake_server = fake_servers.next().await.unwrap();
7290 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7291 let params = params.text_document_position_params;
7292 assert_eq!(
7293 params.text_document.uri.to_file_path().unwrap(),
7294 Path::new("/dir/b.rs"),
7295 );
7296 assert_eq!(params.position, lsp::Position::new(0, 22));
7297
7298 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7299 lsp::Location::new(
7300 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7301 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7302 ),
7303 )))
7304 });
7305
7306 let mut definitions = project
7307 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7308 .await
7309 .unwrap();
7310
7311 assert_eq!(definitions.len(), 1);
7312 let definition = definitions.pop().unwrap();
7313 cx.update(|cx| {
7314 let target_buffer = definition.buffer.read(cx);
7315 assert_eq!(
7316 target_buffer
7317 .file()
7318 .unwrap()
7319 .as_local()
7320 .unwrap()
7321 .abs_path(cx),
7322 Path::new("/dir/a.rs"),
7323 );
7324 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7325 assert_eq!(
7326 list_worktrees(&project, cx),
7327 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7328 );
7329
7330 drop(definition);
7331 });
7332 cx.read(|cx| {
7333 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7334 });
7335
7336 fn list_worktrees<'a>(
7337 project: &'a ModelHandle<Project>,
7338 cx: &'a AppContext,
7339 ) -> Vec<(&'a Path, bool)> {
7340 project
7341 .read(cx)
7342 .worktrees(cx)
7343 .map(|worktree| {
7344 let worktree = worktree.read(cx);
7345 (
7346 worktree.as_local().unwrap().abs_path().as_ref(),
7347 worktree.is_visible(),
7348 )
7349 })
7350 .collect::<Vec<_>>()
7351 }
7352 }
7353
7354 #[gpui::test]
7355 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7356 let mut language = Language::new(
7357 LanguageConfig {
7358 name: "TypeScript".into(),
7359 path_suffixes: vec!["ts".to_string()],
7360 ..Default::default()
7361 },
7362 Some(tree_sitter_typescript::language_typescript()),
7363 );
7364 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7365
7366 let fs = FakeFs::new(cx.background());
7367 fs.insert_tree(
7368 "/dir",
7369 json!({
7370 "a.ts": "",
7371 }),
7372 )
7373 .await;
7374
7375 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7376 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7377 let buffer = project
7378 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7379 .await
7380 .unwrap();
7381
7382 let fake_server = fake_language_servers.next().await.unwrap();
7383
7384 let text = "let a = b.fqn";
7385 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7386 let completions = project.update(cx, |project, cx| {
7387 project.completions(&buffer, text.len(), cx)
7388 });
7389
7390 fake_server
7391 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7392 Ok(Some(lsp::CompletionResponse::Array(vec![
7393 lsp::CompletionItem {
7394 label: "fullyQualifiedName?".into(),
7395 insert_text: Some("fullyQualifiedName".into()),
7396 ..Default::default()
7397 },
7398 ])))
7399 })
7400 .next()
7401 .await;
7402 let completions = completions.await.unwrap();
7403 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7404 assert_eq!(completions.len(), 1);
7405 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7406 assert_eq!(
7407 completions[0].old_range.to_offset(&snapshot),
7408 text.len() - 3..text.len()
7409 );
7410 }
7411
7412 #[gpui::test(iterations = 10)]
7413 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7414 let mut language = Language::new(
7415 LanguageConfig {
7416 name: "TypeScript".into(),
7417 path_suffixes: vec!["ts".to_string()],
7418 ..Default::default()
7419 },
7420 None,
7421 );
7422 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7423
7424 let fs = FakeFs::new(cx.background());
7425 fs.insert_tree(
7426 "/dir",
7427 json!({
7428 "a.ts": "a",
7429 }),
7430 )
7431 .await;
7432
7433 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7434 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7435 let buffer = project
7436 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7437 .await
7438 .unwrap();
7439
7440 let fake_server = fake_language_servers.next().await.unwrap();
7441
7442 // Language server returns code actions that contain commands, and not edits.
7443 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7444 fake_server
7445 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7446 Ok(Some(vec![
7447 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7448 title: "The code action".into(),
7449 command: Some(lsp::Command {
7450 title: "The command".into(),
7451 command: "_the/command".into(),
7452 arguments: Some(vec![json!("the-argument")]),
7453 }),
7454 ..Default::default()
7455 }),
7456 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7457 title: "two".into(),
7458 ..Default::default()
7459 }),
7460 ]))
7461 })
7462 .next()
7463 .await;
7464
7465 let action = actions.await.unwrap()[0].clone();
7466 let apply = project.update(cx, |project, cx| {
7467 project.apply_code_action(buffer.clone(), action, true, cx)
7468 });
7469
7470 // Resolving the code action does not populate its edits. In absence of
7471 // edits, we must execute the given command.
7472 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7473 |action, _| async move { Ok(action) },
7474 );
7475
7476 // While executing the command, the language server sends the editor
7477 // a `workspaceEdit` request.
7478 fake_server
7479 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7480 let fake = fake_server.clone();
7481 move |params, _| {
7482 assert_eq!(params.command, "_the/command");
7483 let fake = fake.clone();
7484 async move {
7485 fake.server
7486 .request::<lsp::request::ApplyWorkspaceEdit>(
7487 lsp::ApplyWorkspaceEditParams {
7488 label: None,
7489 edit: lsp::WorkspaceEdit {
7490 changes: Some(
7491 [(
7492 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7493 vec![lsp::TextEdit {
7494 range: lsp::Range::new(
7495 lsp::Position::new(0, 0),
7496 lsp::Position::new(0, 0),
7497 ),
7498 new_text: "X".into(),
7499 }],
7500 )]
7501 .into_iter()
7502 .collect(),
7503 ),
7504 ..Default::default()
7505 },
7506 },
7507 )
7508 .await
7509 .unwrap();
7510 Ok(Some(json!(null)))
7511 }
7512 }
7513 })
7514 .next()
7515 .await;
7516
7517 // Applying the code action returns a project transaction containing the edits
7518 // sent by the language server in its `workspaceEdit` request.
7519 let transaction = apply.await.unwrap();
7520 assert!(transaction.0.contains_key(&buffer));
7521 buffer.update(cx, |buffer, cx| {
7522 assert_eq!(buffer.text(), "Xa");
7523 buffer.undo(cx);
7524 assert_eq!(buffer.text(), "a");
7525 });
7526 }
7527
7528 #[gpui::test]
7529 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7530 let fs = FakeFs::new(cx.background());
7531 fs.insert_tree(
7532 "/dir",
7533 json!({
7534 "file1": "the old contents",
7535 }),
7536 )
7537 .await;
7538
7539 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7540 let buffer = project
7541 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7542 .await
7543 .unwrap();
7544 buffer
7545 .update(cx, |buffer, cx| {
7546 assert_eq!(buffer.text(), "the old contents");
7547 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7548 buffer.save(cx)
7549 })
7550 .await
7551 .unwrap();
7552
7553 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7554 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7555 }
7556
7557 #[gpui::test]
7558 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7559 let fs = FakeFs::new(cx.background());
7560 fs.insert_tree(
7561 "/dir",
7562 json!({
7563 "file1": "the old contents",
7564 }),
7565 )
7566 .await;
7567
7568 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7569 let buffer = project
7570 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7571 .await
7572 .unwrap();
7573 buffer
7574 .update(cx, |buffer, cx| {
7575 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7576 buffer.save(cx)
7577 })
7578 .await
7579 .unwrap();
7580
7581 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7582 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7583 }
7584
7585 #[gpui::test]
7586 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7587 let fs = FakeFs::new(cx.background());
7588 fs.insert_tree("/dir", json!({})).await;
7589
7590 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7591 let buffer = project.update(cx, |project, cx| {
7592 project.create_buffer("", None, cx).unwrap()
7593 });
7594 buffer.update(cx, |buffer, cx| {
7595 buffer.edit([(0..0, "abc")], cx);
7596 assert!(buffer.is_dirty());
7597 assert!(!buffer.has_conflict());
7598 });
7599 project
7600 .update(cx, |project, cx| {
7601 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7602 })
7603 .await
7604 .unwrap();
7605 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7606 buffer.read_with(cx, |buffer, cx| {
7607 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7608 assert!(!buffer.is_dirty());
7609 assert!(!buffer.has_conflict());
7610 });
7611
7612 let opened_buffer = project
7613 .update(cx, |project, cx| {
7614 project.open_local_buffer("/dir/file1", cx)
7615 })
7616 .await
7617 .unwrap();
7618 assert_eq!(opened_buffer, buffer);
7619 }
7620
7621 #[gpui::test(retries = 5)]
7622 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7623 let dir = temp_tree(json!({
7624 "a": {
7625 "file1": "",
7626 "file2": "",
7627 "file3": "",
7628 },
7629 "b": {
7630 "c": {
7631 "file4": "",
7632 "file5": "",
7633 }
7634 }
7635 }));
7636
7637 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7638 let rpc = project.read_with(cx, |p, _| p.client.clone());
7639
7640 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7641 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7642 async move { buffer.await.unwrap() }
7643 };
7644 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7645 project.read_with(cx, |project, cx| {
7646 let tree = project.worktrees(cx).next().unwrap();
7647 tree.read(cx)
7648 .entry_for_path(path)
7649 .expect(&format!("no entry for path {}", path))
7650 .id
7651 })
7652 };
7653
7654 let buffer2 = buffer_for_path("a/file2", cx).await;
7655 let buffer3 = buffer_for_path("a/file3", cx).await;
7656 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7657 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7658
7659 let file2_id = id_for_path("a/file2", &cx);
7660 let file3_id = id_for_path("a/file3", &cx);
7661 let file4_id = id_for_path("b/c/file4", &cx);
7662
7663 // Create a remote copy of this worktree.
7664 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7665 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7666 let (remote, load_task) = cx.update(|cx| {
7667 Worktree::remote(
7668 1,
7669 1,
7670 initial_snapshot.to_proto(&Default::default(), true),
7671 rpc.clone(),
7672 cx,
7673 )
7674 });
7675 // tree
7676 load_task.await;
7677
7678 cx.read(|cx| {
7679 assert!(!buffer2.read(cx).is_dirty());
7680 assert!(!buffer3.read(cx).is_dirty());
7681 assert!(!buffer4.read(cx).is_dirty());
7682 assert!(!buffer5.read(cx).is_dirty());
7683 });
7684
7685 // Rename and delete files and directories.
7686 tree.flush_fs_events(&cx).await;
7687 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7688 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7689 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7690 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7691 tree.flush_fs_events(&cx).await;
7692
7693 let expected_paths = vec![
7694 "a",
7695 "a/file1",
7696 "a/file2.new",
7697 "b",
7698 "d",
7699 "d/file3",
7700 "d/file4",
7701 ];
7702
7703 cx.read(|app| {
7704 assert_eq!(
7705 tree.read(app)
7706 .paths()
7707 .map(|p| p.to_str().unwrap())
7708 .collect::<Vec<_>>(),
7709 expected_paths
7710 );
7711
7712 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7713 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7714 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7715
7716 assert_eq!(
7717 buffer2.read(app).file().unwrap().path().as_ref(),
7718 Path::new("a/file2.new")
7719 );
7720 assert_eq!(
7721 buffer3.read(app).file().unwrap().path().as_ref(),
7722 Path::new("d/file3")
7723 );
7724 assert_eq!(
7725 buffer4.read(app).file().unwrap().path().as_ref(),
7726 Path::new("d/file4")
7727 );
7728 assert_eq!(
7729 buffer5.read(app).file().unwrap().path().as_ref(),
7730 Path::new("b/c/file5")
7731 );
7732
7733 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7734 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7735 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7736 assert!(buffer5.read(app).file().unwrap().is_deleted());
7737 });
7738
7739 // Update the remote worktree. Check that it becomes consistent with the
7740 // local worktree.
7741 remote.update(cx, |remote, cx| {
7742 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7743 &initial_snapshot,
7744 1,
7745 1,
7746 true,
7747 );
7748 remote
7749 .as_remote_mut()
7750 .unwrap()
7751 .snapshot
7752 .apply_remote_update(update_message)
7753 .unwrap();
7754
7755 assert_eq!(
7756 remote
7757 .paths()
7758 .map(|p| p.to_str().unwrap())
7759 .collect::<Vec<_>>(),
7760 expected_paths
7761 );
7762 });
7763 }
7764
7765 #[gpui::test]
7766 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7767 let fs = FakeFs::new(cx.background());
7768 fs.insert_tree(
7769 "/dir",
7770 json!({
7771 "a.txt": "a-contents",
7772 "b.txt": "b-contents",
7773 }),
7774 )
7775 .await;
7776
7777 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7778
7779 // Spawn multiple tasks to open paths, repeating some paths.
7780 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7781 (
7782 p.open_local_buffer("/dir/a.txt", cx),
7783 p.open_local_buffer("/dir/b.txt", cx),
7784 p.open_local_buffer("/dir/a.txt", cx),
7785 )
7786 });
7787
7788 let buffer_a_1 = buffer_a_1.await.unwrap();
7789 let buffer_a_2 = buffer_a_2.await.unwrap();
7790 let buffer_b = buffer_b.await.unwrap();
7791 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7792 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7793
7794 // There is only one buffer per path.
7795 let buffer_a_id = buffer_a_1.id();
7796 assert_eq!(buffer_a_2.id(), buffer_a_id);
7797
7798 // Open the same path again while it is still open.
7799 drop(buffer_a_1);
7800 let buffer_a_3 = project
7801 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7802 .await
7803 .unwrap();
7804
7805 // There's still only one buffer per path.
7806 assert_eq!(buffer_a_3.id(), buffer_a_id);
7807 }
7808
7809 #[gpui::test]
7810 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7811 let fs = FakeFs::new(cx.background());
7812 fs.insert_tree(
7813 "/dir",
7814 json!({
7815 "file1": "abc",
7816 "file2": "def",
7817 "file3": "ghi",
7818 }),
7819 )
7820 .await;
7821
7822 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7823
7824 let buffer1 = project
7825 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7826 .await
7827 .unwrap();
7828 let events = Rc::new(RefCell::new(Vec::new()));
7829
7830 // initially, the buffer isn't dirty.
7831 buffer1.update(cx, |buffer, cx| {
7832 cx.subscribe(&buffer1, {
7833 let events = events.clone();
7834 move |_, _, event, _| match event {
7835 BufferEvent::Operation(_) => {}
7836 _ => events.borrow_mut().push(event.clone()),
7837 }
7838 })
7839 .detach();
7840
7841 assert!(!buffer.is_dirty());
7842 assert!(events.borrow().is_empty());
7843
7844 buffer.edit([(1..2, "")], cx);
7845 });
7846
7847 // after the first edit, the buffer is dirty, and emits a dirtied event.
7848 buffer1.update(cx, |buffer, cx| {
7849 assert!(buffer.text() == "ac");
7850 assert!(buffer.is_dirty());
7851 assert_eq!(
7852 *events.borrow(),
7853 &[language::Event::Edited, language::Event::Dirtied]
7854 );
7855 events.borrow_mut().clear();
7856 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7857 });
7858
7859 // after saving, the buffer is not dirty, and emits a saved event.
7860 buffer1.update(cx, |buffer, cx| {
7861 assert!(!buffer.is_dirty());
7862 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7863 events.borrow_mut().clear();
7864
7865 buffer.edit([(1..1, "B")], cx);
7866 buffer.edit([(2..2, "D")], cx);
7867 });
7868
7869 // after editing again, the buffer is dirty, and emits another dirty event.
7870 buffer1.update(cx, |buffer, cx| {
7871 assert!(buffer.text() == "aBDc");
7872 assert!(buffer.is_dirty());
7873 assert_eq!(
7874 *events.borrow(),
7875 &[
7876 language::Event::Edited,
7877 language::Event::Dirtied,
7878 language::Event::Edited,
7879 ],
7880 );
7881 events.borrow_mut().clear();
7882
7883 // TODO - currently, after restoring the buffer to its
7884 // previously-saved state, the is still considered dirty.
7885 buffer.edit([(1..3, "")], cx);
7886 assert!(buffer.text() == "ac");
7887 assert!(buffer.is_dirty());
7888 });
7889
7890 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7891
7892 // When a file is deleted, the buffer is considered dirty.
7893 let events = Rc::new(RefCell::new(Vec::new()));
7894 let buffer2 = project
7895 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7896 .await
7897 .unwrap();
7898 buffer2.update(cx, |_, cx| {
7899 cx.subscribe(&buffer2, {
7900 let events = events.clone();
7901 move |_, _, event, _| events.borrow_mut().push(event.clone())
7902 })
7903 .detach();
7904 });
7905
7906 fs.remove_file("/dir/file2".as_ref(), Default::default())
7907 .await
7908 .unwrap();
7909 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7910 assert_eq!(
7911 *events.borrow(),
7912 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7913 );
7914
7915 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7916 let events = Rc::new(RefCell::new(Vec::new()));
7917 let buffer3 = project
7918 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7919 .await
7920 .unwrap();
7921 buffer3.update(cx, |_, cx| {
7922 cx.subscribe(&buffer3, {
7923 let events = events.clone();
7924 move |_, _, event, _| events.borrow_mut().push(event.clone())
7925 })
7926 .detach();
7927 });
7928
7929 buffer3.update(cx, |buffer, cx| {
7930 buffer.edit([(0..0, "x")], cx);
7931 });
7932 events.borrow_mut().clear();
7933 fs.remove_file("/dir/file3".as_ref(), Default::default())
7934 .await
7935 .unwrap();
7936 buffer3
7937 .condition(&cx, |_, _| !events.borrow().is_empty())
7938 .await;
7939 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7940 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7941 }
7942
7943 #[gpui::test]
7944 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7945 let initial_contents = "aaa\nbbbbb\nc\n";
7946 let fs = FakeFs::new(cx.background());
7947 fs.insert_tree(
7948 "/dir",
7949 json!({
7950 "the-file": initial_contents,
7951 }),
7952 )
7953 .await;
7954 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7955 let buffer = project
7956 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7957 .await
7958 .unwrap();
7959
7960 let anchors = (0..3)
7961 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7962 .collect::<Vec<_>>();
7963
7964 // Change the file on disk, adding two new lines of text, and removing
7965 // one line.
7966 buffer.read_with(cx, |buffer, _| {
7967 assert!(!buffer.is_dirty());
7968 assert!(!buffer.has_conflict());
7969 });
7970 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7971 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7972 .await
7973 .unwrap();
7974
7975 // Because the buffer was not modified, it is reloaded from disk. Its
7976 // contents are edited according to the diff between the old and new
7977 // file contents.
7978 buffer
7979 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7980 .await;
7981
7982 buffer.update(cx, |buffer, _| {
7983 assert_eq!(buffer.text(), new_contents);
7984 assert!(!buffer.is_dirty());
7985 assert!(!buffer.has_conflict());
7986
7987 let anchor_positions = anchors
7988 .iter()
7989 .map(|anchor| anchor.to_point(&*buffer))
7990 .collect::<Vec<_>>();
7991 assert_eq!(
7992 anchor_positions,
7993 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7994 );
7995 });
7996
7997 // Modify the buffer
7998 buffer.update(cx, |buffer, cx| {
7999 buffer.edit([(0..0, " ")], cx);
8000 assert!(buffer.is_dirty());
8001 assert!(!buffer.has_conflict());
8002 });
8003
8004 // Change the file on disk again, adding blank lines to the beginning.
8005 fs.save(
8006 "/dir/the-file".as_ref(),
8007 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8008 )
8009 .await
8010 .unwrap();
8011
8012 // Because the buffer is modified, it doesn't reload from disk, but is
8013 // marked as having a conflict.
8014 buffer
8015 .condition(&cx, |buffer, _| buffer.has_conflict())
8016 .await;
8017 }
8018
8019 #[gpui::test]
8020 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8021 cx.foreground().forbid_parking();
8022
8023 let fs = FakeFs::new(cx.background());
8024 fs.insert_tree(
8025 "/the-dir",
8026 json!({
8027 "a.rs": "
8028 fn foo(mut v: Vec<usize>) {
8029 for x in &v {
8030 v.push(1);
8031 }
8032 }
8033 "
8034 .unindent(),
8035 }),
8036 )
8037 .await;
8038
8039 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8040 let buffer = project
8041 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8042 .await
8043 .unwrap();
8044
8045 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8046 let message = lsp::PublishDiagnosticsParams {
8047 uri: buffer_uri.clone(),
8048 diagnostics: vec![
8049 lsp::Diagnostic {
8050 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8051 severity: Some(DiagnosticSeverity::WARNING),
8052 message: "error 1".to_string(),
8053 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8054 location: lsp::Location {
8055 uri: buffer_uri.clone(),
8056 range: lsp::Range::new(
8057 lsp::Position::new(1, 8),
8058 lsp::Position::new(1, 9),
8059 ),
8060 },
8061 message: "error 1 hint 1".to_string(),
8062 }]),
8063 ..Default::default()
8064 },
8065 lsp::Diagnostic {
8066 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8067 severity: Some(DiagnosticSeverity::HINT),
8068 message: "error 1 hint 1".to_string(),
8069 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8070 location: lsp::Location {
8071 uri: buffer_uri.clone(),
8072 range: lsp::Range::new(
8073 lsp::Position::new(1, 8),
8074 lsp::Position::new(1, 9),
8075 ),
8076 },
8077 message: "original diagnostic".to_string(),
8078 }]),
8079 ..Default::default()
8080 },
8081 lsp::Diagnostic {
8082 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8083 severity: Some(DiagnosticSeverity::ERROR),
8084 message: "error 2".to_string(),
8085 related_information: Some(vec![
8086 lsp::DiagnosticRelatedInformation {
8087 location: lsp::Location {
8088 uri: buffer_uri.clone(),
8089 range: lsp::Range::new(
8090 lsp::Position::new(1, 13),
8091 lsp::Position::new(1, 15),
8092 ),
8093 },
8094 message: "error 2 hint 1".to_string(),
8095 },
8096 lsp::DiagnosticRelatedInformation {
8097 location: lsp::Location {
8098 uri: buffer_uri.clone(),
8099 range: lsp::Range::new(
8100 lsp::Position::new(1, 13),
8101 lsp::Position::new(1, 15),
8102 ),
8103 },
8104 message: "error 2 hint 2".to_string(),
8105 },
8106 ]),
8107 ..Default::default()
8108 },
8109 lsp::Diagnostic {
8110 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8111 severity: Some(DiagnosticSeverity::HINT),
8112 message: "error 2 hint 1".to_string(),
8113 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8114 location: lsp::Location {
8115 uri: buffer_uri.clone(),
8116 range: lsp::Range::new(
8117 lsp::Position::new(2, 8),
8118 lsp::Position::new(2, 17),
8119 ),
8120 },
8121 message: "original diagnostic".to_string(),
8122 }]),
8123 ..Default::default()
8124 },
8125 lsp::Diagnostic {
8126 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8127 severity: Some(DiagnosticSeverity::HINT),
8128 message: "error 2 hint 2".to_string(),
8129 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8130 location: lsp::Location {
8131 uri: buffer_uri.clone(),
8132 range: lsp::Range::new(
8133 lsp::Position::new(2, 8),
8134 lsp::Position::new(2, 17),
8135 ),
8136 },
8137 message: "original diagnostic".to_string(),
8138 }]),
8139 ..Default::default()
8140 },
8141 ],
8142 version: None,
8143 };
8144
8145 project
8146 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8147 .unwrap();
8148 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8149
8150 assert_eq!(
8151 buffer
8152 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8153 .collect::<Vec<_>>(),
8154 &[
8155 DiagnosticEntry {
8156 range: Point::new(1, 8)..Point::new(1, 9),
8157 diagnostic: Diagnostic {
8158 severity: DiagnosticSeverity::WARNING,
8159 message: "error 1".to_string(),
8160 group_id: 0,
8161 is_primary: true,
8162 ..Default::default()
8163 }
8164 },
8165 DiagnosticEntry {
8166 range: Point::new(1, 8)..Point::new(1, 9),
8167 diagnostic: Diagnostic {
8168 severity: DiagnosticSeverity::HINT,
8169 message: "error 1 hint 1".to_string(),
8170 group_id: 0,
8171 is_primary: false,
8172 ..Default::default()
8173 }
8174 },
8175 DiagnosticEntry {
8176 range: Point::new(1, 13)..Point::new(1, 15),
8177 diagnostic: Diagnostic {
8178 severity: DiagnosticSeverity::HINT,
8179 message: "error 2 hint 1".to_string(),
8180 group_id: 1,
8181 is_primary: false,
8182 ..Default::default()
8183 }
8184 },
8185 DiagnosticEntry {
8186 range: Point::new(1, 13)..Point::new(1, 15),
8187 diagnostic: Diagnostic {
8188 severity: DiagnosticSeverity::HINT,
8189 message: "error 2 hint 2".to_string(),
8190 group_id: 1,
8191 is_primary: false,
8192 ..Default::default()
8193 }
8194 },
8195 DiagnosticEntry {
8196 range: Point::new(2, 8)..Point::new(2, 17),
8197 diagnostic: Diagnostic {
8198 severity: DiagnosticSeverity::ERROR,
8199 message: "error 2".to_string(),
8200 group_id: 1,
8201 is_primary: true,
8202 ..Default::default()
8203 }
8204 }
8205 ]
8206 );
8207
8208 assert_eq!(
8209 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8210 &[
8211 DiagnosticEntry {
8212 range: Point::new(1, 8)..Point::new(1, 9),
8213 diagnostic: Diagnostic {
8214 severity: DiagnosticSeverity::WARNING,
8215 message: "error 1".to_string(),
8216 group_id: 0,
8217 is_primary: true,
8218 ..Default::default()
8219 }
8220 },
8221 DiagnosticEntry {
8222 range: Point::new(1, 8)..Point::new(1, 9),
8223 diagnostic: Diagnostic {
8224 severity: DiagnosticSeverity::HINT,
8225 message: "error 1 hint 1".to_string(),
8226 group_id: 0,
8227 is_primary: false,
8228 ..Default::default()
8229 }
8230 },
8231 ]
8232 );
8233 assert_eq!(
8234 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8235 &[
8236 DiagnosticEntry {
8237 range: Point::new(1, 13)..Point::new(1, 15),
8238 diagnostic: Diagnostic {
8239 severity: DiagnosticSeverity::HINT,
8240 message: "error 2 hint 1".to_string(),
8241 group_id: 1,
8242 is_primary: false,
8243 ..Default::default()
8244 }
8245 },
8246 DiagnosticEntry {
8247 range: Point::new(1, 13)..Point::new(1, 15),
8248 diagnostic: Diagnostic {
8249 severity: DiagnosticSeverity::HINT,
8250 message: "error 2 hint 2".to_string(),
8251 group_id: 1,
8252 is_primary: false,
8253 ..Default::default()
8254 }
8255 },
8256 DiagnosticEntry {
8257 range: Point::new(2, 8)..Point::new(2, 17),
8258 diagnostic: Diagnostic {
8259 severity: DiagnosticSeverity::ERROR,
8260 message: "error 2".to_string(),
8261 group_id: 1,
8262 is_primary: true,
8263 ..Default::default()
8264 }
8265 }
8266 ]
8267 );
8268 }
8269
8270 #[gpui::test]
8271 async fn test_rename(cx: &mut gpui::TestAppContext) {
8272 cx.foreground().forbid_parking();
8273
8274 let mut language = Language::new(
8275 LanguageConfig {
8276 name: "Rust".into(),
8277 path_suffixes: vec!["rs".to_string()],
8278 ..Default::default()
8279 },
8280 Some(tree_sitter_rust::language()),
8281 );
8282 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8283 capabilities: lsp::ServerCapabilities {
8284 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8285 prepare_provider: Some(true),
8286 work_done_progress_options: Default::default(),
8287 })),
8288 ..Default::default()
8289 },
8290 ..Default::default()
8291 });
8292
8293 let fs = FakeFs::new(cx.background());
8294 fs.insert_tree(
8295 "/dir",
8296 json!({
8297 "one.rs": "const ONE: usize = 1;",
8298 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8299 }),
8300 )
8301 .await;
8302
8303 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8304 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8305 let buffer = project
8306 .update(cx, |project, cx| {
8307 project.open_local_buffer("/dir/one.rs", cx)
8308 })
8309 .await
8310 .unwrap();
8311
8312 let fake_server = fake_servers.next().await.unwrap();
8313
8314 let response = project.update(cx, |project, cx| {
8315 project.prepare_rename(buffer.clone(), 7, cx)
8316 });
8317 fake_server
8318 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8319 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8320 assert_eq!(params.position, lsp::Position::new(0, 7));
8321 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8322 lsp::Position::new(0, 6),
8323 lsp::Position::new(0, 9),
8324 ))))
8325 })
8326 .next()
8327 .await
8328 .unwrap();
8329 let range = response.await.unwrap().unwrap();
8330 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8331 assert_eq!(range, 6..9);
8332
8333 let response = project.update(cx, |project, cx| {
8334 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8335 });
8336 fake_server
8337 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8338 assert_eq!(
8339 params.text_document_position.text_document.uri.as_str(),
8340 "file:///dir/one.rs"
8341 );
8342 assert_eq!(
8343 params.text_document_position.position,
8344 lsp::Position::new(0, 7)
8345 );
8346 assert_eq!(params.new_name, "THREE");
8347 Ok(Some(lsp::WorkspaceEdit {
8348 changes: Some(
8349 [
8350 (
8351 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8352 vec![lsp::TextEdit::new(
8353 lsp::Range::new(
8354 lsp::Position::new(0, 6),
8355 lsp::Position::new(0, 9),
8356 ),
8357 "THREE".to_string(),
8358 )],
8359 ),
8360 (
8361 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8362 vec![
8363 lsp::TextEdit::new(
8364 lsp::Range::new(
8365 lsp::Position::new(0, 24),
8366 lsp::Position::new(0, 27),
8367 ),
8368 "THREE".to_string(),
8369 ),
8370 lsp::TextEdit::new(
8371 lsp::Range::new(
8372 lsp::Position::new(0, 35),
8373 lsp::Position::new(0, 38),
8374 ),
8375 "THREE".to_string(),
8376 ),
8377 ],
8378 ),
8379 ]
8380 .into_iter()
8381 .collect(),
8382 ),
8383 ..Default::default()
8384 }))
8385 })
8386 .next()
8387 .await
8388 .unwrap();
8389 let mut transaction = response.await.unwrap().0;
8390 assert_eq!(transaction.len(), 2);
8391 assert_eq!(
8392 transaction
8393 .remove_entry(&buffer)
8394 .unwrap()
8395 .0
8396 .read_with(cx, |buffer, _| buffer.text()),
8397 "const THREE: usize = 1;"
8398 );
8399 assert_eq!(
8400 transaction
8401 .into_keys()
8402 .next()
8403 .unwrap()
8404 .read_with(cx, |buffer, _| buffer.text()),
8405 "const TWO: usize = one::THREE + one::THREE;"
8406 );
8407 }
8408
8409 #[gpui::test]
8410 async fn test_search(cx: &mut gpui::TestAppContext) {
8411 let fs = FakeFs::new(cx.background());
8412 fs.insert_tree(
8413 "/dir",
8414 json!({
8415 "one.rs": "const ONE: usize = 1;",
8416 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8417 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8418 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8419 }),
8420 )
8421 .await;
8422 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8423 assert_eq!(
8424 search(&project, SearchQuery::text("TWO", false, true), cx)
8425 .await
8426 .unwrap(),
8427 HashMap::from_iter([
8428 ("two.rs".to_string(), vec![6..9]),
8429 ("three.rs".to_string(), vec![37..40])
8430 ])
8431 );
8432
8433 let buffer_4 = project
8434 .update(cx, |project, cx| {
8435 project.open_local_buffer("/dir/four.rs", cx)
8436 })
8437 .await
8438 .unwrap();
8439 buffer_4.update(cx, |buffer, cx| {
8440 let text = "two::TWO";
8441 buffer.edit([(20..28, text), (31..43, text)], cx);
8442 });
8443
8444 assert_eq!(
8445 search(&project, SearchQuery::text("TWO", false, true), cx)
8446 .await
8447 .unwrap(),
8448 HashMap::from_iter([
8449 ("two.rs".to_string(), vec![6..9]),
8450 ("three.rs".to_string(), vec![37..40]),
8451 ("four.rs".to_string(), vec![25..28, 36..39])
8452 ])
8453 );
8454
8455 async fn search(
8456 project: &ModelHandle<Project>,
8457 query: SearchQuery,
8458 cx: &mut gpui::TestAppContext,
8459 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8460 let results = project
8461 .update(cx, |project, cx| project.search(query, cx))
8462 .await?;
8463
8464 Ok(results
8465 .into_iter()
8466 .map(|(buffer, ranges)| {
8467 buffer.read_with(cx, |buffer, _| {
8468 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8469 let ranges = ranges
8470 .into_iter()
8471 .map(|range| range.to_offset(buffer))
8472 .collect::<Vec<_>>();
8473 (path, ranges)
8474 })
8475 })
8476 .collect())
8477 }
8478 }
8479}