1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
/// A model that can be opened as an item within the project.
pub trait Item: Entity {
    /// Returns the project entry backing this item, if it corresponds to a
    /// worktree entry (i.e. a path on disk).
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
67
/// Registry of all projects in the application, backed by the database used
/// to persist per-project state.
pub struct ProjectStore {
    // Database handle shared with each project for reading/writing state.
    db: Arc<Db>,
    // Weak handles to every known project; entries may be dangling.
    projects: Vec<WeakModelHandle<Project>>,
}
72
/// The central model of a workspace: a collection of worktrees plus the
/// state needed to edit and collaborate on them — open buffers, language
/// servers, and remote collaborators.
pub struct Project {
    // Strong or weak handles to this project's worktrees.
    worktrees: Vec<WorktreeHandle>,
    // The most recently activated project entry, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Per-server status (name, progress, pending diagnostics) by server id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    // JSON settings value shared with language servers.
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Source of unique ids for newly discovered worktree entries.
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    // Whether this instance is the host (`Local`) or a guest (`Remote`).
    client_state: ProjectClientState,
    // Remote peers currently collaborating on this project.
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Channel signalled whenever a buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids shared with each guest peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // In-flight buffer loads, so concurrent opens of the same path share
    // a single load.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // In-flight local worktree loads, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All open buffers, keyed by remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Retained (version, snapshot) pairs per buffer id — presumably for
    // serving requests against historical buffer versions; TODO confirm.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Randomly generated at construction (see `local`/`remote`).
    nonce: u128,
    // Set once `restore_state` completes; gates `persist_state`.
    initialized_persistent_state: bool,
}
107
/// Reasons that joining a remote project can fail.
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    /// Any other failure (e.g. RPC or connection errors).
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}
119
/// The project's handle on an open buffer: held strongly (kept alive by the
/// project), weakly, or still loading.
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    // Operations received while the buffer is still loading — presumably
    // replayed once the load completes; TODO confirm at the use site.
    Loading(Vec<Operation>),
}
125
/// A worktree reference that either keeps the worktree alive (`Strong`) or
/// merely observes it (`Weak`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
130
/// Distinguishes the host side (`Local`) from the guest side (`Remote`) of a
/// project, along with the connection state each needs.
enum ProjectClientState {
    Local {
        // Whether this project is currently shared with guests.
        is_shared: bool,
        // Server-assigned project id, present once registered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Whether the host wants this project visible online.
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        // Registers/unregisters the project as connection status or the
        // online flag changes (spawned in `Project::local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Watches connection status and tears the project down when the
        // connection drops (spawned in `Project::remote`).
        _detect_unshare_task: Task<Option<()>>,
    },
}
147
/// A remote peer participating in this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    // Replica id used to attribute this peer's buffer operations.
    pub replica_id: ReplicaId,
}
154
/// Notifications emitted by a `Project` to its observers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    // Lifecycle of a disk-based diagnostics pass (e.g. an external checker).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    // Join-request flow for contacts asking to collaborate.
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}
169
/// Displayable status of a single language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // In-progress work items, keyed by LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    // Outstanding diagnostic updates; signed so over-decrements are visible.
    pub pending_diagnostic_updates: isize,
}
176
/// A single in-progress work item reported by a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    // Completion percentage, when the server reports one.
    pub percentage: Option<usize>,
    // `Instant` is not serializable, so it is skipped when serializing.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
184
/// A path to a file or directory, scoped to one of the project's worktrees.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    // Path relative to the worktree root.
    pub path: Arc<Path>,
}
190
/// Counts of primary error and warning diagnostics for a path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
196
/// An anchored range within a specific buffer.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
202
/// A highlighted range returned from an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    // Read/write/text classification, per the LSP spec.
    pub kind: DocumentHighlightKind,
}
208
/// A project-wide symbol as reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    // Worktree that actually contains the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte digest — presumably a SHA-256 used to validate the symbol
    // when it round-trips between peers; TODO confirm at the use site.
    pub signature: [u8; 32],
}
221
/// One section of hover content: plain text or a fenced code block.
#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    // `Some(lang)` when the block should render as code in that language.
    pub language: Option<String>,
}
227
228impl HoverBlock {
229 fn try_new(marked_string: MarkedString) -> Option<Self> {
230 let result = match marked_string {
231 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
232 text: value,
233 language: Some(language),
234 },
235 MarkedString::String(text) => HoverBlock {
236 text,
237 language: None,
238 },
239 };
240 if result.text.is_empty() {
241 None
242 } else {
243 Some(result)
244 }
245 }
246}
247
/// The result of a hover request: content blocks plus the range they apply
/// to, when the server provides one.
#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}
253
/// A set of buffer transactions produced by one logical operation (e.g. a
/// workspace edit), keyed by the buffer they apply to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
256
257impl DiagnosticSummary {
258 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
259 let mut this = Self {
260 error_count: 0,
261 warning_count: 0,
262 };
263
264 for entry in diagnostics {
265 if entry.diagnostic.is_primary {
266 match entry.diagnostic.severity {
267 DiagnosticSeverity::ERROR => this.error_count += 1,
268 DiagnosticSeverity::WARNING => this.warning_count += 1,
269 _ => {}
270 }
271 }
272 }
273
274 this
275 }
276
277 pub fn is_empty(&self) -> bool {
278 self.error_count == 0 && self.warning_count == 0
279 }
280
281 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
282 proto::DiagnosticSummary {
283 path: path.to_string_lossy().to_string(),
284 error_count: self.error_count as u32,
285 warning_count: self.warning_count as u32,
286 }
287 }
288}
289
/// A unique, process-wide identifier for a worktree entry.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel id that compares greater than or equal to every other id.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next unique id from `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts this id for transmission over the wire.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw numeric value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
312
313impl Project {
    /// Registers all RPC handlers for `Project` models on the given client.
    /// Intended to be called once during application startup.
    pub fn init(client: &Arc<Client>) {
        // One-way messages pushed by the server or peers.
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers (guests asking the host to act).
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single handler, instantiated per type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
353
    /// Creates a new local (host-side) project with no worktrees, optionally
    /// starting out `online`.
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Background task that keeps server registration in sync with
            // connectivity and the user's online preference.
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    // Wake whenever either connection status or the online
                    // flag changes; the values themselves are re-read below.
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        // Stop if the project has been dropped.
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            // Track this project in the global store.
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }
429
    /// Joins a remote (guest-side) project identified by `remote_id`,
    /// returning the populated project model once the host accepts.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The host either accepts (with project metadata) or declines with
        // a reason, which we map onto `JoinProjectError` variants.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Instantiate remote worktrees from the host's metadata; their
        // contents load in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Tear the project down as soon as the connection drops.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch collaborator user records before exposing them on the model.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
566
    /// Test helper: builds a local project over a fake HTTP client, creating
    /// one worktree per root path and waiting for each initial scan.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has fully scanned its root.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
593
    /// Restores persisted per-project state from the database — currently
    /// just whether the project should be online. No-op for guests.
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        // Read off the main thread: online only if every worktree's stored
        // flag (or the default, when absent) is true.
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                // From now on, `persist_state` is allowed to write.
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        drop(online_tx);
                        // `persist` is false: this value just came from disk.
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }
626
627 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
628 if self.is_remote() || !self.initialized_persistent_state {
629 return Task::ready(Ok(()));
630 }
631
632 let db = self.project_store.read(cx).db.clone();
633 let keys = self.db_keys_for_online_state(cx);
634 let is_online = self.is_online();
635 cx.background().spawn(async move {
636 let value = &[is_online as u8];
637 db.write(keys.into_iter().map(|key| (key, value)))
638 })
639 }
640
641 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
642 self.opened_buffers
643 .get(&remote_id)
644 .and_then(|buffer| buffer.upgrade(cx))
645 }
646
    /// The language registry shared by this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
650
    /// A clone of the RPC client used by this project.
    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }
654
    /// A handle to the user store used to resolve collaborator identities.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }
658
    /// A handle to the global project store this project is registered in.
    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }
662
    /// Test helper: asserts internal invariants — unique worktree root paths
    /// on the host; no deferred buffer operations on guests.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            // Each local worktree must have a distinct absolute root path.
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            // A guest's buffers must have applied all received operations.
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
696
697 #[cfg(any(test, feature = "test-support"))]
698 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
699 let path = path.into();
700 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
701 self.opened_buffers.iter().any(|(_, buffer)| {
702 if let Some(buffer) = buffer.upgrade(cx) {
703 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
704 if file.worktree == worktree && file.path() == &path.path {
705 return true;
706 }
707 }
708 }
709 false
710 })
711 } else {
712 false
713 }
714 }
715
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
719
720 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
721 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
722 let mut online_tx = online_tx.borrow_mut();
723 if *online_tx != online {
724 *online_tx = online;
725 drop(online_tx);
726 self.metadata_changed(true, cx);
727 }
728 }
729 }
730
731 pub fn is_online(&self) -> bool {
732 match &self.client_state {
733 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
734 ProjectClientState::Remote { .. } => true,
735 }
736 }
737
    /// Stops sharing and unregisters this project from the server, clearing
    /// its remote id once the server has acknowledged. No-op when the
    /// project has no remote id (or is a guest).
    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    // Surface the original request's result last.
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }
775
    /// Registers this project with the server, storing the assigned remote
    /// id and subscribing to its RPC entity. No-op when already registered.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                // Broadcast metadata under the new id and notify observers.
                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }
799
800 pub fn remote_id(&self) -> Option<u64> {
801 match &self.client_state {
802 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
803 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
804 }
805 }
806
    /// Returns a future that resolves to this project's remote id — for a
    /// local project, waiting until registration assigns one.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            // Guests already know their id.
            if let Some(id) = id {
                return id;
            }
            // Hosts wait until the watch yields `Some(id)`.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
829
830 pub fn shared_remote_id(&self) -> Option<u64> {
831 match &self.client_state {
832 ProjectClientState::Local {
833 remote_id_rx,
834 is_shared,
835 ..
836 } => {
837 if *is_shared {
838 *remote_id_rx.borrow()
839 } else {
840 None
841 }
842 }
843 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
844 }
845 }
846
847 pub fn replica_id(&self) -> ReplicaId {
848 match &self.client_state {
849 ProjectClientState::Local { .. } => 0,
850 ProjectClientState::Remote { replica_id, .. } => *replica_id,
851 }
852 }
853
    /// Called whenever project metadata (worktrees, online state) changes:
    /// pushes an update to the server when registered and online, refreshes
    /// the project store, optionally persists state, and notifies observers.
    /// Host-side only; does nothing for guests.
    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            // Only announce metadata when registered *and* online.
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }
885
    /// The remote peers currently collaborating on this project.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
889
890 pub fn worktrees<'a>(
891 &'a self,
892 cx: &'a AppContext,
893 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
894 self.worktrees
895 .iter()
896 .filter_map(move |worktree| worktree.upgrade(cx))
897 }
898
899 pub fn visible_worktrees<'a>(
900 &'a self,
901 cx: &'a AppContext,
902 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
903 self.worktrees.iter().filter_map(|worktree| {
904 worktree.upgrade(cx).and_then(|worktree| {
905 if worktree.read(cx).is_visible() {
906 Some(worktree)
907 } else {
908 None
909 }
910 })
911 })
912 }
913
914 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
915 self.visible_worktrees(cx)
916 .map(|tree| tree.read(cx).root_name())
917 }
918
919 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
920 self.worktrees
921 .iter()
922 .filter_map(|worktree| {
923 let worktree = worktree.upgrade(&cx)?.read(cx);
924 if worktree.is_visible() {
925 Some(format!(
926 "project-path-online:{}",
927 worktree.as_local().unwrap().abs_path().to_string_lossy()
928 ))
929 } else {
930 None
931 }
932 })
933 .collect::<Vec<_>>()
934 }
935
936 pub fn worktree_for_id(
937 &self,
938 id: WorktreeId,
939 cx: &AppContext,
940 ) -> Option<ModelHandle<Worktree>> {
941 self.worktrees(cx)
942 .find(|worktree| worktree.read(cx).id() == id)
943 }
944
945 pub fn worktree_for_entry(
946 &self,
947 entry_id: ProjectEntryId,
948 cx: &AppContext,
949 ) -> Option<ModelHandle<Worktree>> {
950 self.worktrees(cx)
951 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
952 }
953
954 pub fn worktree_id_for_entry(
955 &self,
956 entry_id: ProjectEntryId,
957 cx: &AppContext,
958 ) -> Option<WorktreeId> {
959 self.worktree_for_entry(entry_id, cx)
960 .map(|worktree| worktree.read(cx).id())
961 }
962
963 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
964 paths.iter().all(|path| self.contains_path(&path, cx))
965 }
966
967 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
968 for worktree in self.worktrees(cx) {
969 let worktree = worktree.read(cx).as_local();
970 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
971 return true;
972 }
973 }
974 false
975 }
976
    /// Creates a file or directory at the given project path. On the host
    /// this goes straight to the local worktree; on a guest it is proxied to
    /// the host via RPC and the returned entry is inserted into the remote
    /// worktree snapshot. Returns `None` when the worktree doesn't exist.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the host's entry at the scan id it reported, so the
                // remote snapshot stays consistent.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1019
    /// Copies a worktree entry to `new_path` within its worktree. Local on
    /// the host; proxied to the host via RPC on guests. Returns `None` when
    /// no worktree contains the entry (or, locally, when the copy isn't
    /// possible).
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the host's entry at the scan id it reported, so the
                // remote snapshot stays consistent.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1062
    /// Renames (moves) the entry with `entry_id` to `new_path` within the same
    /// worktree.
    ///
    /// Returns `None` if no worktree contains the entry, or if the local
    /// worktree declines the rename. Remote projects forward the operation to
    /// the host and then insert the returned entry into the local snapshot.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so this can't panic.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Wait until the local replica of the worktree has caught up to
                // the host's scan before surfacing the renamed entry.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1105
    /// Deletes the entry with `entry_id` from its worktree.
    ///
    /// Returns `None` if no worktree contains the entry, or if the local
    /// worktree declines the deletion. Remote projects forward the operation
    /// to the host and then remove the entry from the local snapshot once it
    /// has caught up to the host's scan.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so this can't panic.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1138
    /// Begins sharing this local project with guests.
    ///
    /// Marks the project shared, promotes all weakly-held buffers and
    /// worktrees to strong handles (guests need them kept alive), and shares
    /// each worktree with the server. Returns an error task if the project is
    /// remote or hasn't been registered with the server yet; resolves once
    /// every worktree has been shared.
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            // Sharing twice is a no-op.
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        // Keep every open buffer alive for the duration of the share.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // Loading buffers only exist on remote projects.
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        // Likewise keep every worktree alive while shared.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
1199
1200 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1201 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1202 if !*is_shared {
1203 return;
1204 }
1205
1206 *is_shared = false;
1207 self.collaborators.clear();
1208 self.shared_buffers.clear();
1209 for worktree_handle in self.worktrees.iter_mut() {
1210 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1211 let is_visible = worktree.update(cx, |worktree, _| {
1212 worktree.as_local_mut().unwrap().unshare();
1213 worktree.is_visible()
1214 });
1215 if !is_visible {
1216 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1217 }
1218 }
1219 }
1220
1221 for open_buffer in self.opened_buffers.values_mut() {
1222 match open_buffer {
1223 OpenBuffer::Strong(buffer) => {
1224 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1225 }
1226 _ => {}
1227 }
1228 }
1229
1230 cx.notify();
1231 } else {
1232 log::error!("attempted to unshare a remote project");
1233 }
1234 }
1235
1236 pub fn respond_to_join_request(
1237 &mut self,
1238 requester_id: u64,
1239 allow: bool,
1240 cx: &mut ModelContext<Self>,
1241 ) {
1242 if let Some(project_id) = self.remote_id() {
1243 let share = self.share(cx);
1244 let client = self.client.clone();
1245 cx.foreground()
1246 .spawn(async move {
1247 share.await?;
1248 client.send(proto::RespondToJoinProjectRequest {
1249 requester_id,
1250 project_id,
1251 allow,
1252 })
1253 })
1254 .detach_and_log_err(cx);
1255 }
1256 }
1257
1258 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1259 if let ProjectClientState::Remote {
1260 sharing_has_stopped,
1261 ..
1262 } = &mut self.client_state
1263 {
1264 *sharing_has_stopped = true;
1265 self.collaborators.clear();
1266 cx.notify();
1267 }
1268 }
1269
1270 pub fn is_read_only(&self) -> bool {
1271 match &self.client_state {
1272 ProjectClientState::Local { .. } => false,
1273 ProjectClientState::Remote {
1274 sharing_has_stopped,
1275 ..
1276 } => *sharing_has_stopped,
1277 }
1278 }
1279
1280 pub fn is_local(&self) -> bool {
1281 match &self.client_state {
1282 ProjectClientState::Local { .. } => true,
1283 ProjectClientState::Remote { .. } => false,
1284 }
1285 }
1286
1287 pub fn is_remote(&self) -> bool {
1288 !self.is_local()
1289 }
1290
1291 pub fn create_buffer(
1292 &mut self,
1293 text: &str,
1294 language: Option<Arc<Language>>,
1295 cx: &mut ModelContext<Self>,
1296 ) -> Result<ModelHandle<Buffer>> {
1297 if self.is_remote() {
1298 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1299 }
1300
1301 let buffer = cx.add_model(|cx| {
1302 Buffer::new(self.replica_id(), text, cx)
1303 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1304 });
1305 self.register_buffer(&buffer, cx)?;
1306 Ok(buffer)
1307 }
1308
    /// Opens the buffer at `path` and returns it as an untyped model handle
    /// together with its project entry id.
    ///
    /// Fails if the buffer has no backing project entry (e.g. it was deleted
    /// while loading).
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
1325
1326 pub fn open_local_buffer(
1327 &mut self,
1328 abs_path: impl AsRef<Path>,
1329 cx: &mut ModelContext<Self>,
1330 ) -> Task<Result<ModelHandle<Buffer>>> {
1331 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1332 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1333 } else {
1334 Task::ready(Err(anyhow!("no such path")))
1335 }
1336 }
1337
    /// Opens the buffer at `path`, deduplicating concurrent requests.
    ///
    /// If the buffer is already open it is returned immediately; if it is
    /// currently being loaded the caller waits on the in-flight load via a
    /// shared watch channel; otherwise a new load is started (local or remote
    /// depending on the worktree).
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to every waiter.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the loader publishes a result on the watch channel.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1398
1399 fn open_local_buffer_internal(
1400 &mut self,
1401 path: &Arc<Path>,
1402 worktree: &ModelHandle<Worktree>,
1403 cx: &mut ModelContext<Self>,
1404 ) -> Task<Result<ModelHandle<Buffer>>> {
1405 let load_buffer = worktree.update(cx, |worktree, cx| {
1406 let worktree = worktree.as_local_mut().unwrap();
1407 worktree.load_buffer(path, cx)
1408 });
1409 cx.spawn(|this, mut cx| async move {
1410 let buffer = load_buffer.await?;
1411 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1412 Ok(buffer)
1413 })
1414 }
1415
    /// Requests the buffer at `path` from the host of this remote project and
    /// deserializes the response into a local replica.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Only called on remote projects, which always have a remote id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
1440
    /// Opens a buffer for a file URI handed to us by a language server (e.g.
    /// a go-to-definition target outside any current worktree).
    ///
    /// If the path falls outside every existing worktree, a new (invisible)
    /// worktree is created for it and the originating language server is
    /// associated with that worktree so the buffer still gets LSP features.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // `false` makes the new worktree invisible in the UI.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                // The file is the worktree root, so its relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1479
1480 pub fn open_buffer_by_id(
1481 &mut self,
1482 id: u64,
1483 cx: &mut ModelContext<Self>,
1484 ) -> Task<Result<ModelHandle<Buffer>>> {
1485 if let Some(buffer) = self.buffer_for_id(id, cx) {
1486 Task::ready(Ok(buffer))
1487 } else if self.is_local() {
1488 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1489 } else if let Some(project_id) = self.remote_id() {
1490 let request = self
1491 .client
1492 .request(proto::OpenBufferById { project_id, id });
1493 cx.spawn(|this, mut cx| async move {
1494 let buffer = request
1495 .await?
1496 .buffer
1497 .ok_or_else(|| anyhow!("invalid buffer"))?;
1498 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1499 .await
1500 })
1501 } else {
1502 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1503 }
1504 }
1505
    /// Saves `buffer` to a new absolute path ("save as").
    ///
    /// Unregisters the buffer from the language server under its old path,
    /// writes it to `abs_path` (creating a worktree for that path if needed),
    /// then re-resolves its language and re-registers it so LSP features track
    /// the new path.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            // Close the document under its old URI before it moves.
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The extension may have changed, so re-detect the language and
            // reopen the document with the language server.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1537
1538 pub fn get_open_buffer(
1539 &mut self,
1540 path: &ProjectPath,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Option<ModelHandle<Buffer>> {
1543 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1544 self.opened_buffers.values().find_map(|buffer| {
1545 let buffer = buffer.upgrade(cx)?;
1546 let file = File::from_dyn(buffer.read(cx).file())?;
1547 if file.worktree == worktree && file.path() == &path.path {
1548 Some(buffer)
1549 } else {
1550 None
1551 }
1552 })
1553 }
1554
    /// Registers a newly-opened buffer with this project.
    ///
    /// Stores a strong handle if the buffer must be kept alive (remote or
    /// shared project), applies any operations that arrived while the buffer
    /// was loading, subscribes to its events, and hooks it up to a language
    /// server. Errors if a live buffer with the same remote id was already
    /// registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Remote operations can arrive before the buffer finishes opening;
            // replay them now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                // A dead weak handle is fine to overwrite; a live one is a
                // duplicate registration.
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer is released, tell its language server the document
        // was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1612
    /// Announces a local buffer to its worktree's language server.
    ///
    /// Looks up the server for the buffer's (worktree, adapter) pair, applies
    /// any diagnostics the worktree already has for the path, sends
    /// `textDocument/didOpen`, records the initial snapshot (version 0) used
    /// as the base for incremental change events, and installs the server's
    /// completion trigger characters. No-op for non-local files or buffers
    /// without a language server.
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_servers
                            .get(&(worktree_id, adapter.name()))
                            .cloned();
                    }
                }

                // Apply diagnostics the worktree already knows about for this
                // path, even if no server is running yet.
                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    // Version 0 snapshot is the base for future didChange
                    // deltas (see on_buffer_event).
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }
1676
    /// Detaches `buffer` from its language server: clears its diagnostics,
    /// drops its snapshot history, and sends `textDocument/didClose` for the
    /// buffer's previous path.
    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }
1699
    /// Reacts to events emitted by an open buffer.
    ///
    /// - `Operation`: forwards the CRDT operation to collaborators when the
    ///   project is shared.
    /// - `Edited`: computes the delta since the last snapshot sent to the
    ///   language server and sends an incremental `didChange` notification.
    /// - `Saved`: notifies every language server for the buffer's worktree.
    ///
    /// Returns `None` on any early exit (no server, no file, etc.); the return
    /// value is only used to enable `?`.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Translate each edit since the previous snapshot into an LSP
                // content change: the range is expressed in the *old* text's
                // coordinates, the replacement text comes from the new text.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot before notifying, so the next Edited
                // event diffs against the text the server has just been sent.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // All servers in the worktree are told about the save, not
                // just the buffer's own server.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1787
1788 fn language_servers_for_worktree(
1789 &self,
1790 worktree_id: WorktreeId,
1791 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1792 self.language_servers.iter().filter_map(
1793 move |((language_server_worktree_id, _), server)| {
1794 if *language_server_worktree_id == worktree_id {
1795 Some(server)
1796 } else {
1797 None
1798 }
1799 },
1800 )
1801 }
1802
    /// Detects the buffer's language from its full path, assigns it, and
    /// ensures a language server is running for that language in the buffer's
    /// worktree.
    ///
    /// Returns `None` on any early exit (no file, no matching language, or a
    /// non-local worktree); the return value only exists to enable `?`.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }
1823
    /// Starts (at most once per `(worktree, adapter)` pair) a language server
    /// for `language` rooted at `worktree_path`.
    ///
    /// The spawned task initializes the server, wires up its notification and
    /// request handlers (diagnostics, workspace configuration, workspace
    /// edits, progress), records its status, announces it to collaborators,
    /// and opens every already-open matching buffer with it.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        // The entry API guarantees only one server is ever started per key.
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    // Bail if the project was dropped while the server started.
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer workspace/configuration requests from the shared
                    // language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Surface $/progress notifications as status updates.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Let collaborators know a new server exists.
                        if let Some(project_id) = this.shared_remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
2036
    /// Restarts the language servers backing the given buffers.
    ///
    /// Deduplicates the buffers into their `(worktree, path)` lookup keys, so
    /// each affected server is restarted once. The return value only exists to
    /// enable `?`.
    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            // NOTE(review): `?` here aborts restarting the *remaining* servers
            // when one path has no recognizable language — confirm this is
            // intended rather than skipping with `continue`.
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }
2060
2061 fn restart_language_server(
2062 &mut self,
2063 worktree_id: WorktreeId,
2064 worktree_path: Arc<Path>,
2065 language: Arc<Language>,
2066 cx: &mut ModelContext<Self>,
2067 ) {
2068 let adapter = if let Some(adapter) = language.lsp_adapter() {
2069 adapter
2070 } else {
2071 return;
2072 };
2073 let key = (worktree_id, adapter.name());
2074 let server_to_shutdown = self.language_servers.remove(&key);
2075 self.started_language_servers.remove(&key);
2076 server_to_shutdown
2077 .as_ref()
2078 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
2079 cx.spawn_weak(|this, mut cx| async move {
2080 if let Some(this) = this.upgrade(&cx) {
2081 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2082 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2083 shutdown_task.await;
2084 }
2085 }
2086
2087 this.update(&mut cx, |this, cx| {
2088 this.start_language_server(worktree_id, worktree_path, language, cx);
2089 });
2090 }
2091 })
2092 .detach();
2093 }
2094
    /// Handles a `textDocument/publishDiagnostics` notification from a
    /// language server.
    ///
    /// Servers that don't report a disk-based-diagnostics progress token have
    /// no other way to signal update boundaries, so for those the
    /// started/finished events are emitted around every publish.
    fn on_lsp_diagnostics_published(
        &mut self,
        server_id: usize,
        mut params: lsp::PublishDiagnosticsParams,
        adapter: &Arc<dyn LspAdapter>,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Give the adapter a chance to normalize server-specific quirks.
        adapter.process_diagnostics(&mut params);
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_started(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                    proto::LspDiskBasedDiagnosticsUpdating {},
                ),
            );
        }
        self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
            .log_err();
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_finished(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                    proto::LspDiskBasedDiagnosticsUpdated {},
                ),
            );
        }
    }
2125
    /// Handles a `$/progress` notification from a language server.
    ///
    /// Progress whose token matches the adapter's disk-based-diagnostics token
    /// is counted to derive started/finished events (the counter handles
    /// overlapping updates); all other tokens are tracked as generic pending
    /// work. Both kinds are rebroadcast to collaborators. Numeric tokens are
    /// ignored.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    // Only the first concurrent update triggers the event.
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Disk-based diagnostic updates have no per-report progress.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates -= 1;
                    // Only the last concurrent update triggers the event.
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2220
2221 fn on_lsp_work_start(
2222 &mut self,
2223 language_server_id: usize,
2224 token: String,
2225 cx: &mut ModelContext<Self>,
2226 ) {
2227 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2228 status.pending_work.insert(
2229 token,
2230 LanguageServerProgress {
2231 message: None,
2232 percentage: None,
2233 last_update_at: Instant::now(),
2234 },
2235 );
2236 cx.notify();
2237 }
2238 }
2239
2240 fn on_lsp_work_progress(
2241 &mut self,
2242 language_server_id: usize,
2243 token: String,
2244 progress: LanguageServerProgress,
2245 cx: &mut ModelContext<Self>,
2246 ) {
2247 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2248 status.pending_work.insert(token, progress);
2249 cx.notify();
2250 }
2251 }
2252
2253 fn on_lsp_work_end(
2254 &mut self,
2255 language_server_id: usize,
2256 token: String,
2257 cx: &mut ModelContext<Self>,
2258 ) {
2259 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2260 status.pending_work.remove(&token);
2261 cx.notify();
2262 }
2263 }
2264
2265 async fn on_lsp_workspace_edit(
2266 this: WeakModelHandle<Self>,
2267 params: lsp::ApplyWorkspaceEditParams,
2268 server_id: usize,
2269 adapter: Arc<dyn LspAdapter>,
2270 language_server: Arc<LanguageServer>,
2271 mut cx: AsyncAppContext,
2272 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2273 let this = this
2274 .upgrade(&cx)
2275 .ok_or_else(|| anyhow!("project project closed"))?;
2276 let transaction = Self::deserialize_workspace_edit(
2277 this.clone(),
2278 params.edit,
2279 true,
2280 adapter.clone(),
2281 language_server.clone(),
2282 &mut cx,
2283 )
2284 .await
2285 .log_err();
2286 this.update(&mut cx, |this, _| {
2287 if let Some(transaction) = transaction {
2288 this.last_workspace_edits_by_language_server
2289 .insert(server_id, transaction);
2290 }
2291 });
2292 Ok(lsp::ApplyWorkspaceEditResponse {
2293 applied: true,
2294 failed_change: None,
2295 failure_reason: None,
2296 })
2297 }
2298
2299 fn broadcast_language_server_update(
2300 &self,
2301 language_server_id: usize,
2302 event: proto::update_language_server::Variant,
2303 ) {
2304 if let Some(project_id) = self.shared_remote_id() {
2305 self.client
2306 .send(proto::UpdateLanguageServer {
2307 project_id,
2308 language_server_id: language_server_id as u64,
2309 variant: Some(event),
2310 })
2311 .log_err();
2312 }
2313 }
2314
2315 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2316 for (_, server) in self.language_servers.values() {
2317 server
2318 .notify::<lsp::notification::DidChangeConfiguration>(
2319 lsp::DidChangeConfigurationParams {
2320 settings: settings.clone(),
2321 },
2322 )
2323 .ok();
2324 }
2325 *self.language_server_settings.lock() = settings;
2326 }
2327
    /// Returns an iterator over the status of each tracked language server.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
2333
2334 pub fn update_diagnostics(
2335 &mut self,
2336 params: lsp::PublishDiagnosticsParams,
2337 disk_based_sources: &[&str],
2338 cx: &mut ModelContext<Self>,
2339 ) -> Result<()> {
2340 let abs_path = params
2341 .uri
2342 .to_file_path()
2343 .map_err(|_| anyhow!("URI is not a file"))?;
2344 let mut diagnostics = Vec::default();
2345 let mut primary_diagnostic_group_ids = HashMap::default();
2346 let mut sources_by_group_id = HashMap::default();
2347 let mut supporting_diagnostics = HashMap::default();
2348 for diagnostic in ¶ms.diagnostics {
2349 let source = diagnostic.source.as_ref();
2350 let code = diagnostic.code.as_ref().map(|code| match code {
2351 lsp::NumberOrString::Number(code) => code.to_string(),
2352 lsp::NumberOrString::String(code) => code.clone(),
2353 });
2354 let range = range_from_lsp(diagnostic.range);
2355 let is_supporting = diagnostic
2356 .related_information
2357 .as_ref()
2358 .map_or(false, |infos| {
2359 infos.iter().any(|info| {
2360 primary_diagnostic_group_ids.contains_key(&(
2361 source,
2362 code.clone(),
2363 range_from_lsp(info.location.range),
2364 ))
2365 })
2366 });
2367
2368 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2369 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2370 });
2371
2372 if is_supporting {
2373 supporting_diagnostics.insert(
2374 (source, code.clone(), range),
2375 (diagnostic.severity, is_unnecessary),
2376 );
2377 } else {
2378 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2379 let is_disk_based = source.map_or(false, |source| {
2380 disk_based_sources.contains(&source.as_str())
2381 });
2382
2383 sources_by_group_id.insert(group_id, source);
2384 primary_diagnostic_group_ids
2385 .insert((source, code.clone(), range.clone()), group_id);
2386
2387 diagnostics.push(DiagnosticEntry {
2388 range,
2389 diagnostic: Diagnostic {
2390 code: code.clone(),
2391 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2392 message: diagnostic.message.clone(),
2393 group_id,
2394 is_primary: true,
2395 is_valid: true,
2396 is_disk_based,
2397 is_unnecessary,
2398 },
2399 });
2400 if let Some(infos) = &diagnostic.related_information {
2401 for info in infos {
2402 if info.location.uri == params.uri && !info.message.is_empty() {
2403 let range = range_from_lsp(info.location.range);
2404 diagnostics.push(DiagnosticEntry {
2405 range,
2406 diagnostic: Diagnostic {
2407 code: code.clone(),
2408 severity: DiagnosticSeverity::INFORMATION,
2409 message: info.message.clone(),
2410 group_id,
2411 is_primary: false,
2412 is_valid: true,
2413 is_disk_based,
2414 is_unnecessary: false,
2415 },
2416 });
2417 }
2418 }
2419 }
2420 }
2421 }
2422
2423 for entry in &mut diagnostics {
2424 let diagnostic = &mut entry.diagnostic;
2425 if !diagnostic.is_primary {
2426 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2427 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2428 source,
2429 diagnostic.code.clone(),
2430 entry.range.clone(),
2431 )) {
2432 if let Some(severity) = severity {
2433 diagnostic.severity = severity;
2434 }
2435 diagnostic.is_unnecessary = is_unnecessary;
2436 }
2437 }
2438 }
2439
2440 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2441 Ok(())
2442 }
2443
2444 pub fn update_diagnostic_entries(
2445 &mut self,
2446 abs_path: PathBuf,
2447 version: Option<i32>,
2448 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2449 cx: &mut ModelContext<Project>,
2450 ) -> Result<(), anyhow::Error> {
2451 let (worktree, relative_path) = self
2452 .find_local_worktree(&abs_path, cx)
2453 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2454 if !worktree.read(cx).is_visible() {
2455 return Ok(());
2456 }
2457
2458 let project_path = ProjectPath {
2459 worktree_id: worktree.read(cx).id(),
2460 path: relative_path.into(),
2461 };
2462 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2463 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2464 }
2465
2466 let updated = worktree.update(cx, |worktree, cx| {
2467 worktree
2468 .as_local_mut()
2469 .ok_or_else(|| anyhow!("not a local worktree"))?
2470 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2471 })?;
2472 if updated {
2473 cx.emit(Event::DiagnosticsUpdated(project_path));
2474 }
2475 Ok(())
2476 }
2477
    /// Installs a fresh set of diagnostics into an open buffer.
    ///
    /// The entries are sorted, translated through any unsaved edits (for
    /// disk-based diagnostics), and clipped to the buffer's current contents
    /// before being handed to the buffer as a `DiagnosticSet`.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break order for entries with identical ranges: primaries first,
        // then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // Resolve the buffer snapshot that matches the LSP document version
        // these diagnostics were computed against.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, then end descending, so enclosing ranges
        // come before the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2546
    /// Reloads the given buffers from their backing files.
    ///
    /// Only dirty buffers are considered. Remote buffers are reloaded by the
    /// project host over RPC; local buffers re-read their files directly.
    /// When `push_to_history` is false, the reload transactions are removed
    /// from each buffer's undo history. Returns the combined transaction.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition dirty buffers into local and remote; buffers without a
        // project file are skipped.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to reload the remote buffers and replay the
            // resulting edits locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2610
2611 pub fn format(
2612 &self,
2613 buffers: HashSet<ModelHandle<Buffer>>,
2614 push_to_history: bool,
2615 cx: &mut ModelContext<Project>,
2616 ) -> Task<Result<ProjectTransaction>> {
2617 let mut local_buffers = Vec::new();
2618 let mut remote_buffers = None;
2619 for buffer_handle in buffers {
2620 let buffer = buffer_handle.read(cx);
2621 if let Some(file) = File::from_dyn(buffer.file()) {
2622 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2623 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2624 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2625 }
2626 } else {
2627 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2628 }
2629 } else {
2630 return Task::ready(Ok(Default::default()));
2631 }
2632 }
2633
2634 let remote_buffers = self.remote_id().zip(remote_buffers);
2635 let client = self.client.clone();
2636
2637 cx.spawn(|this, mut cx| async move {
2638 let mut project_transaction = ProjectTransaction::default();
2639
2640 if let Some((project_id, remote_buffers)) = remote_buffers {
2641 let response = client
2642 .request(proto::FormatBuffers {
2643 project_id,
2644 buffer_ids: remote_buffers
2645 .iter()
2646 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2647 .collect(),
2648 })
2649 .await?
2650 .transaction
2651 .ok_or_else(|| anyhow!("missing transaction"))?;
2652 project_transaction = this
2653 .update(&mut cx, |this, cx| {
2654 this.deserialize_project_transaction(response, push_to_history, cx)
2655 })
2656 .await?;
2657 }
2658
2659 for (buffer, buffer_abs_path, language_server) in local_buffers {
2660 let text_document = lsp::TextDocumentIdentifier::new(
2661 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2662 );
2663 let capabilities = &language_server.capabilities();
2664 let tab_size = cx.update(|cx| {
2665 let language_name = buffer.read(cx).language().map(|language| language.name());
2666 cx.global::<Settings>().tab_size(language_name.as_deref())
2667 });
2668 let lsp_edits = if capabilities
2669 .document_formatting_provider
2670 .as_ref()
2671 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2672 {
2673 language_server
2674 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2675 text_document,
2676 options: lsp::FormattingOptions {
2677 tab_size,
2678 insert_spaces: true,
2679 insert_final_newline: Some(true),
2680 ..Default::default()
2681 },
2682 work_done_progress_params: Default::default(),
2683 })
2684 .await?
2685 } else if capabilities
2686 .document_range_formatting_provider
2687 .as_ref()
2688 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2689 {
2690 let buffer_start = lsp::Position::new(0, 0);
2691 let buffer_end =
2692 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2693 language_server
2694 .request::<lsp::request::RangeFormatting>(
2695 lsp::DocumentRangeFormattingParams {
2696 text_document,
2697 range: lsp::Range::new(buffer_start, buffer_end),
2698 options: lsp::FormattingOptions {
2699 tab_size: 4,
2700 insert_spaces: true,
2701 insert_final_newline: Some(true),
2702 ..Default::default()
2703 },
2704 work_done_progress_params: Default::default(),
2705 },
2706 )
2707 .await?
2708 } else {
2709 continue;
2710 };
2711
2712 if let Some(lsp_edits) = lsp_edits {
2713 let edits = this
2714 .update(&mut cx, |this, cx| {
2715 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2716 })
2717 .await?;
2718 buffer.update(&mut cx, |buffer, cx| {
2719 buffer.finalize_last_transaction();
2720 buffer.start_transaction();
2721 for (range, text) in edits {
2722 buffer.edit([(range, text)], cx);
2723 }
2724 if buffer.end_transaction(cx).is_some() {
2725 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2726 if !push_to_history {
2727 buffer.forget_transaction(transaction.id);
2728 }
2729 project_transaction.0.insert(cx.handle(), transaction);
2730 }
2731 });
2732 }
2733 }
2734
2735 Ok(project_transaction)
2736 })
2737 }
2738
2739 pub fn definition<T: ToPointUtf16>(
2740 &self,
2741 buffer: &ModelHandle<Buffer>,
2742 position: T,
2743 cx: &mut ModelContext<Self>,
2744 ) -> Task<Result<Vec<Location>>> {
2745 let position = position.to_point_utf16(buffer.read(cx));
2746 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2747 }
2748
2749 pub fn references<T: ToPointUtf16>(
2750 &self,
2751 buffer: &ModelHandle<Buffer>,
2752 position: T,
2753 cx: &mut ModelContext<Self>,
2754 ) -> Task<Result<Vec<Location>>> {
2755 let position = position.to_point_utf16(buffer.read(cx));
2756 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2757 }
2758
2759 pub fn document_highlights<T: ToPointUtf16>(
2760 &self,
2761 buffer: &ModelHandle<Buffer>,
2762 position: T,
2763 cx: &mut ModelContext<Self>,
2764 ) -> Task<Result<Vec<DocumentHighlight>>> {
2765 let position = position.to_point_utf16(buffer.read(cx));
2766
2767 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2768 }
2769
    /// Searches workspace symbols matching `query` across all language servers.
    ///
    /// Locally this fans out a `workspace/symbol` request to every language
    /// server and merges the responses, resolving each result to a worktree
    /// and relative path; symbols outside every worktree keep the originating
    /// worktree id with a relativized path. Remote projects forward the query
    /// to the host.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    // Per-server failures are logged and treated as empty
                    // responses rather than failing the whole search.
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                // Wait for every server before assembling the result list.
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            // Prefer a language-specific label; fall back to
                            // the plain symbol name.
                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Symbols that fail to deserialize are logged and
                        // skipped.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2872
    /// Opens the buffer containing the given workspace symbol.
    ///
    /// Locally, the symbol's path is resolved against its worktree and opened
    /// via the language server that produced the symbol; for remote projects
    /// the request is forwarded to the host and the returned buffer is
    /// deserialized.
    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            // The server is keyed by the worktree that originated the symbol
            // plus the server's name.
            let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
                symbol.source_worktree_id,
                symbol.language_server_name.clone(),
            )) {
                server.clone()
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2922
2923 pub fn hover<T: ToPointUtf16>(
2924 &self,
2925 buffer: &ModelHandle<Buffer>,
2926 position: T,
2927 cx: &mut ModelContext<Self>,
2928 ) -> Task<Result<Option<Hover>>> {
2929 let position = position.to_point_utf16(buffer.read(cx));
2930 self.request_lsp(buffer.clone(), GetHover { position }, cx)
2931 }
2932
    /// Returns the completions available at the given position in a buffer.
    ///
    /// For local buffers, the request goes straight to the buffer's language
    /// server and each LSP item is converted into a `Completion`, deriving
    /// the replaced range either from the server-provided text edit or from
    /// the word token under the cursor. Remote buffers proxy the request to
    /// the project host over RPC.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffers without a project file can't have completions.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Servers may answer with either a flat array or a
                // `CompletionList`; normalize to a Vec of items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily computed; shared by all items without a text edit.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has seen the version the host
                // computed completions against before deserializing anchors.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3090
    /// Resolves a completion with its language server and applies any
    /// additional text edits it carries (e.g. auto-inserted imports).
    ///
    /// Returns the transaction containing those edits, if any were applied.
    /// When `push_to_history` is false the transaction is removed from the
    /// buffer's undo history. Remote projects forward the request to the host.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` fills in fields (like additional
                // edits) that the server omitted from the original response.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to arrive before replaying the
                    // transaction locally.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3172
3173 pub fn code_actions<T: Clone + ToOffset>(
3174 &self,
3175 buffer_handle: &ModelHandle<Buffer>,
3176 range: Range<T>,
3177 cx: &mut ModelContext<Self>,
3178 ) -> Task<Result<Vec<CodeAction>>> {
3179 let buffer_handle = buffer_handle.clone();
3180 let buffer = buffer_handle.read(cx);
3181 let snapshot = buffer.snapshot();
3182 let relevant_diagnostics = snapshot
3183 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3184 .map(|entry| entry.to_lsp_diagnostic_stub())
3185 .collect();
3186 let buffer_id = buffer.remote_id();
3187 let worktree;
3188 let buffer_abs_path;
3189 if let Some(file) = File::from_dyn(buffer.file()) {
3190 worktree = file.worktree.clone();
3191 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3192 } else {
3193 return Task::ready(Ok(Default::default()));
3194 };
3195 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3196
3197 if worktree.read(cx).as_local().is_some() {
3198 let buffer_abs_path = buffer_abs_path.unwrap();
3199 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3200 {
3201 server.clone()
3202 } else {
3203 return Task::ready(Ok(Default::default()));
3204 };
3205
3206 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3207 cx.foreground().spawn(async move {
3208 if !lang_server.capabilities().code_action_provider.is_some() {
3209 return Ok(Default::default());
3210 }
3211
3212 Ok(lang_server
3213 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3214 text_document: lsp::TextDocumentIdentifier::new(
3215 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3216 ),
3217 range: lsp_range,
3218 work_done_progress_params: Default::default(),
3219 partial_result_params: Default::default(),
3220 context: lsp::CodeActionContext {
3221 diagnostics: relevant_diagnostics,
3222 only: Some(vec![
3223 lsp::CodeActionKind::QUICKFIX,
3224 lsp::CodeActionKind::REFACTOR,
3225 lsp::CodeActionKind::REFACTOR_EXTRACT,
3226 lsp::CodeActionKind::SOURCE,
3227 ]),
3228 },
3229 })
3230 .await?
3231 .unwrap_or_default()
3232 .into_iter()
3233 .filter_map(|entry| {
3234 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3235 Some(CodeAction {
3236 range: range.clone(),
3237 lsp_action,
3238 })
3239 } else {
3240 None
3241 }
3242 })
3243 .collect())
3244 })
3245 } else if let Some(project_id) = self.remote_id() {
3246 let rpc = self.client.clone();
3247 let version = buffer.version();
3248 cx.spawn_weak(|_, mut cx| async move {
3249 let response = rpc
3250 .request(proto::GetCodeActions {
3251 project_id,
3252 buffer_id,
3253 start: Some(language::proto::serialize_anchor(&range.start)),
3254 end: Some(language::proto::serialize_anchor(&range.end)),
3255 version: serialize_version(&version),
3256 })
3257 .await?;
3258
3259 buffer_handle
3260 .update(&mut cx, |buffer, _| {
3261 buffer.wait_for_version(deserialize_version(response.version))
3262 })
3263 .await;
3264
3265 response
3266 .actions
3267 .into_iter()
3268 .map(language::proto::deserialize_code_action)
3269 .collect()
3270 })
3271 } else {
3272 Task::ready(Ok(Default::default()))
3273 }
3274 }
3275
    /// Resolves and applies `action` to the project, returning the resulting
    /// transactions for every buffer the action modified.
    ///
    /// When `push_to_history` is false, the applied edits are excluded from
    /// the buffers' undo histories.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action carries resolve data: refresh the range it
                    // refers to, then ask the server to fill in the edit via
                    // codeAction/resolve.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for this range
                    // and pick the one with a matching title, which is
                    // expected to carry a populated edit by now.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Command-only action: clear any previously recorded
                    // workspace edits for this server, execute the command,
                    // then take whatever edits accumulated for the server
                    // while it ran (empty if none arrived).
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3371
3372 async fn deserialize_workspace_edit(
3373 this: ModelHandle<Self>,
3374 edit: lsp::WorkspaceEdit,
3375 push_to_history: bool,
3376 lsp_adapter: Arc<dyn LspAdapter>,
3377 language_server: Arc<LanguageServer>,
3378 cx: &mut AsyncAppContext,
3379 ) -> Result<ProjectTransaction> {
3380 let fs = this.read_with(cx, |this, _| this.fs.clone());
3381 let mut operations = Vec::new();
3382 if let Some(document_changes) = edit.document_changes {
3383 match document_changes {
3384 lsp::DocumentChanges::Edits(edits) => {
3385 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3386 }
3387 lsp::DocumentChanges::Operations(ops) => operations = ops,
3388 }
3389 } else if let Some(changes) = edit.changes {
3390 operations.extend(changes.into_iter().map(|(uri, edits)| {
3391 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3392 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3393 uri,
3394 version: None,
3395 },
3396 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3397 })
3398 }));
3399 }
3400
3401 let mut project_transaction = ProjectTransaction::default();
3402 for operation in operations {
3403 match operation {
3404 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3405 let abs_path = op
3406 .uri
3407 .to_file_path()
3408 .map_err(|_| anyhow!("can't convert URI to path"))?;
3409
3410 if let Some(parent_path) = abs_path.parent() {
3411 fs.create_dir(parent_path).await?;
3412 }
3413 if abs_path.ends_with("/") {
3414 fs.create_dir(&abs_path).await?;
3415 } else {
3416 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3417 .await?;
3418 }
3419 }
3420 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3421 let source_abs_path = op
3422 .old_uri
3423 .to_file_path()
3424 .map_err(|_| anyhow!("can't convert URI to path"))?;
3425 let target_abs_path = op
3426 .new_uri
3427 .to_file_path()
3428 .map_err(|_| anyhow!("can't convert URI to path"))?;
3429 fs.rename(
3430 &source_abs_path,
3431 &target_abs_path,
3432 op.options.map(Into::into).unwrap_or_default(),
3433 )
3434 .await?;
3435 }
3436 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3437 let abs_path = op
3438 .uri
3439 .to_file_path()
3440 .map_err(|_| anyhow!("can't convert URI to path"))?;
3441 let options = op.options.map(Into::into).unwrap_or_default();
3442 if abs_path.ends_with("/") {
3443 fs.remove_dir(&abs_path, options).await?;
3444 } else {
3445 fs.remove_file(&abs_path, options).await?;
3446 }
3447 }
3448 lsp::DocumentChangeOperation::Edit(op) => {
3449 let buffer_to_edit = this
3450 .update(cx, |this, cx| {
3451 this.open_local_buffer_via_lsp(
3452 op.text_document.uri,
3453 lsp_adapter.clone(),
3454 language_server.clone(),
3455 cx,
3456 )
3457 })
3458 .await?;
3459
3460 let edits = this
3461 .update(cx, |this, cx| {
3462 let edits = op.edits.into_iter().map(|edit| match edit {
3463 lsp::OneOf::Left(edit) => edit,
3464 lsp::OneOf::Right(edit) => edit.text_edit,
3465 });
3466 this.edits_from_lsp(
3467 &buffer_to_edit,
3468 edits,
3469 op.text_document.version,
3470 cx,
3471 )
3472 })
3473 .await?;
3474
3475 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3476 buffer.finalize_last_transaction();
3477 buffer.start_transaction();
3478 for (range, text) in edits {
3479 buffer.edit([(range, text)], cx);
3480 }
3481 let transaction = if buffer.end_transaction(cx).is_some() {
3482 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3483 if !push_to_history {
3484 buffer.forget_transaction(transaction.id);
3485 }
3486 Some(transaction)
3487 } else {
3488 None
3489 };
3490
3491 transaction
3492 });
3493 if let Some(transaction) = transaction {
3494 project_transaction.0.insert(buffer_to_edit, transaction);
3495 }
3496 }
3497 }
3498 }
3499
3500 Ok(project_transaction)
3501 }
3502
3503 pub fn prepare_rename<T: ToPointUtf16>(
3504 &self,
3505 buffer: ModelHandle<Buffer>,
3506 position: T,
3507 cx: &mut ModelContext<Self>,
3508 ) -> Task<Result<Option<Range<Anchor>>>> {
3509 let position = position.to_point_utf16(buffer.read(cx));
3510 self.request_lsp(buffer, PrepareRename { position }, cx)
3511 }
3512
3513 pub fn perform_rename<T: ToPointUtf16>(
3514 &self,
3515 buffer: ModelHandle<Buffer>,
3516 position: T,
3517 new_name: String,
3518 push_to_history: bool,
3519 cx: &mut ModelContext<Self>,
3520 ) -> Task<Result<ProjectTransaction>> {
3521 let position = position.to_point_utf16(buffer.read(cx));
3522 self.request_lsp(
3523 buffer,
3524 PerformRename {
3525 position,
3526 new_name,
3527 push_to_history,
3528 },
3529 cx,
3530 )
3531 }
3532
3533 pub fn search(
3534 &self,
3535 query: SearchQuery,
3536 cx: &mut ModelContext<Self>,
3537 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3538 if self.is_local() {
3539 let snapshots = self
3540 .visible_worktrees(cx)
3541 .filter_map(|tree| {
3542 let tree = tree.read(cx).as_local()?;
3543 Some(tree.snapshot())
3544 })
3545 .collect::<Vec<_>>();
3546
3547 let background = cx.background().clone();
3548 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3549 if path_count == 0 {
3550 return Task::ready(Ok(Default::default()));
3551 }
3552 let workers = background.num_cpus().min(path_count);
3553 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3554 cx.background()
3555 .spawn({
3556 let fs = self.fs.clone();
3557 let background = cx.background().clone();
3558 let query = query.clone();
3559 async move {
3560 let fs = &fs;
3561 let query = &query;
3562 let matching_paths_tx = &matching_paths_tx;
3563 let paths_per_worker = (path_count + workers - 1) / workers;
3564 let snapshots = &snapshots;
3565 background
3566 .scoped(|scope| {
3567 for worker_ix in 0..workers {
3568 let worker_start_ix = worker_ix * paths_per_worker;
3569 let worker_end_ix = worker_start_ix + paths_per_worker;
3570 scope.spawn(async move {
3571 let mut snapshot_start_ix = 0;
3572 let mut abs_path = PathBuf::new();
3573 for snapshot in snapshots {
3574 let snapshot_end_ix =
3575 snapshot_start_ix + snapshot.visible_file_count();
3576 if worker_end_ix <= snapshot_start_ix {
3577 break;
3578 } else if worker_start_ix > snapshot_end_ix {
3579 snapshot_start_ix = snapshot_end_ix;
3580 continue;
3581 } else {
3582 let start_in_snapshot = worker_start_ix
3583 .saturating_sub(snapshot_start_ix);
3584 let end_in_snapshot =
3585 cmp::min(worker_end_ix, snapshot_end_ix)
3586 - snapshot_start_ix;
3587
3588 for entry in snapshot
3589 .files(false, start_in_snapshot)
3590 .take(end_in_snapshot - start_in_snapshot)
3591 {
3592 if matching_paths_tx.is_closed() {
3593 break;
3594 }
3595
3596 abs_path.clear();
3597 abs_path.push(&snapshot.abs_path());
3598 abs_path.push(&entry.path);
3599 let matches = if let Some(file) =
3600 fs.open_sync(&abs_path).await.log_err()
3601 {
3602 query.detect(file).unwrap_or(false)
3603 } else {
3604 false
3605 };
3606
3607 if matches {
3608 let project_path =
3609 (snapshot.id(), entry.path.clone());
3610 if matching_paths_tx
3611 .send(project_path)
3612 .await
3613 .is_err()
3614 {
3615 break;
3616 }
3617 }
3618 }
3619
3620 snapshot_start_ix = snapshot_end_ix;
3621 }
3622 }
3623 });
3624 }
3625 })
3626 .await;
3627 }
3628 })
3629 .detach();
3630
3631 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3632 let open_buffers = self
3633 .opened_buffers
3634 .values()
3635 .filter_map(|b| b.upgrade(cx))
3636 .collect::<HashSet<_>>();
3637 cx.spawn(|this, cx| async move {
3638 for buffer in &open_buffers {
3639 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3640 buffers_tx.send((buffer.clone(), snapshot)).await?;
3641 }
3642
3643 let open_buffers = Rc::new(RefCell::new(open_buffers));
3644 while let Some(project_path) = matching_paths_rx.next().await {
3645 if buffers_tx.is_closed() {
3646 break;
3647 }
3648
3649 let this = this.clone();
3650 let open_buffers = open_buffers.clone();
3651 let buffers_tx = buffers_tx.clone();
3652 cx.spawn(|mut cx| async move {
3653 if let Some(buffer) = this
3654 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3655 .await
3656 .log_err()
3657 {
3658 if open_buffers.borrow_mut().insert(buffer.clone()) {
3659 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3660 buffers_tx.send((buffer, snapshot)).await?;
3661 }
3662 }
3663
3664 Ok::<_, anyhow::Error>(())
3665 })
3666 .detach();
3667 }
3668
3669 Ok::<_, anyhow::Error>(())
3670 })
3671 .detach_and_log_err(cx);
3672
3673 let background = cx.background().clone();
3674 cx.background().spawn(async move {
3675 let query = &query;
3676 let mut matched_buffers = Vec::new();
3677 for _ in 0..workers {
3678 matched_buffers.push(HashMap::default());
3679 }
3680 background
3681 .scoped(|scope| {
3682 for worker_matched_buffers in matched_buffers.iter_mut() {
3683 let mut buffers_rx = buffers_rx.clone();
3684 scope.spawn(async move {
3685 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3686 let buffer_matches = query
3687 .search(snapshot.as_rope())
3688 .await
3689 .iter()
3690 .map(|range| {
3691 snapshot.anchor_before(range.start)
3692 ..snapshot.anchor_after(range.end)
3693 })
3694 .collect::<Vec<_>>();
3695 if !buffer_matches.is_empty() {
3696 worker_matched_buffers
3697 .insert(buffer.clone(), buffer_matches);
3698 }
3699 }
3700 });
3701 }
3702 })
3703 .await;
3704 Ok(matched_buffers.into_iter().flatten().collect())
3705 })
3706 } else if let Some(project_id) = self.remote_id() {
3707 let request = self.client.request(query.to_proto(project_id));
3708 cx.spawn(|this, mut cx| async move {
3709 let response = request.await?;
3710 let mut result = HashMap::default();
3711 for location in response.locations {
3712 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3713 let target_buffer = this
3714 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3715 .await?;
3716 let start = location
3717 .start
3718 .and_then(deserialize_anchor)
3719 .ok_or_else(|| anyhow!("missing target start"))?;
3720 let end = location
3721 .end
3722 .and_then(deserialize_anchor)
3723 .ok_or_else(|| anyhow!("missing target end"))?;
3724 result
3725 .entry(target_buffer)
3726 .or_insert(Vec::new())
3727 .push(start..end)
3728 }
3729 Ok(result)
3730 })
3731 } else {
3732 Task::ready(Ok(Default::default()))
3733 }
3734 }
3735
3736 fn request_lsp<R: LspCommand>(
3737 &self,
3738 buffer_handle: ModelHandle<Buffer>,
3739 request: R,
3740 cx: &mut ModelContext<Self>,
3741 ) -> Task<Result<R::Response>>
3742 where
3743 <R::LspRequest as lsp::request::Request>::Result: Send,
3744 {
3745 let buffer = buffer_handle.read(cx);
3746 if self.is_local() {
3747 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3748 if let Some((file, (_, language_server))) =
3749 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3750 {
3751 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3752 return cx.spawn(|this, cx| async move {
3753 if !request.check_capabilities(&language_server.capabilities()) {
3754 return Ok(Default::default());
3755 }
3756
3757 let response = language_server
3758 .request::<R::LspRequest>(lsp_params)
3759 .await
3760 .context("lsp request failed")?;
3761 request
3762 .response_from_lsp(response, this, buffer_handle, cx)
3763 .await
3764 });
3765 }
3766 } else if let Some(project_id) = self.remote_id() {
3767 let rpc = self.client.clone();
3768 let message = request.to_proto(project_id, buffer);
3769 return cx.spawn(|this, cx| async move {
3770 let response = rpc.request(message).await?;
3771 request
3772 .response_from_proto(response, this, buffer_handle, cx)
3773 .await
3774 });
3775 }
3776 Task::ready(Ok(Default::default()))
3777 }
3778
3779 pub fn find_or_create_local_worktree(
3780 &mut self,
3781 abs_path: impl AsRef<Path>,
3782 visible: bool,
3783 cx: &mut ModelContext<Self>,
3784 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3785 let abs_path = abs_path.as_ref();
3786 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3787 Task::ready(Ok((tree.clone(), relative_path.into())))
3788 } else {
3789 let worktree = self.create_local_worktree(abs_path, visible, cx);
3790 cx.foreground()
3791 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3792 }
3793 }
3794
3795 pub fn find_local_worktree(
3796 &self,
3797 abs_path: &Path,
3798 cx: &AppContext,
3799 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3800 for tree in self.worktrees(cx) {
3801 if let Some(relative_path) = tree
3802 .read(cx)
3803 .as_local()
3804 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3805 {
3806 return Some((tree.clone(), relative_path.into()));
3807 }
3808 }
3809 None
3810 }
3811
3812 pub fn is_shared(&self) -> bool {
3813 match &self.client_state {
3814 ProjectClientState::Local { is_shared, .. } => *is_shared,
3815 ProjectClientState::Remote { .. } => false,
3816 }
3817 }
3818
3819 fn create_local_worktree(
3820 &mut self,
3821 abs_path: impl AsRef<Path>,
3822 visible: bool,
3823 cx: &mut ModelContext<Self>,
3824 ) -> Task<Result<ModelHandle<Worktree>>> {
3825 let fs = self.fs.clone();
3826 let client = self.client.clone();
3827 let next_entry_id = self.next_entry_id.clone();
3828 let path: Arc<Path> = abs_path.as_ref().into();
3829 let task = self
3830 .loading_local_worktrees
3831 .entry(path.clone())
3832 .or_insert_with(|| {
3833 cx.spawn(|project, mut cx| {
3834 async move {
3835 let worktree = Worktree::local(
3836 client.clone(),
3837 path.clone(),
3838 visible,
3839 fs,
3840 next_entry_id,
3841 &mut cx,
3842 )
3843 .await;
3844 project.update(&mut cx, |project, _| {
3845 project.loading_local_worktrees.remove(&path);
3846 });
3847 let worktree = worktree?;
3848
3849 let project_id = project.update(&mut cx, |project, cx| {
3850 project.add_worktree(&worktree, cx);
3851 project.shared_remote_id()
3852 });
3853
3854 if let Some(project_id) = project_id {
3855 worktree
3856 .update(&mut cx, |worktree, cx| {
3857 worktree.as_local_mut().unwrap().share(project_id, cx)
3858 })
3859 .await
3860 .log_err();
3861 }
3862
3863 Ok(worktree)
3864 }
3865 .map_err(|err| Arc::new(err))
3866 })
3867 .shared()
3868 })
3869 .clone();
3870 cx.foreground().spawn(async move {
3871 match task.await {
3872 Ok(worktree) => Ok(worktree),
3873 Err(err) => Err(anyhow!("{}", err)),
3874 }
3875 })
3876 }
3877
3878 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3879 self.worktrees.retain(|worktree| {
3880 if let Some(worktree) = worktree.upgrade(cx) {
3881 let id = worktree.read(cx).id();
3882 if id == id_to_remove {
3883 cx.emit(Event::WorktreeRemoved(id));
3884 false
3885 } else {
3886 true
3887 }
3888 } else {
3889 false
3890 }
3891 });
3892 self.metadata_changed(true, cx);
3893 cx.notify();
3894 }
3895
3896 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3897 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3898 if worktree.read(cx).is_local() {
3899 cx.subscribe(&worktree, |this, worktree, _, cx| {
3900 this.update_local_worktree_buffers(worktree, cx);
3901 })
3902 .detach();
3903 }
3904
3905 let push_strong_handle = {
3906 let worktree = worktree.read(cx);
3907 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3908 };
3909 if push_strong_handle {
3910 self.worktrees
3911 .push(WorktreeHandle::Strong(worktree.clone()));
3912 } else {
3913 cx.observe_release(&worktree, |this, _, cx| {
3914 this.worktrees
3915 .retain(|worktree| worktree.upgrade(cx).is_some());
3916 cx.notify();
3917 })
3918 .detach();
3919 self.worktrees
3920 .push(WorktreeHandle::Weak(worktree.downgrade()));
3921 }
3922 self.metadata_changed(true, cx);
3923 cx.emit(Event::WorktreeAdded);
3924 cx.notify();
3925 }
3926
    /// Reconciles open buffers with a local worktree after its snapshot
    /// changed: refreshes each buffer's `File` (matching renamed entries by
    /// entry id, then by path), notifies remote collaborators, prunes
    /// dropped buffer handles, and re-registers renamed buffers with their
    /// language servers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only buffers belonging to this worktree are affected.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer matching by entry id (which survives
                        // renames); fall back to matching by path; otherwise
                        // the entry is gone and the file keeps its old path
                        // with no entry id.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Keep collaborators' view of the file in sync.
                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                buffers_to_delete.push(*buffer_id);
            }
        }

        // Drop entries whose buffers no longer exist.
        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // A renamed buffer may now belong to a different language, so detach
        // it from its old language server and register it afresh.
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
4006
4007 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4008 let new_active_entry = entry.and_then(|project_path| {
4009 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4010 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4011 Some(entry.id)
4012 });
4013 if new_active_entry != self.active_entry {
4014 self.active_entry = new_active_entry;
4015 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4016 }
4017 }
4018
4019 pub fn is_running_disk_based_diagnostics(&self) -> bool {
4020 self.language_server_statuses
4021 .values()
4022 .any(|status| status.pending_diagnostic_updates > 0)
4023 }
4024
4025 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4026 let mut summary = DiagnosticSummary::default();
4027 for (_, path_summary) in self.diagnostic_summaries(cx) {
4028 summary.error_count += path_summary.error_count;
4029 summary.warning_count += path_summary.warning_count;
4030 }
4031 summary
4032 }
4033
4034 pub fn diagnostic_summaries<'a>(
4035 &'a self,
4036 cx: &'a AppContext,
4037 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4038 self.worktrees(cx).flat_map(move |worktree| {
4039 let worktree = worktree.read(cx);
4040 let worktree_id = worktree.id();
4041 worktree
4042 .diagnostic_summaries()
4043 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4044 })
4045 }
4046
4047 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4048 if self
4049 .language_server_statuses
4050 .values()
4051 .map(|status| status.pending_diagnostic_updates)
4052 .sum::<isize>()
4053 == 1
4054 {
4055 cx.emit(Event::DiskBasedDiagnosticsStarted);
4056 }
4057 }
4058
4059 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4060 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4061 if self
4062 .language_server_statuses
4063 .values()
4064 .map(|status| status.pending_diagnostic_updates)
4065 .sum::<isize>()
4066 == 0
4067 {
4068 cx.emit(Event::DiskBasedDiagnosticsFinished);
4069 }
4070 }
4071
    /// The project entry most recently activated via `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
4075
4076 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4077 self.worktree_for_id(path.worktree_id, cx)?
4078 .read(cx)
4079 .entry_for_path(&path.path)
4080 .map(|entry| entry.id)
4081 }
4082
4083 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4084 let worktree = self.worktree_for_entry(entry_id, cx)?;
4085 let worktree = worktree.read(cx);
4086 let worktree_id = worktree.id();
4087 let path = worktree.entry_for_id(entry_id)?.path.clone();
4088 Some(ProjectPath { worktree_id, path })
4089 }
4090
4091 // RPC message handlers
4092
4093 async fn handle_request_join_project(
4094 this: ModelHandle<Self>,
4095 message: TypedEnvelope<proto::RequestJoinProject>,
4096 _: Arc<Client>,
4097 mut cx: AsyncAppContext,
4098 ) -> Result<()> {
4099 let user_id = message.payload.requester_id;
4100 if this.read_with(&cx, |project, _| {
4101 project.collaborators.values().any(|c| c.user.id == user_id)
4102 }) {
4103 this.update(&mut cx, |this, cx| {
4104 this.respond_to_join_request(user_id, true, cx)
4105 });
4106 } else {
4107 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4108 let user = user_store
4109 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4110 .await?;
4111 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4112 }
4113 Ok(())
4114 }
4115
    /// Message handler: the server unregistered this project; notify the
    /// model so it can tear down its remote state.
    async fn handle_unregister_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnregisterProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.removed_from_project(cx));
        Ok(())
    }
4125
    /// Message handler: the host stopped sharing the project; transition the
    /// model to its unshared state.
    async fn handle_project_unshared(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::ProjectUnshared>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.unshared(cx));
        Ok(())
    }
4135
    /// Message handler: a collaborator joined the project. Resolves the
    /// collaborator's user record and registers them by peer id.
    async fn handle_add_collaborator(
        this: ModelHandle<Self>,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        // Resolving the collaborator is async (it goes through the user
        // store), so do it before re-acquiring the model to mutate state.
        let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
        this.update(&mut cx, |this, cx| {
            this.collaborators
                .insert(collaborator.peer_id, collaborator);
            cx.notify();
        });

        Ok(())
    }
4158
    /// Message handler: a collaborator left. Removes them from the
    /// collaborator map and clears their replica's presence from every open
    /// buffer.
    async fn handle_remove_collaborator(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let peer_id = PeerId(envelope.payload.peer_id);
            let replica_id = this
                .collaborators
                .remove(&peer_id)
                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
                .replica_id;
            for (_, buffer) in &this.opened_buffers {
                if let Some(buffer) = buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
                }
            }

            cx.emit(Event::CollaboratorLeft(peer_id));
            cx.notify();
            Ok(())
        })
    }
4183
4184 async fn handle_join_project_request_cancelled(
4185 this: ModelHandle<Self>,
4186 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4187 _: Arc<Client>,
4188 mut cx: AsyncAppContext,
4189 ) -> Result<()> {
4190 let user = this
4191 .update(&mut cx, |this, cx| {
4192 this.user_store.update(cx, |user_store, cx| {
4193 user_store.fetch_user(envelope.payload.requester_id, cx)
4194 })
4195 })
4196 .await?;
4197
4198 this.update(&mut cx, |_, cx| {
4199 cx.emit(Event::ContactCancelledJoinRequest(user));
4200 });
4201
4202 Ok(())
4203 }
4204
    /// Message handler: the host's project metadata changed. Reuses existing
    /// worktrees whose ids are still present, creates remote worktrees for
    /// new ids, and emits removal events for worktrees that disappeared.
    async fn handle_update_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProject>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let replica_id = this.replica_id();
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

            // Take all current worktrees so they can be re-added (or
            // dropped) according to the new metadata.
            let mut old_worktrees_by_id = this
                .worktrees
                .drain(..)
                .filter_map(|worktree| {
                    let worktree = worktree.upgrade(cx)?;
                    Some((worktree.read(cx).id(), worktree))
                })
                .collect::<HashMap<_, _>>();

            for worktree in envelope.payload.worktrees {
                if let Some(old_worktree) =
                    old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
                {
                    this.worktrees.push(WorktreeHandle::Strong(old_worktree));
                } else {
                    // Entries and diagnostics arrive in subsequent
                    // `UpdateWorktree` messages, so start with empty state.
                    let worktree = proto::Worktree {
                        id: worktree.id,
                        root_name: worktree.root_name,
                        entries: Default::default(),
                        diagnostic_summaries: Default::default(),
                        visible: worktree.visible,
                        scan_id: 0,
                    };
                    let (worktree, load_task) =
                        Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                    this.add_worktree(&worktree, cx);
                    load_task.detach();
                }
            }

            this.metadata_changed(true, cx);
            // Anything left in the map is gone on the host.
            for (id, _) in old_worktrees_by_id {
                cx.emit(Event::WorktreeRemoved(id));
            }

            Ok(())
        })
    }
4253
    /// Message handler: applies a batch of remote worktree updates to the
    /// corresponding replica worktree, if that worktree still exists.
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
4271
    /// Message handler: a guest asked to create a file or directory in one of
    /// our local worktrees. Responds with the created entry and the
    /// worktree's scan id at the time of the request.
    async fn handle_create_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Capture the scan id before mutating, so the guest can tell which
        // snapshot the response corresponds to.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
                worktree.create_entry(path, envelope.payload.is_directory, cx)
            })
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4296
    /// Message handler: renames an entry in a local worktree on behalf of a
    /// remote collaborator.
    async fn handle_rename_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id is captured before the rename mutates the worktree.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // New path arrives as raw bytes (unix-only representation).
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4324
    /// Message handler: copies an entry within a local worktree on behalf of a
    /// remote collaborator.
    async fn handle_copy_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id is captured before the copy mutates the worktree.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // Destination path arrives as raw bytes (unix-only representation).
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4352
    /// Message handler: deletes an entry from a local worktree on behalf of a
    /// remote collaborator. The response carries no entry, only the scan id.
    async fn handle_delete_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id is captured before the deletion mutates the worktree.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .delete_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: None,
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4379
    /// Message handler: records an updated diagnostic summary for a path in a
    /// remote worktree and emits `Event::DiagnosticsUpdated`.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            // Summaries for unknown worktrees or messages without a summary
            // are silently ignored.
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
4406
4407 async fn handle_start_language_server(
4408 this: ModelHandle<Self>,
4409 envelope: TypedEnvelope<proto::StartLanguageServer>,
4410 _: Arc<Client>,
4411 mut cx: AsyncAppContext,
4412 ) -> Result<()> {
4413 let server = envelope
4414 .payload
4415 .server
4416 .ok_or_else(|| anyhow!("invalid server"))?;
4417 this.update(&mut cx, |this, cx| {
4418 this.language_server_statuses.insert(
4419 server.id as usize,
4420 LanguageServerStatus {
4421 name: server.name,
4422 pending_work: Default::default(),
4423 pending_diagnostic_updates: 0,
4424 },
4425 );
4426 cx.notify();
4427 });
4428 Ok(())
4429 }
4430
    /// Message handler: mirrors language-server status updates (work progress
    /// and disk-based-diagnostics phases) received from a remote peer.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamp is taken locally, not from the wire.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
4479
4480 async fn handle_update_buffer(
4481 this: ModelHandle<Self>,
4482 envelope: TypedEnvelope<proto::UpdateBuffer>,
4483 _: Arc<Client>,
4484 mut cx: AsyncAppContext,
4485 ) -> Result<()> {
4486 this.update(&mut cx, |this, cx| {
4487 let payload = envelope.payload.clone();
4488 let buffer_id = payload.buffer_id;
4489 let ops = payload
4490 .operations
4491 .into_iter()
4492 .map(|op| language::proto::deserialize_operation(op))
4493 .collect::<Result<Vec<_>, _>>()?;
4494 let is_remote = this.is_remote();
4495 match this.opened_buffers.entry(buffer_id) {
4496 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4497 OpenBuffer::Strong(buffer) => {
4498 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4499 }
4500 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4501 OpenBuffer::Weak(_) => {}
4502 },
4503 hash_map::Entry::Vacant(e) => {
4504 assert!(
4505 is_remote,
4506 "received buffer update from {:?}",
4507 envelope.original_sender_id
4508 );
4509 e.insert(OpenBuffer::Loading(ops));
4510 }
4511 }
4512 Ok(())
4513 })
4514 }
4515
4516 async fn handle_update_buffer_file(
4517 this: ModelHandle<Self>,
4518 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4519 _: Arc<Client>,
4520 mut cx: AsyncAppContext,
4521 ) -> Result<()> {
4522 this.update(&mut cx, |this, cx| {
4523 let payload = envelope.payload.clone();
4524 let buffer_id = payload.buffer_id;
4525 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4526 let worktree = this
4527 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4528 .ok_or_else(|| anyhow!("no such worktree"))?;
4529 let file = File::from_proto(file, worktree.clone(), cx)?;
4530 let buffer = this
4531 .opened_buffers
4532 .get_mut(&buffer_id)
4533 .and_then(|b| b.upgrade(cx))
4534 .ok_or_else(|| anyhow!("no such buffer"))?;
4535 buffer.update(cx, |buffer, cx| {
4536 buffer.file_updated(Box::new(file), cx).detach();
4537 });
4538 Ok(())
4539 })
4540 }
4541
    /// Message handler: saves a buffer on behalf of a guest.
    ///
    /// Waits until this replica has caught up to the version the guest
    /// requested before saving, so the saved contents include the guest's
    /// edits.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Block until the requested edits have been applied locally.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
4574
4575 async fn handle_reload_buffers(
4576 this: ModelHandle<Self>,
4577 envelope: TypedEnvelope<proto::ReloadBuffers>,
4578 _: Arc<Client>,
4579 mut cx: AsyncAppContext,
4580 ) -> Result<proto::ReloadBuffersResponse> {
4581 let sender_id = envelope.original_sender_id()?;
4582 let reload = this.update(&mut cx, |this, cx| {
4583 let mut buffers = HashSet::default();
4584 for buffer_id in &envelope.payload.buffer_ids {
4585 buffers.insert(
4586 this.opened_buffers
4587 .get(buffer_id)
4588 .and_then(|buffer| buffer.upgrade(cx))
4589 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4590 );
4591 }
4592 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4593 })?;
4594
4595 let project_transaction = reload.await?;
4596 let project_transaction = this.update(&mut cx, |this, cx| {
4597 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4598 });
4599 Ok(proto::ReloadBuffersResponse {
4600 transaction: Some(project_transaction),
4601 })
4602 }
4603
4604 async fn handle_format_buffers(
4605 this: ModelHandle<Self>,
4606 envelope: TypedEnvelope<proto::FormatBuffers>,
4607 _: Arc<Client>,
4608 mut cx: AsyncAppContext,
4609 ) -> Result<proto::FormatBuffersResponse> {
4610 let sender_id = envelope.original_sender_id()?;
4611 let format = this.update(&mut cx, |this, cx| {
4612 let mut buffers = HashSet::default();
4613 for buffer_id in &envelope.payload.buffer_ids {
4614 buffers.insert(
4615 this.opened_buffers
4616 .get(buffer_id)
4617 .and_then(|buffer| buffer.upgrade(cx))
4618 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4619 );
4620 }
4621 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4622 })?;
4623
4624 let project_transaction = format.await?;
4625 let project_transaction = this.update(&mut cx, |this, cx| {
4626 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4627 });
4628 Ok(proto::FormatBuffersResponse {
4629 transaction: Some(project_transaction),
4630 })
4631 }
4632
    /// Message handler: computes completions at a position on behalf of a
    /// guest.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait until the sender's edits have reached this replica, so the
        // anchor refers to text we have seen.
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // The response echoes the version the completions were computed
        // against; presumably so the guest can resolve the returned ranges —
        // confirm against the client side.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4667
    /// Message handler: applies a completion's additional text edits (e.g.
    /// auto-imports) on behalf of a guest, returning the resulting
    /// transaction, if any.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to deserialize the completion's
            // label.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4700
    /// Message handler: computes code actions for a range on behalf of a
    /// guest.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait until the sender's edits have reached this replica, so the
        // anchors refer to text we have seen.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Echo back the version the actions were computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4743
    /// Message handler: applies a code action on behalf of a guest and sends
    /// back the resulting project-wide transaction.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        // Serializing for the sender ensures any buffers the action touched
        // that the sender hasn't seen are sent with their full state.
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4774
    /// Generic message handler for LSP-backed requests (hover, definition,
    /// etc.). The type parameter `T` translates between the protobuf
    /// request/response pair and the underlying LSP command.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // `from_proto` is async: commands may wait for buffer state here.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Version captured before issuing the request; passed to
        // `response_to_proto` so the response can be serialized relative to it.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4815
4816 async fn handle_get_project_symbols(
4817 this: ModelHandle<Self>,
4818 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4819 _: Arc<Client>,
4820 mut cx: AsyncAppContext,
4821 ) -> Result<proto::GetProjectSymbolsResponse> {
4822 let symbols = this
4823 .update(&mut cx, |this, cx| {
4824 this.symbols(&envelope.payload.query, cx)
4825 })
4826 .await?;
4827
4828 Ok(proto::GetProjectSymbolsResponse {
4829 symbols: symbols.iter().map(serialize_symbol).collect(),
4830 })
4831 }
4832
    /// Message handler: runs a project-wide search on behalf of a guest and
    /// returns the matches as serialized locations.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Buffers the peer hasn't seen yet are serialized with
                    // their full state (see `serialize_buffer_for_peer`).
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
4862
    /// Message handler: opens the buffer containing a previously returned
    /// project symbol on behalf of a guest.
    ///
    /// The symbol carries a signature (see `symbol_signature`) keyed with this
    /// project's private nonce; it is re-verified here so a peer cannot forge
    /// a symbol to open an arbitrary path.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4893
    /// Computes a SHA-256 digest over a worktree id and path, keyed with this
    /// project's private `nonce`. Used to authenticate symbols round-tripped
    /// through peers (see `handle_open_buffer_for_symbol`).
    fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
        let mut hasher = Sha256::new();
        hasher.update(worktree_id.to_proto().to_be_bytes());
        hasher.update(path.to_string_lossy().as_bytes());
        hasher.update(self.nonce.to_be_bytes());
        hasher.finalize().as_slice().try_into().unwrap()
    }
4901
4902 async fn handle_open_buffer_by_id(
4903 this: ModelHandle<Self>,
4904 envelope: TypedEnvelope<proto::OpenBufferById>,
4905 _: Arc<Client>,
4906 mut cx: AsyncAppContext,
4907 ) -> Result<proto::OpenBufferResponse> {
4908 let peer_id = envelope.original_sender_id()?;
4909 let buffer = this
4910 .update(&mut cx, |this, cx| {
4911 this.open_buffer_by_id(envelope.payload.id, cx)
4912 })
4913 .await?;
4914 this.update(&mut cx, |this, cx| {
4915 Ok(proto::OpenBufferResponse {
4916 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4917 })
4918 })
4919 }
4920
    /// Message handler: opens a buffer by worktree id and path on behalf of a
    /// guest, serializing its state back to the requesting peer.
    async fn handle_open_buffer_by_path(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferByPath>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
4946
4947 fn serialize_project_transaction_for_peer(
4948 &mut self,
4949 project_transaction: ProjectTransaction,
4950 peer_id: PeerId,
4951 cx: &AppContext,
4952 ) -> proto::ProjectTransaction {
4953 let mut serialized_transaction = proto::ProjectTransaction {
4954 buffers: Default::default(),
4955 transactions: Default::default(),
4956 };
4957 for (buffer, transaction) in project_transaction.0 {
4958 serialized_transaction
4959 .buffers
4960 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4961 serialized_transaction
4962 .transactions
4963 .push(language::proto::serialize_transaction(&transaction));
4964 }
4965 serialized_transaction
4966 }
4967
    /// Reconstructs a project transaction received from a peer, resolving each
    /// buffer and waiting for the referenced edits to arrive before optionally
    /// pushing the transactions onto the local undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays (see
            // `serialize_project_transaction_for_peer`).
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The operations composing the transaction may still be in
                // flight; wait until this replica has seen them all.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
5001
5002 fn serialize_buffer_for_peer(
5003 &mut self,
5004 buffer: &ModelHandle<Buffer>,
5005 peer_id: PeerId,
5006 cx: &AppContext,
5007 ) -> proto::Buffer {
5008 let buffer_id = buffer.read(cx).remote_id();
5009 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5010 if shared_buffers.insert(buffer_id) {
5011 proto::Buffer {
5012 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5013 }
5014 } else {
5015 proto::Buffer {
5016 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5017 }
5018 }
5019 }
5020
    /// Materializes a `proto::Buffer` into a local buffer model.
    ///
    /// The message carries either a bare id (the buffer's full state was sent
    /// to this peer earlier — see `serialize_buffer_for_peer`) or the complete
    /// buffer state.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer's state message may not have been processed
                    // yet; poll `opened_buffers`, sleeping on the
                    // `opened_buffer` watch channel between attempts.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Resolve the buffer's file against a live worktree, if it
                    // has one.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Signal the watch channel to wake any tasks blocked in
                    // the `Variant::Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
5079
    /// Reconstructs a `Symbol` from its protobuf representation, resolving the
    /// language (used for labeling) from the symbol's path.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): this transmutes a wire-provided integer straight into
        // the symbol-kind enum without checking that it names a real variant;
        // an out-of-range value from a peer is undefined behavior. A checked
        // conversion should replace this.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Prefer a language-specific label; fall back to the plain name.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            // The signature authenticates the (worktree, path) pair; see
            // `symbol_signature`.
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
5109
5110 async fn handle_buffer_saved(
5111 this: ModelHandle<Self>,
5112 envelope: TypedEnvelope<proto::BufferSaved>,
5113 _: Arc<Client>,
5114 mut cx: AsyncAppContext,
5115 ) -> Result<()> {
5116 let version = deserialize_version(envelope.payload.version);
5117 let mtime = envelope
5118 .payload
5119 .mtime
5120 .ok_or_else(|| anyhow!("missing mtime"))?
5121 .into();
5122
5123 this.update(&mut cx, |this, cx| {
5124 let buffer = this
5125 .opened_buffers
5126 .get(&envelope.payload.buffer_id)
5127 .and_then(|buffer| buffer.upgrade(cx));
5128 if let Some(buffer) = buffer {
5129 buffer.update(cx, |buffer, cx| {
5130 buffer.did_save(version, mtime, None, cx);
5131 });
5132 }
5133 Ok(())
5134 })
5135 }
5136
5137 async fn handle_buffer_reloaded(
5138 this: ModelHandle<Self>,
5139 envelope: TypedEnvelope<proto::BufferReloaded>,
5140 _: Arc<Client>,
5141 mut cx: AsyncAppContext,
5142 ) -> Result<()> {
5143 let payload = envelope.payload.clone();
5144 let version = deserialize_version(payload.version);
5145 let mtime = payload
5146 .mtime
5147 .ok_or_else(|| anyhow!("missing mtime"))?
5148 .into();
5149 this.update(&mut cx, |this, cx| {
5150 let buffer = this
5151 .opened_buffers
5152 .get(&payload.buffer_id)
5153 .and_then(|buffer| buffer.upgrade(cx));
5154 if let Some(buffer) = buffer {
5155 buffer.update(cx, |buffer, cx| {
5156 buffer.did_reload(version, mtime, cx);
5157 });
5158 }
5159 Ok(())
5160 })
5161 }
5162
5163 pub fn match_paths<'a>(
5164 &self,
5165 query: &'a str,
5166 include_ignored: bool,
5167 smart_case: bool,
5168 max_results: usize,
5169 cancel_flag: &'a AtomicBool,
5170 cx: &AppContext,
5171 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5172 let worktrees = self
5173 .worktrees(cx)
5174 .filter(|worktree| worktree.read(cx).is_visible())
5175 .collect::<Vec<_>>();
5176 let include_root_name = worktrees.len() > 1;
5177 let candidate_sets = worktrees
5178 .into_iter()
5179 .map(|worktree| CandidateSet {
5180 snapshot: worktree.read(cx).snapshot(),
5181 include_ignored,
5182 include_root_name,
5183 })
5184 .collect::<Vec<_>>();
5185
5186 let background = cx.background().clone();
5187 async move {
5188 fuzzy::match_paths(
5189 candidate_sets.as_slice(),
5190 query,
5191 smart_case,
5192 max_results,
5193 cancel_flag,
5194 background,
5195 )
5196 .await
5197 }
5198 }
5199
    /// Converts a batch of LSP text edits into anchor-based buffer edits.
    ///
    /// The edits are interpreted against the buffer snapshot matching
    /// `version` (see `buffer_snapshot_for_lsp_version`), and the conversion
    /// runs on the background executor. Returns an error if any edit falls
    /// outside the snapshot.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            // Sort by start position so adjacent edits can be merged below.
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .collect::<Vec<_>>();
            lsp_edits.sort_by_key(|(range, _)| range.start);

            let mut lsp_edits = lsp_edits.into_iter().peekable();
            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // A gap is only bridgeable when it consists solely of
                        // the newline ending `range.end`'s line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose endpoints don't land on valid positions
                // in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the cursor has moved through an unchanged
                    // region since the last emitted edit; consecutive
                    // deletions/insertions are merged into one edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor after the position so the new
                    // text lands before any content inserted there later.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5304
5305 fn buffer_snapshot_for_lsp_version(
5306 &mut self,
5307 buffer: &ModelHandle<Buffer>,
5308 version: Option<i32>,
5309 cx: &AppContext,
5310 ) -> Result<TextBufferSnapshot> {
5311 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5312
5313 if let Some(version) = version {
5314 let buffer_id = buffer.read(cx).remote_id();
5315 let snapshots = self
5316 .buffer_snapshots
5317 .get_mut(&buffer_id)
5318 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5319 let mut found_snapshot = None;
5320 snapshots.retain(|(snapshot_version, snapshot)| {
5321 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5322 false
5323 } else {
5324 if *snapshot_version == version {
5325 found_snapshot = Some(snapshot.clone());
5326 }
5327 true
5328 }
5329 });
5330
5331 found_snapshot.ok_or_else(|| {
5332 anyhow!(
5333 "snapshot not found for buffer {} at version {}",
5334 buffer_id,
5335 version
5336 )
5337 })
5338 } else {
5339 Ok((buffer.read(cx)).text_snapshot())
5340 }
5341 }
5342
5343 fn language_server_for_buffer(
5344 &self,
5345 buffer: &Buffer,
5346 cx: &AppContext,
5347 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5348 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5349 let worktree_id = file.worktree_id(cx);
5350 self.language_servers
5351 .get(&(worktree_id, language.lsp_adapter()?.name()))
5352 } else {
5353 None
5354 }
5355 }
5356}
5357
5358impl ProjectStore {
5359 pub fn new(db: Arc<Db>) -> Self {
5360 Self {
5361 db,
5362 projects: Default::default(),
5363 }
5364 }
5365
5366 pub fn projects<'a>(
5367 &'a self,
5368 cx: &'a AppContext,
5369 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5370 self.projects
5371 .iter()
5372 .filter_map(|project| project.upgrade(cx))
5373 }
5374
5375 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5376 if let Err(ix) = self
5377 .projects
5378 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5379 {
5380 self.projects.insert(ix, project);
5381 }
5382 cx.notify();
5383 }
5384
5385 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5386 let mut did_change = false;
5387 self.projects.retain(|project| {
5388 if project.is_upgradable(cx) {
5389 true
5390 } else {
5391 did_change = true;
5392 false
5393 }
5394 });
5395 if did_change {
5396 cx.notify();
5397 }
5398 }
5399}
5400
5401impl WorktreeHandle {
5402 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5403 match self {
5404 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5405 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5406 }
5407 }
5408}
5409
5410impl OpenBuffer {
5411 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5412 match self {
5413 OpenBuffer::Strong(handle) => Some(handle.clone()),
5414 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5415 OpenBuffer::Loading(_) => None,
5416 }
5417 }
5418}
5419
/// A set of file-path candidates drawn from a single worktree snapshot, used
/// for fuzzy path matching.
struct CandidateSet {
    snapshot: Snapshot,
    // When true, ignored files are included among the candidates (see `len`
    // and `candidates`).
    include_ignored: bool,
    // When true (and the root is not a single file), matches are prefixed
    // with "<root_name>/" (see `prefix`).
    include_root_name: bool,
}
5425
5426impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5427 type Candidates = CandidateSetIter<'a>;
5428
5429 fn id(&self) -> usize {
5430 self.snapshot.id().to_usize()
5431 }
5432
5433 fn len(&self) -> usize {
5434 if self.include_ignored {
5435 self.snapshot.file_count()
5436 } else {
5437 self.snapshot.visible_file_count()
5438 }
5439 }
5440
5441 fn prefix(&self) -> Arc<str> {
5442 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5443 self.snapshot.root_name().into()
5444 } else if self.include_root_name {
5445 format!("{}/", self.snapshot.root_name()).into()
5446 } else {
5447 "".into()
5448 }
5449 }
5450
5451 fn candidates(&'a self, start: usize) -> Self::Candidates {
5452 CandidateSetIter {
5453 traversal: self.snapshot.files(self.include_ignored, start),
5454 }
5455 }
5456}
5457
/// Iterator adapter that turns a worktree file traversal into fuzzy
/// path-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5461
5462impl<'a> Iterator for CandidateSetIter<'a> {
5463 type Item = PathMatchCandidate<'a>;
5464
5465 fn next(&mut self) -> Option<Self::Item> {
5466 self.traversal.next().map(|entry| {
5467 if let EntryKind::File(char_bag) = entry.kind {
5468 PathMatchCandidate {
5469 path: &entry.path,
5470 char_bag,
5471 }
5472 } else {
5473 unreachable!()
5474 }
5475 })
5476 }
5477}
5478
// `ProjectStore` emits no events of its own; observers are driven by
// `cx.notify()` calls instead.
impl Entity for ProjectStore {
    type Event = ();
}
5482
5483impl Entity for Project {
5484 type Event = Event;
5485
5486 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5487 self.project_store.update(cx, ProjectStore::prune_projects);
5488
5489 match &self.client_state {
5490 ProjectClientState::Local { remote_id_rx, .. } => {
5491 if let Some(project_id) = *remote_id_rx.borrow() {
5492 self.client
5493 .send(proto::UnregisterProject { project_id })
5494 .log_err();
5495 }
5496 }
5497 ProjectClientState::Remote { remote_id, .. } => {
5498 self.client
5499 .send(proto::LeaveProject {
5500 project_id: *remote_id,
5501 })
5502 .log_err();
5503 }
5504 }
5505 }
5506
5507 fn app_will_quit(
5508 &mut self,
5509 _: &mut MutableAppContext,
5510 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5511 let shutdown_futures = self
5512 .language_servers
5513 .drain()
5514 .filter_map(|(_, (_, server))| server.shutdown())
5515 .collect::<Vec<_>>();
5516 Some(
5517 async move {
5518 futures::future::join_all(shutdown_futures).await;
5519 }
5520 .boxed(),
5521 )
5522 }
5523}
5524
5525impl Collaborator {
5526 fn from_proto(
5527 message: proto::Collaborator,
5528 user_store: &ModelHandle<UserStore>,
5529 cx: &mut AsyncAppContext,
5530 ) -> impl Future<Output = Result<Self>> {
5531 let user = user_store.update(cx, |user_store, cx| {
5532 user_store.fetch_user(message.user_id, cx)
5533 });
5534
5535 async move {
5536 Ok(Self {
5537 peer_id: PeerId(message.peer_id),
5538 user: user.await?,
5539 replica_id: message.replica_id as ReplicaId,
5540 })
5541 }
5542 }
5543}
5544
5545impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5546 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5547 Self {
5548 worktree_id,
5549 path: path.as_ref().into(),
5550 }
5551 }
5552}
5553
5554impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5555 fn from(options: lsp::CreateFileOptions) -> Self {
5556 Self {
5557 overwrite: options.overwrite.unwrap_or(false),
5558 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5559 }
5560 }
5561}
5562
5563impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5564 fn from(options: lsp::RenameFileOptions) -> Self {
5565 Self {
5566 overwrite: options.overwrite.unwrap_or(false),
5567 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5568 }
5569 }
5570}
5571
5572impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5573 fn from(options: lsp::DeleteFileOptions) -> Self {
5574 Self {
5575 recursive: options.recursive.unwrap_or(false),
5576 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5577 }
5578 }
5579}
5580
/// Converts a `Symbol` into its protobuf representation for transmission.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY(review): reinterprets the LSP symbol kind as the proto
        // field's type; this assumes the two have an identical in-memory
        // representation — TODO confirm, and prefer an explicit conversion
        // if one is available.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5600
/// Computes the path of `path` relative to `base`, inserting `..` components
/// where needed. Both paths are compared component-wise; a leading common
/// prefix is skipped, and any leftover `base` components each contribute one
/// `..` to the result.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target_iter = path.components();
    let mut base_iter = base.components();
    let mut relative: Vec<Component> = Vec::new();
    loop {
        match (target_iter.next(), base_iter.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of `path` is appended verbatim.
            (Some(component), None) => {
                relative.push(component);
                relative.extend(target_iter.by_ref());
                break;
            }
            // `path` exhausted: each leftover base component becomes `..`.
            (None, _) => relative.push(Component::ParentDir),
            // Still in the shared prefix: skip matching components.
            (Some(a), Some(b)) if relative.is_empty() && a == b => {}
            // A `.` in the base matches nothing; keep the path component.
            (Some(component), Some(b)) if b == Component::CurDir => relative.push(component),
            // The paths diverge: back out of the remaining base components,
            // then append the rest of `path`.
            (Some(component), Some(_)) => {
                relative.push(Component::ParentDir);
                relative.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                relative.push(component);
                relative.extend(target_iter.by_ref());
                break;
            }
        }
    }
    relative.into_iter().map(|c| c.as_os_str()).collect()
}
5629
5630impl Item for Buffer {
5631 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5632 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5633 }
5634}
5635
5636#[cfg(test)]
5637mod tests {
5638 use crate::worktree::WorktreeHandle;
5639
5640 use super::{Event, *};
5641 use fs::RealFs;
5642 use futures::{future, StreamExt};
5643 use gpui::test::subscribe;
5644 use language::{
5645 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5646 OffsetRangeExt, Point, ToPoint,
5647 };
5648 use lsp::Url;
5649 use serde_json::json;
5650 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5651 use unindent::Unindent as _;
5652 use util::{assert_set_eq, test::temp_tree};
5653
    // Verifies that a worktree opened through a symlink is scanned fully
    // (directories reached through a nested symlink included) and that fuzzy
    // path matching returns the expected entries.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the worktree through a symlink to the real root, and add a
        // symlinked directory inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying files.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" against all paths in the worktree.
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5707
    // End-to-end coverage of language-server lifecycle management: servers
    // start lazily when a buffer of their language is opened; open/change/
    // save/close notifications are routed to the right server; renames that
    // change a file's language move the document between servers; and
    // restarting servers reopens the relevant documents.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Attach diagnostics so we can later observe them being cleared when
        // the buffer's language changes.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers receive shutdown requests before the replacements start.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
6084
    // Verifies that diagnostics published for two single-file worktrees are
    // routed to the correct buffers, each with its own severity.
    #[gpui::test]
    async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "let a = 1;",
                "b.rs": "let b = 2;"
            }),
        )
        .await;

        // Each file is its own worktree.
        let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

        let buffer_a = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        // Publish one diagnostic per file: an error in a.rs and a warning in b.rs.
        project.update(cx, |project, cx| {
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/a.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::ERROR),
                            message: "error 1".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/b.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::WARNING),
                            message: "error 2".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
        });

        // Each buffer only carries the diagnostic published for its own file.
        buffer_a.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("a", Some(DiagnosticSeverity::ERROR)),
                    (" = 1;", None),
                ]
            );
        });
        buffer_b.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("b", Some(DiagnosticSeverity::WARNING)),
                    (" = 2;", None),
                ]
            );
        });
    }
6180
    // Verifies the project's event stream around disk-based diagnostics:
    // started/updated/finished events are emitted once even when progress
    // tokens are nested, and publishing identical empty diagnostics twice
    // produces only one update event.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let worktree_id =
            project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nest a second start/end inside the outer progress; no extra
        // started/finished events should be emitted for it.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Ending the remaining progress tokens completes the disk-based pass.
        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // The published diagnostic is visible in the newly opened buffer.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });

        // Ensure publishing empty diagnostics twice only results in one update event.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        cx.foreground().run_until_parked();
        assert_eq!(futures::poll!(events.next()), Poll::Pending);
    }
6310
    // Verifies that restarting a language server while its disk-based
    // diagnostics pass is still in flight does not leave the project stuck in
    // the "running diagnostics" state: the new server's pass completes
    // normally even though the old server never finished.
    #[gpui::test]
    async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            disk_based_diagnostics_progress_token: Some(progress_token),
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate diagnostics starting to update.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;

        // Restart the server before the diagnostics finish updating.
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers([buffer], cx);
        });
        let mut events = subscribe(&project, cx);

        // Simulate the newly started server sending more diagnostics.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // All diagnostics are considered done, despite the old server's diagnostic
        // task never completing.
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );
        project.read_with(cx, |project, _| {
            assert!(!project.is_running_disk_based_diagnostics());
        });
    }
6373
#[gpui::test]
// Verifies that LSP diagnostics are "transformed" through buffer edits:
// diagnostics published against an older document version are remapped to
// current buffer coordinates, overlapping diagnostics are ordered and grouped
// correctly, and disk-based diagnostics are translated according to unsaved
// changes. Group ids increase monotonically across publishes (1..=6 here).
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Diagnostics whose `source` is "disk" will be flagged `is_disk_based`.
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_1.text_document.version > open_notification.text_document.version
    );

    // Report some diagnostics for the initial version of the buffer
    // (before the two blank lines were inserted).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("disk".to_string()),
                    message: "undefined variable 'CCC'".to_string(),
                    ..Default::default()
                },
            ],
        },
    );

    // The diagnostics have moved down since they were created.
    // Rows reported by the server (0, 1, 2) now map to rows (2, 3, 4).
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Chunk iteration merges adjacent runs with the same severity.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // Requesting a sub-range clips the highlighted chunks at its edges.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    // Both diagnostics start at the same position; the warning spans further.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "unreachable statement".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // When two diagnostics start at the same point, the longer (warning)
        // entry is yielded first; the higher severity wins in chunk styling.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version
            > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    // (the 'BB' entry precedes the 'A' entry positionally out of order).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(change_notification_2.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // Entries come back sorted by current buffer position; disk-based
        // positions were translated through all unsaved edits above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
6656
6657 #[gpui::test]
6658 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6659 cx.foreground().forbid_parking();
6660
6661 let text = concat!(
6662 "let one = ;\n", //
6663 "let two = \n",
6664 "let three = 3;\n",
6665 );
6666
6667 let fs = FakeFs::new(cx.background());
6668 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6669
6670 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6671 let buffer = project
6672 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6673 .await
6674 .unwrap();
6675
6676 project.update(cx, |project, cx| {
6677 project
6678 .update_buffer_diagnostics(
6679 &buffer,
6680 vec![
6681 DiagnosticEntry {
6682 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6683 diagnostic: Diagnostic {
6684 severity: DiagnosticSeverity::ERROR,
6685 message: "syntax error 1".to_string(),
6686 ..Default::default()
6687 },
6688 },
6689 DiagnosticEntry {
6690 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6691 diagnostic: Diagnostic {
6692 severity: DiagnosticSeverity::ERROR,
6693 message: "syntax error 2".to_string(),
6694 ..Default::default()
6695 },
6696 },
6697 ],
6698 None,
6699 cx,
6700 )
6701 .unwrap();
6702 });
6703
6704 // An empty range is extended forward to include the following character.
6705 // At the end of a line, an empty range is extended backward to include
6706 // the preceding character.
6707 buffer.read_with(cx, |buffer, _| {
6708 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6709 assert_eq!(
6710 chunks
6711 .iter()
6712 .map(|(s, d)| (s.as_str(), *d))
6713 .collect::<Vec<_>>(),
6714 &[
6715 ("let one = ", None),
6716 (";", Some(DiagnosticSeverity::ERROR)),
6717 ("\nlet two =", None),
6718 (" ", Some(DiagnosticSeverity::ERROR)),
6719 ("\nlet three = 3;\n", None)
6720 ]
6721 );
6722 });
6723 }
6724
#[gpui::test]
// `edits_from_lsp` must interpret the server's edit positions against the
// document version the server computed them for, then translate the resulting
// anchors through any buffer edits made after that version.
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server will compute its edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All edit positions below are in the coordinate space of the ORIGINAL
    // (opened) document version, not the buffer's current contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's intervening edits
    // (the comments) while applying the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
6890
#[gpui::test]
// When a server expresses a small change as a large "replace everything"
// diff (as rust-analyzer does for merge-imports), `edits_from_lsp` should
// diff the old and new text and boil it down to minimal edits.
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large reinsert+delete pair collapses to just two minimal edits:
        // rewriting the import path and removing the now-duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7009
#[gpui::test]
// Some servers send edits out of order, or with inverted (end-before-start)
// ranges. `edits_from_lsp` should tolerate both and still produce the same
// minimal, well-ordered edits as the well-formed case.
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    // NOTE: deliberately inverted range — start (0, 8) after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 8),
                            lsp::Position::new(0, 4),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal edits as the well-ordered case in the test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7124
7125 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7126 buffer: &Buffer,
7127 range: Range<T>,
7128 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7129 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7130 for chunk in buffer.snapshot().chunks(range, true) {
7131 if chunks.last().map_or(false, |prev_chunk| {
7132 prev_chunk.1 == chunk.diagnostic_severity
7133 }) {
7134 chunks.last_mut().unwrap().0.push_str(chunk.text);
7135 } else {
7136 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7137 }
7138 }
7139 chunks
7140 }
7141
7142 #[gpui::test]
7143 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7144 let dir = temp_tree(json!({
7145 "root": {
7146 "dir1": {},
7147 "dir2": {
7148 "dir3": {}
7149 }
7150 }
7151 }));
7152
7153 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7154 let cancel_flag = Default::default();
7155 let results = project
7156 .read_with(cx, |project, cx| {
7157 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7158 })
7159 .await;
7160
7161 assert!(results.is_empty());
7162 }
7163
#[gpui::test(iterations = 10)]
// Go-to-definition targeting a file outside the project's worktree should
// open the target in a new, invisible worktree that is dropped once the
// last reference to the definition (and its buffer) is released.
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Open ONLY b.rs as the project root; a.rs lies outside the worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at `a` in a.rs, a file that is not in the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        // a.rs was opened via a second, invisible worktree.
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Once the definition (the only strong reference) is dropped, the
    // invisible worktree for a.rs is released.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Returns each worktree's absolute path and whether it is user-visible.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
7258
#[gpui::test]
// When a completion item provides only `insert_text` (no `text_edit` range),
// the old range must be inferred from the word prefix at the cursor.
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the partial word "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server replies with a label containing a non-identifier character
    // ('?'), so `insert_text` — not the label — must be used as the new text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred old range covers the typed prefix "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
7316
#[gpui::test(iterations = 10)]
// A code action that resolves to a command (no edits) must be applied by
// executing the command; edits the server then pushes via a `workspace/applyEdit`
// request during execution must be captured into the returned transaction.
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole command-driven edit is undoable as one transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
7432
7433 #[gpui::test]
7434 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7435 let fs = FakeFs::new(cx.background());
7436 fs.insert_tree(
7437 "/dir",
7438 json!({
7439 "file1": "the old contents",
7440 }),
7441 )
7442 .await;
7443
7444 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7445 let buffer = project
7446 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7447 .await
7448 .unwrap();
7449 buffer
7450 .update(cx, |buffer, cx| {
7451 assert_eq!(buffer.text(), "the old contents");
7452 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7453 buffer.save(cx)
7454 })
7455 .await
7456 .unwrap();
7457
7458 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7459 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7460 }
7461
7462 #[gpui::test]
7463 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7464 let fs = FakeFs::new(cx.background());
7465 fs.insert_tree(
7466 "/dir",
7467 json!({
7468 "file1": "the old contents",
7469 }),
7470 )
7471 .await;
7472
7473 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7474 let buffer = project
7475 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7476 .await
7477 .unwrap();
7478 buffer
7479 .update(cx, |buffer, cx| {
7480 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7481 buffer.save(cx)
7482 })
7483 .await
7484 .unwrap();
7485
7486 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7487 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7488 }
7489
#[gpui::test]
// "Save as" on an untitled buffer should write the file, associate the buffer
// with the new path, clear the dirty flag, and register the buffer so that a
// later open of the same path returns the SAME buffer handle.
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Start from an in-memory buffer with no backing file.
    let buffer = project.update(cx, |project, cx| {
        project.create_buffer("", None, cx).unwrap()
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    project
        .update(cx, |project, cx| {
            project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
    buffer.read_with(cx, |buffer, cx| {
        // The buffer is now backed by the chosen path and is clean.
        assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Opening the path again must yield the very same buffer handle.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
7525
7526 #[gpui::test(retries = 5)]
7527 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7528 let dir = temp_tree(json!({
7529 "a": {
7530 "file1": "",
7531 "file2": "",
7532 "file3": "",
7533 },
7534 "b": {
7535 "c": {
7536 "file4": "",
7537 "file5": "",
7538 }
7539 }
7540 }));
7541
7542 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7543 let rpc = project.read_with(cx, |p, _| p.client.clone());
7544
7545 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7546 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7547 async move { buffer.await.unwrap() }
7548 };
7549 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7550 project.read_with(cx, |project, cx| {
7551 let tree = project.worktrees(cx).next().unwrap();
7552 tree.read(cx)
7553 .entry_for_path(path)
7554 .expect(&format!("no entry for path {}", path))
7555 .id
7556 })
7557 };
7558
7559 let buffer2 = buffer_for_path("a/file2", cx).await;
7560 let buffer3 = buffer_for_path("a/file3", cx).await;
7561 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7562 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7563
7564 let file2_id = id_for_path("a/file2", &cx);
7565 let file3_id = id_for_path("a/file3", &cx);
7566 let file4_id = id_for_path("b/c/file4", &cx);
7567
7568 // Create a remote copy of this worktree.
7569 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7570 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7571 let (remote, load_task) = cx.update(|cx| {
7572 Worktree::remote(
7573 1,
7574 1,
7575 initial_snapshot.to_proto(&Default::default(), true),
7576 rpc.clone(),
7577 cx,
7578 )
7579 });
7580 // tree
7581 load_task.await;
7582
7583 cx.read(|cx| {
7584 assert!(!buffer2.read(cx).is_dirty());
7585 assert!(!buffer3.read(cx).is_dirty());
7586 assert!(!buffer4.read(cx).is_dirty());
7587 assert!(!buffer5.read(cx).is_dirty());
7588 });
7589
7590 // Rename and delete files and directories.
7591 tree.flush_fs_events(&cx).await;
7592 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7593 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7594 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7595 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7596 tree.flush_fs_events(&cx).await;
7597
7598 let expected_paths = vec![
7599 "a",
7600 "a/file1",
7601 "a/file2.new",
7602 "b",
7603 "d",
7604 "d/file3",
7605 "d/file4",
7606 ];
7607
7608 cx.read(|app| {
7609 assert_eq!(
7610 tree.read(app)
7611 .paths()
7612 .map(|p| p.to_str().unwrap())
7613 .collect::<Vec<_>>(),
7614 expected_paths
7615 );
7616
7617 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7618 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7619 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7620
7621 assert_eq!(
7622 buffer2.read(app).file().unwrap().path().as_ref(),
7623 Path::new("a/file2.new")
7624 );
7625 assert_eq!(
7626 buffer3.read(app).file().unwrap().path().as_ref(),
7627 Path::new("d/file3")
7628 );
7629 assert_eq!(
7630 buffer4.read(app).file().unwrap().path().as_ref(),
7631 Path::new("d/file4")
7632 );
7633 assert_eq!(
7634 buffer5.read(app).file().unwrap().path().as_ref(),
7635 Path::new("b/c/file5")
7636 );
7637
7638 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7639 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7640 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7641 assert!(buffer5.read(app).file().unwrap().is_deleted());
7642 });
7643
7644 // Update the remote worktree. Check that it becomes consistent with the
7645 // local worktree.
7646 remote.update(cx, |remote, cx| {
7647 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7648 &initial_snapshot,
7649 1,
7650 1,
7651 true,
7652 );
7653 remote
7654 .as_remote_mut()
7655 .unwrap()
7656 .snapshot
7657 .apply_remote_update(update_message)
7658 .unwrap();
7659
7660 assert_eq!(
7661 remote
7662 .paths()
7663 .map(|p| p.to_str().unwrap())
7664 .collect::<Vec<_>>(),
7665 expected_paths
7666 );
7667 });
7668 }
7669
7670 #[gpui::test]
7671 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7672 let fs = FakeFs::new(cx.background());
7673 fs.insert_tree(
7674 "/dir",
7675 json!({
7676 "a.txt": "a-contents",
7677 "b.txt": "b-contents",
7678 }),
7679 )
7680 .await;
7681
7682 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7683
7684 // Spawn multiple tasks to open paths, repeating some paths.
7685 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7686 (
7687 p.open_local_buffer("/dir/a.txt", cx),
7688 p.open_local_buffer("/dir/b.txt", cx),
7689 p.open_local_buffer("/dir/a.txt", cx),
7690 )
7691 });
7692
7693 let buffer_a_1 = buffer_a_1.await.unwrap();
7694 let buffer_a_2 = buffer_a_2.await.unwrap();
7695 let buffer_b = buffer_b.await.unwrap();
7696 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7697 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7698
7699 // There is only one buffer per path.
7700 let buffer_a_id = buffer_a_1.id();
7701 assert_eq!(buffer_a_2.id(), buffer_a_id);
7702
7703 // Open the same path again while it is still open.
7704 drop(buffer_a_1);
7705 let buffer_a_3 = project
7706 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7707 .await
7708 .unwrap();
7709
7710 // There's still only one buffer per path.
7711 assert_eq!(buffer_a_3.id(), buffer_a_id);
7712 }
7713
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        // Exercises the buffer dirty-state lifecycle: edits mark a buffer
        // dirty (emitting Dirtied once per dirty transition), saving clears
        // the flag, and deleting the backing file dirties a clean buffer but
        // emits no Dirtied event for an already-dirty one.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "abc",
                "file2": "def",
                "file3": "ghi",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        // Collects every non-Operation event emitted by buffer1.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit([(1..2, "")], cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit([(1..1, "B")], cx);
            buffer.edit([(2..2, "D")], cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            // Dirtied is only emitted on the clean -> dirty transition, so the
            // second edit produces just Edited.
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs.remove_file("/dir/file2".as_ref(), Default::default())
            .await
            .unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        // Dirty the buffer before deleting its file.
        buffer3.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "x")], cx);
        });
        events.borrow_mut().clear();
        fs.remove_file("/dir/file3".as_ref(), Default::default())
            .await
            .unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
7847
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        // A clean buffer silently reloads when its file changes on disk
        // (adjusting anchors via a diff of old/new contents); a dirty buffer
        // keeps its edits and is flagged as conflicted instead.
        let initial_contents = "aaa\nbbbbb\nc\n";
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "the-file": initial_contents,
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
            .await
            .unwrap();

        // Place an anchor at column 1 of each of the first three rows, so we
        // can check that anchors survive the reload.
        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // The anchors moved with the lines they were placed on.
            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
7923
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        // Publishes LSP diagnostics whose `related_information` entries link
        // primary diagnostics to their supporting hints, and checks that they
        // are grouped: each primary plus its hints shares a group_id, hints
        // are marked non-primary, and `diagnostic_group` returns each group.
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Five raw diagnostics: "error 1" with one hint, and "error 2" with
        // two hints. Primaries reference their hints' locations, and each
        // hint references the primary back via an "original diagnostic" note.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All diagnostics in position order: "error 1" and its hint form
        // group 0; "error 2" and its two hints form group 1.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Fetching each group by id returns exactly its members.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
8174
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // Drives prepare_rename and perform_rename against a fake LSP server:
        // prepare returns the symbol's range, and perform applies a
        // multi-file WorkspaceEdit, yielding a transaction covering both
        // edited buffers.
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // prepare_rename at offset 7 (inside "ONE") should resolve to the
        // range of the symbol, 6..9.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // perform_rename to "THREE"; the fake server responds with edits in
        // both one.rs and two.rs.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The resulting transaction covers both buffers with the edits applied.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
8313
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        // Project-wide text search should cover both on-disk files and open
        // buffers, reflecting unsaved edits in the latter.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        // Initially, matches come only from files on disk.
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        // Edit an open buffer (without saving) to introduce new matches.
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        // The unsaved edits are reflected in the search results.
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        // Runs a project search and flattens the results into a map from
        // file path to the offset ranges of each match, for easy assertions.
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
8384}