1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
/// An entity that can be opened as part of a project (e.g. a buffer).
pub trait Item: Entity {
    /// The project entry backing this item, if it corresponds to one.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
67
/// Tracks all projects in the application and owns the persistence database.
pub struct ProjectStore {
    db: Arc<Db>,
    // Weak handles: the store must not keep projects alive on its own.
    projects: Vec<WeakModelHandle<Project>>,
}
72
/// An open project: a set of worktrees plus all the state needed to
/// collaborate on it and to communicate with language servers.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight server startups; resolves to `None` if startup fails.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    // Whether this instance is the host (Local) or a guest (Remote).
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Notifies waiters whenever any buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffers that have been sent to each guest, keyed by peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // De-duplicates concurrent open requests for the same path.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // De-duplicates concurrent worktree creation for the same abs path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Historical snapshots per buffer, keyed by buffer id; each entry pairs a
    // version number with the snapshot — presumably for LSP sync; confirm.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    // Set once `restore_state` has run; gates `persist_state`.
    initialized_persistent_state: bool,
}
107
/// Errors that can occur when attempting to join a remote project.
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    // Any other failure (connection, protocol, etc.).
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}
119
/// The project's handle on an open buffer.
enum OpenBuffer {
    /// Kept alive by the project (e.g. while shared).
    Strong(ModelHandle<Buffer>),
    /// Allowed to drop when no longer referenced elsewhere.
    Weak(WeakModelHandle<Buffer>),
    /// Buffer is still loading; operations received meanwhile are queued here.
    Loading(Vec<Operation>),
}
125
/// A strong or weak handle on a worktree, depending on whether the project
/// currently needs to keep it alive.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
130
/// Collaboration state: this instance is either the host of a local project
/// or a guest of a remote one.
enum ProjectClientState {
    Local {
        is_shared: bool,
        // Server-assigned project id; `None` until registered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Whether the project should be registered with the server.
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}
147
/// Another peer participating in this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
154
/// Events emitted by a `Project` for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}
169
/// Live status of a language server, for display in the UI.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // In-progress work items, keyed by LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}
176
/// A single unit of in-progress language-server work.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // Not serializable; used locally, presumably to throttle/expire updates.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
184
/// A path to a file or directory, relative to a particular worktree root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
190
/// Counts of primary error and warning diagnostics for a path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
196
/// A range within a specific buffer, e.g. a definition or reference target.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
202
/// A highlighted range in a document, from an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
208
/// A project-wide symbol, as reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    /// The worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    /// The worktree containing the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte digest (the file imports Sha256) — presumably used to verify
    // the symbol when re-requesting it from a peer; confirm at use site.
    pub signature: [u8; 32],
}
221
222#[derive(Clone, Debug)]
223pub struct HoverBlock {
224 pub text: String,
225 pub language: Option<String>,
226}
227
228impl HoverBlock {
229 fn try_new(marked_string: MarkedString) -> Option<Self> {
230 let result = match marked_string {
231 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
232 text: value,
233 language: Some(language),
234 },
235 MarkedString::String(text) => HoverBlock {
236 text,
237 language: None,
238 },
239 };
240 if result.text.is_empty() {
241 None
242 } else {
243 Some(result)
244 }
245 }
246}
247
/// The result of a hover request: content blocks plus the range they apply to.
#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}
253
/// A set of edits grouped as one logical transaction, keyed by the buffer
/// each per-buffer transaction applies to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
256
257impl DiagnosticSummary {
258 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
259 let mut this = Self {
260 error_count: 0,
261 warning_count: 0,
262 };
263
264 for entry in diagnostics {
265 if entry.diagnostic.is_primary {
266 match entry.diagnostic.severity {
267 DiagnosticSeverity::ERROR => this.error_count += 1,
268 DiagnosticSeverity::WARNING => this.warning_count += 1,
269 _ => {}
270 }
271 }
272 }
273
274 this
275 }
276
277 pub fn is_empty(&self) -> bool {
278 self.error_count == 0 && self.warning_count == 0
279 }
280
281 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
282 proto::DiagnosticSummary {
283 path: path.to_string_lossy().to_string(),
284 error_count: self.error_count as u32,
285 warning_count: self.warning_count as u32,
286 }
287 }
288}
289
/// A unique identifier for a file-system entry within a project, allocated
/// from a shared atomic counter so ids are distinct across all worktrees.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// The largest representable entry id.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    /// Converts this id into its wire representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw underlying value.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
312
313impl Project {
    /// Registers all of this type's RPC handlers on the given client.
    /// Must be called once at startup, before any project sends or receives
    /// project-related messages.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget message handlers (no response sent back).
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request handlers (a response is sent back to the requesting peer).
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share one handler, parameterized by command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
353
    /// Creates a new local (host-side) project.
    ///
    /// `online` is the initial value of the project's online flag, which
    /// controls whether it is registered with the collaboration server.
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Background task that registers the project with the server while
            // connected-and-online, and unregisters it otherwise. It re-runs on
            // every change of either the connection status or the online flag.
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    // Merge both watch streams; we only care that *something*
                    // changed, so each item is mapped to `()` via `drop`.
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        // Stop when the project has been dropped.
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                // Random nonce, presumably to disambiguate project instances;
                // confirm at use sites.
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }
429
    /// Joins the remote project with the given server-assigned id as a guest.
    ///
    /// Connects and authenticates if necessary, sends a join request, and on
    /// acceptance builds a guest-side `Project` mirroring the host's worktrees,
    /// language-server statuses, and collaborators.
    ///
    /// Returns a `JoinProjectError` if the host declines, has closed the
    /// project, or has gone offline.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // Map a declined join onto the specific JoinProjectError variant.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Create remote worktrees for each worktree the host reported,
        // detaching their load tasks to run in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watches the connection status and tears this project
                    // down once we disconnect (even momentarily).
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch the collaborators' user records before constructing them, so
        // each Collaborator can reference a resolved User.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
566
    /// Test helper: builds a local project over `fs` with a fake client and
    /// 404-ing HTTP stack, creates a worktree for each of `root_paths`, and
    /// waits for the initial scans to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree's initial file scan is done.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
593
    /// Restores this local project's persisted online flag from the database.
    ///
    /// Reads one key per visible worktree; the project is considered online
    /// only if every stored value says so (missing values fall back to the
    /// `projects_online_by_default` setting). No-op for remote projects.
    /// Also marks persistent state as initialized, enabling `persist_state`.
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        // Read off the main thread; values are stored as single-byte bools.
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        // Release the borrow before notifying, since
                        // metadata_changed re-reads the client state.
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }
626
    /// Persists this local project's online flag to the database, writing one
    /// key per visible worktree. No-op for remote projects or before
    /// `restore_state` has run (to avoid clobbering stored state with the
    /// default value).
    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            // Stored as a single-byte boolean; mirrors the read in restore_state.
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }
640
641 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
642 self.opened_buffers
643 .get(&remote_id)
644 .and_then(|buffer| buffer.upgrade(cx))
645 }
646
    /// The language registry shared by this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    /// The RPC client used for collaboration.
    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    /// The store of known users.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    /// The application-wide project store.
    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }
662
    /// Test-only consistency checks: local projects must have unique worktree
    /// root paths; remote projects must have no buffers with deferred
    /// (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
696
697 #[cfg(any(test, feature = "test-support"))]
698 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
699 let path = path.into();
700 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
701 self.opened_buffers.iter().any(|(_, buffer)| {
702 if let Some(buffer) = buffer.upgrade(cx) {
703 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
704 if file.worktree == worktree && file.path() == &path.path {
705 return true;
706 }
707 }
708 }
709 false
710 })
711 } else {
712 false
713 }
714 }
715
    /// The file system implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
719
    /// Sets this local project's online flag, notifying observers only when
    /// the value actually changes. No-op for remote projects.
    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                // Release the borrow before notifying: metadata_changed
                // re-reads the client state.
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }
730
731 pub fn is_online(&self) -> bool {
732 match &self.client_state {
733 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
734 ProjectClientState::Remote { .. } => true,
735 }
736 }
737
    /// Unshares this local project and unregisters it from the server,
    /// clearing its remote id once the server has acknowledged. No-op if the
    /// project has no remote id or is a remote project.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    // Surface the server's response only after local cleanup.
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }
775
    /// Registers this local project with the server, storing the assigned
    /// remote id and subscribing to messages for it. No-op if a remote id is
    /// already assigned.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }
799
800 pub fn remote_id(&self) -> Option<u64> {
801 match &self.client_state {
802 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
803 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
804 }
805 }
806
    /// Resolves to the project's remote id, waiting until one is assigned if
    /// necessary. Resolves immediately for remote projects or already
    /// registered local projects.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: poll the watch until an id appears.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
829
830 pub fn shared_remote_id(&self) -> Option<u64> {
831 match &self.client_state {
832 ProjectClientState::Local {
833 remote_id_rx,
834 is_shared,
835 ..
836 } => {
837 if *is_shared {
838 *remote_id_rx.borrow()
839 } else {
840 None
841 }
842 }
843 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
844 }
845 }
846
847 pub fn replica_id(&self) -> ReplicaId {
848 match &self.client_state {
849 ProjectClientState::Local { .. } => 0,
850 ProjectClientState::Remote { replica_id, .. } => *replica_id,
851 }
852 }
853
    /// Reacts to a change in this local project's metadata: pushes the updated
    /// worktree list to the server (when registered and online), notifies the
    /// project store and observers, and optionally persists the online state.
    /// No-op for remote projects.
    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            // Only send the update when registered (has an id) and online.
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }
885
    /// The current collaborators on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
889
    /// Iterates over all live worktrees, skipping any whose weak handles can
    /// no longer be upgraded.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
898
899 pub fn visible_worktrees<'a>(
900 &'a self,
901 cx: &'a AppContext,
902 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
903 self.worktrees.iter().filter_map(|worktree| {
904 worktree.upgrade(cx).and_then(|worktree| {
905 if worktree.read(cx).is_visible() {
906 Some(worktree)
907 } else {
908 None
909 }
910 })
911 })
912 }
913
    /// The root directory names of all visible worktrees.
    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }
918
919 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
920 self.worktrees
921 .iter()
922 .filter_map(|worktree| {
923 let worktree = worktree.upgrade(&cx)?.read(cx);
924 if worktree.is_visible() {
925 Some(format!(
926 "project-path-online:{}",
927 worktree.as_local().unwrap().abs_path().to_string_lossy()
928 ))
929 } else {
930 None
931 }
932 })
933 .collect::<Vec<_>>()
934 }
935
936 pub fn worktree_for_id(
937 &self,
938 id: WorktreeId,
939 cx: &AppContext,
940 ) -> Option<ModelHandle<Worktree>> {
941 self.worktrees(cx)
942 .find(|worktree| worktree.read(cx).id() == id)
943 }
944
945 pub fn worktree_for_entry(
946 &self,
947 entry_id: ProjectEntryId,
948 cx: &AppContext,
949 ) -> Option<ModelHandle<Worktree>> {
950 self.worktrees(cx)
951 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
952 }
953
954 pub fn worktree_id_for_entry(
955 &self,
956 entry_id: ProjectEntryId,
957 cx: &AppContext,
958 ) -> Option<WorktreeId> {
959 self.worktree_for_entry(entry_id, cx)
960 .map(|worktree| worktree.read(cx).id())
961 }
962
963 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
964 paths.iter().all(|path| self.contains_path(&path, cx))
965 }
966
967 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
968 for worktree in self.worktrees(cx) {
969 let worktree = worktree.read(cx).as_local();
970 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
971 return true;
972 }
973 }
974 false
975 }
976
    /// Creates a file or directory at the given project path.
    ///
    /// On the host, delegates to the local worktree. On a guest, sends a
    /// `CreateProjectEntry` request to the host and inserts the returned entry
    /// into the remote worktree's snapshot. Returns `None` if no worktree has
    /// the given id.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        // Paths travel over the wire as raw bytes (Unix only).
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1019
    /// Copies an existing entry to a new path within its worktree.
    ///
    /// On the host, delegates to the local worktree. On a guest, sends a
    /// `CopyProjectEntry` request to the host and inserts the returned entry
    /// into the remote worktree's snapshot. Returns `None` if the entry's
    /// worktree can't be found.
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        // Paths travel over the wire as raw bytes (Unix only).
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1062
1063 pub fn rename_entry(
1064 &mut self,
1065 entry_id: ProjectEntryId,
1066 new_path: impl Into<Arc<Path>>,
1067 cx: &mut ModelContext<Self>,
1068 ) -> Option<Task<Result<Entry>>> {
1069 let worktree = self.worktree_for_entry(entry_id, cx)?;
1070 let new_path = new_path.into();
1071 if self.is_local() {
1072 worktree.update(cx, |worktree, cx| {
1073 worktree
1074 .as_local_mut()
1075 .unwrap()
1076 .rename_entry(entry_id, new_path, cx)
1077 })
1078 } else {
1079 let client = self.client.clone();
1080 let project_id = self.remote_id().unwrap();
1081
1082 Some(cx.spawn_weak(|_, mut cx| async move {
1083 let response = client
1084 .request(proto::RenameProjectEntry {
1085 project_id,
1086 entry_id: entry_id.to_proto(),
1087 new_path: new_path.as_os_str().as_bytes().to_vec(),
1088 })
1089 .await?;
1090 let entry = response
1091 .entry
1092 .ok_or_else(|| anyhow!("missing entry in response"))?;
1093 worktree
1094 .update(&mut cx, |worktree, cx| {
1095 worktree.as_remote().unwrap().insert_entry(
1096 entry,
1097 response.worktree_scan_id as usize,
1098 cx,
1099 )
1100 })
1101 .await
1102 }))
1103 }
1104 }
1105
1106 pub fn delete_entry(
1107 &mut self,
1108 entry_id: ProjectEntryId,
1109 cx: &mut ModelContext<Self>,
1110 ) -> Option<Task<Result<()>>> {
1111 let worktree = self.worktree_for_entry(entry_id, cx)?;
1112 if self.is_local() {
1113 worktree.update(cx, |worktree, cx| {
1114 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1115 })
1116 } else {
1117 let client = self.client.clone();
1118 let project_id = self.remote_id().unwrap();
1119 Some(cx.spawn_weak(|_, mut cx| async move {
1120 let response = client
1121 .request(proto::DeleteProjectEntry {
1122 project_id,
1123 entry_id: entry_id.to_proto(),
1124 })
1125 .await?;
1126 worktree
1127 .update(&mut cx, move |worktree, cx| {
1128 worktree.as_remote().unwrap().delete_entry(
1129 entry_id,
1130 response.worktree_scan_id as usize,
1131 cx,
1132 )
1133 })
1134 .await
1135 }))
1136 }
1137 }
1138
1139 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1140 let project_id;
1141 if let ProjectClientState::Local {
1142 remote_id_rx,
1143 is_shared,
1144 ..
1145 } = &mut self.client_state
1146 {
1147 if *is_shared {
1148 return Task::ready(Ok(()));
1149 }
1150 *is_shared = true;
1151 if let Some(id) = *remote_id_rx.borrow() {
1152 project_id = id;
1153 } else {
1154 return Task::ready(Err(anyhow!("project hasn't been registered")));
1155 }
1156 } else {
1157 return Task::ready(Err(anyhow!("can't share a remote project")));
1158 };
1159
1160 for open_buffer in self.opened_buffers.values_mut() {
1161 match open_buffer {
1162 OpenBuffer::Strong(_) => {}
1163 OpenBuffer::Weak(buffer) => {
1164 if let Some(buffer) = buffer.upgrade(cx) {
1165 *open_buffer = OpenBuffer::Strong(buffer);
1166 }
1167 }
1168 OpenBuffer::Loading(_) => unreachable!(),
1169 }
1170 }
1171
1172 for worktree_handle in self.worktrees.iter_mut() {
1173 match worktree_handle {
1174 WorktreeHandle::Strong(_) => {}
1175 WorktreeHandle::Weak(worktree) => {
1176 if let Some(worktree) = worktree.upgrade(cx) {
1177 *worktree_handle = WorktreeHandle::Strong(worktree);
1178 }
1179 }
1180 }
1181 }
1182
1183 let mut tasks = Vec::new();
1184 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1185 worktree.update(cx, |worktree, cx| {
1186 let worktree = worktree.as_local_mut().unwrap();
1187 tasks.push(worktree.share(project_id, cx));
1188 });
1189 }
1190
1191 cx.spawn(|this, mut cx| async move {
1192 for task in tasks {
1193 task.await?;
1194 }
1195 this.update(&mut cx, |_, cx| cx.notify());
1196 Ok(())
1197 })
1198 }
1199
1200 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1201 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1202 if !*is_shared {
1203 return;
1204 }
1205
1206 *is_shared = false;
1207 self.collaborators.clear();
1208 self.shared_buffers.clear();
1209 for worktree_handle in self.worktrees.iter_mut() {
1210 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1211 let is_visible = worktree.update(cx, |worktree, _| {
1212 worktree.as_local_mut().unwrap().unshare();
1213 worktree.is_visible()
1214 });
1215 if !is_visible {
1216 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1217 }
1218 }
1219 }
1220
1221 for open_buffer in self.opened_buffers.values_mut() {
1222 match open_buffer {
1223 OpenBuffer::Strong(buffer) => {
1224 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1225 }
1226 _ => {}
1227 }
1228 }
1229
1230 cx.notify();
1231 } else {
1232 log::error!("attempted to unshare a remote project");
1233 }
1234 }
1235
1236 pub fn respond_to_join_request(
1237 &mut self,
1238 requester_id: u64,
1239 allow: bool,
1240 cx: &mut ModelContext<Self>,
1241 ) {
1242 if let Some(project_id) = self.remote_id() {
1243 let share = self.share(cx);
1244 let client = self.client.clone();
1245 cx.foreground()
1246 .spawn(async move {
1247 share.await?;
1248 client.send(proto::RespondToJoinProjectRequest {
1249 requester_id,
1250 project_id,
1251 allow,
1252 })
1253 })
1254 .detach_and_log_err(cx);
1255 }
1256 }
1257
1258 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1259 if let ProjectClientState::Remote {
1260 sharing_has_stopped,
1261 ..
1262 } = &mut self.client_state
1263 {
1264 *sharing_has_stopped = true;
1265 self.collaborators.clear();
1266 cx.notify();
1267 }
1268 }
1269
1270 pub fn is_read_only(&self) -> bool {
1271 match &self.client_state {
1272 ProjectClientState::Local { .. } => false,
1273 ProjectClientState::Remote {
1274 sharing_has_stopped,
1275 ..
1276 } => *sharing_has_stopped,
1277 }
1278 }
1279
1280 pub fn is_local(&self) -> bool {
1281 match &self.client_state {
1282 ProjectClientState::Local { .. } => true,
1283 ProjectClientState::Remote { .. } => false,
1284 }
1285 }
1286
1287 pub fn is_remote(&self) -> bool {
1288 !self.is_local()
1289 }
1290
1291 pub fn create_buffer(
1292 &mut self,
1293 text: &str,
1294 language: Option<Arc<Language>>,
1295 cx: &mut ModelContext<Self>,
1296 ) -> Result<ModelHandle<Buffer>> {
1297 if self.is_remote() {
1298 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1299 }
1300
1301 let buffer = cx.add_model(|cx| {
1302 Buffer::new(self.replica_id(), text, cx)
1303 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1304 });
1305 self.register_buffer(&buffer, cx)?;
1306 Ok(buffer)
1307 }
1308
1309 pub fn open_path(
1310 &mut self,
1311 path: impl Into<ProjectPath>,
1312 cx: &mut ModelContext<Self>,
1313 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1314 let task = self.open_buffer(path, cx);
1315 cx.spawn_weak(|_, cx| async move {
1316 let buffer = task.await?;
1317 let project_entry_id = buffer
1318 .read_with(&cx, |buffer, cx| {
1319 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1320 })
1321 .ok_or_else(|| anyhow!("no project entry"))?;
1322 Ok((project_entry_id, buffer.into()))
1323 })
1324 }
1325
1326 pub fn open_local_buffer(
1327 &mut self,
1328 abs_path: impl AsRef<Path>,
1329 cx: &mut ModelContext<Self>,
1330 ) -> Task<Result<ModelHandle<Buffer>>> {
1331 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1332 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1333 } else {
1334 Task::ready(Err(anyhow!("no such path")))
1335 }
1336 }
1337
    /// Opens (or returns an already-open) buffer for `path`.
    ///
    /// Concurrent calls for the same path are coalesced: the first caller
    /// starts the load and records a watch channel in `loading_buffers`;
    /// later callers clone that receiver and wait on it, so each path is
    /// loaded at most once at a time.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote worktrees fetch the
                // buffer over RPC.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to every waiter. Errors are wrapped in
                    // `Arc` so the watch value can be cloned by each waiter.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch channel holds a result, then clone it out.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1398
1399 fn open_local_buffer_internal(
1400 &mut self,
1401 path: &Arc<Path>,
1402 worktree: &ModelHandle<Worktree>,
1403 cx: &mut ModelContext<Self>,
1404 ) -> Task<Result<ModelHandle<Buffer>>> {
1405 let load_buffer = worktree.update(cx, |worktree, cx| {
1406 let worktree = worktree.as_local_mut().unwrap();
1407 worktree.load_buffer(path, cx)
1408 });
1409 cx.spawn(|this, mut cx| async move {
1410 let buffer = load_buffer.await?;
1411 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1412 Ok(buffer)
1413 })
1414 }
1415
1416 fn open_remote_buffer_internal(
1417 &mut self,
1418 path: &Arc<Path>,
1419 worktree: &ModelHandle<Worktree>,
1420 cx: &mut ModelContext<Self>,
1421 ) -> Task<Result<ModelHandle<Buffer>>> {
1422 let rpc = self.client.clone();
1423 let project_id = self.remote_id().unwrap();
1424 let remote_worktree_id = worktree.read(cx).id();
1425 let path = path.clone();
1426 let path_string = path.to_string_lossy().to_string();
1427 cx.spawn(|this, mut cx| async move {
1428 let response = rpc
1429 .request(proto::OpenBufferByPath {
1430 project_id,
1431 worktree_id: remote_worktree_id.to_proto(),
1432 path: path_string,
1433 })
1434 .await?;
1435 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1436 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1437 .await
1438 })
1439 }
1440
1441 fn open_local_buffer_via_lsp(
1442 &mut self,
1443 abs_path: lsp::Url,
1444 lsp_adapter: Arc<dyn LspAdapter>,
1445 lsp_server: Arc<LanguageServer>,
1446 cx: &mut ModelContext<Self>,
1447 ) -> Task<Result<ModelHandle<Buffer>>> {
1448 cx.spawn(|this, mut cx| async move {
1449 let abs_path = abs_path
1450 .to_file_path()
1451 .map_err(|_| anyhow!("can't convert URI to path"))?;
1452 let (worktree, relative_path) = if let Some(result) =
1453 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1454 {
1455 result
1456 } else {
1457 let worktree = this
1458 .update(&mut cx, |this, cx| {
1459 this.create_local_worktree(&abs_path, false, cx)
1460 })
1461 .await?;
1462 this.update(&mut cx, |this, cx| {
1463 this.language_servers.insert(
1464 (worktree.read(cx).id(), lsp_adapter.name()),
1465 (lsp_adapter, lsp_server),
1466 );
1467 });
1468 (worktree, PathBuf::new())
1469 };
1470
1471 let project_path = ProjectPath {
1472 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1473 path: relative_path.into(),
1474 };
1475 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1476 .await
1477 })
1478 }
1479
1480 pub fn open_buffer_by_id(
1481 &mut self,
1482 id: u64,
1483 cx: &mut ModelContext<Self>,
1484 ) -> Task<Result<ModelHandle<Buffer>>> {
1485 if let Some(buffer) = self.buffer_for_id(id, cx) {
1486 Task::ready(Ok(buffer))
1487 } else if self.is_local() {
1488 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1489 } else if let Some(project_id) = self.remote_id() {
1490 let request = self
1491 .client
1492 .request(proto::OpenBufferById { project_id, id });
1493 cx.spawn(|this, mut cx| async move {
1494 let buffer = request
1495 .await?
1496 .buffer
1497 .ok_or_else(|| anyhow!("invalid buffer"))?;
1498 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1499 .await
1500 })
1501 } else {
1502 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1503 }
1504 }
1505
1506 pub fn save_buffer_as(
1507 &mut self,
1508 buffer: ModelHandle<Buffer>,
1509 abs_path: PathBuf,
1510 cx: &mut ModelContext<Project>,
1511 ) -> Task<Result<()>> {
1512 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1513 let old_path =
1514 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1515 cx.spawn(|this, mut cx| async move {
1516 if let Some(old_path) = old_path {
1517 this.update(&mut cx, |this, cx| {
1518 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1519 });
1520 }
1521 let (worktree, path) = worktree_task.await?;
1522 worktree
1523 .update(&mut cx, |worktree, cx| {
1524 worktree
1525 .as_local_mut()
1526 .unwrap()
1527 .save_buffer_as(buffer.clone(), path, cx)
1528 })
1529 .await?;
1530 this.update(&mut cx, |this, cx| {
1531 this.assign_language_to_buffer(&buffer, cx);
1532 this.register_buffer_with_language_server(&buffer, cx);
1533 });
1534 Ok(())
1535 })
1536 }
1537
1538 pub fn get_open_buffer(
1539 &mut self,
1540 path: &ProjectPath,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Option<ModelHandle<Buffer>> {
1543 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1544 self.opened_buffers.values().find_map(|buffer| {
1545 let buffer = buffer.upgrade(cx)?;
1546 let file = File::from_dyn(buffer.read(cx).file())?;
1547 if file.worktree == worktree && file.path() == &path.path {
1548 Some(buffer)
1549 } else {
1550 None
1551 }
1552 })
1553 }
1554
    /// Tracks `buffer` in `opened_buffers`, assigns its language, and
    /// announces it to the matching language server.
    ///
    /// The handle is held strongly only while the project is remote or
    /// shared; otherwise a weak handle is kept so the buffer can drop when
    /// unused. Errors if a live buffer with the same remote id is already
    /// registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived while the buffer was still loading are
            // applied now that it exists.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak handle is silently replaced; a live one means a
            // duplicate registration.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer model is released, notify its language server that
        // the document was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1612
1613 fn register_buffer_with_language_server(
1614 &mut self,
1615 buffer_handle: &ModelHandle<Buffer>,
1616 cx: &mut ModelContext<Self>,
1617 ) {
1618 let buffer = buffer_handle.read(cx);
1619 let buffer_id = buffer.remote_id();
1620 if let Some(file) = File::from_dyn(buffer.file()) {
1621 if file.is_local() {
1622 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1623 let initial_snapshot = buffer.text_snapshot();
1624
1625 let mut language_server = None;
1626 let mut language_id = None;
1627 if let Some(language) = buffer.language() {
1628 let worktree_id = file.worktree_id(cx);
1629 if let Some(adapter) = language.lsp_adapter() {
1630 language_id = adapter.id_for_language(language.name().as_ref());
1631 language_server = self
1632 .language_servers
1633 .get(&(worktree_id, adapter.name()))
1634 .cloned();
1635 }
1636 }
1637
1638 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1639 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1640 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1641 .log_err();
1642 }
1643 }
1644
1645 if let Some((_, server)) = language_server {
1646 server
1647 .notify::<lsp::notification::DidOpenTextDocument>(
1648 lsp::DidOpenTextDocumentParams {
1649 text_document: lsp::TextDocumentItem::new(
1650 uri,
1651 language_id.unwrap_or_default(),
1652 0,
1653 initial_snapshot.text(),
1654 ),
1655 }
1656 .clone(),
1657 )
1658 .log_err();
1659 buffer_handle.update(cx, |buffer, cx| {
1660 buffer.set_completion_triggers(
1661 server
1662 .capabilities()
1663 .completion_provider
1664 .as_ref()
1665 .and_then(|provider| provider.trigger_characters.clone())
1666 .unwrap_or(Vec::new()),
1667 cx,
1668 )
1669 });
1670 self.buffer_snapshots
1671 .insert(buffer_id, vec![(0, initial_snapshot)]);
1672 }
1673 }
1674 }
1675 }
1676
1677 fn unregister_buffer_from_language_server(
1678 &mut self,
1679 buffer: &ModelHandle<Buffer>,
1680 old_path: PathBuf,
1681 cx: &mut ModelContext<Self>,
1682 ) {
1683 buffer.update(cx, |buffer, cx| {
1684 buffer.update_diagnostics(Default::default(), cx);
1685 self.buffer_snapshots.remove(&buffer.remote_id());
1686 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1687 language_server
1688 .notify::<lsp::notification::DidCloseTextDocument>(
1689 lsp::DidCloseTextDocumentParams {
1690 text_document: lsp::TextDocumentIdentifier::new(
1691 lsp::Url::from_file_path(old_path).unwrap(),
1692 ),
1693 },
1694 )
1695 .log_err();
1696 }
1697 });
1698 }
1699
    /// Reacts to buffer events: relays operations to collaborators and keeps
    /// language servers in sync with LSP document notifications.
    ///
    /// Returns `None` whenever a prerequisite (server, file, snapshot) is
    /// missing; the return value is otherwise unused by callers.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Forward CRDT operations to the server while shared.
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Translate the edits made since the last snapshot we sent
                // into LSP incremental content-change events.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Remember this snapshot so the next edit diffs against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // Notify every server running in this worktree, not just the
                // one attached to this buffer's language.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1787
1788 fn language_servers_for_worktree(
1789 &self,
1790 worktree_id: WorktreeId,
1791 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1792 self.language_servers.iter().filter_map(
1793 move |((language_server_worktree_id, _), server)| {
1794 if *language_server_worktree_id == worktree_id {
1795 Some(server)
1796 } else {
1797 None
1798 }
1799 },
1800 )
1801 }
1802
1803 fn assign_language_to_buffer(
1804 &mut self,
1805 buffer: &ModelHandle<Buffer>,
1806 cx: &mut ModelContext<Self>,
1807 ) -> Option<()> {
1808 // If the buffer has a language, set it and start the language server if we haven't already.
1809 let full_path = buffer.read(cx).file()?.full_path(cx);
1810 let language = self.languages.select_language(&full_path)?;
1811 buffer.update(cx, |buffer, cx| {
1812 buffer.set_language(Some(language.clone()), cx);
1813 });
1814
1815 let file = File::from_dyn(buffer.read(cx).file())?;
1816 let worktree = file.worktree.read(cx).as_local()?;
1817 let worktree_id = worktree.id();
1818 let worktree_abs_path = worktree.abs_path().clone();
1819 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1820
1821 None
1822 }
1823
    /// Starts (at most once per `(worktree, adapter)` pair) a language server
    /// for `language` rooted at `worktree_path`.
    ///
    /// The `started_language_servers` entry holds the in-flight startup task,
    /// so repeated calls for the same key are no-ops. Once the server is
    /// initialized, this wires up its notification/request handlers, records
    /// its status, announces it to collaborators, and opens every
    /// already-open matching buffer with it.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    // Any failure during startup/initialize is logged and the
                    // task resolves to `None`.
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route `textDocument/publishDiagnostics` into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` from the project's
                    // language-server settings (by section, when requested).
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Surface `$/progress` notifications.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        // Record the server and its initial status.
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings; failure here is ignored.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Let collaborators know about the new server.
                        if let Some(project_id) = this.shared_remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Reuse an existing snapshot history or seed it
                                // with the buffer's current text at version 0.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
2036
2037 pub fn restart_language_servers_for_buffers(
2038 &mut self,
2039 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2040 cx: &mut ModelContext<Self>,
2041 ) -> Option<()> {
2042 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2043 .into_iter()
2044 .filter_map(|buffer| {
2045 let file = File::from_dyn(buffer.read(cx).file())?;
2046 let worktree = file.worktree.read(cx).as_local()?;
2047 let worktree_id = worktree.id();
2048 let worktree_abs_path = worktree.abs_path().clone();
2049 let full_path = file.full_path(cx);
2050 Some((worktree_id, worktree_abs_path, full_path))
2051 })
2052 .collect();
2053 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2054 let language = self.languages.select_language(&full_path)?;
2055 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2056 }
2057
2058 None
2059 }
2060
2061 fn restart_language_server(
2062 &mut self,
2063 worktree_id: WorktreeId,
2064 worktree_path: Arc<Path>,
2065 language: Arc<Language>,
2066 cx: &mut ModelContext<Self>,
2067 ) {
2068 let adapter = if let Some(adapter) = language.lsp_adapter() {
2069 adapter
2070 } else {
2071 return;
2072 };
2073 let key = (worktree_id, adapter.name());
2074 let server_to_shutdown = self.language_servers.remove(&key);
2075 self.started_language_servers.remove(&key);
2076 server_to_shutdown
2077 .as_ref()
2078 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
2079 cx.spawn_weak(|this, mut cx| async move {
2080 if let Some(this) = this.upgrade(&cx) {
2081 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2082 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2083 shutdown_task.await;
2084 }
2085 }
2086
2087 this.update(&mut cx, |this, cx| {
2088 this.start_language_server(worktree_id, worktree_path, language, cx);
2089 });
2090 }
2091 })
2092 .detach();
2093 }
2094
2095 fn on_lsp_diagnostics_published(
2096 &mut self,
2097 server_id: usize,
2098 mut params: lsp::PublishDiagnosticsParams,
2099 adapter: &Arc<dyn LspAdapter>,
2100 disk_based_diagnostics_progress_token: Option<&str>,
2101 cx: &mut ModelContext<Self>,
2102 ) {
2103 adapter.process_diagnostics(&mut params);
2104 if disk_based_diagnostics_progress_token.is_none() {
2105 self.disk_based_diagnostics_started(cx);
2106 self.broadcast_language_server_update(
2107 server_id,
2108 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2109 proto::LspDiskBasedDiagnosticsUpdating {},
2110 ),
2111 );
2112 }
2113 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2114 .log_err();
2115 if disk_based_diagnostics_progress_token.is_none() {
2116 self.disk_based_diagnostics_finished(cx);
2117 self.broadcast_language_server_update(
2118 server_id,
2119 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2120 proto::LspDiskBasedDiagnosticsUpdated {},
2121 ),
2122 );
2123 }
2124 }
2125
    /// Routes an LSP `$/progress` notification.
    ///
    /// Progress reported under the adapter's disk-based-diagnostics token is
    /// counted so that overlapping begin/end pairs only emit started/finished
    /// events on the outer edges. All other tokens are surfaced as generic
    /// work status. Every event is also broadcast to collaborators.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are supported; numeric tokens are ignored.
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        // Ignore progress from servers we no longer track.
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // Only the first overlapping begin triggers "started".
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Disk-based diagnostic progress reports carry no useful
                // incremental detail, so only other tokens are forwarded.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // Only the last outstanding end triggers "finished".
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2220
2221 fn on_lsp_work_start(
2222 &mut self,
2223 language_server_id: usize,
2224 token: String,
2225 cx: &mut ModelContext<Self>,
2226 ) {
2227 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2228 status.pending_work.insert(
2229 token,
2230 LanguageServerProgress {
2231 message: None,
2232 percentage: None,
2233 last_update_at: Instant::now(),
2234 },
2235 );
2236 cx.notify();
2237 }
2238 }
2239
2240 fn on_lsp_work_progress(
2241 &mut self,
2242 language_server_id: usize,
2243 token: String,
2244 progress: LanguageServerProgress,
2245 cx: &mut ModelContext<Self>,
2246 ) {
2247 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2248 status.pending_work.insert(token, progress);
2249 cx.notify();
2250 }
2251 }
2252
2253 fn on_lsp_work_end(
2254 &mut self,
2255 language_server_id: usize,
2256 token: String,
2257 cx: &mut ModelContext<Self>,
2258 ) {
2259 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2260 status.pending_work.remove(&token);
2261 cx.notify();
2262 }
2263 }
2264
2265 async fn on_lsp_workspace_edit(
2266 this: WeakModelHandle<Self>,
2267 params: lsp::ApplyWorkspaceEditParams,
2268 server_id: usize,
2269 adapter: Arc<dyn LspAdapter>,
2270 language_server: Arc<LanguageServer>,
2271 mut cx: AsyncAppContext,
2272 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2273 let this = this
2274 .upgrade(&cx)
2275 .ok_or_else(|| anyhow!("project project closed"))?;
2276 let transaction = Self::deserialize_workspace_edit(
2277 this.clone(),
2278 params.edit,
2279 true,
2280 adapter.clone(),
2281 language_server.clone(),
2282 &mut cx,
2283 )
2284 .await
2285 .log_err();
2286 this.update(&mut cx, |this, _| {
2287 if let Some(transaction) = transaction {
2288 this.last_workspace_edits_by_language_server
2289 .insert(server_id, transaction);
2290 }
2291 });
2292 Ok(lsp::ApplyWorkspaceEditResponse {
2293 applied: true,
2294 failed_change: None,
2295 failure_reason: None,
2296 })
2297 }
2298
2299 fn broadcast_language_server_update(
2300 &self,
2301 language_server_id: usize,
2302 event: proto::update_language_server::Variant,
2303 ) {
2304 if let Some(project_id) = self.shared_remote_id() {
2305 self.client
2306 .send(proto::UpdateLanguageServer {
2307 project_id,
2308 language_server_id: language_server_id as u64,
2309 variant: Some(event),
2310 })
2311 .log_err();
2312 }
2313 }
2314
2315 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2316 for (_, server) in self.language_servers.values() {
2317 server
2318 .notify::<lsp::notification::DidChangeConfiguration>(
2319 lsp::DidChangeConfigurationParams {
2320 settings: settings.clone(),
2321 },
2322 )
2323 .ok();
2324 }
2325 *self.language_server_settings.lock() = settings;
2326 }
2327
    /// Returns the status (pending work and diagnostic passes) of every
    /// tracked language server, in the map's iteration order.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
2333
2334 pub fn update_diagnostics(
2335 &mut self,
2336 params: lsp::PublishDiagnosticsParams,
2337 disk_based_sources: &[&str],
2338 cx: &mut ModelContext<Self>,
2339 ) -> Result<()> {
2340 let abs_path = params
2341 .uri
2342 .to_file_path()
2343 .map_err(|_| anyhow!("URI is not a file"))?;
2344 let mut diagnostics = Vec::default();
2345 let mut primary_diagnostic_group_ids = HashMap::default();
2346 let mut sources_by_group_id = HashMap::default();
2347 let mut supporting_diagnostics = HashMap::default();
2348 for diagnostic in ¶ms.diagnostics {
2349 let source = diagnostic.source.as_ref();
2350 let code = diagnostic.code.as_ref().map(|code| match code {
2351 lsp::NumberOrString::Number(code) => code.to_string(),
2352 lsp::NumberOrString::String(code) => code.clone(),
2353 });
2354 let range = range_from_lsp(diagnostic.range);
2355 let is_supporting = diagnostic
2356 .related_information
2357 .as_ref()
2358 .map_or(false, |infos| {
2359 infos.iter().any(|info| {
2360 primary_diagnostic_group_ids.contains_key(&(
2361 source,
2362 code.clone(),
2363 range_from_lsp(info.location.range),
2364 ))
2365 })
2366 });
2367
2368 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2369 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2370 });
2371
2372 if is_supporting {
2373 supporting_diagnostics.insert(
2374 (source, code.clone(), range),
2375 (diagnostic.severity, is_unnecessary),
2376 );
2377 } else {
2378 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2379 let is_disk_based = source.map_or(false, |source| {
2380 disk_based_sources.contains(&source.as_str())
2381 });
2382
2383 sources_by_group_id.insert(group_id, source);
2384 primary_diagnostic_group_ids
2385 .insert((source, code.clone(), range.clone()), group_id);
2386
2387 diagnostics.push(DiagnosticEntry {
2388 range,
2389 diagnostic: Diagnostic {
2390 code: code.clone(),
2391 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2392 message: diagnostic.message.clone(),
2393 group_id,
2394 is_primary: true,
2395 is_valid: true,
2396 is_disk_based,
2397 is_unnecessary,
2398 },
2399 });
2400 if let Some(infos) = &diagnostic.related_information {
2401 for info in infos {
2402 if info.location.uri == params.uri && !info.message.is_empty() {
2403 let range = range_from_lsp(info.location.range);
2404 diagnostics.push(DiagnosticEntry {
2405 range,
2406 diagnostic: Diagnostic {
2407 code: code.clone(),
2408 severity: DiagnosticSeverity::INFORMATION,
2409 message: info.message.clone(),
2410 group_id,
2411 is_primary: false,
2412 is_valid: true,
2413 is_disk_based,
2414 is_unnecessary: false,
2415 },
2416 });
2417 }
2418 }
2419 }
2420 }
2421 }
2422
2423 for entry in &mut diagnostics {
2424 let diagnostic = &mut entry.diagnostic;
2425 if !diagnostic.is_primary {
2426 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2427 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2428 source,
2429 diagnostic.code.clone(),
2430 entry.range.clone(),
2431 )) {
2432 if let Some(severity) = severity {
2433 diagnostic.severity = severity;
2434 }
2435 diagnostic.is_unnecessary = is_unnecessary;
2436 }
2437 }
2438 }
2439
2440 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2441 Ok(())
2442 }
2443
2444 pub fn update_diagnostic_entries(
2445 &mut self,
2446 abs_path: PathBuf,
2447 version: Option<i32>,
2448 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2449 cx: &mut ModelContext<Project>,
2450 ) -> Result<(), anyhow::Error> {
2451 let (worktree, relative_path) = self
2452 .find_local_worktree(&abs_path, cx)
2453 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2454 if !worktree.read(cx).is_visible() {
2455 return Ok(());
2456 }
2457
2458 let project_path = ProjectPath {
2459 worktree_id: worktree.read(cx).id(),
2460 path: relative_path.into(),
2461 };
2462 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2463 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2464 }
2465
2466 let updated = worktree.update(cx, |worktree, cx| {
2467 worktree
2468 .as_local_mut()
2469 .ok_or_else(|| anyhow!("not a local worktree"))?
2470 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2471 })?;
2472 if updated {
2473 cx.emit(Event::DiagnosticsUpdated(project_path));
2474 }
2475 Ok(())
2476 }
2477
    /// Installs `diagnostics` into the given open buffer.
    ///
    /// `version` is the LSP document version the diagnostics were computed
    /// against; positions are interpreted on the snapshot matching that
    /// version. Disk-based diagnostics are re-mapped through any unsaved
    /// edits, and all ranges are clipped to valid buffer positions before
    /// being installed.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break ordering for diagnostics sharing a range: primary first,
        // then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending and end descending so that enclosing
        // ranges come before the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            // Clip to valid positions in the snapshot.
            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2546
    /// Reloads the given buffers from disk, discarding unsaved changes.
    ///
    /// Only dirty buffers are considered. Local buffers are reloaded
    /// directly; remote buffers are reloaded by the host via RPC. Returns a
    /// `ProjectTransaction` mapping each affected buffer to the transaction
    /// produced by the reload. When `push_to_history` is false, transactions
    /// are removed from the buffers' undo history after being recorded.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the dirty buffers into local and remote; clean buffers
        // (and buffers without files) are skipped entirely.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote buffers can only be reloaded if this project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to reload the remote buffers and merge the
            // resulting transactions into ours.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Reload local buffers one at a time, collecting transactions.
            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2610
2611 pub fn format(
2612 &self,
2613 buffers: HashSet<ModelHandle<Buffer>>,
2614 push_to_history: bool,
2615 cx: &mut ModelContext<Project>,
2616 ) -> Task<Result<ProjectTransaction>> {
2617 let mut local_buffers = Vec::new();
2618 let mut remote_buffers = None;
2619 for buffer_handle in buffers {
2620 let buffer = buffer_handle.read(cx);
2621 if let Some(file) = File::from_dyn(buffer.file()) {
2622 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2623 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2624 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2625 }
2626 } else {
2627 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2628 }
2629 } else {
2630 return Task::ready(Ok(Default::default()));
2631 }
2632 }
2633
2634 let remote_buffers = self.remote_id().zip(remote_buffers);
2635 let client = self.client.clone();
2636
2637 cx.spawn(|this, mut cx| async move {
2638 let mut project_transaction = ProjectTransaction::default();
2639
2640 if let Some((project_id, remote_buffers)) = remote_buffers {
2641 let response = client
2642 .request(proto::FormatBuffers {
2643 project_id,
2644 buffer_ids: remote_buffers
2645 .iter()
2646 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2647 .collect(),
2648 })
2649 .await?
2650 .transaction
2651 .ok_or_else(|| anyhow!("missing transaction"))?;
2652 project_transaction = this
2653 .update(&mut cx, |this, cx| {
2654 this.deserialize_project_transaction(response, push_to_history, cx)
2655 })
2656 .await?;
2657 }
2658
2659 for (buffer, buffer_abs_path, language_server) in local_buffers {
2660 let text_document = lsp::TextDocumentIdentifier::new(
2661 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2662 );
2663 let capabilities = &language_server.capabilities();
2664 let tab_size = cx.update(|cx| {
2665 let language_name = buffer.read(cx).language().map(|language| language.name());
2666 cx.global::<Settings>().tab_size(language_name.as_deref())
2667 });
2668 let lsp_edits = if capabilities
2669 .document_formatting_provider
2670 .as_ref()
2671 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2672 {
2673 language_server
2674 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2675 text_document,
2676 options: lsp::FormattingOptions {
2677 tab_size,
2678 insert_spaces: true,
2679 insert_final_newline: Some(true),
2680 ..Default::default()
2681 },
2682 work_done_progress_params: Default::default(),
2683 })
2684 .await?
2685 } else if capabilities
2686 .document_range_formatting_provider
2687 .as_ref()
2688 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2689 {
2690 let buffer_start = lsp::Position::new(0, 0);
2691 let buffer_end =
2692 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2693 language_server
2694 .request::<lsp::request::RangeFormatting>(
2695 lsp::DocumentRangeFormattingParams {
2696 text_document,
2697 range: lsp::Range::new(buffer_start, buffer_end),
2698 options: lsp::FormattingOptions {
2699 tab_size: 4,
2700 insert_spaces: true,
2701 insert_final_newline: Some(true),
2702 ..Default::default()
2703 },
2704 work_done_progress_params: Default::default(),
2705 },
2706 )
2707 .await?
2708 } else {
2709 continue;
2710 };
2711
2712 if let Some(lsp_edits) = lsp_edits {
2713 let edits = this
2714 .update(&mut cx, |this, cx| {
2715 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2716 })
2717 .await?;
2718 buffer.update(&mut cx, |buffer, cx| {
2719 buffer.finalize_last_transaction();
2720 buffer.start_transaction();
2721 for (range, text) in edits {
2722 buffer.edit([(range, text)], cx);
2723 }
2724 if buffer.end_transaction(cx).is_some() {
2725 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2726 if !push_to_history {
2727 buffer.forget_transaction(transaction.id);
2728 }
2729 project_transaction.0.insert(cx.handle(), transaction);
2730 }
2731 });
2732 }
2733 }
2734
2735 Ok(project_transaction)
2736 })
2737 }
2738
2739 pub fn definition<T: ToPointUtf16>(
2740 &self,
2741 buffer: &ModelHandle<Buffer>,
2742 position: T,
2743 cx: &mut ModelContext<Self>,
2744 ) -> Task<Result<Vec<Location>>> {
2745 let position = position.to_point_utf16(buffer.read(cx));
2746 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2747 }
2748
2749 pub fn references<T: ToPointUtf16>(
2750 &self,
2751 buffer: &ModelHandle<Buffer>,
2752 position: T,
2753 cx: &mut ModelContext<Self>,
2754 ) -> Task<Result<Vec<Location>>> {
2755 let position = position.to_point_utf16(buffer.read(cx));
2756 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2757 }
2758
2759 pub fn document_highlights<T: ToPointUtf16>(
2760 &self,
2761 buffer: &ModelHandle<Buffer>,
2762 position: T,
2763 cx: &mut ModelContext<Self>,
2764 ) -> Task<Result<Vec<DocumentHighlight>>> {
2765 let position = position.to_point_utf16(buffer.read(cx));
2766
2767 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2768 }
2769
    /// Searches for workspace symbols matching `query`.
    ///
    /// Locally, fans a `workspace/symbol` request out to every language
    /// server and merges the results, resolving each symbol's path against
    /// the project's worktrees. Remotely, forwards the query to the host.
    /// Individual server failures are logged and treated as empty results.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // One request per (worktree, language server) pair, each tagged
            // with enough context to resolve the results afterwards.
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer resolving the symbol into one of this
                            // project's worktrees; otherwise keep the source
                            // worktree and a path relative to it.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            // Ask the symbol's language for a styled label,
                            // falling back to the plain symbol name.
                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: delegate the whole query to the host.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2872
2873 pub fn open_buffer_for_symbol(
2874 &mut self,
2875 symbol: &Symbol,
2876 cx: &mut ModelContext<Self>,
2877 ) -> Task<Result<ModelHandle<Buffer>>> {
2878 if self.is_local() {
2879 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2880 symbol.source_worktree_id,
2881 symbol.language_server_name.clone(),
2882 )) {
2883 server.clone()
2884 } else {
2885 return Task::ready(Err(anyhow!(
2886 "language server for worktree and language not found"
2887 )));
2888 };
2889
2890 let worktree_abs_path = if let Some(worktree_abs_path) = self
2891 .worktree_for_id(symbol.worktree_id, cx)
2892 .and_then(|worktree| worktree.read(cx).as_local())
2893 .map(|local_worktree| local_worktree.abs_path())
2894 {
2895 worktree_abs_path
2896 } else {
2897 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2898 };
2899 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2900 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2901 uri
2902 } else {
2903 return Task::ready(Err(anyhow!("invalid symbol path")));
2904 };
2905
2906 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2907 } else if let Some(project_id) = self.remote_id() {
2908 let request = self.client.request(proto::OpenBufferForSymbol {
2909 project_id,
2910 symbol: Some(serialize_symbol(symbol)),
2911 });
2912 cx.spawn(|this, mut cx| async move {
2913 let response = request.await?;
2914 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2915 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2916 .await
2917 })
2918 } else {
2919 Task::ready(Err(anyhow!("project does not have a remote id")))
2920 }
2921 }
2922
2923 pub fn hover<T: ToPointUtf16>(
2924 &self,
2925 buffer: &ModelHandle<Buffer>,
2926 position: T,
2927 cx: &mut ModelContext<Self>,
2928 ) -> Task<Result<Option<Hover>>> {
2929 let position = position.to_point_utf16(buffer.read(cx));
2930 self.request_lsp(buffer.clone(), GetHover { position }, cx)
2931 }
2932
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Locally, sends `textDocument/completion` to the buffer's language
    /// server and converts each item into a `Completion` anchored in the
    /// buffer — inferring the replaced range from the word token under the
    /// cursor when the server doesn't supply one. Remotely, forwards the
    /// request to the host and waits for the buffer to catch up to the
    /// host's version before deserializing the results.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        // Buffers without a file cannot be completed against.
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The LSP response may be a flat array or a paged list.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily computed: the word-token range at the cursor,
                    // shared by all items lacking an explicit edit range.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote buffer: ask the host, then wait until this replica has
            // seen the buffer version the host computed completions against.
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3090
    /// Applies the additional text edits attached to an accepted completion.
    ///
    /// Locally, resolves the completion item with the language server and
    /// applies any `additional_text_edits` inside a single transaction;
    /// remotely, asks the host to do so and waits for the resulting edits to
    /// arrive. Returns the transaction when edits were applied, or `None`
    /// when there were no additional edits. When `push_to_history` is false,
    /// the transaction is removed from the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            // Without a language server there is nothing to resolve.
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Resolve the item; servers often attach additional edits
                // (e.g. auto-imports) only at resolve time.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to be replicated locally
                    // before reporting the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3172
3173 pub fn code_actions<T: Clone + ToOffset>(
3174 &self,
3175 buffer_handle: &ModelHandle<Buffer>,
3176 range: Range<T>,
3177 cx: &mut ModelContext<Self>,
3178 ) -> Task<Result<Vec<CodeAction>>> {
3179 let buffer_handle = buffer_handle.clone();
3180 let buffer = buffer_handle.read(cx);
3181 let snapshot = buffer.snapshot();
3182 let relevant_diagnostics = snapshot
3183 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3184 .map(|entry| entry.to_lsp_diagnostic_stub())
3185 .collect();
3186 let buffer_id = buffer.remote_id();
3187 let worktree;
3188 let buffer_abs_path;
3189 if let Some(file) = File::from_dyn(buffer.file()) {
3190 worktree = file.worktree.clone();
3191 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3192 } else {
3193 return Task::ready(Ok(Default::default()));
3194 };
3195 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3196
3197 if worktree.read(cx).as_local().is_some() {
3198 let buffer_abs_path = buffer_abs_path.unwrap();
3199 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3200 {
3201 server.clone()
3202 } else {
3203 return Task::ready(Ok(Default::default()));
3204 };
3205
3206 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3207 cx.foreground().spawn(async move {
3208 if !lang_server.capabilities().code_action_provider.is_some() {
3209 return Ok(Default::default());
3210 }
3211
3212 Ok(lang_server
3213 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3214 text_document: lsp::TextDocumentIdentifier::new(
3215 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3216 ),
3217 range: lsp_range,
3218 work_done_progress_params: Default::default(),
3219 partial_result_params: Default::default(),
3220 context: lsp::CodeActionContext {
3221 diagnostics: relevant_diagnostics,
3222 only: Some(vec![
3223 lsp::CodeActionKind::QUICKFIX,
3224 lsp::CodeActionKind::REFACTOR,
3225 lsp::CodeActionKind::REFACTOR_EXTRACT,
3226 lsp::CodeActionKind::SOURCE,
3227 ]),
3228 },
3229 })
3230 .await?
3231 .unwrap_or_default()
3232 .into_iter()
3233 .filter_map(|entry| {
3234 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3235 Some(CodeAction {
3236 range: range.clone(),
3237 lsp_action,
3238 })
3239 } else {
3240 None
3241 }
3242 })
3243 .collect())
3244 })
3245 } else if let Some(project_id) = self.remote_id() {
3246 let rpc = self.client.clone();
3247 let version = buffer.version();
3248 cx.spawn_weak(|_, mut cx| async move {
3249 let response = rpc
3250 .request(proto::GetCodeActions {
3251 project_id,
3252 buffer_id,
3253 start: Some(language::proto::serialize_anchor(&range.start)),
3254 end: Some(language::proto::serialize_anchor(&range.end)),
3255 version: serialize_version(&version),
3256 })
3257 .await?;
3258
3259 buffer_handle
3260 .update(&mut cx, |buffer, _| {
3261 buffer.wait_for_version(deserialize_version(response.version))
3262 })
3263 .await;
3264
3265 response
3266 .actions
3267 .into_iter()
3268 .map(language::proto::deserialize_code_action)
3269 .collect()
3270 })
3271 } else {
3272 Task::ready(Ok(Default::default()))
3273 }
3274 }
3275
    /// Applies a code action, returning the resulting transactions grouped by
    /// buffer.
    ///
    /// Locally this first brings the action's edit up to date — either via
    /// `codeAction/resolve` (when the server stashed request params in the
    /// action's `data` field) or by re-requesting actions and matching by
    /// title — then applies the workspace edit or executes the command.
    /// Remotely the action is forwarded to the host over RPC.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // Refresh the stashed range (the buffer may have changed
                    // since the action was produced), then resolve the action
                    // so the server fills in the concrete edit.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request actions and pick the one
                    // with a matching title to get an up-to-date edit.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear edits previously recorded for this server so we
                    // only report the edits produced while this command runs.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    // The server applies its edits via `workspace/applyEdit`
                    // during execution; collect whatever it sent us.
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3371
3372 async fn deserialize_workspace_edit(
3373 this: ModelHandle<Self>,
3374 edit: lsp::WorkspaceEdit,
3375 push_to_history: bool,
3376 lsp_adapter: Arc<dyn LspAdapter>,
3377 language_server: Arc<LanguageServer>,
3378 cx: &mut AsyncAppContext,
3379 ) -> Result<ProjectTransaction> {
3380 let fs = this.read_with(cx, |this, _| this.fs.clone());
3381 let mut operations = Vec::new();
3382 if let Some(document_changes) = edit.document_changes {
3383 match document_changes {
3384 lsp::DocumentChanges::Edits(edits) => {
3385 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3386 }
3387 lsp::DocumentChanges::Operations(ops) => operations = ops,
3388 }
3389 } else if let Some(changes) = edit.changes {
3390 operations.extend(changes.into_iter().map(|(uri, edits)| {
3391 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3392 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3393 uri,
3394 version: None,
3395 },
3396 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3397 })
3398 }));
3399 }
3400
3401 let mut project_transaction = ProjectTransaction::default();
3402 for operation in operations {
3403 match operation {
3404 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3405 let abs_path = op
3406 .uri
3407 .to_file_path()
3408 .map_err(|_| anyhow!("can't convert URI to path"))?;
3409
3410 if let Some(parent_path) = abs_path.parent() {
3411 fs.create_dir(parent_path).await?;
3412 }
3413 if abs_path.ends_with("/") {
3414 fs.create_dir(&abs_path).await?;
3415 } else {
3416 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3417 .await?;
3418 }
3419 }
3420 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3421 let source_abs_path = op
3422 .old_uri
3423 .to_file_path()
3424 .map_err(|_| anyhow!("can't convert URI to path"))?;
3425 let target_abs_path = op
3426 .new_uri
3427 .to_file_path()
3428 .map_err(|_| anyhow!("can't convert URI to path"))?;
3429 fs.rename(
3430 &source_abs_path,
3431 &target_abs_path,
3432 op.options.map(Into::into).unwrap_or_default(),
3433 )
3434 .await?;
3435 }
3436 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3437 let abs_path = op
3438 .uri
3439 .to_file_path()
3440 .map_err(|_| anyhow!("can't convert URI to path"))?;
3441 let options = op.options.map(Into::into).unwrap_or_default();
3442 if abs_path.ends_with("/") {
3443 fs.remove_dir(&abs_path, options).await?;
3444 } else {
3445 fs.remove_file(&abs_path, options).await?;
3446 }
3447 }
3448 lsp::DocumentChangeOperation::Edit(op) => {
3449 let buffer_to_edit = this
3450 .update(cx, |this, cx| {
3451 this.open_local_buffer_via_lsp(
3452 op.text_document.uri,
3453 lsp_adapter.clone(),
3454 language_server.clone(),
3455 cx,
3456 )
3457 })
3458 .await?;
3459
3460 let edits = this
3461 .update(cx, |this, cx| {
3462 let edits = op.edits.into_iter().map(|edit| match edit {
3463 lsp::OneOf::Left(edit) => edit,
3464 lsp::OneOf::Right(edit) => edit.text_edit,
3465 });
3466 this.edits_from_lsp(
3467 &buffer_to_edit,
3468 edits,
3469 op.text_document.version,
3470 cx,
3471 )
3472 })
3473 .await?;
3474
3475 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3476 buffer.finalize_last_transaction();
3477 buffer.start_transaction();
3478 for (range, text) in edits {
3479 buffer.edit([(range, text)], cx);
3480 }
3481 let transaction = if buffer.end_transaction(cx).is_some() {
3482 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3483 if !push_to_history {
3484 buffer.forget_transaction(transaction.id);
3485 }
3486 Some(transaction)
3487 } else {
3488 None
3489 };
3490
3491 transaction
3492 });
3493 if let Some(transaction) = transaction {
3494 project_transaction.0.insert(buffer_to_edit, transaction);
3495 }
3496 }
3497 }
3498 }
3499
3500 Ok(project_transaction)
3501 }
3502
3503 pub fn prepare_rename<T: ToPointUtf16>(
3504 &self,
3505 buffer: ModelHandle<Buffer>,
3506 position: T,
3507 cx: &mut ModelContext<Self>,
3508 ) -> Task<Result<Option<Range<Anchor>>>> {
3509 let position = position.to_point_utf16(buffer.read(cx));
3510 self.request_lsp(buffer, PrepareRename { position }, cx)
3511 }
3512
3513 pub fn perform_rename<T: ToPointUtf16>(
3514 &self,
3515 buffer: ModelHandle<Buffer>,
3516 position: T,
3517 new_name: String,
3518 push_to_history: bool,
3519 cx: &mut ModelContext<Self>,
3520 ) -> Task<Result<ProjectTransaction>> {
3521 let position = position.to_point_utf16(buffer.read(cx));
3522 self.request_lsp(
3523 buffer,
3524 PerformRename {
3525 position,
3526 new_name,
3527 push_to_history,
3528 },
3529 cx,
3530 )
3531 }
3532
    /// Searches the project for `query`, returning matched anchor ranges
    /// grouped by buffer.
    ///
    /// Locally this runs a three-stage pipeline: (1) background workers scan
    /// every visible file on disk for candidate paths, (2) a foreground task
    /// opens a buffer for each candidate (already-open buffers are included
    /// directly), and (3) background workers run the real search over each
    /// buffer snapshot. Remotely the search is delegated to the host.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: statically partition the combined file list evenly
            // across `workers` tasks; each sends matching paths downstream.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        // Reused across iterations to avoid a
                                        // fresh allocation per file.
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            // Skip snapshots entirely outside
                                            // this worker's slice.
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver hung up; stop
                                                    // scanning.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed snapshots of open buffers first, then open a
            // buffer for each candidate path found on disk.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Skip buffers already searched as open buffers.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: run the actual search over each snapshot in parallel,
            // one result map per worker, merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host performs the search; deserialize the
            // returned locations into local buffers and anchors.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3735
3736 fn request_lsp<R: LspCommand>(
3737 &self,
3738 buffer_handle: ModelHandle<Buffer>,
3739 request: R,
3740 cx: &mut ModelContext<Self>,
3741 ) -> Task<Result<R::Response>>
3742 where
3743 <R::LspRequest as lsp::request::Request>::Result: Send,
3744 {
3745 let buffer = buffer_handle.read(cx);
3746 if self.is_local() {
3747 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3748 if let Some((file, (_, language_server))) =
3749 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3750 {
3751 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3752 return cx.spawn(|this, cx| async move {
3753 if !request.check_capabilities(&language_server.capabilities()) {
3754 return Ok(Default::default());
3755 }
3756
3757 let response = language_server
3758 .request::<R::LspRequest>(lsp_params)
3759 .await
3760 .context("lsp request failed")?;
3761 request
3762 .response_from_lsp(response, this, buffer_handle, cx)
3763 .await
3764 });
3765 }
3766 } else if let Some(project_id) = self.remote_id() {
3767 let rpc = self.client.clone();
3768 let message = request.to_proto(project_id, buffer);
3769 dbg!(&message);
3770 return cx.spawn(|this, cx| async move {
3771 let response = rpc.request(message).await?;
3772 dbg!(&response);
3773 request
3774 .response_from_proto(response, this, buffer_handle, cx)
3775 .await
3776 });
3777 }
3778 Task::ready(Ok(Default::default()))
3779 }
3780
3781 pub fn find_or_create_local_worktree(
3782 &mut self,
3783 abs_path: impl AsRef<Path>,
3784 visible: bool,
3785 cx: &mut ModelContext<Self>,
3786 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3787 let abs_path = abs_path.as_ref();
3788 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3789 Task::ready(Ok((tree.clone(), relative_path.into())))
3790 } else {
3791 let worktree = self.create_local_worktree(abs_path, visible, cx);
3792 cx.foreground()
3793 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3794 }
3795 }
3796
3797 pub fn find_local_worktree(
3798 &self,
3799 abs_path: &Path,
3800 cx: &AppContext,
3801 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3802 for tree in self.worktrees(cx) {
3803 if let Some(relative_path) = tree
3804 .read(cx)
3805 .as_local()
3806 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3807 {
3808 return Some((tree.clone(), relative_path.into()));
3809 }
3810 }
3811 None
3812 }
3813
3814 pub fn is_shared(&self) -> bool {
3815 match &self.client_state {
3816 ProjectClientState::Local { is_shared, .. } => *is_shared,
3817 ProjectClientState::Remote { .. } => false,
3818 }
3819 }
3820
    /// Creates (or joins an in-flight load of) a local worktree rooted at
    /// `abs_path`, registering it with the project and sharing it with
    /// collaborators when the project is currently shared.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        // Concurrent requests for the same path share a single cached future.
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Clear the cache entry before propagating any error,
                        // so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let project_id = project.update(&mut cx, |project, cx| {
                            project.add_worktree(&worktree, cx);
                            project.shared_remote_id()
                        });

                        // If the project is shared, share the new worktree
                        // with collaborators too; failures are only logged.
                        if let Some(project_id) = project_id {
                            worktree
                                .update(&mut cx, |worktree, cx| {
                                    worktree.as_local_mut().unwrap().share(project_id, cx)
                                })
                                .await
                                .log_err();
                        }

                        Ok(worktree)
                    }
                    // `Shared` futures require a cloneable output, so wrap the
                    // error in an `Arc`.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3879
3880 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3881 self.worktrees.retain(|worktree| {
3882 if let Some(worktree) = worktree.upgrade(cx) {
3883 let id = worktree.read(cx).id();
3884 if id == id_to_remove {
3885 cx.emit(Event::WorktreeRemoved(id));
3886 false
3887 } else {
3888 true
3889 }
3890 } else {
3891 false
3892 }
3893 });
3894 self.metadata_changed(true, cx);
3895 cx.notify();
3896 }
3897
    /// Registers a worktree with the project, wiring up observation of its
    /// changes and choosing a strong or weak handle based on visibility and
    /// sharing state.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Local worktree events drive buffer/file reconciliation.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // Shared projects, visible worktrees, and remote worktrees are kept
        // alive by the project; invisible local worktrees are held weakly so
        // they can be dropped when nothing else references them.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            // Prune dangling weak handles once the worktree is released.
            cx.observe_release(&worktree, |this, _, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }
        self.metadata_changed(true, cx);
        cx.emit(Event::WorktreeAdded);
        cx.notify();
    }
3928
    /// Reconciles open buffers with a local worktree after its snapshot
    /// changed: refreshes each buffer's `File` (entry id, mtime, path),
    /// notifies collaborators, and re-registers renamed buffers with their
    /// language servers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer resolving by entry id (tracks renames); fall
                        // back to the old path; otherwise the entry is gone
                        // and the file is marked as having no entry.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Keep collaborators' view of this buffer's file in
                        // sync; failures are only logged.
                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer has been dropped; forget its entry below.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // A rename changes the buffer's URI, so tear down and re-establish
        // its language-server registration (and possibly its language).
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
4008
4009 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4010 let new_active_entry = entry.and_then(|project_path| {
4011 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4012 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4013 Some(entry.id)
4014 });
4015 if new_active_entry != self.active_entry {
4016 self.active_entry = new_active_entry;
4017 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4018 }
4019 }
4020
4021 pub fn is_running_disk_based_diagnostics(&self) -> bool {
4022 self.language_server_statuses
4023 .values()
4024 .any(|status| status.pending_diagnostic_updates > 0)
4025 }
4026
4027 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4028 let mut summary = DiagnosticSummary::default();
4029 for (_, path_summary) in self.diagnostic_summaries(cx) {
4030 summary.error_count += path_summary.error_count;
4031 summary.warning_count += path_summary.warning_count;
4032 }
4033 summary
4034 }
4035
4036 pub fn diagnostic_summaries<'a>(
4037 &'a self,
4038 cx: &'a AppContext,
4039 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4040 self.worktrees(cx).flat_map(move |worktree| {
4041 let worktree = worktree.read(cx);
4042 let worktree_id = worktree.id();
4043 worktree
4044 .diagnostic_summaries()
4045 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4046 })
4047 }
4048
4049 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4050 if self
4051 .language_server_statuses
4052 .values()
4053 .map(|status| status.pending_diagnostic_updates)
4054 .sum::<isize>()
4055 == 1
4056 {
4057 cx.emit(Event::DiskBasedDiagnosticsStarted);
4058 }
4059 }
4060
4061 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4062 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4063 if self
4064 .language_server_statuses
4065 .values()
4066 .map(|status| status.pending_diagnostic_updates)
4067 .sum::<isize>()
4068 == 0
4069 {
4070 cx.emit(Event::DiskBasedDiagnosticsFinished);
4071 }
4072 }
4073
4074 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4075 self.active_entry
4076 }
4077
4078 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4079 self.worktree_for_id(path.worktree_id, cx)?
4080 .read(cx)
4081 .entry_for_path(&path.path)
4082 .map(|entry| entry.id)
4083 }
4084
4085 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4086 let worktree = self.worktree_for_entry(entry_id, cx)?;
4087 let worktree = worktree.read(cx);
4088 let worktree_id = worktree.id();
4089 let path = worktree.entry_for_id(entry_id)?.path.clone();
4090 Some(ProjectPath { worktree_id, path })
4091 }
4092
4093 // RPC message handlers
4094
    /// Handles another user's request to join this project: auto-accepts when
    /// the requester is already a collaborator, otherwise surfaces the
    /// request to the UI via `ContactRequestedJoin`.
    async fn handle_request_join_project(
        this: ModelHandle<Self>,
        message: TypedEnvelope<proto::RequestJoinProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_id = message.payload.requester_id;
        if this.read_with(&cx, |project, _| {
            project.collaborators.values().any(|c| c.user.id == user_id)
        }) {
            // Already collaborating (e.g. a reconnect); accept immediately.
            this.update(&mut cx, |this, cx| {
                this.respond_to_join_request(user_id, true, cx)
            });
        } else {
            // Fetch the requester's user record so the UI can display it.
            let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
            let user = user_store
                .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
                .await?;
            this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
        }
        Ok(())
    }
4117
4118 async fn handle_unregister_project(
4119 this: ModelHandle<Self>,
4120 _: TypedEnvelope<proto::UnregisterProject>,
4121 _: Arc<Client>,
4122 mut cx: AsyncAppContext,
4123 ) -> Result<()> {
4124 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4125 Ok(())
4126 }
4127
4128 async fn handle_project_unshared(
4129 this: ModelHandle<Self>,
4130 _: TypedEnvelope<proto::ProjectUnshared>,
4131 _: Arc<Client>,
4132 mut cx: AsyncAppContext,
4133 ) -> Result<()> {
4134 this.update(&mut cx, |this, cx| this.unshared(cx));
4135 Ok(())
4136 }
4137
4138 async fn handle_add_collaborator(
4139 this: ModelHandle<Self>,
4140 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4141 _: Arc<Client>,
4142 mut cx: AsyncAppContext,
4143 ) -> Result<()> {
4144 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4145 let collaborator = envelope
4146 .payload
4147 .collaborator
4148 .take()
4149 .ok_or_else(|| anyhow!("empty collaborator"))?;
4150
4151 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4152 this.update(&mut cx, |this, cx| {
4153 this.collaborators
4154 .insert(collaborator.peer_id, collaborator);
4155 cx.notify();
4156 });
4157
4158 Ok(())
4159 }
4160
4161 async fn handle_remove_collaborator(
4162 this: ModelHandle<Self>,
4163 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4164 _: Arc<Client>,
4165 mut cx: AsyncAppContext,
4166 ) -> Result<()> {
4167 this.update(&mut cx, |this, cx| {
4168 let peer_id = PeerId(envelope.payload.peer_id);
4169 let replica_id = this
4170 .collaborators
4171 .remove(&peer_id)
4172 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4173 .replica_id;
4174 for (_, buffer) in &this.opened_buffers {
4175 if let Some(buffer) = buffer.upgrade(cx) {
4176 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4177 }
4178 }
4179
4180 cx.emit(Event::CollaboratorLeft(peer_id));
4181 cx.notify();
4182 Ok(())
4183 })
4184 }
4185
4186 async fn handle_join_project_request_cancelled(
4187 this: ModelHandle<Self>,
4188 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4189 _: Arc<Client>,
4190 mut cx: AsyncAppContext,
4191 ) -> Result<()> {
4192 let user = this
4193 .update(&mut cx, |this, cx| {
4194 this.user_store.update(cx, |user_store, cx| {
4195 user_store.fetch_user(envelope.payload.requester_id, cx)
4196 })
4197 })
4198 .await?;
4199
4200 this.update(&mut cx, |_, cx| {
4201 cx.emit(Event::ContactCancelledJoinRequest(user));
4202 });
4203
4204 Ok(())
4205 }
4206
    /// Handles a project-metadata update from the host: reconciles our
    /// worktrees with the host's list, reusing existing ones, creating
    /// remote worktrees for new ids, and emitting removal events for those
    /// that disappeared.
    async fn handle_update_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProject>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let replica_id = this.replica_id();
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

            // Index our current worktrees by id, dropping any handles that
            // have already been released.
            let mut old_worktrees_by_id = this
                .worktrees
                .drain(..)
                .filter_map(|worktree| {
                    let worktree = worktree.upgrade(cx)?;
                    Some((worktree.read(cx).id(), worktree))
                })
                .collect::<HashMap<_, _>>();

            for worktree in envelope.payload.worktrees {
                if let Some(old_worktree) =
                    old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
                {
                    // Still present on the host; keep our existing handle.
                    this.worktrees.push(WorktreeHandle::Strong(old_worktree));
                } else {
                    // New worktree: start with empty entries; `load_task`
                    // streams the contents in.
                    let worktree = proto::Worktree {
                        id: worktree.id,
                        root_name: worktree.root_name,
                        entries: Default::default(),
                        diagnostic_summaries: Default::default(),
                        visible: worktree.visible,
                        scan_id: 0,
                    };
                    let (worktree, load_task) =
                        Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                    this.add_worktree(&worktree, cx);
                    load_task.detach();
                }
            }

            this.metadata_changed(true, cx);
            // Whatever remains in the map is no longer part of the project.
            for (id, _) in old_worktrees_by_id {
                cx.emit(Event::WorktreeRemoved(id));
            }

            Ok(())
        })
    }
4255
4256 async fn handle_update_worktree(
4257 this: ModelHandle<Self>,
4258 envelope: TypedEnvelope<proto::UpdateWorktree>,
4259 _: Arc<Client>,
4260 mut cx: AsyncAppContext,
4261 ) -> Result<()> {
4262 this.update(&mut cx, |this, cx| {
4263 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4264 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4265 worktree.update(cx, |worktree, _| {
4266 let worktree = worktree.as_remote_mut().unwrap();
4267 worktree.update_from_remote(envelope)
4268 })?;
4269 }
4270 Ok(())
4271 })
4272 }
4273
4274 async fn handle_create_project_entry(
4275 this: ModelHandle<Self>,
4276 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4277 _: Arc<Client>,
4278 mut cx: AsyncAppContext,
4279 ) -> Result<proto::ProjectEntryResponse> {
4280 let worktree = this.update(&mut cx, |this, cx| {
4281 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4282 this.worktree_for_id(worktree_id, cx)
4283 .ok_or_else(|| anyhow!("worktree not found"))
4284 })?;
4285 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4286 let entry = worktree
4287 .update(&mut cx, |worktree, cx| {
4288 let worktree = worktree.as_local_mut().unwrap();
4289 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4290 worktree.create_entry(path, envelope.payload.is_directory, cx)
4291 })
4292 .await?;
4293 Ok(proto::ProjectEntryResponse {
4294 entry: Some((&entry).into()),
4295 worktree_scan_id: worktree_scan_id as u64,
4296 })
4297 }
4298
    /// Renames (moves) a project entry in a local worktree on behalf of a peer,
    /// responding with the updated entry.
    async fn handle_rename_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id observed before the rename, echoed back to the caller.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4326
    /// Copies a project entry to a new path in a local worktree on behalf of a
    /// peer, responding with the copied entry.
    async fn handle_copy_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id observed before the copy, echoed back to the caller.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4354
    /// Deletes a project entry from a local worktree on behalf of a peer. The
    /// response carries no entry, only the worktree scan id observed before
    /// the deletion.
    async fn handle_delete_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .delete_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: None,
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4381
    /// Records a diagnostic summary for a path in a remote worktree and emits
    /// a `DiagnosticsUpdated` event so the UI can refresh. Messages for
    /// unknown worktrees or without a summary are ignored.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
4408
4409 async fn handle_start_language_server(
4410 this: ModelHandle<Self>,
4411 envelope: TypedEnvelope<proto::StartLanguageServer>,
4412 _: Arc<Client>,
4413 mut cx: AsyncAppContext,
4414 ) -> Result<()> {
4415 let server = envelope
4416 .payload
4417 .server
4418 .ok_or_else(|| anyhow!("invalid server"))?;
4419 this.update(&mut cx, |this, cx| {
4420 this.language_server_statuses.insert(
4421 server.id as usize,
4422 LanguageServerStatus {
4423 name: server.name,
4424 pending_work: Default::default(),
4425 pending_diagnostic_updates: 0,
4426 },
4427 );
4428 cx.notify();
4429 });
4430 Ok(())
4431 }
4432
    /// Dispatches a language-server status update from a peer to the matching
    /// local bookkeeping method (work start/progress/end, or disk-based
    /// diagnostics lifecycle notifications).
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamped locally on receipt, not by the sender.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
4481
4482 async fn handle_update_buffer(
4483 this: ModelHandle<Self>,
4484 envelope: TypedEnvelope<proto::UpdateBuffer>,
4485 _: Arc<Client>,
4486 mut cx: AsyncAppContext,
4487 ) -> Result<()> {
4488 this.update(&mut cx, |this, cx| {
4489 let payload = envelope.payload.clone();
4490 let buffer_id = payload.buffer_id;
4491 let ops = payload
4492 .operations
4493 .into_iter()
4494 .map(|op| language::proto::deserialize_operation(op))
4495 .collect::<Result<Vec<_>, _>>()?;
4496 let is_remote = this.is_remote();
4497 match this.opened_buffers.entry(buffer_id) {
4498 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4499 OpenBuffer::Strong(buffer) => {
4500 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4501 }
4502 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4503 OpenBuffer::Weak(_) => {}
4504 },
4505 hash_map::Entry::Vacant(e) => {
4506 assert!(
4507 is_remote,
4508 "received buffer update from {:?}",
4509 envelope.original_sender_id
4510 );
4511 e.insert(OpenBuffer::Loading(ops));
4512 }
4513 }
4514 Ok(())
4515 })
4516 }
4517
4518 async fn handle_update_buffer_file(
4519 this: ModelHandle<Self>,
4520 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4521 _: Arc<Client>,
4522 mut cx: AsyncAppContext,
4523 ) -> Result<()> {
4524 this.update(&mut cx, |this, cx| {
4525 let payload = envelope.payload.clone();
4526 let buffer_id = payload.buffer_id;
4527 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4528 let worktree = this
4529 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4530 .ok_or_else(|| anyhow!("no such worktree"))?;
4531 let file = File::from_proto(file, worktree.clone(), cx)?;
4532 let buffer = this
4533 .opened_buffers
4534 .get_mut(&buffer_id)
4535 .and_then(|b| b.upgrade(cx))
4536 .ok_or_else(|| anyhow!("no such buffer"))?;
4537 buffer.update(cx, |buffer, cx| {
4538 buffer.file_updated(Box::new(file), cx).detach();
4539 });
4540 Ok(())
4541 })
4542 }
4543
    /// Saves a buffer on behalf of a peer, first waiting until this replica
    /// has caught up to the version the peer requested, then responding with
    /// the saved version and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all edits the peer has seen have been applied here.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
4576
    /// Reloads the requested buffers from disk on behalf of a peer and
    /// responds with the resulting project transaction, serialized for the
    /// original sender.
    async fn handle_reload_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the transaction is pushed on the
            // requesting peer's side instead.
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })?;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4605
    /// Formats the requested buffers on behalf of a peer and responds with the
    /// resulting project transaction, serialized for the original sender.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the requesting peer records the
            // transaction in its own undo history.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4634
    /// Computes completions at a position on behalf of a peer, first waiting
    /// for this replica to reach the peer's buffer version. The response
    /// includes the buffer version the completions were computed against so
    /// the peer can resolve the anchors.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting: it may have advanced past the
        // requested one, and the response must reflect what was actually used.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4669
    /// Applies a completion's additional (non-primary) edits to a buffer on
    /// behalf of a peer, responding with the resulting transaction, if any.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to reconstruct the completion's label.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `push_to_history: false` — the peer records the transaction itself.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4702
    /// Computes code actions for a range on behalf of a peer, first waiting
    /// for this replica to reach the peer's buffer version. The response
    /// carries the buffer version the actions were computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Re-read the version after waiting; it may have advanced.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4745
    /// Applies a code action to a buffer on behalf of a peer and responds with
    /// the resulting project transaction, serialized for the original sender.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `push_to_history: false` — the peer records the transaction itself.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4776
    /// Generic handler for LSP requests proxied over the wire: deserializes
    /// the request via the `LspCommand` impl, runs it against the local
    /// language server, and serializes the response for the original sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                // NOTE(review): this upgrades via the outer async `cx` (the
                // closure's context argument is ignored) — appears intentional
                // but worth confirming.
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // `from_proto` may wait for the buffer to catch up to the request's version.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version the request will run against, for the response.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4817
4818 async fn handle_get_project_symbols(
4819 this: ModelHandle<Self>,
4820 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4821 _: Arc<Client>,
4822 mut cx: AsyncAppContext,
4823 ) -> Result<proto::GetProjectSymbolsResponse> {
4824 let symbols = this
4825 .update(&mut cx, |this, cx| {
4826 this.symbols(&envelope.payload.query, cx)
4827 })
4828 .await?;
4829
4830 Ok(proto::GetProjectSymbolsResponse {
4831 symbols: symbols.iter().map(serialize_symbol).collect(),
4832 })
4833 }
4834
    /// Runs a project-wide search on behalf of a peer, responding with one
    /// `Location` per matching range.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Serializing per-range is deliberate: the first call sends
                    // the buffer's full state to the peer, subsequent calls
                    // send only its id.
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
4864
    /// Opens the buffer containing a previously-reported symbol on behalf of a
    /// peer, after verifying the symbol's signature to ensure the peer is only
    /// requesting paths this project actually sent it.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            // Recompute the nonce-keyed signature and compare it to the one
            // embedded in the request; a mismatch means a forged/stale symbol.
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4895
4896 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4897 let mut hasher = Sha256::new();
4898 hasher.update(worktree_id.to_proto().to_be_bytes());
4899 hasher.update(path.to_string_lossy().as_bytes());
4900 hasher.update(self.nonce.to_be_bytes());
4901 hasher.finalize().as_slice().try_into().unwrap()
4902 }
4903
4904 async fn handle_open_buffer_by_id(
4905 this: ModelHandle<Self>,
4906 envelope: TypedEnvelope<proto::OpenBufferById>,
4907 _: Arc<Client>,
4908 mut cx: AsyncAppContext,
4909 ) -> Result<proto::OpenBufferResponse> {
4910 let peer_id = envelope.original_sender_id()?;
4911 let buffer = this
4912 .update(&mut cx, |this, cx| {
4913 this.open_buffer_by_id(envelope.payload.id, cx)
4914 })
4915 .await?;
4916 this.update(&mut cx, |this, cx| {
4917 Ok(proto::OpenBufferResponse {
4918 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4919 })
4920 })
4921 }
4922
    /// Opens a buffer by worktree-relative path on behalf of a peer and
    /// responds with the buffer serialized for that peer.
    async fn handle_open_buffer_by_path(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferByPath>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
4948
4949 fn serialize_project_transaction_for_peer(
4950 &mut self,
4951 project_transaction: ProjectTransaction,
4952 peer_id: PeerId,
4953 cx: &AppContext,
4954 ) -> proto::ProjectTransaction {
4955 let mut serialized_transaction = proto::ProjectTransaction {
4956 buffers: Default::default(),
4957 transactions: Default::default(),
4958 };
4959 for (buffer, transaction) in project_transaction.0 {
4960 serialized_transaction
4961 .buffers
4962 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4963 serialized_transaction
4964 .transactions
4965 .push(language::proto::serialize_transaction(&transaction));
4966 }
4967 serialized_transaction
4968 }
4969
    /// Reconstructs a project transaction received from a peer: resolves each
    /// serialized buffer to a local buffer handle, waits until the referenced
    /// edits have been applied locally, and optionally records each
    /// transaction in the buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel vectors; zip pairs them up.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The edits referenced by the transaction may not have arrived
                // yet; wait before exposing the transaction to callers.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
5003
5004 fn serialize_buffer_for_peer(
5005 &mut self,
5006 buffer: &ModelHandle<Buffer>,
5007 peer_id: PeerId,
5008 cx: &AppContext,
5009 ) -> proto::Buffer {
5010 let buffer_id = buffer.read(cx).remote_id();
5011 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5012 if shared_buffers.insert(buffer_id) {
5013 proto::Buffer {
5014 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5015 }
5016 } else {
5017 proto::Buffer {
5018 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5019 }
5020 }
5021 }
5022
    /// Resolves a serialized buffer received from a peer into a local buffer
    /// handle.
    ///
    /// An `Id` variant refers to a buffer whose state was sent previously; if
    /// it isn't open yet, this waits on the `opened_buffer` watch until it
    /// appears. A `State` variant carries the full buffer state, which is
    /// instantiated and registered locally.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer's state may still be in flight; poll the open
                    // set each time another buffer is registered.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Resolve the buffer's file against a local worktree, if any.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting in the `Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
5081
    /// Reconstructs a `Symbol` from its wire representation, resolving its
    /// language (for labeling) from the symbol's path.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): transmuting an untrusted integer from the wire into the
        // symbol-kind type is unsound if the value is out of range for that
        // type — this should be a checked conversion. Flagging rather than
        // changing, since the target type's representation isn't visible here.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
5111
    /// Notifies a local buffer that it was saved on the other side of the
    /// connection, updating its saved version and mtime. Unknown buffer ids
    /// are ignored (the buffer may have been closed locally).
    async fn handle_buffer_saved(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::BufferSaved>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let version = deserialize_version(envelope.payload.version);
        let mtime = envelope
            .payload
            .mtime
            .ok_or_else(|| anyhow!("missing mtime"))?
            .into();

        this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx));
            if let Some(buffer) = buffer {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_save(version, mtime, None, cx);
                });
            }
            Ok(())
        })
    }
5138
5139 async fn handle_buffer_reloaded(
5140 this: ModelHandle<Self>,
5141 envelope: TypedEnvelope<proto::BufferReloaded>,
5142 _: Arc<Client>,
5143 mut cx: AsyncAppContext,
5144 ) -> Result<()> {
5145 let payload = envelope.payload.clone();
5146 let version = deserialize_version(payload.version);
5147 let mtime = payload
5148 .mtime
5149 .ok_or_else(|| anyhow!("missing mtime"))?
5150 .into();
5151 this.update(&mut cx, |this, cx| {
5152 let buffer = this
5153 .opened_buffers
5154 .get(&payload.buffer_id)
5155 .and_then(|buffer| buffer.upgrade(cx));
5156 if let Some(buffer) = buffer {
5157 buffer.update(cx, |buffer, cx| {
5158 buffer.did_reload(version, mtime, cx);
5159 });
5160 }
5161 Ok(())
5162 })
5163 }
5164
5165 pub fn match_paths<'a>(
5166 &self,
5167 query: &'a str,
5168 include_ignored: bool,
5169 smart_case: bool,
5170 max_results: usize,
5171 cancel_flag: &'a AtomicBool,
5172 cx: &AppContext,
5173 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5174 let worktrees = self
5175 .worktrees(cx)
5176 .filter(|worktree| worktree.read(cx).is_visible())
5177 .collect::<Vec<_>>();
5178 let include_root_name = worktrees.len() > 1;
5179 let candidate_sets = worktrees
5180 .into_iter()
5181 .map(|worktree| CandidateSet {
5182 snapshot: worktree.read(cx).snapshot(),
5183 include_ignored,
5184 include_root_name,
5185 })
5186 .collect::<Vec<_>>();
5187
5188 let background = cx.background().clone();
5189 async move {
5190 fuzzy::match_paths(
5191 candidate_sets.as_slice(),
5192 query,
5193 smart_case,
5194 max_results,
5195 cancel_flag,
5196 background,
5197 )
5198 .await
5199 }
5200 }
5201
    /// Converts LSP text edits into anchor-range edits against the buffer
    /// snapshot that corresponds to `version` (or the current text if no
    /// version is given). Adjacent/newline-separated edits are coalesced, and
    /// multiline replacements are diffed so anchors in unchanged regions are
    /// preserved. Runs on the background executor.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            // Sort by start position so the coalescing pass below only has to
            // look at consecutive pairs.
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .collect::<Vec<_>>();
            lsp_edits.sort_by_key(|(range, _)| range.start);

            let mut lsp_edits = lsp_edits.into_iter().peekable();
            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates fall outside the snapshot;
                // these indicate a misbehaving language server.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the previous change was an Equal run, so
                    // consecutive deletes/inserts merge into a single edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement: no diffing needed.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5306
5307 fn buffer_snapshot_for_lsp_version(
5308 &mut self,
5309 buffer: &ModelHandle<Buffer>,
5310 version: Option<i32>,
5311 cx: &AppContext,
5312 ) -> Result<TextBufferSnapshot> {
5313 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5314
5315 if let Some(version) = version {
5316 let buffer_id = buffer.read(cx).remote_id();
5317 let snapshots = self
5318 .buffer_snapshots
5319 .get_mut(&buffer_id)
5320 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5321 let mut found_snapshot = None;
5322 snapshots.retain(|(snapshot_version, snapshot)| {
5323 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5324 false
5325 } else {
5326 if *snapshot_version == version {
5327 found_snapshot = Some(snapshot.clone());
5328 }
5329 true
5330 }
5331 });
5332
5333 found_snapshot.ok_or_else(|| {
5334 anyhow!(
5335 "snapshot not found for buffer {} at version {}",
5336 buffer_id,
5337 version
5338 )
5339 })
5340 } else {
5341 Ok((buffer.read(cx)).text_snapshot())
5342 }
5343 }
5344
5345 fn language_server_for_buffer(
5346 &self,
5347 buffer: &Buffer,
5348 cx: &AppContext,
5349 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5350 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5351 let worktree_id = file.worktree_id(cx);
5352 self.language_servers
5353 .get(&(worktree_id, language.lsp_adapter()?.name()))
5354 } else {
5355 None
5356 }
5357 }
5358}
5359
5360impl ProjectStore {
5361 pub fn new(db: Arc<Db>) -> Self {
5362 Self {
5363 db,
5364 projects: Default::default(),
5365 }
5366 }
5367
5368 pub fn projects<'a>(
5369 &'a self,
5370 cx: &'a AppContext,
5371 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5372 self.projects
5373 .iter()
5374 .filter_map(|project| project.upgrade(cx))
5375 }
5376
5377 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5378 if let Err(ix) = self
5379 .projects
5380 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5381 {
5382 self.projects.insert(ix, project);
5383 }
5384 cx.notify();
5385 }
5386
5387 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5388 let mut did_change = false;
5389 self.projects.retain(|project| {
5390 if project.is_upgradable(cx) {
5391 true
5392 } else {
5393 did_change = true;
5394 false
5395 }
5396 });
5397 if did_change {
5398 cx.notify();
5399 }
5400 }
5401}
5402
5403impl WorktreeHandle {
5404 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5405 match self {
5406 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5407 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5408 }
5409 }
5410}
5411
5412impl OpenBuffer {
5413 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5414 match self {
5415 OpenBuffer::Strong(handle) => Some(handle.clone()),
5416 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5417 OpenBuffer::Loading(_) => None,
5418 }
5419 }
5420}
5421
/// A set of file paths drawn from a worktree snapshot, used as the input to
/// fuzzy path matching.
struct CandidateSet {
    snapshot: Snapshot,
    /// Whether ignored (e.g. gitignored) files are part of the set.
    include_ignored: bool,
    /// Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
5427
5428impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5429 type Candidates = CandidateSetIter<'a>;
5430
5431 fn id(&self) -> usize {
5432 self.snapshot.id().to_usize()
5433 }
5434
5435 fn len(&self) -> usize {
5436 if self.include_ignored {
5437 self.snapshot.file_count()
5438 } else {
5439 self.snapshot.visible_file_count()
5440 }
5441 }
5442
5443 fn prefix(&self) -> Arc<str> {
5444 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5445 self.snapshot.root_name().into()
5446 } else if self.include_root_name {
5447 format!("{}/", self.snapshot.root_name()).into()
5448 } else {
5449 "".into()
5450 }
5451 }
5452
5453 fn candidates(&'a self, start: usize) -> Self::Candidates {
5454 CandidateSetIter {
5455 traversal: self.snapshot.files(self.include_ignored, start),
5456 }
5457 }
5458}
5459
/// Iterator over the file entries of a `CandidateSet`, yielding one
/// `PathMatchCandidate` per file.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5463
5464impl<'a> Iterator for CandidateSetIter<'a> {
5465 type Item = PathMatchCandidate<'a>;
5466
5467 fn next(&mut self) -> Option<Self::Item> {
5468 self.traversal.next().map(|entry| {
5469 if let EntryKind::File(char_bag) = entry.kind {
5470 PathMatchCandidate {
5471 path: &entry.path,
5472 char_bag,
5473 }
5474 } else {
5475 unreachable!()
5476 }
5477 })
5478 }
5479}
5480
// `ProjectStore` emits no events; this impl exists so it can be held in a
// `ModelHandle`.
impl Entity for ProjectStore {
    type Event = ();
}
5484
5485impl Entity for Project {
5486 type Event = Event;
5487
5488 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5489 self.project_store.update(cx, ProjectStore::prune_projects);
5490
5491 match &self.client_state {
5492 ProjectClientState::Local { remote_id_rx, .. } => {
5493 if let Some(project_id) = *remote_id_rx.borrow() {
5494 self.client
5495 .send(proto::UnregisterProject { project_id })
5496 .log_err();
5497 }
5498 }
5499 ProjectClientState::Remote { remote_id, .. } => {
5500 self.client
5501 .send(proto::LeaveProject {
5502 project_id: *remote_id,
5503 })
5504 .log_err();
5505 }
5506 }
5507 }
5508
5509 fn app_will_quit(
5510 &mut self,
5511 _: &mut MutableAppContext,
5512 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5513 let shutdown_futures = self
5514 .language_servers
5515 .drain()
5516 .filter_map(|(_, (_, server))| server.shutdown())
5517 .collect::<Vec<_>>();
5518 Some(
5519 async move {
5520 futures::future::join_all(shutdown_futures).await;
5521 }
5522 .boxed(),
5523 )
5524 }
5525}
5526
5527impl Collaborator {
5528 fn from_proto(
5529 message: proto::Collaborator,
5530 user_store: &ModelHandle<UserStore>,
5531 cx: &mut AsyncAppContext,
5532 ) -> impl Future<Output = Result<Self>> {
5533 let user = user_store.update(cx, |user_store, cx| {
5534 user_store.fetch_user(message.user_id, cx)
5535 });
5536
5537 async move {
5538 Ok(Self {
5539 peer_id: PeerId(message.peer_id),
5540 user: user.await?,
5541 replica_id: message.replica_id as ReplicaId,
5542 })
5543 }
5544 }
5545}
5546
5547impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5548 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5549 Self {
5550 worktree_id,
5551 path: path.as_ref().into(),
5552 }
5553 }
5554}
5555
5556impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5557 fn from(options: lsp::CreateFileOptions) -> Self {
5558 Self {
5559 overwrite: options.overwrite.unwrap_or(false),
5560 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5561 }
5562 }
5563}
5564
5565impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5566 fn from(options: lsp::RenameFileOptions) -> Self {
5567 Self {
5568 overwrite: options.overwrite.unwrap_or(false),
5569 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5570 }
5571 }
5572}
5573
5574impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5575 fn from(options: lsp::DeleteFileOptions) -> Self {
5576 Self {
5577 recursive: options.recursive.unwrap_or(false),
5578 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5579 }
5580 }
5581}
5582
/// Converts a `Symbol` into its protobuf representation for sending over
/// the wire.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY(review): reinterprets `symbol.kind` as the proto field's
        // integer type. This assumes both types have identical size and
        // representation — TODO confirm, and prefer an explicit numeric
        // conversion over `transmute` if one is available.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Lossy conversion: non-UTF-8 path bytes become U+FFFD.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5602
/// Computes `path` relative to `base`, walking both component lists in
/// lock-step: shared leading components are dropped, and each remaining
/// `base` component becomes a `..` in the result.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_iter = path.components();
    let mut base_iter = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        let step = (path_iter.next(), base_iter.next());
        match step {
            // Both exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of `path` is appended as-is.
            (Some(path_comp), None) => {
                result.push(path_comp);
                result.extend(path_iter.by_ref());
                break;
            }
            // Path exhausted: each leftover base component is one `..`.
            (None, _) => result.push(Component::ParentDir),
            // Still inside the shared prefix: drop the component.
            (Some(path_comp), Some(base_comp))
                if result.is_empty() && path_comp == base_comp => {}
            // A `.` in base matches nothing; keep the path component.
            (Some(path_comp), Some(base_comp)) if base_comp == Component::CurDir => {
                result.push(path_comp)
            }
            // Paths diverge: back out of the remaining base components, then
            // append the rest of `path`.
            (Some(path_comp), Some(_)) => {
                result.push(Component::ParentDir);
                result.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                result.push(path_comp);
                result.extend(path_iter.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
5631
5632impl Item for Buffer {
5633 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5634 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5635 }
5636}
5637
5638#[cfg(test)]
5639mod tests {
5640 use crate::worktree::WorktreeHandle;
5641
5642 use super::{Event, *};
5643 use fs::RealFs;
5644 use futures::{future, StreamExt};
5645 use gpui::test::subscribe;
5646 use language::{
5647 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5648 OffsetRangeExt, Point, ToPoint,
5649 };
5650 use lsp::Url;
5651 use serde_json::json;
5652 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5653 use unindent::Unindent as _;
5654 use util::{assert_set_eq, test::temp_tree};
5655
    // Verifies that a project whose root is a symlink is scanned correctly,
    // that a symlinked directory inside the tree resolves to the same inodes
    // as its target, and that fuzzy path matching returns the expected files.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Make the worktree root itself a symlink, and add a symlinked
        // directory ("finnochio") pointing at "fennel" inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            // apple, date, endive, grape, and grape-via-finnochio.
            assert_eq!(tree.file_count(), 5);
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" — should hit the files under "banana".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5709
    // End-to-end exercise of language-server lifecycle management: servers
    // start on demand per language, receive open/change/save/close
    // notifications only for matching buffers, follow buffers across renames
    // (including language changes), and are restarted on request.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Give the renamed buffer a diagnostic, to verify below that it is
        // cleared when the buffer's language changes.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers receive a shutdown request before being replaced.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
6086
    // Verifies that diagnostics published for two single-file worktrees are
    // routed to the correct buffer, by checking each buffer's highlighted
    // chunks afterward.
    #[gpui::test]
    async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "let a = 1;",
                "b.rs": "let b = 2;"
            }),
        )
        .await;

        // Each file is opened as its own single-file worktree.
        let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

        let buffer_a = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        // Publish one diagnostic per file: an error in a.rs, a warning in b.rs.
        project.update(cx, |project, cx| {
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/a.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::ERROR),
                            message: "error 1".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
            project
                .update_diagnostics(
                    lsp::PublishDiagnosticsParams {
                        uri: Url::from_file_path("/dir/b.rs").unwrap(),
                        version: None,
                        diagnostics: vec![lsp::Diagnostic {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 5),
                            ),
                            severity: Some(lsp::DiagnosticSeverity::WARNING),
                            message: "error 2".to_string(),
                            ..Default::default()
                        }],
                    },
                    &[],
                    cx,
                )
                .unwrap();
        });

        // Each buffer should carry only its own diagnostic.
        buffer_a.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("a", Some(DiagnosticSeverity::ERROR)),
                    (" = 1;", None),
                ]
            );
        });
        buffer_b.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let ", None),
                    ("b", Some(DiagnosticSeverity::WARNING)),
                    (" = 2;", None),
                ]
            );
        });
    }
6182
    // Verifies the project's event stream around disk-based diagnostics:
    // nested progress start/end pairs collapse into a single Started/Finished
    // sequence, diagnostic updates emit DiagnosticsUpdated events, and
    // publishing unchanged (empty) diagnostics twice yields only one event.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let worktree_id =
            project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested start/end pairs must not produce extra Started/Finished
        // events; only the outermost pair counts.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // Opening the diagnosed file shows the published diagnostic.
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });

        // Ensure publishing empty diagnostics twice only results in one update event.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        cx.foreground().run_until_parked();
        assert_eq!(futures::poll!(events.next()), Poll::Pending);
    }
6312
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not wedge the project: the
    // replacement server's progress drives the event stream, and the project
    // reports diagnostics as finished once the new server ends its progress.
    #[gpui::test]
    async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            disk_based_diagnostics_progress_token: Some(progress_token),
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

        let project = Project::test(fs, ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate diagnostics starting to update.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;

        // Restart the server before the diagnostics finish updating.
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers([buffer], cx);
        });
        let mut events = subscribe(&project, cx);

        // Simulate the newly started server sending more diagnostics.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // All diagnostics are considered done, despite the old server's diagnostic
        // task never completing.
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );
        project.read_with(cx, |project, _| {
            assert!(!project.is_running_disk_based_diagnostics());
        });
    }
6375
// Verifies that diagnostics published by the language server against an
// older document version are translated through the local edits made since
// that version, that overlapping diagnostics are highlighted correctly, and
// that a publish referencing a newer version supersedes older ones.
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Diagnostics whose source is "disk" are treated as disk-based.
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    // Each change notification carries a strictly newer document version.
    assert!(
        change_notification_1.text_document.version > open_notification.text_document.version
    );

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("disk".to_string()),
                    message: "undefined variable 'CCC'".to_string(),
                    ..Default::default()
                },
            ],
        },
    );

    // The diagnostics have moved down since they were created.
    // (They were published for the pre-edit version, and the buffer was
    // shifted down two rows by the "\n\n" insertion above.)
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends mid-diagnostic only yields the overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                // This warning's range contains the error's range above.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "unreachable statement".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error wins; the rest of
        // the warning's range is highlighted as a warning.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version
            > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(change_notification_2.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // Entries come back sorted by position, with ranges adjusted for
        // the indentation/signature edits made above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
6658
6659 #[gpui::test]
6660 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6661 cx.foreground().forbid_parking();
6662
6663 let text = concat!(
6664 "let one = ;\n", //
6665 "let two = \n",
6666 "let three = 3;\n",
6667 );
6668
6669 let fs = FakeFs::new(cx.background());
6670 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6671
6672 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6673 let buffer = project
6674 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6675 .await
6676 .unwrap();
6677
6678 project.update(cx, |project, cx| {
6679 project
6680 .update_buffer_diagnostics(
6681 &buffer,
6682 vec![
6683 DiagnosticEntry {
6684 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6685 diagnostic: Diagnostic {
6686 severity: DiagnosticSeverity::ERROR,
6687 message: "syntax error 1".to_string(),
6688 ..Default::default()
6689 },
6690 },
6691 DiagnosticEntry {
6692 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6693 diagnostic: Diagnostic {
6694 severity: DiagnosticSeverity::ERROR,
6695 message: "syntax error 2".to_string(),
6696 ..Default::default()
6697 },
6698 },
6699 ],
6700 None,
6701 cx,
6702 )
6703 .unwrap();
6704 });
6705
6706 // An empty range is extended forward to include the following character.
6707 // At the end of a line, an empty range is extended backward to include
6708 // the preceding character.
6709 buffer.read_with(cx, |buffer, _| {
6710 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6711 assert_eq!(
6712 chunks
6713 .iter()
6714 .map(|(s, d)| (s.as_str(), *d))
6715 .collect::<Vec<_>>(),
6716 &[
6717 ("let one = ", None),
6718 (";", Some(DiagnosticSeverity::ERROR)),
6719 ("\nlet two =", None),
6720 (" ", Some(DiagnosticSeverity::ERROR)),
6721 ("\nlet three = 3;\n", None)
6722 ]
6723 );
6724 });
6725 }
6726
// Verifies that LSP text edits computed against a past document version are
// correctly rebased onto the current buffer contents: edits are interpreted
// in the coordinates of the version the server saw, then translated through
// the local edits that happened since.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server received when the document was opened;
    // the server's edits below will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The ranges below are in the coordinates of `lsp_document_version`,
    // i.e. the original, unedited text.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the server's changes and
    // the local comments inserted after the server's snapshot.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
6892
// Verifies that a server-sent "whole file rewrite" style diff is minimized:
// `edits_from_lsp` should coalesce a large set of insertions and deletions
// into the small set of edits that actually change the text.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse to two minimal edits: rewrite the
        // first import and remove the now-duplicated second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7011
// Verifies that `edits_from_lsp` tolerates malformed server output:
// unordered edits and inverted ranges (end before start) are normalized,
// producing the same minimal edit set as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 8),
                            lsp::Position::new(0, 4),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the disorder, the result matches the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7126
7127 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7128 buffer: &Buffer,
7129 range: Range<T>,
7130 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7131 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7132 for chunk in buffer.snapshot().chunks(range, true) {
7133 if chunks.last().map_or(false, |prev_chunk| {
7134 prev_chunk.1 == chunk.diagnostic_severity
7135 }) {
7136 chunks.last_mut().unwrap().0.push_str(chunk.text);
7137 } else {
7138 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7139 }
7140 }
7141 chunks
7142 }
7143
7144 #[gpui::test]
7145 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7146 let dir = temp_tree(json!({
7147 "root": {
7148 "dir1": {},
7149 "dir2": {
7150 "dir3": {}
7151 }
7152 }
7153 }));
7154
7155 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7156 let cancel_flag = Default::default();
7157 let results = project
7158 .read_with(cx, |project, cx| {
7159 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7160 })
7161 .await;
7162
7163 assert!(results.is_empty());
7164 }
7165
// Verifies go-to-definition across files: the target file is opened in a
// new, invisible worktree, and that worktree is released once the returned
// definition (and its buffer handle) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs belongs to the project; a.rs will be pulled in by the
    // definition request.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports the definition of the symbol at offset 22 of
    // b.rs as range 9..10 in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // The definition target lives in a second, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: all the project's worktrees as (absolute path, is_visible).
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
7260
// Verifies that when a completion item provides no explicit edit range,
// the range is inferred from the word under the cursor ("fqn" here), and
// `insert_text` is preferred over the label as the replacement text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the buffer, after the partial
    // word "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The item carries `insert_text` but no text edit / edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred old range covers exactly the 3-character word "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
7318
// Verifies the command-based code-action flow: when resolving an action
// yields no edits, the action's command is executed, and buffer changes
// arrive via a server-initiated `workspace/applyEdit` request, which must
// be captured in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-carrying) action.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" to a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edits are undoable as a single step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
7434
7435 #[gpui::test]
7436 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7437 let fs = FakeFs::new(cx.background());
7438 fs.insert_tree(
7439 "/dir",
7440 json!({
7441 "file1": "the old contents",
7442 }),
7443 )
7444 .await;
7445
7446 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7447 let buffer = project
7448 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7449 .await
7450 .unwrap();
7451 buffer
7452 .update(cx, |buffer, cx| {
7453 assert_eq!(buffer.text(), "the old contents");
7454 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7455 buffer.save(cx)
7456 })
7457 .await
7458 .unwrap();
7459
7460 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7461 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7462 }
7463
7464 #[gpui::test]
7465 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7466 let fs = FakeFs::new(cx.background());
7467 fs.insert_tree(
7468 "/dir",
7469 json!({
7470 "file1": "the old contents",
7471 }),
7472 )
7473 .await;
7474
7475 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7476 let buffer = project
7477 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7478 .await
7479 .unwrap();
7480 buffer
7481 .update(cx, |buffer, cx| {
7482 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7483 buffer.save(cx)
7484 })
7485 .await
7486 .unwrap();
7487
7488 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7489 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7490 }
7491
7492 #[gpui::test]
7493 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7494 let fs = FakeFs::new(cx.background());
7495 fs.insert_tree("/dir", json!({})).await;
7496
7497 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7498 let buffer = project.update(cx, |project, cx| {
7499 project.create_buffer("", None, cx).unwrap()
7500 });
7501 buffer.update(cx, |buffer, cx| {
7502 buffer.edit([(0..0, "abc")], cx);
7503 assert!(buffer.is_dirty());
7504 assert!(!buffer.has_conflict());
7505 });
7506 project
7507 .update(cx, |project, cx| {
7508 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7509 })
7510 .await
7511 .unwrap();
7512 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7513 buffer.read_with(cx, |buffer, cx| {
7514 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7515 assert!(!buffer.is_dirty());
7516 assert!(!buffer.has_conflict());
7517 });
7518
7519 let opened_buffer = project
7520 .update(cx, |project, cx| {
7521 project.open_local_buffer("/dir/file1", cx)
7522 })
7523 .await
7524 .unwrap();
7525 assert_eq!(opened_buffer, buffer);
7526 }
7527
7528 #[gpui::test(retries = 5)]
7529 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7530 let dir = temp_tree(json!({
7531 "a": {
7532 "file1": "",
7533 "file2": "",
7534 "file3": "",
7535 },
7536 "b": {
7537 "c": {
7538 "file4": "",
7539 "file5": "",
7540 }
7541 }
7542 }));
7543
7544 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7545 let rpc = project.read_with(cx, |p, _| p.client.clone());
7546
7547 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7548 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7549 async move { buffer.await.unwrap() }
7550 };
7551 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7552 project.read_with(cx, |project, cx| {
7553 let tree = project.worktrees(cx).next().unwrap();
7554 tree.read(cx)
7555 .entry_for_path(path)
7556 .expect(&format!("no entry for path {}", path))
7557 .id
7558 })
7559 };
7560
7561 let buffer2 = buffer_for_path("a/file2", cx).await;
7562 let buffer3 = buffer_for_path("a/file3", cx).await;
7563 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7564 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7565
7566 let file2_id = id_for_path("a/file2", &cx);
7567 let file3_id = id_for_path("a/file3", &cx);
7568 let file4_id = id_for_path("b/c/file4", &cx);
7569
7570 // Create a remote copy of this worktree.
7571 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7572 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7573 let (remote, load_task) = cx.update(|cx| {
7574 Worktree::remote(
7575 1,
7576 1,
7577 initial_snapshot.to_proto(&Default::default(), true),
7578 rpc.clone(),
7579 cx,
7580 )
7581 });
7582 // tree
7583 load_task.await;
7584
7585 cx.read(|cx| {
7586 assert!(!buffer2.read(cx).is_dirty());
7587 assert!(!buffer3.read(cx).is_dirty());
7588 assert!(!buffer4.read(cx).is_dirty());
7589 assert!(!buffer5.read(cx).is_dirty());
7590 });
7591
7592 // Rename and delete files and directories.
7593 tree.flush_fs_events(&cx).await;
7594 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7595 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7596 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7597 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7598 tree.flush_fs_events(&cx).await;
7599
7600 let expected_paths = vec![
7601 "a",
7602 "a/file1",
7603 "a/file2.new",
7604 "b",
7605 "d",
7606 "d/file3",
7607 "d/file4",
7608 ];
7609
7610 cx.read(|app| {
7611 assert_eq!(
7612 tree.read(app)
7613 .paths()
7614 .map(|p| p.to_str().unwrap())
7615 .collect::<Vec<_>>(),
7616 expected_paths
7617 );
7618
7619 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7620 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7621 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7622
7623 assert_eq!(
7624 buffer2.read(app).file().unwrap().path().as_ref(),
7625 Path::new("a/file2.new")
7626 );
7627 assert_eq!(
7628 buffer3.read(app).file().unwrap().path().as_ref(),
7629 Path::new("d/file3")
7630 );
7631 assert_eq!(
7632 buffer4.read(app).file().unwrap().path().as_ref(),
7633 Path::new("d/file4")
7634 );
7635 assert_eq!(
7636 buffer5.read(app).file().unwrap().path().as_ref(),
7637 Path::new("b/c/file5")
7638 );
7639
7640 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7641 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7642 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7643 assert!(buffer5.read(app).file().unwrap().is_deleted());
7644 });
7645
7646 // Update the remote worktree. Check that it becomes consistent with the
7647 // local worktree.
7648 remote.update(cx, |remote, cx| {
7649 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7650 &initial_snapshot,
7651 1,
7652 1,
7653 true,
7654 );
7655 remote
7656 .as_remote_mut()
7657 .unwrap()
7658 .snapshot
7659 .apply_remote_update(update_message)
7660 .unwrap();
7661
7662 assert_eq!(
7663 remote
7664 .paths()
7665 .map(|p| p.to_str().unwrap())
7666 .collect::<Vec<_>>(),
7667 expected_paths
7668 );
7669 });
7670 }
7671
7672 #[gpui::test]
7673 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7674 let fs = FakeFs::new(cx.background());
7675 fs.insert_tree(
7676 "/dir",
7677 json!({
7678 "a.txt": "a-contents",
7679 "b.txt": "b-contents",
7680 }),
7681 )
7682 .await;
7683
7684 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7685
7686 // Spawn multiple tasks to open paths, repeating some paths.
7687 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7688 (
7689 p.open_local_buffer("/dir/a.txt", cx),
7690 p.open_local_buffer("/dir/b.txt", cx),
7691 p.open_local_buffer("/dir/a.txt", cx),
7692 )
7693 });
7694
7695 let buffer_a_1 = buffer_a_1.await.unwrap();
7696 let buffer_a_2 = buffer_a_2.await.unwrap();
7697 let buffer_b = buffer_b.await.unwrap();
7698 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7699 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7700
7701 // There is only one buffer per path.
7702 let buffer_a_id = buffer_a_1.id();
7703 assert_eq!(buffer_a_2.id(), buffer_a_id);
7704
7705 // Open the same path again while it is still open.
7706 drop(buffer_a_1);
7707 let buffer_a_3 = project
7708 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7709 .await
7710 .unwrap();
7711
7712 // There's still only one buffer per path.
7713 assert_eq!(buffer_a_3.id(), buffer_a_id);
7714 }
7715
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        // Verifies the buffer dirty/saved lifecycle: edits dirty the buffer,
        // saving cleans it, and deleting the backing file dirties it again —
        // with the matching `language::Event`s emitted in the expected order.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "abc",
                "file2": "def",
                "file3": "ghi",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        // Accumulates non-operation buffer events for the assertions below.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    // Ignore operation events; only record state changes.
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit([(1..2, "")], cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit([(1..1, "B")], cx);
            buffer.edit([(2..2, "D")], cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs.remove_file("/dir/file2".as_ref(), Default::default())
            .await
            .unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        // Dirty the buffer before deleting its file; only a FileHandleChanged
        // event should follow the deletion.
        buffer3.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "x")], cx);
        });
        events.borrow_mut().clear();
        fs.remove_file("/dir/file3".as_ref(), Default::default())
            .await
            .unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
7849
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        // Verifies how an open buffer reacts to its file changing on disk:
        // an unmodified buffer silently reloads (keeping anchors sensible),
        // while a modified buffer stays put and is flagged as conflicted.
        let initial_contents = "aaa\nbbbbb\nc\n";
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "the-file": initial_contents,
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
            .await
            .unwrap();

        // Place an anchor at column 1 of each of the first three rows, so we
        // can check how positions are remapped after the reload.
        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // The anchors should have moved with the text they were attached
            // to, rather than staying at fixed row/column positions.
            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
7925
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        // Verifies that LSP diagnostics whose `related_information` references
        // other diagnostics are grouped: each primary diagnostic and its hints
        // share a group id, and `diagnostic_group` returns exactly one group.
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Build a publish-diagnostics message with two logical groups:
        // "error 1" (warning + one hint) and "error 2" (error + two hints).
        // Hints point back at their primaries via `related_information`.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All diagnostics, in range order: group 0 is "error 1" + its hint,
        // group 1 is "error 2" + its two hints.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying each group by id returns only that group's entries.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
8176
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // Verifies the LSP rename flow end-to-end against a fake server:
        // `prepare_rename` resolves the renameable range, and `perform_rename`
        // applies a multi-file `WorkspaceEdit` to the affected buffers.
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        // Advertise rename support (including prepare) so the project issues
        // the corresponding LSP requests.
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // Prepare a rename at offset 7 (inside "ONE"); the fake server
        // reports the symbol's full range.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Perform the rename; the fake server answers with edits that span
        // both files in the workspace.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // Both buffers appear in the resulting transaction with the edits
        // applied; `two.rs` was opened implicitly to apply its edits.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
8315
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        // Verifies project-wide text search, both against files on disk and
        // against open buffers with unsaved edits.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        // Edit an open buffer (without saving) so it now contains matches;
        // search must see the in-memory contents, not the on-disk ones.
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        // Runs `query` against the project and flattens the results into a
        // map from worktree-relative path to the matching offset ranges.
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
8386}