1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
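/// An entity that can be opened as a project item, e.g. a buffer backed by a file.
/// `entry_id` links the item back to the worktree entry it was opened from, if any.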
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
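/// Tracks every open project in the app and owns the on-disk [`Db`] used to persist
/// per-project state (for example, whether a project should start out online).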
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
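/// The central project model: it owns the worktrees being edited, the buffers that have
/// been opened, the language servers started for those buffers, and the collaboration
/// state. A project is either local (this client is the host) or remote (this client
/// joined another host's project); see [`ProjectClientState`].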
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 subscriptions: Vec<client::Subscription>,
94 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
95 shared_buffers: HashMap<PeerId, HashSet<u64>>,
96 loading_buffers: HashMap<
97 ProjectPath,
98 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
99 >,
100 loading_local_worktrees:
101 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
102 opened_buffers: HashMap<u64, OpenBuffer>,
103 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
104 nonce: u128,
105 initialized_persistent_state: bool,
106}
107
108#[derive(Error, Debug)]
109pub enum JoinProjectError {
110 #[error("host declined join request")]
111 HostDeclined,
112 #[error("host closed the project")]
113 HostClosedProject,
114 #[error("host went offline")]
115 HostWentOffline,
116 #[error("{0}")]
117 Other(#[from] anyhow::Error),
118}
119
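/// How the project holds on to an open buffer. Shared and remote projects keep strong
/// handles so buffers stay alive for collaborators; an unshared local project keeps
/// only weak handles so buffers can be dropped once nothing else uses them. `Loading`
/// accumulates operations that arrive before the buffer has finished opening.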
120enum OpenBuffer {
121 Strong(ModelHandle<Buffer>),
122 Weak(WeakModelHandle<Buffer>),
123 Loading(Vec<Operation>),
124}
125
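/// Worktrees are held strongly while the project is shared; when it is unshared,
/// worktrees that aren't visible are downgraded to weak handles so they can be
/// released once nothing else references them.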
126enum WorktreeHandle {
127 Strong(ModelHandle<Worktree>),
128 Weak(WeakModelHandle<Worktree>),
129}
130
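/// Whether this instance of the project is the host (`Local`) or a guest that joined
/// over RPC (`Remote`). Local projects track their registration with the collab server
/// (`remote_id`) and whether they are online and shared; remote projects track the
/// host's project id, this client's replica id, and whether sharing has stopped.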
131enum ProjectClientState {
132 Local {
133 is_shared: bool,
134 remote_id_tx: watch::Sender<Option<u64>>,
135 remote_id_rx: watch::Receiver<Option<u64>>,
136 online_tx: watch::Sender<bool>,
137 online_rx: watch::Receiver<bool>,
138 _maintain_remote_id_task: Task<Option<()>>,
139 },
140 Remote {
141 sharing_has_stopped: bool,
142 remote_id: u64,
143 replica_id: ReplicaId,
144 _detect_unshare_task: Task<Option<()>>,
145 },
146}
147
148#[derive(Clone, Debug)]
149pub struct Collaborator {
150 pub user: Arc<User>,
151 pub peer_id: PeerId,
152 pub replica_id: ReplicaId,
153}
154
155#[derive(Clone, Debug, PartialEq, Eq)]
156pub enum Event {
157 ActiveEntryChanged(Option<ProjectEntryId>),
158 WorktreeAdded,
159 WorktreeRemoved(WorktreeId),
160 DiskBasedDiagnosticsStarted,
161 DiskBasedDiagnosticsUpdated,
162 DiskBasedDiagnosticsFinished,
163 DiagnosticsUpdated(ProjectPath),
164 RemoteIdChanged(Option<u64>),
165 CollaboratorLeft(PeerId),
166 ContactRequestedJoin(Arc<User>),
167 ContactCancelledJoinRequest(Arc<User>),
168}
169
170#[derive(Serialize)]
171pub struct LanguageServerStatus {
172 pub name: String,
173 pub pending_work: BTreeMap<String, LanguageServerProgress>,
174 pub pending_diagnostic_updates: isize,
175}
176
177#[derive(Clone, Debug, Serialize)]
178pub struct LanguageServerProgress {
179 pub message: Option<String>,
180 pub percentage: Option<usize>,
181 #[serde(skip_serializing)]
182 pub last_update_at: Instant,
183}
184
185#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
186pub struct ProjectPath {
187 pub worktree_id: WorktreeId,
188 pub path: Arc<Path>,
189}
190
191#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
192pub struct DiagnosticSummary {
193 pub error_count: usize,
194 pub warning_count: usize,
195}
196
197#[derive(Debug)]
198pub struct Location {
199 pub buffer: ModelHandle<Buffer>,
200 pub range: Range<language::Anchor>,
201}
202
203#[derive(Debug)]
204pub struct DocumentHighlight {
205 pub range: Range<language::Anchor>,
206 pub kind: DocumentHighlightKind,
207}
208
209#[derive(Clone, Debug)]
210pub struct Symbol {
211 pub source_worktree_id: WorktreeId,
212 pub worktree_id: WorktreeId,
213 pub language_server_name: LanguageServerName,
214 pub path: PathBuf,
215 pub label: CodeLabel,
216 pub name: String,
217 pub kind: lsp::SymbolKind,
218 pub range: Range<PointUtf16>,
219 pub signature: [u8; 32],
220}
221
222#[derive(Debug)]
223pub struct HoverBlock {
224 pub text: String,
225 pub language: Option<String>,
226}
227
228impl HoverBlock {
229 fn try_new(marked_string: MarkedString) -> Option<Self> {
230 let result = match marked_string {
231 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
232 text: value,
233 language: Some(language),
234 },
235 MarkedString::String(text) => HoverBlock {
236 text,
237 language: None,
238 },
239 };
240 if result.text.is_empty() {
241 None
242 } else {
243 Some(result)
244 }
245 }
246}
247
248#[derive(Debug)]
249pub struct Hover {
250 pub contents: Vec<HoverBlock>,
251 pub range: Option<Range<language::Anchor>>,
252}
253
254#[derive(Default)]
255pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
256
257impl DiagnosticSummary {
258 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
259 let mut this = Self {
260 error_count: 0,
261 warning_count: 0,
262 };
263
264 for entry in diagnostics {
265 if entry.diagnostic.is_primary {
266 match entry.diagnostic.severity {
267 DiagnosticSeverity::ERROR => this.error_count += 1,
268 DiagnosticSeverity::WARNING => this.warning_count += 1,
269 _ => {}
270 }
271 }
272 }
273
274 this
275 }
276
277 pub fn is_empty(&self) -> bool {
278 self.error_count == 0 && self.warning_count == 0
279 }
280
281 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
282 proto::DiagnosticSummary {
283 path: path.to_string_lossy().to_string(),
284 error_count: self.error_count as u32,
285 warning_count: self.warning_count as u32,
286 }
287 }
288}
289
290#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
291pub struct ProjectEntryId(usize);
292
293impl ProjectEntryId {
294 pub const MAX: Self = Self(usize::MAX);
295
296 pub fn new(counter: &AtomicUsize) -> Self {
297 Self(counter.fetch_add(1, SeqCst))
298 }
299
300 pub fn from_proto(id: u64) -> Self {
301 Self(id as usize)
302 }
303
304 pub fn to_proto(&self) -> u64 {
305 self.0 as u64
306 }
307
308 pub fn to_usize(&self) -> usize {
309 self.0
310 }
311}
312
313impl Project {
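    /// Registers the RPC message and request handlers that route incoming `proto`
    /// envelopes from the collab server to the appropriate project model.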
314 pub fn init(client: &Arc<Client>) {
315 client.add_model_message_handler(Self::handle_request_join_project);
316 client.add_model_message_handler(Self::handle_add_collaborator);
317 client.add_model_message_handler(Self::handle_buffer_reloaded);
318 client.add_model_message_handler(Self::handle_buffer_saved);
319 client.add_model_message_handler(Self::handle_start_language_server);
320 client.add_model_message_handler(Self::handle_update_language_server);
321 client.add_model_message_handler(Self::handle_remove_collaborator);
322 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
323 client.add_model_message_handler(Self::handle_update_project);
324 client.add_model_message_handler(Self::handle_unregister_project);
325 client.add_model_message_handler(Self::handle_project_unshared);
326 client.add_model_message_handler(Self::handle_update_buffer_file);
327 client.add_model_message_handler(Self::handle_update_buffer);
328 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
329 client.add_model_message_handler(Self::handle_update_worktree);
330 client.add_model_request_handler(Self::handle_create_project_entry);
331 client.add_model_request_handler(Self::handle_rename_project_entry);
332 client.add_model_request_handler(Self::handle_copy_project_entry);
333 client.add_model_request_handler(Self::handle_delete_project_entry);
334 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
335 client.add_model_request_handler(Self::handle_apply_code_action);
336 client.add_model_request_handler(Self::handle_reload_buffers);
337 client.add_model_request_handler(Self::handle_format_buffers);
338 client.add_model_request_handler(Self::handle_get_code_actions);
339 client.add_model_request_handler(Self::handle_get_completions);
340 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
341 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
342 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
343 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
344 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
345 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
346 client.add_model_request_handler(Self::handle_search_project);
347 client.add_model_request_handler(Self::handle_get_project_symbols);
348 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
349 client.add_model_request_handler(Self::handle_open_buffer_by_id);
350 client.add_model_request_handler(Self::handle_open_buffer_by_path);
351 client.add_model_request_handler(Self::handle_save_buffer);
352 }
353
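    /// Creates a local (host-side) project. The background task spawned here registers
    /// the project with the collab server whenever the client is connected and the
    /// project is marked online, and unregisters it otherwise.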
354 pub fn local(
355 online: bool,
356 client: Arc<Client>,
357 user_store: ModelHandle<UserStore>,
358 project_store: ModelHandle<ProjectStore>,
359 languages: Arc<LanguageRegistry>,
360 fs: Arc<dyn Fs>,
361 cx: &mut MutableAppContext,
362 ) -> ModelHandle<Self> {
363 cx.add_model(|cx: &mut ModelContext<Self>| {
364 let (online_tx, online_rx) = watch::channel_with(online);
365 let (remote_id_tx, remote_id_rx) = watch::channel();
366 let _maintain_remote_id_task = cx.spawn_weak({
367 let status_rx = client.clone().status();
368 let online_rx = online_rx.clone();
369 move |this, mut cx| async move {
370 let mut stream = Stream::map(status_rx.clone(), drop)
371 .merge(Stream::map(online_rx.clone(), drop));
372 while stream.recv().await.is_some() {
373 let this = this.upgrade(&cx)?;
374 if status_rx.borrow().is_connected() && *online_rx.borrow() {
375 this.update(&mut cx, |this, cx| this.register(cx))
376 .await
377 .log_err()?;
378 } else {
379 this.update(&mut cx, |this, cx| this.unregister(cx))
380 .await
381 .log_err();
382 }
383 }
384 None
385 }
386 });
387
388 let handle = cx.weak_handle();
389 project_store.update(cx, |store, cx| store.add_project(handle, cx));
390
391 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
392 Self {
393 worktrees: Default::default(),
394 collaborators: Default::default(),
395 opened_buffers: Default::default(),
396 shared_buffers: Default::default(),
397 loading_buffers: Default::default(),
398 loading_local_worktrees: Default::default(),
399 buffer_snapshots: Default::default(),
400 client_state: ProjectClientState::Local {
401 is_shared: false,
402 remote_id_tx,
403 remote_id_rx,
404 online_tx,
405 online_rx,
406 _maintain_remote_id_task,
407 },
408 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
409 subscriptions: Vec::new(),
410 active_entry: None,
411 languages,
412 client,
413 user_store,
414 project_store,
415 fs,
416 next_entry_id: Default::default(),
417 next_diagnostic_group_id: Default::default(),
418 language_servers: Default::default(),
419 started_language_servers: Default::default(),
420 language_server_statuses: Default::default(),
421 last_workspace_edits_by_language_server: Default::default(),
422 language_server_settings: Default::default(),
423 next_language_server_id: 0,
424 nonce: StdRng::from_entropy().gen(),
425 initialized_persistent_state: false,
426 }
427 })
428 }
429
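    /// Joins an existing project as a guest: connects and authenticates, sends a
    /// `JoinProject` request, and, if the host accepts, builds remote worktrees,
    /// language server statuses, and the collaborator list from the response.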
430 pub async fn remote(
431 remote_id: u64,
432 client: Arc<Client>,
433 user_store: ModelHandle<UserStore>,
434 project_store: ModelHandle<ProjectStore>,
435 languages: Arc<LanguageRegistry>,
436 fs: Arc<dyn Fs>,
437 mut cx: AsyncAppContext,
438 ) -> Result<ModelHandle<Self>, JoinProjectError> {
439 client.authenticate_and_connect(true, &cx).await?;
440
441 let response = client
442 .request(proto::JoinProject {
443 project_id: remote_id,
444 })
445 .await?;
446
447 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
448 proto::join_project_response::Variant::Accept(response) => response,
449 proto::join_project_response::Variant::Decline(decline) => {
450 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
451 Some(proto::join_project_response::decline::Reason::Declined) => {
452 Err(JoinProjectError::HostDeclined)?
453 }
454 Some(proto::join_project_response::decline::Reason::Closed) => {
455 Err(JoinProjectError::HostClosedProject)?
456 }
457 Some(proto::join_project_response::decline::Reason::WentOffline) => {
458 Err(JoinProjectError::HostWentOffline)?
459 }
460 None => Err(anyhow!("missing decline reason"))?,
461 }
462 }
463 };
464
465 let replica_id = response.replica_id as ReplicaId;
466
467 let mut worktrees = Vec::new();
468 for worktree in response.worktrees {
469 let (worktree, load_task) = cx
470 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
471 worktrees.push(worktree);
472 load_task.detach();
473 }
474
475 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
476 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
477 let handle = cx.weak_handle();
478 project_store.update(cx, |store, cx| store.add_project(handle, cx));
479
480 let mut this = Self {
481 worktrees: Vec::new(),
482 loading_buffers: Default::default(),
483 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
484 shared_buffers: Default::default(),
485 loading_local_worktrees: Default::default(),
486 active_entry: None,
487 collaborators: Default::default(),
488 languages,
489 user_store: user_store.clone(),
490 project_store,
491 fs,
492 next_entry_id: Default::default(),
493 next_diagnostic_group_id: Default::default(),
494 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
495 client: client.clone(),
496 client_state: ProjectClientState::Remote {
497 sharing_has_stopped: false,
498 remote_id,
499 replica_id,
500 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
501 async move {
502 let mut status = client.status();
503 let is_connected =
504 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any subsequent status change
                            // means the connection was interrupted at least momentarily, so
                            // treat the host as having stopped sharing the project with us.
506 if !is_connected || status.next().await.is_some() {
507 if let Some(this) = this.upgrade(&cx) {
508 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
509 }
510 }
511 Ok(())
512 }
513 .log_err()
514 }),
515 },
516 language_servers: Default::default(),
517 started_language_servers: Default::default(),
518 language_server_settings: Default::default(),
519 language_server_statuses: response
520 .language_servers
521 .into_iter()
522 .map(|server| {
523 (
524 server.id as usize,
525 LanguageServerStatus {
526 name: server.name,
527 pending_work: Default::default(),
528 pending_diagnostic_updates: 0,
529 },
530 )
531 })
532 .collect(),
533 last_workspace_edits_by_language_server: Default::default(),
534 next_language_server_id: 0,
535 opened_buffers: Default::default(),
536 buffer_snapshots: Default::default(),
537 nonce: StdRng::from_entropy().gen(),
538 initialized_persistent_state: false,
539 };
540 for worktree in worktrees {
541 this.add_worktree(&worktree, cx);
542 }
543 this
544 });
545
546 let user_ids = response
547 .collaborators
548 .iter()
549 .map(|peer| peer.user_id)
550 .collect();
551 user_store
552 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
553 .await?;
554 let mut collaborators = HashMap::default();
555 for message in response.collaborators {
556 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
557 collaborators.insert(collaborator.peer_id, collaborator);
558 }
559
560 this.update(&mut cx, |this, _| {
561 this.collaborators = collaborators;
562 });
563
564 Ok(this)
565 }
566
567 #[cfg(any(test, feature = "test-support"))]
568 pub async fn test(
569 fs: Arc<dyn Fs>,
570 root_paths: impl IntoIterator<Item = &Path>,
571 cx: &mut gpui::TestAppContext,
572 ) -> ModelHandle<Project> {
573 let languages = Arc::new(LanguageRegistry::test());
574 let http_client = client::test::FakeHttpClient::with_404_response();
575 let client = client::Client::new(http_client.clone());
576 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
577 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
578 let project = cx.update(|cx| {
579 Project::local(true, client, user_store, project_store, languages, fs, cx)
580 });
581 for path in root_paths {
582 let (tree, _) = project
583 .update(cx, |project, cx| {
584 project.find_or_create_local_worktree(path, true, cx)
585 })
586 .await
587 .unwrap();
588 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
589 .await;
590 }
591 project
592 }
593
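    /// Restores persisted state for a local project. Currently this reads the
    /// per-worktree "online" flags from the project database and marks the project
    /// online only if every stored flag agrees, falling back to the
    /// `projects_online_by_default` setting for keys that are missing.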
594 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
595 if self.is_remote() {
596 return Task::ready(Ok(()));
597 }
598
599 let db = self.project_store.read(cx).db.clone();
600 let keys = self.db_keys_for_online_state(cx);
601 let online_by_default = cx.global::<Settings>().projects_online_by_default;
602 let read_online = cx.background().spawn(async move {
603 let values = db.read(keys)?;
604 anyhow::Ok(
605 values
606 .into_iter()
607 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
608 )
609 });
610 cx.spawn(|this, mut cx| async move {
611 let online = read_online.await.log_err().unwrap_or(false);
612 this.update(&mut cx, |this, cx| {
613 this.initialized_persistent_state = true;
614 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
615 let mut online_tx = online_tx.borrow_mut();
616 if *online_tx != online {
617 *online_tx = online;
618 drop(online_tx);
619 this.metadata_changed(false, cx);
620 }
621 }
622 });
623 Ok(())
624 })
625 }
626
627 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
628 if self.is_remote() || !self.initialized_persistent_state {
629 return Task::ready(Ok(()));
630 }
631
632 let db = self.project_store.read(cx).db.clone();
633 let keys = self.db_keys_for_online_state(cx);
634 let is_online = self.is_online();
635 cx.background().spawn(async move {
636 let value = &[is_online as u8];
637 db.write(keys.into_iter().map(|key| (key, value)))
638 })
639 }
640
641 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
642 self.opened_buffers
643 .get(&remote_id)
644 .and_then(|buffer| buffer.upgrade(cx))
645 }
646
647 pub fn languages(&self) -> &Arc<LanguageRegistry> {
648 &self.languages
649 }
650
651 pub fn client(&self) -> Arc<Client> {
652 self.client.clone()
653 }
654
655 pub fn user_store(&self) -> ModelHandle<UserStore> {
656 self.user_store.clone()
657 }
658
659 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
660 self.project_store.clone()
661 }
662
663 #[cfg(any(test, feature = "test-support"))]
664 pub fn check_invariants(&self, cx: &AppContext) {
665 if self.is_local() {
666 let mut worktree_root_paths = HashMap::default();
667 for worktree in self.worktrees(cx) {
668 let worktree = worktree.read(cx);
669 let abs_path = worktree.as_local().unwrap().abs_path().clone();
670 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
671 assert_eq!(
672 prev_worktree_id,
673 None,
674 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
675 abs_path,
676 worktree.id(),
677 prev_worktree_id
678 )
679 }
680 } else {
681 let replica_id = self.replica_id();
682 for buffer in self.opened_buffers.values() {
683 if let Some(buffer) = buffer.upgrade(cx) {
684 let buffer = buffer.read(cx);
685 assert_eq!(
686 buffer.deferred_ops_len(),
687 0,
688 "replica {}, buffer {} has deferred operations",
689 replica_id,
690 buffer.remote_id()
691 );
692 }
693 }
694 }
695 }
696
697 #[cfg(any(test, feature = "test-support"))]
698 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
699 let path = path.into();
700 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
701 self.opened_buffers.iter().any(|(_, buffer)| {
702 if let Some(buffer) = buffer.upgrade(cx) {
703 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
704 if file.worktree == worktree && file.path() == &path.path {
705 return true;
706 }
707 }
708 }
709 false
710 })
711 } else {
712 false
713 }
714 }
715
716 pub fn fs(&self) -> &Arc<dyn Fs> {
717 &self.fs
718 }
719
720 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
721 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
722 let mut online_tx = online_tx.borrow_mut();
723 if *online_tx != online {
724 *online_tx = online;
725 drop(online_tx);
726 self.metadata_changed(true, cx);
727 }
728 }
729 }
730
731 pub fn is_online(&self) -> bool {
732 match &self.client_state {
733 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
734 ProjectClientState::Remote { .. } => true,
735 }
736 }
737
738 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
739 self.unshared(cx);
740 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
741 if let Some(remote_id) = *remote_id_rx.borrow() {
742 let request = self.client.request(proto::UnregisterProject {
743 project_id: remote_id,
744 });
745 return cx.spawn(|this, mut cx| async move {
746 let response = request.await;
747
748 // Unregistering the project causes the server to send out a
749 // contact update removing this project from the host's list
750 // of online projects. Wait until this contact update has been
751 // processed before clearing out this project's remote id, so
752 // that there is no moment where this project appears in the
753 // contact metadata and *also* has no remote id.
754 this.update(&mut cx, |this, cx| {
755 this.user_store()
756 .update(cx, |store, _| store.contact_updates_done())
757 })
758 .await;
759
760 this.update(&mut cx, |this, cx| {
761 if let ProjectClientState::Local { remote_id_tx, .. } =
762 &mut this.client_state
763 {
764 *remote_id_tx.borrow_mut() = None;
765 }
766 this.subscriptions.clear();
767 this.metadata_changed(false, cx);
768 });
769 response.map(drop)
770 });
771 }
772 }
773 Task::ready(Ok(()))
774 }
775
776 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
777 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
778 if remote_id_rx.borrow().is_some() {
779 return Task::ready(Ok(()));
780 }
781 }
782
783 let response = self.client.request(proto::RegisterProject {});
784 cx.spawn(|this, mut cx| async move {
785 let remote_id = response.await?.project_id;
786 this.update(&mut cx, |this, cx| {
787 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
788 *remote_id_tx.borrow_mut() = Some(remote_id);
789 }
790
791 this.metadata_changed(false, cx);
792 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
793 this.subscriptions
794 .push(this.client.add_model_for_remote_entity(remote_id, cx));
795 Ok(())
796 })
797 })
798 }
799
800 pub fn remote_id(&self) -> Option<u64> {
801 match &self.client_state {
802 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
803 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
804 }
805 }
806
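    /// Resolves to the project's remote id. For remote projects the id is known
    /// immediately; for local projects the returned future waits on the `remote_id`
    /// watch until registration with the server assigns one.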
807 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
808 let mut id = None;
809 let mut watch = None;
810 match &self.client_state {
811 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
812 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
813 }
814
815 async move {
816 if let Some(id) = id {
817 return id;
818 }
819 let mut watch = watch.unwrap();
820 loop {
821 let id = *watch.borrow();
822 if let Some(id) = id {
823 return id;
824 }
825 watch.next().await;
826 }
827 }
828 }
829
830 pub fn shared_remote_id(&self) -> Option<u64> {
831 match &self.client_state {
832 ProjectClientState::Local {
833 remote_id_rx,
834 is_shared,
835 ..
836 } => {
837 if *is_shared {
838 *remote_id_rx.borrow()
839 } else {
840 None
841 }
842 }
843 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
844 }
845 }
846
847 pub fn replica_id(&self) -> ReplicaId {
848 match &self.client_state {
849 ProjectClientState::Local { .. } => 0,
850 ProjectClientState::Remote { replica_id, .. } => *replica_id,
851 }
852 }
853
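    /// Called whenever worktrees or online state change. If the project is registered
    /// and online, this sends an `UpdateProject` message describing the current
    /// worktrees, optionally persists the online state, and notifies observers of both
    /// the project and the project store.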
854 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
855 if let ProjectClientState::Local {
856 remote_id_rx,
857 online_rx,
858 ..
859 } = &self.client_state
860 {
861 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
862 self.client
863 .send(proto::UpdateProject {
864 project_id,
865 worktrees: self
866 .worktrees
867 .iter()
868 .filter_map(|worktree| {
869 worktree.upgrade(&cx).map(|worktree| {
870 worktree.read(cx).as_local().unwrap().metadata_proto()
871 })
872 })
873 .collect(),
874 })
875 .log_err();
876 }
877
878 self.project_store.update(cx, |_, cx| cx.notify());
879 if persist {
880 self.persist_state(cx).detach_and_log_err(cx);
881 }
882 cx.notify();
883 }
884 }
885
886 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
887 &self.collaborators
888 }
889
890 pub fn worktrees<'a>(
891 &'a self,
892 cx: &'a AppContext,
893 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
894 self.worktrees
895 .iter()
896 .filter_map(move |worktree| worktree.upgrade(cx))
897 }
898
899 pub fn visible_worktrees<'a>(
900 &'a self,
901 cx: &'a AppContext,
902 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
903 self.worktrees.iter().filter_map(|worktree| {
904 worktree.upgrade(cx).and_then(|worktree| {
905 if worktree.read(cx).is_visible() {
906 Some(worktree)
907 } else {
908 None
909 }
910 })
911 })
912 }
913
914 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
915 self.visible_worktrees(cx)
916 .map(|tree| tree.read(cx).root_name())
917 }
918
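    /// One database key per visible worktree, derived from the worktree's absolute
    /// path, e.g. `project-path-online:/home/someone/src/zed` (illustrative path).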
919 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
920 self.worktrees
921 .iter()
922 .filter_map(|worktree| {
923 let worktree = worktree.upgrade(&cx)?.read(cx);
924 if worktree.is_visible() {
925 Some(format!(
926 "project-path-online:{}",
927 worktree.as_local().unwrap().abs_path().to_string_lossy()
928 ))
929 } else {
930 None
931 }
932 })
933 .collect::<Vec<_>>()
934 }
935
936 pub fn worktree_for_id(
937 &self,
938 id: WorktreeId,
939 cx: &AppContext,
940 ) -> Option<ModelHandle<Worktree>> {
941 self.worktrees(cx)
942 .find(|worktree| worktree.read(cx).id() == id)
943 }
944
945 pub fn worktree_for_entry(
946 &self,
947 entry_id: ProjectEntryId,
948 cx: &AppContext,
949 ) -> Option<ModelHandle<Worktree>> {
950 self.worktrees(cx)
951 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
952 }
953
954 pub fn worktree_id_for_entry(
955 &self,
956 entry_id: ProjectEntryId,
957 cx: &AppContext,
958 ) -> Option<WorktreeId> {
959 self.worktree_for_entry(entry_id, cx)
960 .map(|worktree| worktree.read(cx).id())
961 }
962
963 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
964 paths.iter().all(|path| self.contains_path(&path, cx))
965 }
966
967 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
968 for worktree in self.worktrees(cx) {
969 let worktree = worktree.read(cx).as_local();
970 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
971 return true;
972 }
973 }
974 false
975 }
976
977 pub fn create_entry(
978 &mut self,
979 project_path: impl Into<ProjectPath>,
980 is_directory: bool,
981 cx: &mut ModelContext<Self>,
982 ) -> Option<Task<Result<Entry>>> {
983 let project_path = project_path.into();
984 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
985 if self.is_local() {
986 Some(worktree.update(cx, |worktree, cx| {
987 worktree
988 .as_local_mut()
989 .unwrap()
990 .create_entry(project_path.path, is_directory, cx)
991 }))
992 } else {
993 let client = self.client.clone();
994 let project_id = self.remote_id().unwrap();
995 Some(cx.spawn_weak(|_, mut cx| async move {
996 let response = client
997 .request(proto::CreateProjectEntry {
998 worktree_id: project_path.worktree_id.to_proto(),
999 project_id,
1000 path: project_path.path.as_os_str().as_bytes().to_vec(),
1001 is_directory,
1002 })
1003 .await?;
1004 let entry = response
1005 .entry
1006 .ok_or_else(|| anyhow!("missing entry in response"))?;
1007 worktree
1008 .update(&mut cx, |worktree, cx| {
1009 worktree.as_remote().unwrap().insert_entry(
1010 entry,
1011 response.worktree_scan_id as usize,
1012 cx,
1013 )
1014 })
1015 .await
1016 }))
1017 }
1018 }
1019
1020 pub fn copy_entry(
1021 &mut self,
1022 entry_id: ProjectEntryId,
1023 new_path: impl Into<Arc<Path>>,
1024 cx: &mut ModelContext<Self>,
1025 ) -> Option<Task<Result<Entry>>> {
1026 let worktree = self.worktree_for_entry(entry_id, cx)?;
1027 let new_path = new_path.into();
1028 if self.is_local() {
1029 worktree.update(cx, |worktree, cx| {
1030 worktree
1031 .as_local_mut()
1032 .unwrap()
1033 .copy_entry(entry_id, new_path, cx)
1034 })
1035 } else {
1036 let client = self.client.clone();
1037 let project_id = self.remote_id().unwrap();
1038
1039 Some(cx.spawn_weak(|_, mut cx| async move {
1040 let response = client
1041 .request(proto::CopyProjectEntry {
1042 project_id,
1043 entry_id: entry_id.to_proto(),
1044 new_path: new_path.as_os_str().as_bytes().to_vec(),
1045 })
1046 .await?;
1047 let entry = response
1048 .entry
1049 .ok_or_else(|| anyhow!("missing entry in response"))?;
1050 worktree
1051 .update(&mut cx, |worktree, cx| {
1052 worktree.as_remote().unwrap().insert_entry(
1053 entry,
1054 response.worktree_scan_id as usize,
1055 cx,
1056 )
1057 })
1058 .await
1059 }))
1060 }
1061 }
1062
1063 pub fn rename_entry(
1064 &mut self,
1065 entry_id: ProjectEntryId,
1066 new_path: impl Into<Arc<Path>>,
1067 cx: &mut ModelContext<Self>,
1068 ) -> Option<Task<Result<Entry>>> {
1069 let worktree = self.worktree_for_entry(entry_id, cx)?;
1070 let new_path = new_path.into();
1071 if self.is_local() {
1072 worktree.update(cx, |worktree, cx| {
1073 worktree
1074 .as_local_mut()
1075 .unwrap()
1076 .rename_entry(entry_id, new_path, cx)
1077 })
1078 } else {
1079 let client = self.client.clone();
1080 let project_id = self.remote_id().unwrap();
1081
1082 Some(cx.spawn_weak(|_, mut cx| async move {
1083 let response = client
1084 .request(proto::RenameProjectEntry {
1085 project_id,
1086 entry_id: entry_id.to_proto(),
1087 new_path: new_path.as_os_str().as_bytes().to_vec(),
1088 })
1089 .await?;
1090 let entry = response
1091 .entry
1092 .ok_or_else(|| anyhow!("missing entry in response"))?;
1093 worktree
1094 .update(&mut cx, |worktree, cx| {
1095 worktree.as_remote().unwrap().insert_entry(
1096 entry,
1097 response.worktree_scan_id as usize,
1098 cx,
1099 )
1100 })
1101 .await
1102 }))
1103 }
1104 }
1105
1106 pub fn delete_entry(
1107 &mut self,
1108 entry_id: ProjectEntryId,
1109 cx: &mut ModelContext<Self>,
1110 ) -> Option<Task<Result<()>>> {
1111 let worktree = self.worktree_for_entry(entry_id, cx)?;
1112 if self.is_local() {
1113 worktree.update(cx, |worktree, cx| {
1114 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1115 })
1116 } else {
1117 let client = self.client.clone();
1118 let project_id = self.remote_id().unwrap();
1119 Some(cx.spawn_weak(|_, mut cx| async move {
1120 let response = client
1121 .request(proto::DeleteProjectEntry {
1122 project_id,
1123 entry_id: entry_id.to_proto(),
1124 })
1125 .await?;
1126 worktree
1127 .update(&mut cx, move |worktree, cx| {
1128 worktree.as_remote().unwrap().delete_entry(
1129 entry_id,
1130 response.worktree_scan_id as usize,
1131 cx,
1132 )
1133 })
1134 .await
1135 }))
1136 }
1137 }
1138
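    /// Shares a registered local project with guests: upgrades weak buffer and worktree
    /// handles to strong ones so they stay alive for collaborators, then shares each
    /// worktree with the server.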
1139 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1140 let project_id;
1141 if let ProjectClientState::Local {
1142 remote_id_rx,
1143 is_shared,
1144 ..
1145 } = &mut self.client_state
1146 {
1147 if *is_shared {
1148 return Task::ready(Ok(()));
1149 }
1150 *is_shared = true;
1151 if let Some(id) = *remote_id_rx.borrow() {
1152 project_id = id;
1153 } else {
1154 return Task::ready(Err(anyhow!("project hasn't been registered")));
1155 }
1156 } else {
1157 return Task::ready(Err(anyhow!("can't share a remote project")));
1158 };
1159
1160 for open_buffer in self.opened_buffers.values_mut() {
1161 match open_buffer {
1162 OpenBuffer::Strong(_) => {}
1163 OpenBuffer::Weak(buffer) => {
1164 if let Some(buffer) = buffer.upgrade(cx) {
1165 *open_buffer = OpenBuffer::Strong(buffer);
1166 }
1167 }
1168 OpenBuffer::Loading(_) => unreachable!(),
1169 }
1170 }
1171
1172 for worktree_handle in self.worktrees.iter_mut() {
1173 match worktree_handle {
1174 WorktreeHandle::Strong(_) => {}
1175 WorktreeHandle::Weak(worktree) => {
1176 if let Some(worktree) = worktree.upgrade(cx) {
1177 *worktree_handle = WorktreeHandle::Strong(worktree);
1178 }
1179 }
1180 }
1181 }
1182
1183 let mut tasks = Vec::new();
1184 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1185 worktree.update(cx, |worktree, cx| {
1186 let worktree = worktree.as_local_mut().unwrap();
1187 tasks.push(worktree.share(project_id, cx));
1188 });
1189 }
1190
1191 cx.spawn(|this, mut cx| async move {
1192 for task in tasks {
1193 task.await?;
1194 }
1195 this.update(&mut cx, |_, cx| cx.notify());
1196 Ok(())
1197 })
1198 }
1199
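    /// The inverse of [`Project::share`]: clears collaborator and shared-buffer
    /// bookkeeping and downgrades handles again so unused buffers and invisible
    /// worktrees can be released.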
1200 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1201 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1202 if !*is_shared {
1203 return;
1204 }
1205
1206 *is_shared = false;
1207 self.collaborators.clear();
1208 self.shared_buffers.clear();
1209 for worktree_handle in self.worktrees.iter_mut() {
1210 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1211 let is_visible = worktree.update(cx, |worktree, _| {
1212 worktree.as_local_mut().unwrap().unshare();
1213 worktree.is_visible()
1214 });
1215 if !is_visible {
1216 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1217 }
1218 }
1219 }
1220
1221 for open_buffer in self.opened_buffers.values_mut() {
1222 match open_buffer {
1223 OpenBuffer::Strong(buffer) => {
1224 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1225 }
1226 _ => {}
1227 }
1228 }
1229
1230 cx.notify();
1231 } else {
1232 log::error!("attempted to unshare a remote project");
1233 }
1234 }
1235
1236 pub fn respond_to_join_request(
1237 &mut self,
1238 requester_id: u64,
1239 allow: bool,
1240 cx: &mut ModelContext<Self>,
1241 ) {
1242 if let Some(project_id) = self.remote_id() {
1243 let share = self.share(cx);
1244 let client = self.client.clone();
1245 cx.foreground()
1246 .spawn(async move {
1247 share.await?;
1248 client.send(proto::RespondToJoinProjectRequest {
1249 requester_id,
1250 project_id,
1251 allow,
1252 })
1253 })
1254 .detach_and_log_err(cx);
1255 }
1256 }
1257
1258 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1259 if let ProjectClientState::Remote {
1260 sharing_has_stopped,
1261 ..
1262 } = &mut self.client_state
1263 {
1264 *sharing_has_stopped = true;
1265 self.collaborators.clear();
1266 cx.notify();
1267 }
1268 }
1269
1270 pub fn is_read_only(&self) -> bool {
1271 match &self.client_state {
1272 ProjectClientState::Local { .. } => false,
1273 ProjectClientState::Remote {
1274 sharing_has_stopped,
1275 ..
1276 } => *sharing_has_stopped,
1277 }
1278 }
1279
1280 pub fn is_local(&self) -> bool {
1281 match &self.client_state {
1282 ProjectClientState::Local { .. } => true,
1283 ProjectClientState::Remote { .. } => false,
1284 }
1285 }
1286
1287 pub fn is_remote(&self) -> bool {
1288 !self.is_local()
1289 }
1290
1291 pub fn create_buffer(
1292 &mut self,
1293 text: &str,
1294 language: Option<Arc<Language>>,
1295 cx: &mut ModelContext<Self>,
1296 ) -> Result<ModelHandle<Buffer>> {
1297 if self.is_remote() {
1298 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1299 }
1300
1301 let buffer = cx.add_model(|cx| {
1302 Buffer::new(self.replica_id(), text, cx)
1303 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1304 });
1305 self.register_buffer(&buffer, cx)?;
1306 Ok(buffer)
1307 }
1308
1309 pub fn open_path(
1310 &mut self,
1311 path: impl Into<ProjectPath>,
1312 cx: &mut ModelContext<Self>,
1313 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1314 let task = self.open_buffer(path, cx);
1315 cx.spawn_weak(|_, cx| async move {
1316 let buffer = task.await?;
1317 let project_entry_id = buffer
1318 .read_with(&cx, |buffer, cx| {
1319 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1320 })
1321 .ok_or_else(|| anyhow!("no project entry"))?;
1322 Ok((project_entry_id, buffer.into()))
1323 })
1324 }
1325
1326 pub fn open_local_buffer(
1327 &mut self,
1328 abs_path: impl AsRef<Path>,
1329 cx: &mut ModelContext<Self>,
1330 ) -> Task<Result<ModelHandle<Buffer>>> {
1331 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1332 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1333 } else {
1334 Task::ready(Err(anyhow!("no such path")))
1335 }
1336 }
1337
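    /// Opens the buffer for a project path, deduplicating concurrent requests: an
    /// already-open buffer is returned directly, an in-flight load is awaited via the
    /// `loading_buffers` watch, and otherwise a new local or remote load is started
    /// depending on the kind of worktree.
    ///
    /// Illustrative usage (not compiled here; assumes a `project` handle, a known
    /// `worktree_id`, and a `cx` are in scope):
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| {
    ///         project.open_buffer((worktree_id, Path::new("src/main.rs")), cx)
    ///     })
    ///     .await?;
    /// ```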
1338 pub fn open_buffer(
1339 &mut self,
1340 path: impl Into<ProjectPath>,
1341 cx: &mut ModelContext<Self>,
1342 ) -> Task<Result<ModelHandle<Buffer>>> {
1343 let project_path = path.into();
1344 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1345 worktree
1346 } else {
1347 return Task::ready(Err(anyhow!("no such worktree")));
1348 };
1349
1350 // If there is already a buffer for the given path, then return it.
1351 let existing_buffer = self.get_open_buffer(&project_path, cx);
1352 if let Some(existing_buffer) = existing_buffer {
1353 return Task::ready(Ok(existing_buffer));
1354 }
1355
1356 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1357 // If the given path is already being loaded, then wait for that existing
1358 // task to complete and return the same buffer.
1359 hash_map::Entry::Occupied(e) => e.get().clone(),
1360
1361 // Otherwise, record the fact that this path is now being loaded.
1362 hash_map::Entry::Vacant(entry) => {
1363 let (mut tx, rx) = postage::watch::channel();
1364 entry.insert(rx.clone());
1365
1366 let load_buffer = if worktree.read(cx).is_local() {
1367 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1368 } else {
1369 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1370 };
1371
1372 cx.spawn(move |this, mut cx| async move {
1373 let load_result = load_buffer.await;
1374 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1375 // Record the fact that the buffer is no longer loading.
1376 this.loading_buffers.remove(&project_path);
1377 let buffer = load_result.map_err(Arc::new)?;
1378 Ok(buffer)
1379 }));
1380 })
1381 .detach();
1382 rx
1383 }
1384 };
1385
1386 cx.foreground().spawn(async move {
1387 loop {
1388 if let Some(result) = loading_watch.borrow().as_ref() {
1389 match result {
1390 Ok(buffer) => return Ok(buffer.clone()),
1391 Err(error) => return Err(anyhow!("{}", error)),
1392 }
1393 }
1394 loading_watch.next().await;
1395 }
1396 })
1397 }
1398
1399 fn open_local_buffer_internal(
1400 &mut self,
1401 path: &Arc<Path>,
1402 worktree: &ModelHandle<Worktree>,
1403 cx: &mut ModelContext<Self>,
1404 ) -> Task<Result<ModelHandle<Buffer>>> {
1405 let load_buffer = worktree.update(cx, |worktree, cx| {
1406 let worktree = worktree.as_local_mut().unwrap();
1407 worktree.load_buffer(path, cx)
1408 });
1409 cx.spawn(|this, mut cx| async move {
1410 let buffer = load_buffer.await?;
1411 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1412 Ok(buffer)
1413 })
1414 }
1415
1416 fn open_remote_buffer_internal(
1417 &mut self,
1418 path: &Arc<Path>,
1419 worktree: &ModelHandle<Worktree>,
1420 cx: &mut ModelContext<Self>,
1421 ) -> Task<Result<ModelHandle<Buffer>>> {
1422 let rpc = self.client.clone();
1423 let project_id = self.remote_id().unwrap();
1424 let remote_worktree_id = worktree.read(cx).id();
1425 let path = path.clone();
1426 let path_string = path.to_string_lossy().to_string();
1427 cx.spawn(|this, mut cx| async move {
1428 let response = rpc
1429 .request(proto::OpenBufferByPath {
1430 project_id,
1431 worktree_id: remote_worktree_id.to_proto(),
1432 path: path_string,
1433 })
1434 .await?;
1435 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1436 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1437 .await
1438 })
1439 }
1440
1441 fn open_local_buffer_via_lsp(
1442 &mut self,
1443 abs_path: lsp::Url,
1444 lsp_adapter: Arc<dyn LspAdapter>,
1445 lsp_server: Arc<LanguageServer>,
1446 cx: &mut ModelContext<Self>,
1447 ) -> Task<Result<ModelHandle<Buffer>>> {
1448 cx.spawn(|this, mut cx| async move {
1449 let abs_path = abs_path
1450 .to_file_path()
1451 .map_err(|_| anyhow!("can't convert URI to path"))?;
1452 let (worktree, relative_path) = if let Some(result) =
1453 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1454 {
1455 result
1456 } else {
1457 let worktree = this
1458 .update(&mut cx, |this, cx| {
1459 this.create_local_worktree(&abs_path, false, cx)
1460 })
1461 .await?;
1462 this.update(&mut cx, |this, cx| {
1463 this.language_servers.insert(
1464 (worktree.read(cx).id(), lsp_adapter.name()),
1465 (lsp_adapter, lsp_server),
1466 );
1467 });
1468 (worktree, PathBuf::new())
1469 };
1470
1471 let project_path = ProjectPath {
1472 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1473 path: relative_path.into(),
1474 };
1475 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1476 .await
1477 })
1478 }
1479
1480 pub fn open_buffer_by_id(
1481 &mut self,
1482 id: u64,
1483 cx: &mut ModelContext<Self>,
1484 ) -> Task<Result<ModelHandle<Buffer>>> {
1485 if let Some(buffer) = self.buffer_for_id(id, cx) {
1486 Task::ready(Ok(buffer))
1487 } else if self.is_local() {
1488 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1489 } else if let Some(project_id) = self.remote_id() {
1490 let request = self
1491 .client
1492 .request(proto::OpenBufferById { project_id, id });
1493 cx.spawn(|this, mut cx| async move {
1494 let buffer = request
1495 .await?
1496 .buffer
1497 .ok_or_else(|| anyhow!("invalid buffer"))?;
1498 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1499 .await
1500 })
1501 } else {
1502 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1503 }
1504 }
1505
1506 pub fn save_buffer_as(
1507 &mut self,
1508 buffer: ModelHandle<Buffer>,
1509 abs_path: PathBuf,
1510 cx: &mut ModelContext<Project>,
1511 ) -> Task<Result<()>> {
1512 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1513 let old_path =
1514 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1515 cx.spawn(|this, mut cx| async move {
1516 if let Some(old_path) = old_path {
1517 this.update(&mut cx, |this, cx| {
1518 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1519 });
1520 }
1521 let (worktree, path) = worktree_task.await?;
1522 worktree
1523 .update(&mut cx, |worktree, cx| {
1524 worktree
1525 .as_local_mut()
1526 .unwrap()
1527 .save_buffer_as(buffer.clone(), path, cx)
1528 })
1529 .await?;
1530 this.update(&mut cx, |this, cx| {
1531 this.assign_language_to_buffer(&buffer, cx);
1532 this.register_buffer_with_language_server(&buffer, cx);
1533 });
1534 Ok(())
1535 })
1536 }
1537
1538 pub fn get_open_buffer(
1539 &mut self,
1540 path: &ProjectPath,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Option<ModelHandle<Buffer>> {
1543 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1544 self.opened_buffers.values().find_map(|buffer| {
1545 let buffer = buffer.upgrade(cx)?;
1546 let file = File::from_dyn(buffer.read(cx).file())?;
1547 if file.worktree == worktree && file.path() == &path.path {
1548 Some(buffer)
1549 } else {
1550 None
1551 }
1552 })
1553 }
1554
1555 fn register_buffer(
1556 &mut self,
1557 buffer: &ModelHandle<Buffer>,
1558 cx: &mut ModelContext<Self>,
1559 ) -> Result<()> {
1560 let remote_id = buffer.read(cx).remote_id();
1561 let open_buffer = if self.is_remote() || self.is_shared() {
1562 OpenBuffer::Strong(buffer.clone())
1563 } else {
1564 OpenBuffer::Weak(buffer.downgrade())
1565 };
1566
1567 match self.opened_buffers.insert(remote_id, open_buffer) {
1568 None => {}
1569 Some(OpenBuffer::Loading(operations)) => {
1570 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1571 }
1572 Some(OpenBuffer::Weak(existing_handle)) => {
1573 if existing_handle.upgrade(cx).is_some() {
1574 Err(anyhow!(
1575 "already registered buffer with remote id {}",
1576 remote_id
1577 ))?
1578 }
1579 }
1580 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1581 "already registered buffer with remote id {}",
1582 remote_id
1583 ))?,
1584 }
1585 cx.subscribe(buffer, |this, buffer, event, cx| {
1586 this.on_buffer_event(buffer, event, cx);
1587 })
1588 .detach();
1589
1590 self.assign_language_to_buffer(buffer, cx);
1591 self.register_buffer_with_language_server(buffer, cx);
1592 cx.observe_release(buffer, |this, buffer, cx| {
1593 if let Some(file) = File::from_dyn(buffer.file()) {
1594 if file.is_local() {
1595 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1596 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1597 server
1598 .notify::<lsp::notification::DidCloseTextDocument>(
1599 lsp::DidCloseTextDocumentParams {
1600 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1601 },
1602 )
1603 .log_err();
1604 }
1605 }
1606 }
1607 })
1608 .detach();
1609
1610 Ok(())
1611 }
1612
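    /// Introduces a newly registered buffer to its language server: applies any
    /// diagnostics already known for the path, sends a `textDocument/didOpen`
    /// notification with version 0, and records the initial snapshot so later edits
    /// can be reported as incremental `textDocument/didChange` events.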
1613 fn register_buffer_with_language_server(
1614 &mut self,
1615 buffer_handle: &ModelHandle<Buffer>,
1616 cx: &mut ModelContext<Self>,
1617 ) {
1618 let buffer = buffer_handle.read(cx);
1619 let buffer_id = buffer.remote_id();
1620 if let Some(file) = File::from_dyn(buffer.file()) {
1621 if file.is_local() {
1622 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1623 let initial_snapshot = buffer.text_snapshot();
1624
1625 let mut language_server = None;
1626 let mut language_id = None;
1627 if let Some(language) = buffer.language() {
1628 let worktree_id = file.worktree_id(cx);
1629 if let Some(adapter) = language.lsp_adapter() {
1630 language_id = adapter.id_for_language(language.name().as_ref());
1631 language_server = self
1632 .language_servers
1633 .get(&(worktree_id, adapter.name()))
1634 .cloned();
1635 }
1636 }
1637
1638 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1639 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1640 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1641 .log_err();
1642 }
1643 }
1644
1645 if let Some((_, server)) = language_server {
1646 server
1647 .notify::<lsp::notification::DidOpenTextDocument>(
1648 lsp::DidOpenTextDocumentParams {
1649 text_document: lsp::TextDocumentItem::new(
1650 uri,
1651 language_id.unwrap_or_default(),
1652 0,
1653 initial_snapshot.text(),
1654 ),
1655 }
1656 .clone(),
1657 )
1658 .log_err();
1659 buffer_handle.update(cx, |buffer, cx| {
1660 buffer.set_completion_triggers(
1661 server
1662 .capabilities()
1663 .completion_provider
1664 .as_ref()
1665 .and_then(|provider| provider.trigger_characters.clone())
1666 .unwrap_or(Vec::new()),
1667 cx,
1668 )
1669 });
1670 self.buffer_snapshots
1671 .insert(buffer_id, vec![(0, initial_snapshot)]);
1672 }
1673 }
1674 }
1675 }
1676
1677 fn unregister_buffer_from_language_server(
1678 &mut self,
1679 buffer: &ModelHandle<Buffer>,
1680 old_path: PathBuf,
1681 cx: &mut ModelContext<Self>,
1682 ) {
1683 buffer.update(cx, |buffer, cx| {
1684 buffer.update_diagnostics(Default::default(), cx);
1685 self.buffer_snapshots.remove(&buffer.remote_id());
1686 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1687 language_server
1688 .notify::<lsp::notification::DidCloseTextDocument>(
1689 lsp::DidCloseTextDocumentParams {
1690 text_document: lsp::TextDocumentIdentifier::new(
1691 lsp::Url::from_file_path(old_path).unwrap(),
1692 ),
1693 },
1694 )
1695 .log_err();
1696 }
1697 });
1698 }
1699
1700 fn on_buffer_event(
1701 &mut self,
1702 buffer: ModelHandle<Buffer>,
1703 event: &BufferEvent,
1704 cx: &mut ModelContext<Self>,
1705 ) -> Option<()> {
1706 match event {
1707 BufferEvent::Operation(operation) => {
1708 if let Some(project_id) = self.shared_remote_id() {
1709 let request = self.client.request(proto::UpdateBuffer {
1710 project_id,
1711 buffer_id: buffer.read(cx).remote_id(),
1712 operations: vec![language::proto::serialize_operation(&operation)],
1713 });
1714 cx.background().spawn(request).detach_and_log_err(cx);
1715 }
1716 }
1717 BufferEvent::Edited { .. } => {
1718 let (_, language_server) = self
1719 .language_server_for_buffer(buffer.read(cx), cx)?
1720 .clone();
1721 let buffer = buffer.read(cx);
1722 let file = File::from_dyn(buffer.file())?;
1723 let abs_path = file.as_local()?.abs_path(cx);
1724 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1725 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1726 let (version, prev_snapshot) = buffer_snapshots.last()?;
1727 let next_snapshot = buffer.text_snapshot();
1728 let next_version = version + 1;
1729
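                // Convert each edit since the last reported snapshot into an LSP content
                // change: the range covers the text being replaced (its start adjusted
                // for earlier changes in this batch) and the replacement text is taken
                // from the new snapshot.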
1730 let content_changes = buffer
1731 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1732 .map(|edit| {
1733 let edit_start = edit.new.start.0;
1734 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1735 let new_text = next_snapshot
1736 .text_for_range(edit.new.start.1..edit.new.end.1)
1737 .collect();
1738 lsp::TextDocumentContentChangeEvent {
1739 range: Some(lsp::Range::new(
1740 point_to_lsp(edit_start),
1741 point_to_lsp(edit_end),
1742 )),
1743 range_length: None,
1744 text: new_text,
1745 }
1746 })
1747 .collect();
1748
1749 buffer_snapshots.push((next_version, next_snapshot));
1750
1751 language_server
1752 .notify::<lsp::notification::DidChangeTextDocument>(
1753 lsp::DidChangeTextDocumentParams {
1754 text_document: lsp::VersionedTextDocumentIdentifier::new(
1755 uri,
1756 next_version,
1757 ),
1758 content_changes,
1759 },
1760 )
1761 .log_err();
1762 }
1763 BufferEvent::Saved => {
1764 let file = File::from_dyn(buffer.read(cx).file())?;
1765 let worktree_id = file.worktree_id(cx);
1766 let abs_path = file.as_local()?.abs_path(cx);
1767 let text_document = lsp::TextDocumentIdentifier {
1768 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1769 };
1770
1771 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1772 server
1773 .notify::<lsp::notification::DidSaveTextDocument>(
1774 lsp::DidSaveTextDocumentParams {
1775 text_document: text_document.clone(),
1776 text: None,
1777 },
1778 )
1779 .log_err();
1780 }
1781 }
1782 _ => {}
1783 }
1784
1785 None
1786 }
1787
1788 fn language_servers_for_worktree(
1789 &self,
1790 worktree_id: WorktreeId,
1791 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1792 self.language_servers.iter().filter_map(
1793 move |((language_server_worktree_id, _), server)| {
1794 if *language_server_worktree_id == worktree_id {
1795 Some(server)
1796 } else {
1797 None
1798 }
1799 },
1800 )
1801 }
1802
1803 fn assign_language_to_buffer(
1804 &mut self,
1805 buffer: &ModelHandle<Buffer>,
1806 cx: &mut ModelContext<Self>,
1807 ) -> Option<()> {
        // Pick a language for the buffer based on its full path. If one is found, assign
        // it to the buffer and start the corresponding language server for the buffer's
        // worktree (a no-op if that server has already been started).
1809 let full_path = buffer.read(cx).file()?.full_path(cx);
1810 let language = self.languages.select_language(&full_path)?;
1811 buffer.update(cx, |buffer, cx| {
1812 buffer.set_language(Some(language.clone()), cx);
1813 });
1814
1815 let file = File::from_dyn(buffer.read(cx).file())?;
1816 let worktree = file.worktree.read(cx).as_local()?;
1817 let worktree_id = worktree.id();
1818 let worktree_abs_path = worktree.abs_path().clone();
1819 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1820
1821 None
1822 }
1823
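    /// Starts a language server for the given worktree and language if one hasn't been
    /// started already (keyed by worktree id and adapter name). Once the server has
    /// initialized, this wires up diagnostics, progress, configuration, and
    /// workspace-edit handlers, announces the server to collaborators, and opens every
    /// already-open buffer that matches the language.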
1824 fn start_language_server(
1825 &mut self,
1826 worktree_id: WorktreeId,
1827 worktree_path: Arc<Path>,
1828 language: Arc<Language>,
1829 cx: &mut ModelContext<Self>,
1830 ) {
1831 let adapter = if let Some(adapter) = language.lsp_adapter() {
1832 adapter
1833 } else {
1834 return;
1835 };
1836 let key = (worktree_id, adapter.name());
1837 self.started_language_servers
1838 .entry(key.clone())
1839 .or_insert_with(|| {
1840 let server_id = post_inc(&mut self.next_language_server_id);
1841 let language_server = self.languages.start_language_server(
1842 server_id,
1843 language.clone(),
1844 worktree_path,
1845 self.client.http_client(),
1846 cx,
1847 );
1848 cx.spawn_weak(|this, mut cx| async move {
1849 let language_server = language_server?.await.log_err()?;
1850 let language_server = language_server
1851 .initialize(adapter.initialization_options())
1852 .await
1853 .log_err()?;
1854 let this = this.upgrade(&cx)?;
1855 let disk_based_diagnostics_progress_token =
1856 adapter.disk_based_diagnostics_progress_token();
1857
1858 language_server
1859 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1860 let this = this.downgrade();
1861 let adapter = adapter.clone();
1862 move |params, mut cx| {
1863 if let Some(this) = this.upgrade(&cx) {
1864 this.update(&mut cx, |this, cx| {
1865 this.on_lsp_diagnostics_published(
1866 server_id,
1867 params,
1868 &adapter,
1869 disk_based_diagnostics_progress_token,
1870 cx,
1871 );
1872 });
1873 }
1874 }
1875 })
1876 .detach();
1877
1878 language_server
1879 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1880 let settings = this
1881 .read_with(&cx, |this, _| this.language_server_settings.clone());
1882 move |params, _| {
1883 let settings = settings.lock().clone();
1884 async move {
1885 Ok(params
1886 .items
1887 .into_iter()
1888 .map(|item| {
1889 if let Some(section) = &item.section {
1890 settings
1891 .get(section)
1892 .cloned()
1893 .unwrap_or(serde_json::Value::Null)
1894 } else {
1895 settings.clone()
1896 }
1897 })
1898 .collect())
1899 }
1900 }
1901 })
1902 .detach();
1903
1904 language_server
1905 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1906 let this = this.downgrade();
1907 let adapter = adapter.clone();
1908 let language_server = language_server.clone();
1909 move |params, cx| {
1910 Self::on_lsp_workspace_edit(
1911 this,
1912 params,
1913 server_id,
1914 adapter.clone(),
1915 language_server.clone(),
1916 cx,
1917 )
1918 }
1919 })
1920 .detach();
1921
1922 language_server
1923 .on_notification::<lsp::notification::Progress, _>({
1924 let this = this.downgrade();
1925 move |params, mut cx| {
1926 if let Some(this) = this.upgrade(&cx) {
1927 this.update(&mut cx, |this, cx| {
1928 this.on_lsp_progress(
1929 params,
1930 server_id,
1931 disk_based_diagnostics_progress_token,
1932 cx,
1933 );
1934 });
1935 }
1936 }
1937 })
1938 .detach();
1939
1940 this.update(&mut cx, |this, cx| {
1941 this.language_servers
1942 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1943 this.language_server_statuses.insert(
1944 server_id,
1945 LanguageServerStatus {
1946 name: language_server.name().to_string(),
1947 pending_work: Default::default(),
1948 pending_diagnostic_updates: 0,
1949 },
1950 );
1951 language_server
1952 .notify::<lsp::notification::DidChangeConfiguration>(
1953 lsp::DidChangeConfigurationParams {
1954 settings: this.language_server_settings.lock().clone(),
1955 },
1956 )
1957 .ok();
1958
1959 if let Some(project_id) = this.shared_remote_id() {
1960 this.client
1961 .send(proto::StartLanguageServer {
1962 project_id,
1963 server: Some(proto::LanguageServer {
1964 id: server_id as u64,
1965 name: language_server.name().to_string(),
1966 }),
1967 })
1968 .log_err();
1969 }
1970
1971 // Tell the language server about every open buffer in the worktree that matches the language.
1972 for buffer in this.opened_buffers.values() {
1973 if let Some(buffer_handle) = buffer.upgrade(cx) {
1974 let buffer = buffer_handle.read(cx);
1975 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1976 file
1977 } else {
1978 continue;
1979 };
1980 let language = if let Some(language) = buffer.language() {
1981 language
1982 } else {
1983 continue;
1984 };
1985 if file.worktree.read(cx).id() != key.0
1986 || language.lsp_adapter().map(|a| a.name())
1987 != Some(key.1.clone())
1988 {
1989 continue;
1990 }
1991
1992 let file = file.as_local()?;
1993 let versions = this
1994 .buffer_snapshots
1995 .entry(buffer.remote_id())
1996 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1997 let (version, initial_snapshot) = versions.last().unwrap();
1998 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1999 let language_id = adapter.id_for_language(language.name().as_ref());
2000 language_server
2001 .notify::<lsp::notification::DidOpenTextDocument>(
2002 lsp::DidOpenTextDocumentParams {
2003 text_document: lsp::TextDocumentItem::new(
2004 uri,
2005 language_id.unwrap_or_default(),
2006 *version,
2007 initial_snapshot.text(),
2008 ),
2009 },
2010 )
2011 .log_err()?;
2012 buffer_handle.update(cx, |buffer, cx| {
2013 buffer.set_completion_triggers(
2014 language_server
2015 .capabilities()
2016 .completion_provider
2017 .as_ref()
2018 .and_then(|provider| {
2019 provider.trigger_characters.clone()
2020 })
                                            .unwrap_or_default(),
2022 cx,
2023 )
2024 });
2025 }
2026 }
2027
2028 cx.notify();
2029 Some(())
2030 });
2031
2032 Some(language_server)
2033 })
2034 });
2035 }
2036
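    /// Restarts the language servers backing the given buffers, deduplicated by worktree and language.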
2037 pub fn restart_language_servers_for_buffers(
2038 &mut self,
2039 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2040 cx: &mut ModelContext<Self>,
2041 ) -> Option<()> {
2042 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2043 .into_iter()
2044 .filter_map(|buffer| {
2045 let file = File::from_dyn(buffer.read(cx).file())?;
2046 let worktree = file.worktree.read(cx).as_local()?;
2047 let worktree_id = worktree.id();
2048 let worktree_abs_path = worktree.abs_path().clone();
2049 let full_path = file.full_path(cx);
2050 Some((worktree_id, worktree_abs_path, full_path))
2051 })
2052 .collect();
2053 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2054 let language = self.languages.select_language(&full_path)?;
2055 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2056 }
2057
2058 None
2059 }
2060
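    /// Shuts down the running language server for the worktree/adapter pair, then starts a fresh one.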
2061 fn restart_language_server(
2062 &mut self,
2063 worktree_id: WorktreeId,
2064 worktree_path: Arc<Path>,
2065 language: Arc<Language>,
2066 cx: &mut ModelContext<Self>,
2067 ) {
2068 let adapter = if let Some(adapter) = language.lsp_adapter() {
2069 adapter
2070 } else {
2071 return;
2072 };
2073 let key = (worktree_id, adapter.name());
2074 let server_to_shutdown = self.language_servers.remove(&key);
2075 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
2079 cx.spawn_weak(|this, mut cx| async move {
2080 if let Some(this) = this.upgrade(&cx) {
2081 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2082 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2083 shutdown_task.await;
2084 }
2085 }
2086
2087 this.update(&mut cx, |this, cx| {
2088 this.start_language_server(worktree_id, worktree_path, language, cx);
2089 });
2090 }
2091 })
2092 .detach();
2093 }
2094
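    /// Handles a `textDocument/publishDiagnostics` notification, letting the adapter
    /// post-process the diagnostics before they are applied to the project.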
2095 fn on_lsp_diagnostics_published(
2096 &mut self,
2097 server_id: usize,
2098 mut params: lsp::PublishDiagnosticsParams,
2099 adapter: &Arc<dyn LspAdapter>,
2100 disk_based_diagnostics_progress_token: Option<&str>,
2101 cx: &mut ModelContext<Self>,
2102 ) {
2103 adapter.process_diagnostics(&mut params);
2104 if disk_based_diagnostics_progress_token.is_none() {
2105 self.disk_based_diagnostics_started(cx);
2106 self.broadcast_language_server_update(
2107 server_id,
2108 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2109 proto::LspDiskBasedDiagnosticsUpdating {},
2110 ),
2111 );
2112 }
2113 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2114 .log_err();
2115 if disk_based_diagnostics_progress_token.is_none() {
2116 self.disk_based_diagnostics_finished(cx);
2117 self.broadcast_language_server_update(
2118 server_id,
2119 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2120 proto::LspDiskBasedDiagnosticsUpdated {},
2121 ),
2122 );
2123 }
2124 }
2125
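    /// Handles `$/progress` notifications, tracking disk-based diagnostic updates separately
    /// from ordinary work-done progress and broadcasting both to remote collaborators.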
2126 fn on_lsp_progress(
2127 &mut self,
2128 progress: lsp::ProgressParams,
2129 server_id: usize,
2130 disk_based_diagnostics_progress_token: Option<&str>,
2131 cx: &mut ModelContext<Self>,
2132 ) {
2133 let token = match progress.token {
2134 lsp::NumberOrString::String(token) => token,
2135 lsp::NumberOrString::Number(token) => {
2136 log::info!("skipping numeric progress token {}", token);
2137 return;
2138 }
2139 };
2140 let progress = match progress.value {
2141 lsp::ProgressParamsValue::WorkDone(value) => value,
2142 };
2143 let language_server_status =
2144 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2145 status
2146 } else {
2147 return;
2148 };
2149 match progress {
2150 lsp::WorkDoneProgress::Begin(_) => {
2151 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2152 language_server_status.pending_diagnostic_updates += 1;
2153 if language_server_status.pending_diagnostic_updates == 1 {
2154 self.disk_based_diagnostics_started(cx);
2155 self.broadcast_language_server_update(
2156 server_id,
2157 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2158 proto::LspDiskBasedDiagnosticsUpdating {},
2159 ),
2160 );
2161 }
2162 } else {
2163 self.on_lsp_work_start(server_id, token.clone(), cx);
2164 self.broadcast_language_server_update(
2165 server_id,
2166 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2167 token,
2168 }),
2169 );
2170 }
2171 }
2172 lsp::WorkDoneProgress::Report(report) => {
2173 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2174 self.on_lsp_work_progress(
2175 server_id,
2176 token.clone(),
2177 LanguageServerProgress {
2178 message: report.message.clone(),
2179 percentage: report.percentage.map(|p| p as usize),
2180 last_update_at: Instant::now(),
2181 },
2182 cx,
2183 );
2184 self.broadcast_language_server_update(
2185 server_id,
2186 proto::update_language_server::Variant::WorkProgress(
2187 proto::LspWorkProgress {
2188 token,
2189 message: report.message,
2190 percentage: report.percentage.map(|p| p as u32),
2191 },
2192 ),
2193 );
2194 }
2195 }
2196 lsp::WorkDoneProgress::End(_) => {
2197 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2198 language_server_status.pending_diagnostic_updates -= 1;
2199 if language_server_status.pending_diagnostic_updates == 0 {
2200 self.disk_based_diagnostics_finished(cx);
2201 self.broadcast_language_server_update(
2202 server_id,
2203 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2204 proto::LspDiskBasedDiagnosticsUpdated {},
2205 ),
2206 );
2207 }
2208 } else {
2209 self.on_lsp_work_end(server_id, token.clone(), cx);
2210 self.broadcast_language_server_update(
2211 server_id,
2212 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2213 token,
2214 }),
2215 );
2216 }
2217 }
2218 }
2219 }
2220
2221 fn on_lsp_work_start(
2222 &mut self,
2223 language_server_id: usize,
2224 token: String,
2225 cx: &mut ModelContext<Self>,
2226 ) {
2227 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2228 status.pending_work.insert(
2229 token,
2230 LanguageServerProgress {
2231 message: None,
2232 percentage: None,
2233 last_update_at: Instant::now(),
2234 },
2235 );
2236 cx.notify();
2237 }
2238 }
2239
2240 fn on_lsp_work_progress(
2241 &mut self,
2242 language_server_id: usize,
2243 token: String,
2244 progress: LanguageServerProgress,
2245 cx: &mut ModelContext<Self>,
2246 ) {
2247 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2248 status.pending_work.insert(token, progress);
2249 cx.notify();
2250 }
2251 }
2252
2253 fn on_lsp_work_end(
2254 &mut self,
2255 language_server_id: usize,
2256 token: String,
2257 cx: &mut ModelContext<Self>,
2258 ) {
2259 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2260 status.pending_work.remove(&token);
2261 cx.notify();
2262 }
2263 }
2264
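    /// Handles a `workspace/applyEdit` request from a language server by applying the edit
    /// locally and recording the resulting transaction for that server.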
2265 async fn on_lsp_workspace_edit(
2266 this: WeakModelHandle<Self>,
2267 params: lsp::ApplyWorkspaceEditParams,
2268 server_id: usize,
2269 adapter: Arc<dyn LspAdapter>,
2270 language_server: Arc<LanguageServer>,
2271 mut cx: AsyncAppContext,
2272 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2273 let this = this
2274 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2276 let transaction = Self::deserialize_workspace_edit(
2277 this.clone(),
2278 params.edit,
2279 true,
2280 adapter.clone(),
2281 language_server.clone(),
2282 &mut cx,
2283 )
2284 .await
2285 .log_err();
2286 this.update(&mut cx, |this, _| {
2287 if let Some(transaction) = transaction {
2288 this.last_workspace_edits_by_language_server
2289 .insert(server_id, transaction);
2290 }
2291 });
2292 Ok(lsp::ApplyWorkspaceEditResponse {
2293 applied: true,
2294 failed_change: None,
2295 failure_reason: None,
2296 })
2297 }
2298
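    /// Forwards a language server status change to collaborators if the project is shared.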
2299 fn broadcast_language_server_update(
2300 &self,
2301 language_server_id: usize,
2302 event: proto::update_language_server::Variant,
2303 ) {
2304 if let Some(project_id) = self.shared_remote_id() {
2305 self.client
2306 .send(proto::UpdateLanguageServer {
2307 project_id,
2308 language_server_id: language_server_id as u64,
2309 variant: Some(event),
2310 })
2311 .log_err();
2312 }
2313 }
2314
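    /// Notifies every running language server of the new settings and stores them for servers
    /// started later.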
2315 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2316 for (_, server) in self.language_servers.values() {
2317 server
2318 .notify::<lsp::notification::DidChangeConfiguration>(
2319 lsp::DidChangeConfigurationParams {
2320 settings: settings.clone(),
2321 },
2322 )
2323 .ok();
2324 }
2325 *self.language_server_settings.lock() = settings;
2326 }
2327
2328 pub fn language_server_statuses(
2329 &self,
2330 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2331 self.language_server_statuses.values()
2332 }
2333
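    /// Converts the published LSP diagnostics into grouped `DiagnosticEntry` values, pairing
    /// each primary diagnostic with its related supporting diagnostics, and stores them for
    /// the affected path.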
2334 pub fn update_diagnostics(
2335 &mut self,
2336 params: lsp::PublishDiagnosticsParams,
2337 disk_based_sources: &[&str],
2338 cx: &mut ModelContext<Self>,
2339 ) -> Result<()> {
2340 let abs_path = params
2341 .uri
2342 .to_file_path()
2343 .map_err(|_| anyhow!("URI is not a file"))?;
2344 let mut diagnostics = Vec::default();
2345 let mut primary_diagnostic_group_ids = HashMap::default();
2346 let mut sources_by_group_id = HashMap::default();
2347 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2349 let source = diagnostic.source.as_ref();
2350 let code = diagnostic.code.as_ref().map(|code| match code {
2351 lsp::NumberOrString::Number(code) => code.to_string(),
2352 lsp::NumberOrString::String(code) => code.clone(),
2353 });
2354 let range = range_from_lsp(diagnostic.range);
2355 let is_supporting = diagnostic
2356 .related_information
2357 .as_ref()
2358 .map_or(false, |infos| {
2359 infos.iter().any(|info| {
2360 primary_diagnostic_group_ids.contains_key(&(
2361 source,
2362 code.clone(),
2363 range_from_lsp(info.location.range),
2364 ))
2365 })
2366 });
2367
2368 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2369 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2370 });
2371
2372 if is_supporting {
2373 supporting_diagnostics.insert(
2374 (source, code.clone(), range),
2375 (diagnostic.severity, is_unnecessary),
2376 );
2377 } else {
2378 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2379 let is_disk_based = source.map_or(false, |source| {
2380 disk_based_sources.contains(&source.as_str())
2381 });
2382
2383 sources_by_group_id.insert(group_id, source);
2384 primary_diagnostic_group_ids
2385 .insert((source, code.clone(), range.clone()), group_id);
2386
2387 diagnostics.push(DiagnosticEntry {
2388 range,
2389 diagnostic: Diagnostic {
2390 code: code.clone(),
2391 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2392 message: diagnostic.message.clone(),
2393 group_id,
2394 is_primary: true,
2395 is_valid: true,
2396 is_disk_based,
2397 is_unnecessary,
2398 },
2399 });
2400 if let Some(infos) = &diagnostic.related_information {
2401 for info in infos {
2402 if info.location.uri == params.uri && !info.message.is_empty() {
2403 let range = range_from_lsp(info.location.range);
2404 diagnostics.push(DiagnosticEntry {
2405 range,
2406 diagnostic: Diagnostic {
2407 code: code.clone(),
2408 severity: DiagnosticSeverity::INFORMATION,
2409 message: info.message.clone(),
2410 group_id,
2411 is_primary: false,
2412 is_valid: true,
2413 is_disk_based,
2414 is_unnecessary: false,
2415 },
2416 });
2417 }
2418 }
2419 }
2420 }
2421 }
2422
2423 for entry in &mut diagnostics {
2424 let diagnostic = &mut entry.diagnostic;
2425 if !diagnostic.is_primary {
2426 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2427 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2428 source,
2429 diagnostic.code.clone(),
2430 entry.range.clone(),
2431 )) {
2432 if let Some(severity) = severity {
2433 diagnostic.severity = severity;
2434 }
2435 diagnostic.is_unnecessary = is_unnecessary;
2436 }
2437 }
2438 }
2439
2440 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2441 Ok(())
2442 }
2443
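    /// Applies diagnostics to the worktree entry for `abs_path` and to the open buffer, if any.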
2444 pub fn update_diagnostic_entries(
2445 &mut self,
2446 abs_path: PathBuf,
2447 version: Option<i32>,
2448 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2449 cx: &mut ModelContext<Project>,
2450 ) -> Result<(), anyhow::Error> {
2451 let (worktree, relative_path) = self
2452 .find_local_worktree(&abs_path, cx)
2453 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2454 if !worktree.read(cx).is_visible() {
2455 return Ok(());
2456 }
2457
2458 let project_path = ProjectPath {
2459 worktree_id: worktree.read(cx).id(),
2460 path: relative_path.into(),
2461 };
2462 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2463 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2464 }
2465
2466 let updated = worktree.update(cx, |worktree, cx| {
2467 worktree
2468 .as_local_mut()
2469 .ok_or_else(|| anyhow!("not a local worktree"))?
2470 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2471 })?;
2472 if updated {
2473 cx.emit(Event::DiagnosticsUpdated(project_path));
2474 }
2475 Ok(())
2476 }
2477
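    /// Sorts and clips the diagnostics against the buffer snapshot matching the reported LSP
    /// version, adjusting disk-based diagnostics for unsaved edits, then applies them to the buffer.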
2478 fn update_buffer_diagnostics(
2479 &mut self,
2480 buffer: &ModelHandle<Buffer>,
2481 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2482 version: Option<i32>,
2483 cx: &mut ModelContext<Self>,
2484 ) -> Result<()> {
2485 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2486 Ordering::Equal
2487 .then_with(|| b.is_primary.cmp(&a.is_primary))
2488 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2489 .then_with(|| a.severity.cmp(&b.severity))
2490 .then_with(|| a.message.cmp(&b.message))
2491 }
2492
2493 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2494
2495 diagnostics.sort_unstable_by(|a, b| {
2496 Ordering::Equal
2497 .then_with(|| a.range.start.cmp(&b.range.start))
2498 .then_with(|| b.range.end.cmp(&a.range.end))
2499 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2500 });
2501
2502 let mut sanitized_diagnostics = Vec::new();
2503 let edits_since_save = Patch::new(
2504 snapshot
2505 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2506 .collect(),
2507 );
2508 for entry in diagnostics {
2509 let start;
2510 let end;
2511 if entry.diagnostic.is_disk_based {
2512 // Some diagnostics are based on files on disk instead of buffers'
2513 // current contents. Adjust these diagnostics' ranges to reflect
2514 // any unsaved edits.
2515 start = edits_since_save.old_to_new(entry.range.start);
2516 end = edits_since_save.old_to_new(entry.range.end);
2517 } else {
2518 start = entry.range.start;
2519 end = entry.range.end;
2520 }
2521
2522 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2523 ..snapshot.clip_point_utf16(end, Bias::Right);
2524
2525 // Expand empty ranges by one character
2526 if range.start == range.end {
2527 range.end.column += 1;
2528 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2529 if range.start == range.end && range.end.column > 0 {
2530 range.start.column -= 1;
2531 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2532 }
2533 }
2534
2535 sanitized_diagnostics.push(DiagnosticEntry {
2536 range,
2537 diagnostic: entry.diagnostic,
2538 });
2539 }
2540 drop(edits_since_save);
2541
2542 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2543 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2544 Ok(())
2545 }
2546
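    /// Reloads the given dirty buffers from disk, delegating remote buffers to the host project.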
2547 pub fn reload_buffers(
2548 &self,
2549 buffers: HashSet<ModelHandle<Buffer>>,
2550 push_to_history: bool,
2551 cx: &mut ModelContext<Self>,
2552 ) -> Task<Result<ProjectTransaction>> {
2553 let mut local_buffers = Vec::new();
2554 let mut remote_buffers = None;
2555 for buffer_handle in buffers {
2556 let buffer = buffer_handle.read(cx);
2557 if buffer.is_dirty() {
2558 if let Some(file) = File::from_dyn(buffer.file()) {
2559 if file.is_local() {
2560 local_buffers.push(buffer_handle);
2561 } else {
2562 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2563 }
2564 }
2565 }
2566 }
2567
2568 let remote_buffers = self.remote_id().zip(remote_buffers);
2569 let client = self.client.clone();
2570
2571 cx.spawn(|this, mut cx| async move {
2572 let mut project_transaction = ProjectTransaction::default();
2573
2574 if let Some((project_id, remote_buffers)) = remote_buffers {
2575 let response = client
2576 .request(proto::ReloadBuffers {
2577 project_id,
2578 buffer_ids: remote_buffers
2579 .iter()
2580 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2581 .collect(),
2582 })
2583 .await?
2584 .transaction
2585 .ok_or_else(|| anyhow!("missing transaction"))?;
2586 project_transaction = this
2587 .update(&mut cx, |this, cx| {
2588 this.deserialize_project_transaction(response, push_to_history, cx)
2589 })
2590 .await?;
2591 }
2592
2593 for buffer in local_buffers {
2594 let transaction = buffer
2595 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2596 .await?;
2597 buffer.update(&mut cx, |buffer, cx| {
2598 if let Some(transaction) = transaction {
2599 if !push_to_history {
2600 buffer.forget_transaction(transaction.id);
2601 }
2602 project_transaction.0.insert(cx.handle(), transaction);
2603 }
2604 });
2605 }
2606
2607 Ok(project_transaction)
2608 })
2609 }
2610
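    /// Formats the given buffers via their language servers (or via the host for remote buffers)
    /// and returns the resulting per-buffer transactions.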
2611 pub fn format(
2612 &self,
2613 buffers: HashSet<ModelHandle<Buffer>>,
2614 push_to_history: bool,
2615 cx: &mut ModelContext<Project>,
2616 ) -> Task<Result<ProjectTransaction>> {
2617 let mut local_buffers = Vec::new();
2618 let mut remote_buffers = None;
2619 for buffer_handle in buffers {
2620 let buffer = buffer_handle.read(cx);
2621 if let Some(file) = File::from_dyn(buffer.file()) {
2622 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2623 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2624 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2625 }
2626 } else {
2627 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2628 }
2629 } else {
2630 return Task::ready(Ok(Default::default()));
2631 }
2632 }
2633
2634 let remote_buffers = self.remote_id().zip(remote_buffers);
2635 let client = self.client.clone();
2636
2637 cx.spawn(|this, mut cx| async move {
2638 let mut project_transaction = ProjectTransaction::default();
2639
2640 if let Some((project_id, remote_buffers)) = remote_buffers {
2641 let response = client
2642 .request(proto::FormatBuffers {
2643 project_id,
2644 buffer_ids: remote_buffers
2645 .iter()
2646 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2647 .collect(),
2648 })
2649 .await?
2650 .transaction
2651 .ok_or_else(|| anyhow!("missing transaction"))?;
2652 project_transaction = this
2653 .update(&mut cx, |this, cx| {
2654 this.deserialize_project_transaction(response, push_to_history, cx)
2655 })
2656 .await?;
2657 }
2658
2659 for (buffer, buffer_abs_path, language_server) in local_buffers {
2660 let text_document = lsp::TextDocumentIdentifier::new(
2661 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2662 );
2663 let capabilities = &language_server.capabilities();
2664 let tab_size = cx.update(|cx| {
2665 let language_name = buffer.read(cx).language().map(|language| language.name());
2666 cx.global::<Settings>().tab_size(language_name.as_deref())
2667 });
2668 let lsp_edits = if capabilities
2669 .document_formatting_provider
2670 .as_ref()
2671 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2672 {
2673 language_server
2674 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2675 text_document,
2676 options: lsp::FormattingOptions {
2677 tab_size,
2678 insert_spaces: true,
2679 insert_final_newline: Some(true),
2680 ..Default::default()
2681 },
2682 work_done_progress_params: Default::default(),
2683 })
2684 .await?
2685 } else if capabilities
2686 .document_range_formatting_provider
2687 .as_ref()
2688 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2689 {
2690 let buffer_start = lsp::Position::new(0, 0);
2691 let buffer_end =
2692 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2693 language_server
2694 .request::<lsp::request::RangeFormatting>(
2695 lsp::DocumentRangeFormattingParams {
2696 text_document,
2697 range: lsp::Range::new(buffer_start, buffer_end),
2698 options: lsp::FormattingOptions {
                                    tab_size,
2700 insert_spaces: true,
2701 insert_final_newline: Some(true),
2702 ..Default::default()
2703 },
2704 work_done_progress_params: Default::default(),
2705 },
2706 )
2707 .await?
2708 } else {
2709 continue;
2710 };
2711
2712 if let Some(lsp_edits) = lsp_edits {
2713 let edits = this
2714 .update(&mut cx, |this, cx| {
2715 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2716 })
2717 .await?;
2718 buffer.update(&mut cx, |buffer, cx| {
2719 buffer.finalize_last_transaction();
2720 buffer.start_transaction();
2721 for (range, text) in edits {
2722 buffer.edit([(range, text)], cx);
2723 }
2724 if buffer.end_transaction(cx).is_some() {
2725 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2726 if !push_to_history {
2727 buffer.forget_transaction(transaction.id);
2728 }
2729 project_transaction.0.insert(cx.handle(), transaction);
2730 }
2731 });
2732 }
2733 }
2734
2735 Ok(project_transaction)
2736 })
2737 }
2738
2739 pub fn definition<T: ToPointUtf16>(
2740 &self,
2741 buffer: &ModelHandle<Buffer>,
2742 position: T,
2743 cx: &mut ModelContext<Self>,
2744 ) -> Task<Result<Vec<Location>>> {
2745 let position = position.to_point_utf16(buffer.read(cx));
2746 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2747 }
2748
2749 pub fn references<T: ToPointUtf16>(
2750 &self,
2751 buffer: &ModelHandle<Buffer>,
2752 position: T,
2753 cx: &mut ModelContext<Self>,
2754 ) -> Task<Result<Vec<Location>>> {
2755 let position = position.to_point_utf16(buffer.read(cx));
2756 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2757 }
2758
2759 pub fn document_highlights<T: ToPointUtf16>(
2760 &self,
2761 buffer: &ModelHandle<Buffer>,
2762 position: T,
2763 cx: &mut ModelContext<Self>,
2764 ) -> Task<Result<Vec<DocumentHighlight>>> {
2765 let position = position.to_point_utf16(buffer.read(cx));
2766
2767 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2768 }
2769
2770 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2771 if self.is_local() {
2772 let mut requests = Vec::new();
2773 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2774 let worktree_id = *worktree_id;
2775 if let Some(worktree) = self
2776 .worktree_for_id(worktree_id, cx)
2777 .and_then(|worktree| worktree.read(cx).as_local())
2778 {
2779 let lsp_adapter = lsp_adapter.clone();
2780 let worktree_abs_path = worktree.abs_path().clone();
2781 requests.push(
2782 language_server
2783 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2784 query: query.to_string(),
2785 ..Default::default()
2786 })
2787 .log_err()
2788 .map(move |response| {
2789 (
2790 lsp_adapter,
2791 worktree_id,
2792 worktree_abs_path,
2793 response.unwrap_or_default(),
2794 )
2795 }),
2796 );
2797 }
2798 }
2799
2800 cx.spawn_weak(|this, cx| async move {
2801 let responses = futures::future::join_all(requests).await;
2802 let this = if let Some(this) = this.upgrade(&cx) {
2803 this
2804 } else {
2805 return Ok(Default::default());
2806 };
2807 this.read_with(&cx, |this, cx| {
2808 let mut symbols = Vec::new();
2809 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2810 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2811 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2812 let mut worktree_id = source_worktree_id;
2813 let path;
2814 if let Some((worktree, rel_path)) =
2815 this.find_local_worktree(&abs_path, cx)
2816 {
2817 worktree_id = worktree.read(cx).id();
2818 path = rel_path;
2819 } else {
2820 path = relativize_path(&worktree_abs_path, &abs_path);
2821 }
2822
2823 let label = this
2824 .languages
2825 .select_language(&path)
2826 .and_then(|language| {
2827 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2828 })
2829 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2830 let signature = this.symbol_signature(worktree_id, &path);
2831
2832 Some(Symbol {
2833 source_worktree_id,
2834 worktree_id,
2835 language_server_name: adapter.name(),
2836 name: lsp_symbol.name,
2837 kind: lsp_symbol.kind,
2838 label,
2839 path,
2840 range: range_from_lsp(lsp_symbol.location.range),
2841 signature,
2842 })
2843 }));
2844 }
2845 Ok(symbols)
2846 })
2847 })
2848 } else if let Some(project_id) = self.remote_id() {
2849 let request = self.client.request(proto::GetProjectSymbols {
2850 project_id,
2851 query: query.to_string(),
2852 });
2853 cx.spawn_weak(|this, cx| async move {
2854 let response = request.await?;
2855 let mut symbols = Vec::new();
2856 if let Some(this) = this.upgrade(&cx) {
2857 this.read_with(&cx, |this, _| {
2858 symbols.extend(
2859 response
2860 .symbols
2861 .into_iter()
2862 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2863 );
2864 })
2865 }
2866 Ok(symbols)
2867 })
2868 } else {
2869 Task::ready(Ok(Default::default()))
2870 }
2871 }
2872
2873 pub fn open_buffer_for_symbol(
2874 &mut self,
2875 symbol: &Symbol,
2876 cx: &mut ModelContext<Self>,
2877 ) -> Task<Result<ModelHandle<Buffer>>> {
2878 if self.is_local() {
2879 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2880 symbol.source_worktree_id,
2881 symbol.language_server_name.clone(),
2882 )) {
2883 server.clone()
2884 } else {
2885 return Task::ready(Err(anyhow!(
2886 "language server for worktree and language not found"
2887 )));
2888 };
2889
2890 let worktree_abs_path = if let Some(worktree_abs_path) = self
2891 .worktree_for_id(symbol.worktree_id, cx)
2892 .and_then(|worktree| worktree.read(cx).as_local())
2893 .map(|local_worktree| local_worktree.abs_path())
2894 {
2895 worktree_abs_path
2896 } else {
2897 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2898 };
2899 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2900 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2901 uri
2902 } else {
2903 return Task::ready(Err(anyhow!("invalid symbol path")));
2904 };
2905
2906 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2907 } else if let Some(project_id) = self.remote_id() {
2908 let request = self.client.request(proto::OpenBufferForSymbol {
2909 project_id,
2910 symbol: Some(serialize_symbol(symbol)),
2911 });
2912 cx.spawn(|this, mut cx| async move {
2913 let response = request.await?;
2914 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2915 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2916 .await
2917 })
2918 } else {
2919 Task::ready(Err(anyhow!("project does not have a remote id")))
2920 }
2921 }
2922
2923 pub fn hover<T: ToPointUtf16>(
2924 &self,
2925 buffer: &ModelHandle<Buffer>,
2926 position: T,
2927 cx: &mut ModelContext<Self>,
2928 ) -> Task<Result<Option<Hover>>> {
2929 // TODO: proper return type
2930 let position = position.to_point_utf16(buffer.read(cx));
2931 self.request_lsp(buffer.clone(), GetHover { position }, cx)
2932 }
2933
2934 pub fn completions<T: ToPointUtf16>(
2935 &self,
2936 source_buffer_handle: &ModelHandle<Buffer>,
2937 position: T,
2938 cx: &mut ModelContext<Self>,
2939 ) -> Task<Result<Vec<Completion>>> {
2940 let source_buffer_handle = source_buffer_handle.clone();
2941 let source_buffer = source_buffer_handle.read(cx);
2942 let buffer_id = source_buffer.remote_id();
2943 let language = source_buffer.language().cloned();
2944 let worktree;
2945 let buffer_abs_path;
2946 if let Some(file) = File::from_dyn(source_buffer.file()) {
2947 worktree = file.worktree.clone();
2948 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2949 } else {
2950 return Task::ready(Ok(Default::default()));
2951 };
2952
2953 let position = position.to_point_utf16(source_buffer);
2954 let anchor = source_buffer.anchor_after(position);
2955
2956 if worktree.read(cx).as_local().is_some() {
2957 let buffer_abs_path = buffer_abs_path.unwrap();
2958 let (_, lang_server) =
2959 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2960 server.clone()
2961 } else {
2962 return Task::ready(Ok(Default::default()));
2963 };
2964
2965 cx.spawn(|_, cx| async move {
2966 let completions = lang_server
2967 .request::<lsp::request::Completion>(lsp::CompletionParams {
2968 text_document_position: lsp::TextDocumentPositionParams::new(
2969 lsp::TextDocumentIdentifier::new(
2970 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2971 ),
2972 point_to_lsp(position),
2973 ),
2974 context: Default::default(),
2975 work_done_progress_params: Default::default(),
2976 partial_result_params: Default::default(),
2977 })
2978 .await
2979 .context("lsp completion request failed")?;
2980
2981 let completions = if let Some(completions) = completions {
2982 match completions {
2983 lsp::CompletionResponse::Array(completions) => completions,
2984 lsp::CompletionResponse::List(list) => list.items,
2985 }
2986 } else {
2987 Default::default()
2988 };
2989
2990 source_buffer_handle.read_with(&cx, |this, _| {
2991 let snapshot = this.snapshot();
2992 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2993 let mut range_for_token = None;
2994 Ok(completions
2995 .into_iter()
2996 .filter_map(|lsp_completion| {
2997 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2998 // If the language server provides a range to overwrite, then
2999 // check that the range is valid.
3000 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3001 let range = range_from_lsp(edit.range);
3002 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3003 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3004 if start != range.start || end != range.end {
3005 log::info!("completion out of expected range");
3006 return None;
3007 }
3008 (
3009 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3010 edit.new_text.clone(),
3011 )
3012 }
3013 // If the language server does not provide a range, then infer
3014 // the range based on the syntax tree.
3015 None => {
3016 if position != clipped_position {
3017 log::info!("completion out of expected range");
3018 return None;
3019 }
3020 let Range { start, end } = range_for_token
3021 .get_or_insert_with(|| {
3022 let offset = position.to_offset(&snapshot);
3023 snapshot
3024 .range_for_word_token_at(offset)
3025 .unwrap_or_else(|| offset..offset)
3026 })
3027 .clone();
3028 let text = lsp_completion
3029 .insert_text
3030 .as_ref()
3031 .unwrap_or(&lsp_completion.label)
3032 .clone();
3033 (
3034 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3035 text.clone(),
3036 )
3037 }
3038 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3039 log::info!("unsupported insert/replace completion");
3040 return None;
3041 }
3042 };
3043
3044 Some(Completion {
3045 old_range,
3046 new_text,
3047 label: language
3048 .as_ref()
3049 .and_then(|l| l.label_for_completion(&lsp_completion))
3050 .unwrap_or_else(|| {
3051 CodeLabel::plain(
3052 lsp_completion.label.clone(),
3053 lsp_completion.filter_text.as_deref(),
3054 )
3055 }),
3056 lsp_completion,
3057 })
3058 })
3059 .collect())
3060 })
3061 })
3062 } else if let Some(project_id) = self.remote_id() {
3063 let rpc = self.client.clone();
3064 let message = proto::GetCompletions {
3065 project_id,
3066 buffer_id,
3067 position: Some(language::proto::serialize_anchor(&anchor)),
3068 version: serialize_version(&source_buffer.version()),
3069 };
3070 cx.spawn_weak(|_, mut cx| async move {
3071 let response = rpc.request(message).await?;
3072
3073 source_buffer_handle
3074 .update(&mut cx, |buffer, _| {
3075 buffer.wait_for_version(deserialize_version(response.version))
3076 })
3077 .await;
3078
3079 response
3080 .completions
3081 .into_iter()
3082 .map(|completion| {
3083 language::proto::deserialize_completion(completion, language.as_ref())
3084 })
3085 .collect()
3086 })
3087 } else {
3088 Task::ready(Ok(Default::default()))
3089 }
3090 }
3091
3092 pub fn apply_additional_edits_for_completion(
3093 &self,
3094 buffer_handle: ModelHandle<Buffer>,
3095 completion: Completion,
3096 push_to_history: bool,
3097 cx: &mut ModelContext<Self>,
3098 ) -> Task<Result<Option<Transaction>>> {
3099 let buffer = buffer_handle.read(cx);
3100 let buffer_id = buffer.remote_id();
3101
3102 if self.is_local() {
3103 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3104 {
3105 server.clone()
3106 } else {
3107 return Task::ready(Ok(Default::default()));
3108 };
3109
3110 cx.spawn(|this, mut cx| async move {
3111 let resolved_completion = lang_server
3112 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3113 .await?;
3114 if let Some(edits) = resolved_completion.additional_text_edits {
3115 let edits = this
3116 .update(&mut cx, |this, cx| {
3117 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3118 })
3119 .await?;
3120 buffer_handle.update(&mut cx, |buffer, cx| {
3121 buffer.finalize_last_transaction();
3122 buffer.start_transaction();
3123 for (range, text) in edits {
3124 buffer.edit([(range, text)], cx);
3125 }
3126 let transaction = if buffer.end_transaction(cx).is_some() {
3127 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3128 if !push_to_history {
3129 buffer.forget_transaction(transaction.id);
3130 }
3131 Some(transaction)
3132 } else {
3133 None
3134 };
3135 Ok(transaction)
3136 })
3137 } else {
3138 Ok(None)
3139 }
3140 })
3141 } else if let Some(project_id) = self.remote_id() {
3142 let client = self.client.clone();
3143 cx.spawn(|_, mut cx| async move {
3144 let response = client
3145 .request(proto::ApplyCompletionAdditionalEdits {
3146 project_id,
3147 buffer_id,
3148 completion: Some(language::proto::serialize_completion(&completion)),
3149 })
3150 .await?;
3151
3152 if let Some(transaction) = response.transaction {
3153 let transaction = language::proto::deserialize_transaction(transaction)?;
3154 buffer_handle
3155 .update(&mut cx, |buffer, _| {
3156 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3157 })
3158 .await;
3159 if push_to_history {
3160 buffer_handle.update(&mut cx, |buffer, _| {
3161 buffer.push_transaction(transaction.clone(), Instant::now());
3162 });
3163 }
3164 Ok(Some(transaction))
3165 } else {
3166 Ok(None)
3167 }
3168 })
3169 } else {
3170 Task::ready(Err(anyhow!("project does not have a remote id")))
3171 }
3172 }
3173
3174 pub fn code_actions<T: Clone + ToOffset>(
3175 &self,
3176 buffer_handle: &ModelHandle<Buffer>,
3177 range: Range<T>,
3178 cx: &mut ModelContext<Self>,
3179 ) -> Task<Result<Vec<CodeAction>>> {
3180 let buffer_handle = buffer_handle.clone();
3181 let buffer = buffer_handle.read(cx);
3182 let snapshot = buffer.snapshot();
3183 let relevant_diagnostics = snapshot
3184 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3185 .map(|entry| entry.to_lsp_diagnostic_stub())
3186 .collect();
3187 let buffer_id = buffer.remote_id();
3188 let worktree;
3189 let buffer_abs_path;
3190 if let Some(file) = File::from_dyn(buffer.file()) {
3191 worktree = file.worktree.clone();
3192 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3193 } else {
3194 return Task::ready(Ok(Default::default()));
3195 };
3196 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3197
3198 if worktree.read(cx).as_local().is_some() {
3199 let buffer_abs_path = buffer_abs_path.unwrap();
3200 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3201 {
3202 server.clone()
3203 } else {
3204 return Task::ready(Ok(Default::default()));
3205 };
3206
3207 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3208 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3210 return Ok(Default::default());
3211 }
3212
3213 Ok(lang_server
3214 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3215 text_document: lsp::TextDocumentIdentifier::new(
3216 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3217 ),
3218 range: lsp_range,
3219 work_done_progress_params: Default::default(),
3220 partial_result_params: Default::default(),
3221 context: lsp::CodeActionContext {
3222 diagnostics: relevant_diagnostics,
3223 only: Some(vec![
3224 lsp::CodeActionKind::QUICKFIX,
3225 lsp::CodeActionKind::REFACTOR,
3226 lsp::CodeActionKind::REFACTOR_EXTRACT,
3227 lsp::CodeActionKind::SOURCE,
3228 ]),
3229 },
3230 })
3231 .await?
3232 .unwrap_or_default()
3233 .into_iter()
3234 .filter_map(|entry| {
3235 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3236 Some(CodeAction {
3237 range: range.clone(),
3238 lsp_action,
3239 })
3240 } else {
3241 None
3242 }
3243 })
3244 .collect())
3245 })
3246 } else if let Some(project_id) = self.remote_id() {
3247 let rpc = self.client.clone();
3248 let version = buffer.version();
3249 cx.spawn_weak(|_, mut cx| async move {
3250 let response = rpc
3251 .request(proto::GetCodeActions {
3252 project_id,
3253 buffer_id,
3254 start: Some(language::proto::serialize_anchor(&range.start)),
3255 end: Some(language::proto::serialize_anchor(&range.end)),
3256 version: serialize_version(&version),
3257 })
3258 .await?;
3259
3260 buffer_handle
3261 .update(&mut cx, |buffer, _| {
3262 buffer.wait_for_version(deserialize_version(response.version))
3263 })
3264 .await;
3265
3266 response
3267 .actions
3268 .into_iter()
3269 .map(language::proto::deserialize_code_action)
3270 .collect()
3271 })
3272 } else {
3273 Task::ready(Ok(Default::default()))
3274 }
3275 }
3276
3277 pub fn apply_code_action(
3278 &self,
3279 buffer_handle: ModelHandle<Buffer>,
3280 mut action: CodeAction,
3281 push_to_history: bool,
3282 cx: &mut ModelContext<Self>,
3283 ) -> Task<Result<ProjectTransaction>> {
3284 if self.is_local() {
3285 let buffer = buffer_handle.read(cx);
3286 let (lsp_adapter, lang_server) =
3287 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3288 server.clone()
3289 } else {
3290 return Task::ready(Ok(Default::default()));
3291 };
3292 let range = action.range.to_point_utf16(buffer);
3293
3294 cx.spawn(|this, mut cx| async move {
3295 if let Some(lsp_range) = action
3296 .lsp_action
3297 .data
3298 .as_mut()
3299 .and_then(|d| d.get_mut("codeActionParams"))
3300 .and_then(|d| d.get_mut("range"))
3301 {
3302 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3303 action.lsp_action = lang_server
3304 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3305 .await?;
3306 } else {
3307 let actions = this
3308 .update(&mut cx, |this, cx| {
3309 this.code_actions(&buffer_handle, action.range, cx)
3310 })
3311 .await?;
3312 action.lsp_action = actions
3313 .into_iter()
3314 .find(|a| a.lsp_action.title == action.lsp_action.title)
3315 .ok_or_else(|| anyhow!("code action is outdated"))?
3316 .lsp_action;
3317 }
3318
3319 if let Some(edit) = action.lsp_action.edit {
3320 Self::deserialize_workspace_edit(
3321 this,
3322 edit,
3323 push_to_history,
3324 lsp_adapter,
3325 lang_server,
3326 &mut cx,
3327 )
3328 .await
3329 } else if let Some(command) = action.lsp_action.command {
3330 this.update(&mut cx, |this, _| {
3331 this.last_workspace_edits_by_language_server
3332 .remove(&lang_server.server_id());
3333 });
3334 lang_server
3335 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3336 command: command.command,
3337 arguments: command.arguments.unwrap_or_default(),
3338 ..Default::default()
3339 })
3340 .await?;
3341 Ok(this.update(&mut cx, |this, _| {
3342 this.last_workspace_edits_by_language_server
3343 .remove(&lang_server.server_id())
3344 .unwrap_or_default()
3345 }))
3346 } else {
3347 Ok(ProjectTransaction::default())
3348 }
3349 })
3350 } else if let Some(project_id) = self.remote_id() {
3351 let client = self.client.clone();
3352 let request = proto::ApplyCodeAction {
3353 project_id,
3354 buffer_id: buffer_handle.read(cx).remote_id(),
3355 action: Some(language::proto::serialize_code_action(&action)),
3356 };
3357 cx.spawn(|this, mut cx| async move {
3358 let response = client
3359 .request(request)
3360 .await?
3361 .transaction
3362 .ok_or_else(|| anyhow!("missing transaction"))?;
3363 this.update(&mut cx, |this, cx| {
3364 this.deserialize_project_transaction(response, push_to_history, cx)
3365 })
3366 .await
3367 })
3368 } else {
3369 Task::ready(Err(anyhow!("project does not have a remote id")))
3370 }
3371 }
3372
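    /// Applies an LSP workspace edit: performs any file create/rename/delete operations and
    /// edits the affected buffers, collecting the edits into a single `ProjectTransaction`.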
3373 async fn deserialize_workspace_edit(
3374 this: ModelHandle<Self>,
3375 edit: lsp::WorkspaceEdit,
3376 push_to_history: bool,
3377 lsp_adapter: Arc<dyn LspAdapter>,
3378 language_server: Arc<LanguageServer>,
3379 cx: &mut AsyncAppContext,
3380 ) -> Result<ProjectTransaction> {
3381 let fs = this.read_with(cx, |this, _| this.fs.clone());
3382 let mut operations = Vec::new();
3383 if let Some(document_changes) = edit.document_changes {
3384 match document_changes {
3385 lsp::DocumentChanges::Edits(edits) => {
3386 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3387 }
3388 lsp::DocumentChanges::Operations(ops) => operations = ops,
3389 }
3390 } else if let Some(changes) = edit.changes {
3391 operations.extend(changes.into_iter().map(|(uri, edits)| {
3392 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3393 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3394 uri,
3395 version: None,
3396 },
3397 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3398 })
3399 }));
3400 }
3401
3402 let mut project_transaction = ProjectTransaction::default();
3403 for operation in operations {
3404 match operation {
3405 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3406 let abs_path = op
3407 .uri
3408 .to_file_path()
3409 .map_err(|_| anyhow!("can't convert URI to path"))?;
3410
3411 if let Some(parent_path) = abs_path.parent() {
3412 fs.create_dir(parent_path).await?;
3413 }
3414 if abs_path.ends_with("/") {
3415 fs.create_dir(&abs_path).await?;
3416 } else {
3417 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3418 .await?;
3419 }
3420 }
3421 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3422 let source_abs_path = op
3423 .old_uri
3424 .to_file_path()
3425 .map_err(|_| anyhow!("can't convert URI to path"))?;
3426 let target_abs_path = op
3427 .new_uri
3428 .to_file_path()
3429 .map_err(|_| anyhow!("can't convert URI to path"))?;
3430 fs.rename(
3431 &source_abs_path,
3432 &target_abs_path,
3433 op.options.map(Into::into).unwrap_or_default(),
3434 )
3435 .await?;
3436 }
3437 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3438 let abs_path = op
3439 .uri
3440 .to_file_path()
3441 .map_err(|_| anyhow!("can't convert URI to path"))?;
3442 let options = op.options.map(Into::into).unwrap_or_default();
3443 if abs_path.ends_with("/") {
3444 fs.remove_dir(&abs_path, options).await?;
3445 } else {
3446 fs.remove_file(&abs_path, options).await?;
3447 }
3448 }
3449 lsp::DocumentChangeOperation::Edit(op) => {
3450 let buffer_to_edit = this
3451 .update(cx, |this, cx| {
3452 this.open_local_buffer_via_lsp(
3453 op.text_document.uri,
3454 lsp_adapter.clone(),
3455 language_server.clone(),
3456 cx,
3457 )
3458 })
3459 .await?;
3460
3461 let edits = this
3462 .update(cx, |this, cx| {
3463 let edits = op.edits.into_iter().map(|edit| match edit {
3464 lsp::OneOf::Left(edit) => edit,
3465 lsp::OneOf::Right(edit) => edit.text_edit,
3466 });
3467 this.edits_from_lsp(
3468 &buffer_to_edit,
3469 edits,
3470 op.text_document.version,
3471 cx,
3472 )
3473 })
3474 .await?;
3475
3476 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3477 buffer.finalize_last_transaction();
3478 buffer.start_transaction();
3479 for (range, text) in edits {
3480 buffer.edit([(range, text)], cx);
3481 }
3482 let transaction = if buffer.end_transaction(cx).is_some() {
3483 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3484 if !push_to_history {
3485 buffer.forget_transaction(transaction.id);
3486 }
3487 Some(transaction)
3488 } else {
3489 None
3490 };
3491
3492 transaction
3493 });
3494 if let Some(transaction) = transaction {
3495 project_transaction.0.insert(buffer_to_edit, transaction);
3496 }
3497 }
3498 }
3499 }
3500
3501 Ok(project_transaction)
3502 }
3503
3504 pub fn prepare_rename<T: ToPointUtf16>(
3505 &self,
3506 buffer: ModelHandle<Buffer>,
3507 position: T,
3508 cx: &mut ModelContext<Self>,
3509 ) -> Task<Result<Option<Range<Anchor>>>> {
3510 let position = position.to_point_utf16(buffer.read(cx));
3511 self.request_lsp(buffer, PrepareRename { position }, cx)
3512 }
3513
3514 pub fn perform_rename<T: ToPointUtf16>(
3515 &self,
3516 buffer: ModelHandle<Buffer>,
3517 position: T,
3518 new_name: String,
3519 push_to_history: bool,
3520 cx: &mut ModelContext<Self>,
3521 ) -> Task<Result<ProjectTransaction>> {
3522 let position = position.to_point_utf16(buffer.read(cx));
3523 self.request_lsp(
3524 buffer,
3525 PerformRename {
3526 position,
3527 new_name,
3528 push_to_history,
3529 },
3530 cx,
3531 )
3532 }
3533
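    /// Searches the project for `query`, scanning visible worktree files in parallel on
    /// background threads and returning the matching ranges per buffer; remote projects
    /// delegate the search to the host.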
3534 pub fn search(
3535 &self,
3536 query: SearchQuery,
3537 cx: &mut ModelContext<Self>,
3538 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3539 if self.is_local() {
3540 let snapshots = self
3541 .visible_worktrees(cx)
3542 .filter_map(|tree| {
3543 let tree = tree.read(cx).as_local()?;
3544 Some(tree.snapshot())
3545 })
3546 .collect::<Vec<_>>();
3547
3548 let background = cx.background().clone();
3549 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3550 if path_count == 0 {
3551 return Task::ready(Ok(Default::default()));
3552 }
3553 let workers = background.num_cpus().min(path_count);
3554 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3555 cx.background()
3556 .spawn({
3557 let fs = self.fs.clone();
3558 let background = cx.background().clone();
3559 let query = query.clone();
3560 async move {
3561 let fs = &fs;
3562 let query = &query;
3563 let matching_paths_tx = &matching_paths_tx;
3564 let paths_per_worker = (path_count + workers - 1) / workers;
3565 let snapshots = &snapshots;
3566 background
3567 .scoped(|scope| {
3568 for worker_ix in 0..workers {
3569 let worker_start_ix = worker_ix * paths_per_worker;
3570 let worker_end_ix = worker_start_ix + paths_per_worker;
3571 scope.spawn(async move {
3572 let mut snapshot_start_ix = 0;
3573 let mut abs_path = PathBuf::new();
3574 for snapshot in snapshots {
3575 let snapshot_end_ix =
3576 snapshot_start_ix + snapshot.visible_file_count();
3577 if worker_end_ix <= snapshot_start_ix {
3578 break;
3579 } else if worker_start_ix > snapshot_end_ix {
3580 snapshot_start_ix = snapshot_end_ix;
3581 continue;
3582 } else {
3583 let start_in_snapshot = worker_start_ix
3584 .saturating_sub(snapshot_start_ix);
3585 let end_in_snapshot =
3586 cmp::min(worker_end_ix, snapshot_end_ix)
3587 - snapshot_start_ix;
3588
3589 for entry in snapshot
3590 .files(false, start_in_snapshot)
3591 .take(end_in_snapshot - start_in_snapshot)
3592 {
3593 if matching_paths_tx.is_closed() {
3594 break;
3595 }
3596
3597 abs_path.clear();
3598 abs_path.push(&snapshot.abs_path());
3599 abs_path.push(&entry.path);
3600 let matches = if let Some(file) =
3601 fs.open_sync(&abs_path).await.log_err()
3602 {
3603 query.detect(file).unwrap_or(false)
3604 } else {
3605 false
3606 };
3607
3608 if matches {
3609 let project_path =
3610 (snapshot.id(), entry.path.clone());
3611 if matching_paths_tx
3612 .send(project_path)
3613 .await
3614 .is_err()
3615 {
3616 break;
3617 }
3618 }
3619 }
3620
3621 snapshot_start_ix = snapshot_end_ix;
3622 }
3623 }
3624 });
3625 }
3626 })
3627 .await;
3628 }
3629 })
3630 .detach();
3631
3632 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3633 let open_buffers = self
3634 .opened_buffers
3635 .values()
3636 .filter_map(|b| b.upgrade(cx))
3637 .collect::<HashSet<_>>();
3638 cx.spawn(|this, cx| async move {
3639 for buffer in &open_buffers {
3640 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3641 buffers_tx.send((buffer.clone(), snapshot)).await?;
3642 }
3643
3644 let open_buffers = Rc::new(RefCell::new(open_buffers));
3645 while let Some(project_path) = matching_paths_rx.next().await {
3646 if buffers_tx.is_closed() {
3647 break;
3648 }
3649
3650 let this = this.clone();
3651 let open_buffers = open_buffers.clone();
3652 let buffers_tx = buffers_tx.clone();
3653 cx.spawn(|mut cx| async move {
3654 if let Some(buffer) = this
3655 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3656 .await
3657 .log_err()
3658 {
3659 if open_buffers.borrow_mut().insert(buffer.clone()) {
3660 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3661 buffers_tx.send((buffer, snapshot)).await?;
3662 }
3663 }
3664
3665 Ok::<_, anyhow::Error>(())
3666 })
3667 .detach();
3668 }
3669
3670 Ok::<_, anyhow::Error>(())
3671 })
3672 .detach_and_log_err(cx);
3673
3674 let background = cx.background().clone();
3675 cx.background().spawn(async move {
3676 let query = &query;
3677 let mut matched_buffers = Vec::new();
3678 for _ in 0..workers {
3679 matched_buffers.push(HashMap::default());
3680 }
3681 background
3682 .scoped(|scope| {
3683 for worker_matched_buffers in matched_buffers.iter_mut() {
3684 let mut buffers_rx = buffers_rx.clone();
3685 scope.spawn(async move {
3686 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3687 let buffer_matches = query
3688 .search(snapshot.as_rope())
3689 .await
3690 .iter()
3691 .map(|range| {
3692 snapshot.anchor_before(range.start)
3693 ..snapshot.anchor_after(range.end)
3694 })
3695 .collect::<Vec<_>>();
3696 if !buffer_matches.is_empty() {
3697 worker_matched_buffers
3698 .insert(buffer.clone(), buffer_matches);
3699 }
3700 }
3701 });
3702 }
3703 })
3704 .await;
3705 Ok(matched_buffers.into_iter().flatten().collect())
3706 })
3707 } else if let Some(project_id) = self.remote_id() {
3708 let request = self.client.request(query.to_proto(project_id));
3709 cx.spawn(|this, mut cx| async move {
3710 let response = request.await?;
3711 let mut result = HashMap::default();
3712 for location in response.locations {
3713 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3714 let target_buffer = this
3715 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3716 .await?;
3717 let start = location
3718 .start
3719 .and_then(deserialize_anchor)
3720 .ok_or_else(|| anyhow!("missing target start"))?;
3721 let end = location
3722 .end
3723 .and_then(deserialize_anchor)
3724 .ok_or_else(|| anyhow!("missing target end"))?;
3725 result
3726 .entry(target_buffer)
                        .or_default()
3728 .push(start..end)
3729 }
3730 Ok(result)
3731 })
3732 } else {
3733 Task::ready(Ok(Default::default()))
3734 }
3735 }
3736
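    /// Dispatches an LSP-backed request either to the buffer's local language server or,
    /// for remote projects, over RPC to the host.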
3737 fn request_lsp<R: LspCommand>(
3738 &self,
3739 buffer_handle: ModelHandle<Buffer>,
3740 request: R,
3741 cx: &mut ModelContext<Self>,
3742 ) -> Task<Result<R::Response>>
3743 where
3744 <R::LspRequest as lsp::request::Request>::Result: Send,
3745 {
3746 let buffer = buffer_handle.read(cx);
3747 if self.is_local() {
3748 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3749 if let Some((file, (_, language_server))) =
3750 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3751 {
3752 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3753 return cx.spawn(|this, cx| async move {
3754 if !request.check_capabilities(&language_server.capabilities()) {
3755 return Ok(Default::default());
3756 }
3757
3758 let response = language_server
3759 .request::<R::LspRequest>(lsp_params)
3760 .await
3761 .context("lsp request failed")?;
3762 request
3763 .response_from_lsp(response, this, buffer_handle, cx)
3764 .await
3765 });
3766 }
3767 } else if let Some(project_id) = self.remote_id() {
3768 let rpc = self.client.clone();
3769 let message = request.to_proto(project_id, buffer);
3770 return cx.spawn(|this, cx| async move {
3771 let response = rpc.request(message).await?;
3772 request
3773 .response_from_proto(response, this, buffer_handle, cx)
3774 .await
3775 });
3776 }
3777 Task::ready(Ok(Default::default()))
3778 }
3779
3780 pub fn find_or_create_local_worktree(
3781 &mut self,
3782 abs_path: impl AsRef<Path>,
3783 visible: bool,
3784 cx: &mut ModelContext<Self>,
3785 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3786 let abs_path = abs_path.as_ref();
3787 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3788 Task::ready(Ok((tree.clone(), relative_path.into())))
3789 } else {
3790 let worktree = self.create_local_worktree(abs_path, visible, cx);
3791 cx.foreground()
3792 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3793 }
3794 }
3795
3796 pub fn find_local_worktree(
3797 &self,
3798 abs_path: &Path,
3799 cx: &AppContext,
3800 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3801 for tree in self.worktrees(cx) {
3802 if let Some(relative_path) = tree
3803 .read(cx)
3804 .as_local()
3805 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3806 {
3807 return Some((tree.clone(), relative_path.into()));
3808 }
3809 }
3810 None
3811 }
3812
3813 pub fn is_shared(&self) -> bool {
3814 match &self.client_state {
3815 ProjectClientState::Local { is_shared, .. } => *is_shared,
3816 ProjectClientState::Remote { .. } => false,
3817 }
3818 }
3819
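/// Starts loading a local worktree rooted at `abs_path`, deduplicating concurrent
/// requests for the same path via `loading_local_worktrees`. Once loaded, the worktree
/// is added to the project and, if the project is currently shared, shared with
/// collaborators as well.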
3820 fn create_local_worktree(
3821 &mut self,
3822 abs_path: impl AsRef<Path>,
3823 visible: bool,
3824 cx: &mut ModelContext<Self>,
3825 ) -> Task<Result<ModelHandle<Worktree>>> {
3826 let fs = self.fs.clone();
3827 let client = self.client.clone();
3828 let next_entry_id = self.next_entry_id.clone();
3829 let path: Arc<Path> = abs_path.as_ref().into();
3830 let task = self
3831 .loading_local_worktrees
3832 .entry(path.clone())
3833 .or_insert_with(|| {
3834 cx.spawn(|project, mut cx| {
3835 async move {
3836 let worktree = Worktree::local(
3837 client.clone(),
3838 path.clone(),
3839 visible,
3840 fs,
3841 next_entry_id,
3842 &mut cx,
3843 )
3844 .await;
3845 project.update(&mut cx, |project, _| {
3846 project.loading_local_worktrees.remove(&path);
3847 });
3848 let worktree = worktree?;
3849
3850 let project_id = project.update(&mut cx, |project, cx| {
3851 project.add_worktree(&worktree, cx);
3852 project.shared_remote_id()
3853 });
3854
3855 if let Some(project_id) = project_id {
3856 worktree
3857 .update(&mut cx, |worktree, cx| {
3858 worktree.as_local_mut().unwrap().share(project_id, cx)
3859 })
3860 .await
3861 .log_err();
3862 }
3863
3864 Ok(worktree)
3865 }
3866 .map_err(Arc::new)
3867 })
3868 .shared()
3869 })
3870 .clone();
3871 cx.foreground().spawn(async move {
3872 match task.await {
3873 Ok(worktree) => Ok(worktree),
3874 Err(err) => Err(anyhow!("{}", err)),
3875 }
3876 })
3877 }
3878
3879 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3880 self.worktrees.retain(|worktree| {
3881 if let Some(worktree) = worktree.upgrade(cx) {
3882 let id = worktree.read(cx).id();
3883 if id == id_to_remove {
3884 cx.emit(Event::WorktreeRemoved(id));
3885 false
3886 } else {
3887 true
3888 }
3889 } else {
3890 false
3891 }
3892 });
3893 self.metadata_changed(true, cx);
3894 cx.notify();
3895 }
3896
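/// Registers a worktree with the project. Shared, visible, or remote worktrees are held
/// strongly; otherwise only a weak handle is kept so the worktree can be released once
/// it is no longer referenced elsewhere.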
3897 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3898 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3899 if worktree.read(cx).is_local() {
3900 cx.subscribe(&worktree, |this, worktree, _, cx| {
3901 this.update_local_worktree_buffers(worktree, cx);
3902 })
3903 .detach();
3904 }
3905
3906 let push_strong_handle = {
3907 let worktree = worktree.read(cx);
3908 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3909 };
3910 if push_strong_handle {
3911 self.worktrees
3912 .push(WorktreeHandle::Strong(worktree.clone()));
3913 } else {
3914 cx.observe_release(&worktree, |this, _, cx| {
3915 this.worktrees
3916 .retain(|worktree| worktree.upgrade(cx).is_some());
3917 cx.notify();
3918 })
3919 .detach();
3920 self.worktrees
3921 .push(WorktreeHandle::Weak(worktree.downgrade()));
3922 }
3923 self.metadata_changed(true, cx);
3924 cx.emit(Event::WorktreeAdded);
3925 cx.notify();
3926 }
3927
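/// Reconciles open buffers with the latest snapshot of a local worktree: updates each
/// buffer's file metadata (following renames by entry id or path), notifies remote
/// collaborators of the new file state, drops buffers whose handles are gone, and
/// re-registers renamed buffers with their language servers.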
3928 fn update_local_worktree_buffers(
3929 &mut self,
3930 worktree_handle: ModelHandle<Worktree>,
3931 cx: &mut ModelContext<Self>,
3932 ) {
3933 let snapshot = worktree_handle.read(cx).snapshot();
3934 let mut buffers_to_delete = Vec::new();
3935 let mut renamed_buffers = Vec::new();
3936 for (buffer_id, buffer) in &self.opened_buffers {
3937 if let Some(buffer) = buffer.upgrade(cx) {
3938 buffer.update(cx, |buffer, cx| {
3939 if let Some(old_file) = File::from_dyn(buffer.file()) {
3940 if old_file.worktree != worktree_handle {
3941 return;
3942 }
3943
3944 let new_file = if let Some(entry) = old_file
3945 .entry_id
3946 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3947 {
3948 File {
3949 is_local: true,
3950 entry_id: Some(entry.id),
3951 mtime: entry.mtime,
3952 path: entry.path.clone(),
3953 worktree: worktree_handle.clone(),
3954 }
3955 } else if let Some(entry) =
3956 snapshot.entry_for_path(old_file.path().as_ref())
3957 {
3958 File {
3959 is_local: true,
3960 entry_id: Some(entry.id),
3961 mtime: entry.mtime,
3962 path: entry.path.clone(),
3963 worktree: worktree_handle.clone(),
3964 }
3965 } else {
3966 File {
3967 is_local: true,
3968 entry_id: None,
3969 path: old_file.path().clone(),
3970 mtime: old_file.mtime(),
3971 worktree: worktree_handle.clone(),
3972 }
3973 };
3974
3975 let old_path = old_file.abs_path(cx);
3976 if new_file.abs_path(cx) != old_path {
3977 renamed_buffers.push((cx.handle(), old_path));
3978 }
3979
3980 if let Some(project_id) = self.shared_remote_id() {
3981 self.client
3982 .send(proto::UpdateBufferFile {
3983 project_id,
3984 buffer_id: *buffer_id as u64,
3985 file: Some(new_file.to_proto()),
3986 })
3987 .log_err();
3988 }
3989 buffer.file_updated(Box::new(new_file), cx).detach();
3990 }
3991 });
3992 } else {
3993 buffers_to_delete.push(*buffer_id);
3994 }
3995 }
3996
3997 for buffer_id in buffers_to_delete {
3998 self.opened_buffers.remove(&buffer_id);
3999 }
4000
4001 for (buffer, old_path) in renamed_buffers {
4002 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4003 self.assign_language_to_buffer(&buffer, cx);
4004 self.register_buffer_with_language_server(&buffer, cx);
4005 }
4006 }
4007
4008 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4009 let new_active_entry = entry.and_then(|project_path| {
4010 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4011 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4012 Some(entry.id)
4013 });
4014 if new_active_entry != self.active_entry {
4015 self.active_entry = new_active_entry;
4016 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4017 }
4018 }
4019
4020 pub fn is_running_disk_based_diagnostics(&self) -> bool {
4021 self.language_server_statuses
4022 .values()
4023 .any(|status| status.pending_diagnostic_updates > 0)
4024 }
4025
4026 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4027 let mut summary = DiagnosticSummary::default();
4028 for (_, path_summary) in self.diagnostic_summaries(cx) {
4029 summary.error_count += path_summary.error_count;
4030 summary.warning_count += path_summary.warning_count;
4031 }
4032 summary
4033 }
4034
4035 pub fn diagnostic_summaries<'a>(
4036 &'a self,
4037 cx: &'a AppContext,
4038 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4039 self.worktrees(cx).flat_map(move |worktree| {
4040 let worktree = worktree.read(cx);
4041 let worktree_id = worktree.id();
4042 worktree
4043 .diagnostic_summaries()
4044 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4045 })
4046 }
4047
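/// Emits `DiskBasedDiagnosticsStarted` only when the first pending diagnostic update
/// begins, i.e. when the total pending count across all language servers reaches one.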
4048 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4049 if self
4050 .language_server_statuses
4051 .values()
4052 .map(|status| status.pending_diagnostic_updates)
4053 .sum::<isize>()
4054 == 1
4055 {
4056 cx.emit(Event::DiskBasedDiagnosticsStarted);
4057 }
4058 }
4059
4060 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4061 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4062 if self
4063 .language_server_statuses
4064 .values()
4065 .map(|status| status.pending_diagnostic_updates)
4066 .sum::<isize>()
4067 == 0
4068 {
4069 cx.emit(Event::DiskBasedDiagnosticsFinished);
4070 }
4071 }
4072
4073 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4074 self.active_entry
4075 }
4076
4077 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4078 self.worktree_for_id(path.worktree_id, cx)?
4079 .read(cx)
4080 .entry_for_path(&path.path)
4081 .map(|entry| entry.id)
4082 }
4083
4084 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4085 let worktree = self.worktree_for_entry(entry_id, cx)?;
4086 let worktree = worktree.read(cx);
4087 let worktree_id = worktree.id();
4088 let path = worktree.entry_for_id(entry_id)?.path.clone();
4089 Some(ProjectPath { worktree_id, path })
4090 }
4091
4092 // RPC message handlers
4093
4094 async fn handle_request_join_project(
4095 this: ModelHandle<Self>,
4096 message: TypedEnvelope<proto::RequestJoinProject>,
4097 _: Arc<Client>,
4098 mut cx: AsyncAppContext,
4099 ) -> Result<()> {
4100 let user_id = message.payload.requester_id;
4101 if this.read_with(&cx, |project, _| {
4102 project.collaborators.values().any(|c| c.user.id == user_id)
4103 }) {
4104 this.update(&mut cx, |this, cx| {
4105 this.respond_to_join_request(user_id, true, cx)
4106 });
4107 } else {
4108 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4109 let user = user_store
4110 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4111 .await?;
4112 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4113 }
4114 Ok(())
4115 }
4116
4117 async fn handle_unregister_project(
4118 this: ModelHandle<Self>,
4119 _: TypedEnvelope<proto::UnregisterProject>,
4120 _: Arc<Client>,
4121 mut cx: AsyncAppContext,
4122 ) -> Result<()> {
4123 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4124 Ok(())
4125 }
4126
4127 async fn handle_project_unshared(
4128 this: ModelHandle<Self>,
4129 _: TypedEnvelope<proto::ProjectUnshared>,
4130 _: Arc<Client>,
4131 mut cx: AsyncAppContext,
4132 ) -> Result<()> {
4133 this.update(&mut cx, |this, cx| this.unshared(cx));
4134 Ok(())
4135 }
4136
4137 async fn handle_add_collaborator(
4138 this: ModelHandle<Self>,
4139 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4140 _: Arc<Client>,
4141 mut cx: AsyncAppContext,
4142 ) -> Result<()> {
4143 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4144 let collaborator = envelope
4145 .payload
4146 .collaborator
4147 .take()
4148 .ok_or_else(|| anyhow!("empty collaborator"))?;
4149
4150 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4151 this.update(&mut cx, |this, cx| {
4152 this.collaborators
4153 .insert(collaborator.peer_id, collaborator);
4154 cx.notify();
4155 });
4156
4157 Ok(())
4158 }
4159
4160 async fn handle_remove_collaborator(
4161 this: ModelHandle<Self>,
4162 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4163 _: Arc<Client>,
4164 mut cx: AsyncAppContext,
4165 ) -> Result<()> {
4166 this.update(&mut cx, |this, cx| {
4167 let peer_id = PeerId(envelope.payload.peer_id);
4168 let replica_id = this
4169 .collaborators
4170 .remove(&peer_id)
4171 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4172 .replica_id;
4173 for (_, buffer) in &this.opened_buffers {
4174 if let Some(buffer) = buffer.upgrade(cx) {
4175 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4176 }
4177 }
4178
4179 cx.emit(Event::CollaboratorLeft(peer_id));
4180 cx.notify();
4181 Ok(())
4182 })
4183 }
4184
4185 async fn handle_join_project_request_cancelled(
4186 this: ModelHandle<Self>,
4187 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4188 _: Arc<Client>,
4189 mut cx: AsyncAppContext,
4190 ) -> Result<()> {
4191 let user = this
4192 .update(&mut cx, |this, cx| {
4193 this.user_store.update(cx, |user_store, cx| {
4194 user_store.fetch_user(envelope.payload.requester_id, cx)
4195 })
4196 })
4197 .await?;
4198
4199 this.update(&mut cx, |_, cx| {
4200 cx.emit(Event::ContactCancelledJoinRequest(user));
4201 });
4202
4203 Ok(())
4204 }
4205
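/// Applies a project metadata update from the host: keeps worktrees that still exist,
/// creates remote worktrees for newly added ones, and emits removal events for the rest.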
4206 async fn handle_update_project(
4207 this: ModelHandle<Self>,
4208 envelope: TypedEnvelope<proto::UpdateProject>,
4209 client: Arc<Client>,
4210 mut cx: AsyncAppContext,
4211 ) -> Result<()> {
4212 this.update(&mut cx, |this, cx| {
4213 let replica_id = this.replica_id();
4214 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4215
4216 let mut old_worktrees_by_id = this
4217 .worktrees
4218 .drain(..)
4219 .filter_map(|worktree| {
4220 let worktree = worktree.upgrade(cx)?;
4221 Some((worktree.read(cx).id(), worktree))
4222 })
4223 .collect::<HashMap<_, _>>();
4224
4225 for worktree in envelope.payload.worktrees {
4226 if let Some(old_worktree) =
4227 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4228 {
4229 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4230 } else {
4231 let worktree = proto::Worktree {
4232 id: worktree.id,
4233 root_name: worktree.root_name,
4234 entries: Default::default(),
4235 diagnostic_summaries: Default::default(),
4236 visible: worktree.visible,
4237 scan_id: 0,
4238 };
4239 let (worktree, load_task) =
4240 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4241 this.add_worktree(&worktree, cx);
4242 load_task.detach();
4243 }
4244 }
4245
4246 this.metadata_changed(true, cx);
4247 for (id, _) in old_worktrees_by_id {
4248 cx.emit(Event::WorktreeRemoved(id));
4249 }
4250
4251 Ok(())
4252 })
4253 }
4254
4255 async fn handle_update_worktree(
4256 this: ModelHandle<Self>,
4257 envelope: TypedEnvelope<proto::UpdateWorktree>,
4258 _: Arc<Client>,
4259 mut cx: AsyncAppContext,
4260 ) -> Result<()> {
4261 this.update(&mut cx, |this, cx| {
4262 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4263 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4264 worktree.update(cx, |worktree, _| {
4265 let worktree = worktree.as_remote_mut().unwrap();
4266 worktree.update_from_remote(envelope)
4267 })?;
4268 }
4269 Ok(())
4270 })
4271 }
4272
4273 async fn handle_create_project_entry(
4274 this: ModelHandle<Self>,
4275 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4276 _: Arc<Client>,
4277 mut cx: AsyncAppContext,
4278 ) -> Result<proto::ProjectEntryResponse> {
4279 let worktree = this.update(&mut cx, |this, cx| {
4280 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4281 this.worktree_for_id(worktree_id, cx)
4282 .ok_or_else(|| anyhow!("worktree not found"))
4283 })?;
4284 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4285 let entry = worktree
4286 .update(&mut cx, |worktree, cx| {
4287 let worktree = worktree.as_local_mut().unwrap();
4288 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4289 worktree.create_entry(path, envelope.payload.is_directory, cx)
4290 })
4291 .await?;
4292 Ok(proto::ProjectEntryResponse {
4293 entry: Some((&entry).into()),
4294 worktree_scan_id: worktree_scan_id as u64,
4295 })
4296 }
4297
4298 async fn handle_rename_project_entry(
4299 this: ModelHandle<Self>,
4300 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4301 _: Arc<Client>,
4302 mut cx: AsyncAppContext,
4303 ) -> Result<proto::ProjectEntryResponse> {
4304 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4305 let worktree = this.read_with(&cx, |this, cx| {
4306 this.worktree_for_entry(entry_id, cx)
4307 .ok_or_else(|| anyhow!("worktree not found"))
4308 })?;
4309 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4310 let entry = worktree
4311 .update(&mut cx, |worktree, cx| {
4312 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4313 worktree
4314 .as_local_mut()
4315 .unwrap()
4316 .rename_entry(entry_id, new_path, cx)
4317 .ok_or_else(|| anyhow!("invalid entry"))
4318 })?
4319 .await?;
4320 Ok(proto::ProjectEntryResponse {
4321 entry: Some((&entry).into()),
4322 worktree_scan_id: worktree_scan_id as u64,
4323 })
4324 }
4325
4326 async fn handle_copy_project_entry(
4327 this: ModelHandle<Self>,
4328 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4329 _: Arc<Client>,
4330 mut cx: AsyncAppContext,
4331 ) -> Result<proto::ProjectEntryResponse> {
4332 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4333 let worktree = this.read_with(&cx, |this, cx| {
4334 this.worktree_for_entry(entry_id, cx)
4335 .ok_or_else(|| anyhow!("worktree not found"))
4336 })?;
4337 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4338 let entry = worktree
4339 .update(&mut cx, |worktree, cx| {
4340 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4341 worktree
4342 .as_local_mut()
4343 .unwrap()
4344 .copy_entry(entry_id, new_path, cx)
4345 .ok_or_else(|| anyhow!("invalid entry"))
4346 })?
4347 .await?;
4348 Ok(proto::ProjectEntryResponse {
4349 entry: Some((&entry).into()),
4350 worktree_scan_id: worktree_scan_id as u64,
4351 })
4352 }
4353
4354 async fn handle_delete_project_entry(
4355 this: ModelHandle<Self>,
4356 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4357 _: Arc<Client>,
4358 mut cx: AsyncAppContext,
4359 ) -> Result<proto::ProjectEntryResponse> {
4360 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4361 let worktree = this.read_with(&cx, |this, cx| {
4362 this.worktree_for_entry(entry_id, cx)
4363 .ok_or_else(|| anyhow!("worktree not found"))
4364 })?;
4365 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4366 worktree
4367 .update(&mut cx, |worktree, cx| {
4368 worktree
4369 .as_local_mut()
4370 .unwrap()
4371 .delete_entry(entry_id, cx)
4372 .ok_or_else(|| anyhow!("invalid entry"))
4373 })?
4374 .await?;
4375 Ok(proto::ProjectEntryResponse {
4376 entry: None,
4377 worktree_scan_id: worktree_scan_id as u64,
4378 })
4379 }
4380
4381 async fn handle_update_diagnostic_summary(
4382 this: ModelHandle<Self>,
4383 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4384 _: Arc<Client>,
4385 mut cx: AsyncAppContext,
4386 ) -> Result<()> {
4387 this.update(&mut cx, |this, cx| {
4388 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4389 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4390 if let Some(summary) = envelope.payload.summary {
4391 let project_path = ProjectPath {
4392 worktree_id,
4393 path: Path::new(&summary.path).into(),
4394 };
4395 worktree.update(cx, |worktree, _| {
4396 worktree
4397 .as_remote_mut()
4398 .unwrap()
4399 .update_diagnostic_summary(project_path.path.clone(), &summary);
4400 });
4401 cx.emit(Event::DiagnosticsUpdated(project_path));
4402 }
4403 }
4404 Ok(())
4405 })
4406 }
4407
4408 async fn handle_start_language_server(
4409 this: ModelHandle<Self>,
4410 envelope: TypedEnvelope<proto::StartLanguageServer>,
4411 _: Arc<Client>,
4412 mut cx: AsyncAppContext,
4413 ) -> Result<()> {
4414 let server = envelope
4415 .payload
4416 .server
4417 .ok_or_else(|| anyhow!("invalid server"))?;
4418 this.update(&mut cx, |this, cx| {
4419 this.language_server_statuses.insert(
4420 server.id as usize,
4421 LanguageServerStatus {
4422 name: server.name,
4423 pending_work: Default::default(),
4424 pending_diagnostic_updates: 0,
4425 },
4426 );
4427 cx.notify();
4428 });
4429 Ok(())
4430 }
4431
4432 async fn handle_update_language_server(
4433 this: ModelHandle<Self>,
4434 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4435 _: Arc<Client>,
4436 mut cx: AsyncAppContext,
4437 ) -> Result<()> {
4438 let language_server_id = envelope.payload.language_server_id as usize;
4439 match envelope
4440 .payload
4441 .variant
4442 .ok_or_else(|| anyhow!("invalid variant"))?
4443 {
4444 proto::update_language_server::Variant::WorkStart(payload) => {
4445 this.update(&mut cx, |this, cx| {
4446 this.on_lsp_work_start(language_server_id, payload.token, cx);
4447 })
4448 }
4449 proto::update_language_server::Variant::WorkProgress(payload) => {
4450 this.update(&mut cx, |this, cx| {
4451 this.on_lsp_work_progress(
4452 language_server_id,
4453 payload.token,
4454 LanguageServerProgress {
4455 message: payload.message,
4456 percentage: payload.percentage.map(|p| p as usize),
4457 last_update_at: Instant::now(),
4458 },
4459 cx,
4460 );
4461 })
4462 }
4463 proto::update_language_server::Variant::WorkEnd(payload) => {
4464 this.update(&mut cx, |this, cx| {
4465 this.on_lsp_work_end(language_server_id, payload.token, cx);
4466 })
4467 }
4468 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4469 this.update(&mut cx, |this, cx| {
4470 this.disk_based_diagnostics_started(cx);
4471 })
4472 }
4473 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4474 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4475 }
4476 }
4477
4478 Ok(())
4479 }
4480
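/// Applies incoming buffer operations. Operations for buffers that are still loading are
/// queued, and operations for buffers that have not been opened yet are stored so they
/// can be applied once the buffer arrives (only expected on remote projects).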
4481 async fn handle_update_buffer(
4482 this: ModelHandle<Self>,
4483 envelope: TypedEnvelope<proto::UpdateBuffer>,
4484 _: Arc<Client>,
4485 mut cx: AsyncAppContext,
4486 ) -> Result<()> {
4487 this.update(&mut cx, |this, cx| {
4488 let payload = envelope.payload.clone();
4489 let buffer_id = payload.buffer_id;
4490 let ops = payload
4491 .operations
4492 .into_iter()
4493 .map(language::proto::deserialize_operation)
4494 .collect::<Result<Vec<_>, _>>()?;
4495 let is_remote = this.is_remote();
4496 match this.opened_buffers.entry(buffer_id) {
4497 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4498 OpenBuffer::Strong(buffer) => {
4499 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4500 }
4501 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4502 OpenBuffer::Weak(_) => {}
4503 },
4504 hash_map::Entry::Vacant(e) => {
4505 assert!(
4506 is_remote,
4507 "received buffer update from {:?}",
4508 envelope.original_sender_id
4509 );
4510 e.insert(OpenBuffer::Loading(ops));
4511 }
4512 }
4513 Ok(())
4514 })
4515 }
4516
4517 async fn handle_update_buffer_file(
4518 this: ModelHandle<Self>,
4519 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4520 _: Arc<Client>,
4521 mut cx: AsyncAppContext,
4522 ) -> Result<()> {
4523 this.update(&mut cx, |this, cx| {
4524 let payload = envelope.payload.clone();
4525 let buffer_id = payload.buffer_id;
4526 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4527 let worktree = this
4528 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4529 .ok_or_else(|| anyhow!("no such worktree"))?;
4530 let file = File::from_proto(file, worktree.clone(), cx)?;
4531 let buffer = this
4532 .opened_buffers
4533 .get_mut(&buffer_id)
4534 .and_then(|b| b.upgrade(cx))
4535 .ok_or_else(|| anyhow!("no such buffer"))?;
4536 buffer.update(cx, |buffer, cx| {
4537 buffer.file_updated(Box::new(file), cx).detach();
4538 });
4539 Ok(())
4540 })
4541 }
4542
4543 async fn handle_save_buffer(
4544 this: ModelHandle<Self>,
4545 envelope: TypedEnvelope<proto::SaveBuffer>,
4546 _: Arc<Client>,
4547 mut cx: AsyncAppContext,
4548 ) -> Result<proto::BufferSaved> {
4549 let buffer_id = envelope.payload.buffer_id;
4550 let requested_version = deserialize_version(envelope.payload.version);
4551
4552 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4553 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4554 let buffer = this
4555 .opened_buffers
4556 .get(&buffer_id)
4557 .and_then(|buffer| buffer.upgrade(cx))
4558 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4559 Ok::<_, anyhow::Error>((project_id, buffer))
4560 })?;
4561 buffer
4562 .update(&mut cx, |buffer, _| {
4563 buffer.wait_for_version(requested_version)
4564 })
4565 .await;
4566
4567 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4568 Ok(proto::BufferSaved {
4569 project_id,
4570 buffer_id,
4571 version: serialize_version(&saved_version),
4572 mtime: Some(mtime.into()),
4573 })
4574 }
4575
4576 async fn handle_reload_buffers(
4577 this: ModelHandle<Self>,
4578 envelope: TypedEnvelope<proto::ReloadBuffers>,
4579 _: Arc<Client>,
4580 mut cx: AsyncAppContext,
4581 ) -> Result<proto::ReloadBuffersResponse> {
4582 let sender_id = envelope.original_sender_id()?;
4583 let reload = this.update(&mut cx, |this, cx| {
4584 let mut buffers = HashSet::default();
4585 for buffer_id in &envelope.payload.buffer_ids {
4586 buffers.insert(
4587 this.opened_buffers
4588 .get(buffer_id)
4589 .and_then(|buffer| buffer.upgrade(cx))
4590 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4591 );
4592 }
4593 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4594 })?;
4595
4596 let project_transaction = reload.await?;
4597 let project_transaction = this.update(&mut cx, |this, cx| {
4598 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4599 });
4600 Ok(proto::ReloadBuffersResponse {
4601 transaction: Some(project_transaction),
4602 })
4603 }
4604
4605 async fn handle_format_buffers(
4606 this: ModelHandle<Self>,
4607 envelope: TypedEnvelope<proto::FormatBuffers>,
4608 _: Arc<Client>,
4609 mut cx: AsyncAppContext,
4610 ) -> Result<proto::FormatBuffersResponse> {
4611 let sender_id = envelope.original_sender_id()?;
4612 let format = this.update(&mut cx, |this, cx| {
4613 let mut buffers = HashSet::default();
4614 for buffer_id in &envelope.payload.buffer_ids {
4615 buffers.insert(
4616 this.opened_buffers
4617 .get(buffer_id)
4618 .and_then(|buffer| buffer.upgrade(cx))
4619 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4620 );
4621 }
4622 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4623 })?;
4624
4625 let project_transaction = format.await?;
4626 let project_transaction = this.update(&mut cx, |this, cx| {
4627 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4628 });
4629 Ok(proto::FormatBuffersResponse {
4630 transaction: Some(project_transaction),
4631 })
4632 }
4633
4634 async fn handle_get_completions(
4635 this: ModelHandle<Self>,
4636 envelope: TypedEnvelope<proto::GetCompletions>,
4637 _: Arc<Client>,
4638 mut cx: AsyncAppContext,
4639 ) -> Result<proto::GetCompletionsResponse> {
4640 let position = envelope
4641 .payload
4642 .position
4643 .and_then(language::proto::deserialize_anchor)
4644 .ok_or_else(|| anyhow!("invalid position"))?;
4645 let version = deserialize_version(envelope.payload.version);
4646 let buffer = this.read_with(&cx, |this, cx| {
4647 this.opened_buffers
4648 .get(&envelope.payload.buffer_id)
4649 .and_then(|buffer| buffer.upgrade(cx))
4650 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4651 })?;
4652 buffer
4653 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4654 .await;
4655 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4656 let completions = this
4657 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4658 .await?;
4659
4660 Ok(proto::GetCompletionsResponse {
4661 completions: completions
4662 .iter()
4663 .map(language::proto::serialize_completion)
4664 .collect(),
4665 version: serialize_version(&version),
4666 })
4667 }
4668
4669 async fn handle_apply_additional_edits_for_completion(
4670 this: ModelHandle<Self>,
4671 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4672 _: Arc<Client>,
4673 mut cx: AsyncAppContext,
4674 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4675 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4676 let buffer = this
4677 .opened_buffers
4678 .get(&envelope.payload.buffer_id)
4679 .and_then(|buffer| buffer.upgrade(cx))
4680 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4681 let language = buffer.read(cx).language();
4682 let completion = language::proto::deserialize_completion(
4683 envelope
4684 .payload
4685 .completion
4686 .ok_or_else(|| anyhow!("invalid completion"))?,
4687 language,
4688 )?;
4689 Ok::<_, anyhow::Error>(
4690 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4691 )
4692 })?;
4693
4694 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4695 transaction: apply_additional_edits
4696 .await?
4697 .as_ref()
4698 .map(language::proto::serialize_transaction),
4699 })
4700 }
4701
4702 async fn handle_get_code_actions(
4703 this: ModelHandle<Self>,
4704 envelope: TypedEnvelope<proto::GetCodeActions>,
4705 _: Arc<Client>,
4706 mut cx: AsyncAppContext,
4707 ) -> Result<proto::GetCodeActionsResponse> {
4708 let start = envelope
4709 .payload
4710 .start
4711 .and_then(language::proto::deserialize_anchor)
4712 .ok_or_else(|| anyhow!("invalid start"))?;
4713 let end = envelope
4714 .payload
4715 .end
4716 .and_then(language::proto::deserialize_anchor)
4717 .ok_or_else(|| anyhow!("invalid end"))?;
4718 let buffer = this.update(&mut cx, |this, cx| {
4719 this.opened_buffers
4720 .get(&envelope.payload.buffer_id)
4721 .and_then(|buffer| buffer.upgrade(cx))
4722 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4723 })?;
4724 buffer
4725 .update(&mut cx, |buffer, _| {
4726 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4727 })
4728 .await;
4729
4730 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4731 let code_actions = this.update(&mut cx, |this, cx| {
4732 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4733 })?;
4734
4735 Ok(proto::GetCodeActionsResponse {
4736 actions: code_actions
4737 .await?
4738 .iter()
4739 .map(language::proto::serialize_code_action)
4740 .collect(),
4741 version: serialize_version(&version),
4742 })
4743 }
4744
4745 async fn handle_apply_code_action(
4746 this: ModelHandle<Self>,
4747 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4748 _: Arc<Client>,
4749 mut cx: AsyncAppContext,
4750 ) -> Result<proto::ApplyCodeActionResponse> {
4751 let sender_id = envelope.original_sender_id()?;
4752 let action = language::proto::deserialize_code_action(
4753 envelope
4754 .payload
4755 .action
4756 .ok_or_else(|| anyhow!("invalid action"))?,
4757 )?;
4758 let apply_code_action = this.update(&mut cx, |this, cx| {
4759 let buffer = this
4760 .opened_buffers
4761 .get(&envelope.payload.buffer_id)
4762 .and_then(|buffer| buffer.upgrade(cx))
4763 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4764 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4765 })?;
4766
4767 let project_transaction = apply_code_action.await?;
4768 let project_transaction = this.update(&mut cx, |this, cx| {
4769 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4770 });
4771 Ok(proto::ApplyCodeActionResponse {
4772 transaction: Some(project_transaction),
4773 })
4774 }
4775
4776 async fn handle_lsp_command<T: LspCommand>(
4777 this: ModelHandle<Self>,
4778 envelope: TypedEnvelope<T::ProtoRequest>,
4779 _: Arc<Client>,
4780 mut cx: AsyncAppContext,
4781 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4782 where
4783 <T::LspRequest as lsp::request::Request>::Result: Send,
4784 {
4785 let sender_id = envelope.original_sender_id()?;
4786 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4787 let buffer_handle = this.read_with(&cx, |this, _| {
4788 this.opened_buffers
4789 .get(&buffer_id)
4790 .and_then(|buffer| buffer.upgrade(&cx))
4791 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4792 })?;
4793 let request = T::from_proto(
4794 envelope.payload,
4795 this.clone(),
4796 buffer_handle.clone(),
4797 cx.clone(),
4798 )
4799 .await?;
4800 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4801 let response = this
4802 .update(&mut cx, |this, cx| {
4803 this.request_lsp(buffer_handle, request, cx)
4804 })
4805 .await?;
4806 this.update(&mut cx, |this, cx| {
4807 Ok(T::response_to_proto(
4808 response,
4809 this,
4810 sender_id,
4811 &buffer_version,
4812 cx,
4813 ))
4814 })
4815 }
4816
4817 async fn handle_get_project_symbols(
4818 this: ModelHandle<Self>,
4819 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4820 _: Arc<Client>,
4821 mut cx: AsyncAppContext,
4822 ) -> Result<proto::GetProjectSymbolsResponse> {
4823 let symbols = this
4824 .update(&mut cx, |this, cx| {
4825 this.symbols(&envelope.payload.query, cx)
4826 })
4827 .await?;
4828
4829 Ok(proto::GetProjectSymbolsResponse {
4830 symbols: symbols.iter().map(serialize_symbol).collect(),
4831 })
4832 }
4833
4834 async fn handle_search_project(
4835 this: ModelHandle<Self>,
4836 envelope: TypedEnvelope<proto::SearchProject>,
4837 _: Arc<Client>,
4838 mut cx: AsyncAppContext,
4839 ) -> Result<proto::SearchProjectResponse> {
4840 let peer_id = envelope.original_sender_id()?;
4841 let query = SearchQuery::from_proto(envelope.payload)?;
4842 let result = this
4843 .update(&mut cx, |this, cx| this.search(query, cx))
4844 .await?;
4845
4846 this.update(&mut cx, |this, cx| {
4847 let mut locations = Vec::new();
4848 for (buffer, ranges) in result {
4849 for range in ranges {
4850 let start = serialize_anchor(&range.start);
4851 let end = serialize_anchor(&range.end);
4852 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4853 locations.push(proto::Location {
4854 buffer: Some(buffer),
4855 start: Some(start),
4856 end: Some(end),
4857 });
4858 }
4859 }
4860 Ok(proto::SearchProjectResponse { locations })
4861 })
4862 }
4863
4864 async fn handle_open_buffer_for_symbol(
4865 this: ModelHandle<Self>,
4866 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4867 _: Arc<Client>,
4868 mut cx: AsyncAppContext,
4869 ) -> Result<proto::OpenBufferForSymbolResponse> {
4870 let peer_id = envelope.original_sender_id()?;
4871 let symbol = envelope
4872 .payload
4873 .symbol
4874 .ok_or_else(|| anyhow!("invalid symbol"))?;
4875 let symbol = this.read_with(&cx, |this, _| {
4876 let symbol = this.deserialize_symbol(symbol)?;
4877 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4878 if signature == symbol.signature {
4879 Ok(symbol)
4880 } else {
4881 Err(anyhow!("invalid symbol signature"))
4882 }
4883 })?;
4884 let buffer = this
4885 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4886 .await?;
4887
4888 Ok(proto::OpenBufferForSymbolResponse {
4889 buffer: Some(this.update(&mut cx, |this, cx| {
4890 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4891 })),
4892 })
4893 }
4894
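/// Hashes a symbol's worktree id and path together with this project's private nonce.
/// The result is included with symbols sent to guests and verified when a guest asks to
/// open one, so a forged path cannot be substituted.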
4895 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4896 let mut hasher = Sha256::new();
4897 hasher.update(worktree_id.to_proto().to_be_bytes());
4898 hasher.update(path.to_string_lossy().as_bytes());
4899 hasher.update(self.nonce.to_be_bytes());
4900 hasher.finalize().as_slice().try_into().unwrap()
4901 }
4902
4903 async fn handle_open_buffer_by_id(
4904 this: ModelHandle<Self>,
4905 envelope: TypedEnvelope<proto::OpenBufferById>,
4906 _: Arc<Client>,
4907 mut cx: AsyncAppContext,
4908 ) -> Result<proto::OpenBufferResponse> {
4909 let peer_id = envelope.original_sender_id()?;
4910 let buffer = this
4911 .update(&mut cx, |this, cx| {
4912 this.open_buffer_by_id(envelope.payload.id, cx)
4913 })
4914 .await?;
4915 this.update(&mut cx, |this, cx| {
4916 Ok(proto::OpenBufferResponse {
4917 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4918 })
4919 })
4920 }
4921
4922 async fn handle_open_buffer_by_path(
4923 this: ModelHandle<Self>,
4924 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4925 _: Arc<Client>,
4926 mut cx: AsyncAppContext,
4927 ) -> Result<proto::OpenBufferResponse> {
4928 let peer_id = envelope.original_sender_id()?;
4929 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4930 let open_buffer = this.update(&mut cx, |this, cx| {
4931 this.open_buffer(
4932 ProjectPath {
4933 worktree_id,
4934 path: PathBuf::from(envelope.payload.path).into(),
4935 },
4936 cx,
4937 )
4938 });
4939
4940 let buffer = open_buffer.await?;
4941 this.update(&mut cx, |this, cx| {
4942 Ok(proto::OpenBufferResponse {
4943 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4944 })
4945 })
4946 }
4947
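/// Converts a `ProjectTransaction` into its protobuf form for a given peer, serializing
/// each affected buffer (full state on first use, id only thereafter) alongside its
/// transaction.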
4948 fn serialize_project_transaction_for_peer(
4949 &mut self,
4950 project_transaction: ProjectTransaction,
4951 peer_id: PeerId,
4952 cx: &AppContext,
4953 ) -> proto::ProjectTransaction {
4954 let mut serialized_transaction = proto::ProjectTransaction {
4955 buffers: Default::default(),
4956 transactions: Default::default(),
4957 };
4958 for (buffer, transaction) in project_transaction.0 {
4959 serialized_transaction
4960 .buffers
4961 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4962 serialized_transaction
4963 .transactions
4964 .push(language::proto::serialize_transaction(&transaction));
4965 }
4966 serialized_transaction
4967 }
4968
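/// Reconstructs a `ProjectTransaction` received over RPC: resolves each buffer, waits
/// for the edits referenced by each transaction to arrive, and optionally pushes the
/// transactions onto the buffers' undo histories.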
4969 fn deserialize_project_transaction(
4970 &mut self,
4971 message: proto::ProjectTransaction,
4972 push_to_history: bool,
4973 cx: &mut ModelContext<Self>,
4974 ) -> Task<Result<ProjectTransaction>> {
4975 cx.spawn(|this, mut cx| async move {
4976 let mut project_transaction = ProjectTransaction::default();
4977 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4978 let buffer = this
4979 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4980 .await?;
4981 let transaction = language::proto::deserialize_transaction(transaction)?;
4982 project_transaction.0.insert(buffer, transaction);
4983 }
4984
4985 for (buffer, transaction) in &project_transaction.0 {
4986 buffer
4987 .update(&mut cx, |buffer, _| {
4988 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4989 })
4990 .await;
4991
4992 if push_to_history {
4993 buffer.update(&mut cx, |buffer, _| {
4994 buffer.push_transaction(transaction.clone(), Instant::now());
4995 });
4996 }
4997 }
4998
4999 Ok(project_transaction)
5000 })
5001 }
5002
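/// Serializes a buffer for transmission to a peer. The first time a buffer is sent to a
/// given peer its full state is included; afterwards only its id is sent.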
5003 fn serialize_buffer_for_peer(
5004 &mut self,
5005 buffer: &ModelHandle<Buffer>,
5006 peer_id: PeerId,
5007 cx: &AppContext,
5008 ) -> proto::Buffer {
5009 let buffer_id = buffer.read(cx).remote_id();
5010 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5011 if shared_buffers.insert(buffer_id) {
5012 proto::Buffer {
5013 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5014 }
5015 } else {
5016 proto::Buffer {
5017 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5018 }
5019 }
5020 }
5021
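/// Resolves a `proto::Buffer` into a local buffer handle. An id-only message waits until
/// the corresponding buffer has been opened, while a full-state message creates the
/// buffer (and its file, if any) and registers it with the project.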
5022 fn deserialize_buffer(
5023 &mut self,
5024 buffer: proto::Buffer,
5025 cx: &mut ModelContext<Self>,
5026 ) -> Task<Result<ModelHandle<Buffer>>> {
5027 let replica_id = self.replica_id();
5028
5029 let opened_buffer_tx = self.opened_buffer.0.clone();
5030 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5031 cx.spawn(|this, mut cx| async move {
5032 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5033 proto::buffer::Variant::Id(id) => {
5034 let buffer = loop {
5035 let buffer = this.read_with(&cx, |this, cx| {
5036 this.opened_buffers
5037 .get(&id)
5038 .and_then(|buffer| buffer.upgrade(cx))
5039 });
5040 if let Some(buffer) = buffer {
5041 break buffer;
5042 }
5043 opened_buffer_rx
5044 .next()
5045 .await
5046 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5047 };
5048 Ok(buffer)
5049 }
5050 proto::buffer::Variant::State(mut buffer) => {
5051 let mut buffer_worktree = None;
5052 let mut buffer_file = None;
5053 if let Some(file) = buffer.file.take() {
5054 this.read_with(&cx, |this, cx| {
5055 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5056 let worktree =
5057 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5058 anyhow!("no worktree found for id {}", file.worktree_id)
5059 })?;
5060 buffer_file =
5061 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5062 as Box<dyn language::File>);
5063 buffer_worktree = Some(worktree);
5064 Ok::<_, anyhow::Error>(())
5065 })?;
5066 }
5067
5068 let buffer = cx.add_model(|cx| {
5069 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5070 });
5071
5072 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5073
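// Signal the `opened_buffer` watch so that any task waiting in `deserialize_buffer`
// for a buffer by id can re-check `opened_buffers`.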
5074 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5075 Ok(buffer)
5076 }
5077 }
5078 })
5079 }
5080
5081 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5082 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5083 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5084 let start = serialized_symbol
5085 .start
5086 .ok_or_else(|| anyhow!("invalid start"))?;
5087 let end = serialized_symbol
5088 .end
5089 .ok_or_else(|| anyhow!("invalid end"))?;
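// The symbol kind is sent over the wire as an i32 (see `serialize_symbol`); reinterpret
// it as the symbol kind enum. This trusts the peer to have sent a valid discriminant.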
5090 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5091 let path = PathBuf::from(serialized_symbol.path);
5092 let language = self.languages.select_language(&path);
5093 Ok(Symbol {
5094 source_worktree_id,
5095 worktree_id,
5096 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5097 label: language
5098 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5099 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5100 name: serialized_symbol.name,
5101 path,
5102 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5103 kind,
5104 signature: serialized_symbol
5105 .signature
5106 .try_into()
5107 .map_err(|_| anyhow!("invalid signature"))?,
5108 })
5109 }
5110
5111 async fn handle_buffer_saved(
5112 this: ModelHandle<Self>,
5113 envelope: TypedEnvelope<proto::BufferSaved>,
5114 _: Arc<Client>,
5115 mut cx: AsyncAppContext,
5116 ) -> Result<()> {
5117 let version = deserialize_version(envelope.payload.version);
5118 let mtime = envelope
5119 .payload
5120 .mtime
5121 .ok_or_else(|| anyhow!("missing mtime"))?
5122 .into();
5123
5124 this.update(&mut cx, |this, cx| {
5125 let buffer = this
5126 .opened_buffers
5127 .get(&envelope.payload.buffer_id)
5128 .and_then(|buffer| buffer.upgrade(cx));
5129 if let Some(buffer) = buffer {
5130 buffer.update(cx, |buffer, cx| {
5131 buffer.did_save(version, mtime, None, cx);
5132 });
5133 }
5134 Ok(())
5135 })
5136 }
5137
5138 async fn handle_buffer_reloaded(
5139 this: ModelHandle<Self>,
5140 envelope: TypedEnvelope<proto::BufferReloaded>,
5141 _: Arc<Client>,
5142 mut cx: AsyncAppContext,
5143 ) -> Result<()> {
5144 let payload = envelope.payload.clone();
5145 let version = deserialize_version(payload.version);
5146 let mtime = payload
5147 .mtime
5148 .ok_or_else(|| anyhow!("missing mtime"))?
5149 .into();
5150 this.update(&mut cx, |this, cx| {
5151 let buffer = this
5152 .opened_buffers
5153 .get(&payload.buffer_id)
5154 .and_then(|buffer| buffer.upgrade(cx));
5155 if let Some(buffer) = buffer {
5156 buffer.update(cx, |buffer, cx| {
5157 buffer.did_reload(version, mtime, cx);
5158 });
5159 }
5160 Ok(())
5161 })
5162 }
5163
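/// Fuzzy-matches `query` against the paths of all visible worktrees on the background
/// executor, returning at most `max_results` matches. Root names are included in the
/// match text only when more than one worktree is visible.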
5164 pub fn match_paths<'a>(
5165 &self,
5166 query: &'a str,
5167 include_ignored: bool,
5168 smart_case: bool,
5169 max_results: usize,
5170 cancel_flag: &'a AtomicBool,
5171 cx: &AppContext,
5172 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5173 let worktrees = self
5174 .worktrees(cx)
5175 .filter(|worktree| worktree.read(cx).is_visible())
5176 .collect::<Vec<_>>();
5177 let include_root_name = worktrees.len() > 1;
5178 let candidate_sets = worktrees
5179 .into_iter()
5180 .map(|worktree| CandidateSet {
5181 snapshot: worktree.read(cx).snapshot(),
5182 include_ignored,
5183 include_root_name,
5184 })
5185 .collect::<Vec<_>>();
5186
5187 let background = cx.background().clone();
5188 async move {
5189 fuzzy::match_paths(
5190 candidate_sets.as_slice(),
5191 query,
5192 smart_case,
5193 max_results,
5194 cancel_flag,
5195 background,
5196 )
5197 .await
5198 }
5199 }
5200
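/// Converts a batch of LSP text edits into anchor-range edits against the buffer
/// snapshot that corresponds to `version`. Adjacent or newline-separated edits are
/// merged first, and multiline replacements are diffed so that anchors in unchanged
/// regions keep their positions.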
5201 fn edits_from_lsp(
5202 &mut self,
5203 buffer: &ModelHandle<Buffer>,
5204 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5205 version: Option<i32>,
5206 cx: &mut ModelContext<Self>,
5207 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5208 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5209 cx.background().spawn(async move {
5210 let snapshot = snapshot?;
5211 let mut lsp_edits = lsp_edits
5212 .into_iter()
5213 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5214 .collect::<Vec<_>>();
5215 lsp_edits.sort_by_key(|(range, _)| range.start);
5216
5217 let mut lsp_edits = lsp_edits.into_iter().peekable();
5218 let mut edits = Vec::new();
5219 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5220 // Combine any LSP edits that are adjacent.
5221 //
5222 // Also, combine LSP edits that are separated from each other by only
5223 // a newline. This is important because for some code actions,
5224 // Rust-analyzer rewrites the entire buffer via a series of edits that
5225 // are separated by unchanged newline characters.
5226 //
5227 // In order for the diffing logic below to work properly, any edits that
5228 // cancel each other out must be combined into one.
5229 while let Some((next_range, next_text)) = lsp_edits.peek() {
5230 if next_range.start > range.end {
5231 if next_range.start.row > range.end.row + 1
5232 || next_range.start.column > 0
5233 || snapshot.clip_point_utf16(
5234 PointUtf16::new(range.end.row, u32::MAX),
5235 Bias::Left,
5236 ) > range.end
5237 {
5238 break;
5239 }
5240 new_text.push('\n');
5241 }
5242 range.end = next_range.end;
5243 new_text.push_str(&next_text);
5244 lsp_edits.next();
5245 }
5246
5247 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5248 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5249 {
5250 return Err(anyhow!("invalid edits received from language server"));
5251 }
5252
5253 // For multiline edits, perform a diff of the old and new text so that
5254 // we can identify the changes more precisely, preserving the locations
5255 // of any anchors positioned in the unchanged regions.
5256 if range.end.row > range.start.row {
5257 let mut offset = range.start.to_offset(&snapshot);
5258 let old_text = snapshot.text_for_range(range).collect::<String>();
5259
5260 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5261 let mut moved_since_edit = true;
5262 for change in diff.iter_all_changes() {
5263 let tag = change.tag();
5264 let value = change.value();
5265 match tag {
5266 ChangeTag::Equal => {
5267 offset += value.len();
5268 moved_since_edit = true;
5269 }
5270 ChangeTag::Delete => {
5271 let start = snapshot.anchor_after(offset);
5272 let end = snapshot.anchor_before(offset + value.len());
5273 if moved_since_edit {
5274 edits.push((start..end, String::new()));
5275 } else {
5276 edits.last_mut().unwrap().0.end = end;
5277 }
5278 offset += value.len();
5279 moved_since_edit = false;
5280 }
5281 ChangeTag::Insert => {
5282 if moved_since_edit {
5283 let anchor = snapshot.anchor_after(offset);
5284 edits.push((anchor.clone()..anchor, value.to_string()));
5285 } else {
5286 edits.last_mut().unwrap().1.push_str(value);
5287 }
5288 moved_since_edit = false;
5289 }
5290 }
5291 }
5292 } else if range.end == range.start {
5293 let anchor = snapshot.anchor_after(range.start);
5294 edits.push((anchor.clone()..anchor, new_text));
5295 } else {
5296 let edit_start = snapshot.anchor_after(range.start);
5297 let edit_end = snapshot.anchor_before(range.end);
5298 edits.push((edit_start..edit_end, new_text));
5299 }
5300 }
5301
5302 Ok(edits)
5303 })
5304 }
5305
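/// Returns the buffer snapshot that was sent to the language server at `version`,
/// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the
/// requested one. With no version, the buffer's current text snapshot is returned.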
5306 fn buffer_snapshot_for_lsp_version(
5307 &mut self,
5308 buffer: &ModelHandle<Buffer>,
5309 version: Option<i32>,
5310 cx: &AppContext,
5311 ) -> Result<TextBufferSnapshot> {
5312 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5313
5314 if let Some(version) = version {
5315 let buffer_id = buffer.read(cx).remote_id();
5316 let snapshots = self
5317 .buffer_snapshots
5318 .get_mut(&buffer_id)
5319 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5320 let mut found_snapshot = None;
5321 snapshots.retain(|(snapshot_version, snapshot)| {
5322 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5323 false
5324 } else {
5325 if *snapshot_version == version {
5326 found_snapshot = Some(snapshot.clone());
5327 }
5328 true
5329 }
5330 });
5331
5332 found_snapshot.ok_or_else(|| {
5333 anyhow!(
5334 "snapshot not found for buffer {} at version {}",
5335 buffer_id,
5336 version
5337 )
5338 })
5339 } else {
5340 Ok(buffer.read(cx).text_snapshot())
5341 }
5342 }
5343
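/// Looks up the language server (and its adapter) responsible for a buffer, keyed by the
/// buffer's worktree and the language's LSP adapter name.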
5344 fn language_server_for_buffer(
5345 &self,
5346 buffer: &Buffer,
5347 cx: &AppContext,
5348 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5349 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5350 let worktree_id = file.worktree_id(cx);
5351 self.language_servers
5352 .get(&(worktree_id, language.lsp_adapter()?.name()))
5353 } else {
5354 None
5355 }
5356 }
5357}
5358
5359impl ProjectStore {
5360 pub fn new(db: Arc<Db>) -> Self {
5361 Self {
5362 db,
5363 projects: Default::default(),
5364 }
5365 }
5366
5367 pub fn projects<'a>(
5368 &'a self,
5369 cx: &'a AppContext,
5370 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5371 self.projects
5372 .iter()
5373 .filter_map(|project| project.upgrade(cx))
5374 }
5375
5376 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5377 if let Err(ix) = self
5378 .projects
5379 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5380 {
5381 self.projects.insert(ix, project);
5382 }
5383 cx.notify();
5384 }
5385
5386 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5387 let mut did_change = false;
5388 self.projects.retain(|project| {
5389 if project.is_upgradable(cx) {
5390 true
5391 } else {
5392 did_change = true;
5393 false
5394 }
5395 });
5396 if did_change {
5397 cx.notify();
5398 }
5399 }
5400}
5401
5402impl WorktreeHandle {
5403 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5404 match self {
5405 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5406 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5407 }
5408 }
5409}
5410
5411impl OpenBuffer {
5412 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5413 match self {
5414 OpenBuffer::Strong(handle) => Some(handle.clone()),
5415 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5416 OpenBuffer::Loading(_) => None,
5417 }
5418 }
5419}
5420
5421struct CandidateSet {
5422 snapshot: Snapshot,
5423 include_ignored: bool,
5424 include_root_name: bool,
5425}
5426
5427impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5428 type Candidates = CandidateSetIter<'a>;
5429
5430 fn id(&self) -> usize {
5431 self.snapshot.id().to_usize()
5432 }
5433
5434 fn len(&self) -> usize {
5435 if self.include_ignored {
5436 self.snapshot.file_count()
5437 } else {
5438 self.snapshot.visible_file_count()
5439 }
5440 }
5441
5442 fn prefix(&self) -> Arc<str> {
5443 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5444 self.snapshot.root_name().into()
5445 } else if self.include_root_name {
5446 format!("{}/", self.snapshot.root_name()).into()
5447 } else {
5448 "".into()
5449 }
5450 }
5451
5452 fn candidates(&'a self, start: usize) -> Self::Candidates {
5453 CandidateSetIter {
5454 traversal: self.snapshot.files(self.include_ignored, start),
5455 }
5456 }
5457}
5458
5459struct CandidateSetIter<'a> {
5460 traversal: Traversal<'a>,
5461}
5462
5463impl<'a> Iterator for CandidateSetIter<'a> {
5464 type Item = PathMatchCandidate<'a>;
5465
5466 fn next(&mut self) -> Option<Self::Item> {
5467 self.traversal.next().map(|entry| {
5468 if let EntryKind::File(char_bag) = entry.kind {
5469 PathMatchCandidate {
5470 path: &entry.path,
5471 char_bag,
5472 }
5473 } else {
5474 unreachable!()
5475 }
5476 })
5477 }
5478}
5479
5480impl Entity for ProjectStore {
5481 type Event = ();
5482}
5483
5484impl Entity for Project {
5485 type Event = Event;
5486
5487 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5488 self.project_store.update(cx, ProjectStore::prune_projects);
5489
5490 match &self.client_state {
5491 ProjectClientState::Local { remote_id_rx, .. } => {
5492 if let Some(project_id) = *remote_id_rx.borrow() {
5493 self.client
5494 .send(proto::UnregisterProject { project_id })
5495 .log_err();
5496 }
5497 }
5498 ProjectClientState::Remote { remote_id, .. } => {
5499 self.client
5500 .send(proto::LeaveProject {
5501 project_id: *remote_id,
5502 })
5503 .log_err();
5504 }
5505 }
5506 }
5507
5508 fn app_will_quit(
5509 &mut self,
5510 _: &mut MutableAppContext,
5511 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5512 let shutdown_futures = self
5513 .language_servers
5514 .drain()
5515 .filter_map(|(_, (_, server))| server.shutdown())
5516 .collect::<Vec<_>>();
5517 Some(
5518 async move {
5519 futures::future::join_all(shutdown_futures).await;
5520 }
5521 .boxed(),
5522 )
5523 }
5524}
5525
5526impl Collaborator {
5527 fn from_proto(
5528 message: proto::Collaborator,
5529 user_store: &ModelHandle<UserStore>,
5530 cx: &mut AsyncAppContext,
5531 ) -> impl Future<Output = Result<Self>> {
5532 let user = user_store.update(cx, |user_store, cx| {
5533 user_store.fetch_user(message.user_id, cx)
5534 });
5535
5536 async move {
5537 Ok(Self {
5538 peer_id: PeerId(message.peer_id),
5539 user: user.await?,
5540 replica_id: message.replica_id as ReplicaId,
5541 })
5542 }
5543 }
5544}
5545
5546impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5547 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5548 Self {
5549 worktree_id,
5550 path: path.as_ref().into(),
5551 }
5552 }
5553}
5554
5555impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5556 fn from(options: lsp::CreateFileOptions) -> Self {
5557 Self {
5558 overwrite: options.overwrite.unwrap_or(false),
5559 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5560 }
5561 }
5562}
5563
5564impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5565 fn from(options: lsp::RenameFileOptions) -> Self {
5566 Self {
5567 overwrite: options.overwrite.unwrap_or(false),
5568 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5569 }
5570 }
5571}
5572
5573impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5574 fn from(options: lsp::DeleteFileOptions) -> Self {
5575 Self {
5576 recursive: options.recursive.unwrap_or(false),
5577 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5578 }
5579 }
5580}
5581
5582fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5583 proto::Symbol {
5584 source_worktree_id: symbol.source_worktree_id.to_proto(),
5585 worktree_id: symbol.worktree_id.to_proto(),
5586 language_server_name: symbol.language_server_name.0.to_string(),
5587 name: symbol.name.clone(),
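// The protobuf `kind` field is an i32; reinterpret the symbol kind's discriminant directly.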
5588 kind: unsafe { mem::transmute(symbol.kind) },
5589 path: symbol.path.to_string_lossy().to_string(),
5590 start: Some(proto::Point {
5591 row: symbol.range.start.row,
5592 column: symbol.range.start.column,
5593 }),
5594 end: Some(proto::Point {
5595 row: symbol.range.end.row,
5596 column: symbol.range.end.column,
5597 }),
5598 signature: symbol.signature.to_vec(),
5599 }
5600}
5601
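/// Computes `path` relative to `base`, inserting `..` components where the two diverge.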
5602fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5603 let mut path_components = path.components();
5604 let mut base_components = base.components();
5605 let mut components: Vec<Component> = Vec::new();
5606 loop {
5607 match (path_components.next(), base_components.next()) {
5608 (None, None) => break,
5609 (Some(a), None) => {
5610 components.push(a);
5611 components.extend(path_components.by_ref());
5612 break;
5613 }
5614 (None, _) => components.push(Component::ParentDir),
5615 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5616 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5617 (Some(a), Some(_)) => {
5618 components.push(Component::ParentDir);
5619 for _ in base_components {
5620 components.push(Component::ParentDir);
5621 }
5622 components.push(a);
5623 components.extend(path_components.by_ref());
5624 break;
5625 }
5626 }
5627 }
5628 components.iter().map(|c| c.as_os_str()).collect()
5629}
5630
5631impl Item for Buffer {
5632 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5633 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5634 }
5635}
5636
5637#[cfg(test)]
5638mod tests {
5639 use crate::worktree::WorktreeHandle;
5640
5641 use super::{Event, *};
5642 use fs::RealFs;
5643 use futures::{future, StreamExt};
5644 use gpui::test::subscribe;
5645 use language::{
5646 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5647 OffsetRangeExt, Point, ToPoint,
5648 };
5649 use lsp::Url;
5650 use serde_json::json;
5651 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5652 use unindent::Unindent as _;
5653 use util::{assert_set_eq, test::temp_tree};
5654
5655 #[gpui::test]
5656 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5657 let dir = temp_tree(json!({
5658 "root": {
5659 "apple": "",
5660 "banana": {
5661 "carrot": {
5662 "date": "",
5663 "endive": "",
5664 }
5665 },
5666 "fennel": {
5667 "grape": "",
5668 }
5669 }
5670 }));
5671
5672 let root_link_path = dir.path().join("root_link");
5673 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5674 unix::fs::symlink(
5675 &dir.path().join("root/fennel"),
5676 &dir.path().join("root/finnochio"),
5677 )
5678 .unwrap();
5679
5680 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5681
5682 project.read_with(cx, |project, cx| {
5683 let tree = project.worktrees(cx).next().unwrap().read(cx);
5684 assert_eq!(tree.file_count(), 5);
5685 assert_eq!(
5686 tree.inode_for_path("fennel/grape"),
5687 tree.inode_for_path("finnochio/grape")
5688 );
5689 });
5690
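        // Fuzzy-match paths against the query "bna"; only the entries under "banana" match.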
5691 let cancel_flag = Default::default();
5692 let results = project
5693 .read_with(cx, |project, cx| {
5694 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5695 })
5696 .await;
5697 assert_eq!(
5698 results
5699 .into_iter()
5700 .map(|result| result.path)
5701 .collect::<Vec<Arc<Path>>>(),
5702 vec![
5703 PathBuf::from("banana/carrot/date").into(),
5704 PathBuf::from("banana/carrot/endive").into(),
5705 ]
5706 );
5707 }
5708
5709 #[gpui::test]
5710 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5711 cx.foreground().forbid_parking();
5712
5713 let mut rust_language = Language::new(
5714 LanguageConfig {
5715 name: "Rust".into(),
5716 path_suffixes: vec!["rs".to_string()],
5717 ..Default::default()
5718 },
5719 Some(tree_sitter_rust::language()),
5720 );
5721 let mut json_language = Language::new(
5722 LanguageConfig {
5723 name: "JSON".into(),
5724 path_suffixes: vec!["json".to_string()],
5725 ..Default::default()
5726 },
5727 None,
5728 );
5729 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5730 name: "the-rust-language-server",
5731 capabilities: lsp::ServerCapabilities {
5732 completion_provider: Some(lsp::CompletionOptions {
5733 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5734 ..Default::default()
5735 }),
5736 ..Default::default()
5737 },
5738 ..Default::default()
5739 });
5740 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5741 name: "the-json-language-server",
5742 capabilities: lsp::ServerCapabilities {
5743 completion_provider: Some(lsp::CompletionOptions {
5744 trigger_characters: Some(vec![":".to_string()]),
5745 ..Default::default()
5746 }),
5747 ..Default::default()
5748 },
5749 ..Default::default()
5750 });
5751
5752 let fs = FakeFs::new(cx.background());
5753 fs.insert_tree(
5754 "/the-root",
5755 json!({
5756 "test.rs": "const A: i32 = 1;",
5757 "test2.rs": "",
5758 "Cargo.toml": "a = 1",
5759 "package.json": "{\"a\": 1}",
5760 }),
5761 )
5762 .await;
5763
5764 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5765 project.update(cx, |project, _| {
5766 project.languages.add(Arc::new(rust_language));
5767 project.languages.add(Arc::new(json_language));
5768 });
5769
5770 // Open a buffer without an associated language server.
5771 let toml_buffer = project
5772 .update(cx, |project, cx| {
5773 project.open_local_buffer("/the-root/Cargo.toml", cx)
5774 })
5775 .await
5776 .unwrap();
5777
5778 // Open a buffer with an associated language server.
5779 let rust_buffer = project
5780 .update(cx, |project, cx| {
5781 project.open_local_buffer("/the-root/test.rs", cx)
5782 })
5783 .await
5784 .unwrap();
5785
5786 // A server is started up, and it is notified about Rust files.
5787 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5788 assert_eq!(
5789 fake_rust_server
5790 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5791 .await
5792 .text_document,
5793 lsp::TextDocumentItem {
5794 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5795 version: 0,
5796 text: "const A: i32 = 1;".to_string(),
5797 language_id: Default::default()
5798 }
5799 );
5800
5801 // The buffer is configured based on the language server's capabilities.
5802 rust_buffer.read_with(cx, |buffer, _| {
5803 assert_eq!(
5804 buffer.completion_triggers(),
5805 &[".".to_string(), "::".to_string()]
5806 );
5807 });
5808 toml_buffer.read_with(cx, |buffer, _| {
5809 assert!(buffer.completion_triggers().is_empty());
5810 });
5811
5812 // Edit a buffer. The changes are reported to the language server.
5813 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5814 assert_eq!(
5815 fake_rust_server
5816 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5817 .await
5818 .text_document,
5819 lsp::VersionedTextDocumentIdentifier::new(
5820 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5821 1
5822 )
5823 );
5824
5825 // Open a third buffer with a different associated language server.
5826 let json_buffer = project
5827 .update(cx, |project, cx| {
5828 project.open_local_buffer("/the-root/package.json", cx)
5829 })
5830 .await
5831 .unwrap();
5832
5833 // A json language server is started up and is only notified about the json buffer.
5834 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5835 assert_eq!(
5836 fake_json_server
5837 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5838 .await
5839 .text_document,
5840 lsp::TextDocumentItem {
5841 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5842 version: 0,
5843 text: "{\"a\": 1}".to_string(),
5844 language_id: Default::default()
5845 }
5846 );
5847
5848 // This buffer is configured based on the second language server's
5849 // capabilities.
5850 json_buffer.read_with(cx, |buffer, _| {
5851 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5852 });
5853
5854 // When opening another buffer whose language server is already running,
5855 // it is also configured based on the existing language server's capabilities.
5856 let rust_buffer2 = project
5857 .update(cx, |project, cx| {
5858 project.open_local_buffer("/the-root/test2.rs", cx)
5859 })
5860 .await
5861 .unwrap();
5862 rust_buffer2.read_with(cx, |buffer, _| {
5863 assert_eq!(
5864 buffer.completion_triggers(),
5865 &[".".to_string(), "::".to_string()]
5866 );
5867 });
5868
5869 // Changes are reported only to servers matching the buffer's language.
5870 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5871 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5872 assert_eq!(
5873 fake_rust_server
5874 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5875 .await
5876 .text_document,
5877 lsp::VersionedTextDocumentIdentifier::new(
5878 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5879 1
5880 )
5881 );
5882
5883 // Save notifications are reported to all servers.
5884 toml_buffer
5885 .update(cx, |buffer, cx| buffer.save(cx))
5886 .await
5887 .unwrap();
5888 assert_eq!(
5889 fake_rust_server
5890 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5891 .await
5892 .text_document,
5893 lsp::TextDocumentIdentifier::new(
5894 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5895 )
5896 );
5897 assert_eq!(
5898 fake_json_server
5899 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5900 .await
5901 .text_document,
5902 lsp::TextDocumentIdentifier::new(
5903 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5904 )
5905 );
5906
5907 // Renames are reported only to servers matching the buffer's language.
5908 fs.rename(
5909 Path::new("/the-root/test2.rs"),
5910 Path::new("/the-root/test3.rs"),
5911 Default::default(),
5912 )
5913 .await
5914 .unwrap();
5915 assert_eq!(
5916 fake_rust_server
5917 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5918 .await
5919 .text_document,
5920 lsp::TextDocumentIdentifier::new(
5921 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5922 ),
5923 );
5924 assert_eq!(
5925 fake_rust_server
5926 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5927 .await
5928 .text_document,
5929 lsp::TextDocumentItem {
5930 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5931 version: 0,
5932 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5933 language_id: Default::default()
5934 },
5935 );
5936
5937 rust_buffer2.update(cx, |buffer, cx| {
5938 buffer.update_diagnostics(
5939 DiagnosticSet::from_sorted_entries(
5940 vec![DiagnosticEntry {
5941 diagnostic: Default::default(),
5942 range: Anchor::MIN..Anchor::MAX,
5943 }],
5944 &buffer.snapshot(),
5945 ),
5946 cx,
5947 );
5948 assert_eq!(
5949 buffer
5950 .snapshot()
5951 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5952 .count(),
5953 1
5954 );
5955 });
5956
5957 // When the rename changes the extension of the file, the buffer gets closed on the old
5958 // language server and gets opened on the new one.
5959 fs.rename(
5960 Path::new("/the-root/test3.rs"),
5961 Path::new("/the-root/test3.json"),
5962 Default::default(),
5963 )
5964 .await
5965 .unwrap();
5966 assert_eq!(
5967 fake_rust_server
5968 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5969 .await
5970 .text_document,
5971 lsp::TextDocumentIdentifier::new(
5972 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5973 ),
5974 );
5975 assert_eq!(
5976 fake_json_server
5977 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5978 .await
5979 .text_document,
5980 lsp::TextDocumentItem {
5981 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5982 version: 0,
5983 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5984 language_id: Default::default()
5985 },
5986 );
5987
5988 // We clear the diagnostics, since the language has changed.
5989 rust_buffer2.read_with(cx, |buffer, _| {
5990 assert_eq!(
5991 buffer
5992 .snapshot()
5993 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5994 .count(),
5995 0
5996 );
5997 });
5998
5999 // The renamed file's version resets after changing language server.
6000 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6001 assert_eq!(
6002 fake_json_server
6003 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6004 .await
6005 .text_document,
6006 lsp::VersionedTextDocumentIdentifier::new(
6007 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6008 1
6009 )
6010 );
6011
6012 // Restart language servers
6013 project.update(cx, |project, cx| {
6014 project.restart_language_servers_for_buffers(
6015 vec![rust_buffer.clone(), json_buffer.clone()],
6016 cx,
6017 );
6018 });
6019
6020 let mut rust_shutdown_requests = fake_rust_server
6021 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6022 let mut json_shutdown_requests = fake_json_server
6023 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6024 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6025
6026 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6027 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6028
        // Ensure the Rust document is reopened in the new Rust language server
6030 assert_eq!(
6031 fake_rust_server
6032 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6033 .await
6034 .text_document,
6035 lsp::TextDocumentItem {
6036 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6037 version: 1,
6038 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6039 language_id: Default::default()
6040 }
6041 );
6042
        // Ensure the JSON documents are reopened in the new JSON language server
6044 assert_set_eq!(
6045 [
6046 fake_json_server
6047 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6048 .await
6049 .text_document,
6050 fake_json_server
6051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6052 .await
6053 .text_document,
6054 ],
6055 [
6056 lsp::TextDocumentItem {
6057 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6058 version: 0,
6059 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6060 language_id: Default::default()
6061 },
6062 lsp::TextDocumentItem {
6063 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6064 version: 1,
6065 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6066 language_id: Default::default()
6067 }
6068 ]
6069 );
6070
6071 // Close notifications are reported only to servers matching the buffer's language.
6072 cx.update(|_| drop(json_buffer));
6073 let close_message = lsp::DidCloseTextDocumentParams {
6074 text_document: lsp::TextDocumentIdentifier::new(
6075 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6076 ),
6077 };
6078 assert_eq!(
6079 fake_json_server
6080 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6081 .await,
6082 close_message,
6083 );
6084 }
6085
6086 #[gpui::test]
6087 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6088 cx.foreground().forbid_parking();
6089
6090 let fs = FakeFs::new(cx.background());
6091 fs.insert_tree(
6092 "/dir",
6093 json!({
6094 "a.rs": "let a = 1;",
6095 "b.rs": "let b = 2;"
6096 }),
6097 )
6098 .await;
6099
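        // Open each file as its own single-file worktree.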
6100 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6101
6102 let buffer_a = project
6103 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6104 .await
6105 .unwrap();
6106 let buffer_b = project
6107 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6108 .await
6109 .unwrap();
6110
6111 project.update(cx, |project, cx| {
6112 project
6113 .update_diagnostics(
6114 lsp::PublishDiagnosticsParams {
6115 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6116 version: None,
6117 diagnostics: vec![lsp::Diagnostic {
6118 range: lsp::Range::new(
6119 lsp::Position::new(0, 4),
6120 lsp::Position::new(0, 5),
6121 ),
6122 severity: Some(lsp::DiagnosticSeverity::ERROR),
6123 message: "error 1".to_string(),
6124 ..Default::default()
6125 }],
6126 },
6127 &[],
6128 cx,
6129 )
6130 .unwrap();
6131 project
6132 .update_diagnostics(
6133 lsp::PublishDiagnosticsParams {
6134 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6135 version: None,
6136 diagnostics: vec![lsp::Diagnostic {
6137 range: lsp::Range::new(
6138 lsp::Position::new(0, 4),
6139 lsp::Position::new(0, 5),
6140 ),
6141 severity: Some(lsp::DiagnosticSeverity::WARNING),
6142 message: "error 2".to_string(),
6143 ..Default::default()
6144 }],
6145 },
6146 &[],
6147 cx,
6148 )
6149 .unwrap();
6150 });
6151
6152 buffer_a.read_with(cx, |buffer, _| {
6153 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6154 assert_eq!(
6155 chunks
6156 .iter()
6157 .map(|(s, d)| (s.as_str(), *d))
6158 .collect::<Vec<_>>(),
6159 &[
6160 ("let ", None),
6161 ("a", Some(DiagnosticSeverity::ERROR)),
6162 (" = 1;", None),
6163 ]
6164 );
6165 });
6166 buffer_b.read_with(cx, |buffer, _| {
6167 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6168 assert_eq!(
6169 chunks
6170 .iter()
6171 .map(|(s, d)| (s.as_str(), *d))
6172 .collect::<Vec<_>>(),
6173 &[
6174 ("let ", None),
6175 ("b", Some(DiagnosticSeverity::WARNING)),
6176 (" = 2;", None),
6177 ]
6178 );
6179 });
6180 }
6181
6182 #[gpui::test]
6183 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6184 cx.foreground().forbid_parking();
6185
6186 let progress_token = "the-progress-token";
6187 let mut language = Language::new(
6188 LanguageConfig {
6189 name: "Rust".into(),
6190 path_suffixes: vec!["rs".to_string()],
6191 ..Default::default()
6192 },
6193 Some(tree_sitter_rust::language()),
6194 );
6195 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6196 disk_based_diagnostics_progress_token: Some(progress_token),
6197 disk_based_diagnostics_sources: &["disk"],
6198 ..Default::default()
6199 });
6200
6201 let fs = FakeFs::new(cx.background());
6202 fs.insert_tree(
6203 "/dir",
6204 json!({
6205 "a.rs": "fn a() { A }",
6206 "b.rs": "const y: i32 = 1",
6207 }),
6208 )
6209 .await;
6210
6211 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6212 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6213 let worktree_id =
6214 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6215
6216 // Cause worktree to start the fake language server
6217 let _buffer = project
6218 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6219 .await
6220 .unwrap();
6221
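        // Subscribe to project events so we can observe the disk-based diagnostics progress below.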
6222 let mut events = subscribe(&project, cx);
6223
6224 let mut fake_server = fake_servers.next().await.unwrap();
6225 fake_server.start_progress(progress_token).await;
6226 assert_eq!(
6227 events.next().await.unwrap(),
6228 Event::DiskBasedDiagnosticsStarted
6229 );
6230
6231 fake_server.start_progress(progress_token).await;
6232 fake_server.end_progress(progress_token).await;
6233 fake_server.start_progress(progress_token).await;
6234
6235 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6236 lsp::PublishDiagnosticsParams {
6237 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6238 version: None,
6239 diagnostics: vec![lsp::Diagnostic {
6240 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6241 severity: Some(lsp::DiagnosticSeverity::ERROR),
6242 message: "undefined variable 'A'".to_string(),
6243 ..Default::default()
6244 }],
6245 },
6246 );
6247 assert_eq!(
6248 events.next().await.unwrap(),
6249 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6250 );
6251
6252 fake_server.end_progress(progress_token).await;
6253 fake_server.end_progress(progress_token).await;
6254 assert_eq!(
6255 events.next().await.unwrap(),
6256 Event::DiskBasedDiagnosticsUpdated
6257 );
6258 assert_eq!(
6259 events.next().await.unwrap(),
6260 Event::DiskBasedDiagnosticsFinished
6261 );
6262
6263 let buffer = project
6264 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6265 .await
6266 .unwrap();
6267
6268 buffer.read_with(cx, |buffer, _| {
6269 let snapshot = buffer.snapshot();
6270 let diagnostics = snapshot
6271 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6272 .collect::<Vec<_>>();
6273 assert_eq!(
6274 diagnostics,
6275 &[DiagnosticEntry {
6276 range: Point::new(0, 9)..Point::new(0, 10),
6277 diagnostic: Diagnostic {
6278 severity: lsp::DiagnosticSeverity::ERROR,
6279 message: "undefined variable 'A'".to_string(),
6280 group_id: 0,
6281 is_primary: true,
6282 ..Default::default()
6283 }
6284 }]
6285 )
6286 });
6287
6288 // Ensure publishing empty diagnostics twice only results in one update event.
6289 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6290 lsp::PublishDiagnosticsParams {
6291 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6292 version: None,
6293 diagnostics: Default::default(),
6294 },
6295 );
6296 assert_eq!(
6297 events.next().await.unwrap(),
6298 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6299 );
6300
6301 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6302 lsp::PublishDiagnosticsParams {
6303 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6304 version: None,
6305 diagnostics: Default::default(),
6306 },
6307 );
6308 cx.foreground().run_until_parked();
6309 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6310 }
6311
6312 #[gpui::test]
6313 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6314 cx.foreground().forbid_parking();
6315
6316 let progress_token = "the-progress-token";
6317 let mut language = Language::new(
6318 LanguageConfig {
6319 path_suffixes: vec!["rs".to_string()],
6320 ..Default::default()
6321 },
6322 None,
6323 );
6324 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6325 disk_based_diagnostics_sources: &["disk"],
6326 disk_based_diagnostics_progress_token: Some(progress_token),
6327 ..Default::default()
6328 });
6329
6330 let fs = FakeFs::new(cx.background());
6331 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6332
6333 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6334 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6335
6336 let buffer = project
6337 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6338 .await
6339 .unwrap();
6340
6341 // Simulate diagnostics starting to update.
6342 let mut fake_server = fake_servers.next().await.unwrap();
6343 fake_server.start_progress(progress_token).await;
6344
6345 // Restart the server before the diagnostics finish updating.
6346 project.update(cx, |project, cx| {
6347 project.restart_language_servers_for_buffers([buffer], cx);
6348 });
6349 let mut events = subscribe(&project, cx);
6350
6351 // Simulate the newly started server sending more diagnostics.
6352 let mut fake_server = fake_servers.next().await.unwrap();
6353 fake_server.start_progress(progress_token).await;
6354 assert_eq!(
6355 events.next().await.unwrap(),
6356 Event::DiskBasedDiagnosticsStarted
6357 );
6358
6359 // All diagnostics are considered done, despite the old server's diagnostic
6360 // task never completing.
6361 fake_server.end_progress(progress_token).await;
6362 assert_eq!(
6363 events.next().await.unwrap(),
6364 Event::DiskBasedDiagnosticsUpdated
6365 );
6366 assert_eq!(
6367 events.next().await.unwrap(),
6368 Event::DiskBasedDiagnosticsFinished
6369 );
6370 project.read_with(cx, |project, _| {
6371 assert!(!project.is_running_disk_based_diagnostics());
6372 });
6373 }
6374
6375 #[gpui::test]
6376 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6377 cx.foreground().forbid_parking();
6378
6379 let mut language = Language::new(
6380 LanguageConfig {
6381 name: "Rust".into(),
6382 path_suffixes: vec!["rs".to_string()],
6383 ..Default::default()
6384 },
6385 Some(tree_sitter_rust::language()),
6386 );
6387 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6388 disk_based_diagnostics_sources: &["disk"],
6389 ..Default::default()
6390 });
6391
6392 let text = "
6393 fn a() { A }
6394 fn b() { BB }
6395 fn c() { CCC }
6396 "
6397 .unindent();
6398
6399 let fs = FakeFs::new(cx.background());
6400 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6401
6402 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6403 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6404
6405 let buffer = project
6406 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6407 .await
6408 .unwrap();
6409
6410 let mut fake_server = fake_servers.next().await.unwrap();
6411 let open_notification = fake_server
6412 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6413 .await;
6414
6415 // Edit the buffer, moving the content down
6416 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6417 let change_notification_1 = fake_server
6418 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6419 .await;
6420 assert!(
6421 change_notification_1.text_document.version > open_notification.text_document.version
6422 );
6423
6424 // Report some diagnostics for the initial version of the buffer
6425 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6426 lsp::PublishDiagnosticsParams {
6427 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6428 version: Some(open_notification.text_document.version),
6429 diagnostics: vec![
6430 lsp::Diagnostic {
6431 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6432 severity: Some(DiagnosticSeverity::ERROR),
6433 message: "undefined variable 'A'".to_string(),
6434 source: Some("disk".to_string()),
6435 ..Default::default()
6436 },
6437 lsp::Diagnostic {
6438 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6439 severity: Some(DiagnosticSeverity::ERROR),
6440 message: "undefined variable 'BB'".to_string(),
6441 source: Some("disk".to_string()),
6442 ..Default::default()
6443 },
6444 lsp::Diagnostic {
6445 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6446 severity: Some(DiagnosticSeverity::ERROR),
6447 source: Some("disk".to_string()),
6448 message: "undefined variable 'CCC'".to_string(),
6449 ..Default::default()
6450 },
6451 ],
6452 },
6453 );
6454
6455 // The diagnostics have moved down since they were created.
6456 buffer.next_notification(cx).await;
6457 buffer.read_with(cx, |buffer, _| {
6458 assert_eq!(
6459 buffer
6460 .snapshot()
6461 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6462 .collect::<Vec<_>>(),
6463 &[
6464 DiagnosticEntry {
6465 range: Point::new(3, 9)..Point::new(3, 11),
6466 diagnostic: Diagnostic {
6467 severity: DiagnosticSeverity::ERROR,
6468 message: "undefined variable 'BB'".to_string(),
6469 is_disk_based: true,
6470 group_id: 1,
6471 is_primary: true,
6472 ..Default::default()
6473 },
6474 },
6475 DiagnosticEntry {
6476 range: Point::new(4, 9)..Point::new(4, 12),
6477 diagnostic: Diagnostic {
6478 severity: DiagnosticSeverity::ERROR,
6479 message: "undefined variable 'CCC'".to_string(),
6480 is_disk_based: true,
6481 group_id: 2,
6482 is_primary: true,
6483 ..Default::default()
6484 }
6485 }
6486 ]
6487 );
6488 assert_eq!(
6489 chunks_with_diagnostics(buffer, 0..buffer.len()),
6490 [
6491 ("\n\nfn a() { ".to_string(), None),
6492 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6493 (" }\nfn b() { ".to_string(), None),
6494 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6495 (" }\nfn c() { ".to_string(), None),
6496 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6497 (" }\n".to_string(), None),
6498 ]
6499 );
6500 assert_eq!(
6501 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6502 [
6503 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6504 (" }\nfn c() { ".to_string(), None),
6505 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6506 ]
6507 );
6508 });
6509
6510 // Ensure overlapping diagnostics are highlighted correctly.
6511 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6512 lsp::PublishDiagnosticsParams {
6513 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6514 version: Some(open_notification.text_document.version),
6515 diagnostics: vec![
6516 lsp::Diagnostic {
6517 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6518 severity: Some(DiagnosticSeverity::ERROR),
6519 message: "undefined variable 'A'".to_string(),
6520 source: Some("disk".to_string()),
6521 ..Default::default()
6522 },
6523 lsp::Diagnostic {
6524 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6525 severity: Some(DiagnosticSeverity::WARNING),
6526 message: "unreachable statement".to_string(),
6527 source: Some("disk".to_string()),
6528 ..Default::default()
6529 },
6530 ],
6531 },
6532 );
6533
6534 buffer.next_notification(cx).await;
6535 buffer.read_with(cx, |buffer, _| {
6536 assert_eq!(
6537 buffer
6538 .snapshot()
6539 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6540 .collect::<Vec<_>>(),
6541 &[
6542 DiagnosticEntry {
6543 range: Point::new(2, 9)..Point::new(2, 12),
6544 diagnostic: Diagnostic {
6545 severity: DiagnosticSeverity::WARNING,
6546 message: "unreachable statement".to_string(),
6547 is_disk_based: true,
6548 group_id: 4,
6549 is_primary: true,
6550 ..Default::default()
6551 }
6552 },
6553 DiagnosticEntry {
6554 range: Point::new(2, 9)..Point::new(2, 10),
6555 diagnostic: Diagnostic {
6556 severity: DiagnosticSeverity::ERROR,
6557 message: "undefined variable 'A'".to_string(),
6558 is_disk_based: true,
6559 group_id: 3,
6560 is_primary: true,
6561 ..Default::default()
6562 },
6563 }
6564 ]
6565 );
6566 assert_eq!(
6567 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6568 [
6569 ("fn a() { ".to_string(), None),
6570 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6571 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6572 ("\n".to_string(), None),
6573 ]
6574 );
6575 assert_eq!(
6576 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6577 [
6578 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6579 ("\n".to_string(), None),
6580 ]
6581 );
6582 });
6583
6584 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6585 // changes since the last save.
6586 buffer.update(cx, |buffer, cx| {
6587 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6588 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6589 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6590 });
6591 let change_notification_2 = fake_server
6592 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6593 .await;
6594 assert!(
6595 change_notification_2.text_document.version
6596 > change_notification_1.text_document.version
6597 );
6598
6599 // Handle out-of-order diagnostics
6600 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6601 lsp::PublishDiagnosticsParams {
6602 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6603 version: Some(change_notification_2.text_document.version),
6604 diagnostics: vec![
6605 lsp::Diagnostic {
6606 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6607 severity: Some(DiagnosticSeverity::ERROR),
6608 message: "undefined variable 'BB'".to_string(),
6609 source: Some("disk".to_string()),
6610 ..Default::default()
6611 },
6612 lsp::Diagnostic {
6613 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6614 severity: Some(DiagnosticSeverity::WARNING),
6615 message: "undefined variable 'A'".to_string(),
6616 source: Some("disk".to_string()),
6617 ..Default::default()
6618 },
6619 ],
6620 },
6621 );
6622
6623 buffer.next_notification(cx).await;
6624 buffer.read_with(cx, |buffer, _| {
6625 assert_eq!(
6626 buffer
6627 .snapshot()
6628 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6629 .collect::<Vec<_>>(),
6630 &[
6631 DiagnosticEntry {
6632 range: Point::new(2, 21)..Point::new(2, 22),
6633 diagnostic: Diagnostic {
6634 severity: DiagnosticSeverity::WARNING,
6635 message: "undefined variable 'A'".to_string(),
6636 is_disk_based: true,
6637 group_id: 6,
6638 is_primary: true,
6639 ..Default::default()
6640 }
6641 },
6642 DiagnosticEntry {
6643 range: Point::new(3, 9)..Point::new(3, 14),
6644 diagnostic: Diagnostic {
6645 severity: DiagnosticSeverity::ERROR,
6646 message: "undefined variable 'BB'".to_string(),
6647 is_disk_based: true,
6648 group_id: 5,
6649 is_primary: true,
6650 ..Default::default()
6651 },
6652 }
6653 ]
6654 );
6655 });
6656 }
6657
6658 #[gpui::test]
6659 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6660 cx.foreground().forbid_parking();
6661
6662 let text = concat!(
6663 "let one = ;\n", //
6664 "let two = \n",
6665 "let three = 3;\n",
6666 );
6667
6668 let fs = FakeFs::new(cx.background());
6669 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6670
6671 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6672 let buffer = project
6673 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6674 .await
6675 .unwrap();
6676
6677 project.update(cx, |project, cx| {
6678 project
6679 .update_buffer_diagnostics(
6680 &buffer,
6681 vec![
6682 DiagnosticEntry {
6683 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6684 diagnostic: Diagnostic {
6685 severity: DiagnosticSeverity::ERROR,
6686 message: "syntax error 1".to_string(),
6687 ..Default::default()
6688 },
6689 },
6690 DiagnosticEntry {
6691 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6692 diagnostic: Diagnostic {
6693 severity: DiagnosticSeverity::ERROR,
6694 message: "syntax error 2".to_string(),
6695 ..Default::default()
6696 },
6697 },
6698 ],
6699 None,
6700 cx,
6701 )
6702 .unwrap();
6703 });
6704
6705 // An empty range is extended forward to include the following character.
6706 // At the end of a line, an empty range is extended backward to include
6707 // the preceding character.
6708 buffer.read_with(cx, |buffer, _| {
6709 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6710 assert_eq!(
6711 chunks
6712 .iter()
6713 .map(|(s, d)| (s.as_str(), *d))
6714 .collect::<Vec<_>>(),
6715 &[
6716 ("let one = ", None),
6717 (";", Some(DiagnosticSeverity::ERROR)),
6718 ("\nlet two =", None),
6719 (" ", Some(DiagnosticSeverity::ERROR)),
6720 ("\nlet three = 3;\n", None)
6721 ]
6722 );
6723 });
6724 }
6725
6726 #[gpui::test]
6727 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6728 cx.foreground().forbid_parking();
6729
6730 let mut language = Language::new(
6731 LanguageConfig {
6732 name: "Rust".into(),
6733 path_suffixes: vec!["rs".to_string()],
6734 ..Default::default()
6735 },
6736 Some(tree_sitter_rust::language()),
6737 );
6738 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6739
6740 let text = "
6741 fn a() {
6742 f1();
6743 }
6744 fn b() {
6745 f2();
6746 }
6747 fn c() {
6748 f3();
6749 }
6750 "
6751 .unindent();
6752
6753 let fs = FakeFs::new(cx.background());
6754 fs.insert_tree(
6755 "/dir",
6756 json!({
6757 "a.rs": text.clone(),
6758 }),
6759 )
6760 .await;
6761
6762 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6763 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6764 let buffer = project
6765 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6766 .await
6767 .unwrap();
6768
6769 let mut fake_server = fake_servers.next().await.unwrap();
6770 let lsp_document_version = fake_server
6771 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6772 .await
6773 .text_document
6774 .version;
6775
6776 // Simulate editing the buffer after the language server computes some edits.
6777 buffer.update(cx, |buffer, cx| {
6778 buffer.edit(
6779 [(
6780 Point::new(0, 0)..Point::new(0, 0),
6781 "// above first function\n",
6782 )],
6783 cx,
6784 );
6785 buffer.edit(
6786 [(
6787 Point::new(2, 0)..Point::new(2, 0),
6788 " // inside first function\n",
6789 )],
6790 cx,
6791 );
6792 buffer.edit(
6793 [(
6794 Point::new(6, 4)..Point::new(6, 4),
6795 "// inside second function ",
6796 )],
6797 cx,
6798 );
6799
6800 assert_eq!(
6801 buffer.text(),
6802 "
6803 // above first function
6804 fn a() {
6805 // inside first function
6806 f1();
6807 }
6808 fn b() {
6809 // inside second function f2();
6810 }
6811 fn c() {
6812 f3();
6813 }
6814 "
6815 .unindent()
6816 );
6817 });
6818
6819 let edits = project
6820 .update(cx, |project, cx| {
6821 project.edits_from_lsp(
6822 &buffer,
6823 vec![
6824 // replace body of first function
6825 lsp::TextEdit {
6826 range: lsp::Range::new(
6827 lsp::Position::new(0, 0),
6828 lsp::Position::new(3, 0),
6829 ),
6830 new_text: "
6831 fn a() {
6832 f10();
6833 }
6834 "
6835 .unindent(),
6836 },
6837 // edit inside second function
6838 lsp::TextEdit {
6839 range: lsp::Range::new(
6840 lsp::Position::new(4, 6),
6841 lsp::Position::new(4, 6),
6842 ),
6843 new_text: "00".into(),
6844 },
6845 // edit inside third function via two distinct edits
6846 lsp::TextEdit {
6847 range: lsp::Range::new(
6848 lsp::Position::new(7, 5),
6849 lsp::Position::new(7, 5),
6850 ),
6851 new_text: "4000".into(),
6852 },
6853 lsp::TextEdit {
6854 range: lsp::Range::new(
6855 lsp::Position::new(7, 5),
6856 lsp::Position::new(7, 6),
6857 ),
6858 new_text: "".into(),
6859 },
6860 ],
6861 Some(lsp_document_version),
6862 cx,
6863 )
6864 })
6865 .await
6866 .unwrap();
6867
6868 buffer.update(cx, |buffer, cx| {
6869 for (range, new_text) in edits {
6870 buffer.edit([(range, new_text)], cx);
6871 }
6872 assert_eq!(
6873 buffer.text(),
6874 "
6875 // above first function
6876 fn a() {
6877 // inside first function
6878 f10();
6879 }
6880 fn b() {
6881 // inside second function f200();
6882 }
6883 fn c() {
6884 f4000();
6885 }
6886 "
6887 .unindent()
6888 );
6889 });
6890 }
6891
6892 #[gpui::test]
6893 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6894 cx.foreground().forbid_parking();
6895
6896 let text = "
6897 use a::b;
6898 use a::c;
6899
6900 fn f() {
6901 b();
6902 c();
6903 }
6904 "
6905 .unindent();
6906
6907 let fs = FakeFs::new(cx.background());
6908 fs.insert_tree(
6909 "/dir",
6910 json!({
6911 "a.rs": text.clone(),
6912 }),
6913 )
6914 .await;
6915
6916 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6917 let buffer = project
6918 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6919 .await
6920 .unwrap();
6921
6922 // Simulate the language server sending us a small edit in the form of a very large diff.
6923 // Rust-analyzer does this when performing a merge-imports code action.
6924 let edits = project
6925 .update(cx, |project, cx| {
6926 project.edits_from_lsp(
6927 &buffer,
6928 [
6929 // Replace the first use statement without editing the semicolon.
6930 lsp::TextEdit {
6931 range: lsp::Range::new(
6932 lsp::Position::new(0, 4),
6933 lsp::Position::new(0, 8),
6934 ),
6935 new_text: "a::{b, c}".into(),
6936 },
6937 // Reinsert the remainder of the file between the semicolon and the final
6938 // newline of the file.
6939 lsp::TextEdit {
6940 range: lsp::Range::new(
6941 lsp::Position::new(0, 9),
6942 lsp::Position::new(0, 9),
6943 ),
6944 new_text: "\n\n".into(),
6945 },
6946 lsp::TextEdit {
6947 range: lsp::Range::new(
6948 lsp::Position::new(0, 9),
6949 lsp::Position::new(0, 9),
6950 ),
6951 new_text: "
6952 fn f() {
6953 b();
6954 c();
6955 }"
6956 .unindent(),
6957 },
6958 // Delete everything after the first newline of the file.
6959 lsp::TextEdit {
6960 range: lsp::Range::new(
6961 lsp::Position::new(1, 0),
6962 lsp::Position::new(7, 0),
6963 ),
6964 new_text: "".into(),
6965 },
6966 ],
6967 None,
6968 cx,
6969 )
6970 })
6971 .await
6972 .unwrap();
6973
6974 buffer.update(cx, |buffer, cx| {
6975 let edits = edits
6976 .into_iter()
6977 .map(|(range, text)| {
6978 (
6979 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6980 text,
6981 )
6982 })
6983 .collect::<Vec<_>>();
6984
6985 assert_eq!(
6986 edits,
6987 [
6988 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6989 (Point::new(1, 0)..Point::new(2, 0), "".into())
6990 ]
6991 );
6992
6993 for (range, new_text) in edits {
6994 buffer.edit([(range, new_text)], cx);
6995 }
6996 assert_eq!(
6997 buffer.text(),
6998 "
6999 use a::{b, c};
7000
7001 fn f() {
7002 b();
7003 c();
7004 }
7005 "
7006 .unindent()
7007 );
7008 });
7009 }
7010
7011 #[gpui::test]
7012 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7013 cx.foreground().forbid_parking();
7014
7015 let text = "
7016 use a::b;
7017 use a::c;
7018
7019 fn f() {
7020 b();
7021 c();
7022 }
7023 "
7024 .unindent();
7025
7026 let fs = FakeFs::new(cx.background());
7027 fs.insert_tree(
7028 "/dir",
7029 json!({
7030 "a.rs": text.clone(),
7031 }),
7032 )
7033 .await;
7034
7035 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7036 let buffer = project
7037 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7038 .await
7039 .unwrap();
7040
        // Simulate the language server sending us edits out of order,
        // with some of the ranges inverted.
7043 let edits = project
7044 .update(cx, |project, cx| {
7045 project.edits_from_lsp(
7046 &buffer,
7047 [
7048 lsp::TextEdit {
7049 range: lsp::Range::new(
7050 lsp::Position::new(0, 9),
7051 lsp::Position::new(0, 9),
7052 ),
7053 new_text: "\n\n".into(),
7054 },
7055 lsp::TextEdit {
7056 range: lsp::Range::new(
7057 lsp::Position::new(0, 8),
7058 lsp::Position::new(0, 4),
7059 ),
7060 new_text: "a::{b, c}".into(),
7061 },
7062 lsp::TextEdit {
7063 range: lsp::Range::new(
7064 lsp::Position::new(1, 0),
7065 lsp::Position::new(7, 0),
7066 ),
7067 new_text: "".into(),
7068 },
7069 lsp::TextEdit {
7070 range: lsp::Range::new(
7071 lsp::Position::new(0, 9),
7072 lsp::Position::new(0, 9),
7073 ),
7074 new_text: "
7075 fn f() {
7076 b();
7077 c();
7078 }"
7079 .unindent(),
7080 },
7081 ],
7082 None,
7083 cx,
7084 )
7085 })
7086 .await
7087 .unwrap();
7088
7089 buffer.update(cx, |buffer, cx| {
7090 let edits = edits
7091 .into_iter()
7092 .map(|(range, text)| {
7093 (
7094 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7095 text,
7096 )
7097 })
7098 .collect::<Vec<_>>();
7099
7100 assert_eq!(
7101 edits,
7102 [
7103 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7104 (Point::new(1, 0)..Point::new(2, 0), "".into())
7105 ]
7106 );
7107
7108 for (range, new_text) in edits {
7109 buffer.edit([(range, new_text)], cx);
7110 }
7111 assert_eq!(
7112 buffer.text(),
7113 "
7114 use a::{b, c};
7115
7116 fn f() {
7117 b();
7118 c();
7119 }
7120 "
7121 .unindent()
7122 );
7123 });
7124 }
7125
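    // Collects the chunks of `buffer` within `range`, merging adjacent chunks that share the same diagnostic severity.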
7126 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7127 buffer: &Buffer,
7128 range: Range<T>,
7129 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7130 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7131 for chunk in buffer.snapshot().chunks(range, true) {
7132 if chunks.last().map_or(false, |prev_chunk| {
7133 prev_chunk.1 == chunk.diagnostic_severity
7134 }) {
7135 chunks.last_mut().unwrap().0.push_str(chunk.text);
7136 } else {
7137 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7138 }
7139 }
7140 chunks
7141 }
7142
7143 #[gpui::test]
7144 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7145 let dir = temp_tree(json!({
7146 "root": {
7147 "dir1": {},
7148 "dir2": {
7149 "dir3": {}
7150 }
7151 }
7152 }));
7153
7154 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7155 let cancel_flag = Default::default();
7156 let results = project
7157 .read_with(cx, |project, cx| {
7158 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7159 })
7160 .await;
7161
7162 assert!(results.is_empty());
7163 }
7164
7165 #[gpui::test(iterations = 10)]
7166 async fn test_definition(cx: &mut gpui::TestAppContext) {
7167 let mut language = Language::new(
7168 LanguageConfig {
7169 name: "Rust".into(),
7170 path_suffixes: vec!["rs".to_string()],
7171 ..Default::default()
7172 },
7173 Some(tree_sitter_rust::language()),
7174 );
7175 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7176
7177 let fs = FakeFs::new(cx.background());
7178 fs.insert_tree(
7179 "/dir",
7180 json!({
7181 "a.rs": "const fn a() { A }",
7182 "b.rs": "const y: i32 = crate::a()",
7183 }),
7184 )
7185 .await;
7186
7187 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7188 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7189
7190 let buffer = project
7191 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7192 .await
7193 .unwrap();
7194
7195 let fake_server = fake_servers.next().await.unwrap();
7196 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7197 let params = params.text_document_position_params;
7198 assert_eq!(
7199 params.text_document.uri.to_file_path().unwrap(),
7200 Path::new("/dir/b.rs"),
7201 );
7202 assert_eq!(params.position, lsp::Position::new(0, 22));
7203
7204 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7205 lsp::Location::new(
7206 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7207 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7208 ),
7209 )))
7210 });
7211
7212 let mut definitions = project
7213 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7214 .await
7215 .unwrap();
7216
7217 assert_eq!(definitions.len(), 1);
7218 let definition = definitions.pop().unwrap();
7219 cx.update(|cx| {
7220 let target_buffer = definition.buffer.read(cx);
7221 assert_eq!(
7222 target_buffer
7223 .file()
7224 .unwrap()
7225 .as_local()
7226 .unwrap()
7227 .abs_path(cx),
7228 Path::new("/dir/a.rs"),
7229 );
7230 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7231 assert_eq!(
7232 list_worktrees(&project, cx),
7233 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7234 );
7235
7236 drop(definition);
7237 });
7238 cx.read(|cx| {
7239 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7240 });
7241
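        // Lists each worktree's absolute path along with whether it is visible. Worktrees created
        // for files opened outside the project's roots (like the definition target above) are not visible.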
7242 fn list_worktrees<'a>(
7243 project: &'a ModelHandle<Project>,
7244 cx: &'a AppContext,
7245 ) -> Vec<(&'a Path, bool)> {
7246 project
7247 .read(cx)
7248 .worktrees(cx)
7249 .map(|worktree| {
7250 let worktree = worktree.read(cx);
7251 (
7252 worktree.as_local().unwrap().abs_path().as_ref(),
7253 worktree.is_visible(),
7254 )
7255 })
7256 .collect::<Vec<_>>()
7257 }
7258 }
7259
7260 #[gpui::test]
7261 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7262 let mut language = Language::new(
7263 LanguageConfig {
7264 name: "TypeScript".into(),
7265 path_suffixes: vec!["ts".to_string()],
7266 ..Default::default()
7267 },
7268 Some(tree_sitter_typescript::language_typescript()),
7269 );
7270 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7271
7272 let fs = FakeFs::new(cx.background());
7273 fs.insert_tree(
7274 "/dir",
7275 json!({
7276 "a.ts": "",
7277 }),
7278 )
7279 .await;
7280
7281 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7282 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7283 let buffer = project
7284 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7285 .await
7286 .unwrap();
7287
7288 let fake_server = fake_language_servers.next().await.unwrap();
7289
7290 let text = "let a = b.fqn";
7291 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7292 let completions = project.update(cx, |project, cx| {
7293 project.completions(&buffer, text.len(), cx)
7294 });
7295
7296 fake_server
7297 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7298 Ok(Some(lsp::CompletionResponse::Array(vec![
7299 lsp::CompletionItem {
7300 label: "fullyQualifiedName?".into(),
7301 insert_text: Some("fullyQualifiedName".into()),
7302 ..Default::default()
7303 },
7304 ])))
7305 })
7306 .next()
7307 .await;
7308 let completions = completions.await.unwrap();
7309 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7310 assert_eq!(completions.len(), 1);
7311 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7312 assert_eq!(
7313 completions[0].old_range.to_offset(&snapshot),
7314 text.len() - 3..text.len()
7315 );
7316 }
7317
7318 #[gpui::test(iterations = 10)]
7319 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7320 let mut language = Language::new(
7321 LanguageConfig {
7322 name: "TypeScript".into(),
7323 path_suffixes: vec!["ts".to_string()],
7324 ..Default::default()
7325 },
7326 None,
7327 );
7328 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7329
7330 let fs = FakeFs::new(cx.background());
7331 fs.insert_tree(
7332 "/dir",
7333 json!({
7334 "a.ts": "a",
7335 }),
7336 )
7337 .await;
7338
7339 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7340 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7341 let buffer = project
7342 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7343 .await
7344 .unwrap();
7345
7346 let fake_server = fake_language_servers.next().await.unwrap();
7347
        // The language server returns code actions that contain commands rather than edits.
7349 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7350 fake_server
7351 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7352 Ok(Some(vec![
7353 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7354 title: "The code action".into(),
7355 command: Some(lsp::Command {
7356 title: "The command".into(),
7357 command: "_the/command".into(),
7358 arguments: Some(vec![json!("the-argument")]),
7359 }),
7360 ..Default::default()
7361 }),
7362 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7363 title: "two".into(),
7364 ..Default::default()
7365 }),
7366 ]))
7367 })
7368 .next()
7369 .await;
7370
7371 let action = actions.await.unwrap()[0].clone();
7372 let apply = project.update(cx, |project, cx| {
7373 project.apply_code_action(buffer.clone(), action, true, cx)
7374 });
7375
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
7378 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7379 |action, _| async move { Ok(action) },
7380 );
7381
7382 // While executing the command, the language server sends the editor
7383 // a `workspaceEdit` request.
7384 fake_server
7385 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7386 let fake = fake_server.clone();
7387 move |params, _| {
7388 assert_eq!(params.command, "_the/command");
7389 let fake = fake.clone();
7390 async move {
7391 fake.server
7392 .request::<lsp::request::ApplyWorkspaceEdit>(
7393 lsp::ApplyWorkspaceEditParams {
7394 label: None,
7395 edit: lsp::WorkspaceEdit {
7396 changes: Some(
7397 [(
7398 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7399 vec![lsp::TextEdit {
7400 range: lsp::Range::new(
7401 lsp::Position::new(0, 0),
7402 lsp::Position::new(0, 0),
7403 ),
7404 new_text: "X".into(),
7405 }],
7406 )]
7407 .into_iter()
7408 .collect(),
7409 ),
7410 ..Default::default()
7411 },
7412 },
7413 )
7414 .await
7415 .unwrap();
7416 Ok(Some(json!(null)))
7417 }
7418 }
7419 })
7420 .next()
7421 .await;
7422
7423 // Applying the code action returns a project transaction containing the edits
7424 // sent by the language server in its `workspaceEdit` request.
7425 let transaction = apply.await.unwrap();
7426 assert!(transaction.0.contains_key(&buffer));
7427 buffer.update(cx, |buffer, cx| {
7428 assert_eq!(buffer.text(), "Xa");
7429 buffer.undo(cx);
7430 assert_eq!(buffer.text(), "a");
7431 });
7432 }
7433
7434 #[gpui::test]
7435 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7436 let fs = FakeFs::new(cx.background());
7437 fs.insert_tree(
7438 "/dir",
7439 json!({
7440 "file1": "the old contents",
7441 }),
7442 )
7443 .await;
7444
7445 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7446 let buffer = project
7447 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7448 .await
7449 .unwrap();
7450 buffer
7451 .update(cx, |buffer, cx| {
7452 assert_eq!(buffer.text(), "the old contents");
7453 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7454 buffer.save(cx)
7455 })
7456 .await
7457 .unwrap();
7458
7459 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7460 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7461 }
7462
7463 #[gpui::test]
7464 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7465 let fs = FakeFs::new(cx.background());
7466 fs.insert_tree(
7467 "/dir",
7468 json!({
7469 "file1": "the old contents",
7470 }),
7471 )
7472 .await;
7473
7474 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7475 let buffer = project
7476 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7477 .await
7478 .unwrap();
7479 buffer
7480 .update(cx, |buffer, cx| {
7481 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7482 buffer.save(cx)
7483 })
7484 .await
7485 .unwrap();
7486
7487 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7488 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7489 }
7490
7491 #[gpui::test]
7492 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7493 let fs = FakeFs::new(cx.background());
7494 fs.insert_tree("/dir", json!({})).await;
7495
7496 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
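        // Create an untitled buffer that is not yet backed by a file on disk.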
7497 let buffer = project.update(cx, |project, cx| {
7498 project.create_buffer("", None, cx).unwrap()
7499 });
7500 buffer.update(cx, |buffer, cx| {
7501 buffer.edit([(0..0, "abc")], cx);
7502 assert!(buffer.is_dirty());
7503 assert!(!buffer.has_conflict());
7504 });
7505 project
7506 .update(cx, |project, cx| {
7507 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7508 })
7509 .await
7510 .unwrap();
7511 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7512 buffer.read_with(cx, |buffer, cx| {
7513 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7514 assert!(!buffer.is_dirty());
7515 assert!(!buffer.has_conflict());
7516 });
7517
7518 let opened_buffer = project
7519 .update(cx, |project, cx| {
7520 project.open_local_buffer("/dir/file1", cx)
7521 })
7522 .await
7523 .unwrap();
7524 assert_eq!(opened_buffer, buffer);
7525 }
7526
7527 #[gpui::test(retries = 5)]
7528 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7529 let dir = temp_tree(json!({
7530 "a": {
7531 "file1": "",
7532 "file2": "",
7533 "file3": "",
7534 },
7535 "b": {
7536 "c": {
7537 "file4": "",
7538 "file5": "",
7539 }
7540 }
7541 }));
7542
7543 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7544 let rpc = project.read_with(cx, |p, _| p.client.clone());
7545
7546 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7547 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7548 async move { buffer.await.unwrap() }
7549 };
7550 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7551 project.read_with(cx, |project, cx| {
7552 let tree = project.worktrees(cx).next().unwrap();
7553 tree.read(cx)
7554 .entry_for_path(path)
7555 .expect(&format!("no entry for path {}", path))
7556 .id
7557 })
7558 };
7559
7560 let buffer2 = buffer_for_path("a/file2", cx).await;
7561 let buffer3 = buffer_for_path("a/file3", cx).await;
7562 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7563 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7564
7565 let file2_id = id_for_path("a/file2", &cx);
7566 let file3_id = id_for_path("a/file3", &cx);
7567 let file4_id = id_for_path("b/c/file4", &cx);
7568
7569 // Create a remote copy of this worktree.
7570 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7571 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7572 let (remote, load_task) = cx.update(|cx| {
7573 Worktree::remote(
7574 1,
7575 1,
7576 initial_snapshot.to_proto(&Default::default(), true),
7577 rpc.clone(),
7578 cx,
7579 )
7580 });
7582 load_task.await;
7583
7584 cx.read(|cx| {
7585 assert!(!buffer2.read(cx).is_dirty());
7586 assert!(!buffer3.read(cx).is_dirty());
7587 assert!(!buffer4.read(cx).is_dirty());
7588 assert!(!buffer5.read(cx).is_dirty());
7589 });
7590
7591 // Rename and delete files and directories.
7592 tree.flush_fs_events(&cx).await;
7593 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7594 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7595 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7596 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7597 tree.flush_fs_events(&cx).await;
7598
7599 let expected_paths = vec![
7600 "a",
7601 "a/file1",
7602 "a/file2.new",
7603 "b",
7604 "d",
7605 "d/file3",
7606 "d/file4",
7607 ];
7608
7609 cx.read(|app| {
7610 assert_eq!(
7611 tree.read(app)
7612 .paths()
7613 .map(|p| p.to_str().unwrap())
7614 .collect::<Vec<_>>(),
7615 expected_paths
7616 );
7617
7618 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7619 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7620 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7621
7622 assert_eq!(
7623 buffer2.read(app).file().unwrap().path().as_ref(),
7624 Path::new("a/file2.new")
7625 );
7626 assert_eq!(
7627 buffer3.read(app).file().unwrap().path().as_ref(),
7628 Path::new("d/file3")
7629 );
7630 assert_eq!(
7631 buffer4.read(app).file().unwrap().path().as_ref(),
7632 Path::new("d/file4")
7633 );
7634 assert_eq!(
7635 buffer5.read(app).file().unwrap().path().as_ref(),
7636 Path::new("b/c/file5")
7637 );
7638
7639 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7640 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7641 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7642 assert!(buffer5.read(app).file().unwrap().is_deleted());
7643 });
7644
7645 // Update the remote worktree. Check that it becomes consistent with the
7646 // local worktree.
7647 remote.update(cx, |remote, cx| {
7648 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7649 &initial_snapshot,
7650 1,
7651 1,
7652 true,
7653 );
7654 remote
7655 .as_remote_mut()
7656 .unwrap()
7657 .snapshot
7658 .apply_remote_update(update_message)
7659 .unwrap();
7660
7661 assert_eq!(
7662 remote
7663 .paths()
7664 .map(|p| p.to_str().unwrap())
7665 .collect::<Vec<_>>(),
7666 expected_paths
7667 );
7668 });
7669 }
7670
7671 #[gpui::test]
7672 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7673 let fs = FakeFs::new(cx.background());
7674 fs.insert_tree(
7675 "/dir",
7676 json!({
7677 "a.txt": "a-contents",
7678 "b.txt": "b-contents",
7679 }),
7680 )
7681 .await;
7682
7683 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7684
7685 // Spawn multiple tasks to open paths, repeating some paths.
7686 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7687 (
7688 p.open_local_buffer("/dir/a.txt", cx),
7689 p.open_local_buffer("/dir/b.txt", cx),
7690 p.open_local_buffer("/dir/a.txt", cx),
7691 )
7692 });
7693
7694 let buffer_a_1 = buffer_a_1.await.unwrap();
7695 let buffer_a_2 = buffer_a_2.await.unwrap();
7696 let buffer_b = buffer_b.await.unwrap();
7697 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7698 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7699
7700 // There is only one buffer per path.
7701 let buffer_a_id = buffer_a_1.id();
7702 assert_eq!(buffer_a_2.id(), buffer_a_id);
7703
7704 // Open the same path again while it is still open.
7705 drop(buffer_a_1);
7706 let buffer_a_3 = project
7707 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7708 .await
7709 .unwrap();
7710
7711 // There's still only one buffer per path.
7712 assert_eq!(buffer_a_3.id(), buffer_a_id);
7713 }
7714
7715 #[gpui::test]
7716 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7717 let fs = FakeFs::new(cx.background());
7718 fs.insert_tree(
7719 "/dir",
7720 json!({
7721 "file1": "abc",
7722 "file2": "def",
7723 "file3": "ghi",
7724 }),
7725 )
7726 .await;
7727
7728 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7729
7730 let buffer1 = project
7731 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7732 .await
7733 .unwrap();
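        // Collect the buffer's events (excluding operations) so they can be asserted on below.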
7734 let events = Rc::new(RefCell::new(Vec::new()));
7735
7736 // initially, the buffer isn't dirty.
7737 buffer1.update(cx, |buffer, cx| {
7738 cx.subscribe(&buffer1, {
7739 let events = events.clone();
7740 move |_, _, event, _| match event {
7741 BufferEvent::Operation(_) => {}
7742 _ => events.borrow_mut().push(event.clone()),
7743 }
7744 })
7745 .detach();
7746
7747 assert!(!buffer.is_dirty());
7748 assert!(events.borrow().is_empty());
7749
7750 buffer.edit([(1..2, "")], cx);
7751 });
7752
7753 // after the first edit, the buffer is dirty, and emits a dirtied event.
7754 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
7756 assert!(buffer.is_dirty());
7757 assert_eq!(
7758 *events.borrow(),
7759 &[language::Event::Edited, language::Event::Dirtied]
7760 );
7761 events.borrow_mut().clear();
7762 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7763 });
7764
7765 // after saving, the buffer is not dirty, and emits a saved event.
7766 buffer1.update(cx, |buffer, cx| {
7767 assert!(!buffer.is_dirty());
7768 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7769 events.borrow_mut().clear();
7770
7771 buffer.edit([(1..1, "B")], cx);
7772 buffer.edit([(2..2, "D")], cx);
7773 });
7774
7775 // after editing again, the buffer is dirty, and emits another dirty event.
7776 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
7778 assert!(buffer.is_dirty());
7779 assert_eq!(
7780 *events.borrow(),
7781 &[
7782 language::Event::Edited,
7783 language::Event::Dirtied,
7784 language::Event::Edited,
7785 ],
7786 );
7787 events.borrow_mut().clear();
7788
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7791 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
7793 assert!(buffer.is_dirty());
7794 });
7795
7796 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7797
7798 // When a file is deleted, the buffer is considered dirty.
7799 let events = Rc::new(RefCell::new(Vec::new()));
7800 let buffer2 = project
7801 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7802 .await
7803 .unwrap();
7804 buffer2.update(cx, |_, cx| {
7805 cx.subscribe(&buffer2, {
7806 let events = events.clone();
7807 move |_, _, event, _| events.borrow_mut().push(event.clone())
7808 })
7809 .detach();
7810 });
7811
7812 fs.remove_file("/dir/file2".as_ref(), Default::default())
7813 .await
7814 .unwrap();
7815 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7816 assert_eq!(
7817 *events.borrow(),
7818 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7819 );
7820
        // When a file that is already dirty is deleted, we don't emit an additional Dirtied event.
7822 let events = Rc::new(RefCell::new(Vec::new()));
7823 let buffer3 = project
7824 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7825 .await
7826 .unwrap();
7827 buffer3.update(cx, |_, cx| {
7828 cx.subscribe(&buffer3, {
7829 let events = events.clone();
7830 move |_, _, event, _| events.borrow_mut().push(event.clone())
7831 })
7832 .detach();
7833 });
7834
7835 buffer3.update(cx, |buffer, cx| {
7836 buffer.edit([(0..0, "x")], cx);
7837 });
7838 events.borrow_mut().clear();
7839 fs.remove_file("/dir/file3".as_ref(), Default::default())
7840 .await
7841 .unwrap();
7842 buffer3
7843 .condition(&cx, |_, _| !events.borrow().is_empty())
7844 .await;
7845 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7846 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7847 }
7848
7849 #[gpui::test]
7850 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7851 let initial_contents = "aaa\nbbbbb\nc\n";
7852 let fs = FakeFs::new(cx.background());
7853 fs.insert_tree(
7854 "/dir",
7855 json!({
7856 "the-file": initial_contents,
7857 }),
7858 )
7859 .await;
7860 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7861 let buffer = project
7862 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7863 .await
7864 .unwrap();
7865
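        // Create anchors within the buffer; they should keep tracking their logical
        // positions when the file is reloaded from disk.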
7866 let anchors = (0..3)
7867 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7868 .collect::<Vec<_>>();
7869
7870 // Change the file on disk, adding two new lines of text, and removing
7871 // one line.
7872 buffer.read_with(cx, |buffer, _| {
7873 assert!(!buffer.is_dirty());
7874 assert!(!buffer.has_conflict());
7875 });
7876 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7877 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7878 .await
7879 .unwrap();
7880
7881 // Because the buffer was not modified, it is reloaded from disk. Its
7882 // contents are edited according to the diff between the old and new
7883 // file contents.
7884 buffer
7885 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7886 .await;
7887
7888 buffer.update(cx, |buffer, _| {
7889 assert_eq!(buffer.text(), new_contents);
7890 assert!(!buffer.is_dirty());
7891 assert!(!buffer.has_conflict());
7892
7893 let anchor_positions = anchors
7894 .iter()
7895 .map(|anchor| anchor.to_point(&*buffer))
7896 .collect::<Vec<_>>();
7897 assert_eq!(
7898 anchor_positions,
7899 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7900 );
7901 });
7902
        // Modify the buffer, making it dirty but not yet conflicted.
7904 buffer.update(cx, |buffer, cx| {
7905 buffer.edit([(0..0, " ")], cx);
7906 assert!(buffer.is_dirty());
7907 assert!(!buffer.has_conflict());
7908 });
7909
7910 // Change the file on disk again, adding blank lines to the beginning.
7911 fs.save(
7912 "/dir/the-file".as_ref(),
7913 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7914 )
7915 .await
7916 .unwrap();
7917
7918 // Because the buffer is modified, it doesn't reload from disk, but is
7919 // marked as having a conflict.
7920 buffer
7921 .condition(&cx, |buffer, _| buffer.has_conflict())
7922 .await;
7923 }
7924
7925 #[gpui::test]
7926 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7927 cx.foreground().forbid_parking();
7928
7929 let fs = FakeFs::new(cx.background());
7930 fs.insert_tree(
7931 "/the-dir",
7932 json!({
7933 "a.rs": "
7934 fn foo(mut v: Vec<usize>) {
7935 for x in &v {
7936 v.push(1);
7937 }
7938 }
7939 "
7940 .unindent(),
7941 }),
7942 )
7943 .await;
7944
7945 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7946 let buffer = project
7947 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7948 .await
7949 .unwrap();
7950
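        // Simulate an LSP publishDiagnostics message in which hint diagnostics reference
        // their primary diagnostics via related information.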
7951 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7952 let message = lsp::PublishDiagnosticsParams {
7953 uri: buffer_uri.clone(),
7954 diagnostics: vec![
7955 lsp::Diagnostic {
7956 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7957 severity: Some(DiagnosticSeverity::WARNING),
7958 message: "error 1".to_string(),
7959 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7960 location: lsp::Location {
7961 uri: buffer_uri.clone(),
7962 range: lsp::Range::new(
7963 lsp::Position::new(1, 8),
7964 lsp::Position::new(1, 9),
7965 ),
7966 },
7967 message: "error 1 hint 1".to_string(),
7968 }]),
7969 ..Default::default()
7970 },
7971 lsp::Diagnostic {
7972 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7973 severity: Some(DiagnosticSeverity::HINT),
7974 message: "error 1 hint 1".to_string(),
7975 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7976 location: lsp::Location {
7977 uri: buffer_uri.clone(),
7978 range: lsp::Range::new(
7979 lsp::Position::new(1, 8),
7980 lsp::Position::new(1, 9),
7981 ),
7982 },
7983 message: "original diagnostic".to_string(),
7984 }]),
7985 ..Default::default()
7986 },
7987 lsp::Diagnostic {
7988 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7989 severity: Some(DiagnosticSeverity::ERROR),
7990 message: "error 2".to_string(),
7991 related_information: Some(vec![
7992 lsp::DiagnosticRelatedInformation {
7993 location: lsp::Location {
7994 uri: buffer_uri.clone(),
7995 range: lsp::Range::new(
7996 lsp::Position::new(1, 13),
7997 lsp::Position::new(1, 15),
7998 ),
7999 },
8000 message: "error 2 hint 1".to_string(),
8001 },
8002 lsp::DiagnosticRelatedInformation {
8003 location: lsp::Location {
8004 uri: buffer_uri.clone(),
8005 range: lsp::Range::new(
8006 lsp::Position::new(1, 13),
8007 lsp::Position::new(1, 15),
8008 ),
8009 },
8010 message: "error 2 hint 2".to_string(),
8011 },
8012 ]),
8013 ..Default::default()
8014 },
8015 lsp::Diagnostic {
8016 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8017 severity: Some(DiagnosticSeverity::HINT),
8018 message: "error 2 hint 1".to_string(),
8019 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8020 location: lsp::Location {
8021 uri: buffer_uri.clone(),
8022 range: lsp::Range::new(
8023 lsp::Position::new(2, 8),
8024 lsp::Position::new(2, 17),
8025 ),
8026 },
8027 message: "original diagnostic".to_string(),
8028 }]),
8029 ..Default::default()
8030 },
8031 lsp::Diagnostic {
8032 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8033 severity: Some(DiagnosticSeverity::HINT),
8034 message: "error 2 hint 2".to_string(),
8035 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8036 location: lsp::Location {
8037 uri: buffer_uri.clone(),
8038 range: lsp::Range::new(
8039 lsp::Position::new(2, 8),
8040 lsp::Position::new(2, 17),
8041 ),
8042 },
8043 message: "original diagnostic".to_string(),
8044 }]),
8045 ..Default::default()
8046 },
8047 ],
8048 version: None,
8049 };
8050
8051 project
8052 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
8053 .unwrap();
8054 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8055
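        // The diagnostics are exposed in position order, and each hint shares the group id
        // of its primary diagnostic.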
8056 assert_eq!(
8057 buffer
8058 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8059 .collect::<Vec<_>>(),
8060 &[
8061 DiagnosticEntry {
8062 range: Point::new(1, 8)..Point::new(1, 9),
8063 diagnostic: Diagnostic {
8064 severity: DiagnosticSeverity::WARNING,
8065 message: "error 1".to_string(),
8066 group_id: 0,
8067 is_primary: true,
8068 ..Default::default()
8069 }
8070 },
8071 DiagnosticEntry {
8072 range: Point::new(1, 8)..Point::new(1, 9),
8073 diagnostic: Diagnostic {
8074 severity: DiagnosticSeverity::HINT,
8075 message: "error 1 hint 1".to_string(),
8076 group_id: 0,
8077 is_primary: false,
8078 ..Default::default()
8079 }
8080 },
8081 DiagnosticEntry {
8082 range: Point::new(1, 13)..Point::new(1, 15),
8083 diagnostic: Diagnostic {
8084 severity: DiagnosticSeverity::HINT,
8085 message: "error 2 hint 1".to_string(),
8086 group_id: 1,
8087 is_primary: false,
8088 ..Default::default()
8089 }
8090 },
8091 DiagnosticEntry {
8092 range: Point::new(1, 13)..Point::new(1, 15),
8093 diagnostic: Diagnostic {
8094 severity: DiagnosticSeverity::HINT,
8095 message: "error 2 hint 2".to_string(),
8096 group_id: 1,
8097 is_primary: false,
8098 ..Default::default()
8099 }
8100 },
8101 DiagnosticEntry {
8102 range: Point::new(2, 8)..Point::new(2, 17),
8103 diagnostic: Diagnostic {
8104 severity: DiagnosticSeverity::ERROR,
8105 message: "error 2".to_string(),
8106 group_id: 1,
8107 is_primary: true,
8108 ..Default::default()
8109 }
8110 }
8111 ]
8112 );
8113
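        // Each diagnostic group can also be retrieved by id, including its primary entry
        // and related hints.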
8114 assert_eq!(
8115 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8116 &[
8117 DiagnosticEntry {
8118 range: Point::new(1, 8)..Point::new(1, 9),
8119 diagnostic: Diagnostic {
8120 severity: DiagnosticSeverity::WARNING,
8121 message: "error 1".to_string(),
8122 group_id: 0,
8123 is_primary: true,
8124 ..Default::default()
8125 }
8126 },
8127 DiagnosticEntry {
8128 range: Point::new(1, 8)..Point::new(1, 9),
8129 diagnostic: Diagnostic {
8130 severity: DiagnosticSeverity::HINT,
8131 message: "error 1 hint 1".to_string(),
8132 group_id: 0,
8133 is_primary: false,
8134 ..Default::default()
8135 }
8136 },
8137 ]
8138 );
8139 assert_eq!(
8140 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8141 &[
8142 DiagnosticEntry {
8143 range: Point::new(1, 13)..Point::new(1, 15),
8144 diagnostic: Diagnostic {
8145 severity: DiagnosticSeverity::HINT,
8146 message: "error 2 hint 1".to_string(),
8147 group_id: 1,
8148 is_primary: false,
8149 ..Default::default()
8150 }
8151 },
8152 DiagnosticEntry {
8153 range: Point::new(1, 13)..Point::new(1, 15),
8154 diagnostic: Diagnostic {
8155 severity: DiagnosticSeverity::HINT,
8156 message: "error 2 hint 2".to_string(),
8157 group_id: 1,
8158 is_primary: false,
8159 ..Default::default()
8160 }
8161 },
8162 DiagnosticEntry {
8163 range: Point::new(2, 8)..Point::new(2, 17),
8164 diagnostic: Diagnostic {
8165 severity: DiagnosticSeverity::ERROR,
8166 message: "error 2".to_string(),
8167 group_id: 1,
8168 is_primary: true,
8169 ..Default::default()
8170 }
8171 }
8172 ]
8173 );
8174 }
8175
8176 #[gpui::test]
8177 async fn test_rename(cx: &mut gpui::TestAppContext) {
8178 cx.foreground().forbid_parking();
8179
8180 let mut language = Language::new(
8181 LanguageConfig {
8182 name: "Rust".into(),
8183 path_suffixes: vec!["rs".to_string()],
8184 ..Default::default()
8185 },
8186 Some(tree_sitter_rust::language()),
8187 );
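        // Configure the language with a fake LSP adapter whose servers advertise rename
        // support, including prepareRename.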
8188 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8189 capabilities: lsp::ServerCapabilities {
8190 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8191 prepare_provider: Some(true),
8192 work_done_progress_options: Default::default(),
8193 })),
8194 ..Default::default()
8195 },
8196 ..Default::default()
8197 });
8198
8199 let fs = FakeFs::new(cx.background());
8200 fs.insert_tree(
8201 "/dir",
8202 json!({
8203 "one.rs": "const ONE: usize = 1;",
8204 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8205 }),
8206 )
8207 .await;
8208
8209 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8210 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8211 let buffer = project
8212 .update(cx, |project, cx| {
8213 project.open_local_buffer("/dir/one.rs", cx)
8214 })
8215 .await
8216 .unwrap();
8217
8218 let fake_server = fake_servers.next().await.unwrap();
8219
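        // Prepare the rename at the cursor position; the fake server responds with the
        // range of the symbol being renamed.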
8220 let response = project.update(cx, |project, cx| {
8221 project.prepare_rename(buffer.clone(), 7, cx)
8222 });
8223 fake_server
8224 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8225 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8226 assert_eq!(params.position, lsp::Position::new(0, 7));
8227 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8228 lsp::Position::new(0, 6),
8229 lsp::Position::new(0, 9),
8230 ))))
8231 })
8232 .next()
8233 .await
8234 .unwrap();
8235 let range = response.await.unwrap().unwrap();
8236 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8237 assert_eq!(range, 6..9);
8238
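        // Perform the rename; the fake server responds with a workspace edit touching both
        // files, and the edits are applied to the open buffers.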
8239 let response = project.update(cx, |project, cx| {
8240 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8241 });
8242 fake_server
8243 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8244 assert_eq!(
8245 params.text_document_position.text_document.uri.as_str(),
8246 "file:///dir/one.rs"
8247 );
8248 assert_eq!(
8249 params.text_document_position.position,
8250 lsp::Position::new(0, 7)
8251 );
8252 assert_eq!(params.new_name, "THREE");
8253 Ok(Some(lsp::WorkspaceEdit {
8254 changes: Some(
8255 [
8256 (
8257 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8258 vec![lsp::TextEdit::new(
8259 lsp::Range::new(
8260 lsp::Position::new(0, 6),
8261 lsp::Position::new(0, 9),
8262 ),
8263 "THREE".to_string(),
8264 )],
8265 ),
8266 (
8267 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8268 vec![
8269 lsp::TextEdit::new(
8270 lsp::Range::new(
8271 lsp::Position::new(0, 24),
8272 lsp::Position::new(0, 27),
8273 ),
8274 "THREE".to_string(),
8275 ),
8276 lsp::TextEdit::new(
8277 lsp::Range::new(
8278 lsp::Position::new(0, 35),
8279 lsp::Position::new(0, 38),
8280 ),
8281 "THREE".to_string(),
8282 ),
8283 ],
8284 ),
8285 ]
8286 .into_iter()
8287 .collect(),
8288 ),
8289 ..Default::default()
8290 }))
8291 })
8292 .next()
8293 .await
8294 .unwrap();
8295 let mut transaction = response.await.unwrap().0;
8296 assert_eq!(transaction.len(), 2);
8297 assert_eq!(
8298 transaction
8299 .remove_entry(&buffer)
8300 .unwrap()
8301 .0
8302 .read_with(cx, |buffer, _| buffer.text()),
8303 "const THREE: usize = 1;"
8304 );
8305 assert_eq!(
8306 transaction
8307 .into_keys()
8308 .next()
8309 .unwrap()
8310 .read_with(cx, |buffer, _| buffer.text()),
8311 "const TWO: usize = one::THREE + one::THREE;"
8312 );
8313 }
8314
8315 #[gpui::test]
8316 async fn test_search(cx: &mut gpui::TestAppContext) {
8317 let fs = FakeFs::new(cx.background());
8318 fs.insert_tree(
8319 "/dir",
8320 json!({
8321 "one.rs": "const ONE: usize = 1;",
8322 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8323 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8324 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8325 }),
8326 )
8327 .await;
8328 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8329 assert_eq!(
8330 search(&project, SearchQuery::text("TWO", false, true), cx)
8331 .await
8332 .unwrap(),
8333 HashMap::from_iter([
8334 ("two.rs".to_string(), vec![6..9]),
8335 ("three.rs".to_string(), vec![37..40])
8336 ])
8337 );
8338
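        // Open one of the files and edit it, so its in-memory contents differ from the
        // contents on disk; subsequent searches should reflect the buffer contents.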
8339 let buffer_4 = project
8340 .update(cx, |project, cx| {
8341 project.open_local_buffer("/dir/four.rs", cx)
8342 })
8343 .await
8344 .unwrap();
8345 buffer_4.update(cx, |buffer, cx| {
8346 let text = "two::TWO";
8347 buffer.edit([(20..28, text), (31..43, text)], cx);
8348 });
8349
8350 assert_eq!(
8351 search(&project, SearchQuery::text("TWO", false, true), cx)
8352 .await
8353 .unwrap(),
8354 HashMap::from_iter([
8355 ("two.rs".to_string(), vec![6..9]),
8356 ("three.rs".to_string(), vec![37..40]),
8357 ("four.rs".to_string(), vec![25..28, 36..39])
8358 ])
8359 );
8360
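        // Helper that runs a project-wide search and converts the results into
        // (path, offset ranges) pairs for concise assertions.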
8361 async fn search(
8362 project: &ModelHandle<Project>,
8363 query: SearchQuery,
8364 cx: &mut gpui::TestAppContext,
8365 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8366 let results = project
8367 .update(cx, |project, cx| project.search(query, cx))
8368 .await?;
8369
8370 Ok(results
8371 .into_iter()
8372 .map(|(buffer, ranges)| {
8373 buffer.read_with(cx, |buffer, _| {
8374 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8375 let ranges = ranges
8376 .into_iter()
8377 .map(|range| range.to_offset(buffer))
8378 .collect::<Vec<_>>();
8379 (path, ranges)
8380 })
8381 })
8382 .collect())
8383 }
8384 }
8385}