mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

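/// State for a single open project, which is either hosted locally (backed by
/// worktrees on this machine) or joined remotely from a collaborator over RPC.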
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

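/// Whether this project is the host's local copy or a guest's remote replica,
/// along with the connection state each case needs.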
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

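/// Events emitted by a [`Project`] to its observers.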
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

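/// A path to an entry, addressed relative to a particular worktree.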
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Debug)]
pub struct Hover {
    pub contents: lsp::HoverContents,
    pub range: Option<Range<language::Anchor>>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

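/// An identifier for a file-system entry within a project, allocated from a shared
/// atomic counter and convertible to and from its wire (`u64`) representation.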
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
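    /// Registers the RPC message and request handlers that projects respond to.
    /// Intended to be called once, before any `Project` is created.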
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

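    /// Creates a project hosted on this machine. While `online` is true and the client
    /// is connected, a background task keeps the project registered with the server so
    /// that contacts can request to join it, and unregisters it otherwise.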
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }

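    /// Joins a project hosted by another user. Sends a `JoinProject` request, maps any
    /// decline reason to a `JoinProjectError`, and builds a guest replica from the
    /// worktrees, language servers, and collaborators in the response.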
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

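    /// Restores the persisted online/offline preference for this project's worktrees
    /// from the database, falling back to the `projects_online_by_default` setting.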
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }

    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }

    pub fn is_online(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
            ProjectClientState::Remote { .. } => true,
        }
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }

    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn shared_remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local {
                remote_id_rx,
                is_shared,
                ..
            } => {
                if *is_shared {
                    *remote_id_rx.borrow()
                } else {
                    None
                }
            }
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

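    /// Broadcasts the project's current worktree metadata to the server when the project
    /// is registered and online, optionally persisting the online state as well.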
    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
        self.worktrees
            .iter()
            .filter_map(|worktree| {
                let worktree = worktree.upgrade(&cx)?.read(cx);
                if worktree.is_visible() {
                    Some(format!(
                        "project-path-online:{}",
                        worktree.as_local().unwrap().abs_path().to_string_lossy()
                    ))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(&path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees(cx) {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    share.await?;
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }

    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

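    /// Opens the buffer at a project path, deduplicating concurrent requests: an already
    /// open buffer is returned immediately, an in-flight load is awaited through its watch
    /// channel, and otherwise a new local or remote load is started.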
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

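    /// Notifies the buffer's language server (if one is running for its worktree) that the
    /// document was opened, seeds completion triggers, and records the snapshot that later
    /// `didChange` notifications will be diffed against.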
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_servers
                            .get(&(worktree_id, adapter.name()))
                            .cloned();
                    }
                }

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }

    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }

    fn language_servers_for_worktree(
        &self,
        worktree_id: WorktreeId,
    ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
        self.language_servers.iter().filter_map(
            move |((language_server_worktree_id, _), server)| {
                if *language_server_worktree_id == worktree_id {
                    Some(server)
                } else {
                    None
                }
            },
        )
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }

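    /// Starts (at most once per worktree and adapter) the language server for a language,
    /// wiring up its diagnostics, progress, configuration, and workspace-edit callbacks,
    /// and informing it about matching buffers that are already open.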
1795 fn start_language_server(
1796 &mut self,
1797 worktree_id: WorktreeId,
1798 worktree_path: Arc<Path>,
1799 language: Arc<Language>,
1800 cx: &mut ModelContext<Self>,
1801 ) {
1802 let adapter = if let Some(adapter) = language.lsp_adapter() {
1803 adapter
1804 } else {
1805 return;
1806 };
1807 let key = (worktree_id, adapter.name());
1808 self.started_language_servers
1809 .entry(key.clone())
1810 .or_insert_with(|| {
1811 let server_id = post_inc(&mut self.next_language_server_id);
1812 let language_server = self.languages.start_language_server(
1813 server_id,
1814 language.clone(),
1815 worktree_path,
1816 self.client.http_client(),
1817 cx,
1818 );
1819 cx.spawn_weak(|this, mut cx| async move {
1820 let language_server = language_server?.await.log_err()?;
1821 let language_server = language_server
1822 .initialize(adapter.initialization_options())
1823 .await
1824 .log_err()?;
1825 let this = this.upgrade(&cx)?;
1826 let disk_based_diagnostics_progress_token =
1827 adapter.disk_based_diagnostics_progress_token();
1828
1829 language_server
1830 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1831 let this = this.downgrade();
1832 let adapter = adapter.clone();
1833 move |params, mut cx| {
1834 if let Some(this) = this.upgrade(&cx) {
1835 this.update(&mut cx, |this, cx| {
1836 this.on_lsp_diagnostics_published(
1837 server_id,
1838 params,
1839 &adapter,
1840 disk_based_diagnostics_progress_token,
1841 cx,
1842 );
1843 });
1844 }
1845 }
1846 })
1847 .detach();
1848
1849 language_server
1850 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1851 let settings = this
1852 .read_with(&cx, |this, _| this.language_server_settings.clone());
1853 move |params, _| {
1854 let settings = settings.lock().clone();
1855 async move {
1856 Ok(params
1857 .items
1858 .into_iter()
1859 .map(|item| {
1860 if let Some(section) = &item.section {
1861 settings
1862 .get(section)
1863 .cloned()
1864 .unwrap_or(serde_json::Value::Null)
1865 } else {
1866 settings.clone()
1867 }
1868 })
1869 .collect())
1870 }
1871 }
1872 })
1873 .detach();
1874
1875 language_server
1876 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1877 let this = this.downgrade();
1878 let adapter = adapter.clone();
1879 let language_server = language_server.clone();
1880 move |params, cx| {
1881 Self::on_lsp_workspace_edit(
1882 this,
1883 params,
1884 server_id,
1885 adapter.clone(),
1886 language_server.clone(),
1887 cx,
1888 )
1889 }
1890 })
1891 .detach();
1892
1893 language_server
1894 .on_notification::<lsp::notification::Progress, _>({
1895 let this = this.downgrade();
1896 move |params, mut cx| {
1897 if let Some(this) = this.upgrade(&cx) {
1898 this.update(&mut cx, |this, cx| {
1899 this.on_lsp_progress(
1900 params,
1901 server_id,
1902 disk_based_diagnostics_progress_token,
1903 cx,
1904 );
1905 });
1906 }
1907 }
1908 })
1909 .detach();
1910
1911 this.update(&mut cx, |this, cx| {
1912 this.language_servers
1913 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1914 this.language_server_statuses.insert(
1915 server_id,
1916 LanguageServerStatus {
1917 name: language_server.name().to_string(),
1918 pending_work: Default::default(),
1919 pending_diagnostic_updates: 0,
1920 },
1921 );
1922 language_server
1923 .notify::<lsp::notification::DidChangeConfiguration>(
1924 lsp::DidChangeConfigurationParams {
1925 settings: this.language_server_settings.lock().clone(),
1926 },
1927 )
1928 .ok();
1929
1930 if let Some(project_id) = this.shared_remote_id() {
1931 this.client
1932 .send(proto::StartLanguageServer {
1933 project_id,
1934 server: Some(proto::LanguageServer {
1935 id: server_id as u64,
1936 name: language_server.name().to_string(),
1937 }),
1938 })
1939 .log_err();
1940 }
1941
1942 // Tell the language server about every open buffer in the worktree that matches the language.
1943 for buffer in this.opened_buffers.values() {
1944 if let Some(buffer_handle) = buffer.upgrade(cx) {
1945 let buffer = buffer_handle.read(cx);
1946 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1947 file
1948 } else {
1949 continue;
1950 };
1951 let language = if let Some(language) = buffer.language() {
1952 language
1953 } else {
1954 continue;
1955 };
1956 if file.worktree.read(cx).id() != key.0
1957 || language.lsp_adapter().map(|a| a.name())
1958 != Some(key.1.clone())
1959 {
1960 continue;
1961 }
1962
1963 let file = file.as_local()?;
1964 let versions = this
1965 .buffer_snapshots
1966 .entry(buffer.remote_id())
1967 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1968 let (version, initial_snapshot) = versions.last().unwrap();
1969 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1970 let language_id = adapter.id_for_language(language.name().as_ref());
1971 language_server
1972 .notify::<lsp::notification::DidOpenTextDocument>(
1973 lsp::DidOpenTextDocumentParams {
1974 text_document: lsp::TextDocumentItem::new(
1975 uri,
1976 language_id.unwrap_or_default(),
1977 *version,
1978 initial_snapshot.text(),
1979 ),
1980 },
1981 )
1982 .log_err()?;
1983 buffer_handle.update(cx, |buffer, cx| {
1984 buffer.set_completion_triggers(
1985 language_server
1986 .capabilities()
1987 .completion_provider
1988 .as_ref()
1989 .and_then(|provider| {
1990 provider.trigger_characters.clone()
1991 })
                                        .unwrap_or_default(),
1993 cx,
1994 )
1995 });
1996 }
1997 }
1998
1999 cx.notify();
2000 Some(())
2001 });
2002
2003 Some(language_server)
2004 })
2005 });
2006 }
2007
2008 pub fn restart_language_servers_for_buffers(
2009 &mut self,
2010 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2011 cx: &mut ModelContext<Self>,
2012 ) -> Option<()> {
2013 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2014 .into_iter()
2015 .filter_map(|buffer| {
2016 let file = File::from_dyn(buffer.read(cx).file())?;
2017 let worktree = file.worktree.read(cx).as_local()?;
2018 let worktree_id = worktree.id();
2019 let worktree_abs_path = worktree.abs_path().clone();
2020 let full_path = file.full_path(cx);
2021 Some((worktree_id, worktree_abs_path, full_path))
2022 })
2023 .collect();
2024 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2025 let language = self.languages.select_language(&full_path)?;
2026 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2027 }
2028
2029 None
2030 }
2031
2032 fn restart_language_server(
2033 &mut self,
2034 worktree_id: WorktreeId,
2035 worktree_path: Arc<Path>,
2036 language: Arc<Language>,
2037 cx: &mut ModelContext<Self>,
2038 ) {
2039 let adapter = if let Some(adapter) = language.lsp_adapter() {
2040 adapter
2041 } else {
2042 return;
2043 };
2044 let key = (worktree_id, adapter.name());
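        // Tear down any existing server for this worktree/language pair, wait for it
        // to shut down on a background task, and then start a fresh one.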
2045 let server_to_shutdown = self.language_servers.remove(&key);
2046 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
2050 cx.spawn_weak(|this, mut cx| async move {
2051 if let Some(this) = this.upgrade(&cx) {
2052 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2053 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2054 shutdown_task.await;
2055 }
2056 }
2057
2058 this.update(&mut cx, |this, cx| {
2059 this.start_language_server(worktree_id, worktree_path, language, cx);
2060 });
2061 }
2062 })
2063 .detach();
2064 }
2065
2066 fn on_lsp_diagnostics_published(
2067 &mut self,
2068 server_id: usize,
2069 mut params: lsp::PublishDiagnosticsParams,
2070 adapter: &Arc<dyn LspAdapter>,
2071 disk_based_diagnostics_progress_token: Option<&str>,
2072 cx: &mut ModelContext<Self>,
2073 ) {
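        // Give the adapter a chance to post-process the diagnostics. Servers that
        // report disk-based diagnostics through a progress token signal start/finish
        // via `$/progress`; for servers that don't, bracket this publish with
        // started/finished events so collaborators see a consistent status.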
2074 adapter.process_diagnostics(&mut params);
2075 if disk_based_diagnostics_progress_token.is_none() {
2076 self.disk_based_diagnostics_started(cx);
2077 self.broadcast_language_server_update(
2078 server_id,
2079 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2080 proto::LspDiskBasedDiagnosticsUpdating {},
2081 ),
2082 );
2083 }
2084 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2085 .log_err();
2086 if disk_based_diagnostics_progress_token.is_none() {
2087 self.disk_based_diagnostics_finished(cx);
2088 self.broadcast_language_server_update(
2089 server_id,
2090 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2091 proto::LspDiskBasedDiagnosticsUpdated {},
2092 ),
2093 );
2094 }
2095 }
2096
2097 fn on_lsp_progress(
2098 &mut self,
2099 progress: lsp::ProgressParams,
2100 server_id: usize,
2101 disk_based_diagnostics_progress_token: Option<&str>,
2102 cx: &mut ModelContext<Self>,
2103 ) {
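        // Only string progress tokens are handled. The disk-based-diagnostics token is
        // counted separately so begin/end pairs can be balanced; every other token is
        // surfaced as generic per-server work status and broadcast to collaborators.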
2104 let token = match progress.token {
2105 lsp::NumberOrString::String(token) => token,
2106 lsp::NumberOrString::Number(token) => {
2107 log::info!("skipping numeric progress token {}", token);
2108 return;
2109 }
2110 };
2111 let progress = match progress.value {
2112 lsp::ProgressParamsValue::WorkDone(value) => value,
2113 };
2114 let language_server_status =
2115 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2116 status
2117 } else {
2118 return;
2119 };
2120 match progress {
2121 lsp::WorkDoneProgress::Begin(_) => {
2122 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2123 language_server_status.pending_diagnostic_updates += 1;
2124 if language_server_status.pending_diagnostic_updates == 1 {
2125 self.disk_based_diagnostics_started(cx);
2126 self.broadcast_language_server_update(
2127 server_id,
2128 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2129 proto::LspDiskBasedDiagnosticsUpdating {},
2130 ),
2131 );
2132 }
2133 } else {
2134 self.on_lsp_work_start(server_id, token.clone(), cx);
2135 self.broadcast_language_server_update(
2136 server_id,
2137 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2138 token,
2139 }),
2140 );
2141 }
2142 }
2143 lsp::WorkDoneProgress::Report(report) => {
2144 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2145 self.on_lsp_work_progress(
2146 server_id,
2147 token.clone(),
2148 LanguageServerProgress {
2149 message: report.message.clone(),
2150 percentage: report.percentage.map(|p| p as usize),
2151 last_update_at: Instant::now(),
2152 },
2153 cx,
2154 );
2155 self.broadcast_language_server_update(
2156 server_id,
2157 proto::update_language_server::Variant::WorkProgress(
2158 proto::LspWorkProgress {
2159 token,
2160 message: report.message,
2161 percentage: report.percentage.map(|p| p as u32),
2162 },
2163 ),
2164 );
2165 }
2166 }
2167 lsp::WorkDoneProgress::End(_) => {
2168 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2169 language_server_status.pending_diagnostic_updates -= 1;
2170 if language_server_status.pending_diagnostic_updates == 0 {
2171 self.disk_based_diagnostics_finished(cx);
2172 self.broadcast_language_server_update(
2173 server_id,
2174 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2175 proto::LspDiskBasedDiagnosticsUpdated {},
2176 ),
2177 );
2178 }
2179 } else {
2180 self.on_lsp_work_end(server_id, token.clone(), cx);
2181 self.broadcast_language_server_update(
2182 server_id,
2183 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2184 token,
2185 }),
2186 );
2187 }
2188 }
2189 }
2190 }
2191
2192 fn on_lsp_work_start(
2193 &mut self,
2194 language_server_id: usize,
2195 token: String,
2196 cx: &mut ModelContext<Self>,
2197 ) {
2198 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2199 status.pending_work.insert(
2200 token,
2201 LanguageServerProgress {
2202 message: None,
2203 percentage: None,
2204 last_update_at: Instant::now(),
2205 },
2206 );
2207 cx.notify();
2208 }
2209 }
2210
2211 fn on_lsp_work_progress(
2212 &mut self,
2213 language_server_id: usize,
2214 token: String,
2215 progress: LanguageServerProgress,
2216 cx: &mut ModelContext<Self>,
2217 ) {
2218 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2219 status.pending_work.insert(token, progress);
2220 cx.notify();
2221 }
2222 }
2223
2224 fn on_lsp_work_end(
2225 &mut self,
2226 language_server_id: usize,
2227 token: String,
2228 cx: &mut ModelContext<Self>,
2229 ) {
2230 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2231 status.pending_work.remove(&token);
2232 cx.notify();
2233 }
2234 }
2235
2236 async fn on_lsp_workspace_edit(
2237 this: WeakModelHandle<Self>,
2238 params: lsp::ApplyWorkspaceEditParams,
2239 server_id: usize,
2240 adapter: Arc<dyn LspAdapter>,
2241 language_server: Arc<LanguageServer>,
2242 mut cx: AsyncAppContext,
2243 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
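        // Apply the server-initiated edit and remember the resulting transaction so
        // that a later `workspace/executeCommand` round trip can hand it back to the
        // caller that triggered the command.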
2244 let this = this
2245 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2247 let transaction = Self::deserialize_workspace_edit(
2248 this.clone(),
2249 params.edit,
2250 true,
2251 adapter.clone(),
2252 language_server.clone(),
2253 &mut cx,
2254 )
2255 .await
2256 .log_err();
2257 this.update(&mut cx, |this, _| {
2258 if let Some(transaction) = transaction {
2259 this.last_workspace_edits_by_language_server
2260 .insert(server_id, transaction);
2261 }
2262 });
2263 Ok(lsp::ApplyWorkspaceEditResponse {
2264 applied: true,
2265 failed_change: None,
2266 failure_reason: None,
2267 })
2268 }
2269
2270 fn broadcast_language_server_update(
2271 &self,
2272 language_server_id: usize,
2273 event: proto::update_language_server::Variant,
2274 ) {
2275 if let Some(project_id) = self.shared_remote_id() {
2276 self.client
2277 .send(proto::UpdateLanguageServer {
2278 project_id,
2279 language_server_id: language_server_id as u64,
2280 variant: Some(event),
2281 })
2282 .log_err();
2283 }
2284 }
2285
2286 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2287 for (_, server) in self.language_servers.values() {
2288 server
2289 .notify::<lsp::notification::DidChangeConfiguration>(
2290 lsp::DidChangeConfigurationParams {
2291 settings: settings.clone(),
2292 },
2293 )
2294 .ok();
2295 }
2296 *self.language_server_settings.lock() = settings;
2297 }
2298
2299 pub fn language_server_statuses(
2300 &self,
2301 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2302 self.language_server_statuses.values()
2303 }
2304
2305 pub fn update_diagnostics(
2306 &mut self,
2307 params: lsp::PublishDiagnosticsParams,
2308 disk_based_sources: &[&str],
2309 cx: &mut ModelContext<Self>,
2310 ) -> Result<()> {
2311 let abs_path = params
2312 .uri
2313 .to_file_path()
2314 .map_err(|_| anyhow!("URI is not a file"))?;
2315 let mut diagnostics = Vec::default();
2316 let mut primary_diagnostic_group_ids = HashMap::default();
2317 let mut sources_by_group_id = HashMap::default();
2318 let mut supporting_diagnostics = HashMap::default();
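        // First pass: turn each primary diagnostic into a new group (emitting entries
        // for its related information), and stash diagnostics that merely point back
        // at an existing primary so their severity can be merged in a second pass.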
        for diagnostic in &params.diagnostics {
2320 let source = diagnostic.source.as_ref();
2321 let code = diagnostic.code.as_ref().map(|code| match code {
2322 lsp::NumberOrString::Number(code) => code.to_string(),
2323 lsp::NumberOrString::String(code) => code.clone(),
2324 });
2325 let range = range_from_lsp(diagnostic.range);
2326 let is_supporting = diagnostic
2327 .related_information
2328 .as_ref()
2329 .map_or(false, |infos| {
2330 infos.iter().any(|info| {
2331 primary_diagnostic_group_ids.contains_key(&(
2332 source,
2333 code.clone(),
2334 range_from_lsp(info.location.range),
2335 ))
2336 })
2337 });
2338
2339 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2340 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2341 });
2342
2343 if is_supporting {
2344 supporting_diagnostics.insert(
2345 (source, code.clone(), range),
2346 (diagnostic.severity, is_unnecessary),
2347 );
2348 } else {
2349 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2350 let is_disk_based = source.map_or(false, |source| {
2351 disk_based_sources.contains(&source.as_str())
2352 });
2353
2354 sources_by_group_id.insert(group_id, source);
2355 primary_diagnostic_group_ids
2356 .insert((source, code.clone(), range.clone()), group_id);
2357
2358 diagnostics.push(DiagnosticEntry {
2359 range,
2360 diagnostic: Diagnostic {
2361 code: code.clone(),
2362 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2363 message: diagnostic.message.clone(),
2364 group_id,
2365 is_primary: true,
2366 is_valid: true,
2367 is_disk_based,
2368 is_unnecessary,
2369 },
2370 });
2371 if let Some(infos) = &diagnostic.related_information {
2372 for info in infos {
2373 if info.location.uri == params.uri && !info.message.is_empty() {
2374 let range = range_from_lsp(info.location.range);
2375 diagnostics.push(DiagnosticEntry {
2376 range,
2377 diagnostic: Diagnostic {
2378 code: code.clone(),
2379 severity: DiagnosticSeverity::INFORMATION,
2380 message: info.message.clone(),
2381 group_id,
2382 is_primary: false,
2383 is_valid: true,
2384 is_disk_based,
2385 is_unnecessary: false,
2386 },
2387 });
2388 }
2389 }
2390 }
2391 }
2392 }
2393
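        // Second pass: copy severity and "unnecessary" hints from the stashed
        // supporting diagnostics onto the matching non-primary entries.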
2394 for entry in &mut diagnostics {
2395 let diagnostic = &mut entry.diagnostic;
2396 if !diagnostic.is_primary {
2397 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2398 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2399 source,
2400 diagnostic.code.clone(),
2401 entry.range.clone(),
2402 )) {
2403 if let Some(severity) = severity {
2404 diagnostic.severity = severity;
2405 }
2406 diagnostic.is_unnecessary = is_unnecessary;
2407 }
2408 }
2409 }
2410
2411 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2412 Ok(())
2413 }
2414
2415 pub fn update_diagnostic_entries(
2416 &mut self,
2417 abs_path: PathBuf,
2418 version: Option<i32>,
2419 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2420 cx: &mut ModelContext<Project>,
2421 ) -> Result<(), anyhow::Error> {
2422 let (worktree, relative_path) = self
2423 .find_local_worktree(&abs_path, cx)
2424 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2425 if !worktree.read(cx).is_visible() {
2426 return Ok(());
2427 }
2428
2429 let project_path = ProjectPath {
2430 worktree_id: worktree.read(cx).id(),
2431 path: relative_path.into(),
2432 };
2433 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2434 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2435 }
2436
2437 let updated = worktree.update(cx, |worktree, cx| {
2438 worktree
2439 .as_local_mut()
2440 .ok_or_else(|| anyhow!("not a local worktree"))?
2441 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2442 })?;
2443 if updated {
2444 cx.emit(Event::DiagnosticsUpdated(project_path));
2445 }
2446 Ok(())
2447 }
2448
2449 fn update_buffer_diagnostics(
2450 &mut self,
2451 buffer: &ModelHandle<Buffer>,
2452 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2453 version: Option<i32>,
2454 cx: &mut ModelContext<Self>,
2455 ) -> Result<()> {
2456 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2457 Ordering::Equal
2458 .then_with(|| b.is_primary.cmp(&a.is_primary))
2459 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2460 .then_with(|| a.severity.cmp(&b.severity))
2461 .then_with(|| a.message.cmp(&b.message))
2462 }
2463
2464 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2465
2466 diagnostics.sort_unstable_by(|a, b| {
2467 Ordering::Equal
2468 .then_with(|| a.range.start.cmp(&b.range.start))
2469 .then_with(|| b.range.end.cmp(&a.range.end))
2470 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2471 });
2472
2473 let mut sanitized_diagnostics = Vec::new();
2474 let edits_since_save = Patch::new(
2475 snapshot
2476 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2477 .collect(),
2478 );
2479 for entry in diagnostics {
2480 let start;
2481 let end;
2482 if entry.diagnostic.is_disk_based {
2483 // Some diagnostics are based on files on disk instead of buffers'
2484 // current contents. Adjust these diagnostics' ranges to reflect
2485 // any unsaved edits.
2486 start = edits_since_save.old_to_new(entry.range.start);
2487 end = edits_since_save.old_to_new(entry.range.end);
2488 } else {
2489 start = entry.range.start;
2490 end = entry.range.end;
2491 }
2492
2493 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2494 ..snapshot.clip_point_utf16(end, Bias::Right);
2495
2496 // Expand empty ranges by one character
2497 if range.start == range.end {
2498 range.end.column += 1;
2499 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2500 if range.start == range.end && range.end.column > 0 {
2501 range.start.column -= 1;
2502 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2503 }
2504 }
2505
2506 sanitized_diagnostics.push(DiagnosticEntry {
2507 range,
2508 diagnostic: entry.diagnostic,
2509 });
2510 }
2511 drop(edits_since_save);
2512
2513 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2514 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2515 Ok(())
2516 }
2517
2518 pub fn reload_buffers(
2519 &self,
2520 buffers: HashSet<ModelHandle<Buffer>>,
2521 push_to_history: bool,
2522 cx: &mut ModelContext<Self>,
2523 ) -> Task<Result<ProjectTransaction>> {
2524 let mut local_buffers = Vec::new();
2525 let mut remote_buffers = None;
2526 for buffer_handle in buffers {
2527 let buffer = buffer_handle.read(cx);
2528 if buffer.is_dirty() {
2529 if let Some(file) = File::from_dyn(buffer.file()) {
2530 if file.is_local() {
2531 local_buffers.push(buffer_handle);
2532 } else {
2533 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2534 }
2535 }
2536 }
2537 }
2538
2539 let remote_buffers = self.remote_id().zip(remote_buffers);
2540 let client = self.client.clone();
2541
2542 cx.spawn(|this, mut cx| async move {
2543 let mut project_transaction = ProjectTransaction::default();
2544
2545 if let Some((project_id, remote_buffers)) = remote_buffers {
2546 let response = client
2547 .request(proto::ReloadBuffers {
2548 project_id,
2549 buffer_ids: remote_buffers
2550 .iter()
2551 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2552 .collect(),
2553 })
2554 .await?
2555 .transaction
2556 .ok_or_else(|| anyhow!("missing transaction"))?;
2557 project_transaction = this
2558 .update(&mut cx, |this, cx| {
2559 this.deserialize_project_transaction(response, push_to_history, cx)
2560 })
2561 .await?;
2562 }
2563
2564 for buffer in local_buffers {
2565 let transaction = buffer
2566 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2567 .await?;
2568 buffer.update(&mut cx, |buffer, cx| {
2569 if let Some(transaction) = transaction {
2570 if !push_to_history {
2571 buffer.forget_transaction(transaction.id);
2572 }
2573 project_transaction.0.insert(cx.handle(), transaction);
2574 }
2575 });
2576 }
2577
2578 Ok(project_transaction)
2579 })
2580 }
2581
2582 pub fn format(
2583 &self,
2584 buffers: HashSet<ModelHandle<Buffer>>,
2585 push_to_history: bool,
2586 cx: &mut ModelContext<Project>,
2587 ) -> Task<Result<ProjectTransaction>> {
2588 let mut local_buffers = Vec::new();
2589 let mut remote_buffers = None;
2590 for buffer_handle in buffers {
2591 let buffer = buffer_handle.read(cx);
2592 if let Some(file) = File::from_dyn(buffer.file()) {
2593 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2594 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2595 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2596 }
2597 } else {
2598 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2599 }
2600 } else {
2601 return Task::ready(Ok(Default::default()));
2602 }
2603 }
2604
2605 let remote_buffers = self.remote_id().zip(remote_buffers);
2606 let client = self.client.clone();
2607
2608 cx.spawn(|this, mut cx| async move {
2609 let mut project_transaction = ProjectTransaction::default();
2610
2611 if let Some((project_id, remote_buffers)) = remote_buffers {
2612 let response = client
2613 .request(proto::FormatBuffers {
2614 project_id,
2615 buffer_ids: remote_buffers
2616 .iter()
2617 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2618 .collect(),
2619 })
2620 .await?
2621 .transaction
2622 .ok_or_else(|| anyhow!("missing transaction"))?;
2623 project_transaction = this
2624 .update(&mut cx, |this, cx| {
2625 this.deserialize_project_transaction(response, push_to_history, cx)
2626 })
2627 .await?;
2628 }
2629
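            // Format local buffers directly through their language servers, preferring
            // whole-document formatting and falling back to formatting a range that
            // spans the entire buffer when that's all the server supports.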
2630 for (buffer, buffer_abs_path, language_server) in local_buffers {
2631 let text_document = lsp::TextDocumentIdentifier::new(
2632 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2633 );
2634 let capabilities = &language_server.capabilities();
2635 let tab_size = cx.update(|cx| {
2636 let language_name = buffer.read(cx).language().map(|language| language.name());
2637 cx.global::<Settings>().tab_size(language_name.as_deref())
2638 });
2639 let lsp_edits = if capabilities
2640 .document_formatting_provider
2641 .as_ref()
2642 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2643 {
2644 language_server
2645 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2646 text_document,
2647 options: lsp::FormattingOptions {
2648 tab_size,
2649 insert_spaces: true,
2650 insert_final_newline: Some(true),
2651 ..Default::default()
2652 },
2653 work_done_progress_params: Default::default(),
2654 })
2655 .await?
2656 } else if capabilities
2657 .document_range_formatting_provider
2658 .as_ref()
2659 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2660 {
2661 let buffer_start = lsp::Position::new(0, 0);
2662 let buffer_end =
2663 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2664 language_server
2665 .request::<lsp::request::RangeFormatting>(
2666 lsp::DocumentRangeFormattingParams {
2667 text_document,
2668 range: lsp::Range::new(buffer_start, buffer_end),
2669 options: lsp::FormattingOptions {
                                    tab_size,
2671 insert_spaces: true,
2672 insert_final_newline: Some(true),
2673 ..Default::default()
2674 },
2675 work_done_progress_params: Default::default(),
2676 },
2677 )
2678 .await?
2679 } else {
2680 continue;
2681 };
2682
2683 if let Some(lsp_edits) = lsp_edits {
2684 let edits = this
2685 .update(&mut cx, |this, cx| {
2686 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2687 })
2688 .await?;
2689 buffer.update(&mut cx, |buffer, cx| {
2690 buffer.finalize_last_transaction();
2691 buffer.start_transaction();
2692 for (range, text) in edits {
2693 buffer.edit([(range, text)], cx);
2694 }
2695 if buffer.end_transaction(cx).is_some() {
2696 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2697 if !push_to_history {
2698 buffer.forget_transaction(transaction.id);
2699 }
2700 project_transaction.0.insert(cx.handle(), transaction);
2701 }
2702 });
2703 }
2704 }
2705
2706 Ok(project_transaction)
2707 })
2708 }
2709
2710 pub fn definition<T: ToPointUtf16>(
2711 &self,
2712 buffer: &ModelHandle<Buffer>,
2713 position: T,
2714 cx: &mut ModelContext<Self>,
2715 ) -> Task<Result<Vec<Location>>> {
2716 let position = position.to_point_utf16(buffer.read(cx));
2717 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2718 }
2719
2720 pub fn references<T: ToPointUtf16>(
2721 &self,
2722 buffer: &ModelHandle<Buffer>,
2723 position: T,
2724 cx: &mut ModelContext<Self>,
2725 ) -> Task<Result<Vec<Location>>> {
2726 let position = position.to_point_utf16(buffer.read(cx));
2727 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2728 }
2729
2730 pub fn document_highlights<T: ToPointUtf16>(
2731 &self,
2732 buffer: &ModelHandle<Buffer>,
2733 position: T,
2734 cx: &mut ModelContext<Self>,
2735 ) -> Task<Result<Vec<DocumentHighlight>>> {
2736 let position = position.to_point_utf16(buffer.read(cx));
2737
2738 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2739 }
2740
2741 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2742 if self.is_local() {
2743 let mut requests = Vec::new();
2744 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2745 let worktree_id = *worktree_id;
2746 if let Some(worktree) = self
2747 .worktree_for_id(worktree_id, cx)
2748 .and_then(|worktree| worktree.read(cx).as_local())
2749 {
2750 let lsp_adapter = lsp_adapter.clone();
2751 let worktree_abs_path = worktree.abs_path().clone();
2752 requests.push(
2753 language_server
2754 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2755 query: query.to_string(),
2756 ..Default::default()
2757 })
2758 .log_err()
2759 .map(move |response| {
2760 (
2761 lsp_adapter,
2762 worktree_id,
2763 worktree_abs_path,
2764 response.unwrap_or_default(),
2765 )
2766 }),
2767 );
2768 }
2769 }
2770
2771 cx.spawn_weak(|this, cx| async move {
2772 let responses = futures::future::join_all(requests).await;
2773 let this = if let Some(this) = this.upgrade(&cx) {
2774 this
2775 } else {
2776 return Ok(Default::default());
2777 };
2778 this.read_with(&cx, |this, cx| {
2779 let mut symbols = Vec::new();
2780 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2781 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2782 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2783 let mut worktree_id = source_worktree_id;
2784 let path;
2785 if let Some((worktree, rel_path)) =
2786 this.find_local_worktree(&abs_path, cx)
2787 {
2788 worktree_id = worktree.read(cx).id();
2789 path = rel_path;
2790 } else {
2791 path = relativize_path(&worktree_abs_path, &abs_path);
2792 }
2793
2794 let label = this
2795 .languages
2796 .select_language(&path)
2797 .and_then(|language| {
2798 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2799 })
2800 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2801 let signature = this.symbol_signature(worktree_id, &path);
2802
2803 Some(Symbol {
2804 source_worktree_id,
2805 worktree_id,
2806 language_server_name: adapter.name(),
2807 name: lsp_symbol.name,
2808 kind: lsp_symbol.kind,
2809 label,
2810 path,
2811 range: range_from_lsp(lsp_symbol.location.range),
2812 signature,
2813 })
2814 }));
2815 }
2816 Ok(symbols)
2817 })
2818 })
2819 } else if let Some(project_id) = self.remote_id() {
2820 let request = self.client.request(proto::GetProjectSymbols {
2821 project_id,
2822 query: query.to_string(),
2823 });
2824 cx.spawn_weak(|this, cx| async move {
2825 let response = request.await?;
2826 let mut symbols = Vec::new();
2827 if let Some(this) = this.upgrade(&cx) {
2828 this.read_with(&cx, |this, _| {
2829 symbols.extend(
2830 response
2831 .symbols
2832 .into_iter()
2833 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2834 );
2835 })
2836 }
2837 Ok(symbols)
2838 })
2839 } else {
2840 Task::ready(Ok(Default::default()))
2841 }
2842 }
2843
2844 pub fn open_buffer_for_symbol(
2845 &mut self,
2846 symbol: &Symbol,
2847 cx: &mut ModelContext<Self>,
2848 ) -> Task<Result<ModelHandle<Buffer>>> {
2849 if self.is_local() {
2850 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2851 symbol.source_worktree_id,
2852 symbol.language_server_name.clone(),
2853 )) {
2854 server.clone()
2855 } else {
2856 return Task::ready(Err(anyhow!(
2857 "language server for worktree and language not found"
2858 )));
2859 };
2860
2861 let worktree_abs_path = if let Some(worktree_abs_path) = self
2862 .worktree_for_id(symbol.worktree_id, cx)
2863 .and_then(|worktree| worktree.read(cx).as_local())
2864 .map(|local_worktree| local_worktree.abs_path())
2865 {
2866 worktree_abs_path
2867 } else {
2868 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2869 };
2870 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2871 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2872 uri
2873 } else {
2874 return Task::ready(Err(anyhow!("invalid symbol path")));
2875 };
2876
2877 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2878 } else if let Some(project_id) = self.remote_id() {
2879 let request = self.client.request(proto::OpenBufferForSymbol {
2880 project_id,
2881 symbol: Some(serialize_symbol(symbol)),
2882 });
2883 cx.spawn(|this, mut cx| async move {
2884 let response = request.await?;
2885 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2886 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2887 .await
2888 })
2889 } else {
2890 Task::ready(Err(anyhow!("project does not have a remote id")))
2891 }
2892 }
2893
2894 pub fn hover<T: ToPointUtf16>(
2895 &self,
2896 buffer: &ModelHandle<Buffer>,
2897 position: T,
2898 cx: &mut ModelContext<Self>,
2899 ) -> Task<Result<Option<Hover>>> {
2900 // TODO: proper return type
2901 let position = position.to_point_utf16(buffer.read(cx));
2902 self.request_lsp(buffer.clone(), GetHover { position }, cx)
2903 }
2904
2905 pub fn completions<T: ToPointUtf16>(
2906 &self,
2907 source_buffer_handle: &ModelHandle<Buffer>,
2908 position: T,
2909 cx: &mut ModelContext<Self>,
2910 ) -> Task<Result<Vec<Completion>>> {
2911 let source_buffer_handle = source_buffer_handle.clone();
2912 let source_buffer = source_buffer_handle.read(cx);
2913 let buffer_id = source_buffer.remote_id();
2914 let language = source_buffer.language().cloned();
2915 let worktree;
2916 let buffer_abs_path;
2917 if let Some(file) = File::from_dyn(source_buffer.file()) {
2918 worktree = file.worktree.clone();
2919 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2920 } else {
2921 return Task::ready(Ok(Default::default()));
2922 };
2923
2924 let position = position.to_point_utf16(source_buffer);
2925 let anchor = source_buffer.anchor_after(position);
2926
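        // Local buffers query the language server directly; remote buffers forward the
        // request to the host project over RPC.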
2927 if worktree.read(cx).as_local().is_some() {
2928 let buffer_abs_path = buffer_abs_path.unwrap();
2929 let (_, lang_server) =
2930 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2931 server.clone()
2932 } else {
2933 return Task::ready(Ok(Default::default()));
2934 };
2935
2936 cx.spawn(|_, cx| async move {
2937 let completions = lang_server
2938 .request::<lsp::request::Completion>(lsp::CompletionParams {
2939 text_document_position: lsp::TextDocumentPositionParams::new(
2940 lsp::TextDocumentIdentifier::new(
2941 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2942 ),
2943 point_to_lsp(position),
2944 ),
2945 context: Default::default(),
2946 work_done_progress_params: Default::default(),
2947 partial_result_params: Default::default(),
2948 })
2949 .await
2950 .context("lsp completion request failed")?;
2951
2952 let completions = if let Some(completions) = completions {
2953 match completions {
2954 lsp::CompletionResponse::Array(completions) => completions,
2955 lsp::CompletionResponse::List(list) => list.items,
2956 }
2957 } else {
2958 Default::default()
2959 };
2960
2961 source_buffer_handle.read_with(&cx, |this, _| {
2962 let snapshot = this.snapshot();
2963 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2964 let mut range_for_token = None;
2965 Ok(completions
2966 .into_iter()
2967 .filter_map(|lsp_completion| {
2968 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2969 // If the language server provides a range to overwrite, then
2970 // check that the range is valid.
2971 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2972 let range = range_from_lsp(edit.range);
2973 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2974 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2975 if start != range.start || end != range.end {
2976 log::info!("completion out of expected range");
2977 return None;
2978 }
2979 (
2980 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2981 edit.new_text.clone(),
2982 )
2983 }
2984 // If the language server does not provide a range, then infer
2985 // the range based on the syntax tree.
2986 None => {
2987 if position != clipped_position {
2988 log::info!("completion out of expected range");
2989 return None;
2990 }
2991 let Range { start, end } = range_for_token
2992 .get_or_insert_with(|| {
2993 let offset = position.to_offset(&snapshot);
2994 snapshot
2995 .range_for_word_token_at(offset)
2996 .unwrap_or_else(|| offset..offset)
2997 })
2998 .clone();
2999 let text = lsp_completion
3000 .insert_text
3001 .as_ref()
3002 .unwrap_or(&lsp_completion.label)
3003 .clone();
3004 (
3005 snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text,
3007 )
3008 }
3009 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3010 log::info!("unsupported insert/replace completion");
3011 return None;
3012 }
3013 };
3014
3015 Some(Completion {
3016 old_range,
3017 new_text,
3018 label: language
3019 .as_ref()
3020 .and_then(|l| l.label_for_completion(&lsp_completion))
3021 .unwrap_or_else(|| {
3022 CodeLabel::plain(
3023 lsp_completion.label.clone(),
3024 lsp_completion.filter_text.as_deref(),
3025 )
3026 }),
3027 lsp_completion,
3028 })
3029 })
3030 .collect())
3031 })
3032 })
3033 } else if let Some(project_id) = self.remote_id() {
3034 let rpc = self.client.clone();
3035 let message = proto::GetCompletions {
3036 project_id,
3037 buffer_id,
3038 position: Some(language::proto::serialize_anchor(&anchor)),
3039 version: serialize_version(&source_buffer.version()),
3040 };
3041 cx.spawn_weak(|_, mut cx| async move {
3042 let response = rpc.request(message).await?;
3043
3044 source_buffer_handle
3045 .update(&mut cx, |buffer, _| {
3046 buffer.wait_for_version(deserialize_version(response.version))
3047 })
3048 .await;
3049
3050 response
3051 .completions
3052 .into_iter()
3053 .map(|completion| {
3054 language::proto::deserialize_completion(completion, language.as_ref())
3055 })
3056 .collect()
3057 })
3058 } else {
3059 Task::ready(Ok(Default::default()))
3060 }
3061 }
3062
3063 pub fn apply_additional_edits_for_completion(
3064 &self,
3065 buffer_handle: ModelHandle<Buffer>,
3066 completion: Completion,
3067 push_to_history: bool,
3068 cx: &mut ModelContext<Self>,
3069 ) -> Task<Result<Option<Transaction>>> {
3070 let buffer = buffer_handle.read(cx);
3071 let buffer_id = buffer.remote_id();
3072
3073 if self.is_local() {
3074 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3075 {
3076 server.clone()
3077 } else {
3078 return Task::ready(Ok(Default::default()));
3079 };
3080
3081 cx.spawn(|this, mut cx| async move {
3082 let resolved_completion = lang_server
3083 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3084 .await?;
3085 if let Some(edits) = resolved_completion.additional_text_edits {
3086 let edits = this
3087 .update(&mut cx, |this, cx| {
3088 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3089 })
3090 .await?;
3091 buffer_handle.update(&mut cx, |buffer, cx| {
3092 buffer.finalize_last_transaction();
3093 buffer.start_transaction();
3094 for (range, text) in edits {
3095 buffer.edit([(range, text)], cx);
3096 }
3097 let transaction = if buffer.end_transaction(cx).is_some() {
3098 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3099 if !push_to_history {
3100 buffer.forget_transaction(transaction.id);
3101 }
3102 Some(transaction)
3103 } else {
3104 None
3105 };
3106 Ok(transaction)
3107 })
3108 } else {
3109 Ok(None)
3110 }
3111 })
3112 } else if let Some(project_id) = self.remote_id() {
3113 let client = self.client.clone();
3114 cx.spawn(|_, mut cx| async move {
3115 let response = client
3116 .request(proto::ApplyCompletionAdditionalEdits {
3117 project_id,
3118 buffer_id,
3119 completion: Some(language::proto::serialize_completion(&completion)),
3120 })
3121 .await?;
3122
3123 if let Some(transaction) = response.transaction {
3124 let transaction = language::proto::deserialize_transaction(transaction)?;
3125 buffer_handle
3126 .update(&mut cx, |buffer, _| {
3127 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3128 })
3129 .await;
3130 if push_to_history {
3131 buffer_handle.update(&mut cx, |buffer, _| {
3132 buffer.push_transaction(transaction.clone(), Instant::now());
3133 });
3134 }
3135 Ok(Some(transaction))
3136 } else {
3137 Ok(None)
3138 }
3139 })
3140 } else {
3141 Task::ready(Err(anyhow!("project does not have a remote id")))
3142 }
3143 }
3144
3145 pub fn code_actions<T: Clone + ToOffset>(
3146 &self,
3147 buffer_handle: &ModelHandle<Buffer>,
3148 range: Range<T>,
3149 cx: &mut ModelContext<Self>,
3150 ) -> Task<Result<Vec<CodeAction>>> {
3151 let buffer_handle = buffer_handle.clone();
3152 let buffer = buffer_handle.read(cx);
3153 let snapshot = buffer.snapshot();
3154 let relevant_diagnostics = snapshot
3155 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3156 .map(|entry| entry.to_lsp_diagnostic_stub())
3157 .collect();
3158 let buffer_id = buffer.remote_id();
3159 let worktree;
3160 let buffer_abs_path;
3161 if let Some(file) = File::from_dyn(buffer.file()) {
3162 worktree = file.worktree.clone();
3163 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3164 } else {
3165 return Task::ready(Ok(Default::default()));
3166 };
3167 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3168
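        // As with completions: local buffers query the language server, remote buffers
        // go through the host project.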
3169 if worktree.read(cx).as_local().is_some() {
3170 let buffer_abs_path = buffer_abs_path.unwrap();
3171 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3172 {
3173 server.clone()
3174 } else {
3175 return Task::ready(Ok(Default::default()));
3176 };
3177
3178 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3179 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3181 return Ok(Default::default());
3182 }
3183
3184 Ok(lang_server
3185 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3186 text_document: lsp::TextDocumentIdentifier::new(
3187 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3188 ),
3189 range: lsp_range,
3190 work_done_progress_params: Default::default(),
3191 partial_result_params: Default::default(),
3192 context: lsp::CodeActionContext {
3193 diagnostics: relevant_diagnostics,
3194 only: Some(vec![
3195 lsp::CodeActionKind::QUICKFIX,
3196 lsp::CodeActionKind::REFACTOR,
3197 lsp::CodeActionKind::REFACTOR_EXTRACT,
3198 lsp::CodeActionKind::SOURCE,
3199 ]),
3200 },
3201 })
3202 .await?
3203 .unwrap_or_default()
3204 .into_iter()
3205 .filter_map(|entry| {
3206 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3207 Some(CodeAction {
3208 range: range.clone(),
3209 lsp_action,
3210 })
3211 } else {
3212 None
3213 }
3214 })
3215 .collect())
3216 })
3217 } else if let Some(project_id) = self.remote_id() {
3218 let rpc = self.client.clone();
3219 let version = buffer.version();
3220 cx.spawn_weak(|_, mut cx| async move {
3221 let response = rpc
3222 .request(proto::GetCodeActions {
3223 project_id,
3224 buffer_id,
3225 start: Some(language::proto::serialize_anchor(&range.start)),
3226 end: Some(language::proto::serialize_anchor(&range.end)),
3227 version: serialize_version(&version),
3228 })
3229 .await?;
3230
3231 buffer_handle
3232 .update(&mut cx, |buffer, _| {
3233 buffer.wait_for_version(deserialize_version(response.version))
3234 })
3235 .await;
3236
3237 response
3238 .actions
3239 .into_iter()
3240 .map(language::proto::deserialize_code_action)
3241 .collect()
3242 })
3243 } else {
3244 Task::ready(Ok(Default::default()))
3245 }
3246 }
3247
3248 pub fn apply_code_action(
3249 &self,
3250 buffer_handle: ModelHandle<Buffer>,
3251 mut action: CodeAction,
3252 push_to_history: bool,
3253 cx: &mut ModelContext<Self>,
3254 ) -> Task<Result<ProjectTransaction>> {
3255 if self.is_local() {
3256 let buffer = buffer_handle.read(cx);
3257 let (lsp_adapter, lang_server) =
3258 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3259 server.clone()
3260 } else {
3261 return Task::ready(Ok(Default::default()));
3262 };
3263 let range = action.range.to_point_utf16(buffer);
3264
3265 cx.spawn(|this, mut cx| async move {
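                // Resolve the action before applying it: either patch its
                // `codeActionParams` range and ask the server to resolve it, or, when it
                // carries no resolve data, re-request code actions and pick the matching
                // one by title.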
3266 if let Some(lsp_range) = action
3267 .lsp_action
3268 .data
3269 .as_mut()
3270 .and_then(|d| d.get_mut("codeActionParams"))
3271 .and_then(|d| d.get_mut("range"))
3272 {
3273 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3274 action.lsp_action = lang_server
3275 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3276 .await?;
3277 } else {
3278 let actions = this
3279 .update(&mut cx, |this, cx| {
3280 this.code_actions(&buffer_handle, action.range, cx)
3281 })
3282 .await?;
3283 action.lsp_action = actions
3284 .into_iter()
3285 .find(|a| a.lsp_action.title == action.lsp_action.title)
3286 .ok_or_else(|| anyhow!("code action is outdated"))?
3287 .lsp_action;
3288 }
3289
3290 if let Some(edit) = action.lsp_action.edit {
3291 Self::deserialize_workspace_edit(
3292 this,
3293 edit,
3294 push_to_history,
3295 lsp_adapter,
3296 lang_server,
3297 &mut cx,
3298 )
3299 .await
3300 } else if let Some(command) = action.lsp_action.command {
3301 this.update(&mut cx, |this, _| {
3302 this.last_workspace_edits_by_language_server
3303 .remove(&lang_server.server_id());
3304 });
3305 lang_server
3306 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3307 command: command.command,
3308 arguments: command.arguments.unwrap_or_default(),
3309 ..Default::default()
3310 })
3311 .await?;
3312 Ok(this.update(&mut cx, |this, _| {
3313 this.last_workspace_edits_by_language_server
3314 .remove(&lang_server.server_id())
3315 .unwrap_or_default()
3316 }))
3317 } else {
3318 Ok(ProjectTransaction::default())
3319 }
3320 })
3321 } else if let Some(project_id) = self.remote_id() {
3322 let client = self.client.clone();
3323 let request = proto::ApplyCodeAction {
3324 project_id,
3325 buffer_id: buffer_handle.read(cx).remote_id(),
3326 action: Some(language::proto::serialize_code_action(&action)),
3327 };
3328 cx.spawn(|this, mut cx| async move {
3329 let response = client
3330 .request(request)
3331 .await?
3332 .transaction
3333 .ok_or_else(|| anyhow!("missing transaction"))?;
3334 this.update(&mut cx, |this, cx| {
3335 this.deserialize_project_transaction(response, push_to_history, cx)
3336 })
3337 .await
3338 })
3339 } else {
3340 Task::ready(Err(anyhow!("project does not have a remote id")))
3341 }
3342 }
3343
3344 async fn deserialize_workspace_edit(
3345 this: ModelHandle<Self>,
3346 edit: lsp::WorkspaceEdit,
3347 push_to_history: bool,
3348 lsp_adapter: Arc<dyn LspAdapter>,
3349 language_server: Arc<LanguageServer>,
3350 cx: &mut AsyncAppContext,
3351 ) -> Result<ProjectTransaction> {
3352 let fs = this.read_with(cx, |this, _| this.fs.clone());
3353 let mut operations = Vec::new();
3354 if let Some(document_changes) = edit.document_changes {
3355 match document_changes {
3356 lsp::DocumentChanges::Edits(edits) => {
3357 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3358 }
3359 lsp::DocumentChanges::Operations(ops) => operations = ops,
3360 }
3361 } else if let Some(changes) = edit.changes {
3362 operations.extend(changes.into_iter().map(|(uri, edits)| {
3363 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3364 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3365 uri,
3366 version: None,
3367 },
3368 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3369 })
3370 }));
3371 }
3372
3373 let mut project_transaction = ProjectTransaction::default();
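        // Apply the operations in order: resource operations (create/rename/delete) go
        // through the project's `Fs`, while text edits are applied to buffers opened
        // via the language server, collecting one transaction per edited buffer.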
3374 for operation in operations {
3375 match operation {
3376 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3377 let abs_path = op
3378 .uri
3379 .to_file_path()
3380 .map_err(|_| anyhow!("can't convert URI to path"))?;
3381
3382 if let Some(parent_path) = abs_path.parent() {
3383 fs.create_dir(parent_path).await?;
3384 }
3385 if abs_path.ends_with("/") {
3386 fs.create_dir(&abs_path).await?;
3387 } else {
3388 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3389 .await?;
3390 }
3391 }
3392 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3393 let source_abs_path = op
3394 .old_uri
3395 .to_file_path()
3396 .map_err(|_| anyhow!("can't convert URI to path"))?;
3397 let target_abs_path = op
3398 .new_uri
3399 .to_file_path()
3400 .map_err(|_| anyhow!("can't convert URI to path"))?;
3401 fs.rename(
3402 &source_abs_path,
3403 &target_abs_path,
3404 op.options.map(Into::into).unwrap_or_default(),
3405 )
3406 .await?;
3407 }
3408 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3409 let abs_path = op
3410 .uri
3411 .to_file_path()
3412 .map_err(|_| anyhow!("can't convert URI to path"))?;
3413 let options = op.options.map(Into::into).unwrap_or_default();
3414 if abs_path.ends_with("/") {
3415 fs.remove_dir(&abs_path, options).await?;
3416 } else {
3417 fs.remove_file(&abs_path, options).await?;
3418 }
3419 }
3420 lsp::DocumentChangeOperation::Edit(op) => {
3421 let buffer_to_edit = this
3422 .update(cx, |this, cx| {
3423 this.open_local_buffer_via_lsp(
3424 op.text_document.uri,
3425 lsp_adapter.clone(),
3426 language_server.clone(),
3427 cx,
3428 )
3429 })
3430 .await?;
3431
3432 let edits = this
3433 .update(cx, |this, cx| {
3434 let edits = op.edits.into_iter().map(|edit| match edit {
3435 lsp::OneOf::Left(edit) => edit,
3436 lsp::OneOf::Right(edit) => edit.text_edit,
3437 });
3438 this.edits_from_lsp(
3439 &buffer_to_edit,
3440 edits,
3441 op.text_document.version,
3442 cx,
3443 )
3444 })
3445 .await?;
3446
3447 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3448 buffer.finalize_last_transaction();
3449 buffer.start_transaction();
3450 for (range, text) in edits {
3451 buffer.edit([(range, text)], cx);
3452 }
3453 let transaction = if buffer.end_transaction(cx).is_some() {
3454 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3455 if !push_to_history {
3456 buffer.forget_transaction(transaction.id);
3457 }
3458 Some(transaction)
3459 } else {
3460 None
3461 };
3462
3463 transaction
3464 });
3465 if let Some(transaction) = transaction {
3466 project_transaction.0.insert(buffer_to_edit, transaction);
3467 }
3468 }
3469 }
3470 }
3471
3472 Ok(project_transaction)
3473 }
3474
3475 pub fn prepare_rename<T: ToPointUtf16>(
3476 &self,
3477 buffer: ModelHandle<Buffer>,
3478 position: T,
3479 cx: &mut ModelContext<Self>,
3480 ) -> Task<Result<Option<Range<Anchor>>>> {
3481 let position = position.to_point_utf16(buffer.read(cx));
3482 self.request_lsp(buffer, PrepareRename { position }, cx)
3483 }
3484
3485 pub fn perform_rename<T: ToPointUtf16>(
3486 &self,
3487 buffer: ModelHandle<Buffer>,
3488 position: T,
3489 new_name: String,
3490 push_to_history: bool,
3491 cx: &mut ModelContext<Self>,
3492 ) -> Task<Result<ProjectTransaction>> {
3493 let position = position.to_point_utf16(buffer.read(cx));
3494 self.request_lsp(
3495 buffer,
3496 PerformRename {
3497 position,
3498 new_name,
3499 push_to_history,
3500 },
3501 cx,
3502 )
3503 }
3504
3505 pub fn search(
3506 &self,
3507 query: SearchQuery,
3508 cx: &mut ModelContext<Self>,
3509 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3510 if self.is_local() {
3511 let snapshots = self
3512 .visible_worktrees(cx)
3513 .filter_map(|tree| {
3514 let tree = tree.read(cx).as_local()?;
3515 Some(tree.snapshot())
3516 })
3517 .collect::<Vec<_>>();
3518
3519 let background = cx.background().clone();
3520 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3521 if path_count == 0 {
3522 return Task::ready(Ok(Default::default()));
3523 }
3524 let workers = background.num_cpus().min(path_count);
3525 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
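            // Fan the candidate paths out across background workers: each worker scans a
            // contiguous slice of the visible files and forwards matching paths over the
            // channel.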
3526 cx.background()
3527 .spawn({
3528 let fs = self.fs.clone();
3529 let background = cx.background().clone();
3530 let query = query.clone();
3531 async move {
3532 let fs = &fs;
3533 let query = &query;
3534 let matching_paths_tx = &matching_paths_tx;
3535 let paths_per_worker = (path_count + workers - 1) / workers;
3536 let snapshots = &snapshots;
3537 background
3538 .scoped(|scope| {
3539 for worker_ix in 0..workers {
3540 let worker_start_ix = worker_ix * paths_per_worker;
3541 let worker_end_ix = worker_start_ix + paths_per_worker;
3542 scope.spawn(async move {
3543 let mut snapshot_start_ix = 0;
3544 let mut abs_path = PathBuf::new();
3545 for snapshot in snapshots {
3546 let snapshot_end_ix =
3547 snapshot_start_ix + snapshot.visible_file_count();
3548 if worker_end_ix <= snapshot_start_ix {
3549 break;
3550 } else if worker_start_ix > snapshot_end_ix {
3551 snapshot_start_ix = snapshot_end_ix;
3552 continue;
3553 } else {
3554 let start_in_snapshot = worker_start_ix
3555 .saturating_sub(snapshot_start_ix);
3556 let end_in_snapshot =
3557 cmp::min(worker_end_ix, snapshot_end_ix)
3558 - snapshot_start_ix;
3559
3560 for entry in snapshot
3561 .files(false, start_in_snapshot)
3562 .take(end_in_snapshot - start_in_snapshot)
3563 {
3564 if matching_paths_tx.is_closed() {
3565 break;
3566 }
3567
3568 abs_path.clear();
3569 abs_path.push(&snapshot.abs_path());
3570 abs_path.push(&entry.path);
3571 let matches = if let Some(file) =
3572 fs.open_sync(&abs_path).await.log_err()
3573 {
3574 query.detect(file).unwrap_or(false)
3575 } else {
3576 false
3577 };
3578
3579 if matches {
3580 let project_path =
3581 (snapshot.id(), entry.path.clone());
3582 if matching_paths_tx
3583 .send(project_path)
3584 .await
3585 .is_err()
3586 {
3587 break;
3588 }
3589 }
3590 }
3591
3592 snapshot_start_ix = snapshot_end_ix;
3593 }
3594 }
3595 });
3596 }
3597 })
3598 .await;
3599 }
3600 })
3601 .detach();
3602
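            // For every matching path, open (or reuse) a buffer and feed it to background
            // workers that run the query over the buffer's contents, collecting anchored
            // ranges per buffer.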
3603 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3604 let open_buffers = self
3605 .opened_buffers
3606 .values()
3607 .filter_map(|b| b.upgrade(cx))
3608 .collect::<HashSet<_>>();
3609 cx.spawn(|this, cx| async move {
3610 for buffer in &open_buffers {
3611 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3612 buffers_tx.send((buffer.clone(), snapshot)).await?;
3613 }
3614
3615 let open_buffers = Rc::new(RefCell::new(open_buffers));
3616 while let Some(project_path) = matching_paths_rx.next().await {
3617 if buffers_tx.is_closed() {
3618 break;
3619 }
3620
3621 let this = this.clone();
3622 let open_buffers = open_buffers.clone();
3623 let buffers_tx = buffers_tx.clone();
3624 cx.spawn(|mut cx| async move {
3625 if let Some(buffer) = this
3626 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3627 .await
3628 .log_err()
3629 {
3630 if open_buffers.borrow_mut().insert(buffer.clone()) {
3631 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3632 buffers_tx.send((buffer, snapshot)).await?;
3633 }
3634 }
3635
3636 Ok::<_, anyhow::Error>(())
3637 })
3638 .detach();
3639 }
3640
3641 Ok::<_, anyhow::Error>(())
3642 })
3643 .detach_and_log_err(cx);
3644
3645 let background = cx.background().clone();
3646 cx.background().spawn(async move {
3647 let query = &query;
3648 let mut matched_buffers = Vec::new();
3649 for _ in 0..workers {
3650 matched_buffers.push(HashMap::default());
3651 }
3652 background
3653 .scoped(|scope| {
3654 for worker_matched_buffers in matched_buffers.iter_mut() {
3655 let mut buffers_rx = buffers_rx.clone();
3656 scope.spawn(async move {
3657 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3658 let buffer_matches = query
3659 .search(snapshot.as_rope())
3660 .await
3661 .iter()
3662 .map(|range| {
3663 snapshot.anchor_before(range.start)
3664 ..snapshot.anchor_after(range.end)
3665 })
3666 .collect::<Vec<_>>();
3667 if !buffer_matches.is_empty() {
3668 worker_matched_buffers
3669 .insert(buffer.clone(), buffer_matches);
3670 }
3671 }
3672 });
3673 }
3674 })
3675 .await;
3676 Ok(matched_buffers.into_iter().flatten().collect())
3677 })
3678 } else if let Some(project_id) = self.remote_id() {
3679 let request = self.client.request(query.to_proto(project_id));
3680 cx.spawn(|this, mut cx| async move {
3681 let response = request.await?;
3682 let mut result = HashMap::default();
3683 for location in response.locations {
3684 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3685 let target_buffer = this
3686 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3687 .await?;
3688 let start = location
3689 .start
3690 .and_then(deserialize_anchor)
3691 .ok_or_else(|| anyhow!("missing target start"))?;
3692 let end = location
3693 .end
3694 .and_then(deserialize_anchor)
3695 .ok_or_else(|| anyhow!("missing target end"))?;
3696 result
3697 .entry(target_buffer)
                    .or_default()
3699 .push(start..end)
3700 }
3701 Ok(result)
3702 })
3703 } else {
3704 Task::ready(Ok(Default::default()))
3705 }
3706 }
3707
3708 fn request_lsp<R: LspCommand>(
3709 &self,
3710 buffer_handle: ModelHandle<Buffer>,
3711 request: R,
3712 cx: &mut ModelContext<Self>,
3713 ) -> Task<Result<R::Response>>
3714 where
3715 <R::LspRequest as lsp::request::Request>::Result: Send,
3716 {
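        // Route the typed request to the buffer's local language server when there is
        // one; otherwise forward it to the host project over RPC.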
3717 let buffer = buffer_handle.read(cx);
3718 if self.is_local() {
3719 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3720 if let Some((file, (_, language_server))) =
3721 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3722 {
3723 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3724 return cx.spawn(|this, cx| async move {
3725 if !request.check_capabilities(&language_server.capabilities()) {
3726 return Ok(Default::default());
3727 }
3728
3729 let response = language_server
3730 .request::<R::LspRequest>(lsp_params)
3731 .await
3732 .context("lsp request failed")?;
3733 request
3734 .response_from_lsp(response, this, buffer_handle, cx)
3735 .await
3736 });
3737 }
3738 } else if let Some(project_id) = self.remote_id() {
3739 let rpc = self.client.clone();
3740 let message = request.to_proto(project_id, buffer);
3741 return cx.spawn(|this, cx| async move {
3742 let response = rpc.request(message).await?;
3743 request
3744 .response_from_proto(response, this, buffer_handle, cx)
3745 .await
3746 });
3747 }
3748 Task::ready(Ok(Default::default()))
3749 }
3750
3751 pub fn find_or_create_local_worktree(
3752 &mut self,
3753 abs_path: impl AsRef<Path>,
3754 visible: bool,
3755 cx: &mut ModelContext<Self>,
3756 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3757 let abs_path = abs_path.as_ref();
3758 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3759 Task::ready(Ok((tree.clone(), relative_path.into())))
3760 } else {
3761 let worktree = self.create_local_worktree(abs_path, visible, cx);
3762 cx.foreground()
3763 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3764 }
3765 }
3766
3767 pub fn find_local_worktree(
3768 &self,
3769 abs_path: &Path,
3770 cx: &AppContext,
3771 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3772 for tree in self.worktrees(cx) {
3773 if let Some(relative_path) = tree
3774 .read(cx)
3775 .as_local()
3776 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3777 {
3778 return Some((tree.clone(), relative_path.into()));
3779 }
3780 }
3781 None
3782 }
3783
3784 pub fn is_shared(&self) -> bool {
3785 match &self.client_state {
3786 ProjectClientState::Local { is_shared, .. } => *is_shared,
3787 ProjectClientState::Remote { .. } => false,
3788 }
3789 }
3790
3791 fn create_local_worktree(
3792 &mut self,
3793 abs_path: impl AsRef<Path>,
3794 visible: bool,
3795 cx: &mut ModelContext<Self>,
3796 ) -> Task<Result<ModelHandle<Worktree>>> {
3797 let fs = self.fs.clone();
3798 let client = self.client.clone();
3799 let next_entry_id = self.next_entry_id.clone();
3800 let path: Arc<Path> = abs_path.as_ref().into();
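        // Concurrent requests for the same path share a single loading task.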
3801 let task = self
3802 .loading_local_worktrees
3803 .entry(path.clone())
3804 .or_insert_with(|| {
3805 cx.spawn(|project, mut cx| {
3806 async move {
3807 let worktree = Worktree::local(
3808 client.clone(),
3809 path.clone(),
3810 visible,
3811 fs,
3812 next_entry_id,
3813 &mut cx,
3814 )
3815 .await;
3816 project.update(&mut cx, |project, _| {
3817 project.loading_local_worktrees.remove(&path);
3818 });
3819 let worktree = worktree?;
3820
3821 let project_id = project.update(&mut cx, |project, cx| {
3822 project.add_worktree(&worktree, cx);
3823 project.shared_remote_id()
3824 });
3825
3826 if let Some(project_id) = project_id {
3827 worktree
3828 .update(&mut cx, |worktree, cx| {
3829 worktree.as_local_mut().unwrap().share(project_id, cx)
3830 })
3831 .await
3832 .log_err();
3833 }
3834
3835 Ok(worktree)
3836 }
                    .map_err(Arc::new)
3838 })
3839 .shared()
3840 })
3841 .clone();
3842 cx.foreground().spawn(async move {
3843 match task.await {
3844 Ok(worktree) => Ok(worktree),
3845 Err(err) => Err(anyhow!("{}", err)),
3846 }
3847 })
3848 }
3849
3850 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3851 self.worktrees.retain(|worktree| {
3852 if let Some(worktree) = worktree.upgrade(cx) {
3853 let id = worktree.read(cx).id();
3854 if id == id_to_remove {
3855 cx.emit(Event::WorktreeRemoved(id));
3856 false
3857 } else {
3858 true
3859 }
3860 } else {
3861 false
3862 }
3863 });
3864 self.metadata_changed(true, cx);
3865 cx.notify();
3866 }
3867
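    // Track a newly added worktree. Shared, visible, or remote worktrees are held
    // strongly; otherwise the project keeps only a weak handle and prunes it once the
    // worktree is released.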
3868 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3869 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3870 if worktree.read(cx).is_local() {
3871 cx.subscribe(&worktree, |this, worktree, _, cx| {
3872 this.update_local_worktree_buffers(worktree, cx);
3873 })
3874 .detach();
3875 }
3876
3877 let push_strong_handle = {
3878 let worktree = worktree.read(cx);
3879 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3880 };
3881 if push_strong_handle {
3882 self.worktrees
3883 .push(WorktreeHandle::Strong(worktree.clone()));
3884 } else {
3885 cx.observe_release(&worktree, |this, _, cx| {
3886 this.worktrees
3887 .retain(|worktree| worktree.upgrade(cx).is_some());
3888 cx.notify();
3889 })
3890 .detach();
3891 self.worktrees
3892 .push(WorktreeHandle::Weak(worktree.downgrade()));
3893 }
3894 self.metadata_changed(true, cx);
3895 cx.emit(Event::WorktreeAdded);
3896 cx.notify();
3897 }
3898
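    // Reconcile open buffers with the latest snapshot of a local worktree: refresh each
    // buffer's `File` (detecting renames and deletions), notify collaborators of file
    // changes, and re-register renamed buffers with their language servers.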
3899 fn update_local_worktree_buffers(
3900 &mut self,
3901 worktree_handle: ModelHandle<Worktree>,
3902 cx: &mut ModelContext<Self>,
3903 ) {
3904 let snapshot = worktree_handle.read(cx).snapshot();
3905 let mut buffers_to_delete = Vec::new();
3906 let mut renamed_buffers = Vec::new();
3907 for (buffer_id, buffer) in &self.opened_buffers {
3908 if let Some(buffer) = buffer.upgrade(cx) {
3909 buffer.update(cx, |buffer, cx| {
3910 if let Some(old_file) = File::from_dyn(buffer.file()) {
3911 if old_file.worktree != worktree_handle {
3912 return;
3913 }
3914
3915 let new_file = if let Some(entry) = old_file
3916 .entry_id
3917 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3918 {
3919 File {
3920 is_local: true,
3921 entry_id: Some(entry.id),
3922 mtime: entry.mtime,
3923 path: entry.path.clone(),
3924 worktree: worktree_handle.clone(),
3925 }
3926 } else if let Some(entry) =
3927 snapshot.entry_for_path(old_file.path().as_ref())
3928 {
3929 File {
3930 is_local: true,
3931 entry_id: Some(entry.id),
3932 mtime: entry.mtime,
3933 path: entry.path.clone(),
3934 worktree: worktree_handle.clone(),
3935 }
3936 } else {
3937 File {
3938 is_local: true,
3939 entry_id: None,
3940 path: old_file.path().clone(),
3941 mtime: old_file.mtime(),
3942 worktree: worktree_handle.clone(),
3943 }
3944 };
3945
3946 let old_path = old_file.abs_path(cx);
3947 if new_file.abs_path(cx) != old_path {
3948 renamed_buffers.push((cx.handle(), old_path));
3949 }
3950
3951 if let Some(project_id) = self.shared_remote_id() {
3952 self.client
3953 .send(proto::UpdateBufferFile {
3954 project_id,
3955 buffer_id: *buffer_id as u64,
3956 file: Some(new_file.to_proto()),
3957 })
3958 .log_err();
3959 }
3960 buffer.file_updated(Box::new(new_file), cx).detach();
3961 }
3962 });
3963 } else {
3964 buffers_to_delete.push(*buffer_id);
3965 }
3966 }
3967
3968 for buffer_id in buffers_to_delete {
3969 self.opened_buffers.remove(&buffer_id);
3970 }
3971
3972 for (buffer, old_path) in renamed_buffers {
3973 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3974 self.assign_language_to_buffer(&buffer, cx);
3975 self.register_buffer_with_language_server(&buffer, cx);
3976 }
3977 }
3978
3979 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3980 let new_active_entry = entry.and_then(|project_path| {
3981 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3982 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3983 Some(entry.id)
3984 });
3985 if new_active_entry != self.active_entry {
3986 self.active_entry = new_active_entry;
3987 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3988 }
3989 }
3990
3991 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3992 self.language_server_statuses
3993 .values()
3994 .any(|status| status.pending_diagnostic_updates > 0)
3995 }
3996
3997 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3998 let mut summary = DiagnosticSummary::default();
3999 for (_, path_summary) in self.diagnostic_summaries(cx) {
4000 summary.error_count += path_summary.error_count;
4001 summary.warning_count += path_summary.warning_count;
4002 }
4003 summary
4004 }
4005
4006 pub fn diagnostic_summaries<'a>(
4007 &'a self,
4008 cx: &'a AppContext,
4009 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4010 self.worktrees(cx).flat_map(move |worktree| {
4011 let worktree = worktree.read(cx);
4012 let worktree_id = worktree.id();
4013 worktree
4014 .diagnostic_summaries()
4015 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4016 })
4017 }
4018
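    // Emit `DiskBasedDiagnosticsStarted` only for the first pending disk-based
    // diagnostic update, so listeners see a single start event per batch.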
4019 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4020 if self
4021 .language_server_statuses
4022 .values()
4023 .map(|status| status.pending_diagnostic_updates)
4024 .sum::<isize>()
4025 == 1
4026 {
4027 cx.emit(Event::DiskBasedDiagnosticsStarted);
4028 }
4029 }
4030
4031 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4032 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4033 if self
4034 .language_server_statuses
4035 .values()
4036 .map(|status| status.pending_diagnostic_updates)
4037 .sum::<isize>()
4038 == 0
4039 {
4040 cx.emit(Event::DiskBasedDiagnosticsFinished);
4041 }
4042 }
4043
4044 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4045 self.active_entry
4046 }
4047
4048 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4049 self.worktree_for_id(path.worktree_id, cx)?
4050 .read(cx)
4051 .entry_for_path(&path.path)
4052 .map(|entry| entry.id)
4053 }
4054
4055 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4056 let worktree = self.worktree_for_entry(entry_id, cx)?;
4057 let worktree = worktree.read(cx);
4058 let worktree_id = worktree.id();
4059 let path = worktree.entry_for_id(entry_id)?.path.clone();
4060 Some(ProjectPath { worktree_id, path })
4061 }
4062
4063 // RPC message handlers
4064
4065 async fn handle_request_join_project(
4066 this: ModelHandle<Self>,
4067 message: TypedEnvelope<proto::RequestJoinProject>,
4068 _: Arc<Client>,
4069 mut cx: AsyncAppContext,
4070 ) -> Result<()> {
4071 let user_id = message.payload.requester_id;
4072 if this.read_with(&cx, |project, _| {
4073 project.collaborators.values().any(|c| c.user.id == user_id)
4074 }) {
4075 this.update(&mut cx, |this, cx| {
4076 this.respond_to_join_request(user_id, true, cx)
4077 });
4078 } else {
4079 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4080 let user = user_store
4081 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4082 .await?;
4083 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4084 }
4085 Ok(())
4086 }
4087
4088 async fn handle_unregister_project(
4089 this: ModelHandle<Self>,
4090 _: TypedEnvelope<proto::UnregisterProject>,
4091 _: Arc<Client>,
4092 mut cx: AsyncAppContext,
4093 ) -> Result<()> {
4094 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4095 Ok(())
4096 }
4097
4098 async fn handle_project_unshared(
4099 this: ModelHandle<Self>,
4100 _: TypedEnvelope<proto::ProjectUnshared>,
4101 _: Arc<Client>,
4102 mut cx: AsyncAppContext,
4103 ) -> Result<()> {
4104 this.update(&mut cx, |this, cx| this.unshared(cx));
4105 Ok(())
4106 }
4107
4108 async fn handle_add_collaborator(
4109 this: ModelHandle<Self>,
4110 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4111 _: Arc<Client>,
4112 mut cx: AsyncAppContext,
4113 ) -> Result<()> {
4114 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4115 let collaborator = envelope
4116 .payload
4117 .collaborator
4118 .take()
4119 .ok_or_else(|| anyhow!("empty collaborator"))?;
4120
4121 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4122 this.update(&mut cx, |this, cx| {
4123 this.collaborators
4124 .insert(collaborator.peer_id, collaborator);
4125 cx.notify();
4126 });
4127
4128 Ok(())
4129 }
4130
4131 async fn handle_remove_collaborator(
4132 this: ModelHandle<Self>,
4133 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4134 _: Arc<Client>,
4135 mut cx: AsyncAppContext,
4136 ) -> Result<()> {
4137 this.update(&mut cx, |this, cx| {
4138 let peer_id = PeerId(envelope.payload.peer_id);
4139 let replica_id = this
4140 .collaborators
4141 .remove(&peer_id)
4142 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4143 .replica_id;
4144 for (_, buffer) in &this.opened_buffers {
4145 if let Some(buffer) = buffer.upgrade(cx) {
4146 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4147 }
4148 }
4149
4150 cx.emit(Event::CollaboratorLeft(peer_id));
4151 cx.notify();
4152 Ok(())
4153 })
4154 }
4155
4156 async fn handle_join_project_request_cancelled(
4157 this: ModelHandle<Self>,
4158 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4159 _: Arc<Client>,
4160 mut cx: AsyncAppContext,
4161 ) -> Result<()> {
4162 let user = this
4163 .update(&mut cx, |this, cx| {
4164 this.user_store.update(cx, |user_store, cx| {
4165 user_store.fetch_user(envelope.payload.requester_id, cx)
4166 })
4167 })
4168 .await?;
4169
4170 this.update(&mut cx, |_, cx| {
4171 cx.emit(Event::ContactCancelledJoinRequest(user));
4172 });
4173
4174 Ok(())
4175 }
4176
4177 async fn handle_update_project(
4178 this: ModelHandle<Self>,
4179 envelope: TypedEnvelope<proto::UpdateProject>,
4180 client: Arc<Client>,
4181 mut cx: AsyncAppContext,
4182 ) -> Result<()> {
4183 this.update(&mut cx, |this, cx| {
4184 let replica_id = this.replica_id();
4185 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4186
4187 let mut old_worktrees_by_id = this
4188 .worktrees
4189 .drain(..)
4190 .filter_map(|worktree| {
4191 let worktree = worktree.upgrade(cx)?;
4192 Some((worktree.read(cx).id(), worktree))
4193 })
4194 .collect::<HashMap<_, _>>();
4195
4196 for worktree in envelope.payload.worktrees {
4197 if let Some(old_worktree) =
4198 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4199 {
4200 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4201 } else {
4202 let worktree = proto::Worktree {
4203 id: worktree.id,
4204 root_name: worktree.root_name,
4205 entries: Default::default(),
4206 diagnostic_summaries: Default::default(),
4207 visible: worktree.visible,
4208 scan_id: 0,
4209 };
4210 let (worktree, load_task) =
4211 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4212 this.add_worktree(&worktree, cx);
4213 load_task.detach();
4214 }
4215 }
4216
4217 this.metadata_changed(true, cx);
4218 for (id, _) in old_worktrees_by_id {
4219 cx.emit(Event::WorktreeRemoved(id));
4220 }
4221
4222 Ok(())
4223 })
4224 }
4225
4226 async fn handle_update_worktree(
4227 this: ModelHandle<Self>,
4228 envelope: TypedEnvelope<proto::UpdateWorktree>,
4229 _: Arc<Client>,
4230 mut cx: AsyncAppContext,
4231 ) -> Result<()> {
4232 this.update(&mut cx, |this, cx| {
4233 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4234 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4235 worktree.update(cx, |worktree, _| {
4236 let worktree = worktree.as_remote_mut().unwrap();
4237 worktree.update_from_remote(envelope)
4238 })?;
4239 }
4240 Ok(())
4241 })
4242 }
4243
4244 async fn handle_create_project_entry(
4245 this: ModelHandle<Self>,
4246 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4247 _: Arc<Client>,
4248 mut cx: AsyncAppContext,
4249 ) -> Result<proto::ProjectEntryResponse> {
4250 let worktree = this.update(&mut cx, |this, cx| {
4251 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4252 this.worktree_for_id(worktree_id, cx)
4253 .ok_or_else(|| anyhow!("worktree not found"))
4254 })?;
4255 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4256 let entry = worktree
4257 .update(&mut cx, |worktree, cx| {
4258 let worktree = worktree.as_local_mut().unwrap();
4259 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4260 worktree.create_entry(path, envelope.payload.is_directory, cx)
4261 })
4262 .await?;
4263 Ok(proto::ProjectEntryResponse {
4264 entry: Some((&entry).into()),
4265 worktree_scan_id: worktree_scan_id as u64,
4266 })
4267 }
4268
4269 async fn handle_rename_project_entry(
4270 this: ModelHandle<Self>,
4271 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4272 _: Arc<Client>,
4273 mut cx: AsyncAppContext,
4274 ) -> Result<proto::ProjectEntryResponse> {
4275 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4276 let worktree = this.read_with(&cx, |this, cx| {
4277 this.worktree_for_entry(entry_id, cx)
4278 .ok_or_else(|| anyhow!("worktree not found"))
4279 })?;
4280 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4281 let entry = worktree
4282 .update(&mut cx, |worktree, cx| {
4283 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4284 worktree
4285 .as_local_mut()
4286 .unwrap()
4287 .rename_entry(entry_id, new_path, cx)
4288 .ok_or_else(|| anyhow!("invalid entry"))
4289 })?
4290 .await?;
4291 Ok(proto::ProjectEntryResponse {
4292 entry: Some((&entry).into()),
4293 worktree_scan_id: worktree_scan_id as u64,
4294 })
4295 }
4296
4297 async fn handle_copy_project_entry(
4298 this: ModelHandle<Self>,
4299 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4300 _: Arc<Client>,
4301 mut cx: AsyncAppContext,
4302 ) -> Result<proto::ProjectEntryResponse> {
4303 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4304 let worktree = this.read_with(&cx, |this, cx| {
4305 this.worktree_for_entry(entry_id, cx)
4306 .ok_or_else(|| anyhow!("worktree not found"))
4307 })?;
4308 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4309 let entry = worktree
4310 .update(&mut cx, |worktree, cx| {
4311 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4312 worktree
4313 .as_local_mut()
4314 .unwrap()
4315 .copy_entry(entry_id, new_path, cx)
4316 .ok_or_else(|| anyhow!("invalid entry"))
4317 })?
4318 .await?;
4319 Ok(proto::ProjectEntryResponse {
4320 entry: Some((&entry).into()),
4321 worktree_scan_id: worktree_scan_id as u64,
4322 })
4323 }
4324
4325 async fn handle_delete_project_entry(
4326 this: ModelHandle<Self>,
4327 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4328 _: Arc<Client>,
4329 mut cx: AsyncAppContext,
4330 ) -> Result<proto::ProjectEntryResponse> {
4331 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4332 let worktree = this.read_with(&cx, |this, cx| {
4333 this.worktree_for_entry(entry_id, cx)
4334 .ok_or_else(|| anyhow!("worktree not found"))
4335 })?;
4336 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4337 worktree
4338 .update(&mut cx, |worktree, cx| {
4339 worktree
4340 .as_local_mut()
4341 .unwrap()
4342 .delete_entry(entry_id, cx)
4343 .ok_or_else(|| anyhow!("invalid entry"))
4344 })?
4345 .await?;
4346 Ok(proto::ProjectEntryResponse {
4347 entry: None,
4348 worktree_scan_id: worktree_scan_id as u64,
4349 })
4350 }
4351
4352 async fn handle_update_diagnostic_summary(
4353 this: ModelHandle<Self>,
4354 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4355 _: Arc<Client>,
4356 mut cx: AsyncAppContext,
4357 ) -> Result<()> {
4358 this.update(&mut cx, |this, cx| {
4359 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4360 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4361 if let Some(summary) = envelope.payload.summary {
4362 let project_path = ProjectPath {
4363 worktree_id,
4364 path: Path::new(&summary.path).into(),
4365 };
4366 worktree.update(cx, |worktree, _| {
4367 worktree
4368 .as_remote_mut()
4369 .unwrap()
4370 .update_diagnostic_summary(project_path.path.clone(), &summary);
4371 });
4372 cx.emit(Event::DiagnosticsUpdated(project_path));
4373 }
4374 }
4375 Ok(())
4376 })
4377 }
4378
4379 async fn handle_start_language_server(
4380 this: ModelHandle<Self>,
4381 envelope: TypedEnvelope<proto::StartLanguageServer>,
4382 _: Arc<Client>,
4383 mut cx: AsyncAppContext,
4384 ) -> Result<()> {
4385 let server = envelope
4386 .payload
4387 .server
4388 .ok_or_else(|| anyhow!("invalid server"))?;
4389 this.update(&mut cx, |this, cx| {
4390 this.language_server_statuses.insert(
4391 server.id as usize,
4392 LanguageServerStatus {
4393 name: server.name,
4394 pending_work: Default::default(),
4395 pending_diagnostic_updates: 0,
4396 },
4397 );
4398 cx.notify();
4399 });
4400 Ok(())
4401 }
4402
4403 async fn handle_update_language_server(
4404 this: ModelHandle<Self>,
4405 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4406 _: Arc<Client>,
4407 mut cx: AsyncAppContext,
4408 ) -> Result<()> {
4409 let language_server_id = envelope.payload.language_server_id as usize;
4410 match envelope
4411 .payload
4412 .variant
4413 .ok_or_else(|| anyhow!("invalid variant"))?
4414 {
4415 proto::update_language_server::Variant::WorkStart(payload) => {
4416 this.update(&mut cx, |this, cx| {
4417 this.on_lsp_work_start(language_server_id, payload.token, cx);
4418 })
4419 }
4420 proto::update_language_server::Variant::WorkProgress(payload) => {
4421 this.update(&mut cx, |this, cx| {
4422 this.on_lsp_work_progress(
4423 language_server_id,
4424 payload.token,
4425 LanguageServerProgress {
4426 message: payload.message,
4427 percentage: payload.percentage.map(|p| p as usize),
4428 last_update_at: Instant::now(),
4429 },
4430 cx,
4431 );
4432 })
4433 }
4434 proto::update_language_server::Variant::WorkEnd(payload) => {
4435 this.update(&mut cx, |this, cx| {
4436 this.on_lsp_work_end(language_server_id, payload.token, cx);
4437 })
4438 }
4439 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4440 this.update(&mut cx, |this, cx| {
4441 this.disk_based_diagnostics_started(cx);
4442 })
4443 }
4444 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4445 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4446 }
4447 }
4448
4449 Ok(())
4450 }
4451
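    // Apply incoming operations to the matching buffer. Operations for buffers that are
    // still loading are queued, and operations for buffers this replica hasn't opened
    // yet (only expected on guests) are stashed until the buffer is opened.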
4452 async fn handle_update_buffer(
4453 this: ModelHandle<Self>,
4454 envelope: TypedEnvelope<proto::UpdateBuffer>,
4455 _: Arc<Client>,
4456 mut cx: AsyncAppContext,
4457 ) -> Result<()> {
4458 this.update(&mut cx, |this, cx| {
4459 let payload = envelope.payload.clone();
4460 let buffer_id = payload.buffer_id;
4461 let ops = payload
4462 .operations
4463 .into_iter()
                .map(language::proto::deserialize_operation)
4465 .collect::<Result<Vec<_>, _>>()?;
4466 let is_remote = this.is_remote();
4467 match this.opened_buffers.entry(buffer_id) {
4468 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4469 OpenBuffer::Strong(buffer) => {
4470 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4471 }
4472 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4473 OpenBuffer::Weak(_) => {}
4474 },
4475 hash_map::Entry::Vacant(e) => {
4476 assert!(
4477 is_remote,
4478 "received buffer update from {:?}",
4479 envelope.original_sender_id
4480 );
4481 e.insert(OpenBuffer::Loading(ops));
4482 }
4483 }
4484 Ok(())
4485 })
4486 }
4487
4488 async fn handle_update_buffer_file(
4489 this: ModelHandle<Self>,
4490 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4491 _: Arc<Client>,
4492 mut cx: AsyncAppContext,
4493 ) -> Result<()> {
4494 this.update(&mut cx, |this, cx| {
4495 let payload = envelope.payload.clone();
4496 let buffer_id = payload.buffer_id;
4497 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4498 let worktree = this
4499 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4500 .ok_or_else(|| anyhow!("no such worktree"))?;
4501 let file = File::from_proto(file, worktree.clone(), cx)?;
4502 let buffer = this
4503 .opened_buffers
4504 .get_mut(&buffer_id)
4505 .and_then(|b| b.upgrade(cx))
4506 .ok_or_else(|| anyhow!("no such buffer"))?;
4507 buffer.update(cx, |buffer, cx| {
4508 buffer.file_updated(Box::new(file), cx).detach();
4509 });
4510 Ok(())
4511 })
4512 }
4513
4514 async fn handle_save_buffer(
4515 this: ModelHandle<Self>,
4516 envelope: TypedEnvelope<proto::SaveBuffer>,
4517 _: Arc<Client>,
4518 mut cx: AsyncAppContext,
4519 ) -> Result<proto::BufferSaved> {
4520 let buffer_id = envelope.payload.buffer_id;
4521 let requested_version = deserialize_version(envelope.payload.version);
4522
4523 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4524 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4525 let buffer = this
4526 .opened_buffers
4527 .get(&buffer_id)
4528 .and_then(|buffer| buffer.upgrade(cx))
4529 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4530 Ok::<_, anyhow::Error>((project_id, buffer))
4531 })?;
4532 buffer
4533 .update(&mut cx, |buffer, _| {
4534 buffer.wait_for_version(requested_version)
4535 })
4536 .await;
4537
4538 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4539 Ok(proto::BufferSaved {
4540 project_id,
4541 buffer_id,
4542 version: serialize_version(&saved_version),
4543 mtime: Some(mtime.into()),
4544 })
4545 }
4546
4547 async fn handle_reload_buffers(
4548 this: ModelHandle<Self>,
4549 envelope: TypedEnvelope<proto::ReloadBuffers>,
4550 _: Arc<Client>,
4551 mut cx: AsyncAppContext,
4552 ) -> Result<proto::ReloadBuffersResponse> {
4553 let sender_id = envelope.original_sender_id()?;
4554 let reload = this.update(&mut cx, |this, cx| {
4555 let mut buffers = HashSet::default();
4556 for buffer_id in &envelope.payload.buffer_ids {
4557 buffers.insert(
4558 this.opened_buffers
4559 .get(buffer_id)
4560 .and_then(|buffer| buffer.upgrade(cx))
4561 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4562 );
4563 }
4564 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4565 })?;
4566
4567 let project_transaction = reload.await?;
4568 let project_transaction = this.update(&mut cx, |this, cx| {
4569 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4570 });
4571 Ok(proto::ReloadBuffersResponse {
4572 transaction: Some(project_transaction),
4573 })
4574 }
4575
4576 async fn handle_format_buffers(
4577 this: ModelHandle<Self>,
4578 envelope: TypedEnvelope<proto::FormatBuffers>,
4579 _: Arc<Client>,
4580 mut cx: AsyncAppContext,
4581 ) -> Result<proto::FormatBuffersResponse> {
4582 let sender_id = envelope.original_sender_id()?;
4583 let format = this.update(&mut cx, |this, cx| {
4584 let mut buffers = HashSet::default();
4585 for buffer_id in &envelope.payload.buffer_ids {
4586 buffers.insert(
4587 this.opened_buffers
4588 .get(buffer_id)
4589 .and_then(|buffer| buffer.upgrade(cx))
4590 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4591 );
4592 }
4593 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4594 })?;
4595
4596 let project_transaction = format.await?;
4597 let project_transaction = this.update(&mut cx, |this, cx| {
4598 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4599 });
4600 Ok(proto::FormatBuffersResponse {
4601 transaction: Some(project_transaction),
4602 })
4603 }
4604
4605 async fn handle_get_completions(
4606 this: ModelHandle<Self>,
4607 envelope: TypedEnvelope<proto::GetCompletions>,
4608 _: Arc<Client>,
4609 mut cx: AsyncAppContext,
4610 ) -> Result<proto::GetCompletionsResponse> {
4611 let position = envelope
4612 .payload
4613 .position
4614 .and_then(language::proto::deserialize_anchor)
4615 .ok_or_else(|| anyhow!("invalid position"))?;
4616 let version = deserialize_version(envelope.payload.version);
4617 let buffer = this.read_with(&cx, |this, cx| {
4618 this.opened_buffers
4619 .get(&envelope.payload.buffer_id)
4620 .and_then(|buffer| buffer.upgrade(cx))
4621 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4622 })?;
4623 buffer
4624 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4625 .await;
4626 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4627 let completions = this
4628 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4629 .await?;
4630
4631 Ok(proto::GetCompletionsResponse {
4632 completions: completions
4633 .iter()
4634 .map(language::proto::serialize_completion)
4635 .collect(),
4636 version: serialize_version(&version),
4637 })
4638 }
4639
4640 async fn handle_apply_additional_edits_for_completion(
4641 this: ModelHandle<Self>,
4642 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4643 _: Arc<Client>,
4644 mut cx: AsyncAppContext,
4645 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4646 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4647 let buffer = this
4648 .opened_buffers
4649 .get(&envelope.payload.buffer_id)
4650 .and_then(|buffer| buffer.upgrade(cx))
4651 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4652 let language = buffer.read(cx).language();
4653 let completion = language::proto::deserialize_completion(
4654 envelope
4655 .payload
4656 .completion
4657 .ok_or_else(|| anyhow!("invalid completion"))?,
4658 language,
4659 )?;
4660 Ok::<_, anyhow::Error>(
4661 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4662 )
4663 })?;
4664
4665 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4666 transaction: apply_additional_edits
4667 .await?
4668 .as_ref()
4669 .map(language::proto::serialize_transaction),
4670 })
4671 }
4672
4673 async fn handle_get_code_actions(
4674 this: ModelHandle<Self>,
4675 envelope: TypedEnvelope<proto::GetCodeActions>,
4676 _: Arc<Client>,
4677 mut cx: AsyncAppContext,
4678 ) -> Result<proto::GetCodeActionsResponse> {
4679 let start = envelope
4680 .payload
4681 .start
4682 .and_then(language::proto::deserialize_anchor)
4683 .ok_or_else(|| anyhow!("invalid start"))?;
4684 let end = envelope
4685 .payload
4686 .end
4687 .and_then(language::proto::deserialize_anchor)
4688 .ok_or_else(|| anyhow!("invalid end"))?;
4689 let buffer = this.update(&mut cx, |this, cx| {
4690 this.opened_buffers
4691 .get(&envelope.payload.buffer_id)
4692 .and_then(|buffer| buffer.upgrade(cx))
4693 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4694 })?;
4695 buffer
4696 .update(&mut cx, |buffer, _| {
4697 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4698 })
4699 .await;
4700
4701 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4702 let code_actions = this.update(&mut cx, |this, cx| {
4703 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4704 })?;
4705
4706 Ok(proto::GetCodeActionsResponse {
4707 actions: code_actions
4708 .await?
4709 .iter()
4710 .map(language::proto::serialize_code_action)
4711 .collect(),
4712 version: serialize_version(&version),
4713 })
4714 }
4715
4716 async fn handle_apply_code_action(
4717 this: ModelHandle<Self>,
4718 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4719 _: Arc<Client>,
4720 mut cx: AsyncAppContext,
4721 ) -> Result<proto::ApplyCodeActionResponse> {
4722 let sender_id = envelope.original_sender_id()?;
4723 let action = language::proto::deserialize_code_action(
4724 envelope
4725 .payload
4726 .action
4727 .ok_or_else(|| anyhow!("invalid action"))?,
4728 )?;
4729 let apply_code_action = this.update(&mut cx, |this, cx| {
4730 let buffer = this
4731 .opened_buffers
4732 .get(&envelope.payload.buffer_id)
4733 .and_then(|buffer| buffer.upgrade(cx))
4734 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4735 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4736 })?;
4737
4738 let project_transaction = apply_code_action.await?;
4739 let project_transaction = this.update(&mut cx, |this, cx| {
4740 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4741 });
4742 Ok(proto::ApplyCodeActionResponse {
4743 transaction: Some(project_transaction),
4744 })
4745 }
4746
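    // Generic handler for LSP requests proxied from guests: reconstruct the typed
    // request, run it through `request_lsp`, and convert the response back to its
    // protobuf form for the original sender.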
4747 async fn handle_lsp_command<T: LspCommand>(
4748 this: ModelHandle<Self>,
4749 envelope: TypedEnvelope<T::ProtoRequest>,
4750 _: Arc<Client>,
4751 mut cx: AsyncAppContext,
4752 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4753 where
4754 <T::LspRequest as lsp::request::Request>::Result: Send,
4755 {
4756 let sender_id = envelope.original_sender_id()?;
4757 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4758 let buffer_handle = this.read_with(&cx, |this, _| {
4759 this.opened_buffers
4760 .get(&buffer_id)
4761 .and_then(|buffer| buffer.upgrade(&cx))
4762 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4763 })?;
4764 let request = T::from_proto(
4765 envelope.payload,
4766 this.clone(),
4767 buffer_handle.clone(),
4768 cx.clone(),
4769 )
4770 .await?;
4771 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4772 let response = this
4773 .update(&mut cx, |this, cx| {
4774 this.request_lsp(buffer_handle, request, cx)
4775 })
4776 .await?;
4777 this.update(&mut cx, |this, cx| {
4778 Ok(T::response_to_proto(
4779 response,
4780 this,
4781 sender_id,
4782 &buffer_version,
4783 cx,
4784 ))
4785 })
4786 }
4787
4788 async fn handle_get_project_symbols(
4789 this: ModelHandle<Self>,
4790 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4791 _: Arc<Client>,
4792 mut cx: AsyncAppContext,
4793 ) -> Result<proto::GetProjectSymbolsResponse> {
4794 let symbols = this
4795 .update(&mut cx, |this, cx| {
4796 this.symbols(&envelope.payload.query, cx)
4797 })
4798 .await?;
4799
4800 Ok(proto::GetProjectSymbolsResponse {
4801 symbols: symbols.iter().map(serialize_symbol).collect(),
4802 })
4803 }
4804
4805 async fn handle_search_project(
4806 this: ModelHandle<Self>,
4807 envelope: TypedEnvelope<proto::SearchProject>,
4808 _: Arc<Client>,
4809 mut cx: AsyncAppContext,
4810 ) -> Result<proto::SearchProjectResponse> {
4811 let peer_id = envelope.original_sender_id()?;
4812 let query = SearchQuery::from_proto(envelope.payload)?;
4813 let result = this
4814 .update(&mut cx, |this, cx| this.search(query, cx))
4815 .await?;
4816
4817 this.update(&mut cx, |this, cx| {
4818 let mut locations = Vec::new();
4819 for (buffer, ranges) in result {
4820 for range in ranges {
4821 let start = serialize_anchor(&range.start);
4822 let end = serialize_anchor(&range.end);
4823 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4824 locations.push(proto::Location {
4825 buffer: Some(buffer),
4826 start: Some(start),
4827 end: Some(end),
4828 });
4829 }
4830 }
4831 Ok(proto::SearchProjectResponse { locations })
4832 })
4833 }
4834
4835 async fn handle_open_buffer_for_symbol(
4836 this: ModelHandle<Self>,
4837 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4838 _: Arc<Client>,
4839 mut cx: AsyncAppContext,
4840 ) -> Result<proto::OpenBufferForSymbolResponse> {
4841 let peer_id = envelope.original_sender_id()?;
4842 let symbol = envelope
4843 .payload
4844 .symbol
4845 .ok_or_else(|| anyhow!("invalid symbol"))?;
4846 let symbol = this.read_with(&cx, |this, _| {
4847 let symbol = this.deserialize_symbol(symbol)?;
4848 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4849 if signature == symbol.signature {
4850 Ok(symbol)
4851 } else {
4852 Err(anyhow!("invalid symbol signature"))
4853 }
4854 })?;
4855 let buffer = this
4856 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4857 .await?;
4858
4859 Ok(proto::OpenBufferForSymbolResponse {
4860 buffer: Some(this.update(&mut cx, |this, cx| {
4861 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4862 })),
4863 })
4864 }
4865
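    // Compute a keyed hash over the worktree id and symbol path (using this project's
    // nonce) so symbols sent back by guests can be validated before buffers are opened
    // on their behalf.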
4866 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4867 let mut hasher = Sha256::new();
4868 hasher.update(worktree_id.to_proto().to_be_bytes());
4869 hasher.update(path.to_string_lossy().as_bytes());
4870 hasher.update(self.nonce.to_be_bytes());
4871 hasher.finalize().as_slice().try_into().unwrap()
4872 }
4873
4874 async fn handle_open_buffer_by_id(
4875 this: ModelHandle<Self>,
4876 envelope: TypedEnvelope<proto::OpenBufferById>,
4877 _: Arc<Client>,
4878 mut cx: AsyncAppContext,
4879 ) -> Result<proto::OpenBufferResponse> {
4880 let peer_id = envelope.original_sender_id()?;
4881 let buffer = this
4882 .update(&mut cx, |this, cx| {
4883 this.open_buffer_by_id(envelope.payload.id, cx)
4884 })
4885 .await?;
4886 this.update(&mut cx, |this, cx| {
4887 Ok(proto::OpenBufferResponse {
4888 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4889 })
4890 })
4891 }
4892
4893 async fn handle_open_buffer_by_path(
4894 this: ModelHandle<Self>,
4895 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4896 _: Arc<Client>,
4897 mut cx: AsyncAppContext,
4898 ) -> Result<proto::OpenBufferResponse> {
4899 let peer_id = envelope.original_sender_id()?;
4900 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4901 let open_buffer = this.update(&mut cx, |this, cx| {
4902 this.open_buffer(
4903 ProjectPath {
4904 worktree_id,
4905 path: PathBuf::from(envelope.payload.path).into(),
4906 },
4907 cx,
4908 )
4909 });
4910
4911 let buffer = open_buffer.await?;
4912 this.update(&mut cx, |this, cx| {
4913 Ok(proto::OpenBufferResponse {
4914 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4915 })
4916 })
4917 }
4918
4919 fn serialize_project_transaction_for_peer(
4920 &mut self,
4921 project_transaction: ProjectTransaction,
4922 peer_id: PeerId,
4923 cx: &AppContext,
4924 ) -> proto::ProjectTransaction {
4925 let mut serialized_transaction = proto::ProjectTransaction {
4926 buffers: Default::default(),
4927 transactions: Default::default(),
4928 };
4929 for (buffer, transaction) in project_transaction.0 {
4930 serialized_transaction
4931 .buffers
4932 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4933 serialized_transaction
4934 .transactions
4935 .push(language::proto::serialize_transaction(&transaction));
4936 }
4937 serialized_transaction
4938 }
4939
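    // Rebuild a `ProjectTransaction` from its protobuf form, waiting for each buffer's
    // edits to arrive and optionally pushing the transactions onto undo history.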
4940 fn deserialize_project_transaction(
4941 &mut self,
4942 message: proto::ProjectTransaction,
4943 push_to_history: bool,
4944 cx: &mut ModelContext<Self>,
4945 ) -> Task<Result<ProjectTransaction>> {
4946 cx.spawn(|this, mut cx| async move {
4947 let mut project_transaction = ProjectTransaction::default();
4948 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4949 let buffer = this
4950 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4951 .await?;
4952 let transaction = language::proto::deserialize_transaction(transaction)?;
4953 project_transaction.0.insert(buffer, transaction);
4954 }
4955
4956 for (buffer, transaction) in &project_transaction.0 {
4957 buffer
4958 .update(&mut cx, |buffer, _| {
4959 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4960 })
4961 .await;
4962
4963 if push_to_history {
4964 buffer.update(&mut cx, |buffer, _| {
4965 buffer.push_transaction(transaction.clone(), Instant::now());
4966 });
4967 }
4968 }
4969
4970 Ok(project_transaction)
4971 })
4972 }
4973
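    // Send the full buffer state the first time a buffer is shared with a peer; on
    // subsequent sends, refer to it by its remote id only.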
4974 fn serialize_buffer_for_peer(
4975 &mut self,
4976 buffer: &ModelHandle<Buffer>,
4977 peer_id: PeerId,
4978 cx: &AppContext,
4979 ) -> proto::Buffer {
4980 let buffer_id = buffer.read(cx).remote_id();
4981 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4982 if shared_buffers.insert(buffer_id) {
4983 proto::Buffer {
4984 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4985 }
4986 } else {
4987 proto::Buffer {
4988 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4989 }
4990 }
4991 }
4992
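    // Resolve a buffer received from the host: either wait for an already-announced
    // buffer id to finish opening locally, or construct a new buffer from the provided
    // state and register it with the project.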
4993 fn deserialize_buffer(
4994 &mut self,
4995 buffer: proto::Buffer,
4996 cx: &mut ModelContext<Self>,
4997 ) -> Task<Result<ModelHandle<Buffer>>> {
4998 let replica_id = self.replica_id();
4999
5000 let opened_buffer_tx = self.opened_buffer.0.clone();
5001 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5002 cx.spawn(|this, mut cx| async move {
5003 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5004 proto::buffer::Variant::Id(id) => {
5005 let buffer = loop {
5006 let buffer = this.read_with(&cx, |this, cx| {
5007 this.opened_buffers
5008 .get(&id)
5009 .and_then(|buffer| buffer.upgrade(cx))
5010 });
5011 if let Some(buffer) = buffer {
5012 break buffer;
5013 }
5014 opened_buffer_rx
5015 .next()
5016 .await
5017 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5018 };
5019 Ok(buffer)
5020 }
5021 proto::buffer::Variant::State(mut buffer) => {
5022 let mut buffer_worktree = None;
5023 let mut buffer_file = None;
5024 if let Some(file) = buffer.file.take() {
5025 this.read_with(&cx, |this, cx| {
5026 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5027 let worktree =
5028 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5029 anyhow!("no worktree found for id {}", file.worktree_id)
5030 })?;
5031 buffer_file =
5032 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5033 as Box<dyn language::File>);
5034 buffer_worktree = Some(worktree);
5035 Ok::<_, anyhow::Error>(())
5036 })?;
5037 }
5038
5039 let buffer = cx.add_model(|cx| {
5040 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5041 });
5042
5043 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5044
5045 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5046 Ok(buffer)
5047 }
5048 }
5049 })
5050 }
5051
5052 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5053 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5054 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5055 let start = serialized_symbol
5056 .start
5057 .ok_or_else(|| anyhow!("invalid start"))?;
5058 let end = serialized_symbol
5059 .end
5060 .ok_or_else(|| anyhow!("invalid end"))?;
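        // Transmute the serialized integer back into the symbol kind enum; this assumes
        // the peer sent a valid discriminant for that enum.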
5061 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5062 let path = PathBuf::from(serialized_symbol.path);
5063 let language = self.languages.select_language(&path);
5064 Ok(Symbol {
5065 source_worktree_id,
5066 worktree_id,
5067 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5068 label: language
5069 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5070 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5071 name: serialized_symbol.name,
5072 path,
5073 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5074 kind,
5075 signature: serialized_symbol
5076 .signature
5077 .try_into()
5078 .map_err(|_| anyhow!("invalid signature"))?,
5079 })
5080 }
5081
5082 async fn handle_buffer_saved(
5083 this: ModelHandle<Self>,
5084 envelope: TypedEnvelope<proto::BufferSaved>,
5085 _: Arc<Client>,
5086 mut cx: AsyncAppContext,
5087 ) -> Result<()> {
5088 let version = deserialize_version(envelope.payload.version);
5089 let mtime = envelope
5090 .payload
5091 .mtime
5092 .ok_or_else(|| anyhow!("missing mtime"))?
5093 .into();
5094
5095 this.update(&mut cx, |this, cx| {
5096 let buffer = this
5097 .opened_buffers
5098 .get(&envelope.payload.buffer_id)
5099 .and_then(|buffer| buffer.upgrade(cx));
5100 if let Some(buffer) = buffer {
5101 buffer.update(cx, |buffer, cx| {
5102 buffer.did_save(version, mtime, None, cx);
5103 });
5104 }
5105 Ok(())
5106 })
5107 }
5108
5109 async fn handle_buffer_reloaded(
5110 this: ModelHandle<Self>,
5111 envelope: TypedEnvelope<proto::BufferReloaded>,
5112 _: Arc<Client>,
5113 mut cx: AsyncAppContext,
5114 ) -> Result<()> {
5115 let payload = envelope.payload.clone();
5116 let version = deserialize_version(payload.version);
5117 let mtime = payload
5118 .mtime
5119 .ok_or_else(|| anyhow!("missing mtime"))?
5120 .into();
5121 this.update(&mut cx, |this, cx| {
5122 let buffer = this
5123 .opened_buffers
5124 .get(&payload.buffer_id)
5125 .and_then(|buffer| buffer.upgrade(cx));
5126 if let Some(buffer) = buffer {
5127 buffer.update(cx, |buffer, cx| {
5128 buffer.did_reload(version, mtime, cx);
5129 });
5130 }
5131 Ok(())
5132 })
5133 }
5134
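    // Fuzzy-match `query` against the paths of all visible worktrees on the background
    // executor, prefixing candidates with the worktree root name when more than one
    // worktree is open.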
5135 pub fn match_paths<'a>(
5136 &self,
5137 query: &'a str,
5138 include_ignored: bool,
5139 smart_case: bool,
5140 max_results: usize,
5141 cancel_flag: &'a AtomicBool,
5142 cx: &AppContext,
5143 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5144 let worktrees = self
5145 .worktrees(cx)
5146 .filter(|worktree| worktree.read(cx).is_visible())
5147 .collect::<Vec<_>>();
5148 let include_root_name = worktrees.len() > 1;
5149 let candidate_sets = worktrees
5150 .into_iter()
5151 .map(|worktree| CandidateSet {
5152 snapshot: worktree.read(cx).snapshot(),
5153 include_ignored,
5154 include_root_name,
5155 })
5156 .collect::<Vec<_>>();
5157
5158 let background = cx.background().clone();
5159 async move {
5160 fuzzy::match_paths(
5161 candidate_sets.as_slice(),
5162 query,
5163 smart_case,
5164 max_results,
5165 cancel_flag,
5166 background,
5167 )
5168 .await
5169 }
5170 }
5171
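    // Convert a batch of LSP text edits into anchor ranges and replacement strings
    // against the buffer snapshot matching the given LSP document version, merging
    // adjacent edits and diffing multi-line replacements so existing anchors stay put.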
5172 fn edits_from_lsp(
5173 &mut self,
5174 buffer: &ModelHandle<Buffer>,
5175 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5176 version: Option<i32>,
5177 cx: &mut ModelContext<Self>,
5178 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5179 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5180 cx.background().spawn(async move {
5181 let snapshot = snapshot?;
5182 let mut lsp_edits = lsp_edits
5183 .into_iter()
5184 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5185 .collect::<Vec<_>>();
5186 lsp_edits.sort_by_key(|(range, _)| range.start);
5187
5188 let mut lsp_edits = lsp_edits.into_iter().peekable();
5189 let mut edits = Vec::new();
5190 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5191 // Combine any LSP edits that are adjacent.
5192 //
5193 // Also, combine LSP edits that are separated from each other by only
5194 // a newline. This is important because for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
5196 // are separated by unchanged newline characters.
5197 //
5198 // In order for the diffing logic below to work properly, any edits that
5199 // cancel each other out must be combined into one.
5200 while let Some((next_range, next_text)) = lsp_edits.peek() {
5201 if next_range.start > range.end {
5202 if next_range.start.row > range.end.row + 1
5203 || next_range.start.column > 0
5204 || snapshot.clip_point_utf16(
5205 PointUtf16::new(range.end.row, u32::MAX),
5206 Bias::Left,
5207 ) > range.end
5208 {
5209 break;
5210 }
5211 new_text.push('\n');
5212 }
5213 range.end = next_range.end;
5214 new_text.push_str(&next_text);
5215 lsp_edits.next();
5216 }
5217
5218 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5219 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5220 {
5221 return Err(anyhow!("invalid edits received from language server"));
5222 }
5223
5224 // For multiline edits, perform a diff of the old and new text so that
5225 // we can identify the changes more precisely, preserving the locations
5226 // of any anchors positioned in the unchanged regions.
5227 if range.end.row > range.start.row {
5228 let mut offset = range.start.to_offset(&snapshot);
5229 let old_text = snapshot.text_for_range(range).collect::<String>();
5230
5231 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5232 let mut moved_since_edit = true;
5233 for change in diff.iter_all_changes() {
5234 let tag = change.tag();
5235 let value = change.value();
5236 match tag {
5237 ChangeTag::Equal => {
5238 offset += value.len();
5239 moved_since_edit = true;
5240 }
5241 ChangeTag::Delete => {
5242 let start = snapshot.anchor_after(offset);
5243 let end = snapshot.anchor_before(offset + value.len());
5244 if moved_since_edit {
5245 edits.push((start..end, String::new()));
5246 } else {
5247 edits.last_mut().unwrap().0.end = end;
5248 }
5249 offset += value.len();
5250 moved_since_edit = false;
5251 }
5252 ChangeTag::Insert => {
5253 if moved_since_edit {
5254 let anchor = snapshot.anchor_after(offset);
5255 edits.push((anchor.clone()..anchor, value.to_string()));
5256 } else {
5257 edits.last_mut().unwrap().1.push_str(value);
5258 }
5259 moved_since_edit = false;
5260 }
5261 }
5262 }
5263 } else if range.end == range.start {
5264 let anchor = snapshot.anchor_after(range.start);
5265 edits.push((anchor.clone()..anchor, new_text));
5266 } else {
5267 let edit_start = snapshot.anchor_after(range.start);
5268 let edit_end = snapshot.anchor_before(range.end);
5269 edits.push((edit_start..edit_end, new_text));
5270 }
5271 }
5272
5273 Ok(edits)
5274 })
5275 }
5276
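    // Look up the buffer snapshot recorded for an LSP document version, pruning
    // snapshots older than `OLD_VERSIONS_TO_RETAIN`; with no version, use the current
    // text snapshot.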
5277 fn buffer_snapshot_for_lsp_version(
5278 &mut self,
5279 buffer: &ModelHandle<Buffer>,
5280 version: Option<i32>,
5281 cx: &AppContext,
5282 ) -> Result<TextBufferSnapshot> {
5283 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5284
5285 if let Some(version) = version {
5286 let buffer_id = buffer.read(cx).remote_id();
5287 let snapshots = self
5288 .buffer_snapshots
5289 .get_mut(&buffer_id)
5290 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5291 let mut found_snapshot = None;
5292 snapshots.retain(|(snapshot_version, snapshot)| {
5293 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5294 false
5295 } else {
5296 if *snapshot_version == version {
5297 found_snapshot = Some(snapshot.clone());
5298 }
5299 true
5300 }
5301 });
5302
5303 found_snapshot.ok_or_else(|| {
5304 anyhow!(
5305 "snapshot not found for buffer {} at version {}",
5306 buffer_id,
5307 version
5308 )
5309 })
5310 } else {
            Ok(buffer.read(cx).text_snapshot())
5312 }
5313 }
5314
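    // Find the language server (and its adapter) running for this buffer's language in
    // the buffer's worktree, if any.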
5315 fn language_server_for_buffer(
5316 &self,
5317 buffer: &Buffer,
5318 cx: &AppContext,
5319 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5320 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5321 let worktree_id = file.worktree_id(cx);
5322 self.language_servers
5323 .get(&(worktree_id, language.lsp_adapter()?.name()))
5324 } else {
5325 None
5326 }
5327 }
5328}
5329
5330impl ProjectStore {
5331 pub fn new(db: Arc<Db>) -> Self {
5332 Self {
5333 db,
5334 projects: Default::default(),
5335 }
5336 }
5337
5338 pub fn projects<'a>(
5339 &'a self,
5340 cx: &'a AppContext,
5341 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5342 self.projects
5343 .iter()
5344 .filter_map(|project| project.upgrade(cx))
5345 }
5346
5347 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5348 if let Err(ix) = self
5349 .projects
5350 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5351 {
5352 self.projects.insert(ix, project);
5353 }
5354 cx.notify();
5355 }
5356
5357 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5358 let mut did_change = false;
5359 self.projects.retain(|project| {
5360 if project.is_upgradable(cx) {
5361 true
5362 } else {
5363 did_change = true;
5364 false
5365 }
5366 });
5367 if did_change {
5368 cx.notify();
5369 }
5370 }
5371}
5372
5373impl WorktreeHandle {
5374 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5375 match self {
5376 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5377 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5378 }
5379 }
5380}
5381
5382impl OpenBuffer {
5383 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5384 match self {
5385 OpenBuffer::Strong(handle) => Some(handle.clone()),
5386 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5387 OpenBuffer::Loading(_) => None,
5388 }
5389 }
5390}
5391
5392struct CandidateSet {
5393 snapshot: Snapshot,
5394 include_ignored: bool,
5395 include_root_name: bool,
5396}
5397
5398impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5399 type Candidates = CandidateSetIter<'a>;
5400
5401 fn id(&self) -> usize {
5402 self.snapshot.id().to_usize()
5403 }
5404
5405 fn len(&self) -> usize {
5406 if self.include_ignored {
5407 self.snapshot.file_count()
5408 } else {
5409 self.snapshot.visible_file_count()
5410 }
5411 }
5412
5413 fn prefix(&self) -> Arc<str> {
5414 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5415 self.snapshot.root_name().into()
5416 } else if self.include_root_name {
5417 format!("{}/", self.snapshot.root_name()).into()
5418 } else {
5419 "".into()
5420 }
5421 }
5422
5423 fn candidates(&'a self, start: usize) -> Self::Candidates {
5424 CandidateSetIter {
5425 traversal: self.snapshot.files(self.include_ignored, start),
5426 }
5427 }
5428}
5429
5430struct CandidateSetIter<'a> {
5431 traversal: Traversal<'a>,
5432}
5433
5434impl<'a> Iterator for CandidateSetIter<'a> {
5435 type Item = PathMatchCandidate<'a>;
5436
5437 fn next(&mut self) -> Option<Self::Item> {
5438 self.traversal.next().map(|entry| {
5439 if let EntryKind::File(char_bag) = entry.kind {
5440 PathMatchCandidate {
5441 path: &entry.path,
5442 char_bag,
5443 }
5444 } else {
5445 unreachable!()
5446 }
5447 })
5448 }
5449}
5450
5451impl Entity for ProjectStore {
5452 type Event = ();
5453}
5454
5455impl Entity for Project {
5456 type Event = Event;
5457
5458 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5459 self.project_store.update(cx, ProjectStore::prune_projects);
5460
5461 match &self.client_state {
5462 ProjectClientState::Local { remote_id_rx, .. } => {
5463 if let Some(project_id) = *remote_id_rx.borrow() {
5464 self.client
5465 .send(proto::UnregisterProject { project_id })
5466 .log_err();
5467 }
5468 }
5469 ProjectClientState::Remote { remote_id, .. } => {
5470 self.client
5471 .send(proto::LeaveProject {
5472 project_id: *remote_id,
5473 })
5474 .log_err();
5475 }
5476 }
5477 }
5478
5479 fn app_will_quit(
5480 &mut self,
5481 _: &mut MutableAppContext,
5482 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5483 let shutdown_futures = self
5484 .language_servers
5485 .drain()
5486 .filter_map(|(_, (_, server))| server.shutdown())
5487 .collect::<Vec<_>>();
5488 Some(
5489 async move {
5490 futures::future::join_all(shutdown_futures).await;
5491 }
5492 .boxed(),
5493 )
5494 }
5495}
5496
5497impl Collaborator {
5498 fn from_proto(
5499 message: proto::Collaborator,
5500 user_store: &ModelHandle<UserStore>,
5501 cx: &mut AsyncAppContext,
5502 ) -> impl Future<Output = Result<Self>> {
5503 let user = user_store.update(cx, |user_store, cx| {
5504 user_store.fetch_user(message.user_id, cx)
5505 });
5506
5507 async move {
5508 Ok(Self {
5509 peer_id: PeerId(message.peer_id),
5510 user: user.await?,
5511 replica_id: message.replica_id as ReplicaId,
5512 })
5513 }
5514 }
5515}
5516
5517impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5518 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5519 Self {
5520 worktree_id,
5521 path: path.as_ref().into(),
5522 }
5523 }
5524}
5525
5526impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5527 fn from(options: lsp::CreateFileOptions) -> Self {
5528 Self {
5529 overwrite: options.overwrite.unwrap_or(false),
5530 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5531 }
5532 }
5533}
5534
5535impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5536 fn from(options: lsp::RenameFileOptions) -> Self {
5537 Self {
5538 overwrite: options.overwrite.unwrap_or(false),
5539 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5540 }
5541 }
5542}
5543
5544impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5545 fn from(options: lsp::DeleteFileOptions) -> Self {
5546 Self {
5547 recursive: options.recursive.unwrap_or(false),
5548 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5549 }
5550 }
5551}
5552
5553fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5554 proto::Symbol {
5555 source_worktree_id: symbol.source_worktree_id.to_proto(),
5556 worktree_id: symbol.worktree_id.to_proto(),
5557 language_server_name: symbol.language_server_name.0.to_string(),
5558 name: symbol.name.clone(),
5559 kind: unsafe { mem::transmute(symbol.kind) },
5560 path: symbol.path.to_string_lossy().to_string(),
5561 start: Some(proto::Point {
5562 row: symbol.range.start.row,
5563 column: symbol.range.start.column,
5564 }),
5565 end: Some(proto::Point {
5566 row: symbol.range.end.row,
5567 column: symbol.range.end.column,
5568 }),
5569 signature: symbol.signature.to_vec(),
5570 }
5571}
5572
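// Compute `path` relative to `base` lexically, inserting `..` components wherever the
// two paths diverge.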
5573fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5574 let mut path_components = path.components();
5575 let mut base_components = base.components();
5576 let mut components: Vec<Component> = Vec::new();
5577 loop {
5578 match (path_components.next(), base_components.next()) {
5579 (None, None) => break,
5580 (Some(a), None) => {
5581 components.push(a);
5582 components.extend(path_components.by_ref());
5583 break;
5584 }
5585 (None, _) => components.push(Component::ParentDir),
5586 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5587 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5588 (Some(a), Some(_)) => {
5589 components.push(Component::ParentDir);
5590 for _ in base_components {
5591 components.push(Component::ParentDir);
5592 }
5593 components.push(a);
5594 components.extend(path_components.by_ref());
5595 break;
5596 }
5597 }
5598 }
5599 components.iter().map(|c| c.as_os_str()).collect()
5600}
5601
5602impl Item for Buffer {
5603 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5604 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5605 }
5606}
5607
5608#[cfg(test)]
5609mod tests {
5610 use crate::worktree::WorktreeHandle;
5611
5612 use super::{Event, *};
5613 use fs::RealFs;
5614 use futures::{future, StreamExt};
5615 use gpui::test::subscribe;
5616 use language::{
5617 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5618 OffsetRangeExt, Point, ToPoint,
5619 };
5620 use lsp::Url;
5621 use serde_json::json;
5622 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5623 use unindent::Unindent as _;
5624 use util::{assert_set_eq, test::temp_tree};
5625
5626 #[gpui::test]
5627 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5628 let dir = temp_tree(json!({
5629 "root": {
5630 "apple": "",
5631 "banana": {
5632 "carrot": {
5633 "date": "",
5634 "endive": "",
5635 }
5636 },
5637 "fennel": {
5638 "grape": "",
5639 }
5640 }
5641 }));
5642
5643 let root_link_path = dir.path().join("root_link");
5644 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5645 unix::fs::symlink(
5646 &dir.path().join("root/fennel"),
5647 &dir.path().join("root/finnochio"),
5648 )
5649 .unwrap();
5650
5651 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5652
5653 project.read_with(cx, |project, cx| {
5654 let tree = project.worktrees(cx).next().unwrap().read(cx);
5655 assert_eq!(tree.file_count(), 5);
5656 assert_eq!(
5657 tree.inode_for_path("fennel/grape"),
5658 tree.inode_for_path("finnochio/grape")
5659 );
5660 });
5661
5662 let cancel_flag = Default::default();
5663 let results = project
5664 .read_with(cx, |project, cx| {
5665 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5666 })
5667 .await;
5668 assert_eq!(
5669 results
5670 .into_iter()
5671 .map(|result| result.path)
5672 .collect::<Vec<Arc<Path>>>(),
5673 vec![
5674 PathBuf::from("banana/carrot/date").into(),
5675 PathBuf::from("banana/carrot/endive").into(),
5676 ]
5677 );
5678 }
5679
5680 #[gpui::test]
5681 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5682 cx.foreground().forbid_parking();
5683
5684 let mut rust_language = Language::new(
5685 LanguageConfig {
5686 name: "Rust".into(),
5687 path_suffixes: vec!["rs".to_string()],
5688 ..Default::default()
5689 },
5690 Some(tree_sitter_rust::language()),
5691 );
5692 let mut json_language = Language::new(
5693 LanguageConfig {
5694 name: "JSON".into(),
5695 path_suffixes: vec!["json".to_string()],
5696 ..Default::default()
5697 },
5698 None,
5699 );
5700 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5701 name: "the-rust-language-server",
5702 capabilities: lsp::ServerCapabilities {
5703 completion_provider: Some(lsp::CompletionOptions {
5704 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5705 ..Default::default()
5706 }),
5707 ..Default::default()
5708 },
5709 ..Default::default()
5710 });
5711 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5712 name: "the-json-language-server",
5713 capabilities: lsp::ServerCapabilities {
5714 completion_provider: Some(lsp::CompletionOptions {
5715 trigger_characters: Some(vec![":".to_string()]),
5716 ..Default::default()
5717 }),
5718 ..Default::default()
5719 },
5720 ..Default::default()
5721 });
5722
5723 let fs = FakeFs::new(cx.background());
5724 fs.insert_tree(
5725 "/the-root",
5726 json!({
5727 "test.rs": "const A: i32 = 1;",
5728 "test2.rs": "",
5729 "Cargo.toml": "a = 1",
5730 "package.json": "{\"a\": 1}",
5731 }),
5732 )
5733 .await;
5734
5735 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5736 project.update(cx, |project, _| {
5737 project.languages.add(Arc::new(rust_language));
5738 project.languages.add(Arc::new(json_language));
5739 });
5740
5741 // Open a buffer without an associated language server.
5742 let toml_buffer = project
5743 .update(cx, |project, cx| {
5744 project.open_local_buffer("/the-root/Cargo.toml", cx)
5745 })
5746 .await
5747 .unwrap();
5748
5749 // Open a buffer with an associated language server.
5750 let rust_buffer = project
5751 .update(cx, |project, cx| {
5752 project.open_local_buffer("/the-root/test.rs", cx)
5753 })
5754 .await
5755 .unwrap();
5756
5757        // A Rust language server is started up, and it is notified about Rust files.
5758 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5759 assert_eq!(
5760 fake_rust_server
5761 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5762 .await
5763 .text_document,
5764 lsp::TextDocumentItem {
5765 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5766 version: 0,
5767 text: "const A: i32 = 1;".to_string(),
5768 language_id: Default::default()
5769 }
5770 );
5771
5772 // The buffer is configured based on the language server's capabilities.
5773 rust_buffer.read_with(cx, |buffer, _| {
5774 assert_eq!(
5775 buffer.completion_triggers(),
5776 &[".".to_string(), "::".to_string()]
5777 );
5778 });
5779 toml_buffer.read_with(cx, |buffer, _| {
5780 assert!(buffer.completion_triggers().is_empty());
5781 });
5782
5783 // Edit a buffer. The changes are reported to the language server.
5784 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5785 assert_eq!(
5786 fake_rust_server
5787 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5788 .await
5789 .text_document,
5790 lsp::VersionedTextDocumentIdentifier::new(
5791 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5792 1
5793 )
5794 );
5795
5796 // Open a third buffer with a different associated language server.
5797 let json_buffer = project
5798 .update(cx, |project, cx| {
5799 project.open_local_buffer("/the-root/package.json", cx)
5800 })
5801 .await
5802 .unwrap();
5803
5804        // A JSON language server is started up and is notified only about the JSON buffer.
5805 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5806 assert_eq!(
5807 fake_json_server
5808 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5809 .await
5810 .text_document,
5811 lsp::TextDocumentItem {
5812 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5813 version: 0,
5814 text: "{\"a\": 1}".to_string(),
5815 language_id: Default::default()
5816 }
5817 );
5818
5819 // This buffer is configured based on the second language server's
5820 // capabilities.
5821 json_buffer.read_with(cx, |buffer, _| {
5822 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5823 });
5824
5825 // When opening another buffer whose language server is already running,
5826 // it is also configured based on the existing language server's capabilities.
5827 let rust_buffer2 = project
5828 .update(cx, |project, cx| {
5829 project.open_local_buffer("/the-root/test2.rs", cx)
5830 })
5831 .await
5832 .unwrap();
5833 rust_buffer2.read_with(cx, |buffer, _| {
5834 assert_eq!(
5835 buffer.completion_triggers(),
5836 &[".".to_string(), "::".to_string()]
5837 );
5838 });
5839
5840 // Changes are reported only to servers matching the buffer's language.
5841 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5842 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5843 assert_eq!(
5844 fake_rust_server
5845 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5846 .await
5847 .text_document,
5848 lsp::VersionedTextDocumentIdentifier::new(
5849 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5850 1
5851 )
5852 );
5853
5854 // Save notifications are reported to all servers.
5855 toml_buffer
5856 .update(cx, |buffer, cx| buffer.save(cx))
5857 .await
5858 .unwrap();
5859 assert_eq!(
5860 fake_rust_server
5861 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5862 .await
5863 .text_document,
5864 lsp::TextDocumentIdentifier::new(
5865 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5866 )
5867 );
5868 assert_eq!(
5869 fake_json_server
5870 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5871 .await
5872 .text_document,
5873 lsp::TextDocumentIdentifier::new(
5874 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5875 )
5876 );
5877
5878 // Renames are reported only to servers matching the buffer's language.
5879 fs.rename(
5880 Path::new("/the-root/test2.rs"),
5881 Path::new("/the-root/test3.rs"),
5882 Default::default(),
5883 )
5884 .await
5885 .unwrap();
5886 assert_eq!(
5887 fake_rust_server
5888 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5889 .await
5890 .text_document,
5891 lsp::TextDocumentIdentifier::new(
5892 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5893 ),
5894 );
5895 assert_eq!(
5896 fake_rust_server
5897 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5898 .await
5899 .text_document,
5900 lsp::TextDocumentItem {
5901 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5902 version: 0,
5903 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5904 language_id: Default::default()
5905 },
5906 );
5907
5908 rust_buffer2.update(cx, |buffer, cx| {
5909 buffer.update_diagnostics(
5910 DiagnosticSet::from_sorted_entries(
5911 vec![DiagnosticEntry {
5912 diagnostic: Default::default(),
5913 range: Anchor::MIN..Anchor::MAX,
5914 }],
5915 &buffer.snapshot(),
5916 ),
5917 cx,
5918 );
5919 assert_eq!(
5920 buffer
5921 .snapshot()
5922 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5923 .count(),
5924 1
5925 );
5926 });
5927
5928 // When the rename changes the extension of the file, the buffer gets closed on the old
5929 // language server and gets opened on the new one.
5930 fs.rename(
5931 Path::new("/the-root/test3.rs"),
5932 Path::new("/the-root/test3.json"),
5933 Default::default(),
5934 )
5935 .await
5936 .unwrap();
5937 assert_eq!(
5938 fake_rust_server
5939 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5940 .await
5941 .text_document,
5942 lsp::TextDocumentIdentifier::new(
5943 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5944 ),
5945 );
5946 assert_eq!(
5947 fake_json_server
5948 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5949 .await
5950 .text_document,
5951 lsp::TextDocumentItem {
5952 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5953 version: 0,
5954 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5955 language_id: Default::default()
5956 },
5957 );
5958
5959 // We clear the diagnostics, since the language has changed.
5960 rust_buffer2.read_with(cx, |buffer, _| {
5961 assert_eq!(
5962 buffer
5963 .snapshot()
5964 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5965 .count(),
5966 0
5967 );
5968 });
5969
5970        // The renamed file's version resets after changing language servers.
5971 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5972 assert_eq!(
5973 fake_json_server
5974 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5975 .await
5976 .text_document,
5977 lsp::VersionedTextDocumentIdentifier::new(
5978 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5979 1
5980 )
5981 );
5982
5983 // Restart language servers
5984 project.update(cx, |project, cx| {
5985 project.restart_language_servers_for_buffers(
5986 vec![rust_buffer.clone(), json_buffer.clone()],
5987 cx,
5988 );
5989 });
5990
5991 let mut rust_shutdown_requests = fake_rust_server
5992 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5993 let mut json_shutdown_requests = fake_json_server
5994 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5995 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5996
5997 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5998 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5999
6000        // Ensure the Rust document is reopened in the new Rust language server.
6001 assert_eq!(
6002 fake_rust_server
6003 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6004 .await
6005 .text_document,
6006 lsp::TextDocumentItem {
6007 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6008 version: 1,
6009 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6010 language_id: Default::default()
6011 }
6012 );
6013
6014        // Ensure the JSON documents are reopened in the new JSON language server.
6015 assert_set_eq!(
6016 [
6017 fake_json_server
6018 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6019 .await
6020 .text_document,
6021 fake_json_server
6022 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6023 .await
6024 .text_document,
6025 ],
6026 [
6027 lsp::TextDocumentItem {
6028 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6029 version: 0,
6030 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6031 language_id: Default::default()
6032 },
6033 lsp::TextDocumentItem {
6034 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6035 version: 1,
6036 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6037 language_id: Default::default()
6038 }
6039 ]
6040 );
6041
6042 // Close notifications are reported only to servers matching the buffer's language.
6043 cx.update(|_| drop(json_buffer));
6044 let close_message = lsp::DidCloseTextDocumentParams {
6045 text_document: lsp::TextDocumentIdentifier::new(
6046 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6047 ),
6048 };
6049 assert_eq!(
6050 fake_json_server
6051 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6052 .await,
6053 close_message,
6054 );
6055 }
6056
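    // Verifies that diagnostics are delivered to the right buffer when each file is opened
    // as its own single-file worktree.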
6057 #[gpui::test]
6058 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6059 cx.foreground().forbid_parking();
6060
6061 let fs = FakeFs::new(cx.background());
6062 fs.insert_tree(
6063 "/dir",
6064 json!({
6065 "a.rs": "let a = 1;",
6066 "b.rs": "let b = 2;"
6067 }),
6068 )
6069 .await;
6070
6071 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6072
6073 let buffer_a = project
6074 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6075 .await
6076 .unwrap();
6077 let buffer_b = project
6078 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6079 .await
6080 .unwrap();
6081
6082 project.update(cx, |project, cx| {
6083 project
6084 .update_diagnostics(
6085 lsp::PublishDiagnosticsParams {
6086 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6087 version: None,
6088 diagnostics: vec![lsp::Diagnostic {
6089 range: lsp::Range::new(
6090 lsp::Position::new(0, 4),
6091 lsp::Position::new(0, 5),
6092 ),
6093 severity: Some(lsp::DiagnosticSeverity::ERROR),
6094 message: "error 1".to_string(),
6095 ..Default::default()
6096 }],
6097 },
6098 &[],
6099 cx,
6100 )
6101 .unwrap();
6102 project
6103 .update_diagnostics(
6104 lsp::PublishDiagnosticsParams {
6105 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6106 version: None,
6107 diagnostics: vec![lsp::Diagnostic {
6108 range: lsp::Range::new(
6109 lsp::Position::new(0, 4),
6110 lsp::Position::new(0, 5),
6111 ),
6112 severity: Some(lsp::DiagnosticSeverity::WARNING),
6113 message: "error 2".to_string(),
6114 ..Default::default()
6115 }],
6116 },
6117 &[],
6118 cx,
6119 )
6120 .unwrap();
6121 });
6122
6123 buffer_a.read_with(cx, |buffer, _| {
6124 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6125 assert_eq!(
6126 chunks
6127 .iter()
6128 .map(|(s, d)| (s.as_str(), *d))
6129 .collect::<Vec<_>>(),
6130 &[
6131 ("let ", None),
6132 ("a", Some(DiagnosticSeverity::ERROR)),
6133 (" = 1;", None),
6134 ]
6135 );
6136 });
6137 buffer_b.read_with(cx, |buffer, _| {
6138 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6139 assert_eq!(
6140 chunks
6141 .iter()
6142 .map(|(s, d)| (s.as_str(), *d))
6143 .collect::<Vec<_>>(),
6144 &[
6145 ("let ", None),
6146 ("b", Some(DiagnosticSeverity::WARNING)),
6147 (" = 2;", None),
6148 ]
6149 );
6150 });
6151 }
6152
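    // Verifies that disk-based diagnostic progress from the language server is surfaced as
    // project events, and that publishing unchanged empty diagnostics produces no extra events.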
6153 #[gpui::test]
6154 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6155 cx.foreground().forbid_parking();
6156
6157 let progress_token = "the-progress-token";
6158 let mut language = Language::new(
6159 LanguageConfig {
6160 name: "Rust".into(),
6161 path_suffixes: vec!["rs".to_string()],
6162 ..Default::default()
6163 },
6164 Some(tree_sitter_rust::language()),
6165 );
6166 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6167 disk_based_diagnostics_progress_token: Some(progress_token),
6168 disk_based_diagnostics_sources: &["disk"],
6169 ..Default::default()
6170 });
6171
6172 let fs = FakeFs::new(cx.background());
6173 fs.insert_tree(
6174 "/dir",
6175 json!({
6176 "a.rs": "fn a() { A }",
6177 "b.rs": "const y: i32 = 1",
6178 }),
6179 )
6180 .await;
6181
6182 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6183 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6184 let worktree_id =
6185 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6186
6187        // Cause the worktree to start the fake language server.
6188 let _buffer = project
6189 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6190 .await
6191 .unwrap();
6192
6193 let mut events = subscribe(&project, cx);
6194
6195 let mut fake_server = fake_servers.next().await.unwrap();
6196 fake_server.start_progress(progress_token).await;
6197 assert_eq!(
6198 events.next().await.unwrap(),
6199 Event::DiskBasedDiagnosticsStarted
6200 );
6201
6202 fake_server.start_progress(progress_token).await;
6203 fake_server.end_progress(progress_token).await;
6204 fake_server.start_progress(progress_token).await;
6205
6206 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6207 lsp::PublishDiagnosticsParams {
6208 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6209 version: None,
6210 diagnostics: vec![lsp::Diagnostic {
6211 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6212 severity: Some(lsp::DiagnosticSeverity::ERROR),
6213 message: "undefined variable 'A'".to_string(),
6214 ..Default::default()
6215 }],
6216 },
6217 );
6218 assert_eq!(
6219 events.next().await.unwrap(),
6220 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6221 );
6222
6223 fake_server.end_progress(progress_token).await;
6224 fake_server.end_progress(progress_token).await;
6225 assert_eq!(
6226 events.next().await.unwrap(),
6227 Event::DiskBasedDiagnosticsUpdated
6228 );
6229 assert_eq!(
6230 events.next().await.unwrap(),
6231 Event::DiskBasedDiagnosticsFinished
6232 );
6233
6234 let buffer = project
6235 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6236 .await
6237 .unwrap();
6238
6239 buffer.read_with(cx, |buffer, _| {
6240 let snapshot = buffer.snapshot();
6241 let diagnostics = snapshot
6242 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6243 .collect::<Vec<_>>();
6244 assert_eq!(
6245 diagnostics,
6246 &[DiagnosticEntry {
6247 range: Point::new(0, 9)..Point::new(0, 10),
6248 diagnostic: Diagnostic {
6249 severity: lsp::DiagnosticSeverity::ERROR,
6250 message: "undefined variable 'A'".to_string(),
6251 group_id: 0,
6252 is_primary: true,
6253 ..Default::default()
6254 }
6255 }]
6256 )
6257 });
6258
6259        // Ensure that publishing empty diagnostics twice results in only one update event.
6260 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6261 lsp::PublishDiagnosticsParams {
6262 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6263 version: None,
6264 diagnostics: Default::default(),
6265 },
6266 );
6267 assert_eq!(
6268 events.next().await.unwrap(),
6269 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6270 );
6271
6272 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6273 lsp::PublishDiagnosticsParams {
6274 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6275 version: None,
6276 diagnostics: Default::default(),
6277 },
6278 );
6279 cx.foreground().run_until_parked();
6280 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6281 }
6282
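    // Verifies that restarting a language server while its disk-based diagnostics are still
    // running does not leave the project stuck in a "diagnostics running" state.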
6283 #[gpui::test]
6284 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6285 cx.foreground().forbid_parking();
6286
6287 let progress_token = "the-progress-token";
6288 let mut language = Language::new(
6289 LanguageConfig {
6290 path_suffixes: vec!["rs".to_string()],
6291 ..Default::default()
6292 },
6293 None,
6294 );
6295 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6296 disk_based_diagnostics_sources: &["disk"],
6297 disk_based_diagnostics_progress_token: Some(progress_token),
6298 ..Default::default()
6299 });
6300
6301 let fs = FakeFs::new(cx.background());
6302 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6303
6304 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6305 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6306
6307 let buffer = project
6308 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6309 .await
6310 .unwrap();
6311
6312 // Simulate diagnostics starting to update.
6313 let mut fake_server = fake_servers.next().await.unwrap();
6314 fake_server.start_progress(progress_token).await;
6315
6316 // Restart the server before the diagnostics finish updating.
6317 project.update(cx, |project, cx| {
6318 project.restart_language_servers_for_buffers([buffer], cx);
6319 });
6320 let mut events = subscribe(&project, cx);
6321
6322 // Simulate the newly started server sending more diagnostics.
6323 let mut fake_server = fake_servers.next().await.unwrap();
6324 fake_server.start_progress(progress_token).await;
6325 assert_eq!(
6326 events.next().await.unwrap(),
6327 Event::DiskBasedDiagnosticsStarted
6328 );
6329
6330 // All diagnostics are considered done, despite the old server's diagnostic
6331 // task never completing.
6332 fake_server.end_progress(progress_token).await;
6333 assert_eq!(
6334 events.next().await.unwrap(),
6335 Event::DiskBasedDiagnosticsUpdated
6336 );
6337 assert_eq!(
6338 events.next().await.unwrap(),
6339 Event::DiskBasedDiagnosticsFinished
6340 );
6341 project.read_with(cx, |project, _| {
6342 assert!(!project.is_running_disk_based_diagnostics());
6343 });
6344 }
6345
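    // Verifies that diagnostics published against older buffer versions are translated through
    // subsequent edits, and that overlapping and out-of-order diagnostics are handled.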
6346 #[gpui::test]
6347 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6348 cx.foreground().forbid_parking();
6349
6350 let mut language = Language::new(
6351 LanguageConfig {
6352 name: "Rust".into(),
6353 path_suffixes: vec!["rs".to_string()],
6354 ..Default::default()
6355 },
6356 Some(tree_sitter_rust::language()),
6357 );
6358 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6359 disk_based_diagnostics_sources: &["disk"],
6360 ..Default::default()
6361 });
6362
6363 let text = "
6364 fn a() { A }
6365 fn b() { BB }
6366 fn c() { CCC }
6367 "
6368 .unindent();
6369
6370 let fs = FakeFs::new(cx.background());
6371 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6372
6373 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6374 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6375
6376 let buffer = project
6377 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6378 .await
6379 .unwrap();
6380
6381 let mut fake_server = fake_servers.next().await.unwrap();
6382 let open_notification = fake_server
6383 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6384 .await;
6385
6386 // Edit the buffer, moving the content down
6387 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6388 let change_notification_1 = fake_server
6389 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6390 .await;
6391 assert!(
6392 change_notification_1.text_document.version > open_notification.text_document.version
6393 );
6394
6395 // Report some diagnostics for the initial version of the buffer
6396 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6397 lsp::PublishDiagnosticsParams {
6398 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6399 version: Some(open_notification.text_document.version),
6400 diagnostics: vec![
6401 lsp::Diagnostic {
6402 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6403 severity: Some(DiagnosticSeverity::ERROR),
6404 message: "undefined variable 'A'".to_string(),
6405 source: Some("disk".to_string()),
6406 ..Default::default()
6407 },
6408 lsp::Diagnostic {
6409 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6410 severity: Some(DiagnosticSeverity::ERROR),
6411 message: "undefined variable 'BB'".to_string(),
6412 source: Some("disk".to_string()),
6413 ..Default::default()
6414 },
6415 lsp::Diagnostic {
6416 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6417 severity: Some(DiagnosticSeverity::ERROR),
6418 source: Some("disk".to_string()),
6419 message: "undefined variable 'CCC'".to_string(),
6420 ..Default::default()
6421 },
6422 ],
6423 },
6424 );
6425
6426 // The diagnostics have moved down since they were created.
6427 buffer.next_notification(cx).await;
6428 buffer.read_with(cx, |buffer, _| {
6429 assert_eq!(
6430 buffer
6431 .snapshot()
6432 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6433 .collect::<Vec<_>>(),
6434 &[
6435 DiagnosticEntry {
6436 range: Point::new(3, 9)..Point::new(3, 11),
6437 diagnostic: Diagnostic {
6438 severity: DiagnosticSeverity::ERROR,
6439 message: "undefined variable 'BB'".to_string(),
6440 is_disk_based: true,
6441 group_id: 1,
6442 is_primary: true,
6443 ..Default::default()
6444 },
6445 },
6446 DiagnosticEntry {
6447 range: Point::new(4, 9)..Point::new(4, 12),
6448 diagnostic: Diagnostic {
6449 severity: DiagnosticSeverity::ERROR,
6450 message: "undefined variable 'CCC'".to_string(),
6451 is_disk_based: true,
6452 group_id: 2,
6453 is_primary: true,
6454 ..Default::default()
6455 }
6456 }
6457 ]
6458 );
6459 assert_eq!(
6460 chunks_with_diagnostics(buffer, 0..buffer.len()),
6461 [
6462 ("\n\nfn a() { ".to_string(), None),
6463 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6464 (" }\nfn b() { ".to_string(), None),
6465 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6466 (" }\nfn c() { ".to_string(), None),
6467 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6468 (" }\n".to_string(), None),
6469 ]
6470 );
6471 assert_eq!(
6472 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6473 [
6474 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6475 (" }\nfn c() { ".to_string(), None),
6476 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6477 ]
6478 );
6479 });
6480
6481 // Ensure overlapping diagnostics are highlighted correctly.
6482 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6483 lsp::PublishDiagnosticsParams {
6484 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6485 version: Some(open_notification.text_document.version),
6486 diagnostics: vec![
6487 lsp::Diagnostic {
6488 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6489 severity: Some(DiagnosticSeverity::ERROR),
6490 message: "undefined variable 'A'".to_string(),
6491 source: Some("disk".to_string()),
6492 ..Default::default()
6493 },
6494 lsp::Diagnostic {
6495 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6496 severity: Some(DiagnosticSeverity::WARNING),
6497 message: "unreachable statement".to_string(),
6498 source: Some("disk".to_string()),
6499 ..Default::default()
6500 },
6501 ],
6502 },
6503 );
6504
6505 buffer.next_notification(cx).await;
6506 buffer.read_with(cx, |buffer, _| {
6507 assert_eq!(
6508 buffer
6509 .snapshot()
6510 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6511 .collect::<Vec<_>>(),
6512 &[
6513 DiagnosticEntry {
6514 range: Point::new(2, 9)..Point::new(2, 12),
6515 diagnostic: Diagnostic {
6516 severity: DiagnosticSeverity::WARNING,
6517 message: "unreachable statement".to_string(),
6518 is_disk_based: true,
6519 group_id: 4,
6520 is_primary: true,
6521 ..Default::default()
6522 }
6523 },
6524 DiagnosticEntry {
6525 range: Point::new(2, 9)..Point::new(2, 10),
6526 diagnostic: Diagnostic {
6527 severity: DiagnosticSeverity::ERROR,
6528 message: "undefined variable 'A'".to_string(),
6529 is_disk_based: true,
6530 group_id: 3,
6531 is_primary: true,
6532 ..Default::default()
6533 },
6534 }
6535 ]
6536 );
6537 assert_eq!(
6538 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6539 [
6540 ("fn a() { ".to_string(), None),
6541 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6542 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6543 ("\n".to_string(), None),
6544 ]
6545 );
6546 assert_eq!(
6547 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6548 [
6549 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6550 ("\n".to_string(), None),
6551 ]
6552 );
6553 });
6554
6555 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6556 // changes since the last save.
6557 buffer.update(cx, |buffer, cx| {
6558 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6559 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6560 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6561 });
6562 let change_notification_2 = fake_server
6563 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6564 .await;
6565 assert!(
6566 change_notification_2.text_document.version
6567 > change_notification_1.text_document.version
6568 );
6569
6570 // Handle out-of-order diagnostics
6571 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6572 lsp::PublishDiagnosticsParams {
6573 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6574 version: Some(change_notification_2.text_document.version),
6575 diagnostics: vec![
6576 lsp::Diagnostic {
6577 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6578 severity: Some(DiagnosticSeverity::ERROR),
6579 message: "undefined variable 'BB'".to_string(),
6580 source: Some("disk".to_string()),
6581 ..Default::default()
6582 },
6583 lsp::Diagnostic {
6584 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6585 severity: Some(DiagnosticSeverity::WARNING),
6586 message: "undefined variable 'A'".to_string(),
6587 source: Some("disk".to_string()),
6588 ..Default::default()
6589 },
6590 ],
6591 },
6592 );
6593
6594 buffer.next_notification(cx).await;
6595 buffer.read_with(cx, |buffer, _| {
6596 assert_eq!(
6597 buffer
6598 .snapshot()
6599 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6600 .collect::<Vec<_>>(),
6601 &[
6602 DiagnosticEntry {
6603 range: Point::new(2, 21)..Point::new(2, 22),
6604 diagnostic: Diagnostic {
6605 severity: DiagnosticSeverity::WARNING,
6606 message: "undefined variable 'A'".to_string(),
6607 is_disk_based: true,
6608 group_id: 6,
6609 is_primary: true,
6610 ..Default::default()
6611 }
6612 },
6613 DiagnosticEntry {
6614 range: Point::new(3, 9)..Point::new(3, 14),
6615 diagnostic: Diagnostic {
6616 severity: DiagnosticSeverity::ERROR,
6617 message: "undefined variable 'BB'".to_string(),
6618 is_disk_based: true,
6619 group_id: 5,
6620 is_primary: true,
6621 ..Default::default()
6622 },
6623 }
6624 ]
6625 );
6626 });
6627 }
6628
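    // Verifies how zero-length diagnostic ranges are expanded to cover an adjacent character.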
6629 #[gpui::test]
6630 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6631 cx.foreground().forbid_parking();
6632
6633 let text = concat!(
6634 "let one = ;\n", //
6635 "let two = \n",
6636 "let three = 3;\n",
6637 );
6638
6639 let fs = FakeFs::new(cx.background());
6640 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6641
6642 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6643 let buffer = project
6644 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6645 .await
6646 .unwrap();
6647
6648 project.update(cx, |project, cx| {
6649 project
6650 .update_buffer_diagnostics(
6651 &buffer,
6652 vec![
6653 DiagnosticEntry {
6654 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6655 diagnostic: Diagnostic {
6656 severity: DiagnosticSeverity::ERROR,
6657 message: "syntax error 1".to_string(),
6658 ..Default::default()
6659 },
6660 },
6661 DiagnosticEntry {
6662 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6663 diagnostic: Diagnostic {
6664 severity: DiagnosticSeverity::ERROR,
6665 message: "syntax error 2".to_string(),
6666 ..Default::default()
6667 },
6668 },
6669 ],
6670 None,
6671 cx,
6672 )
6673 .unwrap();
6674 });
6675
6676 // An empty range is extended forward to include the following character.
6677 // At the end of a line, an empty range is extended backward to include
6678 // the preceding character.
6679 buffer.read_with(cx, |buffer, _| {
6680 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6681 assert_eq!(
6682 chunks
6683 .iter()
6684 .map(|(s, d)| (s.as_str(), *d))
6685 .collect::<Vec<_>>(),
6686 &[
6687 ("let one = ", None),
6688 (";", Some(DiagnosticSeverity::ERROR)),
6689 ("\nlet two =", None),
6690 (" ", Some(DiagnosticSeverity::ERROR)),
6691 ("\nlet three = 3;\n", None)
6692 ]
6693 );
6694 });
6695 }
6696
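    // Verifies that LSP edits computed against an older document version are rebased onto the
    // buffer's current contents.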
6697 #[gpui::test]
6698 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6699 cx.foreground().forbid_parking();
6700
6701 let mut language = Language::new(
6702 LanguageConfig {
6703 name: "Rust".into(),
6704 path_suffixes: vec!["rs".to_string()],
6705 ..Default::default()
6706 },
6707 Some(tree_sitter_rust::language()),
6708 );
6709 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6710
6711 let text = "
6712 fn a() {
6713 f1();
6714 }
6715 fn b() {
6716 f2();
6717 }
6718 fn c() {
6719 f3();
6720 }
6721 "
6722 .unindent();
6723
6724 let fs = FakeFs::new(cx.background());
6725 fs.insert_tree(
6726 "/dir",
6727 json!({
6728 "a.rs": text.clone(),
6729 }),
6730 )
6731 .await;
6732
6733 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6734 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6735 let buffer = project
6736 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6737 .await
6738 .unwrap();
6739
6740 let mut fake_server = fake_servers.next().await.unwrap();
6741 let lsp_document_version = fake_server
6742 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6743 .await
6744 .text_document
6745 .version;
6746
6747 // Simulate editing the buffer after the language server computes some edits.
6748 buffer.update(cx, |buffer, cx| {
6749 buffer.edit(
6750 [(
6751 Point::new(0, 0)..Point::new(0, 0),
6752 "// above first function\n",
6753 )],
6754 cx,
6755 );
6756 buffer.edit(
6757 [(
6758 Point::new(2, 0)..Point::new(2, 0),
6759 " // inside first function\n",
6760 )],
6761 cx,
6762 );
6763 buffer.edit(
6764 [(
6765 Point::new(6, 4)..Point::new(6, 4),
6766 "// inside second function ",
6767 )],
6768 cx,
6769 );
6770
6771 assert_eq!(
6772 buffer.text(),
6773 "
6774 // above first function
6775 fn a() {
6776 // inside first function
6777 f1();
6778 }
6779 fn b() {
6780 // inside second function f2();
6781 }
6782 fn c() {
6783 f3();
6784 }
6785 "
6786 .unindent()
6787 );
6788 });
6789
6790 let edits = project
6791 .update(cx, |project, cx| {
6792 project.edits_from_lsp(
6793 &buffer,
6794 vec![
6795 // replace body of first function
6796 lsp::TextEdit {
6797 range: lsp::Range::new(
6798 lsp::Position::new(0, 0),
6799 lsp::Position::new(3, 0),
6800 ),
6801 new_text: "
6802 fn a() {
6803 f10();
6804 }
6805 "
6806 .unindent(),
6807 },
6808 // edit inside second function
6809 lsp::TextEdit {
6810 range: lsp::Range::new(
6811 lsp::Position::new(4, 6),
6812 lsp::Position::new(4, 6),
6813 ),
6814 new_text: "00".into(),
6815 },
6816 // edit inside third function via two distinct edits
6817 lsp::TextEdit {
6818 range: lsp::Range::new(
6819 lsp::Position::new(7, 5),
6820 lsp::Position::new(7, 5),
6821 ),
6822 new_text: "4000".into(),
6823 },
6824 lsp::TextEdit {
6825 range: lsp::Range::new(
6826 lsp::Position::new(7, 5),
6827 lsp::Position::new(7, 6),
6828 ),
6829 new_text: "".into(),
6830 },
6831 ],
6832 Some(lsp_document_version),
6833 cx,
6834 )
6835 })
6836 .await
6837 .unwrap();
6838
6839 buffer.update(cx, |buffer, cx| {
6840 for (range, new_text) in edits {
6841 buffer.edit([(range, new_text)], cx);
6842 }
6843 assert_eq!(
6844 buffer.text(),
6845 "
6846 // above first function
6847 fn a() {
6848 // inside first function
6849 f10();
6850 }
6851 fn b() {
6852 // inside second function f200();
6853 }
6854 fn c() {
6855 f4000();
6856 }
6857 "
6858 .unindent()
6859 );
6860 });
6861 }
6862
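    // Verifies that a large LSP diff describing a small change is reduced to the minimal
    // equivalent buffer edits.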
6863 #[gpui::test]
6864 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6865 cx.foreground().forbid_parking();
6866
6867 let text = "
6868 use a::b;
6869 use a::c;
6870
6871 fn f() {
6872 b();
6873 c();
6874 }
6875 "
6876 .unindent();
6877
6878 let fs = FakeFs::new(cx.background());
6879 fs.insert_tree(
6880 "/dir",
6881 json!({
6882 "a.rs": text.clone(),
6883 }),
6884 )
6885 .await;
6886
6887 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6888 let buffer = project
6889 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6890 .await
6891 .unwrap();
6892
6893 // Simulate the language server sending us a small edit in the form of a very large diff.
6894 // Rust-analyzer does this when performing a merge-imports code action.
6895 let edits = project
6896 .update(cx, |project, cx| {
6897 project.edits_from_lsp(
6898 &buffer,
6899 [
6900 // Replace the first use statement without editing the semicolon.
6901 lsp::TextEdit {
6902 range: lsp::Range::new(
6903 lsp::Position::new(0, 4),
6904 lsp::Position::new(0, 8),
6905 ),
6906 new_text: "a::{b, c}".into(),
6907 },
6908 // Reinsert the remainder of the file between the semicolon and the final
6909 // newline of the file.
6910 lsp::TextEdit {
6911 range: lsp::Range::new(
6912 lsp::Position::new(0, 9),
6913 lsp::Position::new(0, 9),
6914 ),
6915 new_text: "\n\n".into(),
6916 },
6917 lsp::TextEdit {
6918 range: lsp::Range::new(
6919 lsp::Position::new(0, 9),
6920 lsp::Position::new(0, 9),
6921 ),
6922 new_text: "
6923 fn f() {
6924 b();
6925 c();
6926 }"
6927 .unindent(),
6928 },
6929 // Delete everything after the first newline of the file.
6930 lsp::TextEdit {
6931 range: lsp::Range::new(
6932 lsp::Position::new(1, 0),
6933 lsp::Position::new(7, 0),
6934 ),
6935 new_text: "".into(),
6936 },
6937 ],
6938 None,
6939 cx,
6940 )
6941 })
6942 .await
6943 .unwrap();
6944
6945 buffer.update(cx, |buffer, cx| {
6946 let edits = edits
6947 .into_iter()
6948 .map(|(range, text)| {
6949 (
6950 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6951 text,
6952 )
6953 })
6954 .collect::<Vec<_>>();
6955
6956 assert_eq!(
6957 edits,
6958 [
6959 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6960 (Point::new(1, 0)..Point::new(2, 0), "".into())
6961 ]
6962 );
6963
6964 for (range, new_text) in edits {
6965 buffer.edit([(range, new_text)], cx);
6966 }
6967 assert_eq!(
6968 buffer.text(),
6969 "
6970 use a::{b, c};
6971
6972 fn f() {
6973 b();
6974 c();
6975 }
6976 "
6977 .unindent()
6978 );
6979 });
6980 }
6981
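    // Verifies that unordered and inverted LSP edit ranges are normalized before being applied.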
6982 #[gpui::test]
6983 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
6984 cx.foreground().forbid_parking();
6985
6986 let text = "
6987 use a::b;
6988 use a::c;
6989
6990 fn f() {
6991 b();
6992 c();
6993 }
6994 "
6995 .unindent();
6996
6997 let fs = FakeFs::new(cx.background());
6998 fs.insert_tree(
6999 "/dir",
7000 json!({
7001 "a.rs": text.clone(),
7002 }),
7003 )
7004 .await;
7005
7006 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7007 let buffer = project
7008 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7009 .await
7010 .unwrap();
7011
7012        // Simulate the language server sending us edits out of order,
7013        // with some of the ranges inverted.
7014 let edits = project
7015 .update(cx, |project, cx| {
7016 project.edits_from_lsp(
7017 &buffer,
7018 [
7019 lsp::TextEdit {
7020 range: lsp::Range::new(
7021 lsp::Position::new(0, 9),
7022 lsp::Position::new(0, 9),
7023 ),
7024 new_text: "\n\n".into(),
7025 },
7026 lsp::TextEdit {
7027 range: lsp::Range::new(
7028 lsp::Position::new(0, 8),
7029 lsp::Position::new(0, 4),
7030 ),
7031 new_text: "a::{b, c}".into(),
7032 },
7033 lsp::TextEdit {
7034 range: lsp::Range::new(
7035 lsp::Position::new(1, 0),
7036 lsp::Position::new(7, 0),
7037 ),
7038 new_text: "".into(),
7039 },
7040 lsp::TextEdit {
7041 range: lsp::Range::new(
7042 lsp::Position::new(0, 9),
7043 lsp::Position::new(0, 9),
7044 ),
7045 new_text: "
7046 fn f() {
7047 b();
7048 c();
7049 }"
7050 .unindent(),
7051 },
7052 ],
7053 None,
7054 cx,
7055 )
7056 })
7057 .await
7058 .unwrap();
7059
7060 buffer.update(cx, |buffer, cx| {
7061 let edits = edits
7062 .into_iter()
7063 .map(|(range, text)| {
7064 (
7065 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7066 text,
7067 )
7068 })
7069 .collect::<Vec<_>>();
7070
7071 assert_eq!(
7072 edits,
7073 [
7074 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7075 (Point::new(1, 0)..Point::new(2, 0), "".into())
7076 ]
7077 );
7078
7079 for (range, new_text) in edits {
7080 buffer.edit([(range, new_text)], cx);
7081 }
7082 assert_eq!(
7083 buffer.text(),
7084 "
7085 use a::{b, c};
7086
7087 fn f() {
7088 b();
7089 c();
7090 }
7091 "
7092 .unindent()
7093 );
7094 });
7095 }
7096
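    // Collects the buffer's text in `range` as (text, severity) pairs, merging adjacent chunks
    // that share the same diagnostic severity.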
7097 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7098 buffer: &Buffer,
7099 range: Range<T>,
7100 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7101 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7102 for chunk in buffer.snapshot().chunks(range, true) {
7103 if chunks.last().map_or(false, |prev_chunk| {
7104 prev_chunk.1 == chunk.diagnostic_severity
7105 }) {
7106 chunks.last_mut().unwrap().0.push_str(chunk.text);
7107 } else {
7108 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7109 }
7110 }
7111 chunks
7112 }
7113
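    // Verifies that matching paths in a worktree containing only directories returns nothing.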
7114 #[gpui::test]
7115 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7116 let dir = temp_tree(json!({
7117 "root": {
7118 "dir1": {},
7119 "dir2": {
7120 "dir3": {}
7121 }
7122 }
7123 }));
7124
7125 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7126 let cancel_flag = Default::default();
7127 let results = project
7128 .read_with(cx, |project, cx| {
7129 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7130 })
7131 .await;
7132
7133 assert!(results.is_empty());
7134 }
7135
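    // Verifies go-to-definition: the target file is loaded into a new, invisible worktree that
    // is dropped once the definition is no longer referenced.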
7136 #[gpui::test(iterations = 10)]
7137 async fn test_definition(cx: &mut gpui::TestAppContext) {
7138 let mut language = Language::new(
7139 LanguageConfig {
7140 name: "Rust".into(),
7141 path_suffixes: vec!["rs".to_string()],
7142 ..Default::default()
7143 },
7144 Some(tree_sitter_rust::language()),
7145 );
7146 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7147
7148 let fs = FakeFs::new(cx.background());
7149 fs.insert_tree(
7150 "/dir",
7151 json!({
7152 "a.rs": "const fn a() { A }",
7153 "b.rs": "const y: i32 = crate::a()",
7154 }),
7155 )
7156 .await;
7157
7158 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7159 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7160
7161 let buffer = project
7162 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7163 .await
7164 .unwrap();
7165
7166 let fake_server = fake_servers.next().await.unwrap();
7167 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7168 let params = params.text_document_position_params;
7169 assert_eq!(
7170 params.text_document.uri.to_file_path().unwrap(),
7171 Path::new("/dir/b.rs"),
7172 );
7173 assert_eq!(params.position, lsp::Position::new(0, 22));
7174
7175 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7176 lsp::Location::new(
7177 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7178 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7179 ),
7180 )))
7181 });
7182
7183 let mut definitions = project
7184 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7185 .await
7186 .unwrap();
7187
7188 assert_eq!(definitions.len(), 1);
7189 let definition = definitions.pop().unwrap();
7190 cx.update(|cx| {
7191 let target_buffer = definition.buffer.read(cx);
7192 assert_eq!(
7193 target_buffer
7194 .file()
7195 .unwrap()
7196 .as_local()
7197 .unwrap()
7198 .abs_path(cx),
7199 Path::new("/dir/a.rs"),
7200 );
7201 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7202 assert_eq!(
7203 list_worktrees(&project, cx),
7204 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7205 );
7206
7207 drop(definition);
7208 });
7209 cx.read(|cx| {
7210 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7211 });
7212
7213 fn list_worktrees<'a>(
7214 project: &'a ModelHandle<Project>,
7215 cx: &'a AppContext,
7216 ) -> Vec<(&'a Path, bool)> {
7217 project
7218 .read(cx)
7219 .worktrees(cx)
7220 .map(|worktree| {
7221 let worktree = worktree.read(cx);
7222 (
7223 worktree.as_local().unwrap().abs_path().as_ref(),
7224 worktree.is_visible(),
7225 )
7226 })
7227 .collect::<Vec<_>>()
7228 }
7229 }
7230
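    // Verifies that completions without explicit edit ranges replace the word under the cursor.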
7231 #[gpui::test]
7232 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7233 let mut language = Language::new(
7234 LanguageConfig {
7235 name: "TypeScript".into(),
7236 path_suffixes: vec!["ts".to_string()],
7237 ..Default::default()
7238 },
7239 Some(tree_sitter_typescript::language_typescript()),
7240 );
7241 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7242
7243 let fs = FakeFs::new(cx.background());
7244 fs.insert_tree(
7245 "/dir",
7246 json!({
7247 "a.ts": "",
7248 }),
7249 )
7250 .await;
7251
7252 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7253 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7254 let buffer = project
7255 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7256 .await
7257 .unwrap();
7258
7259 let fake_server = fake_language_servers.next().await.unwrap();
7260
7261 let text = "let a = b.fqn";
7262 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7263 let completions = project.update(cx, |project, cx| {
7264 project.completions(&buffer, text.len(), cx)
7265 });
7266
7267 fake_server
7268 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7269 Ok(Some(lsp::CompletionResponse::Array(vec![
7270 lsp::CompletionItem {
7271 label: "fullyQualifiedName?".into(),
7272 insert_text: Some("fullyQualifiedName".into()),
7273 ..Default::default()
7274 },
7275 ])))
7276 })
7277 .next()
7278 .await;
7279 let completions = completions.await.unwrap();
7280 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7281 assert_eq!(completions.len(), 1);
7282 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7283 assert_eq!(
7284 completions[0].old_range.to_offset(&snapshot),
7285 text.len() - 3..text.len()
7286 );
7287 }
7288
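    // Verifies that code actions backed by a command (rather than edits) are applied by
    // executing the command and capturing the workspace edit it produces.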
7289 #[gpui::test(iterations = 10)]
7290 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7291 let mut language = Language::new(
7292 LanguageConfig {
7293 name: "TypeScript".into(),
7294 path_suffixes: vec!["ts".to_string()],
7295 ..Default::default()
7296 },
7297 None,
7298 );
7299 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7300
7301 let fs = FakeFs::new(cx.background());
7302 fs.insert_tree(
7303 "/dir",
7304 json!({
7305 "a.ts": "a",
7306 }),
7307 )
7308 .await;
7309
7310 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7311 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7312 let buffer = project
7313 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7314 .await
7315 .unwrap();
7316
7317 let fake_server = fake_language_servers.next().await.unwrap();
7318
7319        // The language server returns code actions that contain commands rather than edits.
7320 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7321 fake_server
7322 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7323 Ok(Some(vec![
7324 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7325 title: "The code action".into(),
7326 command: Some(lsp::Command {
7327 title: "The command".into(),
7328 command: "_the/command".into(),
7329 arguments: Some(vec![json!("the-argument")]),
7330 }),
7331 ..Default::default()
7332 }),
7333 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7334 title: "two".into(),
7335 ..Default::default()
7336 }),
7337 ]))
7338 })
7339 .next()
7340 .await;
7341
7342 let action = actions.await.unwrap()[0].clone();
7343 let apply = project.update(cx, |project, cx| {
7344 project.apply_code_action(buffer.clone(), action, true, cx)
7345 });
7346
7347        // Resolving the code action does not populate its edits. In the absence of
7348        // edits, we must execute the given command.
7349 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7350 |action, _| async move { Ok(action) },
7351 );
7352
7353 // While executing the command, the language server sends the editor
7354        // a `workspace/applyEdit` request.
7355 fake_server
7356 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7357 let fake = fake_server.clone();
7358 move |params, _| {
7359 assert_eq!(params.command, "_the/command");
7360 let fake = fake.clone();
7361 async move {
7362 fake.server
7363 .request::<lsp::request::ApplyWorkspaceEdit>(
7364 lsp::ApplyWorkspaceEditParams {
7365 label: None,
7366 edit: lsp::WorkspaceEdit {
7367 changes: Some(
7368 [(
7369 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7370 vec![lsp::TextEdit {
7371 range: lsp::Range::new(
7372 lsp::Position::new(0, 0),
7373 lsp::Position::new(0, 0),
7374 ),
7375 new_text: "X".into(),
7376 }],
7377 )]
7378 .into_iter()
7379 .collect(),
7380 ),
7381 ..Default::default()
7382 },
7383 },
7384 )
7385 .await
7386 .unwrap();
7387 Ok(Some(json!(null)))
7388 }
7389 }
7390 })
7391 .next()
7392 .await;
7393
7394 // Applying the code action returns a project transaction containing the edits
7395        // sent by the language server in its `workspace/applyEdit` request.
7396 let transaction = apply.await.unwrap();
7397 assert!(transaction.0.contains_key(&buffer));
7398 buffer.update(cx, |buffer, cx| {
7399 assert_eq!(buffer.text(), "Xa");
7400 buffer.undo(cx);
7401 assert_eq!(buffer.text(), "a");
7402 });
7403 }
7404
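    // Verifies that saving a modified buffer writes its contents back to the file system.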
7405 #[gpui::test]
7406 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7407 let fs = FakeFs::new(cx.background());
7408 fs.insert_tree(
7409 "/dir",
7410 json!({
7411 "file1": "the old contents",
7412 }),
7413 )
7414 .await;
7415
7416 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7417 let buffer = project
7418 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7419 .await
7420 .unwrap();
7421 buffer
7422 .update(cx, |buffer, cx| {
7423 assert_eq!(buffer.text(), "the old contents");
7424 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7425 buffer.save(cx)
7426 })
7427 .await
7428 .unwrap();
7429
7430 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7431 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7432 }
7433
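    // Like `test_save_file`, but for a buffer opened in a single-file worktree.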
7434 #[gpui::test]
7435 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7436 let fs = FakeFs::new(cx.background());
7437 fs.insert_tree(
7438 "/dir",
7439 json!({
7440 "file1": "the old contents",
7441 }),
7442 )
7443 .await;
7444
7445 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7446 let buffer = project
7447 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7448 .await
7449 .unwrap();
7450 buffer
7451 .update(cx, |buffer, cx| {
7452 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7453 buffer.save(cx)
7454 })
7455 .await
7456 .unwrap();
7457
7458 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7459 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7460 }
7461
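    // Verifies "save as": the buffer is associated with the new path, marked clean, and later
    // opens of that path return the same buffer.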
7462 #[gpui::test]
7463 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7464 let fs = FakeFs::new(cx.background());
7465 fs.insert_tree("/dir", json!({})).await;
7466
7467 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7468 let buffer = project.update(cx, |project, cx| {
7469 project.create_buffer("", None, cx).unwrap()
7470 });
7471 buffer.update(cx, |buffer, cx| {
7472 buffer.edit([(0..0, "abc")], cx);
7473 assert!(buffer.is_dirty());
7474 assert!(!buffer.has_conflict());
7475 });
7476 project
7477 .update(cx, |project, cx| {
7478 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7479 })
7480 .await
7481 .unwrap();
7482 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7483 buffer.read_with(cx, |buffer, cx| {
7484 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7485 assert!(!buffer.is_dirty());
7486 assert!(!buffer.has_conflict());
7487 });
7488
7489 let opened_buffer = project
7490 .update(cx, |project, cx| {
7491 project.open_local_buffer("/dir/file1", cx)
7492 })
7493 .await
7494 .unwrap();
7495 assert_eq!(opened_buffer, buffer);
7496 }
7497
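    // Verifies that renames and deletions on disk preserve entry ids and buffer-file
    // associations, and that a remote worktree replica converges after applying the update.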
7498 #[gpui::test(retries = 5)]
7499 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7500 let dir = temp_tree(json!({
7501 "a": {
7502 "file1": "",
7503 "file2": "",
7504 "file3": "",
7505 },
7506 "b": {
7507 "c": {
7508 "file4": "",
7509 "file5": "",
7510 }
7511 }
7512 }));
7513
7514 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7515 let rpc = project.read_with(cx, |p, _| p.client.clone());
7516
7517 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7518 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7519 async move { buffer.await.unwrap() }
7520 };
7521 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7522 project.read_with(cx, |project, cx| {
7523 let tree = project.worktrees(cx).next().unwrap();
7524 tree.read(cx)
7525 .entry_for_path(path)
7526 .expect(&format!("no entry for path {}", path))
7527 .id
7528 })
7529 };
7530
7531 let buffer2 = buffer_for_path("a/file2", cx).await;
7532 let buffer3 = buffer_for_path("a/file3", cx).await;
7533 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7534 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7535
7536 let file2_id = id_for_path("a/file2", &cx);
7537 let file3_id = id_for_path("a/file3", &cx);
7538 let file4_id = id_for_path("b/c/file4", &cx);
7539
7540 // Create a remote copy of this worktree.
7541 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7542 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7543 let (remote, load_task) = cx.update(|cx| {
7544 Worktree::remote(
7545 1,
7546 1,
7547 initial_snapshot.to_proto(&Default::default(), true),
7548 rpc.clone(),
7549 cx,
7550 )
7551 });
7553 load_task.await;
7554
7555 cx.read(|cx| {
7556 assert!(!buffer2.read(cx).is_dirty());
7557 assert!(!buffer3.read(cx).is_dirty());
7558 assert!(!buffer4.read(cx).is_dirty());
7559 assert!(!buffer5.read(cx).is_dirty());
7560 });
7561
7562 // Rename and delete files and directories.
7563 tree.flush_fs_events(&cx).await;
7564 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7565 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7566 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7567 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7568 tree.flush_fs_events(&cx).await;
7569
7570 let expected_paths = vec![
7571 "a",
7572 "a/file1",
7573 "a/file2.new",
7574 "b",
7575 "d",
7576 "d/file3",
7577 "d/file4",
7578 ];
7579
7580 cx.read(|app| {
7581 assert_eq!(
7582 tree.read(app)
7583 .paths()
7584 .map(|p| p.to_str().unwrap())
7585 .collect::<Vec<_>>(),
7586 expected_paths
7587 );
7588
7589 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7590 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7591 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7592
7593 assert_eq!(
7594 buffer2.read(app).file().unwrap().path().as_ref(),
7595 Path::new("a/file2.new")
7596 );
7597 assert_eq!(
7598 buffer3.read(app).file().unwrap().path().as_ref(),
7599 Path::new("d/file3")
7600 );
7601 assert_eq!(
7602 buffer4.read(app).file().unwrap().path().as_ref(),
7603 Path::new("d/file4")
7604 );
7605 assert_eq!(
7606 buffer5.read(app).file().unwrap().path().as_ref(),
7607 Path::new("b/c/file5")
7608 );
7609
7610 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7611 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7612 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7613 assert!(buffer5.read(app).file().unwrap().is_deleted());
7614 });
7615
7616 // Update the remote worktree. Check that it becomes consistent with the
7617 // local worktree.
7618 remote.update(cx, |remote, cx| {
7619 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7620 &initial_snapshot,
7621 1,
7622 1,
7623 true,
7624 );
7625 remote
7626 .as_remote_mut()
7627 .unwrap()
7628 .snapshot
7629 .apply_remote_update(update_message)
7630 .unwrap();
7631
7632 assert_eq!(
7633 remote
7634 .paths()
7635 .map(|p| p.to_str().unwrap())
7636 .collect::<Vec<_>>(),
7637 expected_paths
7638 );
7639 });
7640 }
7641
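    // Verifies that concurrent and repeated opens of the same path resolve to a single buffer.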
7642 #[gpui::test]
7643 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7644 let fs = FakeFs::new(cx.background());
7645 fs.insert_tree(
7646 "/dir",
7647 json!({
7648 "a.txt": "a-contents",
7649 "b.txt": "b-contents",
7650 }),
7651 )
7652 .await;
7653
7654 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7655
7656 // Spawn multiple tasks to open paths, repeating some paths.
7657 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7658 (
7659 p.open_local_buffer("/dir/a.txt", cx),
7660 p.open_local_buffer("/dir/b.txt", cx),
7661 p.open_local_buffer("/dir/a.txt", cx),
7662 )
7663 });
7664
7665 let buffer_a_1 = buffer_a_1.await.unwrap();
7666 let buffer_a_2 = buffer_a_2.await.unwrap();
7667 let buffer_b = buffer_b.await.unwrap();
7668 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7669 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7670
7671 // There is only one buffer per path.
7672 let buffer_a_id = buffer_a_1.id();
7673 assert_eq!(buffer_a_2.id(), buffer_a_id);
7674
7675 // Open the same path again while it is still open.
7676 drop(buffer_a_1);
7677 let buffer_a_3 = project
7678 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7679 .await
7680 .unwrap();
7681
7682 // There's still only one buffer per path.
7683 assert_eq!(buffer_a_3.id(), buffer_a_id);
7684 }
7685
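    // Verifies the buffer's dirty state and the events emitted across edits, saves, and
    // file deletion.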
7686 #[gpui::test]
7687 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7688 let fs = FakeFs::new(cx.background());
7689 fs.insert_tree(
7690 "/dir",
7691 json!({
7692 "file1": "abc",
7693 "file2": "def",
7694 "file3": "ghi",
7695 }),
7696 )
7697 .await;
7698
7699 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7700
7701 let buffer1 = project
7702 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7703 .await
7704 .unwrap();
7705 let events = Rc::new(RefCell::new(Vec::new()));
7706
        // Subscribe to the buffer's events. Initially, the buffer isn't dirty.
7708 buffer1.update(cx, |buffer, cx| {
7709 cx.subscribe(&buffer1, {
7710 let events = events.clone();
7711 move |_, _, event, _| match event {
7712 BufferEvent::Operation(_) => {}
7713 _ => events.borrow_mut().push(event.clone()),
7714 }
7715 })
7716 .detach();
7717
7718 assert!(!buffer.is_dirty());
7719 assert!(events.borrow().is_empty());
7720
7721 buffer.edit([(1..2, "")], cx);
7722 });
7723
7724 // after the first edit, the buffer is dirty, and emits a dirtied event.
7725 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
7727 assert!(buffer.is_dirty());
7728 assert_eq!(
7729 *events.borrow(),
7730 &[language::Event::Edited, language::Event::Dirtied]
7731 );
7732 events.borrow_mut().clear();
7733 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7734 });
7735
7736 // after saving, the buffer is not dirty, and emits a saved event.
7737 buffer1.update(cx, |buffer, cx| {
7738 assert!(!buffer.is_dirty());
7739 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7740 events.borrow_mut().clear();
7741
7742 buffer.edit([(1..1, "B")], cx);
7743 buffer.edit([(2..2, "D")], cx);
7744 });
7745
        // after editing again, the buffer is dirty, and emits another dirtied event.
7747 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
7749 assert!(buffer.is_dirty());
7750 assert_eq!(
7751 *events.borrow(),
7752 &[
7753 language::Event::Edited,
7754 language::Event::Dirtied,
7755 language::Event::Edited,
7756 ],
7757 );
7758 events.borrow_mut().clear();
7759
            // TODO - currently, after restoring the buffer to its
            // previously saved state, the buffer is still considered dirty.
7762 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
7764 assert!(buffer.is_dirty());
7765 });
7766
7767 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7768
7769 // When a file is deleted, the buffer is considered dirty.
7770 let events = Rc::new(RefCell::new(Vec::new()));
7771 let buffer2 = project
7772 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7773 .await
7774 .unwrap();
7775 buffer2.update(cx, |_, cx| {
7776 cx.subscribe(&buffer2, {
7777 let events = events.clone();
7778 move |_, _, event, _| events.borrow_mut().push(event.clone())
7779 })
7780 .detach();
7781 });
7782
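        // Delete the file on disk; the clean buffer becomes dirty and reports that
        // its file handle changed.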
7783 fs.remove_file("/dir/file2".as_ref(), Default::default())
7784 .await
7785 .unwrap();
7786 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7787 assert_eq!(
7788 *events.borrow(),
7789 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7790 );
7791
        // When a buffer is already dirty and its file is then deleted, no Dirtied
        // event is emitted for the deletion.
7793 let events = Rc::new(RefCell::new(Vec::new()));
7794 let buffer3 = project
7795 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7796 .await
7797 .unwrap();
7798 buffer3.update(cx, |_, cx| {
7799 cx.subscribe(&buffer3, {
7800 let events = events.clone();
7801 move |_, _, event, _| events.borrow_mut().push(event.clone())
7802 })
7803 .detach();
7804 });
7805
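        // Make the buffer dirty first, then delete its file; only a
        // FileHandleChanged event should follow, and the buffer stays dirty.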
7806 buffer3.update(cx, |buffer, cx| {
7807 buffer.edit([(0..0, "x")], cx);
7808 });
7809 events.borrow_mut().clear();
7810 fs.remove_file("/dir/file3".as_ref(), Default::default())
7811 .await
7812 .unwrap();
7813 buffer3
7814 .condition(&cx, |_, _| !events.borrow().is_empty())
7815 .await;
7816 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7817 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7818 }
7819
7820 #[gpui::test]
7821 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7822 let initial_contents = "aaa\nbbbbb\nc\n";
7823 let fs = FakeFs::new(cx.background());
7824 fs.insert_tree(
7825 "/dir",
7826 json!({
7827 "the-file": initial_contents,
7828 }),
7829 )
7830 .await;
7831 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7832 let buffer = project
7833 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7834 .await
7835 .unwrap();
7836
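        // Place an anchor near the start of each of the three lines so we can check
        // how they are repositioned when the file is reloaded from disk.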
7837 let anchors = (0..3)
7838 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7839 .collect::<Vec<_>>();
7840
        // Verify the buffer starts out clean, then change the file on disk,
        // adding two new lines of text and removing one line.
7843 buffer.read_with(cx, |buffer, _| {
7844 assert!(!buffer.is_dirty());
7845 assert!(!buffer.has_conflict());
7846 });
7847 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7848 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7849 .await
7850 .unwrap();
7851
7852 // Because the buffer was not modified, it is reloaded from disk. Its
7853 // contents are edited according to the diff between the old and new
7854 // file contents.
7855 buffer
7856 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7857 .await;
7858
7859 buffer.update(cx, |buffer, _| {
7860 assert_eq!(buffer.text(), new_contents);
7861 assert!(!buffer.is_dirty());
7862 assert!(!buffer.has_conflict());
7863
7864 let anchor_positions = anchors
7865 .iter()
7866 .map(|anchor| anchor.to_point(&*buffer))
7867 .collect::<Vec<_>>();
7868 assert_eq!(
7869 anchor_positions,
7870 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7871 );
7872 });
7873
        // Modify the buffer, making it dirty.
7875 buffer.update(cx, |buffer, cx| {
7876 buffer.edit([(0..0, " ")], cx);
7877 assert!(buffer.is_dirty());
7878 assert!(!buffer.has_conflict());
7879 });
7880
7881 // Change the file on disk again, adding blank lines to the beginning.
7882 fs.save(
7883 "/dir/the-file".as_ref(),
7884 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7885 )
7886 .await
7887 .unwrap();
7888
7889 // Because the buffer is modified, it doesn't reload from disk, but is
7890 // marked as having a conflict.
7891 buffer
7892 .condition(&cx, |buffer, _| buffer.has_conflict())
7893 .await;
7894 }
7895
7896 #[gpui::test]
7897 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7898 cx.foreground().forbid_parking();
7899
7900 let fs = FakeFs::new(cx.background());
7901 fs.insert_tree(
7902 "/the-dir",
7903 json!({
7904 "a.rs": "
7905 fn foo(mut v: Vec<usize>) {
7906 for x in &v {
7907 v.push(1);
7908 }
7909 }
7910 "
7911 .unindent(),
7912 }),
7913 )
7914 .await;
7915
7916 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7917 let buffer = project
7918 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7919 .await
7920 .unwrap();
7921
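        // Build a publishDiagnostics payload containing two diagnostic groups: a
        // warning ("error 1") with one related hint, and an error ("error 2") with
        // two related hints.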
7922 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7923 let message = lsp::PublishDiagnosticsParams {
7924 uri: buffer_uri.clone(),
7925 diagnostics: vec![
7926 lsp::Diagnostic {
7927 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7928 severity: Some(DiagnosticSeverity::WARNING),
7929 message: "error 1".to_string(),
7930 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7931 location: lsp::Location {
7932 uri: buffer_uri.clone(),
7933 range: lsp::Range::new(
7934 lsp::Position::new(1, 8),
7935 lsp::Position::new(1, 9),
7936 ),
7937 },
7938 message: "error 1 hint 1".to_string(),
7939 }]),
7940 ..Default::default()
7941 },
7942 lsp::Diagnostic {
7943 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7944 severity: Some(DiagnosticSeverity::HINT),
7945 message: "error 1 hint 1".to_string(),
7946 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7947 location: lsp::Location {
7948 uri: buffer_uri.clone(),
7949 range: lsp::Range::new(
7950 lsp::Position::new(1, 8),
7951 lsp::Position::new(1, 9),
7952 ),
7953 },
7954 message: "original diagnostic".to_string(),
7955 }]),
7956 ..Default::default()
7957 },
7958 lsp::Diagnostic {
7959 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7960 severity: Some(DiagnosticSeverity::ERROR),
7961 message: "error 2".to_string(),
7962 related_information: Some(vec![
7963 lsp::DiagnosticRelatedInformation {
7964 location: lsp::Location {
7965 uri: buffer_uri.clone(),
7966 range: lsp::Range::new(
7967 lsp::Position::new(1, 13),
7968 lsp::Position::new(1, 15),
7969 ),
7970 },
7971 message: "error 2 hint 1".to_string(),
7972 },
7973 lsp::DiagnosticRelatedInformation {
7974 location: lsp::Location {
7975 uri: buffer_uri.clone(),
7976 range: lsp::Range::new(
7977 lsp::Position::new(1, 13),
7978 lsp::Position::new(1, 15),
7979 ),
7980 },
7981 message: "error 2 hint 2".to_string(),
7982 },
7983 ]),
7984 ..Default::default()
7985 },
7986 lsp::Diagnostic {
7987 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7988 severity: Some(DiagnosticSeverity::HINT),
7989 message: "error 2 hint 1".to_string(),
7990 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7991 location: lsp::Location {
7992 uri: buffer_uri.clone(),
7993 range: lsp::Range::new(
7994 lsp::Position::new(2, 8),
7995 lsp::Position::new(2, 17),
7996 ),
7997 },
7998 message: "original diagnostic".to_string(),
7999 }]),
8000 ..Default::default()
8001 },
8002 lsp::Diagnostic {
8003 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8004 severity: Some(DiagnosticSeverity::HINT),
8005 message: "error 2 hint 2".to_string(),
8006 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8007 location: lsp::Location {
8008 uri: buffer_uri.clone(),
8009 range: lsp::Range::new(
8010 lsp::Position::new(2, 8),
8011 lsp::Position::new(2, 17),
8012 ),
8013 },
8014 message: "original diagnostic".to_string(),
8015 }]),
8016 ..Default::default()
8017 },
8018 ],
8019 version: None,
8020 };
8021
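        // Feed the diagnostics to the project. Related diagnostics should be grouped
        // together, each entry carrying a group_id and an is_primary flag.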
8022 project
8023 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
8024 .unwrap();
8025 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8026
8027 assert_eq!(
8028 buffer
8029 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8030 .collect::<Vec<_>>(),
8031 &[
8032 DiagnosticEntry {
8033 range: Point::new(1, 8)..Point::new(1, 9),
8034 diagnostic: Diagnostic {
8035 severity: DiagnosticSeverity::WARNING,
8036 message: "error 1".to_string(),
8037 group_id: 0,
8038 is_primary: true,
8039 ..Default::default()
8040 }
8041 },
8042 DiagnosticEntry {
8043 range: Point::new(1, 8)..Point::new(1, 9),
8044 diagnostic: Diagnostic {
8045 severity: DiagnosticSeverity::HINT,
8046 message: "error 1 hint 1".to_string(),
8047 group_id: 0,
8048 is_primary: false,
8049 ..Default::default()
8050 }
8051 },
8052 DiagnosticEntry {
8053 range: Point::new(1, 13)..Point::new(1, 15),
8054 diagnostic: Diagnostic {
8055 severity: DiagnosticSeverity::HINT,
8056 message: "error 2 hint 1".to_string(),
8057 group_id: 1,
8058 is_primary: false,
8059 ..Default::default()
8060 }
8061 },
8062 DiagnosticEntry {
8063 range: Point::new(1, 13)..Point::new(1, 15),
8064 diagnostic: Diagnostic {
8065 severity: DiagnosticSeverity::HINT,
8066 message: "error 2 hint 2".to_string(),
8067 group_id: 1,
8068 is_primary: false,
8069 ..Default::default()
8070 }
8071 },
8072 DiagnosticEntry {
8073 range: Point::new(2, 8)..Point::new(2, 17),
8074 diagnostic: Diagnostic {
8075 severity: DiagnosticSeverity::ERROR,
8076 message: "error 2".to_string(),
8077 group_id: 1,
8078 is_primary: true,
8079 ..Default::default()
8080 }
8081 }
8082 ]
8083 );
8084
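        // Fetching a diagnostic group by id returns the primary diagnostic together
        // with its supporting hints.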
8085 assert_eq!(
8086 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8087 &[
8088 DiagnosticEntry {
8089 range: Point::new(1, 8)..Point::new(1, 9),
8090 diagnostic: Diagnostic {
8091 severity: DiagnosticSeverity::WARNING,
8092 message: "error 1".to_string(),
8093 group_id: 0,
8094 is_primary: true,
8095 ..Default::default()
8096 }
8097 },
8098 DiagnosticEntry {
8099 range: Point::new(1, 8)..Point::new(1, 9),
8100 diagnostic: Diagnostic {
8101 severity: DiagnosticSeverity::HINT,
8102 message: "error 1 hint 1".to_string(),
8103 group_id: 0,
8104 is_primary: false,
8105 ..Default::default()
8106 }
8107 },
8108 ]
8109 );
8110 assert_eq!(
8111 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8112 &[
8113 DiagnosticEntry {
8114 range: Point::new(1, 13)..Point::new(1, 15),
8115 diagnostic: Diagnostic {
8116 severity: DiagnosticSeverity::HINT,
8117 message: "error 2 hint 1".to_string(),
8118 group_id: 1,
8119 is_primary: false,
8120 ..Default::default()
8121 }
8122 },
8123 DiagnosticEntry {
8124 range: Point::new(1, 13)..Point::new(1, 15),
8125 diagnostic: Diagnostic {
8126 severity: DiagnosticSeverity::HINT,
8127 message: "error 2 hint 2".to_string(),
8128 group_id: 1,
8129 is_primary: false,
8130 ..Default::default()
8131 }
8132 },
8133 DiagnosticEntry {
8134 range: Point::new(2, 8)..Point::new(2, 17),
8135 diagnostic: Diagnostic {
8136 severity: DiagnosticSeverity::ERROR,
8137 message: "error 2".to_string(),
8138 group_id: 1,
8139 is_primary: true,
8140 ..Default::default()
8141 }
8142 }
8143 ]
8144 );
8145 }
8146
8147 #[gpui::test]
8148 async fn test_rename(cx: &mut gpui::TestAppContext) {
8149 cx.foreground().forbid_parking();
8150
8151 let mut language = Language::new(
8152 LanguageConfig {
8153 name: "Rust".into(),
8154 path_suffixes: vec!["rs".to_string()],
8155 ..Default::default()
8156 },
8157 Some(tree_sitter_rust::language()),
8158 );
8159 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8160 capabilities: lsp::ServerCapabilities {
8161 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8162 prepare_provider: Some(true),
8163 work_done_progress_options: Default::default(),
8164 })),
8165 ..Default::default()
8166 },
8167 ..Default::default()
8168 });
8169
8170 let fs = FakeFs::new(cx.background());
8171 fs.insert_tree(
8172 "/dir",
8173 json!({
8174 "one.rs": "const ONE: usize = 1;",
8175 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8176 }),
8177 )
8178 .await;
8179
8180 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8181 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
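        // Register the Rust language; opening one.rs will start the fake language
        // server configured above.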
8182 let buffer = project
8183 .update(cx, |project, cx| {
8184 project.open_local_buffer("/dir/one.rs", cx)
8185 })
8186 .await
8187 .unwrap();
8188
8189 let fake_server = fake_servers.next().await.unwrap();
8190
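        // Prepare a rename at offset 7, inside the identifier `ONE`; the fake server
        // replies with the full range of the symbol.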
8191 let response = project.update(cx, |project, cx| {
8192 project.prepare_rename(buffer.clone(), 7, cx)
8193 });
8194 fake_server
8195 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8196 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8197 assert_eq!(params.position, lsp::Position::new(0, 7));
8198 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8199 lsp::Position::new(0, 6),
8200 lsp::Position::new(0, 9),
8201 ))))
8202 })
8203 .next()
8204 .await
8205 .unwrap();
8206 let range = response.await.unwrap().unwrap();
8207 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8208 assert_eq!(range, 6..9);
8209
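        // Perform the rename. The fake server responds with a workspace edit spanning
        // one.rs and two.rs; the returned transaction should contain one entry per
        // edited buffer.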
8210 let response = project.update(cx, |project, cx| {
8211 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8212 });
8213 fake_server
8214 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8215 assert_eq!(
8216 params.text_document_position.text_document.uri.as_str(),
8217 "file:///dir/one.rs"
8218 );
8219 assert_eq!(
8220 params.text_document_position.position,
8221 lsp::Position::new(0, 7)
8222 );
8223 assert_eq!(params.new_name, "THREE");
8224 Ok(Some(lsp::WorkspaceEdit {
8225 changes: Some(
8226 [
8227 (
8228 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8229 vec![lsp::TextEdit::new(
8230 lsp::Range::new(
8231 lsp::Position::new(0, 6),
8232 lsp::Position::new(0, 9),
8233 ),
8234 "THREE".to_string(),
8235 )],
8236 ),
8237 (
8238 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8239 vec![
8240 lsp::TextEdit::new(
8241 lsp::Range::new(
8242 lsp::Position::new(0, 24),
8243 lsp::Position::new(0, 27),
8244 ),
8245 "THREE".to_string(),
8246 ),
8247 lsp::TextEdit::new(
8248 lsp::Range::new(
8249 lsp::Position::new(0, 35),
8250 lsp::Position::new(0, 38),
8251 ),
8252 "THREE".to_string(),
8253 ),
8254 ],
8255 ),
8256 ]
8257 .into_iter()
8258 .collect(),
8259 ),
8260 ..Default::default()
8261 }))
8262 })
8263 .next()
8264 .await
8265 .unwrap();
8266 let mut transaction = response.await.unwrap().0;
8267 assert_eq!(transaction.len(), 2);
8268 assert_eq!(
8269 transaction
8270 .remove_entry(&buffer)
8271 .unwrap()
8272 .0
8273 .read_with(cx, |buffer, _| buffer.text()),
8274 "const THREE: usize = 1;"
8275 );
8276 assert_eq!(
8277 transaction
8278 .into_keys()
8279 .next()
8280 .unwrap()
8281 .read_with(cx, |buffer, _| buffer.text()),
8282 "const TWO: usize = one::THREE + one::THREE;"
8283 );
8284 }
8285
8286 #[gpui::test]
8287 async fn test_search(cx: &mut gpui::TestAppContext) {
8288 let fs = FakeFs::new(cx.background());
8289 fs.insert_tree(
8290 "/dir",
8291 json!({
8292 "one.rs": "const ONE: usize = 1;",
8293 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8294 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8295 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8296 }),
8297 )
8298 .await;
8299 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
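        // Search before any buffers are open; matches come from the file contents
        // on disk.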
8300 assert_eq!(
8301 search(&project, SearchQuery::text("TWO", false, true), cx)
8302 .await
8303 .unwrap(),
8304 HashMap::from_iter([
8305 ("two.rs".to_string(), vec![6..9]),
8306 ("three.rs".to_string(), vec![37..40])
8307 ])
8308 );
8309
8310 let buffer_4 = project
8311 .update(cx, |project, cx| {
8312 project.open_local_buffer("/dir/four.rs", cx)
8313 })
8314 .await
8315 .unwrap();
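        // Edit the open buffer for four.rs so that its in-memory contents differ
        // from the file on disk.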
8316 buffer_4.update(cx, |buffer, cx| {
8317 let text = "two::TWO";
8318 buffer.edit([(20..28, text), (31..43, text)], cx);
8319 });
8320
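        // The new search reflects the buffer's unsaved edits to four.rs while still
        // reporting the on-disk matches for the other files.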
8321 assert_eq!(
8322 search(&project, SearchQuery::text("TWO", false, true), cx)
8323 .await
8324 .unwrap(),
8325 HashMap::from_iter([
8326 ("two.rs".to_string(), vec![6..9]),
8327 ("three.rs".to_string(), vec![37..40]),
8328 ("four.rs".to_string(), vec![25..28, 36..39])
8329 ])
8330 );
8331
8332 async fn search(
8333 project: &ModelHandle<Project>,
8334 query: SearchQuery,
8335 cx: &mut gpui::TestAppContext,
8336 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8337 let results = project
8338 .update(cx, |project, cx| project.search(query, cx))
8339 .await?;
8340
8341 Ok(results
8342 .into_iter()
8343 .map(|(buffer, ranges)| {
8344 buffer.read_with(cx, |buffer, _| {
8345 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8346 let ranges = ranges
8347 .into_iter()
8348 .map(|range| range.to_offset(buffer))
8349 .collect::<Vec<_>>();
8350 (path, ranges)
8351 })
8352 })
8353 .collect())
8354 }
8355 }
8356}