1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, HighlightId,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102 initialized_persistent_state: bool,
103}
104
105#[derive(Error, Debug)]
106pub enum JoinProjectError {
107 #[error("host declined join request")]
108 HostDeclined,
109 #[error("host closed the project")]
110 HostClosedProject,
111 #[error("host went offline")]
112 HostWentOffline,
113 #[error("{0}")]
114 Other(#[from] anyhow::Error),
115}
116
117enum OpenBuffer {
118 Strong(ModelHandle<Buffer>),
119 Weak(WeakModelHandle<Buffer>),
120 Loading(Vec<Operation>),
121}
122
123enum WorktreeHandle {
124 Strong(ModelHandle<Worktree>),
125 Weak(WeakModelHandle<Worktree>),
126}
127
128enum ProjectClientState {
129 Local {
130 is_shared: bool,
131 remote_id_tx: watch::Sender<Option<u64>>,
132 remote_id_rx: watch::Receiver<Option<u64>>,
133 online_tx: watch::Sender<bool>,
134 online_rx: watch::Receiver<bool>,
135 _maintain_remote_id_task: Task<Option<()>>,
136 },
137 Remote {
138 sharing_has_stopped: bool,
139 remote_id: u64,
140 replica_id: ReplicaId,
141 _detect_unshare_task: Task<Option<()>>,
142 },
143}
144
145#[derive(Clone, Debug)]
146pub struct Collaborator {
147 pub user: Arc<User>,
148 pub peer_id: PeerId,
149 pub replica_id: ReplicaId,
150}
151
152#[derive(Clone, Debug, PartialEq, Eq)]
153pub enum Event {
154 ActiveEntryChanged(Option<ProjectEntryId>),
155 WorktreeAdded,
156 WorktreeRemoved(WorktreeId),
157 DiskBasedDiagnosticsStarted,
158 DiskBasedDiagnosticsUpdated,
159 DiskBasedDiagnosticsFinished,
160 DiagnosticsUpdated(ProjectPath),
161 RemoteIdChanged(Option<u64>),
162 CollaboratorLeft(PeerId),
163 ContactRequestedJoin(Arc<User>),
164 ContactCancelledJoinRequest(Arc<User>),
165}
166
167#[derive(Serialize)]
168pub struct LanguageServerStatus {
169 pub name: String,
170 pub pending_work: BTreeMap<String, LanguageServerProgress>,
171 pub pending_diagnostic_updates: isize,
172}
173
174#[derive(Clone, Debug, Serialize)]
175pub struct LanguageServerProgress {
176 pub message: Option<String>,
177 pub percentage: Option<usize>,
178 #[serde(skip_serializing)]
179 pub last_update_at: Instant,
180}
181
182#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
183pub struct ProjectPath {
184 pub worktree_id: WorktreeId,
185 pub path: Arc<Path>,
186}
187
188#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
189pub struct DiagnosticSummary {
190 pub error_count: usize,
191 pub warning_count: usize,
192}
193
194#[derive(Debug)]
195pub struct Location {
196 pub buffer: ModelHandle<Buffer>,
197 pub range: Range<language::Anchor>,
198}
199
200#[derive(Debug)]
201pub struct DocumentHighlight {
202 pub range: Range<language::Anchor>,
203 pub kind: DocumentHighlightKind,
204}
205
206#[derive(Clone, Debug)]
207pub struct Symbol {
208 pub source_worktree_id: WorktreeId,
209 pub worktree_id: WorktreeId,
210 pub language_server_name: LanguageServerName,
211 pub path: PathBuf,
212 pub label: CodeLabel,
213 pub name: String,
214 pub kind: lsp::SymbolKind,
215 pub range: Range<PointUtf16>,
216 pub signature: [u8; 32],
217}
218
219#[derive(Debug)]
220pub struct HoverContents {
221 pub text: String,
222 pub runs: Vec<(Range<usize>, HighlightId)>,
223}
224
225#[derive(Debug)]
226pub struct Hover {
227 pub contents: Vec<HoverContents>,
228 pub range: Option<Range<language::Anchor>>,
229}
230
231#[derive(Default)]
232pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
233
234impl DiagnosticSummary {
235 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
236 let mut this = Self {
237 error_count: 0,
238 warning_count: 0,
239 };
240
241 for entry in diagnostics {
242 if entry.diagnostic.is_primary {
243 match entry.diagnostic.severity {
244 DiagnosticSeverity::ERROR => this.error_count += 1,
245 DiagnosticSeverity::WARNING => this.warning_count += 1,
246 _ => {}
247 }
248 }
249 }
250
251 this
252 }
253
254 pub fn is_empty(&self) -> bool {
255 self.error_count == 0 && self.warning_count == 0
256 }
257
258 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
259 proto::DiagnosticSummary {
260 path: path.to_string_lossy().to_string(),
261 error_count: self.error_count as u32,
262 warning_count: self.warning_count as u32,
263 }
264 }
265}
266
267#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
268pub struct ProjectEntryId(usize);
269
270impl ProjectEntryId {
271 pub const MAX: Self = Self(usize::MAX);
272
273 pub fn new(counter: &AtomicUsize) -> Self {
274 Self(counter.fetch_add(1, SeqCst))
275 }
276
277 pub fn from_proto(id: u64) -> Self {
278 Self(id as usize)
279 }
280
281 pub fn to_proto(&self) -> u64 {
282 self.0 as u64
283 }
284
285 pub fn to_usize(&self) -> usize {
286 self.0
287 }
288}
289
290impl Project {
291 pub fn init(client: &Arc<Client>) {
292 client.add_model_message_handler(Self::handle_request_join_project);
293 client.add_model_message_handler(Self::handle_add_collaborator);
294 client.add_model_message_handler(Self::handle_buffer_reloaded);
295 client.add_model_message_handler(Self::handle_buffer_saved);
296 client.add_model_message_handler(Self::handle_start_language_server);
297 client.add_model_message_handler(Self::handle_update_language_server);
298 client.add_model_message_handler(Self::handle_remove_collaborator);
299 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
300 client.add_model_message_handler(Self::handle_update_project);
301 client.add_model_message_handler(Self::handle_unregister_project);
302 client.add_model_message_handler(Self::handle_project_unshared);
303 client.add_model_message_handler(Self::handle_update_buffer_file);
304 client.add_model_message_handler(Self::handle_update_buffer);
305 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
306 client.add_model_message_handler(Self::handle_update_worktree);
307 client.add_model_request_handler(Self::handle_create_project_entry);
308 client.add_model_request_handler(Self::handle_rename_project_entry);
309 client.add_model_request_handler(Self::handle_copy_project_entry);
310 client.add_model_request_handler(Self::handle_delete_project_entry);
311 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
312 client.add_model_request_handler(Self::handle_apply_code_action);
313 client.add_model_request_handler(Self::handle_reload_buffers);
314 client.add_model_request_handler(Self::handle_format_buffers);
315 client.add_model_request_handler(Self::handle_get_code_actions);
316 client.add_model_request_handler(Self::handle_get_completions);
317 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
318 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
319 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
320 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
321 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
322 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
323 client.add_model_request_handler(Self::handle_search_project);
324 client.add_model_request_handler(Self::handle_get_project_symbols);
325 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
326 client.add_model_request_handler(Self::handle_open_buffer_by_id);
327 client.add_model_request_handler(Self::handle_open_buffer_by_path);
328 client.add_model_request_handler(Self::handle_save_buffer);
329 }
330
331 pub fn local(
332 online: bool,
333 client: Arc<Client>,
334 user_store: ModelHandle<UserStore>,
335 project_store: ModelHandle<ProjectStore>,
336 languages: Arc<LanguageRegistry>,
337 fs: Arc<dyn Fs>,
338 cx: &mut MutableAppContext,
339 ) -> ModelHandle<Self> {
340 cx.add_model(|cx: &mut ModelContext<Self>| {
341 let (online_tx, online_rx) = watch::channel_with(online);
342 let (remote_id_tx, remote_id_rx) = watch::channel();
343 let _maintain_remote_id_task = cx.spawn_weak({
344 let status_rx = client.clone().status();
345 let online_rx = online_rx.clone();
346 move |this, mut cx| async move {
347 let mut stream = Stream::map(status_rx.clone(), drop)
348 .merge(Stream::map(online_rx.clone(), drop));
349 while stream.recv().await.is_some() {
350 let this = this.upgrade(&cx)?;
351 if status_rx.borrow().is_connected() && *online_rx.borrow() {
352 this.update(&mut cx, |this, cx| this.register(cx))
353 .await
354 .log_err()?;
355 } else {
356 this.update(&mut cx, |this, cx| this.unregister(cx))
357 .await
358 .log_err();
359 }
360 }
361 None
362 }
363 });
364
365 let handle = cx.weak_handle();
366 project_store.update(cx, |store, cx| store.add_project(handle, cx));
367
368 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
369 Self {
370 worktrees: Default::default(),
371 collaborators: Default::default(),
372 opened_buffers: Default::default(),
373 shared_buffers: Default::default(),
374 loading_buffers: Default::default(),
375 loading_local_worktrees: Default::default(),
376 buffer_snapshots: Default::default(),
377 client_state: ProjectClientState::Local {
378 is_shared: false,
379 remote_id_tx,
380 remote_id_rx,
381 online_tx,
382 online_rx,
383 _maintain_remote_id_task,
384 },
385 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
386 subscriptions: Vec::new(),
387 active_entry: None,
388 languages,
389 client,
390 user_store,
391 project_store,
392 fs,
393 next_entry_id: Default::default(),
394 next_diagnostic_group_id: Default::default(),
395 language_servers: Default::default(),
396 started_language_servers: Default::default(),
397 language_server_statuses: Default::default(),
398 last_workspace_edits_by_language_server: Default::default(),
399 language_server_settings: Default::default(),
400 next_language_server_id: 0,
401 nonce: StdRng::from_entropy().gen(),
402 initialized_persistent_state: false,
403 }
404 })
405 }
406
407 pub async fn remote(
408 remote_id: u64,
409 client: Arc<Client>,
410 user_store: ModelHandle<UserStore>,
411 project_store: ModelHandle<ProjectStore>,
412 languages: Arc<LanguageRegistry>,
413 fs: Arc<dyn Fs>,
414 mut cx: AsyncAppContext,
415 ) -> Result<ModelHandle<Self>, JoinProjectError> {
416 client.authenticate_and_connect(true, &cx).await?;
417
418 let response = client
419 .request(proto::JoinProject {
420 project_id: remote_id,
421 })
422 .await?;
423
424 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
425 proto::join_project_response::Variant::Accept(response) => response,
426 proto::join_project_response::Variant::Decline(decline) => {
427 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
428 Some(proto::join_project_response::decline::Reason::Declined) => {
429 Err(JoinProjectError::HostDeclined)?
430 }
431 Some(proto::join_project_response::decline::Reason::Closed) => {
432 Err(JoinProjectError::HostClosedProject)?
433 }
434 Some(proto::join_project_response::decline::Reason::WentOffline) => {
435 Err(JoinProjectError::HostWentOffline)?
436 }
437 None => Err(anyhow!("missing decline reason"))?,
438 }
439 }
440 };
441
442 let replica_id = response.replica_id as ReplicaId;
443
444 let mut worktrees = Vec::new();
445 for worktree in response.worktrees {
446 let (worktree, load_task) = cx
447 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
448 worktrees.push(worktree);
449 load_task.detach();
450 }
451
452 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
453 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
454 let handle = cx.weak_handle();
455 project_store.update(cx, |store, cx| store.add_project(handle, cx));
456
457 let mut this = Self {
458 worktrees: Vec::new(),
459 loading_buffers: Default::default(),
460 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
461 shared_buffers: Default::default(),
462 loading_local_worktrees: Default::default(),
463 active_entry: None,
464 collaborators: Default::default(),
465 languages,
466 user_store: user_store.clone(),
467 project_store,
468 fs,
469 next_entry_id: Default::default(),
470 next_diagnostic_group_id: Default::default(),
471 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
472 client: client.clone(),
473 client_state: ProjectClientState::Remote {
474 sharing_has_stopped: false,
475 remote_id,
476 replica_id,
477 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
478 async move {
479 let mut status = client.status();
480 let is_connected =
481 status.next().await.map_or(false, |s| s.is_connected());
482 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
483 if !is_connected || status.next().await.is_some() {
484 if let Some(this) = this.upgrade(&cx) {
485 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
486 }
487 }
488 Ok(())
489 }
490 .log_err()
491 }),
492 },
493 language_servers: Default::default(),
494 started_language_servers: Default::default(),
495 language_server_settings: Default::default(),
496 language_server_statuses: response
497 .language_servers
498 .into_iter()
499 .map(|server| {
500 (
501 server.id as usize,
502 LanguageServerStatus {
503 name: server.name,
504 pending_work: Default::default(),
505 pending_diagnostic_updates: 0,
506 },
507 )
508 })
509 .collect(),
510 last_workspace_edits_by_language_server: Default::default(),
511 next_language_server_id: 0,
512 opened_buffers: Default::default(),
513 buffer_snapshots: Default::default(),
514 nonce: StdRng::from_entropy().gen(),
515 initialized_persistent_state: false,
516 };
517 for worktree in worktrees {
518 this.add_worktree(&worktree, cx);
519 }
520 this
521 });
522
523 let user_ids = response
524 .collaborators
525 .iter()
526 .map(|peer| peer.user_id)
527 .collect();
528 user_store
529 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
530 .await?;
531 let mut collaborators = HashMap::default();
532 for message in response.collaborators {
533 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
534 collaborators.insert(collaborator.peer_id, collaborator);
535 }
536
537 this.update(&mut cx, |this, _| {
538 this.collaborators = collaborators;
539 });
540
541 Ok(this)
542 }
543
544 #[cfg(any(test, feature = "test-support"))]
545 pub async fn test(
546 fs: Arc<dyn Fs>,
547 root_paths: impl IntoIterator<Item = &Path>,
548 cx: &mut gpui::TestAppContext,
549 ) -> ModelHandle<Project> {
550 let languages = Arc::new(LanguageRegistry::test());
551 let http_client = client::test::FakeHttpClient::with_404_response();
552 let client = client::Client::new(http_client.clone());
553 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
554 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
555 let project = cx.update(|cx| {
556 Project::local(true, client, user_store, project_store, languages, fs, cx)
557 });
558 for path in root_paths {
559 let (tree, _) = project
560 .update(cx, |project, cx| {
561 project.find_or_create_local_worktree(path, true, cx)
562 })
563 .await
564 .unwrap();
565 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
566 .await;
567 }
568 project
569 }
570
571 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
572 if self.is_remote() {
573 return Task::ready(Ok(()));
574 }
575
576 let db = self.project_store.read(cx).db.clone();
577 let keys = self.db_keys_for_online_state(cx);
578 let online_by_default = cx.global::<Settings>().projects_online_by_default;
579 let read_online = cx.background().spawn(async move {
580 let values = db.read(keys)?;
581 anyhow::Ok(
582 values
583 .into_iter()
584 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
585 )
586 });
587 cx.spawn(|this, mut cx| async move {
588 let online = read_online.await.log_err().unwrap_or(false);
589 this.update(&mut cx, |this, cx| {
590 this.initialized_persistent_state = true;
591 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
592 let mut online_tx = online_tx.borrow_mut();
593 if *online_tx != online {
594 *online_tx = online;
595 drop(online_tx);
596 this.metadata_changed(false, cx);
597 }
598 }
599 });
600 Ok(())
601 })
602 }
603
604 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
605 if self.is_remote() || !self.initialized_persistent_state {
606 return Task::ready(Ok(()));
607 }
608
609 let db = self.project_store.read(cx).db.clone();
610 let keys = self.db_keys_for_online_state(cx);
611 let is_online = self.is_online();
612 cx.background().spawn(async move {
613 let value = &[is_online as u8];
614 db.write(keys.into_iter().map(|key| (key, value)))
615 })
616 }
617
618 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
619 self.opened_buffers
620 .get(&remote_id)
621 .and_then(|buffer| buffer.upgrade(cx))
622 }
623
624 pub fn languages(&self) -> &Arc<LanguageRegistry> {
625 &self.languages
626 }
627
628 pub fn client(&self) -> Arc<Client> {
629 self.client.clone()
630 }
631
632 pub fn user_store(&self) -> ModelHandle<UserStore> {
633 self.user_store.clone()
634 }
635
636 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
637 self.project_store.clone()
638 }
639
640 #[cfg(any(test, feature = "test-support"))]
641 pub fn check_invariants(&self, cx: &AppContext) {
642 if self.is_local() {
643 let mut worktree_root_paths = HashMap::default();
644 for worktree in self.worktrees(cx) {
645 let worktree = worktree.read(cx);
646 let abs_path = worktree.as_local().unwrap().abs_path().clone();
647 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
648 assert_eq!(
649 prev_worktree_id,
650 None,
651 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
652 abs_path,
653 worktree.id(),
654 prev_worktree_id
655 )
656 }
657 } else {
658 let replica_id = self.replica_id();
659 for buffer in self.opened_buffers.values() {
660 if let Some(buffer) = buffer.upgrade(cx) {
661 let buffer = buffer.read(cx);
662 assert_eq!(
663 buffer.deferred_ops_len(),
664 0,
665 "replica {}, buffer {} has deferred operations",
666 replica_id,
667 buffer.remote_id()
668 );
669 }
670 }
671 }
672 }
673
674 #[cfg(any(test, feature = "test-support"))]
675 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
676 let path = path.into();
677 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
678 self.opened_buffers.iter().any(|(_, buffer)| {
679 if let Some(buffer) = buffer.upgrade(cx) {
680 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
681 if file.worktree == worktree && file.path() == &path.path {
682 return true;
683 }
684 }
685 }
686 false
687 })
688 } else {
689 false
690 }
691 }
692
693 pub fn fs(&self) -> &Arc<dyn Fs> {
694 &self.fs
695 }
696
697 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
698 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
699 let mut online_tx = online_tx.borrow_mut();
700 if *online_tx != online {
701 *online_tx = online;
702 drop(online_tx);
703 self.metadata_changed(true, cx);
704 }
705 }
706 }
707
708 pub fn is_online(&self) -> bool {
709 match &self.client_state {
710 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
711 ProjectClientState::Remote { .. } => true,
712 }
713 }
714
715 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
716 self.unshared(cx);
717 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
718 if let Some(remote_id) = *remote_id_rx.borrow() {
719 let request = self.client.request(proto::UnregisterProject {
720 project_id: remote_id,
721 });
722 return cx.spawn(|this, mut cx| async move {
723 let response = request.await;
724
725 // Unregistering the project causes the server to send out a
726 // contact update removing this project from the host's list
727 // of online projects. Wait until this contact update has been
728 // processed before clearing out this project's remote id, so
729 // that there is no moment where this project appears in the
730 // contact metadata and *also* has no remote id.
731 this.update(&mut cx, |this, cx| {
732 this.user_store()
733 .update(cx, |store, _| store.contact_updates_done())
734 })
735 .await;
736
737 this.update(&mut cx, |this, cx| {
738 if let ProjectClientState::Local { remote_id_tx, .. } =
739 &mut this.client_state
740 {
741 *remote_id_tx.borrow_mut() = None;
742 }
743 this.subscriptions.clear();
744 this.metadata_changed(false, cx);
745 });
746 response.map(drop)
747 });
748 }
749 }
750 Task::ready(Ok(()))
751 }
752
753 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
754 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
755 if remote_id_rx.borrow().is_some() {
756 return Task::ready(Ok(()));
757 }
758 }
759
760 let response = self.client.request(proto::RegisterProject {});
761 cx.spawn(|this, mut cx| async move {
762 let remote_id = response.await?.project_id;
763 this.update(&mut cx, |this, cx| {
764 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
765 *remote_id_tx.borrow_mut() = Some(remote_id);
766 }
767
768 this.metadata_changed(false, cx);
769 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
770 this.subscriptions
771 .push(this.client.add_model_for_remote_entity(remote_id, cx));
772 Ok(())
773 })
774 })
775 }
776
777 pub fn remote_id(&self) -> Option<u64> {
778 match &self.client_state {
779 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
780 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
781 }
782 }
783
784 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
785 let mut id = None;
786 let mut watch = None;
787 match &self.client_state {
788 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
789 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
790 }
791
792 async move {
793 if let Some(id) = id {
794 return id;
795 }
796 let mut watch = watch.unwrap();
797 loop {
798 let id = *watch.borrow();
799 if let Some(id) = id {
800 return id;
801 }
802 watch.next().await;
803 }
804 }
805 }
806
807 pub fn shared_remote_id(&self) -> Option<u64> {
808 match &self.client_state {
809 ProjectClientState::Local {
810 remote_id_rx,
811 is_shared,
812 ..
813 } => {
814 if *is_shared {
815 *remote_id_rx.borrow()
816 } else {
817 None
818 }
819 }
820 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
821 }
822 }
823
824 pub fn replica_id(&self) -> ReplicaId {
825 match &self.client_state {
826 ProjectClientState::Local { .. } => 0,
827 ProjectClientState::Remote { replica_id, .. } => *replica_id,
828 }
829 }
830
831 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
832 if let ProjectClientState::Local {
833 remote_id_rx,
834 online_rx,
835 ..
836 } = &self.client_state
837 {
838 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
839 self.client
840 .send(proto::UpdateProject {
841 project_id,
842 worktrees: self
843 .worktrees
844 .iter()
845 .filter_map(|worktree| {
846 worktree.upgrade(&cx).map(|worktree| {
847 worktree.read(cx).as_local().unwrap().metadata_proto()
848 })
849 })
850 .collect(),
851 })
852 .log_err();
853 }
854
855 self.project_store.update(cx, |_, cx| cx.notify());
856 if persist {
857 self.persist_state(cx).detach_and_log_err(cx);
858 }
859 cx.notify();
860 }
861 }
862
863 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
864 &self.collaborators
865 }
866
867 pub fn worktrees<'a>(
868 &'a self,
869 cx: &'a AppContext,
870 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
871 self.worktrees
872 .iter()
873 .filter_map(move |worktree| worktree.upgrade(cx))
874 }
875
876 pub fn visible_worktrees<'a>(
877 &'a self,
878 cx: &'a AppContext,
879 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
880 self.worktrees.iter().filter_map(|worktree| {
881 worktree.upgrade(cx).and_then(|worktree| {
882 if worktree.read(cx).is_visible() {
883 Some(worktree)
884 } else {
885 None
886 }
887 })
888 })
889 }
890
891 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
892 self.visible_worktrees(cx)
893 .map(|tree| tree.read(cx).root_name())
894 }
895
896 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
897 self.worktrees
898 .iter()
899 .filter_map(|worktree| {
900 let worktree = worktree.upgrade(&cx)?.read(cx);
901 if worktree.is_visible() {
902 Some(format!(
903 "project-path-online:{}",
904 worktree.as_local().unwrap().abs_path().to_string_lossy()
905 ))
906 } else {
907 None
908 }
909 })
910 .collect::<Vec<_>>()
911 }
912
913 pub fn worktree_for_id(
914 &self,
915 id: WorktreeId,
916 cx: &AppContext,
917 ) -> Option<ModelHandle<Worktree>> {
918 self.worktrees(cx)
919 .find(|worktree| worktree.read(cx).id() == id)
920 }
921
922 pub fn worktree_for_entry(
923 &self,
924 entry_id: ProjectEntryId,
925 cx: &AppContext,
926 ) -> Option<ModelHandle<Worktree>> {
927 self.worktrees(cx)
928 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
929 }
930
931 pub fn worktree_id_for_entry(
932 &self,
933 entry_id: ProjectEntryId,
934 cx: &AppContext,
935 ) -> Option<WorktreeId> {
936 self.worktree_for_entry(entry_id, cx)
937 .map(|worktree| worktree.read(cx).id())
938 }
939
940 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
941 paths.iter().all(|path| self.contains_path(&path, cx))
942 }
943
944 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
945 for worktree in self.worktrees(cx) {
946 let worktree = worktree.read(cx).as_local();
947 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
948 return true;
949 }
950 }
951 false
952 }
953
954 pub fn create_entry(
955 &mut self,
956 project_path: impl Into<ProjectPath>,
957 is_directory: bool,
958 cx: &mut ModelContext<Self>,
959 ) -> Option<Task<Result<Entry>>> {
960 let project_path = project_path.into();
961 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
962 if self.is_local() {
963 Some(worktree.update(cx, |worktree, cx| {
964 worktree
965 .as_local_mut()
966 .unwrap()
967 .create_entry(project_path.path, is_directory, cx)
968 }))
969 } else {
970 let client = self.client.clone();
971 let project_id = self.remote_id().unwrap();
972 Some(cx.spawn_weak(|_, mut cx| async move {
973 let response = client
974 .request(proto::CreateProjectEntry {
975 worktree_id: project_path.worktree_id.to_proto(),
976 project_id,
977 path: project_path.path.as_os_str().as_bytes().to_vec(),
978 is_directory,
979 })
980 .await?;
981 let entry = response
982 .entry
983 .ok_or_else(|| anyhow!("missing entry in response"))?;
984 worktree
985 .update(&mut cx, |worktree, cx| {
986 worktree.as_remote().unwrap().insert_entry(
987 entry,
988 response.worktree_scan_id as usize,
989 cx,
990 )
991 })
992 .await
993 }))
994 }
995 }
996
997 pub fn copy_entry(
998 &mut self,
999 entry_id: ProjectEntryId,
1000 new_path: impl Into<Arc<Path>>,
1001 cx: &mut ModelContext<Self>,
1002 ) -> Option<Task<Result<Entry>>> {
1003 let worktree = self.worktree_for_entry(entry_id, cx)?;
1004 let new_path = new_path.into();
1005 if self.is_local() {
1006 worktree.update(cx, |worktree, cx| {
1007 worktree
1008 .as_local_mut()
1009 .unwrap()
1010 .copy_entry(entry_id, new_path, cx)
1011 })
1012 } else {
1013 let client = self.client.clone();
1014 let project_id = self.remote_id().unwrap();
1015
1016 Some(cx.spawn_weak(|_, mut cx| async move {
1017 let response = client
1018 .request(proto::CopyProjectEntry {
1019 project_id,
1020 entry_id: entry_id.to_proto(),
1021 new_path: new_path.as_os_str().as_bytes().to_vec(),
1022 })
1023 .await?;
1024 let entry = response
1025 .entry
1026 .ok_or_else(|| anyhow!("missing entry in response"))?;
1027 worktree
1028 .update(&mut cx, |worktree, cx| {
1029 worktree.as_remote().unwrap().insert_entry(
1030 entry,
1031 response.worktree_scan_id as usize,
1032 cx,
1033 )
1034 })
1035 .await
1036 }))
1037 }
1038 }
1039
1040 pub fn rename_entry(
1041 &mut self,
1042 entry_id: ProjectEntryId,
1043 new_path: impl Into<Arc<Path>>,
1044 cx: &mut ModelContext<Self>,
1045 ) -> Option<Task<Result<Entry>>> {
1046 let worktree = self.worktree_for_entry(entry_id, cx)?;
1047 let new_path = new_path.into();
1048 if self.is_local() {
1049 worktree.update(cx, |worktree, cx| {
1050 worktree
1051 .as_local_mut()
1052 .unwrap()
1053 .rename_entry(entry_id, new_path, cx)
1054 })
1055 } else {
1056 let client = self.client.clone();
1057 let project_id = self.remote_id().unwrap();
1058
1059 Some(cx.spawn_weak(|_, mut cx| async move {
1060 let response = client
1061 .request(proto::RenameProjectEntry {
1062 project_id,
1063 entry_id: entry_id.to_proto(),
1064 new_path: new_path.as_os_str().as_bytes().to_vec(),
1065 })
1066 .await?;
1067 let entry = response
1068 .entry
1069 .ok_or_else(|| anyhow!("missing entry in response"))?;
1070 worktree
1071 .update(&mut cx, |worktree, cx| {
1072 worktree.as_remote().unwrap().insert_entry(
1073 entry,
1074 response.worktree_scan_id as usize,
1075 cx,
1076 )
1077 })
1078 .await
1079 }))
1080 }
1081 }
1082
1083 pub fn delete_entry(
1084 &mut self,
1085 entry_id: ProjectEntryId,
1086 cx: &mut ModelContext<Self>,
1087 ) -> Option<Task<Result<()>>> {
1088 let worktree = self.worktree_for_entry(entry_id, cx)?;
1089 if self.is_local() {
1090 worktree.update(cx, |worktree, cx| {
1091 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1092 })
1093 } else {
1094 let client = self.client.clone();
1095 let project_id = self.remote_id().unwrap();
1096 Some(cx.spawn_weak(|_, mut cx| async move {
1097 let response = client
1098 .request(proto::DeleteProjectEntry {
1099 project_id,
1100 entry_id: entry_id.to_proto(),
1101 })
1102 .await?;
1103 worktree
1104 .update(&mut cx, move |worktree, cx| {
1105 worktree.as_remote().unwrap().delete_entry(
1106 entry_id,
1107 response.worktree_scan_id as usize,
1108 cx,
1109 )
1110 })
1111 .await
1112 }))
1113 }
1114 }
1115
1116 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1117 let project_id;
1118 if let ProjectClientState::Local {
1119 remote_id_rx,
1120 is_shared,
1121 ..
1122 } = &mut self.client_state
1123 {
1124 if *is_shared {
1125 return Task::ready(Ok(()));
1126 }
1127 *is_shared = true;
1128 if let Some(id) = *remote_id_rx.borrow() {
1129 project_id = id;
1130 } else {
1131 return Task::ready(Err(anyhow!("project hasn't been registered")));
1132 }
1133 } else {
1134 return Task::ready(Err(anyhow!("can't share a remote project")));
1135 };
1136
1137 for open_buffer in self.opened_buffers.values_mut() {
1138 match open_buffer {
1139 OpenBuffer::Strong(_) => {}
1140 OpenBuffer::Weak(buffer) => {
1141 if let Some(buffer) = buffer.upgrade(cx) {
1142 *open_buffer = OpenBuffer::Strong(buffer);
1143 }
1144 }
1145 OpenBuffer::Loading(_) => unreachable!(),
1146 }
1147 }
1148
1149 for worktree_handle in self.worktrees.iter_mut() {
1150 match worktree_handle {
1151 WorktreeHandle::Strong(_) => {}
1152 WorktreeHandle::Weak(worktree) => {
1153 if let Some(worktree) = worktree.upgrade(cx) {
1154 *worktree_handle = WorktreeHandle::Strong(worktree);
1155 }
1156 }
1157 }
1158 }
1159
1160 let mut tasks = Vec::new();
1161 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1162 worktree.update(cx, |worktree, cx| {
1163 let worktree = worktree.as_local_mut().unwrap();
1164 tasks.push(worktree.share(project_id, cx));
1165 });
1166 }
1167
1168 cx.spawn(|this, mut cx| async move {
1169 for task in tasks {
1170 task.await?;
1171 }
1172 this.update(&mut cx, |_, cx| cx.notify());
1173 Ok(())
1174 })
1175 }
1176
1177 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1178 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1179 if !*is_shared {
1180 return;
1181 }
1182
1183 *is_shared = false;
1184 self.collaborators.clear();
1185 self.shared_buffers.clear();
1186 for worktree_handle in self.worktrees.iter_mut() {
1187 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1188 let is_visible = worktree.update(cx, |worktree, _| {
1189 worktree.as_local_mut().unwrap().unshare();
1190 worktree.is_visible()
1191 });
1192 if !is_visible {
1193 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1194 }
1195 }
1196 }
1197
1198 for open_buffer in self.opened_buffers.values_mut() {
1199 match open_buffer {
1200 OpenBuffer::Strong(buffer) => {
1201 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1202 }
1203 _ => {}
1204 }
1205 }
1206
1207 cx.notify();
1208 } else {
1209 log::error!("attempted to unshare a remote project");
1210 }
1211 }
1212
1213 pub fn respond_to_join_request(
1214 &mut self,
1215 requester_id: u64,
1216 allow: bool,
1217 cx: &mut ModelContext<Self>,
1218 ) {
1219 if let Some(project_id) = self.remote_id() {
1220 let share = self.share(cx);
1221 let client = self.client.clone();
1222 cx.foreground()
1223 .spawn(async move {
1224 share.await?;
1225 client.send(proto::RespondToJoinProjectRequest {
1226 requester_id,
1227 project_id,
1228 allow,
1229 })
1230 })
1231 .detach_and_log_err(cx);
1232 }
1233 }
1234
1235 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1236 if let ProjectClientState::Remote {
1237 sharing_has_stopped,
1238 ..
1239 } = &mut self.client_state
1240 {
1241 *sharing_has_stopped = true;
1242 self.collaborators.clear();
1243 cx.notify();
1244 }
1245 }
1246
1247 pub fn is_read_only(&self) -> bool {
1248 match &self.client_state {
1249 ProjectClientState::Local { .. } => false,
1250 ProjectClientState::Remote {
1251 sharing_has_stopped,
1252 ..
1253 } => *sharing_has_stopped,
1254 }
1255 }
1256
1257 pub fn is_local(&self) -> bool {
1258 match &self.client_state {
1259 ProjectClientState::Local { .. } => true,
1260 ProjectClientState::Remote { .. } => false,
1261 }
1262 }
1263
1264 pub fn is_remote(&self) -> bool {
1265 !self.is_local()
1266 }
1267
1268 pub fn create_buffer(
1269 &mut self,
1270 text: &str,
1271 language: Option<Arc<Language>>,
1272 cx: &mut ModelContext<Self>,
1273 ) -> Result<ModelHandle<Buffer>> {
1274 if self.is_remote() {
1275 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1276 }
1277
1278 let buffer = cx.add_model(|cx| {
1279 Buffer::new(self.replica_id(), text, cx)
1280 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1281 });
1282 self.register_buffer(&buffer, cx)?;
1283 Ok(buffer)
1284 }
1285
1286 pub fn open_path(
1287 &mut self,
1288 path: impl Into<ProjectPath>,
1289 cx: &mut ModelContext<Self>,
1290 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1291 let task = self.open_buffer(path, cx);
1292 cx.spawn_weak(|_, cx| async move {
1293 let buffer = task.await?;
1294 let project_entry_id = buffer
1295 .read_with(&cx, |buffer, cx| {
1296 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1297 })
1298 .ok_or_else(|| anyhow!("no project entry"))?;
1299 Ok((project_entry_id, buffer.into()))
1300 })
1301 }
1302
1303 pub fn open_local_buffer(
1304 &mut self,
1305 abs_path: impl AsRef<Path>,
1306 cx: &mut ModelContext<Self>,
1307 ) -> Task<Result<ModelHandle<Buffer>>> {
1308 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1309 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1310 } else {
1311 Task::ready(Err(anyhow!("no such path")))
1312 }
1313 }
1314
1315 pub fn open_buffer(
1316 &mut self,
1317 path: impl Into<ProjectPath>,
1318 cx: &mut ModelContext<Self>,
1319 ) -> Task<Result<ModelHandle<Buffer>>> {
1320 let project_path = path.into();
1321 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1322 worktree
1323 } else {
1324 return Task::ready(Err(anyhow!("no such worktree")));
1325 };
1326
1327 // If there is already a buffer for the given path, then return it.
1328 let existing_buffer = self.get_open_buffer(&project_path, cx);
1329 if let Some(existing_buffer) = existing_buffer {
1330 return Task::ready(Ok(existing_buffer));
1331 }
1332
1333 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1334 // If the given path is already being loaded, then wait for that existing
1335 // task to complete and return the same buffer.
1336 hash_map::Entry::Occupied(e) => e.get().clone(),
1337
1338 // Otherwise, record the fact that this path is now being loaded.
1339 hash_map::Entry::Vacant(entry) => {
1340 let (mut tx, rx) = postage::watch::channel();
1341 entry.insert(rx.clone());
1342
1343 let load_buffer = if worktree.read(cx).is_local() {
1344 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1345 } else {
1346 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1347 };
1348
1349 cx.spawn(move |this, mut cx| async move {
1350 let load_result = load_buffer.await;
1351 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1352 // Record the fact that the buffer is no longer loading.
1353 this.loading_buffers.remove(&project_path);
1354 let buffer = load_result.map_err(Arc::new)?;
1355 Ok(buffer)
1356 }));
1357 })
1358 .detach();
1359 rx
1360 }
1361 };
1362
1363 cx.foreground().spawn(async move {
1364 loop {
1365 if let Some(result) = loading_watch.borrow().as_ref() {
1366 match result {
1367 Ok(buffer) => return Ok(buffer.clone()),
1368 Err(error) => return Err(anyhow!("{}", error)),
1369 }
1370 }
1371 loading_watch.next().await;
1372 }
1373 })
1374 }
1375
1376 fn open_local_buffer_internal(
1377 &mut self,
1378 path: &Arc<Path>,
1379 worktree: &ModelHandle<Worktree>,
1380 cx: &mut ModelContext<Self>,
1381 ) -> Task<Result<ModelHandle<Buffer>>> {
1382 let load_buffer = worktree.update(cx, |worktree, cx| {
1383 let worktree = worktree.as_local_mut().unwrap();
1384 worktree.load_buffer(path, cx)
1385 });
1386 cx.spawn(|this, mut cx| async move {
1387 let buffer = load_buffer.await?;
1388 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1389 Ok(buffer)
1390 })
1391 }
1392
1393 fn open_remote_buffer_internal(
1394 &mut self,
1395 path: &Arc<Path>,
1396 worktree: &ModelHandle<Worktree>,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Task<Result<ModelHandle<Buffer>>> {
1399 let rpc = self.client.clone();
1400 let project_id = self.remote_id().unwrap();
1401 let remote_worktree_id = worktree.read(cx).id();
1402 let path = path.clone();
1403 let path_string = path.to_string_lossy().to_string();
1404 cx.spawn(|this, mut cx| async move {
1405 let response = rpc
1406 .request(proto::OpenBufferByPath {
1407 project_id,
1408 worktree_id: remote_worktree_id.to_proto(),
1409 path: path_string,
1410 })
1411 .await?;
1412 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1413 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1414 .await
1415 })
1416 }
1417
1418 fn open_local_buffer_via_lsp(
1419 &mut self,
1420 abs_path: lsp::Url,
1421 lsp_adapter: Arc<dyn LspAdapter>,
1422 lsp_server: Arc<LanguageServer>,
1423 cx: &mut ModelContext<Self>,
1424 ) -> Task<Result<ModelHandle<Buffer>>> {
1425 cx.spawn(|this, mut cx| async move {
1426 let abs_path = abs_path
1427 .to_file_path()
1428 .map_err(|_| anyhow!("can't convert URI to path"))?;
1429 let (worktree, relative_path) = if let Some(result) =
1430 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1431 {
1432 result
1433 } else {
1434 let worktree = this
1435 .update(&mut cx, |this, cx| {
1436 this.create_local_worktree(&abs_path, false, cx)
1437 })
1438 .await?;
1439 this.update(&mut cx, |this, cx| {
1440 this.language_servers.insert(
1441 (worktree.read(cx).id(), lsp_adapter.name()),
1442 (lsp_adapter, lsp_server),
1443 );
1444 });
1445 (worktree, PathBuf::new())
1446 };
1447
1448 let project_path = ProjectPath {
1449 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1450 path: relative_path.into(),
1451 };
1452 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1453 .await
1454 })
1455 }
1456
1457 pub fn open_buffer_by_id(
1458 &mut self,
1459 id: u64,
1460 cx: &mut ModelContext<Self>,
1461 ) -> Task<Result<ModelHandle<Buffer>>> {
1462 if let Some(buffer) = self.buffer_for_id(id, cx) {
1463 Task::ready(Ok(buffer))
1464 } else if self.is_local() {
1465 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1466 } else if let Some(project_id) = self.remote_id() {
1467 let request = self
1468 .client
1469 .request(proto::OpenBufferById { project_id, id });
1470 cx.spawn(|this, mut cx| async move {
1471 let buffer = request
1472 .await?
1473 .buffer
1474 .ok_or_else(|| anyhow!("invalid buffer"))?;
1475 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1476 .await
1477 })
1478 } else {
1479 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1480 }
1481 }
1482
1483 pub fn save_buffer_as(
1484 &mut self,
1485 buffer: ModelHandle<Buffer>,
1486 abs_path: PathBuf,
1487 cx: &mut ModelContext<Project>,
1488 ) -> Task<Result<()>> {
1489 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1490 let old_path =
1491 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1492 cx.spawn(|this, mut cx| async move {
1493 if let Some(old_path) = old_path {
1494 this.update(&mut cx, |this, cx| {
1495 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1496 });
1497 }
1498 let (worktree, path) = worktree_task.await?;
1499 worktree
1500 .update(&mut cx, |worktree, cx| {
1501 worktree
1502 .as_local_mut()
1503 .unwrap()
1504 .save_buffer_as(buffer.clone(), path, cx)
1505 })
1506 .await?;
1507 this.update(&mut cx, |this, cx| {
1508 this.assign_language_to_buffer(&buffer, cx);
1509 this.register_buffer_with_language_server(&buffer, cx);
1510 });
1511 Ok(())
1512 })
1513 }
1514
1515 pub fn get_open_buffer(
1516 &mut self,
1517 path: &ProjectPath,
1518 cx: &mut ModelContext<Self>,
1519 ) -> Option<ModelHandle<Buffer>> {
1520 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1521 self.opened_buffers.values().find_map(|buffer| {
1522 let buffer = buffer.upgrade(cx)?;
1523 let file = File::from_dyn(buffer.read(cx).file())?;
1524 if file.worktree == worktree && file.path() == &path.path {
1525 Some(buffer)
1526 } else {
1527 None
1528 }
1529 })
1530 }
1531
1532 fn register_buffer(
1533 &mut self,
1534 buffer: &ModelHandle<Buffer>,
1535 cx: &mut ModelContext<Self>,
1536 ) -> Result<()> {
1537 let remote_id = buffer.read(cx).remote_id();
1538 let open_buffer = if self.is_remote() || self.is_shared() {
1539 OpenBuffer::Strong(buffer.clone())
1540 } else {
1541 OpenBuffer::Weak(buffer.downgrade())
1542 };
1543
1544 match self.opened_buffers.insert(remote_id, open_buffer) {
1545 None => {}
1546 Some(OpenBuffer::Loading(operations)) => {
1547 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1548 }
1549 Some(OpenBuffer::Weak(existing_handle)) => {
1550 if existing_handle.upgrade(cx).is_some() {
1551 Err(anyhow!(
1552 "already registered buffer with remote id {}",
1553 remote_id
1554 ))?
1555 }
1556 }
1557 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1558 "already registered buffer with remote id {}",
1559 remote_id
1560 ))?,
1561 }
1562 cx.subscribe(buffer, |this, buffer, event, cx| {
1563 this.on_buffer_event(buffer, event, cx);
1564 })
1565 .detach();
1566
1567 self.assign_language_to_buffer(buffer, cx);
1568 self.register_buffer_with_language_server(buffer, cx);
1569 cx.observe_release(buffer, |this, buffer, cx| {
1570 if let Some(file) = File::from_dyn(buffer.file()) {
1571 if file.is_local() {
1572 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1573 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1574 server
1575 .notify::<lsp::notification::DidCloseTextDocument>(
1576 lsp::DidCloseTextDocumentParams {
1577 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1578 },
1579 )
1580 .log_err();
1581 }
1582 }
1583 }
1584 })
1585 .detach();
1586
1587 Ok(())
1588 }
1589
1590 fn register_buffer_with_language_server(
1591 &mut self,
1592 buffer_handle: &ModelHandle<Buffer>,
1593 cx: &mut ModelContext<Self>,
1594 ) {
1595 let buffer = buffer_handle.read(cx);
1596 let buffer_id = buffer.remote_id();
1597 if let Some(file) = File::from_dyn(buffer.file()) {
1598 if file.is_local() {
1599 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1600 let initial_snapshot = buffer.text_snapshot();
1601
1602 let mut language_server = None;
1603 let mut language_id = None;
1604 if let Some(language) = buffer.language() {
1605 let worktree_id = file.worktree_id(cx);
1606 if let Some(adapter) = language.lsp_adapter() {
1607 language_id = adapter.id_for_language(language.name().as_ref());
1608 language_server = self
1609 .language_servers
1610 .get(&(worktree_id, adapter.name()))
1611 .cloned();
1612 }
1613 }
1614
1615 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1616 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1617 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1618 .log_err();
1619 }
1620 }
1621
1622 if let Some((_, server)) = language_server {
1623 server
1624 .notify::<lsp::notification::DidOpenTextDocument>(
1625 lsp::DidOpenTextDocumentParams {
1626 text_document: lsp::TextDocumentItem::new(
1627 uri,
1628 language_id.unwrap_or_default(),
1629 0,
1630 initial_snapshot.text(),
1631 ),
1632 }
1633 .clone(),
1634 )
1635 .log_err();
1636 buffer_handle.update(cx, |buffer, cx| {
1637 buffer.set_completion_triggers(
1638 server
1639 .capabilities()
1640 .completion_provider
1641 .as_ref()
1642 .and_then(|provider| provider.trigger_characters.clone())
1643 .unwrap_or(Vec::new()),
1644 cx,
1645 )
1646 });
1647 self.buffer_snapshots
1648 .insert(buffer_id, vec![(0, initial_snapshot)]);
1649 }
1650 }
1651 }
1652 }
1653
1654 fn unregister_buffer_from_language_server(
1655 &mut self,
1656 buffer: &ModelHandle<Buffer>,
1657 old_path: PathBuf,
1658 cx: &mut ModelContext<Self>,
1659 ) {
1660 buffer.update(cx, |buffer, cx| {
1661 buffer.update_diagnostics(Default::default(), cx);
1662 self.buffer_snapshots.remove(&buffer.remote_id());
1663 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1664 language_server
1665 .notify::<lsp::notification::DidCloseTextDocument>(
1666 lsp::DidCloseTextDocumentParams {
1667 text_document: lsp::TextDocumentIdentifier::new(
1668 lsp::Url::from_file_path(old_path).unwrap(),
1669 ),
1670 },
1671 )
1672 .log_err();
1673 }
1674 });
1675 }
1676
1677 fn on_buffer_event(
1678 &mut self,
1679 buffer: ModelHandle<Buffer>,
1680 event: &BufferEvent,
1681 cx: &mut ModelContext<Self>,
1682 ) -> Option<()> {
1683 match event {
1684 BufferEvent::Operation(operation) => {
1685 if let Some(project_id) = self.shared_remote_id() {
1686 let request = self.client.request(proto::UpdateBuffer {
1687 project_id,
1688 buffer_id: buffer.read(cx).remote_id(),
1689 operations: vec![language::proto::serialize_operation(&operation)],
1690 });
1691 cx.background().spawn(request).detach_and_log_err(cx);
1692 }
1693 }
1694 BufferEvent::Edited { .. } => {
1695 let (_, language_server) = self
1696 .language_server_for_buffer(buffer.read(cx), cx)?
1697 .clone();
1698 let buffer = buffer.read(cx);
1699 let file = File::from_dyn(buffer.file())?;
1700 let abs_path = file.as_local()?.abs_path(cx);
1701 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1702 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1703 let (version, prev_snapshot) = buffer_snapshots.last()?;
1704 let next_snapshot = buffer.text_snapshot();
1705 let next_version = version + 1;
1706
1707 let content_changes = buffer
1708 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1709 .map(|edit| {
1710 let edit_start = edit.new.start.0;
1711 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1712 let new_text = next_snapshot
1713 .text_for_range(edit.new.start.1..edit.new.end.1)
1714 .collect();
1715 lsp::TextDocumentContentChangeEvent {
1716 range: Some(lsp::Range::new(
1717 point_to_lsp(edit_start),
1718 point_to_lsp(edit_end),
1719 )),
1720 range_length: None,
1721 text: new_text,
1722 }
1723 })
1724 .collect();
1725
1726 buffer_snapshots.push((next_version, next_snapshot));
1727
1728 language_server
1729 .notify::<lsp::notification::DidChangeTextDocument>(
1730 lsp::DidChangeTextDocumentParams {
1731 text_document: lsp::VersionedTextDocumentIdentifier::new(
1732 uri,
1733 next_version,
1734 ),
1735 content_changes,
1736 },
1737 )
1738 .log_err();
1739 }
1740 BufferEvent::Saved => {
1741 let file = File::from_dyn(buffer.read(cx).file())?;
1742 let worktree_id = file.worktree_id(cx);
1743 let abs_path = file.as_local()?.abs_path(cx);
1744 let text_document = lsp::TextDocumentIdentifier {
1745 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1746 };
1747
1748 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1749 server
1750 .notify::<lsp::notification::DidSaveTextDocument>(
1751 lsp::DidSaveTextDocumentParams {
1752 text_document: text_document.clone(),
1753 text: None,
1754 },
1755 )
1756 .log_err();
1757 }
1758 }
1759 _ => {}
1760 }
1761
1762 None
1763 }
1764
1765 fn language_servers_for_worktree(
1766 &self,
1767 worktree_id: WorktreeId,
1768 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1769 self.language_servers.iter().filter_map(
1770 move |((language_server_worktree_id, _), server)| {
1771 if *language_server_worktree_id == worktree_id {
1772 Some(server)
1773 } else {
1774 None
1775 }
1776 },
1777 )
1778 }
1779
1780 fn assign_language_to_buffer(
1781 &mut self,
1782 buffer: &ModelHandle<Buffer>,
1783 cx: &mut ModelContext<Self>,
1784 ) -> Option<()> {
1785 // If the buffer has a language, set it and start the language server if we haven't already.
1786 let full_path = buffer.read(cx).file()?.full_path(cx);
1787 let language = self.languages.select_language(&full_path)?;
1788 buffer.update(cx, |buffer, cx| {
1789 buffer.set_language(Some(language.clone()), cx);
1790 });
1791
1792 let file = File::from_dyn(buffer.read(cx).file())?;
1793 let worktree = file.worktree.read(cx).as_local()?;
1794 let worktree_id = worktree.id();
1795 let worktree_abs_path = worktree.abs_path().clone();
1796 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1797
1798 None
1799 }
1800
1801 fn start_language_server(
1802 &mut self,
1803 worktree_id: WorktreeId,
1804 worktree_path: Arc<Path>,
1805 language: Arc<Language>,
1806 cx: &mut ModelContext<Self>,
1807 ) {
1808 let adapter = if let Some(adapter) = language.lsp_adapter() {
1809 adapter
1810 } else {
1811 return;
1812 };
1813 let key = (worktree_id, adapter.name());
1814 self.started_language_servers
1815 .entry(key.clone())
1816 .or_insert_with(|| {
1817 let server_id = post_inc(&mut self.next_language_server_id);
1818 let language_server = self.languages.start_language_server(
1819 server_id,
1820 language.clone(),
1821 worktree_path,
1822 self.client.http_client(),
1823 cx,
1824 );
1825 cx.spawn_weak(|this, mut cx| async move {
1826 let language_server = language_server?.await.log_err()?;
1827 let language_server = language_server
1828 .initialize(adapter.initialization_options())
1829 .await
1830 .log_err()?;
1831 let this = this.upgrade(&cx)?;
1832 let disk_based_diagnostics_progress_token =
1833 adapter.disk_based_diagnostics_progress_token();
1834
1835 language_server
1836 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1837 let this = this.downgrade();
1838 let adapter = adapter.clone();
1839 move |params, mut cx| {
1840 if let Some(this) = this.upgrade(&cx) {
1841 this.update(&mut cx, |this, cx| {
1842 this.on_lsp_diagnostics_published(
1843 server_id,
1844 params,
1845 &adapter,
1846 disk_based_diagnostics_progress_token,
1847 cx,
1848 );
1849 });
1850 }
1851 }
1852 })
1853 .detach();
1854
1855 language_server
1856 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1857 let settings = this
1858 .read_with(&cx, |this, _| this.language_server_settings.clone());
1859 move |params, _| {
1860 let settings = settings.lock().clone();
1861 async move {
1862 Ok(params
1863 .items
1864 .into_iter()
1865 .map(|item| {
1866 if let Some(section) = &item.section {
1867 settings
1868 .get(section)
1869 .cloned()
1870 .unwrap_or(serde_json::Value::Null)
1871 } else {
1872 settings.clone()
1873 }
1874 })
1875 .collect())
1876 }
1877 }
1878 })
1879 .detach();
1880
1881 language_server
1882 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1883 let this = this.downgrade();
1884 let adapter = adapter.clone();
1885 let language_server = language_server.clone();
1886 move |params, cx| {
1887 Self::on_lsp_workspace_edit(
1888 this,
1889 params,
1890 server_id,
1891 adapter.clone(),
1892 language_server.clone(),
1893 cx,
1894 )
1895 }
1896 })
1897 .detach();
1898
1899 language_server
1900 .on_notification::<lsp::notification::Progress, _>({
1901 let this = this.downgrade();
1902 move |params, mut cx| {
1903 if let Some(this) = this.upgrade(&cx) {
1904 this.update(&mut cx, |this, cx| {
1905 this.on_lsp_progress(
1906 params,
1907 server_id,
1908 disk_based_diagnostics_progress_token,
1909 cx,
1910 );
1911 });
1912 }
1913 }
1914 })
1915 .detach();
1916
1917 this.update(&mut cx, |this, cx| {
1918 this.language_servers
1919 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1920 this.language_server_statuses.insert(
1921 server_id,
1922 LanguageServerStatus {
1923 name: language_server.name().to_string(),
1924 pending_work: Default::default(),
1925 pending_diagnostic_updates: 0,
1926 },
1927 );
1928 language_server
1929 .notify::<lsp::notification::DidChangeConfiguration>(
1930 lsp::DidChangeConfigurationParams {
1931 settings: this.language_server_settings.lock().clone(),
1932 },
1933 )
1934 .ok();
1935
1936 if let Some(project_id) = this.shared_remote_id() {
1937 this.client
1938 .send(proto::StartLanguageServer {
1939 project_id,
1940 server: Some(proto::LanguageServer {
1941 id: server_id as u64,
1942 name: language_server.name().to_string(),
1943 }),
1944 })
1945 .log_err();
1946 }
1947
1948 // Tell the language server about every open buffer in the worktree that matches the language.
1949 for buffer in this.opened_buffers.values() {
1950 if let Some(buffer_handle) = buffer.upgrade(cx) {
1951 let buffer = buffer_handle.read(cx);
1952 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1953 file
1954 } else {
1955 continue;
1956 };
1957 let language = if let Some(language) = buffer.language() {
1958 language
1959 } else {
1960 continue;
1961 };
1962 if file.worktree.read(cx).id() != key.0
1963 || language.lsp_adapter().map(|a| a.name())
1964 != Some(key.1.clone())
1965 {
1966 continue;
1967 }
1968
1969 let file = file.as_local()?;
1970 let versions = this
1971 .buffer_snapshots
1972 .entry(buffer.remote_id())
1973 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1974 let (version, initial_snapshot) = versions.last().unwrap();
1975 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1976 let language_id = adapter.id_for_language(language.name().as_ref());
1977 language_server
1978 .notify::<lsp::notification::DidOpenTextDocument>(
1979 lsp::DidOpenTextDocumentParams {
1980 text_document: lsp::TextDocumentItem::new(
1981 uri,
1982 language_id.unwrap_or_default(),
1983 *version,
1984 initial_snapshot.text(),
1985 ),
1986 },
1987 )
1988 .log_err()?;
1989 buffer_handle.update(cx, |buffer, cx| {
1990 buffer.set_completion_triggers(
1991 language_server
1992 .capabilities()
1993 .completion_provider
1994 .as_ref()
1995 .and_then(|provider| {
1996 provider.trigger_characters.clone()
1997 })
1998 .unwrap_or(Vec::new()),
1999 cx,
2000 )
2001 });
2002 }
2003 }
2004
2005 cx.notify();
2006 Some(())
2007 });
2008
2009 Some(language_server)
2010 })
2011 });
2012 }
2013
2014 pub fn restart_language_servers_for_buffers(
2015 &mut self,
2016 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2017 cx: &mut ModelContext<Self>,
2018 ) -> Option<()> {
2019 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2020 .into_iter()
2021 .filter_map(|buffer| {
2022 let file = File::from_dyn(buffer.read(cx).file())?;
2023 let worktree = file.worktree.read(cx).as_local()?;
2024 let worktree_id = worktree.id();
2025 let worktree_abs_path = worktree.abs_path().clone();
2026 let full_path = file.full_path(cx);
2027 Some((worktree_id, worktree_abs_path, full_path))
2028 })
2029 .collect();
2030 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2031 let language = self.languages.select_language(&full_path)?;
2032 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2033 }
2034
2035 None
2036 }
2037
2038 fn restart_language_server(
2039 &mut self,
2040 worktree_id: WorktreeId,
2041 worktree_path: Arc<Path>,
2042 language: Arc<Language>,
2043 cx: &mut ModelContext<Self>,
2044 ) {
2045 let adapter = if let Some(adapter) = language.lsp_adapter() {
2046 adapter
2047 } else {
2048 return;
2049 };
2050 let key = (worktree_id, adapter.name());
2051 let server_to_shutdown = self.language_servers.remove(&key);
2052 self.started_language_servers.remove(&key);
2053 server_to_shutdown
2054 .as_ref()
2055 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
2056 cx.spawn_weak(|this, mut cx| async move {
2057 if let Some(this) = this.upgrade(&cx) {
2058 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2059 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2060 shutdown_task.await;
2061 }
2062 }
2063
2064 this.update(&mut cx, |this, cx| {
2065 this.start_language_server(worktree_id, worktree_path, language, cx);
2066 });
2067 }
2068 })
2069 .detach();
2070 }
2071
2072 fn on_lsp_diagnostics_published(
2073 &mut self,
2074 server_id: usize,
2075 mut params: lsp::PublishDiagnosticsParams,
2076 adapter: &Arc<dyn LspAdapter>,
2077 disk_based_diagnostics_progress_token: Option<&str>,
2078 cx: &mut ModelContext<Self>,
2079 ) {
2080 adapter.process_diagnostics(&mut params);
2081 if disk_based_diagnostics_progress_token.is_none() {
2082 self.disk_based_diagnostics_started(cx);
2083 self.broadcast_language_server_update(
2084 server_id,
2085 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2086 proto::LspDiskBasedDiagnosticsUpdating {},
2087 ),
2088 );
2089 }
2090 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2091 .log_err();
2092 if disk_based_diagnostics_progress_token.is_none() {
2093 self.disk_based_diagnostics_finished(cx);
2094 self.broadcast_language_server_update(
2095 server_id,
2096 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2097 proto::LspDiskBasedDiagnosticsUpdated {},
2098 ),
2099 );
2100 }
2101 }
2102
2103 fn on_lsp_progress(
2104 &mut self,
2105 progress: lsp::ProgressParams,
2106 server_id: usize,
2107 disk_based_diagnostics_progress_token: Option<&str>,
2108 cx: &mut ModelContext<Self>,
2109 ) {
2110 let token = match progress.token {
2111 lsp::NumberOrString::String(token) => token,
2112 lsp::NumberOrString::Number(token) => {
2113 log::info!("skipping numeric progress token {}", token);
2114 return;
2115 }
2116 };
2117 let progress = match progress.value {
2118 lsp::ProgressParamsValue::WorkDone(value) => value,
2119 };
2120 let language_server_status =
2121 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2122 status
2123 } else {
2124 return;
2125 };
2126 match progress {
2127 lsp::WorkDoneProgress::Begin(_) => {
2128 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2129 language_server_status.pending_diagnostic_updates += 1;
2130 if language_server_status.pending_diagnostic_updates == 1 {
2131 self.disk_based_diagnostics_started(cx);
2132 self.broadcast_language_server_update(
2133 server_id,
2134 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2135 proto::LspDiskBasedDiagnosticsUpdating {},
2136 ),
2137 );
2138 }
2139 } else {
2140 self.on_lsp_work_start(server_id, token.clone(), cx);
2141 self.broadcast_language_server_update(
2142 server_id,
2143 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2144 token,
2145 }),
2146 );
2147 }
2148 }
2149 lsp::WorkDoneProgress::Report(report) => {
2150 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2151 self.on_lsp_work_progress(
2152 server_id,
2153 token.clone(),
2154 LanguageServerProgress {
2155 message: report.message.clone(),
2156 percentage: report.percentage.map(|p| p as usize),
2157 last_update_at: Instant::now(),
2158 },
2159 cx,
2160 );
2161 self.broadcast_language_server_update(
2162 server_id,
2163 proto::update_language_server::Variant::WorkProgress(
2164 proto::LspWorkProgress {
2165 token,
2166 message: report.message,
2167 percentage: report.percentage.map(|p| p as u32),
2168 },
2169 ),
2170 );
2171 }
2172 }
2173 lsp::WorkDoneProgress::End(_) => {
2174 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2175 language_server_status.pending_diagnostic_updates -= 1;
2176 if language_server_status.pending_diagnostic_updates == 0 {
2177 self.disk_based_diagnostics_finished(cx);
2178 self.broadcast_language_server_update(
2179 server_id,
2180 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2181 proto::LspDiskBasedDiagnosticsUpdated {},
2182 ),
2183 );
2184 }
2185 } else {
2186 self.on_lsp_work_end(server_id, token.clone(), cx);
2187 self.broadcast_language_server_update(
2188 server_id,
2189 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2190 token,
2191 }),
2192 );
2193 }
2194 }
2195 }
2196 }
2197
2198 fn on_lsp_work_start(
2199 &mut self,
2200 language_server_id: usize,
2201 token: String,
2202 cx: &mut ModelContext<Self>,
2203 ) {
2204 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2205 status.pending_work.insert(
2206 token,
2207 LanguageServerProgress {
2208 message: None,
2209 percentage: None,
2210 last_update_at: Instant::now(),
2211 },
2212 );
2213 cx.notify();
2214 }
2215 }
2216
2217 fn on_lsp_work_progress(
2218 &mut self,
2219 language_server_id: usize,
2220 token: String,
2221 progress: LanguageServerProgress,
2222 cx: &mut ModelContext<Self>,
2223 ) {
2224 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2225 status.pending_work.insert(token, progress);
2226 cx.notify();
2227 }
2228 }
2229
2230 fn on_lsp_work_end(
2231 &mut self,
2232 language_server_id: usize,
2233 token: String,
2234 cx: &mut ModelContext<Self>,
2235 ) {
2236 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2237 status.pending_work.remove(&token);
2238 cx.notify();
2239 }
2240 }
2241
2242 async fn on_lsp_workspace_edit(
2243 this: WeakModelHandle<Self>,
2244 params: lsp::ApplyWorkspaceEditParams,
2245 server_id: usize,
2246 adapter: Arc<dyn LspAdapter>,
2247 language_server: Arc<LanguageServer>,
2248 mut cx: AsyncAppContext,
2249 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2250 let this = this
2251 .upgrade(&cx)
2252 .ok_or_else(|| anyhow!("project project closed"))?;
2253 let transaction = Self::deserialize_workspace_edit(
2254 this.clone(),
2255 params.edit,
2256 true,
2257 adapter.clone(),
2258 language_server.clone(),
2259 &mut cx,
2260 )
2261 .await
2262 .log_err();
2263 this.update(&mut cx, |this, _| {
2264 if let Some(transaction) = transaction {
2265 this.last_workspace_edits_by_language_server
2266 .insert(server_id, transaction);
2267 }
2268 });
2269 Ok(lsp::ApplyWorkspaceEditResponse {
2270 applied: true,
2271 failed_change: None,
2272 failure_reason: None,
2273 })
2274 }
2275
2276 fn broadcast_language_server_update(
2277 &self,
2278 language_server_id: usize,
2279 event: proto::update_language_server::Variant,
2280 ) {
2281 if let Some(project_id) = self.shared_remote_id() {
2282 self.client
2283 .send(proto::UpdateLanguageServer {
2284 project_id,
2285 language_server_id: language_server_id as u64,
2286 variant: Some(event),
2287 })
2288 .log_err();
2289 }
2290 }
2291
2292 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2293 for (_, server) in self.language_servers.values() {
2294 server
2295 .notify::<lsp::notification::DidChangeConfiguration>(
2296 lsp::DidChangeConfigurationParams {
2297 settings: settings.clone(),
2298 },
2299 )
2300 .ok();
2301 }
2302 *self.language_server_settings.lock() = settings;
2303 }
2304
2305 pub fn language_server_statuses(
2306 &self,
2307 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2308 self.language_server_statuses.values()
2309 }
2310
2311 pub fn update_diagnostics(
2312 &mut self,
2313 params: lsp::PublishDiagnosticsParams,
2314 disk_based_sources: &[&str],
2315 cx: &mut ModelContext<Self>,
2316 ) -> Result<()> {
2317 let abs_path = params
2318 .uri
2319 .to_file_path()
2320 .map_err(|_| anyhow!("URI is not a file"))?;
2321 let mut diagnostics = Vec::default();
2322 let mut primary_diagnostic_group_ids = HashMap::default();
2323 let mut sources_by_group_id = HashMap::default();
2324 let mut supporting_diagnostics = HashMap::default();
2325 for diagnostic in ¶ms.diagnostics {
2326 let source = diagnostic.source.as_ref();
2327 let code = diagnostic.code.as_ref().map(|code| match code {
2328 lsp::NumberOrString::Number(code) => code.to_string(),
2329 lsp::NumberOrString::String(code) => code.clone(),
2330 });
2331 let range = range_from_lsp(diagnostic.range);
2332 let is_supporting = diagnostic
2333 .related_information
2334 .as_ref()
2335 .map_or(false, |infos| {
2336 infos.iter().any(|info| {
2337 primary_diagnostic_group_ids.contains_key(&(
2338 source,
2339 code.clone(),
2340 range_from_lsp(info.location.range),
2341 ))
2342 })
2343 });
2344
2345 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2346 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2347 });
2348
2349 if is_supporting {
2350 supporting_diagnostics.insert(
2351 (source, code.clone(), range),
2352 (diagnostic.severity, is_unnecessary),
2353 );
2354 } else {
2355 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2356 let is_disk_based = source.map_or(false, |source| {
2357 disk_based_sources.contains(&source.as_str())
2358 });
2359
2360 sources_by_group_id.insert(group_id, source);
2361 primary_diagnostic_group_ids
2362 .insert((source, code.clone(), range.clone()), group_id);
2363
2364 diagnostics.push(DiagnosticEntry {
2365 range,
2366 diagnostic: Diagnostic {
2367 code: code.clone(),
2368 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2369 message: diagnostic.message.clone(),
2370 group_id,
2371 is_primary: true,
2372 is_valid: true,
2373 is_disk_based,
2374 is_unnecessary,
2375 },
2376 });
2377 if let Some(infos) = &diagnostic.related_information {
2378 for info in infos {
2379 if info.location.uri == params.uri && !info.message.is_empty() {
2380 let range = range_from_lsp(info.location.range);
2381 diagnostics.push(DiagnosticEntry {
2382 range,
2383 diagnostic: Diagnostic {
2384 code: code.clone(),
2385 severity: DiagnosticSeverity::INFORMATION,
2386 message: info.message.clone(),
2387 group_id,
2388 is_primary: false,
2389 is_valid: true,
2390 is_disk_based,
2391 is_unnecessary: false,
2392 },
2393 });
2394 }
2395 }
2396 }
2397 }
2398 }
2399
2400 for entry in &mut diagnostics {
2401 let diagnostic = &mut entry.diagnostic;
2402 if !diagnostic.is_primary {
2403 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2404 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2405 source,
2406 diagnostic.code.clone(),
2407 entry.range.clone(),
2408 )) {
2409 if let Some(severity) = severity {
2410 diagnostic.severity = severity;
2411 }
2412 diagnostic.is_unnecessary = is_unnecessary;
2413 }
2414 }
2415 }
2416
2417 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2418 Ok(())
2419 }
2420
2421 pub fn update_diagnostic_entries(
2422 &mut self,
2423 abs_path: PathBuf,
2424 version: Option<i32>,
2425 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2426 cx: &mut ModelContext<Project>,
2427 ) -> Result<(), anyhow::Error> {
2428 let (worktree, relative_path) = self
2429 .find_local_worktree(&abs_path, cx)
2430 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2431 if !worktree.read(cx).is_visible() {
2432 return Ok(());
2433 }
2434
2435 let project_path = ProjectPath {
2436 worktree_id: worktree.read(cx).id(),
2437 path: relative_path.into(),
2438 };
2439 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2440 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2441 }
2442
2443 let updated = worktree.update(cx, |worktree, cx| {
2444 worktree
2445 .as_local_mut()
2446 .ok_or_else(|| anyhow!("not a local worktree"))?
2447 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2448 })?;
2449 if updated {
2450 cx.emit(Event::DiagnosticsUpdated(project_path));
2451 }
2452 Ok(())
2453 }
2454
2455 fn update_buffer_diagnostics(
2456 &mut self,
2457 buffer: &ModelHandle<Buffer>,
2458 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2459 version: Option<i32>,
2460 cx: &mut ModelContext<Self>,
2461 ) -> Result<()> {
2462 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2463 Ordering::Equal
2464 .then_with(|| b.is_primary.cmp(&a.is_primary))
2465 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2466 .then_with(|| a.severity.cmp(&b.severity))
2467 .then_with(|| a.message.cmp(&b.message))
2468 }
2469
2470 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2471
2472 diagnostics.sort_unstable_by(|a, b| {
2473 Ordering::Equal
2474 .then_with(|| a.range.start.cmp(&b.range.start))
2475 .then_with(|| b.range.end.cmp(&a.range.end))
2476 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2477 });
2478
2479 let mut sanitized_diagnostics = Vec::new();
2480 let edits_since_save = Patch::new(
2481 snapshot
2482 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2483 .collect(),
2484 );
2485 for entry in diagnostics {
2486 let start;
2487 let end;
2488 if entry.diagnostic.is_disk_based {
2489 // Some diagnostics are based on files on disk instead of buffers'
2490 // current contents. Adjust these diagnostics' ranges to reflect
2491 // any unsaved edits.
2492 start = edits_since_save.old_to_new(entry.range.start);
2493 end = edits_since_save.old_to_new(entry.range.end);
2494 } else {
2495 start = entry.range.start;
2496 end = entry.range.end;
2497 }
2498
2499 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2500 ..snapshot.clip_point_utf16(end, Bias::Right);
2501
2502 // Expand empty ranges by one character
2503 if range.start == range.end {
2504 range.end.column += 1;
2505 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2506 if range.start == range.end && range.end.column > 0 {
2507 range.start.column -= 1;
2508 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2509 }
2510 }
2511
2512 sanitized_diagnostics.push(DiagnosticEntry {
2513 range,
2514 diagnostic: entry.diagnostic,
2515 });
2516 }
2517 drop(edits_since_save);
2518
2519 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2520 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2521 Ok(())
2522 }
2523
2524 pub fn reload_buffers(
2525 &self,
2526 buffers: HashSet<ModelHandle<Buffer>>,
2527 push_to_history: bool,
2528 cx: &mut ModelContext<Self>,
2529 ) -> Task<Result<ProjectTransaction>> {
2530 let mut local_buffers = Vec::new();
2531 let mut remote_buffers = None;
2532 for buffer_handle in buffers {
2533 let buffer = buffer_handle.read(cx);
2534 if buffer.is_dirty() {
2535 if let Some(file) = File::from_dyn(buffer.file()) {
2536 if file.is_local() {
2537 local_buffers.push(buffer_handle);
2538 } else {
2539 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2540 }
2541 }
2542 }
2543 }
2544
2545 let remote_buffers = self.remote_id().zip(remote_buffers);
2546 let client = self.client.clone();
2547
2548 cx.spawn(|this, mut cx| async move {
2549 let mut project_transaction = ProjectTransaction::default();
2550
2551 if let Some((project_id, remote_buffers)) = remote_buffers {
2552 let response = client
2553 .request(proto::ReloadBuffers {
2554 project_id,
2555 buffer_ids: remote_buffers
2556 .iter()
2557 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2558 .collect(),
2559 })
2560 .await?
2561 .transaction
2562 .ok_or_else(|| anyhow!("missing transaction"))?;
2563 project_transaction = this
2564 .update(&mut cx, |this, cx| {
2565 this.deserialize_project_transaction(response, push_to_history, cx)
2566 })
2567 .await?;
2568 }
2569
2570 for buffer in local_buffers {
2571 let transaction = buffer
2572 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2573 .await?;
2574 buffer.update(&mut cx, |buffer, cx| {
2575 if let Some(transaction) = transaction {
2576 if !push_to_history {
2577 buffer.forget_transaction(transaction.id);
2578 }
2579 project_transaction.0.insert(cx.handle(), transaction);
2580 }
2581 });
2582 }
2583
2584 Ok(project_transaction)
2585 })
2586 }
2587
2588 pub fn format(
2589 &self,
2590 buffers: HashSet<ModelHandle<Buffer>>,
2591 push_to_history: bool,
2592 cx: &mut ModelContext<Project>,
2593 ) -> Task<Result<ProjectTransaction>> {
2594 let mut local_buffers = Vec::new();
2595 let mut remote_buffers = None;
2596 for buffer_handle in buffers {
2597 let buffer = buffer_handle.read(cx);
2598 if let Some(file) = File::from_dyn(buffer.file()) {
2599 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2600 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2601 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2602 }
2603 } else {
2604 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2605 }
2606 } else {
2607 return Task::ready(Ok(Default::default()));
2608 }
2609 }
2610
2611 let remote_buffers = self.remote_id().zip(remote_buffers);
2612 let client = self.client.clone();
2613
2614 cx.spawn(|this, mut cx| async move {
2615 let mut project_transaction = ProjectTransaction::default();
2616
2617 if let Some((project_id, remote_buffers)) = remote_buffers {
2618 let response = client
2619 .request(proto::FormatBuffers {
2620 project_id,
2621 buffer_ids: remote_buffers
2622 .iter()
2623 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2624 .collect(),
2625 })
2626 .await?
2627 .transaction
2628 .ok_or_else(|| anyhow!("missing transaction"))?;
2629 project_transaction = this
2630 .update(&mut cx, |this, cx| {
2631 this.deserialize_project_transaction(response, push_to_history, cx)
2632 })
2633 .await?;
2634 }
2635
2636 for (buffer, buffer_abs_path, language_server) in local_buffers {
2637 let text_document = lsp::TextDocumentIdentifier::new(
2638 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2639 );
2640 let capabilities = &language_server.capabilities();
2641 let tab_size = cx.update(|cx| {
2642 let language_name = buffer.read(cx).language().map(|language| language.name());
2643 cx.global::<Settings>().tab_size(language_name.as_deref())
2644 });
2645 let lsp_edits = if capabilities
2646 .document_formatting_provider
2647 .as_ref()
2648 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2649 {
2650 language_server
2651 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2652 text_document,
2653 options: lsp::FormattingOptions {
2654 tab_size,
2655 insert_spaces: true,
2656 insert_final_newline: Some(true),
2657 ..Default::default()
2658 },
2659 work_done_progress_params: Default::default(),
2660 })
2661 .await?
2662 } else if capabilities
2663 .document_range_formatting_provider
2664 .as_ref()
2665 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2666 {
2667 let buffer_start = lsp::Position::new(0, 0);
2668 let buffer_end =
2669 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2670 language_server
2671 .request::<lsp::request::RangeFormatting>(
2672 lsp::DocumentRangeFormattingParams {
2673 text_document,
2674 range: lsp::Range::new(buffer_start, buffer_end),
2675 options: lsp::FormattingOptions {
2676 tab_size: 4,
2677 insert_spaces: true,
2678 insert_final_newline: Some(true),
2679 ..Default::default()
2680 },
2681 work_done_progress_params: Default::default(),
2682 },
2683 )
2684 .await?
2685 } else {
2686 continue;
2687 };
2688
2689 if let Some(lsp_edits) = lsp_edits {
2690 let edits = this
2691 .update(&mut cx, |this, cx| {
2692 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2693 })
2694 .await?;
2695 buffer.update(&mut cx, |buffer, cx| {
2696 buffer.finalize_last_transaction();
2697 buffer.start_transaction();
2698 for (range, text) in edits {
2699 buffer.edit([(range, text)], cx);
2700 }
2701 if buffer.end_transaction(cx).is_some() {
2702 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2703 if !push_to_history {
2704 buffer.forget_transaction(transaction.id);
2705 }
2706 project_transaction.0.insert(cx.handle(), transaction);
2707 }
2708 });
2709 }
2710 }
2711
2712 Ok(project_transaction)
2713 })
2714 }
2715
2716 pub fn definition<T: ToPointUtf16>(
2717 &self,
2718 buffer: &ModelHandle<Buffer>,
2719 position: T,
2720 cx: &mut ModelContext<Self>,
2721 ) -> Task<Result<Vec<Location>>> {
2722 let position = position.to_point_utf16(buffer.read(cx));
2723 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2724 }
2725
2726 pub fn references<T: ToPointUtf16>(
2727 &self,
2728 buffer: &ModelHandle<Buffer>,
2729 position: T,
2730 cx: &mut ModelContext<Self>,
2731 ) -> Task<Result<Vec<Location>>> {
2732 let position = position.to_point_utf16(buffer.read(cx));
2733 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2734 }
2735
2736 pub fn document_highlights<T: ToPointUtf16>(
2737 &self,
2738 buffer: &ModelHandle<Buffer>,
2739 position: T,
2740 cx: &mut ModelContext<Self>,
2741 ) -> Task<Result<Vec<DocumentHighlight>>> {
2742 let position = position.to_point_utf16(buffer.read(cx));
2743
2744 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2745 }
2746
2747 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2748 if self.is_local() {
2749 let mut requests = Vec::new();
2750 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2751 let worktree_id = *worktree_id;
2752 if let Some(worktree) = self
2753 .worktree_for_id(worktree_id, cx)
2754 .and_then(|worktree| worktree.read(cx).as_local())
2755 {
2756 let lsp_adapter = lsp_adapter.clone();
2757 let worktree_abs_path = worktree.abs_path().clone();
2758 requests.push(
2759 language_server
2760 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2761 query: query.to_string(),
2762 ..Default::default()
2763 })
2764 .log_err()
2765 .map(move |response| {
2766 (
2767 lsp_adapter,
2768 worktree_id,
2769 worktree_abs_path,
2770 response.unwrap_or_default(),
2771 )
2772 }),
2773 );
2774 }
2775 }
2776
2777 cx.spawn_weak(|this, cx| async move {
2778 let responses = futures::future::join_all(requests).await;
2779 let this = if let Some(this) = this.upgrade(&cx) {
2780 this
2781 } else {
2782 return Ok(Default::default());
2783 };
2784 this.read_with(&cx, |this, cx| {
2785 let mut symbols = Vec::new();
2786 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2787 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2788 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2789 let mut worktree_id = source_worktree_id;
2790 let path;
2791 if let Some((worktree, rel_path)) =
2792 this.find_local_worktree(&abs_path, cx)
2793 {
2794 worktree_id = worktree.read(cx).id();
2795 path = rel_path;
2796 } else {
2797 path = relativize_path(&worktree_abs_path, &abs_path);
2798 }
2799
2800 let label = this
2801 .languages
2802 .select_language(&path)
2803 .and_then(|language| {
2804 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2805 })
2806 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2807 let signature = this.symbol_signature(worktree_id, &path);
2808
2809 Some(Symbol {
2810 source_worktree_id,
2811 worktree_id,
2812 language_server_name: adapter.name(),
2813 name: lsp_symbol.name,
2814 kind: lsp_symbol.kind,
2815 label,
2816 path,
2817 range: range_from_lsp(lsp_symbol.location.range),
2818 signature,
2819 })
2820 }));
2821 }
2822 Ok(symbols)
2823 })
2824 })
2825 } else if let Some(project_id) = self.remote_id() {
2826 let request = self.client.request(proto::GetProjectSymbols {
2827 project_id,
2828 query: query.to_string(),
2829 });
2830 cx.spawn_weak(|this, cx| async move {
2831 let response = request.await?;
2832 let mut symbols = Vec::new();
2833 if let Some(this) = this.upgrade(&cx) {
2834 this.read_with(&cx, |this, _| {
2835 symbols.extend(
2836 response
2837 .symbols
2838 .into_iter()
2839 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2840 );
2841 })
2842 }
2843 Ok(symbols)
2844 })
2845 } else {
2846 Task::ready(Ok(Default::default()))
2847 }
2848 }
2849
2850 pub fn open_buffer_for_symbol(
2851 &mut self,
2852 symbol: &Symbol,
2853 cx: &mut ModelContext<Self>,
2854 ) -> Task<Result<ModelHandle<Buffer>>> {
2855 if self.is_local() {
2856 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2857 symbol.source_worktree_id,
2858 symbol.language_server_name.clone(),
2859 )) {
2860 server.clone()
2861 } else {
2862 return Task::ready(Err(anyhow!(
2863 "language server for worktree and language not found"
2864 )));
2865 };
2866
2867 let worktree_abs_path = if let Some(worktree_abs_path) = self
2868 .worktree_for_id(symbol.worktree_id, cx)
2869 .and_then(|worktree| worktree.read(cx).as_local())
2870 .map(|local_worktree| local_worktree.abs_path())
2871 {
2872 worktree_abs_path
2873 } else {
2874 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2875 };
2876 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2877 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2878 uri
2879 } else {
2880 return Task::ready(Err(anyhow!("invalid symbol path")));
2881 };
2882
2883 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2884 } else if let Some(project_id) = self.remote_id() {
2885 let request = self.client.request(proto::OpenBufferForSymbol {
2886 project_id,
2887 symbol: Some(serialize_symbol(symbol)),
2888 });
2889 cx.spawn(|this, mut cx| async move {
2890 let response = request.await?;
2891 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2892 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2893 .await
2894 })
2895 } else {
2896 Task::ready(Err(anyhow!("project does not have a remote id")))
2897 }
2898 }
2899
2900 pub fn hover<T: ToPointUtf16>(
2901 &self,
2902 buffer: &ModelHandle<Buffer>,
2903 position: T,
2904 cx: &mut ModelContext<Self>,
2905 ) -> Task<Result<Option<Hover>>> {
2906 // TODO: proper return type
2907 let position = position.to_point_utf16(buffer.read(cx));
2908 self.request_lsp(buffer.clone(), GetHover { position }, cx)
2909 }
2910
2911 pub fn completions<T: ToPointUtf16>(
2912 &self,
2913 source_buffer_handle: &ModelHandle<Buffer>,
2914 position: T,
2915 cx: &mut ModelContext<Self>,
2916 ) -> Task<Result<Vec<Completion>>> {
2917 let source_buffer_handle = source_buffer_handle.clone();
2918 let source_buffer = source_buffer_handle.read(cx);
2919 let buffer_id = source_buffer.remote_id();
2920 let language = source_buffer.language().cloned();
2921 let worktree;
2922 let buffer_abs_path;
2923 if let Some(file) = File::from_dyn(source_buffer.file()) {
2924 worktree = file.worktree.clone();
2925 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2926 } else {
2927 return Task::ready(Ok(Default::default()));
2928 };
2929
2930 let position = position.to_point_utf16(source_buffer);
2931 let anchor = source_buffer.anchor_after(position);
2932
2933 if worktree.read(cx).as_local().is_some() {
2934 let buffer_abs_path = buffer_abs_path.unwrap();
2935 let (_, lang_server) =
2936 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2937 server.clone()
2938 } else {
2939 return Task::ready(Ok(Default::default()));
2940 };
2941
2942 cx.spawn(|_, cx| async move {
2943 let completions = lang_server
2944 .request::<lsp::request::Completion>(lsp::CompletionParams {
2945 text_document_position: lsp::TextDocumentPositionParams::new(
2946 lsp::TextDocumentIdentifier::new(
2947 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2948 ),
2949 point_to_lsp(position),
2950 ),
2951 context: Default::default(),
2952 work_done_progress_params: Default::default(),
2953 partial_result_params: Default::default(),
2954 })
2955 .await
2956 .context("lsp completion request failed")?;
2957
2958 let completions = if let Some(completions) = completions {
2959 match completions {
2960 lsp::CompletionResponse::Array(completions) => completions,
2961 lsp::CompletionResponse::List(list) => list.items,
2962 }
2963 } else {
2964 Default::default()
2965 };
2966
2967 source_buffer_handle.read_with(&cx, |this, _| {
2968 let snapshot = this.snapshot();
2969 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2970 let mut range_for_token = None;
2971 Ok(completions
2972 .into_iter()
2973 .filter_map(|lsp_completion| {
2974 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2975 // If the language server provides a range to overwrite, then
2976 // check that the range is valid.
2977 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2978 let range = range_from_lsp(edit.range);
2979 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2980 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2981 if start != range.start || end != range.end {
2982 log::info!("completion out of expected range");
2983 return None;
2984 }
2985 (
2986 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2987 edit.new_text.clone(),
2988 )
2989 }
2990 // If the language server does not provide a range, then infer
2991 // the range based on the syntax tree.
2992 None => {
2993 if position != clipped_position {
2994 log::info!("completion out of expected range");
2995 return None;
2996 }
2997 let Range { start, end } = range_for_token
2998 .get_or_insert_with(|| {
2999 let offset = position.to_offset(&snapshot);
3000 snapshot
3001 .range_for_word_token_at(offset)
3002 .unwrap_or_else(|| offset..offset)
3003 })
3004 .clone();
3005 let text = lsp_completion
3006 .insert_text
3007 .as_ref()
3008 .unwrap_or(&lsp_completion.label)
3009 .clone();
3010 (
3011 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3012 text.clone(),
3013 )
3014 }
3015 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3016 log::info!("unsupported insert/replace completion");
3017 return None;
3018 }
3019 };
3020
3021 Some(Completion {
3022 old_range,
3023 new_text,
3024 label: language
3025 .as_ref()
3026 .and_then(|l| l.label_for_completion(&lsp_completion))
3027 .unwrap_or_else(|| {
3028 CodeLabel::plain(
3029 lsp_completion.label.clone(),
3030 lsp_completion.filter_text.as_deref(),
3031 )
3032 }),
3033 lsp_completion,
3034 })
3035 })
3036 .collect())
3037 })
3038 })
3039 } else if let Some(project_id) = self.remote_id() {
3040 let rpc = self.client.clone();
3041 let message = proto::GetCompletions {
3042 project_id,
3043 buffer_id,
3044 position: Some(language::proto::serialize_anchor(&anchor)),
3045 version: serialize_version(&source_buffer.version()),
3046 };
3047 cx.spawn_weak(|_, mut cx| async move {
3048 let response = rpc.request(message).await?;
3049
3050 source_buffer_handle
3051 .update(&mut cx, |buffer, _| {
3052 buffer.wait_for_version(deserialize_version(response.version))
3053 })
3054 .await;
3055
3056 response
3057 .completions
3058 .into_iter()
3059 .map(|completion| {
3060 language::proto::deserialize_completion(completion, language.as_ref())
3061 })
3062 .collect()
3063 })
3064 } else {
3065 Task::ready(Ok(Default::default()))
3066 }
3067 }
3068
3069 pub fn apply_additional_edits_for_completion(
3070 &self,
3071 buffer_handle: ModelHandle<Buffer>,
3072 completion: Completion,
3073 push_to_history: bool,
3074 cx: &mut ModelContext<Self>,
3075 ) -> Task<Result<Option<Transaction>>> {
3076 let buffer = buffer_handle.read(cx);
3077 let buffer_id = buffer.remote_id();
3078
3079 if self.is_local() {
3080 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3081 {
3082 server.clone()
3083 } else {
3084 return Task::ready(Ok(Default::default()));
3085 };
3086
3087 cx.spawn(|this, mut cx| async move {
3088 let resolved_completion = lang_server
3089 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3090 .await?;
3091 if let Some(edits) = resolved_completion.additional_text_edits {
3092 let edits = this
3093 .update(&mut cx, |this, cx| {
3094 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3095 })
3096 .await?;
3097 buffer_handle.update(&mut cx, |buffer, cx| {
3098 buffer.finalize_last_transaction();
3099 buffer.start_transaction();
3100 for (range, text) in edits {
3101 buffer.edit([(range, text)], cx);
3102 }
3103 let transaction = if buffer.end_transaction(cx).is_some() {
3104 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3105 if !push_to_history {
3106 buffer.forget_transaction(transaction.id);
3107 }
3108 Some(transaction)
3109 } else {
3110 None
3111 };
3112 Ok(transaction)
3113 })
3114 } else {
3115 Ok(None)
3116 }
3117 })
3118 } else if let Some(project_id) = self.remote_id() {
3119 let client = self.client.clone();
3120 cx.spawn(|_, mut cx| async move {
3121 let response = client
3122 .request(proto::ApplyCompletionAdditionalEdits {
3123 project_id,
3124 buffer_id,
3125 completion: Some(language::proto::serialize_completion(&completion)),
3126 })
3127 .await?;
3128
3129 if let Some(transaction) = response.transaction {
3130 let transaction = language::proto::deserialize_transaction(transaction)?;
3131 buffer_handle
3132 .update(&mut cx, |buffer, _| {
3133 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3134 })
3135 .await;
3136 if push_to_history {
3137 buffer_handle.update(&mut cx, |buffer, _| {
3138 buffer.push_transaction(transaction.clone(), Instant::now());
3139 });
3140 }
3141 Ok(Some(transaction))
3142 } else {
3143 Ok(None)
3144 }
3145 })
3146 } else {
3147 Task::ready(Err(anyhow!("project does not have a remote id")))
3148 }
3149 }
3150
3151 pub fn code_actions<T: Clone + ToOffset>(
3152 &self,
3153 buffer_handle: &ModelHandle<Buffer>,
3154 range: Range<T>,
3155 cx: &mut ModelContext<Self>,
3156 ) -> Task<Result<Vec<CodeAction>>> {
3157 let buffer_handle = buffer_handle.clone();
3158 let buffer = buffer_handle.read(cx);
3159 let snapshot = buffer.snapshot();
3160 let relevant_diagnostics = snapshot
3161 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3162 .map(|entry| entry.to_lsp_diagnostic_stub())
3163 .collect();
3164 let buffer_id = buffer.remote_id();
3165 let worktree;
3166 let buffer_abs_path;
3167 if let Some(file) = File::from_dyn(buffer.file()) {
3168 worktree = file.worktree.clone();
3169 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3170 } else {
3171 return Task::ready(Ok(Default::default()));
3172 };
3173 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3174
3175 if worktree.read(cx).as_local().is_some() {
3176 let buffer_abs_path = buffer_abs_path.unwrap();
3177 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3178 {
3179 server.clone()
3180 } else {
3181 return Task::ready(Ok(Default::default()));
3182 };
3183
3184 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3185 cx.foreground().spawn(async move {
3186 if !lang_server.capabilities().code_action_provider.is_some() {
3187 return Ok(Default::default());
3188 }
3189
3190 Ok(lang_server
3191 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3192 text_document: lsp::TextDocumentIdentifier::new(
3193 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3194 ),
3195 range: lsp_range,
3196 work_done_progress_params: Default::default(),
3197 partial_result_params: Default::default(),
3198 context: lsp::CodeActionContext {
3199 diagnostics: relevant_diagnostics,
3200 only: Some(vec![
3201 lsp::CodeActionKind::QUICKFIX,
3202 lsp::CodeActionKind::REFACTOR,
3203 lsp::CodeActionKind::REFACTOR_EXTRACT,
3204 lsp::CodeActionKind::SOURCE,
3205 ]),
3206 },
3207 })
3208 .await?
3209 .unwrap_or_default()
3210 .into_iter()
3211 .filter_map(|entry| {
3212 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3213 Some(CodeAction {
3214 range: range.clone(),
3215 lsp_action,
3216 })
3217 } else {
3218 None
3219 }
3220 })
3221 .collect())
3222 })
3223 } else if let Some(project_id) = self.remote_id() {
3224 let rpc = self.client.clone();
3225 let version = buffer.version();
3226 cx.spawn_weak(|_, mut cx| async move {
3227 let response = rpc
3228 .request(proto::GetCodeActions {
3229 project_id,
3230 buffer_id,
3231 start: Some(language::proto::serialize_anchor(&range.start)),
3232 end: Some(language::proto::serialize_anchor(&range.end)),
3233 version: serialize_version(&version),
3234 })
3235 .await?;
3236
3237 buffer_handle
3238 .update(&mut cx, |buffer, _| {
3239 buffer.wait_for_version(deserialize_version(response.version))
3240 })
3241 .await;
3242
3243 response
3244 .actions
3245 .into_iter()
3246 .map(language::proto::deserialize_code_action)
3247 .collect()
3248 })
3249 } else {
3250 Task::ready(Ok(Default::default()))
3251 }
3252 }
3253
3254 pub fn apply_code_action(
3255 &self,
3256 buffer_handle: ModelHandle<Buffer>,
3257 mut action: CodeAction,
3258 push_to_history: bool,
3259 cx: &mut ModelContext<Self>,
3260 ) -> Task<Result<ProjectTransaction>> {
3261 if self.is_local() {
3262 let buffer = buffer_handle.read(cx);
3263 let (lsp_adapter, lang_server) =
3264 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3265 server.clone()
3266 } else {
3267 return Task::ready(Ok(Default::default()));
3268 };
3269 let range = action.range.to_point_utf16(buffer);
3270
3271 cx.spawn(|this, mut cx| async move {
3272 if let Some(lsp_range) = action
3273 .lsp_action
3274 .data
3275 .as_mut()
3276 .and_then(|d| d.get_mut("codeActionParams"))
3277 .and_then(|d| d.get_mut("range"))
3278 {
3279 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3280 action.lsp_action = lang_server
3281 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3282 .await?;
3283 } else {
3284 let actions = this
3285 .update(&mut cx, |this, cx| {
3286 this.code_actions(&buffer_handle, action.range, cx)
3287 })
3288 .await?;
3289 action.lsp_action = actions
3290 .into_iter()
3291 .find(|a| a.lsp_action.title == action.lsp_action.title)
3292 .ok_or_else(|| anyhow!("code action is outdated"))?
3293 .lsp_action;
3294 }
3295
3296 if let Some(edit) = action.lsp_action.edit {
3297 Self::deserialize_workspace_edit(
3298 this,
3299 edit,
3300 push_to_history,
3301 lsp_adapter,
3302 lang_server,
3303 &mut cx,
3304 )
3305 .await
3306 } else if let Some(command) = action.lsp_action.command {
3307 this.update(&mut cx, |this, _| {
3308 this.last_workspace_edits_by_language_server
3309 .remove(&lang_server.server_id());
3310 });
3311 lang_server
3312 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3313 command: command.command,
3314 arguments: command.arguments.unwrap_or_default(),
3315 ..Default::default()
3316 })
3317 .await?;
3318 Ok(this.update(&mut cx, |this, _| {
3319 this.last_workspace_edits_by_language_server
3320 .remove(&lang_server.server_id())
3321 .unwrap_or_default()
3322 }))
3323 } else {
3324 Ok(ProjectTransaction::default())
3325 }
3326 })
3327 } else if let Some(project_id) = self.remote_id() {
3328 let client = self.client.clone();
3329 let request = proto::ApplyCodeAction {
3330 project_id,
3331 buffer_id: buffer_handle.read(cx).remote_id(),
3332 action: Some(language::proto::serialize_code_action(&action)),
3333 };
3334 cx.spawn(|this, mut cx| async move {
3335 let response = client
3336 .request(request)
3337 .await?
3338 .transaction
3339 .ok_or_else(|| anyhow!("missing transaction"))?;
3340 this.update(&mut cx, |this, cx| {
3341 this.deserialize_project_transaction(response, push_to_history, cx)
3342 })
3343 .await
3344 })
3345 } else {
3346 Task::ready(Err(anyhow!("project does not have a remote id")))
3347 }
3348 }
3349
3350 async fn deserialize_workspace_edit(
3351 this: ModelHandle<Self>,
3352 edit: lsp::WorkspaceEdit,
3353 push_to_history: bool,
3354 lsp_adapter: Arc<dyn LspAdapter>,
3355 language_server: Arc<LanguageServer>,
3356 cx: &mut AsyncAppContext,
3357 ) -> Result<ProjectTransaction> {
3358 let fs = this.read_with(cx, |this, _| this.fs.clone());
3359 let mut operations = Vec::new();
3360 if let Some(document_changes) = edit.document_changes {
3361 match document_changes {
3362 lsp::DocumentChanges::Edits(edits) => {
3363 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3364 }
3365 lsp::DocumentChanges::Operations(ops) => operations = ops,
3366 }
3367 } else if let Some(changes) = edit.changes {
3368 operations.extend(changes.into_iter().map(|(uri, edits)| {
3369 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3370 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3371 uri,
3372 version: None,
3373 },
3374 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3375 })
3376 }));
3377 }
3378
3379 let mut project_transaction = ProjectTransaction::default();
3380 for operation in operations {
3381 match operation {
3382 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3383 let abs_path = op
3384 .uri
3385 .to_file_path()
3386 .map_err(|_| anyhow!("can't convert URI to path"))?;
3387
3388 if let Some(parent_path) = abs_path.parent() {
3389 fs.create_dir(parent_path).await?;
3390 }
3391 if abs_path.ends_with("/") {
3392 fs.create_dir(&abs_path).await?;
3393 } else {
3394 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3395 .await?;
3396 }
3397 }
3398 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3399 let source_abs_path = op
3400 .old_uri
3401 .to_file_path()
3402 .map_err(|_| anyhow!("can't convert URI to path"))?;
3403 let target_abs_path = op
3404 .new_uri
3405 .to_file_path()
3406 .map_err(|_| anyhow!("can't convert URI to path"))?;
3407 fs.rename(
3408 &source_abs_path,
3409 &target_abs_path,
3410 op.options.map(Into::into).unwrap_or_default(),
3411 )
3412 .await?;
3413 }
3414 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3415 let abs_path = op
3416 .uri
3417 .to_file_path()
3418 .map_err(|_| anyhow!("can't convert URI to path"))?;
3419 let options = op.options.map(Into::into).unwrap_or_default();
3420 if abs_path.ends_with("/") {
3421 fs.remove_dir(&abs_path, options).await?;
3422 } else {
3423 fs.remove_file(&abs_path, options).await?;
3424 }
3425 }
3426 lsp::DocumentChangeOperation::Edit(op) => {
3427 let buffer_to_edit = this
3428 .update(cx, |this, cx| {
3429 this.open_local_buffer_via_lsp(
3430 op.text_document.uri,
3431 lsp_adapter.clone(),
3432 language_server.clone(),
3433 cx,
3434 )
3435 })
3436 .await?;
3437
3438 let edits = this
3439 .update(cx, |this, cx| {
3440 let edits = op.edits.into_iter().map(|edit| match edit {
3441 lsp::OneOf::Left(edit) => edit,
3442 lsp::OneOf::Right(edit) => edit.text_edit,
3443 });
3444 this.edits_from_lsp(
3445 &buffer_to_edit,
3446 edits,
3447 op.text_document.version,
3448 cx,
3449 )
3450 })
3451 .await?;
3452
3453 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3454 buffer.finalize_last_transaction();
3455 buffer.start_transaction();
3456 for (range, text) in edits {
3457 buffer.edit([(range, text)], cx);
3458 }
3459 let transaction = if buffer.end_transaction(cx).is_some() {
3460 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3461 if !push_to_history {
3462 buffer.forget_transaction(transaction.id);
3463 }
3464 Some(transaction)
3465 } else {
3466 None
3467 };
3468
3469 transaction
3470 });
3471 if let Some(transaction) = transaction {
3472 project_transaction.0.insert(buffer_to_edit, transaction);
3473 }
3474 }
3475 }
3476 }
3477
3478 Ok(project_transaction)
3479 }
3480
3481 pub fn prepare_rename<T: ToPointUtf16>(
3482 &self,
3483 buffer: ModelHandle<Buffer>,
3484 position: T,
3485 cx: &mut ModelContext<Self>,
3486 ) -> Task<Result<Option<Range<Anchor>>>> {
3487 let position = position.to_point_utf16(buffer.read(cx));
3488 self.request_lsp(buffer, PrepareRename { position }, cx)
3489 }
3490
3491 pub fn perform_rename<T: ToPointUtf16>(
3492 &self,
3493 buffer: ModelHandle<Buffer>,
3494 position: T,
3495 new_name: String,
3496 push_to_history: bool,
3497 cx: &mut ModelContext<Self>,
3498 ) -> Task<Result<ProjectTransaction>> {
3499 let position = position.to_point_utf16(buffer.read(cx));
3500 self.request_lsp(
3501 buffer,
3502 PerformRename {
3503 position,
3504 new_name,
3505 push_to_history,
3506 },
3507 cx,
3508 )
3509 }
3510
3511 pub fn search(
3512 &self,
3513 query: SearchQuery,
3514 cx: &mut ModelContext<Self>,
3515 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3516 if self.is_local() {
3517 let snapshots = self
3518 .visible_worktrees(cx)
3519 .filter_map(|tree| {
3520 let tree = tree.read(cx).as_local()?;
3521 Some(tree.snapshot())
3522 })
3523 .collect::<Vec<_>>();
3524
3525 let background = cx.background().clone();
3526 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3527 if path_count == 0 {
3528 return Task::ready(Ok(Default::default()));
3529 }
3530 let workers = background.num_cpus().min(path_count);
3531 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3532 cx.background()
3533 .spawn({
3534 let fs = self.fs.clone();
3535 let background = cx.background().clone();
3536 let query = query.clone();
3537 async move {
3538 let fs = &fs;
3539 let query = &query;
3540 let matching_paths_tx = &matching_paths_tx;
3541 let paths_per_worker = (path_count + workers - 1) / workers;
3542 let snapshots = &snapshots;
3543 background
3544 .scoped(|scope| {
3545 for worker_ix in 0..workers {
3546 let worker_start_ix = worker_ix * paths_per_worker;
3547 let worker_end_ix = worker_start_ix + paths_per_worker;
3548 scope.spawn(async move {
3549 let mut snapshot_start_ix = 0;
3550 let mut abs_path = PathBuf::new();
3551 for snapshot in snapshots {
3552 let snapshot_end_ix =
3553 snapshot_start_ix + snapshot.visible_file_count();
3554 if worker_end_ix <= snapshot_start_ix {
3555 break;
3556 } else if worker_start_ix > snapshot_end_ix {
3557 snapshot_start_ix = snapshot_end_ix;
3558 continue;
3559 } else {
3560 let start_in_snapshot = worker_start_ix
3561 .saturating_sub(snapshot_start_ix);
3562 let end_in_snapshot =
3563 cmp::min(worker_end_ix, snapshot_end_ix)
3564 - snapshot_start_ix;
3565
3566 for entry in snapshot
3567 .files(false, start_in_snapshot)
3568 .take(end_in_snapshot - start_in_snapshot)
3569 {
3570 if matching_paths_tx.is_closed() {
3571 break;
3572 }
3573
3574 abs_path.clear();
3575 abs_path.push(&snapshot.abs_path());
3576 abs_path.push(&entry.path);
3577 let matches = if let Some(file) =
3578 fs.open_sync(&abs_path).await.log_err()
3579 {
3580 query.detect(file).unwrap_or(false)
3581 } else {
3582 false
3583 };
3584
3585 if matches {
3586 let project_path =
3587 (snapshot.id(), entry.path.clone());
3588 if matching_paths_tx
3589 .send(project_path)
3590 .await
3591 .is_err()
3592 {
3593 break;
3594 }
3595 }
3596 }
3597
3598 snapshot_start_ix = snapshot_end_ix;
3599 }
3600 }
3601 });
3602 }
3603 })
3604 .await;
3605 }
3606 })
3607 .detach();
3608
3609 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3610 let open_buffers = self
3611 .opened_buffers
3612 .values()
3613 .filter_map(|b| b.upgrade(cx))
3614 .collect::<HashSet<_>>();
3615 cx.spawn(|this, cx| async move {
3616 for buffer in &open_buffers {
3617 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3618 buffers_tx.send((buffer.clone(), snapshot)).await?;
3619 }
3620
3621 let open_buffers = Rc::new(RefCell::new(open_buffers));
3622 while let Some(project_path) = matching_paths_rx.next().await {
3623 if buffers_tx.is_closed() {
3624 break;
3625 }
3626
3627 let this = this.clone();
3628 let open_buffers = open_buffers.clone();
3629 let buffers_tx = buffers_tx.clone();
3630 cx.spawn(|mut cx| async move {
3631 if let Some(buffer) = this
3632 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3633 .await
3634 .log_err()
3635 {
3636 if open_buffers.borrow_mut().insert(buffer.clone()) {
3637 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3638 buffers_tx.send((buffer, snapshot)).await?;
3639 }
3640 }
3641
3642 Ok::<_, anyhow::Error>(())
3643 })
3644 .detach();
3645 }
3646
3647 Ok::<_, anyhow::Error>(())
3648 })
3649 .detach_and_log_err(cx);
3650
3651 let background = cx.background().clone();
3652 cx.background().spawn(async move {
3653 let query = &query;
3654 let mut matched_buffers = Vec::new();
3655 for _ in 0..workers {
3656 matched_buffers.push(HashMap::default());
3657 }
3658 background
3659 .scoped(|scope| {
3660 for worker_matched_buffers in matched_buffers.iter_mut() {
3661 let mut buffers_rx = buffers_rx.clone();
3662 scope.spawn(async move {
3663 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3664 let buffer_matches = query
3665 .search(snapshot.as_rope())
3666 .await
3667 .iter()
3668 .map(|range| {
3669 snapshot.anchor_before(range.start)
3670 ..snapshot.anchor_after(range.end)
3671 })
3672 .collect::<Vec<_>>();
3673 if !buffer_matches.is_empty() {
3674 worker_matched_buffers
3675 .insert(buffer.clone(), buffer_matches);
3676 }
3677 }
3678 });
3679 }
3680 })
3681 .await;
3682 Ok(matched_buffers.into_iter().flatten().collect())
3683 })
3684 } else if let Some(project_id) = self.remote_id() {
3685 let request = self.client.request(query.to_proto(project_id));
3686 cx.spawn(|this, mut cx| async move {
3687 let response = request.await?;
3688 let mut result = HashMap::default();
3689 for location in response.locations {
3690 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3691 let target_buffer = this
3692 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3693 .await?;
3694 let start = location
3695 .start
3696 .and_then(deserialize_anchor)
3697 .ok_or_else(|| anyhow!("missing target start"))?;
3698 let end = location
3699 .end
3700 .and_then(deserialize_anchor)
3701 .ok_or_else(|| anyhow!("missing target end"))?;
3702 result
3703 .entry(target_buffer)
3704 .or_insert(Vec::new())
3705 .push(start..end)
3706 }
3707 Ok(result)
3708 })
3709 } else {
3710 Task::ready(Ok(Default::default()))
3711 }
3712 }
3713
3714 fn request_lsp<R: LspCommand>(
3715 &self,
3716 buffer_handle: ModelHandle<Buffer>,
3717 request: R,
3718 cx: &mut ModelContext<Self>,
3719 ) -> Task<Result<R::Response>>
3720 where
3721 <R::LspRequest as lsp::request::Request>::Result: Send,
3722 {
3723 let buffer = buffer_handle.read(cx);
3724 if self.is_local() {
3725 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3726 if let Some((file, (_, language_server))) =
3727 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3728 {
3729 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3730 return cx.spawn(|this, cx| async move {
3731 if !request.check_capabilities(&language_server.capabilities()) {
3732 return Ok(Default::default());
3733 }
3734
3735 let response = language_server
3736 .request::<R::LspRequest>(lsp_params)
3737 .await
3738 .context("lsp request failed")?;
3739 request
3740 .response_from_lsp(response, this, buffer_handle, cx)
3741 .await
3742 });
3743 }
3744 } else if let Some(project_id) = self.remote_id() {
3745 let rpc = self.client.clone();
3746 let message = request.to_proto(project_id, buffer);
3747 return cx.spawn(|this, cx| async move {
3748 let response = rpc.request(message).await?;
3749 request
3750 .response_from_proto(response, this, buffer_handle, cx)
3751 .await
3752 });
3753 }
3754 Task::ready(Ok(Default::default()))
3755 }
3756
3757 pub fn find_or_create_local_worktree(
3758 &mut self,
3759 abs_path: impl AsRef<Path>,
3760 visible: bool,
3761 cx: &mut ModelContext<Self>,
3762 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3763 let abs_path = abs_path.as_ref();
3764 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3765 Task::ready(Ok((tree.clone(), relative_path.into())))
3766 } else {
3767 let worktree = self.create_local_worktree(abs_path, visible, cx);
3768 cx.foreground()
3769 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3770 }
3771 }
3772
3773 pub fn find_local_worktree(
3774 &self,
3775 abs_path: &Path,
3776 cx: &AppContext,
3777 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3778 for tree in self.worktrees(cx) {
3779 if let Some(relative_path) = tree
3780 .read(cx)
3781 .as_local()
3782 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3783 {
3784 return Some((tree.clone(), relative_path.into()));
3785 }
3786 }
3787 None
3788 }
3789
3790 pub fn is_shared(&self) -> bool {
3791 match &self.client_state {
3792 ProjectClientState::Local { is_shared, .. } => *is_shared,
3793 ProjectClientState::Remote { .. } => false,
3794 }
3795 }
3796
3797 fn create_local_worktree(
3798 &mut self,
3799 abs_path: impl AsRef<Path>,
3800 visible: bool,
3801 cx: &mut ModelContext<Self>,
3802 ) -> Task<Result<ModelHandle<Worktree>>> {
3803 let fs = self.fs.clone();
3804 let client = self.client.clone();
3805 let next_entry_id = self.next_entry_id.clone();
3806 let path: Arc<Path> = abs_path.as_ref().into();
3807 let task = self
3808 .loading_local_worktrees
3809 .entry(path.clone())
3810 .or_insert_with(|| {
3811 cx.spawn(|project, mut cx| {
3812 async move {
3813 let worktree = Worktree::local(
3814 client.clone(),
3815 path.clone(),
3816 visible,
3817 fs,
3818 next_entry_id,
3819 &mut cx,
3820 )
3821 .await;
3822 project.update(&mut cx, |project, _| {
3823 project.loading_local_worktrees.remove(&path);
3824 });
3825 let worktree = worktree?;
3826
3827 let project_id = project.update(&mut cx, |project, cx| {
3828 project.add_worktree(&worktree, cx);
3829 project.shared_remote_id()
3830 });
3831
3832 if let Some(project_id) = project_id {
3833 worktree
3834 .update(&mut cx, |worktree, cx| {
3835 worktree.as_local_mut().unwrap().share(project_id, cx)
3836 })
3837 .await
3838 .log_err();
3839 }
3840
3841 Ok(worktree)
3842 }
3843 .map_err(|err| Arc::new(err))
3844 })
3845 .shared()
3846 })
3847 .clone();
3848 cx.foreground().spawn(async move {
3849 match task.await {
3850 Ok(worktree) => Ok(worktree),
3851 Err(err) => Err(anyhow!("{}", err)),
3852 }
3853 })
3854 }
3855
3856 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3857 self.worktrees.retain(|worktree| {
3858 if let Some(worktree) = worktree.upgrade(cx) {
3859 let id = worktree.read(cx).id();
3860 if id == id_to_remove {
3861 cx.emit(Event::WorktreeRemoved(id));
3862 false
3863 } else {
3864 true
3865 }
3866 } else {
3867 false
3868 }
3869 });
3870 self.metadata_changed(true, cx);
3871 cx.notify();
3872 }
3873
3874 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3875 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3876 if worktree.read(cx).is_local() {
3877 cx.subscribe(&worktree, |this, worktree, _, cx| {
3878 this.update_local_worktree_buffers(worktree, cx);
3879 })
3880 .detach();
3881 }
3882
3883 let push_strong_handle = {
3884 let worktree = worktree.read(cx);
3885 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3886 };
3887 if push_strong_handle {
3888 self.worktrees
3889 .push(WorktreeHandle::Strong(worktree.clone()));
3890 } else {
3891 cx.observe_release(&worktree, |this, _, cx| {
3892 this.worktrees
3893 .retain(|worktree| worktree.upgrade(cx).is_some());
3894 cx.notify();
3895 })
3896 .detach();
3897 self.worktrees
3898 .push(WorktreeHandle::Weak(worktree.downgrade()));
3899 }
3900 self.metadata_changed(true, cx);
3901 cx.emit(Event::WorktreeAdded);
3902 cx.notify();
3903 }
3904
3905 fn update_local_worktree_buffers(
3906 &mut self,
3907 worktree_handle: ModelHandle<Worktree>,
3908 cx: &mut ModelContext<Self>,
3909 ) {
3910 let snapshot = worktree_handle.read(cx).snapshot();
3911 let mut buffers_to_delete = Vec::new();
3912 let mut renamed_buffers = Vec::new();
3913 for (buffer_id, buffer) in &self.opened_buffers {
3914 if let Some(buffer) = buffer.upgrade(cx) {
3915 buffer.update(cx, |buffer, cx| {
3916 if let Some(old_file) = File::from_dyn(buffer.file()) {
3917 if old_file.worktree != worktree_handle {
3918 return;
3919 }
3920
3921 let new_file = if let Some(entry) = old_file
3922 .entry_id
3923 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3924 {
3925 File {
3926 is_local: true,
3927 entry_id: Some(entry.id),
3928 mtime: entry.mtime,
3929 path: entry.path.clone(),
3930 worktree: worktree_handle.clone(),
3931 }
3932 } else if let Some(entry) =
3933 snapshot.entry_for_path(old_file.path().as_ref())
3934 {
3935 File {
3936 is_local: true,
3937 entry_id: Some(entry.id),
3938 mtime: entry.mtime,
3939 path: entry.path.clone(),
3940 worktree: worktree_handle.clone(),
3941 }
3942 } else {
3943 File {
3944 is_local: true,
3945 entry_id: None,
3946 path: old_file.path().clone(),
3947 mtime: old_file.mtime(),
3948 worktree: worktree_handle.clone(),
3949 }
3950 };
3951
3952 let old_path = old_file.abs_path(cx);
3953 if new_file.abs_path(cx) != old_path {
3954 renamed_buffers.push((cx.handle(), old_path));
3955 }
3956
3957 if let Some(project_id) = self.shared_remote_id() {
3958 self.client
3959 .send(proto::UpdateBufferFile {
3960 project_id,
3961 buffer_id: *buffer_id as u64,
3962 file: Some(new_file.to_proto()),
3963 })
3964 .log_err();
3965 }
3966 buffer.file_updated(Box::new(new_file), cx).detach();
3967 }
3968 });
3969 } else {
3970 buffers_to_delete.push(*buffer_id);
3971 }
3972 }
3973
3974 for buffer_id in buffers_to_delete {
3975 self.opened_buffers.remove(&buffer_id);
3976 }
3977
3978 for (buffer, old_path) in renamed_buffers {
3979 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3980 self.assign_language_to_buffer(&buffer, cx);
3981 self.register_buffer_with_language_server(&buffer, cx);
3982 }
3983 }
3984
3985 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3986 let new_active_entry = entry.and_then(|project_path| {
3987 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3988 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3989 Some(entry.id)
3990 });
3991 if new_active_entry != self.active_entry {
3992 self.active_entry = new_active_entry;
3993 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3994 }
3995 }
3996
3997 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3998 self.language_server_statuses
3999 .values()
4000 .any(|status| status.pending_diagnostic_updates > 0)
4001 }
4002
4003 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4004 let mut summary = DiagnosticSummary::default();
4005 for (_, path_summary) in self.diagnostic_summaries(cx) {
4006 summary.error_count += path_summary.error_count;
4007 summary.warning_count += path_summary.warning_count;
4008 }
4009 summary
4010 }
4011
4012 pub fn diagnostic_summaries<'a>(
4013 &'a self,
4014 cx: &'a AppContext,
4015 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4016 self.worktrees(cx).flat_map(move |worktree| {
4017 let worktree = worktree.read(cx);
4018 let worktree_id = worktree.id();
4019 worktree
4020 .diagnostic_summaries()
4021 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4022 })
4023 }
4024
4025 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4026 if self
4027 .language_server_statuses
4028 .values()
4029 .map(|status| status.pending_diagnostic_updates)
4030 .sum::<isize>()
4031 == 1
4032 {
4033 cx.emit(Event::DiskBasedDiagnosticsStarted);
4034 }
4035 }
4036
4037 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4038 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4039 if self
4040 .language_server_statuses
4041 .values()
4042 .map(|status| status.pending_diagnostic_updates)
4043 .sum::<isize>()
4044 == 0
4045 {
4046 cx.emit(Event::DiskBasedDiagnosticsFinished);
4047 }
4048 }
4049
4050 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4051 self.active_entry
4052 }
4053
4054 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4055 self.worktree_for_id(path.worktree_id, cx)?
4056 .read(cx)
4057 .entry_for_path(&path.path)
4058 .map(|entry| entry.id)
4059 }
4060
4061 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4062 let worktree = self.worktree_for_entry(entry_id, cx)?;
4063 let worktree = worktree.read(cx);
4064 let worktree_id = worktree.id();
4065 let path = worktree.entry_for_id(entry_id)?.path.clone();
4066 Some(ProjectPath { worktree_id, path })
4067 }
4068
4069 // RPC message handlers
4070
4071 async fn handle_request_join_project(
4072 this: ModelHandle<Self>,
4073 message: TypedEnvelope<proto::RequestJoinProject>,
4074 _: Arc<Client>,
4075 mut cx: AsyncAppContext,
4076 ) -> Result<()> {
4077 let user_id = message.payload.requester_id;
4078 if this.read_with(&cx, |project, _| {
4079 project.collaborators.values().any(|c| c.user.id == user_id)
4080 }) {
4081 this.update(&mut cx, |this, cx| {
4082 this.respond_to_join_request(user_id, true, cx)
4083 });
4084 } else {
4085 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4086 let user = user_store
4087 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4088 .await?;
4089 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4090 }
4091 Ok(())
4092 }
4093
4094 async fn handle_unregister_project(
4095 this: ModelHandle<Self>,
4096 _: TypedEnvelope<proto::UnregisterProject>,
4097 _: Arc<Client>,
4098 mut cx: AsyncAppContext,
4099 ) -> Result<()> {
4100 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4101 Ok(())
4102 }
4103
4104 async fn handle_project_unshared(
4105 this: ModelHandle<Self>,
4106 _: TypedEnvelope<proto::ProjectUnshared>,
4107 _: Arc<Client>,
4108 mut cx: AsyncAppContext,
4109 ) -> Result<()> {
4110 this.update(&mut cx, |this, cx| this.unshared(cx));
4111 Ok(())
4112 }
4113
4114 async fn handle_add_collaborator(
4115 this: ModelHandle<Self>,
4116 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4117 _: Arc<Client>,
4118 mut cx: AsyncAppContext,
4119 ) -> Result<()> {
4120 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4121 let collaborator = envelope
4122 .payload
4123 .collaborator
4124 .take()
4125 .ok_or_else(|| anyhow!("empty collaborator"))?;
4126
4127 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4128 this.update(&mut cx, |this, cx| {
4129 this.collaborators
4130 .insert(collaborator.peer_id, collaborator);
4131 cx.notify();
4132 });
4133
4134 Ok(())
4135 }
4136
4137 async fn handle_remove_collaborator(
4138 this: ModelHandle<Self>,
4139 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4140 _: Arc<Client>,
4141 mut cx: AsyncAppContext,
4142 ) -> Result<()> {
4143 this.update(&mut cx, |this, cx| {
4144 let peer_id = PeerId(envelope.payload.peer_id);
4145 let replica_id = this
4146 .collaborators
4147 .remove(&peer_id)
4148 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4149 .replica_id;
4150 for (_, buffer) in &this.opened_buffers {
4151 if let Some(buffer) = buffer.upgrade(cx) {
4152 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4153 }
4154 }
4155
4156 cx.emit(Event::CollaboratorLeft(peer_id));
4157 cx.notify();
4158 Ok(())
4159 })
4160 }
4161
4162 async fn handle_join_project_request_cancelled(
4163 this: ModelHandle<Self>,
4164 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4165 _: Arc<Client>,
4166 mut cx: AsyncAppContext,
4167 ) -> Result<()> {
4168 let user = this
4169 .update(&mut cx, |this, cx| {
4170 this.user_store.update(cx, |user_store, cx| {
4171 user_store.fetch_user(envelope.payload.requester_id, cx)
4172 })
4173 })
4174 .await?;
4175
4176 this.update(&mut cx, |_, cx| {
4177 cx.emit(Event::ContactCancelledJoinRequest(user));
4178 });
4179
4180 Ok(())
4181 }
4182
4183 async fn handle_update_project(
4184 this: ModelHandle<Self>,
4185 envelope: TypedEnvelope<proto::UpdateProject>,
4186 client: Arc<Client>,
4187 mut cx: AsyncAppContext,
4188 ) -> Result<()> {
4189 this.update(&mut cx, |this, cx| {
4190 let replica_id = this.replica_id();
4191 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4192
4193 let mut old_worktrees_by_id = this
4194 .worktrees
4195 .drain(..)
4196 .filter_map(|worktree| {
4197 let worktree = worktree.upgrade(cx)?;
4198 Some((worktree.read(cx).id(), worktree))
4199 })
4200 .collect::<HashMap<_, _>>();
4201
4202 for worktree in envelope.payload.worktrees {
4203 if let Some(old_worktree) =
4204 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4205 {
4206 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4207 } else {
4208 let worktree = proto::Worktree {
4209 id: worktree.id,
4210 root_name: worktree.root_name,
4211 entries: Default::default(),
4212 diagnostic_summaries: Default::default(),
4213 visible: worktree.visible,
4214 scan_id: 0,
4215 };
4216 let (worktree, load_task) =
4217 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4218 this.add_worktree(&worktree, cx);
4219 load_task.detach();
4220 }
4221 }
4222
4223 this.metadata_changed(true, cx);
4224 for (id, _) in old_worktrees_by_id {
4225 cx.emit(Event::WorktreeRemoved(id));
4226 }
4227
4228 Ok(())
4229 })
4230 }
4231
4232 async fn handle_update_worktree(
4233 this: ModelHandle<Self>,
4234 envelope: TypedEnvelope<proto::UpdateWorktree>,
4235 _: Arc<Client>,
4236 mut cx: AsyncAppContext,
4237 ) -> Result<()> {
4238 this.update(&mut cx, |this, cx| {
4239 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4240 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4241 worktree.update(cx, |worktree, _| {
4242 let worktree = worktree.as_remote_mut().unwrap();
4243 worktree.update_from_remote(envelope)
4244 })?;
4245 }
4246 Ok(())
4247 })
4248 }
4249
4250 async fn handle_create_project_entry(
4251 this: ModelHandle<Self>,
4252 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4253 _: Arc<Client>,
4254 mut cx: AsyncAppContext,
4255 ) -> Result<proto::ProjectEntryResponse> {
4256 let worktree = this.update(&mut cx, |this, cx| {
4257 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4258 this.worktree_for_id(worktree_id, cx)
4259 .ok_or_else(|| anyhow!("worktree not found"))
4260 })?;
4261 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4262 let entry = worktree
4263 .update(&mut cx, |worktree, cx| {
4264 let worktree = worktree.as_local_mut().unwrap();
4265 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4266 worktree.create_entry(path, envelope.payload.is_directory, cx)
4267 })
4268 .await?;
4269 Ok(proto::ProjectEntryResponse {
4270 entry: Some((&entry).into()),
4271 worktree_scan_id: worktree_scan_id as u64,
4272 })
4273 }
4274
4275 async fn handle_rename_project_entry(
4276 this: ModelHandle<Self>,
4277 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4278 _: Arc<Client>,
4279 mut cx: AsyncAppContext,
4280 ) -> Result<proto::ProjectEntryResponse> {
4281 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4282 let worktree = this.read_with(&cx, |this, cx| {
4283 this.worktree_for_entry(entry_id, cx)
4284 .ok_or_else(|| anyhow!("worktree not found"))
4285 })?;
4286 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4287 let entry = worktree
4288 .update(&mut cx, |worktree, cx| {
4289 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4290 worktree
4291 .as_local_mut()
4292 .unwrap()
4293 .rename_entry(entry_id, new_path, cx)
4294 .ok_or_else(|| anyhow!("invalid entry"))
4295 })?
4296 .await?;
4297 Ok(proto::ProjectEntryResponse {
4298 entry: Some((&entry).into()),
4299 worktree_scan_id: worktree_scan_id as u64,
4300 })
4301 }
4302
4303 async fn handle_copy_project_entry(
4304 this: ModelHandle<Self>,
4305 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4306 _: Arc<Client>,
4307 mut cx: AsyncAppContext,
4308 ) -> Result<proto::ProjectEntryResponse> {
4309 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4310 let worktree = this.read_with(&cx, |this, cx| {
4311 this.worktree_for_entry(entry_id, cx)
4312 .ok_or_else(|| anyhow!("worktree not found"))
4313 })?;
4314 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4315 let entry = worktree
4316 .update(&mut cx, |worktree, cx| {
4317 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4318 worktree
4319 .as_local_mut()
4320 .unwrap()
4321 .copy_entry(entry_id, new_path, cx)
4322 .ok_or_else(|| anyhow!("invalid entry"))
4323 })?
4324 .await?;
4325 Ok(proto::ProjectEntryResponse {
4326 entry: Some((&entry).into()),
4327 worktree_scan_id: worktree_scan_id as u64,
4328 })
4329 }
4330
4331 async fn handle_delete_project_entry(
4332 this: ModelHandle<Self>,
4333 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4334 _: Arc<Client>,
4335 mut cx: AsyncAppContext,
4336 ) -> Result<proto::ProjectEntryResponse> {
4337 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4338 let worktree = this.read_with(&cx, |this, cx| {
4339 this.worktree_for_entry(entry_id, cx)
4340 .ok_or_else(|| anyhow!("worktree not found"))
4341 })?;
4342 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4343 worktree
4344 .update(&mut cx, |worktree, cx| {
4345 worktree
4346 .as_local_mut()
4347 .unwrap()
4348 .delete_entry(entry_id, cx)
4349 .ok_or_else(|| anyhow!("invalid entry"))
4350 })?
4351 .await?;
4352 Ok(proto::ProjectEntryResponse {
4353 entry: None,
4354 worktree_scan_id: worktree_scan_id as u64,
4355 })
4356 }
4357
4358 async fn handle_update_diagnostic_summary(
4359 this: ModelHandle<Self>,
4360 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4361 _: Arc<Client>,
4362 mut cx: AsyncAppContext,
4363 ) -> Result<()> {
4364 this.update(&mut cx, |this, cx| {
4365 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4366 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4367 if let Some(summary) = envelope.payload.summary {
4368 let project_path = ProjectPath {
4369 worktree_id,
4370 path: Path::new(&summary.path).into(),
4371 };
4372 worktree.update(cx, |worktree, _| {
4373 worktree
4374 .as_remote_mut()
4375 .unwrap()
4376 .update_diagnostic_summary(project_path.path.clone(), &summary);
4377 });
4378 cx.emit(Event::DiagnosticsUpdated(project_path));
4379 }
4380 }
4381 Ok(())
4382 })
4383 }
4384
4385 async fn handle_start_language_server(
4386 this: ModelHandle<Self>,
4387 envelope: TypedEnvelope<proto::StartLanguageServer>,
4388 _: Arc<Client>,
4389 mut cx: AsyncAppContext,
4390 ) -> Result<()> {
4391 let server = envelope
4392 .payload
4393 .server
4394 .ok_or_else(|| anyhow!("invalid server"))?;
4395 this.update(&mut cx, |this, cx| {
4396 this.language_server_statuses.insert(
4397 server.id as usize,
4398 LanguageServerStatus {
4399 name: server.name,
4400 pending_work: Default::default(),
4401 pending_diagnostic_updates: 0,
4402 },
4403 );
4404 cx.notify();
4405 });
4406 Ok(())
4407 }
4408
4409 async fn handle_update_language_server(
4410 this: ModelHandle<Self>,
4411 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4412 _: Arc<Client>,
4413 mut cx: AsyncAppContext,
4414 ) -> Result<()> {
4415 let language_server_id = envelope.payload.language_server_id as usize;
4416 match envelope
4417 .payload
4418 .variant
4419 .ok_or_else(|| anyhow!("invalid variant"))?
4420 {
4421 proto::update_language_server::Variant::WorkStart(payload) => {
4422 this.update(&mut cx, |this, cx| {
4423 this.on_lsp_work_start(language_server_id, payload.token, cx);
4424 })
4425 }
4426 proto::update_language_server::Variant::WorkProgress(payload) => {
4427 this.update(&mut cx, |this, cx| {
4428 this.on_lsp_work_progress(
4429 language_server_id,
4430 payload.token,
4431 LanguageServerProgress {
4432 message: payload.message,
4433 percentage: payload.percentage.map(|p| p as usize),
4434 last_update_at: Instant::now(),
4435 },
4436 cx,
4437 );
4438 })
4439 }
4440 proto::update_language_server::Variant::WorkEnd(payload) => {
4441 this.update(&mut cx, |this, cx| {
4442 this.on_lsp_work_end(language_server_id, payload.token, cx);
4443 })
4444 }
4445 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4446 this.update(&mut cx, |this, cx| {
4447 this.disk_based_diagnostics_started(cx);
4448 })
4449 }
4450 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4451 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4452 }
4453 }
4454
4455 Ok(())
4456 }
4457
4458 async fn handle_update_buffer(
4459 this: ModelHandle<Self>,
4460 envelope: TypedEnvelope<proto::UpdateBuffer>,
4461 _: Arc<Client>,
4462 mut cx: AsyncAppContext,
4463 ) -> Result<()> {
4464 this.update(&mut cx, |this, cx| {
4465 let payload = envelope.payload.clone();
4466 let buffer_id = payload.buffer_id;
4467 let ops = payload
4468 .operations
4469 .into_iter()
4470 .map(|op| language::proto::deserialize_operation(op))
4471 .collect::<Result<Vec<_>, _>>()?;
4472 let is_remote = this.is_remote();
4473 match this.opened_buffers.entry(buffer_id) {
4474 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4475 OpenBuffer::Strong(buffer) => {
4476 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4477 }
4478 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4479 OpenBuffer::Weak(_) => {}
4480 },
4481 hash_map::Entry::Vacant(e) => {
4482 assert!(
4483 is_remote,
4484 "received buffer update from {:?}",
4485 envelope.original_sender_id
4486 );
4487 e.insert(OpenBuffer::Loading(ops));
4488 }
4489 }
4490 Ok(())
4491 })
4492 }
4493
4494 async fn handle_update_buffer_file(
4495 this: ModelHandle<Self>,
4496 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4497 _: Arc<Client>,
4498 mut cx: AsyncAppContext,
4499 ) -> Result<()> {
4500 this.update(&mut cx, |this, cx| {
4501 let payload = envelope.payload.clone();
4502 let buffer_id = payload.buffer_id;
4503 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4504 let worktree = this
4505 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4506 .ok_or_else(|| anyhow!("no such worktree"))?;
4507 let file = File::from_proto(file, worktree.clone(), cx)?;
4508 let buffer = this
4509 .opened_buffers
4510 .get_mut(&buffer_id)
4511 .and_then(|b| b.upgrade(cx))
4512 .ok_or_else(|| anyhow!("no such buffer"))?;
4513 buffer.update(cx, |buffer, cx| {
4514 buffer.file_updated(Box::new(file), cx).detach();
4515 });
4516 Ok(())
4517 })
4518 }
4519
4520 async fn handle_save_buffer(
4521 this: ModelHandle<Self>,
4522 envelope: TypedEnvelope<proto::SaveBuffer>,
4523 _: Arc<Client>,
4524 mut cx: AsyncAppContext,
4525 ) -> Result<proto::BufferSaved> {
4526 let buffer_id = envelope.payload.buffer_id;
4527 let requested_version = deserialize_version(envelope.payload.version);
4528
4529 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4530 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4531 let buffer = this
4532 .opened_buffers
4533 .get(&buffer_id)
4534 .and_then(|buffer| buffer.upgrade(cx))
4535 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4536 Ok::<_, anyhow::Error>((project_id, buffer))
4537 })?;
4538 buffer
4539 .update(&mut cx, |buffer, _| {
4540 buffer.wait_for_version(requested_version)
4541 })
4542 .await;
4543
4544 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4545 Ok(proto::BufferSaved {
4546 project_id,
4547 buffer_id,
4548 version: serialize_version(&saved_version),
4549 mtime: Some(mtime.into()),
4550 })
4551 }
4552
4553 async fn handle_reload_buffers(
4554 this: ModelHandle<Self>,
4555 envelope: TypedEnvelope<proto::ReloadBuffers>,
4556 _: Arc<Client>,
4557 mut cx: AsyncAppContext,
4558 ) -> Result<proto::ReloadBuffersResponse> {
4559 let sender_id = envelope.original_sender_id()?;
4560 let reload = this.update(&mut cx, |this, cx| {
4561 let mut buffers = HashSet::default();
4562 for buffer_id in &envelope.payload.buffer_ids {
4563 buffers.insert(
4564 this.opened_buffers
4565 .get(buffer_id)
4566 .and_then(|buffer| buffer.upgrade(cx))
4567 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4568 );
4569 }
4570 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4571 })?;
4572
4573 let project_transaction = reload.await?;
4574 let project_transaction = this.update(&mut cx, |this, cx| {
4575 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4576 });
4577 Ok(proto::ReloadBuffersResponse {
4578 transaction: Some(project_transaction),
4579 })
4580 }
4581
4582 async fn handle_format_buffers(
4583 this: ModelHandle<Self>,
4584 envelope: TypedEnvelope<proto::FormatBuffers>,
4585 _: Arc<Client>,
4586 mut cx: AsyncAppContext,
4587 ) -> Result<proto::FormatBuffersResponse> {
4588 let sender_id = envelope.original_sender_id()?;
4589 let format = this.update(&mut cx, |this, cx| {
4590 let mut buffers = HashSet::default();
4591 for buffer_id in &envelope.payload.buffer_ids {
4592 buffers.insert(
4593 this.opened_buffers
4594 .get(buffer_id)
4595 .and_then(|buffer| buffer.upgrade(cx))
4596 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4597 );
4598 }
4599 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4600 })?;
4601
4602 let project_transaction = format.await?;
4603 let project_transaction = this.update(&mut cx, |this, cx| {
4604 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4605 });
4606 Ok(proto::FormatBuffersResponse {
4607 transaction: Some(project_transaction),
4608 })
4609 }
4610
4611 async fn handle_get_completions(
4612 this: ModelHandle<Self>,
4613 envelope: TypedEnvelope<proto::GetCompletions>,
4614 _: Arc<Client>,
4615 mut cx: AsyncAppContext,
4616 ) -> Result<proto::GetCompletionsResponse> {
4617 let position = envelope
4618 .payload
4619 .position
4620 .and_then(language::proto::deserialize_anchor)
4621 .ok_or_else(|| anyhow!("invalid position"))?;
4622 let version = deserialize_version(envelope.payload.version);
4623 let buffer = this.read_with(&cx, |this, cx| {
4624 this.opened_buffers
4625 .get(&envelope.payload.buffer_id)
4626 .and_then(|buffer| buffer.upgrade(cx))
4627 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4628 })?;
4629 buffer
4630 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4631 .await;
4632 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4633 let completions = this
4634 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4635 .await?;
4636
4637 Ok(proto::GetCompletionsResponse {
4638 completions: completions
4639 .iter()
4640 .map(language::proto::serialize_completion)
4641 .collect(),
4642 version: serialize_version(&version),
4643 })
4644 }
4645
4646 async fn handle_apply_additional_edits_for_completion(
4647 this: ModelHandle<Self>,
4648 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4649 _: Arc<Client>,
4650 mut cx: AsyncAppContext,
4651 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4652 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4653 let buffer = this
4654 .opened_buffers
4655 .get(&envelope.payload.buffer_id)
4656 .and_then(|buffer| buffer.upgrade(cx))
4657 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4658 let language = buffer.read(cx).language();
4659 let completion = language::proto::deserialize_completion(
4660 envelope
4661 .payload
4662 .completion
4663 .ok_or_else(|| anyhow!("invalid completion"))?,
4664 language,
4665 )?;
4666 Ok::<_, anyhow::Error>(
4667 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4668 )
4669 })?;
4670
4671 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4672 transaction: apply_additional_edits
4673 .await?
4674 .as_ref()
4675 .map(language::proto::serialize_transaction),
4676 })
4677 }
4678
4679 async fn handle_get_code_actions(
4680 this: ModelHandle<Self>,
4681 envelope: TypedEnvelope<proto::GetCodeActions>,
4682 _: Arc<Client>,
4683 mut cx: AsyncAppContext,
4684 ) -> Result<proto::GetCodeActionsResponse> {
4685 let start = envelope
4686 .payload
4687 .start
4688 .and_then(language::proto::deserialize_anchor)
4689 .ok_or_else(|| anyhow!("invalid start"))?;
4690 let end = envelope
4691 .payload
4692 .end
4693 .and_then(language::proto::deserialize_anchor)
4694 .ok_or_else(|| anyhow!("invalid end"))?;
4695 let buffer = this.update(&mut cx, |this, cx| {
4696 this.opened_buffers
4697 .get(&envelope.payload.buffer_id)
4698 .and_then(|buffer| buffer.upgrade(cx))
4699 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4700 })?;
4701 buffer
4702 .update(&mut cx, |buffer, _| {
4703 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4704 })
4705 .await;
4706
4707 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4708 let code_actions = this.update(&mut cx, |this, cx| {
4709 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4710 })?;
4711
4712 Ok(proto::GetCodeActionsResponse {
4713 actions: code_actions
4714 .await?
4715 .iter()
4716 .map(language::proto::serialize_code_action)
4717 .collect(),
4718 version: serialize_version(&version),
4719 })
4720 }
4721
4722 async fn handle_apply_code_action(
4723 this: ModelHandle<Self>,
4724 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4725 _: Arc<Client>,
4726 mut cx: AsyncAppContext,
4727 ) -> Result<proto::ApplyCodeActionResponse> {
4728 let sender_id = envelope.original_sender_id()?;
4729 let action = language::proto::deserialize_code_action(
4730 envelope
4731 .payload
4732 .action
4733 .ok_or_else(|| anyhow!("invalid action"))?,
4734 )?;
4735 let apply_code_action = this.update(&mut cx, |this, cx| {
4736 let buffer = this
4737 .opened_buffers
4738 .get(&envelope.payload.buffer_id)
4739 .and_then(|buffer| buffer.upgrade(cx))
4740 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4741 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4742 })?;
4743
4744 let project_transaction = apply_code_action.await?;
4745 let project_transaction = this.update(&mut cx, |this, cx| {
4746 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4747 });
4748 Ok(proto::ApplyCodeActionResponse {
4749 transaction: Some(project_transaction),
4750 })
4751 }
4752
4753 async fn handle_lsp_command<T: LspCommand>(
4754 this: ModelHandle<Self>,
4755 envelope: TypedEnvelope<T::ProtoRequest>,
4756 _: Arc<Client>,
4757 mut cx: AsyncAppContext,
4758 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4759 where
4760 <T::LspRequest as lsp::request::Request>::Result: Send,
4761 {
4762 let sender_id = envelope.original_sender_id()?;
4763 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4764 let buffer_handle = this.read_with(&cx, |this, _| {
4765 this.opened_buffers
4766 .get(&buffer_id)
4767 .and_then(|buffer| buffer.upgrade(&cx))
4768 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4769 })?;
4770 let request = T::from_proto(
4771 envelope.payload,
4772 this.clone(),
4773 buffer_handle.clone(),
4774 cx.clone(),
4775 )
4776 .await?;
4777 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4778 let response = this
4779 .update(&mut cx, |this, cx| {
4780 this.request_lsp(buffer_handle, request, cx)
4781 })
4782 .await?;
4783 this.update(&mut cx, |this, cx| {
4784 Ok(T::response_to_proto(
4785 response,
4786 this,
4787 sender_id,
4788 &buffer_version,
4789 cx,
4790 ))
4791 })
4792 }
4793
4794 async fn handle_get_project_symbols(
4795 this: ModelHandle<Self>,
4796 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4797 _: Arc<Client>,
4798 mut cx: AsyncAppContext,
4799 ) -> Result<proto::GetProjectSymbolsResponse> {
4800 let symbols = this
4801 .update(&mut cx, |this, cx| {
4802 this.symbols(&envelope.payload.query, cx)
4803 })
4804 .await?;
4805
4806 Ok(proto::GetProjectSymbolsResponse {
4807 symbols: symbols.iter().map(serialize_symbol).collect(),
4808 })
4809 }
4810
4811 async fn handle_search_project(
4812 this: ModelHandle<Self>,
4813 envelope: TypedEnvelope<proto::SearchProject>,
4814 _: Arc<Client>,
4815 mut cx: AsyncAppContext,
4816 ) -> Result<proto::SearchProjectResponse> {
4817 let peer_id = envelope.original_sender_id()?;
4818 let query = SearchQuery::from_proto(envelope.payload)?;
4819 let result = this
4820 .update(&mut cx, |this, cx| this.search(query, cx))
4821 .await?;
4822
4823 this.update(&mut cx, |this, cx| {
4824 let mut locations = Vec::new();
4825 for (buffer, ranges) in result {
4826 for range in ranges {
4827 let start = serialize_anchor(&range.start);
4828 let end = serialize_anchor(&range.end);
4829 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4830 locations.push(proto::Location {
4831 buffer: Some(buffer),
4832 start: Some(start),
4833 end: Some(end),
4834 });
4835 }
4836 }
4837 Ok(proto::SearchProjectResponse { locations })
4838 })
4839 }
4840
4841 async fn handle_open_buffer_for_symbol(
4842 this: ModelHandle<Self>,
4843 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4844 _: Arc<Client>,
4845 mut cx: AsyncAppContext,
4846 ) -> Result<proto::OpenBufferForSymbolResponse> {
4847 let peer_id = envelope.original_sender_id()?;
4848 let symbol = envelope
4849 .payload
4850 .symbol
4851 .ok_or_else(|| anyhow!("invalid symbol"))?;
4852 let symbol = this.read_with(&cx, |this, _| {
4853 let symbol = this.deserialize_symbol(symbol)?;
4854 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4855 if signature == symbol.signature {
4856 Ok(symbol)
4857 } else {
4858 Err(anyhow!("invalid symbol signature"))
4859 }
4860 })?;
4861 let buffer = this
4862 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4863 .await?;
4864
4865 Ok(proto::OpenBufferForSymbolResponse {
4866 buffer: Some(this.update(&mut cx, |this, cx| {
4867 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4868 })),
4869 })
4870 }
4871
4872 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4873 let mut hasher = Sha256::new();
4874 hasher.update(worktree_id.to_proto().to_be_bytes());
4875 hasher.update(path.to_string_lossy().as_bytes());
4876 hasher.update(self.nonce.to_be_bytes());
4877 hasher.finalize().as_slice().try_into().unwrap()
4878 }
4879
4880 async fn handle_open_buffer_by_id(
4881 this: ModelHandle<Self>,
4882 envelope: TypedEnvelope<proto::OpenBufferById>,
4883 _: Arc<Client>,
4884 mut cx: AsyncAppContext,
4885 ) -> Result<proto::OpenBufferResponse> {
4886 let peer_id = envelope.original_sender_id()?;
4887 let buffer = this
4888 .update(&mut cx, |this, cx| {
4889 this.open_buffer_by_id(envelope.payload.id, cx)
4890 })
4891 .await?;
4892 this.update(&mut cx, |this, cx| {
4893 Ok(proto::OpenBufferResponse {
4894 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4895 })
4896 })
4897 }
4898
4899 async fn handle_open_buffer_by_path(
4900 this: ModelHandle<Self>,
4901 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4902 _: Arc<Client>,
4903 mut cx: AsyncAppContext,
4904 ) -> Result<proto::OpenBufferResponse> {
4905 let peer_id = envelope.original_sender_id()?;
4906 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4907 let open_buffer = this.update(&mut cx, |this, cx| {
4908 this.open_buffer(
4909 ProjectPath {
4910 worktree_id,
4911 path: PathBuf::from(envelope.payload.path).into(),
4912 },
4913 cx,
4914 )
4915 });
4916
4917 let buffer = open_buffer.await?;
4918 this.update(&mut cx, |this, cx| {
4919 Ok(proto::OpenBufferResponse {
4920 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4921 })
4922 })
4923 }
4924
4925 fn serialize_project_transaction_for_peer(
4926 &mut self,
4927 project_transaction: ProjectTransaction,
4928 peer_id: PeerId,
4929 cx: &AppContext,
4930 ) -> proto::ProjectTransaction {
4931 let mut serialized_transaction = proto::ProjectTransaction {
4932 buffers: Default::default(),
4933 transactions: Default::default(),
4934 };
4935 for (buffer, transaction) in project_transaction.0 {
4936 serialized_transaction
4937 .buffers
4938 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4939 serialized_transaction
4940 .transactions
4941 .push(language::proto::serialize_transaction(&transaction));
4942 }
4943 serialized_transaction
4944 }
4945
4946 fn deserialize_project_transaction(
4947 &mut self,
4948 message: proto::ProjectTransaction,
4949 push_to_history: bool,
4950 cx: &mut ModelContext<Self>,
4951 ) -> Task<Result<ProjectTransaction>> {
4952 cx.spawn(|this, mut cx| async move {
4953 let mut project_transaction = ProjectTransaction::default();
4954 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4955 let buffer = this
4956 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4957 .await?;
4958 let transaction = language::proto::deserialize_transaction(transaction)?;
4959 project_transaction.0.insert(buffer, transaction);
4960 }
4961
4962 for (buffer, transaction) in &project_transaction.0 {
4963 buffer
4964 .update(&mut cx, |buffer, _| {
4965 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4966 })
4967 .await;
4968
4969 if push_to_history {
4970 buffer.update(&mut cx, |buffer, _| {
4971 buffer.push_transaction(transaction.clone(), Instant::now());
4972 });
4973 }
4974 }
4975
4976 Ok(project_transaction)
4977 })
4978 }
4979
4980 fn serialize_buffer_for_peer(
4981 &mut self,
4982 buffer: &ModelHandle<Buffer>,
4983 peer_id: PeerId,
4984 cx: &AppContext,
4985 ) -> proto::Buffer {
4986 let buffer_id = buffer.read(cx).remote_id();
4987 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4988 if shared_buffers.insert(buffer_id) {
4989 proto::Buffer {
4990 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4991 }
4992 } else {
4993 proto::Buffer {
4994 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4995 }
4996 }
4997 }
4998
4999 fn deserialize_buffer(
5000 &mut self,
5001 buffer: proto::Buffer,
5002 cx: &mut ModelContext<Self>,
5003 ) -> Task<Result<ModelHandle<Buffer>>> {
5004 let replica_id = self.replica_id();
5005
5006 let opened_buffer_tx = self.opened_buffer.0.clone();
5007 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5008 cx.spawn(|this, mut cx| async move {
5009 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5010 proto::buffer::Variant::Id(id) => {
5011 let buffer = loop {
5012 let buffer = this.read_with(&cx, |this, cx| {
5013 this.opened_buffers
5014 .get(&id)
5015 .and_then(|buffer| buffer.upgrade(cx))
5016 });
5017 if let Some(buffer) = buffer {
5018 break buffer;
5019 }
5020 opened_buffer_rx
5021 .next()
5022 .await
5023 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5024 };
5025 Ok(buffer)
5026 }
5027 proto::buffer::Variant::State(mut buffer) => {
5028 let mut buffer_worktree = None;
5029 let mut buffer_file = None;
5030 if let Some(file) = buffer.file.take() {
5031 this.read_with(&cx, |this, cx| {
5032 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5033 let worktree =
5034 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5035 anyhow!("no worktree found for id {}", file.worktree_id)
5036 })?;
5037 buffer_file =
5038 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5039 as Box<dyn language::File>);
5040 buffer_worktree = Some(worktree);
5041 Ok::<_, anyhow::Error>(())
5042 })?;
5043 }
5044
5045 let buffer = cx.add_model(|cx| {
5046 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5047 });
5048
5049 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5050
5051 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5052 Ok(buffer)
5053 }
5054 }
5055 })
5056 }
5057
5058 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5059 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5060 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5061 let start = serialized_symbol
5062 .start
5063 .ok_or_else(|| anyhow!("invalid start"))?;
5064 let end = serialized_symbol
5065 .end
5066 .ok_or_else(|| anyhow!("invalid end"))?;
5067 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5068 let path = PathBuf::from(serialized_symbol.path);
5069 let language = self.languages.select_language(&path);
5070 Ok(Symbol {
5071 source_worktree_id,
5072 worktree_id,
5073 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5074 label: language
5075 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5076 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5077 name: serialized_symbol.name,
5078 path,
5079 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5080 kind,
5081 signature: serialized_symbol
5082 .signature
5083 .try_into()
5084 .map_err(|_| anyhow!("invalid signature"))?,
5085 })
5086 }
5087
5088 async fn handle_buffer_saved(
5089 this: ModelHandle<Self>,
5090 envelope: TypedEnvelope<proto::BufferSaved>,
5091 _: Arc<Client>,
5092 mut cx: AsyncAppContext,
5093 ) -> Result<()> {
5094 let version = deserialize_version(envelope.payload.version);
5095 let mtime = envelope
5096 .payload
5097 .mtime
5098 .ok_or_else(|| anyhow!("missing mtime"))?
5099 .into();
5100
5101 this.update(&mut cx, |this, cx| {
5102 let buffer = this
5103 .opened_buffers
5104 .get(&envelope.payload.buffer_id)
5105 .and_then(|buffer| buffer.upgrade(cx));
5106 if let Some(buffer) = buffer {
5107 buffer.update(cx, |buffer, cx| {
5108 buffer.did_save(version, mtime, None, cx);
5109 });
5110 }
5111 Ok(())
5112 })
5113 }
5114
5115 async fn handle_buffer_reloaded(
5116 this: ModelHandle<Self>,
5117 envelope: TypedEnvelope<proto::BufferReloaded>,
5118 _: Arc<Client>,
5119 mut cx: AsyncAppContext,
5120 ) -> Result<()> {
5121 let payload = envelope.payload.clone();
5122 let version = deserialize_version(payload.version);
5123 let mtime = payload
5124 .mtime
5125 .ok_or_else(|| anyhow!("missing mtime"))?
5126 .into();
5127 this.update(&mut cx, |this, cx| {
5128 let buffer = this
5129 .opened_buffers
5130 .get(&payload.buffer_id)
5131 .and_then(|buffer| buffer.upgrade(cx));
5132 if let Some(buffer) = buffer {
5133 buffer.update(cx, |buffer, cx| {
5134 buffer.did_reload(version, mtime, cx);
5135 });
5136 }
5137 Ok(())
5138 })
5139 }
5140
5141 pub fn match_paths<'a>(
5142 &self,
5143 query: &'a str,
5144 include_ignored: bool,
5145 smart_case: bool,
5146 max_results: usize,
5147 cancel_flag: &'a AtomicBool,
5148 cx: &AppContext,
5149 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5150 let worktrees = self
5151 .worktrees(cx)
5152 .filter(|worktree| worktree.read(cx).is_visible())
5153 .collect::<Vec<_>>();
5154 let include_root_name = worktrees.len() > 1;
5155 let candidate_sets = worktrees
5156 .into_iter()
5157 .map(|worktree| CandidateSet {
5158 snapshot: worktree.read(cx).snapshot(),
5159 include_ignored,
5160 include_root_name,
5161 })
5162 .collect::<Vec<_>>();
5163
5164 let background = cx.background().clone();
5165 async move {
5166 fuzzy::match_paths(
5167 candidate_sets.as_slice(),
5168 query,
5169 smart_case,
5170 max_results,
5171 cancel_flag,
5172 background,
5173 )
5174 .await
5175 }
5176 }
5177
5178 fn edits_from_lsp(
5179 &mut self,
5180 buffer: &ModelHandle<Buffer>,
5181 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5182 version: Option<i32>,
5183 cx: &mut ModelContext<Self>,
5184 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5185 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5186 cx.background().spawn(async move {
5187 let snapshot = snapshot?;
5188 let mut lsp_edits = lsp_edits
5189 .into_iter()
5190 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5191 .collect::<Vec<_>>();
5192 lsp_edits.sort_by_key(|(range, _)| range.start);
5193
5194 let mut lsp_edits = lsp_edits.into_iter().peekable();
5195 let mut edits = Vec::new();
5196 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5197 // Combine any LSP edits that are adjacent.
5198 //
5199 // Also, combine LSP edits that are separated from each other by only
5200 // a newline. This is important because for some code actions,
5201 // Rust-analyzer rewrites the entire buffer via a series of edits that
5202 // are separated by unchanged newline characters.
5203 //
5204 // In order for the diffing logic below to work properly, any edits that
5205 // cancel each other out must be combined into one.
5206 while let Some((next_range, next_text)) = lsp_edits.peek() {
5207 if next_range.start > range.end {
5208 if next_range.start.row > range.end.row + 1
5209 || next_range.start.column > 0
5210 || snapshot.clip_point_utf16(
5211 PointUtf16::new(range.end.row, u32::MAX),
5212 Bias::Left,
5213 ) > range.end
5214 {
5215 break;
5216 }
5217 new_text.push('\n');
5218 }
5219 range.end = next_range.end;
5220 new_text.push_str(&next_text);
5221 lsp_edits.next();
5222 }
5223
5224 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5225 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5226 {
5227 return Err(anyhow!("invalid edits received from language server"));
5228 }
5229
5230 // For multiline edits, perform a diff of the old and new text so that
5231 // we can identify the changes more precisely, preserving the locations
5232 // of any anchors positioned in the unchanged regions.
5233 if range.end.row > range.start.row {
5234 let mut offset = range.start.to_offset(&snapshot);
5235 let old_text = snapshot.text_for_range(range).collect::<String>();
5236
5237 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5238 let mut moved_since_edit = true;
5239 for change in diff.iter_all_changes() {
5240 let tag = change.tag();
5241 let value = change.value();
5242 match tag {
5243 ChangeTag::Equal => {
5244 offset += value.len();
5245 moved_since_edit = true;
5246 }
5247 ChangeTag::Delete => {
5248 let start = snapshot.anchor_after(offset);
5249 let end = snapshot.anchor_before(offset + value.len());
5250 if moved_since_edit {
5251 edits.push((start..end, String::new()));
5252 } else {
5253 edits.last_mut().unwrap().0.end = end;
5254 }
5255 offset += value.len();
5256 moved_since_edit = false;
5257 }
5258 ChangeTag::Insert => {
5259 if moved_since_edit {
5260 let anchor = snapshot.anchor_after(offset);
5261 edits.push((anchor.clone()..anchor, value.to_string()));
5262 } else {
5263 edits.last_mut().unwrap().1.push_str(value);
5264 }
5265 moved_since_edit = false;
5266 }
5267 }
5268 }
5269 } else if range.end == range.start {
5270 let anchor = snapshot.anchor_after(range.start);
5271 edits.push((anchor.clone()..anchor, new_text));
5272 } else {
5273 let edit_start = snapshot.anchor_after(range.start);
5274 let edit_end = snapshot.anchor_before(range.end);
5275 edits.push((edit_start..edit_end, new_text));
5276 }
5277 }
5278
5279 Ok(edits)
5280 })
5281 }
5282
5283 fn buffer_snapshot_for_lsp_version(
5284 &mut self,
5285 buffer: &ModelHandle<Buffer>,
5286 version: Option<i32>,
5287 cx: &AppContext,
5288 ) -> Result<TextBufferSnapshot> {
5289 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5290
5291 if let Some(version) = version {
5292 let buffer_id = buffer.read(cx).remote_id();
5293 let snapshots = self
5294 .buffer_snapshots
5295 .get_mut(&buffer_id)
5296 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5297 let mut found_snapshot = None;
5298 snapshots.retain(|(snapshot_version, snapshot)| {
5299 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5300 false
5301 } else {
5302 if *snapshot_version == version {
5303 found_snapshot = Some(snapshot.clone());
5304 }
5305 true
5306 }
5307 });
5308
5309 found_snapshot.ok_or_else(|| {
5310 anyhow!(
5311 "snapshot not found for buffer {} at version {}",
5312 buffer_id,
5313 version
5314 )
5315 })
5316 } else {
5317 Ok((buffer.read(cx)).text_snapshot())
5318 }
5319 }
5320
5321 fn language_server_for_buffer(
5322 &self,
5323 buffer: &Buffer,
5324 cx: &AppContext,
5325 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5326 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5327 let worktree_id = file.worktree_id(cx);
5328 self.language_servers
5329 .get(&(worktree_id, language.lsp_adapter()?.name()))
5330 } else {
5331 None
5332 }
5333 }
5334}
5335
5336impl ProjectStore {
5337 pub fn new(db: Arc<Db>) -> Self {
5338 Self {
5339 db,
5340 projects: Default::default(),
5341 }
5342 }
5343
5344 pub fn projects<'a>(
5345 &'a self,
5346 cx: &'a AppContext,
5347 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5348 self.projects
5349 .iter()
5350 .filter_map(|project| project.upgrade(cx))
5351 }
5352
5353 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5354 if let Err(ix) = self
5355 .projects
5356 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5357 {
5358 self.projects.insert(ix, project);
5359 }
5360 cx.notify();
5361 }
5362
5363 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5364 let mut did_change = false;
5365 self.projects.retain(|project| {
5366 if project.is_upgradable(cx) {
5367 true
5368 } else {
5369 did_change = true;
5370 false
5371 }
5372 });
5373 if did_change {
5374 cx.notify();
5375 }
5376 }
5377}
5378
5379impl WorktreeHandle {
5380 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5381 match self {
5382 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5383 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5384 }
5385 }
5386}
5387
5388impl OpenBuffer {
5389 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5390 match self {
5391 OpenBuffer::Strong(handle) => Some(handle.clone()),
5392 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5393 OpenBuffer::Loading(_) => None,
5394 }
5395 }
5396}
5397
5398struct CandidateSet {
5399 snapshot: Snapshot,
5400 include_ignored: bool,
5401 include_root_name: bool,
5402}
5403
5404impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5405 type Candidates = CandidateSetIter<'a>;
5406
5407 fn id(&self) -> usize {
5408 self.snapshot.id().to_usize()
5409 }
5410
5411 fn len(&self) -> usize {
5412 if self.include_ignored {
5413 self.snapshot.file_count()
5414 } else {
5415 self.snapshot.visible_file_count()
5416 }
5417 }
5418
5419 fn prefix(&self) -> Arc<str> {
5420 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5421 self.snapshot.root_name().into()
5422 } else if self.include_root_name {
5423 format!("{}/", self.snapshot.root_name()).into()
5424 } else {
5425 "".into()
5426 }
5427 }
5428
5429 fn candidates(&'a self, start: usize) -> Self::Candidates {
5430 CandidateSetIter {
5431 traversal: self.snapshot.files(self.include_ignored, start),
5432 }
5433 }
5434}
5435
5436struct CandidateSetIter<'a> {
5437 traversal: Traversal<'a>,
5438}
5439
5440impl<'a> Iterator for CandidateSetIter<'a> {
5441 type Item = PathMatchCandidate<'a>;
5442
5443 fn next(&mut self) -> Option<Self::Item> {
5444 self.traversal.next().map(|entry| {
5445 if let EntryKind::File(char_bag) = entry.kind {
5446 PathMatchCandidate {
5447 path: &entry.path,
5448 char_bag,
5449 }
5450 } else {
5451 unreachable!()
5452 }
5453 })
5454 }
5455}
5456
5457impl Entity for ProjectStore {
5458 type Event = ();
5459}
5460
5461impl Entity for Project {
5462 type Event = Event;
5463
5464 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5465 self.project_store.update(cx, ProjectStore::prune_projects);
5466
5467 match &self.client_state {
5468 ProjectClientState::Local { remote_id_rx, .. } => {
5469 if let Some(project_id) = *remote_id_rx.borrow() {
5470 self.client
5471 .send(proto::UnregisterProject { project_id })
5472 .log_err();
5473 }
5474 }
5475 ProjectClientState::Remote { remote_id, .. } => {
5476 self.client
5477 .send(proto::LeaveProject {
5478 project_id: *remote_id,
5479 })
5480 .log_err();
5481 }
5482 }
5483 }
5484
5485 fn app_will_quit(
5486 &mut self,
5487 _: &mut MutableAppContext,
5488 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5489 let shutdown_futures = self
5490 .language_servers
5491 .drain()
5492 .filter_map(|(_, (_, server))| server.shutdown())
5493 .collect::<Vec<_>>();
5494 Some(
5495 async move {
5496 futures::future::join_all(shutdown_futures).await;
5497 }
5498 .boxed(),
5499 )
5500 }
5501}
5502
5503impl Collaborator {
5504 fn from_proto(
5505 message: proto::Collaborator,
5506 user_store: &ModelHandle<UserStore>,
5507 cx: &mut AsyncAppContext,
5508 ) -> impl Future<Output = Result<Self>> {
5509 let user = user_store.update(cx, |user_store, cx| {
5510 user_store.fetch_user(message.user_id, cx)
5511 });
5512
5513 async move {
5514 Ok(Self {
5515 peer_id: PeerId(message.peer_id),
5516 user: user.await?,
5517 replica_id: message.replica_id as ReplicaId,
5518 })
5519 }
5520 }
5521}
5522
5523impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5524 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5525 Self {
5526 worktree_id,
5527 path: path.as_ref().into(),
5528 }
5529 }
5530}
5531
5532impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5533 fn from(options: lsp::CreateFileOptions) -> Self {
5534 Self {
5535 overwrite: options.overwrite.unwrap_or(false),
5536 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5537 }
5538 }
5539}
5540
5541impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5542 fn from(options: lsp::RenameFileOptions) -> Self {
5543 Self {
5544 overwrite: options.overwrite.unwrap_or(false),
5545 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5546 }
5547 }
5548}
5549
5550impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5551 fn from(options: lsp::DeleteFileOptions) -> Self {
5552 Self {
5553 recursive: options.recursive.unwrap_or(false),
5554 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5555 }
5556 }
5557}
5558
5559fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5560 proto::Symbol {
5561 source_worktree_id: symbol.source_worktree_id.to_proto(),
5562 worktree_id: symbol.worktree_id.to_proto(),
5563 language_server_name: symbol.language_server_name.0.to_string(),
5564 name: symbol.name.clone(),
5565 kind: unsafe { mem::transmute(symbol.kind) },
5566 path: symbol.path.to_string_lossy().to_string(),
5567 start: Some(proto::Point {
5568 row: symbol.range.start.row,
5569 column: symbol.range.start.column,
5570 }),
5571 end: Some(proto::Point {
5572 row: symbol.range.end.row,
5573 column: symbol.range.end.column,
5574 }),
5575 signature: symbol.signature.to_vec(),
5576 }
5577}
5578
5579fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5580 let mut path_components = path.components();
5581 let mut base_components = base.components();
5582 let mut components: Vec<Component> = Vec::new();
5583 loop {
5584 match (path_components.next(), base_components.next()) {
5585 (None, None) => break,
5586 (Some(a), None) => {
5587 components.push(a);
5588 components.extend(path_components.by_ref());
5589 break;
5590 }
5591 (None, _) => components.push(Component::ParentDir),
5592 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5593 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5594 (Some(a), Some(_)) => {
5595 components.push(Component::ParentDir);
5596 for _ in base_components {
5597 components.push(Component::ParentDir);
5598 }
5599 components.push(a);
5600 components.extend(path_components.by_ref());
5601 break;
5602 }
5603 }
5604 }
5605 components.iter().map(|c| c.as_os_str()).collect()
5606}
5607
5608impl Item for Buffer {
5609 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5610 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5611 }
5612}
5613
5614#[cfg(test)]
5615mod tests {
5616 use crate::worktree::WorktreeHandle;
5617
5618 use super::{Event, *};
5619 use fs::RealFs;
5620 use futures::{future, StreamExt};
5621 use gpui::test::subscribe;
5622 use language::{
5623 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5624 OffsetRangeExt, Point, ToPoint,
5625 };
5626 use lsp::Url;
5627 use serde_json::json;
5628 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5629 use unindent::Unindent as _;
5630 use util::{assert_set_eq, test::temp_tree};
5631
5632 #[gpui::test]
5633 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5634 let dir = temp_tree(json!({
5635 "root": {
5636 "apple": "",
5637 "banana": {
5638 "carrot": {
5639 "date": "",
5640 "endive": "",
5641 }
5642 },
5643 "fennel": {
5644 "grape": "",
5645 }
5646 }
5647 }));
5648
5649 let root_link_path = dir.path().join("root_link");
5650 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5651 unix::fs::symlink(
5652 &dir.path().join("root/fennel"),
5653 &dir.path().join("root/finnochio"),
5654 )
5655 .unwrap();
5656
5657 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5658
5659 project.read_with(cx, |project, cx| {
5660 let tree = project.worktrees(cx).next().unwrap().read(cx);
5661 assert_eq!(tree.file_count(), 5);
5662 assert_eq!(
5663 tree.inode_for_path("fennel/grape"),
5664 tree.inode_for_path("finnochio/grape")
5665 );
5666 });
5667
5668 let cancel_flag = Default::default();
5669 let results = project
5670 .read_with(cx, |project, cx| {
5671 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5672 })
5673 .await;
5674 assert_eq!(
5675 results
5676 .into_iter()
5677 .map(|result| result.path)
5678 .collect::<Vec<Arc<Path>>>(),
5679 vec![
5680 PathBuf::from("banana/carrot/date").into(),
5681 PathBuf::from("banana/carrot/endive").into(),
5682 ]
5683 );
5684 }
5685
5686 #[gpui::test]
5687 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5688 cx.foreground().forbid_parking();
5689
5690 let mut rust_language = Language::new(
5691 LanguageConfig {
5692 name: "Rust".into(),
5693 path_suffixes: vec!["rs".to_string()],
5694 ..Default::default()
5695 },
5696 Some(tree_sitter_rust::language()),
5697 );
5698 let mut json_language = Language::new(
5699 LanguageConfig {
5700 name: "JSON".into(),
5701 path_suffixes: vec!["json".to_string()],
5702 ..Default::default()
5703 },
5704 None,
5705 );
5706 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5707 name: "the-rust-language-server",
5708 capabilities: lsp::ServerCapabilities {
5709 completion_provider: Some(lsp::CompletionOptions {
5710 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5711 ..Default::default()
5712 }),
5713 ..Default::default()
5714 },
5715 ..Default::default()
5716 });
5717 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5718 name: "the-json-language-server",
5719 capabilities: lsp::ServerCapabilities {
5720 completion_provider: Some(lsp::CompletionOptions {
5721 trigger_characters: Some(vec![":".to_string()]),
5722 ..Default::default()
5723 }),
5724 ..Default::default()
5725 },
5726 ..Default::default()
5727 });
5728
5729 let fs = FakeFs::new(cx.background());
5730 fs.insert_tree(
5731 "/the-root",
5732 json!({
5733 "test.rs": "const A: i32 = 1;",
5734 "test2.rs": "",
5735 "Cargo.toml": "a = 1",
5736 "package.json": "{\"a\": 1}",
5737 }),
5738 )
5739 .await;
5740
5741 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5742 project.update(cx, |project, _| {
5743 project.languages.add(Arc::new(rust_language));
5744 project.languages.add(Arc::new(json_language));
5745 });
5746
5747 // Open a buffer without an associated language server.
5748 let toml_buffer = project
5749 .update(cx, |project, cx| {
5750 project.open_local_buffer("/the-root/Cargo.toml", cx)
5751 })
5752 .await
5753 .unwrap();
5754
5755 // Open a buffer with an associated language server.
5756 let rust_buffer = project
5757 .update(cx, |project, cx| {
5758 project.open_local_buffer("/the-root/test.rs", cx)
5759 })
5760 .await
5761 .unwrap();
5762
5763 // A server is started up, and it is notified about Rust files.
5764 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5765 assert_eq!(
5766 fake_rust_server
5767 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5768 .await
5769 .text_document,
5770 lsp::TextDocumentItem {
5771 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5772 version: 0,
5773 text: "const A: i32 = 1;".to_string(),
5774 language_id: Default::default()
5775 }
5776 );
5777
5778 // The buffer is configured based on the language server's capabilities.
5779 rust_buffer.read_with(cx, |buffer, _| {
5780 assert_eq!(
5781 buffer.completion_triggers(),
5782 &[".".to_string(), "::".to_string()]
5783 );
5784 });
5785 toml_buffer.read_with(cx, |buffer, _| {
5786 assert!(buffer.completion_triggers().is_empty());
5787 });
5788
5789 // Edit a buffer. The changes are reported to the language server.
5790 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5791 assert_eq!(
5792 fake_rust_server
5793 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5794 .await
5795 .text_document,
5796 lsp::VersionedTextDocumentIdentifier::new(
5797 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5798 1
5799 )
5800 );
5801
5802 // Open a third buffer with a different associated language server.
5803 let json_buffer = project
5804 .update(cx, |project, cx| {
5805 project.open_local_buffer("/the-root/package.json", cx)
5806 })
5807 .await
5808 .unwrap();
5809
5810 // A json language server is started up and is only notified about the json buffer.
5811 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5812 assert_eq!(
5813 fake_json_server
5814 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5815 .await
5816 .text_document,
5817 lsp::TextDocumentItem {
5818 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5819 version: 0,
5820 text: "{\"a\": 1}".to_string(),
5821 language_id: Default::default()
5822 }
5823 );
5824
5825 // This buffer is configured based on the second language server's
5826 // capabilities.
5827 json_buffer.read_with(cx, |buffer, _| {
5828 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5829 });
5830
5831 // When opening another buffer whose language server is already running,
5832 // it is also configured based on the existing language server's capabilities.
5833 let rust_buffer2 = project
5834 .update(cx, |project, cx| {
5835 project.open_local_buffer("/the-root/test2.rs", cx)
5836 })
5837 .await
5838 .unwrap();
5839 rust_buffer2.read_with(cx, |buffer, _| {
5840 assert_eq!(
5841 buffer.completion_triggers(),
5842 &[".".to_string(), "::".to_string()]
5843 );
5844 });
5845
5846 // Changes are reported only to servers matching the buffer's language.
5847 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5848 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5849 assert_eq!(
5850 fake_rust_server
5851 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5852 .await
5853 .text_document,
5854 lsp::VersionedTextDocumentIdentifier::new(
5855 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5856 1
5857 )
5858 );
5859
5860 // Save notifications are reported to all servers.
5861 toml_buffer
5862 .update(cx, |buffer, cx| buffer.save(cx))
5863 .await
5864 .unwrap();
5865 assert_eq!(
5866 fake_rust_server
5867 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5868 .await
5869 .text_document,
5870 lsp::TextDocumentIdentifier::new(
5871 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5872 )
5873 );
5874 assert_eq!(
5875 fake_json_server
5876 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5877 .await
5878 .text_document,
5879 lsp::TextDocumentIdentifier::new(
5880 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5881 )
5882 );
5883
5884 // Renames are reported only to servers matching the buffer's language.
5885 fs.rename(
5886 Path::new("/the-root/test2.rs"),
5887 Path::new("/the-root/test3.rs"),
5888 Default::default(),
5889 )
5890 .await
5891 .unwrap();
5892 assert_eq!(
5893 fake_rust_server
5894 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5895 .await
5896 .text_document,
5897 lsp::TextDocumentIdentifier::new(
5898 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5899 ),
5900 );
5901 assert_eq!(
5902 fake_rust_server
5903 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5904 .await
5905 .text_document,
5906 lsp::TextDocumentItem {
5907 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5908 version: 0,
5909 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5910 language_id: Default::default()
5911 },
5912 );
5913
5914 rust_buffer2.update(cx, |buffer, cx| {
5915 buffer.update_diagnostics(
5916 DiagnosticSet::from_sorted_entries(
5917 vec![DiagnosticEntry {
5918 diagnostic: Default::default(),
5919 range: Anchor::MIN..Anchor::MAX,
5920 }],
5921 &buffer.snapshot(),
5922 ),
5923 cx,
5924 );
5925 assert_eq!(
5926 buffer
5927 .snapshot()
5928 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5929 .count(),
5930 1
5931 );
5932 });
5933
5934 // When the rename changes the extension of the file, the buffer gets closed on the old
5935 // language server and gets opened on the new one.
5936 fs.rename(
5937 Path::new("/the-root/test3.rs"),
5938 Path::new("/the-root/test3.json"),
5939 Default::default(),
5940 )
5941 .await
5942 .unwrap();
5943 assert_eq!(
5944 fake_rust_server
5945 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5946 .await
5947 .text_document,
5948 lsp::TextDocumentIdentifier::new(
5949 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5950 ),
5951 );
5952 assert_eq!(
5953 fake_json_server
5954 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5955 .await
5956 .text_document,
5957 lsp::TextDocumentItem {
5958 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5959 version: 0,
5960 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5961 language_id: Default::default()
5962 },
5963 );
5964
5965 // We clear the diagnostics, since the language has changed.
5966 rust_buffer2.read_with(cx, |buffer, _| {
5967 assert_eq!(
5968 buffer
5969 .snapshot()
5970 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5971 .count(),
5972 0
5973 );
5974 });
5975
5976 // The renamed file's version resets after changing language server.
5977 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5978 assert_eq!(
5979 fake_json_server
5980 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5981 .await
5982 .text_document,
5983 lsp::VersionedTextDocumentIdentifier::new(
5984 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5985 1
5986 )
5987 );
5988
5989 // Restart language servers
5990 project.update(cx, |project, cx| {
5991 project.restart_language_servers_for_buffers(
5992 vec![rust_buffer.clone(), json_buffer.clone()],
5993 cx,
5994 );
5995 });
5996
5997 let mut rust_shutdown_requests = fake_rust_server
5998 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5999 let mut json_shutdown_requests = fake_json_server
6000 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6001 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6002
6003 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6004 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6005
6006 // Ensure rust document is reopened in new rust language server
6007 assert_eq!(
6008 fake_rust_server
6009 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6010 .await
6011 .text_document,
6012 lsp::TextDocumentItem {
6013 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6014 version: 1,
6015 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6016 language_id: Default::default()
6017 }
6018 );
6019
6020 // Ensure json documents are reopened in new json language server
6021 assert_set_eq!(
6022 [
6023 fake_json_server
6024 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6025 .await
6026 .text_document,
6027 fake_json_server
6028 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6029 .await
6030 .text_document,
6031 ],
6032 [
6033 lsp::TextDocumentItem {
6034 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6035 version: 0,
6036 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6037 language_id: Default::default()
6038 },
6039 lsp::TextDocumentItem {
6040 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6041 version: 1,
6042 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6043 language_id: Default::default()
6044 }
6045 ]
6046 );
6047
6048 // Close notifications are reported only to servers matching the buffer's language.
6049 cx.update(|_| drop(json_buffer));
6050 let close_message = lsp::DidCloseTextDocumentParams {
6051 text_document: lsp::TextDocumentIdentifier::new(
6052 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6053 ),
6054 };
6055 assert_eq!(
6056 fake_json_server
6057 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6058 .await,
6059 close_message,
6060 );
6061 }
6062
6063 #[gpui::test]
6064 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6065 cx.foreground().forbid_parking();
6066
6067 let fs = FakeFs::new(cx.background());
6068 fs.insert_tree(
6069 "/dir",
6070 json!({
6071 "a.rs": "let a = 1;",
6072 "b.rs": "let b = 2;"
6073 }),
6074 )
6075 .await;
6076
6077 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6078
6079 let buffer_a = project
6080 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6081 .await
6082 .unwrap();
6083 let buffer_b = project
6084 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6085 .await
6086 .unwrap();
6087
6088 project.update(cx, |project, cx| {
6089 project
6090 .update_diagnostics(
6091 lsp::PublishDiagnosticsParams {
6092 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6093 version: None,
6094 diagnostics: vec![lsp::Diagnostic {
6095 range: lsp::Range::new(
6096 lsp::Position::new(0, 4),
6097 lsp::Position::new(0, 5),
6098 ),
6099 severity: Some(lsp::DiagnosticSeverity::ERROR),
6100 message: "error 1".to_string(),
6101 ..Default::default()
6102 }],
6103 },
6104 &[],
6105 cx,
6106 )
6107 .unwrap();
6108 project
6109 .update_diagnostics(
6110 lsp::PublishDiagnosticsParams {
6111 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6112 version: None,
6113 diagnostics: vec![lsp::Diagnostic {
6114 range: lsp::Range::new(
6115 lsp::Position::new(0, 4),
6116 lsp::Position::new(0, 5),
6117 ),
6118 severity: Some(lsp::DiagnosticSeverity::WARNING),
6119 message: "error 2".to_string(),
6120 ..Default::default()
6121 }],
6122 },
6123 &[],
6124 cx,
6125 )
6126 .unwrap();
6127 });
6128
6129 buffer_a.read_with(cx, |buffer, _| {
6130 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6131 assert_eq!(
6132 chunks
6133 .iter()
6134 .map(|(s, d)| (s.as_str(), *d))
6135 .collect::<Vec<_>>(),
6136 &[
6137 ("let ", None),
6138 ("a", Some(DiagnosticSeverity::ERROR)),
6139 (" = 1;", None),
6140 ]
6141 );
6142 });
6143 buffer_b.read_with(cx, |buffer, _| {
6144 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6145 assert_eq!(
6146 chunks
6147 .iter()
6148 .map(|(s, d)| (s.as_str(), *d))
6149 .collect::<Vec<_>>(),
6150 &[
6151 ("let ", None),
6152 ("b", Some(DiagnosticSeverity::WARNING)),
6153 (" = 2;", None),
6154 ]
6155 );
6156 });
6157 }
6158
6159 #[gpui::test]
6160 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6161 cx.foreground().forbid_parking();
6162
6163 let progress_token = "the-progress-token";
6164 let mut language = Language::new(
6165 LanguageConfig {
6166 name: "Rust".into(),
6167 path_suffixes: vec!["rs".to_string()],
6168 ..Default::default()
6169 },
6170 Some(tree_sitter_rust::language()),
6171 );
6172 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6173 disk_based_diagnostics_progress_token: Some(progress_token),
6174 disk_based_diagnostics_sources: &["disk"],
6175 ..Default::default()
6176 });
6177
6178 let fs = FakeFs::new(cx.background());
6179 fs.insert_tree(
6180 "/dir",
6181 json!({
6182 "a.rs": "fn a() { A }",
6183 "b.rs": "const y: i32 = 1",
6184 }),
6185 )
6186 .await;
6187
6188 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6189 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6190 let worktree_id =
6191 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6192
6193 // Cause worktree to start the fake language server
6194 let _buffer = project
6195 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6196 .await
6197 .unwrap();
6198
6199 let mut events = subscribe(&project, cx);
6200
6201 let mut fake_server = fake_servers.next().await.unwrap();
6202 fake_server.start_progress(progress_token).await;
6203 assert_eq!(
6204 events.next().await.unwrap(),
6205 Event::DiskBasedDiagnosticsStarted
6206 );
6207
6208 fake_server.start_progress(progress_token).await;
6209 fake_server.end_progress(progress_token).await;
6210 fake_server.start_progress(progress_token).await;
6211
6212 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6213 lsp::PublishDiagnosticsParams {
6214 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6215 version: None,
6216 diagnostics: vec![lsp::Diagnostic {
6217 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6218 severity: Some(lsp::DiagnosticSeverity::ERROR),
6219 message: "undefined variable 'A'".to_string(),
6220 ..Default::default()
6221 }],
6222 },
6223 );
6224 assert_eq!(
6225 events.next().await.unwrap(),
6226 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6227 );
6228
6229 fake_server.end_progress(progress_token).await;
6230 fake_server.end_progress(progress_token).await;
6231 assert_eq!(
6232 events.next().await.unwrap(),
6233 Event::DiskBasedDiagnosticsUpdated
6234 );
6235 assert_eq!(
6236 events.next().await.unwrap(),
6237 Event::DiskBasedDiagnosticsFinished
6238 );
6239
6240 let buffer = project
6241 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6242 .await
6243 .unwrap();
6244
6245 buffer.read_with(cx, |buffer, _| {
6246 let snapshot = buffer.snapshot();
6247 let diagnostics = snapshot
6248 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6249 .collect::<Vec<_>>();
6250 assert_eq!(
6251 diagnostics,
6252 &[DiagnosticEntry {
6253 range: Point::new(0, 9)..Point::new(0, 10),
6254 diagnostic: Diagnostic {
6255 severity: lsp::DiagnosticSeverity::ERROR,
6256 message: "undefined variable 'A'".to_string(),
6257 group_id: 0,
6258 is_primary: true,
6259 ..Default::default()
6260 }
6261 }]
6262 )
6263 });
6264
6265 // Ensure publishing empty diagnostics twice only results in one update event.
6266 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6267 lsp::PublishDiagnosticsParams {
6268 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6269 version: None,
6270 diagnostics: Default::default(),
6271 },
6272 );
6273 assert_eq!(
6274 events.next().await.unwrap(),
6275 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6276 );
6277
6278 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6279 lsp::PublishDiagnosticsParams {
6280 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6281 version: None,
6282 diagnostics: Default::default(),
6283 },
6284 );
6285 cx.foreground().run_until_parked();
6286 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6287 }
6288
6289 #[gpui::test]
6290 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6291 cx.foreground().forbid_parking();
6292
6293 let progress_token = "the-progress-token";
6294 let mut language = Language::new(
6295 LanguageConfig {
6296 path_suffixes: vec!["rs".to_string()],
6297 ..Default::default()
6298 },
6299 None,
6300 );
6301 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6302 disk_based_diagnostics_sources: &["disk"],
6303 disk_based_diagnostics_progress_token: Some(progress_token),
6304 ..Default::default()
6305 });
6306
6307 let fs = FakeFs::new(cx.background());
6308 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6309
6310 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6311 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6312
6313 let buffer = project
6314 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6315 .await
6316 .unwrap();
6317
6318 // Simulate diagnostics starting to update.
6319 let mut fake_server = fake_servers.next().await.unwrap();
6320 fake_server.start_progress(progress_token).await;
6321
6322 // Restart the server before the diagnostics finish updating.
6323 project.update(cx, |project, cx| {
6324 project.restart_language_servers_for_buffers([buffer], cx);
6325 });
6326 let mut events = subscribe(&project, cx);
6327
6328 // Simulate the newly started server sending more diagnostics.
6329 let mut fake_server = fake_servers.next().await.unwrap();
6330 fake_server.start_progress(progress_token).await;
6331 assert_eq!(
6332 events.next().await.unwrap(),
6333 Event::DiskBasedDiagnosticsStarted
6334 );
6335
6336 // All diagnostics are considered done, despite the old server's diagnostic
6337 // task never completing.
6338 fake_server.end_progress(progress_token).await;
6339 assert_eq!(
6340 events.next().await.unwrap(),
6341 Event::DiskBasedDiagnosticsUpdated
6342 );
6343 assert_eq!(
6344 events.next().await.unwrap(),
6345 Event::DiskBasedDiagnosticsFinished
6346 );
6347 project.read_with(cx, |project, _| {
6348 assert!(!project.is_running_disk_based_diagnostics());
6349 });
6350 }
6351
6352 #[gpui::test]
6353 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6354 cx.foreground().forbid_parking();
6355
6356 let mut language = Language::new(
6357 LanguageConfig {
6358 name: "Rust".into(),
6359 path_suffixes: vec!["rs".to_string()],
6360 ..Default::default()
6361 },
6362 Some(tree_sitter_rust::language()),
6363 );
6364 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6365 disk_based_diagnostics_sources: &["disk"],
6366 ..Default::default()
6367 });
6368
6369 let text = "
6370 fn a() { A }
6371 fn b() { BB }
6372 fn c() { CCC }
6373 "
6374 .unindent();
6375
6376 let fs = FakeFs::new(cx.background());
6377 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6378
6379 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6380 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6381
6382 let buffer = project
6383 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6384 .await
6385 .unwrap();
6386
6387 let mut fake_server = fake_servers.next().await.unwrap();
6388 let open_notification = fake_server
6389 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6390 .await;
6391
6392 // Edit the buffer, moving the content down
6393 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6394 let change_notification_1 = fake_server
6395 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6396 .await;
6397 assert!(
6398 change_notification_1.text_document.version > open_notification.text_document.version
6399 );
6400
6401 // Report some diagnostics for the initial version of the buffer
6402 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6403 lsp::PublishDiagnosticsParams {
6404 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6405 version: Some(open_notification.text_document.version),
6406 diagnostics: vec![
6407 lsp::Diagnostic {
6408 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6409 severity: Some(DiagnosticSeverity::ERROR),
6410 message: "undefined variable 'A'".to_string(),
6411 source: Some("disk".to_string()),
6412 ..Default::default()
6413 },
6414 lsp::Diagnostic {
6415 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6416 severity: Some(DiagnosticSeverity::ERROR),
6417 message: "undefined variable 'BB'".to_string(),
6418 source: Some("disk".to_string()),
6419 ..Default::default()
6420 },
6421 lsp::Diagnostic {
6422 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6423 severity: Some(DiagnosticSeverity::ERROR),
6424 source: Some("disk".to_string()),
6425 message: "undefined variable 'CCC'".to_string(),
6426 ..Default::default()
6427 },
6428 ],
6429 },
6430 );
6431
6432 // The diagnostics have moved down since they were created.
6433 buffer.next_notification(cx).await;
6434 buffer.read_with(cx, |buffer, _| {
6435 assert_eq!(
6436 buffer
6437 .snapshot()
6438 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6439 .collect::<Vec<_>>(),
6440 &[
6441 DiagnosticEntry {
6442 range: Point::new(3, 9)..Point::new(3, 11),
6443 diagnostic: Diagnostic {
6444 severity: DiagnosticSeverity::ERROR,
6445 message: "undefined variable 'BB'".to_string(),
6446 is_disk_based: true,
6447 group_id: 1,
6448 is_primary: true,
6449 ..Default::default()
6450 },
6451 },
6452 DiagnosticEntry {
6453 range: Point::new(4, 9)..Point::new(4, 12),
6454 diagnostic: Diagnostic {
6455 severity: DiagnosticSeverity::ERROR,
6456 message: "undefined variable 'CCC'".to_string(),
6457 is_disk_based: true,
6458 group_id: 2,
6459 is_primary: true,
6460 ..Default::default()
6461 }
6462 }
6463 ]
6464 );
6465 assert_eq!(
6466 chunks_with_diagnostics(buffer, 0..buffer.len()),
6467 [
6468 ("\n\nfn a() { ".to_string(), None),
6469 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6470 (" }\nfn b() { ".to_string(), None),
6471 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6472 (" }\nfn c() { ".to_string(), None),
6473 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6474 (" }\n".to_string(), None),
6475 ]
6476 );
6477 assert_eq!(
6478 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6479 [
6480 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6481 (" }\nfn c() { ".to_string(), None),
6482 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6483 ]
6484 );
6485 });
6486
6487 // Ensure overlapping diagnostics are highlighted correctly.
6488 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6489 lsp::PublishDiagnosticsParams {
6490 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6491 version: Some(open_notification.text_document.version),
6492 diagnostics: vec![
6493 lsp::Diagnostic {
6494 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6495 severity: Some(DiagnosticSeverity::ERROR),
6496 message: "undefined variable 'A'".to_string(),
6497 source: Some("disk".to_string()),
6498 ..Default::default()
6499 },
6500 lsp::Diagnostic {
6501 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6502 severity: Some(DiagnosticSeverity::WARNING),
6503 message: "unreachable statement".to_string(),
6504 source: Some("disk".to_string()),
6505 ..Default::default()
6506 },
6507 ],
6508 },
6509 );
6510
6511 buffer.next_notification(cx).await;
6512 buffer.read_with(cx, |buffer, _| {
6513 assert_eq!(
6514 buffer
6515 .snapshot()
6516 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6517 .collect::<Vec<_>>(),
6518 &[
6519 DiagnosticEntry {
6520 range: Point::new(2, 9)..Point::new(2, 12),
6521 diagnostic: Diagnostic {
6522 severity: DiagnosticSeverity::WARNING,
6523 message: "unreachable statement".to_string(),
6524 is_disk_based: true,
6525 group_id: 4,
6526 is_primary: true,
6527 ..Default::default()
6528 }
6529 },
6530 DiagnosticEntry {
6531 range: Point::new(2, 9)..Point::new(2, 10),
6532 diagnostic: Diagnostic {
6533 severity: DiagnosticSeverity::ERROR,
6534 message: "undefined variable 'A'".to_string(),
6535 is_disk_based: true,
6536 group_id: 3,
6537 is_primary: true,
6538 ..Default::default()
6539 },
6540 }
6541 ]
6542 );
6543 assert_eq!(
6544 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6545 [
6546 ("fn a() { ".to_string(), None),
6547 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6548 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6549 ("\n".to_string(), None),
6550 ]
6551 );
6552 assert_eq!(
6553 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6554 [
6555 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6556 ("\n".to_string(), None),
6557 ]
6558 );
6559 });
6560
6561 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6562 // changes since the last save.
6563 buffer.update(cx, |buffer, cx| {
6564 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6565 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6566 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6567 });
6568 let change_notification_2 = fake_server
6569 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6570 .await;
6571 assert!(
6572 change_notification_2.text_document.version
6573 > change_notification_1.text_document.version
6574 );
6575
6576 // Handle out-of-order diagnostics
6577 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6578 lsp::PublishDiagnosticsParams {
6579 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6580 version: Some(change_notification_2.text_document.version),
6581 diagnostics: vec![
6582 lsp::Diagnostic {
6583 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6584 severity: Some(DiagnosticSeverity::ERROR),
6585 message: "undefined variable 'BB'".to_string(),
6586 source: Some("disk".to_string()),
6587 ..Default::default()
6588 },
6589 lsp::Diagnostic {
6590 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6591 severity: Some(DiagnosticSeverity::WARNING),
6592 message: "undefined variable 'A'".to_string(),
6593 source: Some("disk".to_string()),
6594 ..Default::default()
6595 },
6596 ],
6597 },
6598 );
6599
6600 buffer.next_notification(cx).await;
6601 buffer.read_with(cx, |buffer, _| {
6602 assert_eq!(
6603 buffer
6604 .snapshot()
6605 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6606 .collect::<Vec<_>>(),
6607 &[
6608 DiagnosticEntry {
6609 range: Point::new(2, 21)..Point::new(2, 22),
6610 diagnostic: Diagnostic {
6611 severity: DiagnosticSeverity::WARNING,
6612 message: "undefined variable 'A'".to_string(),
6613 is_disk_based: true,
6614 group_id: 6,
6615 is_primary: true,
6616 ..Default::default()
6617 }
6618 },
6619 DiagnosticEntry {
6620 range: Point::new(3, 9)..Point::new(3, 14),
6621 diagnostic: Diagnostic {
6622 severity: DiagnosticSeverity::ERROR,
6623 message: "undefined variable 'BB'".to_string(),
6624 is_disk_based: true,
6625 group_id: 5,
6626 is_primary: true,
6627 ..Default::default()
6628 },
6629 }
6630 ]
6631 );
6632 });
6633 }
6634
6635 #[gpui::test]
6636 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6637 cx.foreground().forbid_parking();
6638
6639 let text = concat!(
6640 "let one = ;\n", //
6641 "let two = \n",
6642 "let three = 3;\n",
6643 );
6644
6645 let fs = FakeFs::new(cx.background());
6646 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6647
6648 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6649 let buffer = project
6650 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6651 .await
6652 .unwrap();
6653
6654 project.update(cx, |project, cx| {
6655 project
6656 .update_buffer_diagnostics(
6657 &buffer,
6658 vec![
6659 DiagnosticEntry {
6660 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6661 diagnostic: Diagnostic {
6662 severity: DiagnosticSeverity::ERROR,
6663 message: "syntax error 1".to_string(),
6664 ..Default::default()
6665 },
6666 },
6667 DiagnosticEntry {
6668 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6669 diagnostic: Diagnostic {
6670 severity: DiagnosticSeverity::ERROR,
6671 message: "syntax error 2".to_string(),
6672 ..Default::default()
6673 },
6674 },
6675 ],
6676 None,
6677 cx,
6678 )
6679 .unwrap();
6680 });
6681
6682 // An empty range is extended forward to include the following character.
6683 // At the end of a line, an empty range is extended backward to include
6684 // the preceding character.
6685 buffer.read_with(cx, |buffer, _| {
6686 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6687 assert_eq!(
6688 chunks
6689 .iter()
6690 .map(|(s, d)| (s.as_str(), *d))
6691 .collect::<Vec<_>>(),
6692 &[
6693 ("let one = ", None),
6694 (";", Some(DiagnosticSeverity::ERROR)),
6695 ("\nlet two =", None),
6696 (" ", Some(DiagnosticSeverity::ERROR)),
6697 ("\nlet three = 3;\n", None)
6698 ]
6699 );
6700 });
6701 }
6702
6703 #[gpui::test]
6704 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6705 cx.foreground().forbid_parking();
6706
6707 let mut language = Language::new(
6708 LanguageConfig {
6709 name: "Rust".into(),
6710 path_suffixes: vec!["rs".to_string()],
6711 ..Default::default()
6712 },
6713 Some(tree_sitter_rust::language()),
6714 );
6715 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6716
6717 let text = "
6718 fn a() {
6719 f1();
6720 }
6721 fn b() {
6722 f2();
6723 }
6724 fn c() {
6725 f3();
6726 }
6727 "
6728 .unindent();
6729
6730 let fs = FakeFs::new(cx.background());
6731 fs.insert_tree(
6732 "/dir",
6733 json!({
6734 "a.rs": text.clone(),
6735 }),
6736 )
6737 .await;
6738
6739 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6740 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6741 let buffer = project
6742 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6743 .await
6744 .unwrap();
6745
6746 let mut fake_server = fake_servers.next().await.unwrap();
6747 let lsp_document_version = fake_server
6748 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6749 .await
6750 .text_document
6751 .version;
6752
6753 // Simulate editing the buffer after the language server computes some edits.
6754 buffer.update(cx, |buffer, cx| {
6755 buffer.edit(
6756 [(
6757 Point::new(0, 0)..Point::new(0, 0),
6758 "// above first function\n",
6759 )],
6760 cx,
6761 );
6762 buffer.edit(
6763 [(
6764 Point::new(2, 0)..Point::new(2, 0),
6765 " // inside first function\n",
6766 )],
6767 cx,
6768 );
6769 buffer.edit(
6770 [(
6771 Point::new(6, 4)..Point::new(6, 4),
6772 "// inside second function ",
6773 )],
6774 cx,
6775 );
6776
6777 assert_eq!(
6778 buffer.text(),
6779 "
6780 // above first function
6781 fn a() {
6782 // inside first function
6783 f1();
6784 }
6785 fn b() {
6786 // inside second function f2();
6787 }
6788 fn c() {
6789 f3();
6790 }
6791 "
6792 .unindent()
6793 );
6794 });
6795
6796 let edits = project
6797 .update(cx, |project, cx| {
6798 project.edits_from_lsp(
6799 &buffer,
6800 vec![
6801 // replace body of first function
6802 lsp::TextEdit {
6803 range: lsp::Range::new(
6804 lsp::Position::new(0, 0),
6805 lsp::Position::new(3, 0),
6806 ),
6807 new_text: "
6808 fn a() {
6809 f10();
6810 }
6811 "
6812 .unindent(),
6813 },
6814 // edit inside second function
6815 lsp::TextEdit {
6816 range: lsp::Range::new(
6817 lsp::Position::new(4, 6),
6818 lsp::Position::new(4, 6),
6819 ),
6820 new_text: "00".into(),
6821 },
6822 // edit inside third function via two distinct edits
6823 lsp::TextEdit {
6824 range: lsp::Range::new(
6825 lsp::Position::new(7, 5),
6826 lsp::Position::new(7, 5),
6827 ),
6828 new_text: "4000".into(),
6829 },
6830 lsp::TextEdit {
6831 range: lsp::Range::new(
6832 lsp::Position::new(7, 5),
6833 lsp::Position::new(7, 6),
6834 ),
6835 new_text: "".into(),
6836 },
6837 ],
6838 Some(lsp_document_version),
6839 cx,
6840 )
6841 })
6842 .await
6843 .unwrap();
6844
6845 buffer.update(cx, |buffer, cx| {
6846 for (range, new_text) in edits {
6847 buffer.edit([(range, new_text)], cx);
6848 }
6849 assert_eq!(
6850 buffer.text(),
6851 "
6852 // above first function
6853 fn a() {
6854 // inside first function
6855 f10();
6856 }
6857 fn b() {
6858 // inside second function f200();
6859 }
6860 fn c() {
6861 f4000();
6862 }
6863 "
6864 .unindent()
6865 );
6866 });
6867 }
6868
6869 #[gpui::test]
6870 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6871 cx.foreground().forbid_parking();
6872
6873 let text = "
6874 use a::b;
6875 use a::c;
6876
6877 fn f() {
6878 b();
6879 c();
6880 }
6881 "
6882 .unindent();
6883
6884 let fs = FakeFs::new(cx.background());
6885 fs.insert_tree(
6886 "/dir",
6887 json!({
6888 "a.rs": text.clone(),
6889 }),
6890 )
6891 .await;
6892
6893 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6894 let buffer = project
6895 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6896 .await
6897 .unwrap();
6898
6899 // Simulate the language server sending us a small edit in the form of a very large diff.
6900 // Rust-analyzer does this when performing a merge-imports code action.
6901 let edits = project
6902 .update(cx, |project, cx| {
6903 project.edits_from_lsp(
6904 &buffer,
6905 [
6906 // Replace the first use statement without editing the semicolon.
6907 lsp::TextEdit {
6908 range: lsp::Range::new(
6909 lsp::Position::new(0, 4),
6910 lsp::Position::new(0, 8),
6911 ),
6912 new_text: "a::{b, c}".into(),
6913 },
6914 // Reinsert the remainder of the file between the semicolon and the final
6915 // newline of the file.
6916 lsp::TextEdit {
6917 range: lsp::Range::new(
6918 lsp::Position::new(0, 9),
6919 lsp::Position::new(0, 9),
6920 ),
6921 new_text: "\n\n".into(),
6922 },
6923 lsp::TextEdit {
6924 range: lsp::Range::new(
6925 lsp::Position::new(0, 9),
6926 lsp::Position::new(0, 9),
6927 ),
6928 new_text: "
6929 fn f() {
6930 b();
6931 c();
6932 }"
6933 .unindent(),
6934 },
6935 // Delete everything after the first newline of the file.
6936 lsp::TextEdit {
6937 range: lsp::Range::new(
6938 lsp::Position::new(1, 0),
6939 lsp::Position::new(7, 0),
6940 ),
6941 new_text: "".into(),
6942 },
6943 ],
6944 None,
6945 cx,
6946 )
6947 })
6948 .await
6949 .unwrap();
6950
6951 buffer.update(cx, |buffer, cx| {
6952 let edits = edits
6953 .into_iter()
6954 .map(|(range, text)| {
6955 (
6956 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6957 text,
6958 )
6959 })
6960 .collect::<Vec<_>>();
6961
6962 assert_eq!(
6963 edits,
6964 [
6965 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6966 (Point::new(1, 0)..Point::new(2, 0), "".into())
6967 ]
6968 );
6969
6970 for (range, new_text) in edits {
6971 buffer.edit([(range, new_text)], cx);
6972 }
6973 assert_eq!(
6974 buffer.text(),
6975 "
6976 use a::{b, c};
6977
6978 fn f() {
6979 b();
6980 c();
6981 }
6982 "
6983 .unindent()
6984 );
6985 });
6986 }
6987
6988 #[gpui::test]
6989 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
6990 cx.foreground().forbid_parking();
6991
6992 let text = "
6993 use a::b;
6994 use a::c;
6995
6996 fn f() {
6997 b();
6998 c();
6999 }
7000 "
7001 .unindent();
7002
7003 let fs = FakeFs::new(cx.background());
7004 fs.insert_tree(
7005 "/dir",
7006 json!({
7007 "a.rs": text.clone(),
7008 }),
7009 )
7010 .await;
7011
7012 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7013 let buffer = project
7014 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7015 .await
7016 .unwrap();
7017
7018 // Simulate the language server sending us edits in a non-ordered fashion,
7019 // with ranges sometimes being inverted.
7020 let edits = project
7021 .update(cx, |project, cx| {
7022 project.edits_from_lsp(
7023 &buffer,
7024 [
7025 lsp::TextEdit {
7026 range: lsp::Range::new(
7027 lsp::Position::new(0, 9),
7028 lsp::Position::new(0, 9),
7029 ),
7030 new_text: "\n\n".into(),
7031 },
7032 lsp::TextEdit {
7033 range: lsp::Range::new(
7034 lsp::Position::new(0, 8),
7035 lsp::Position::new(0, 4),
7036 ),
7037 new_text: "a::{b, c}".into(),
7038 },
7039 lsp::TextEdit {
7040 range: lsp::Range::new(
7041 lsp::Position::new(1, 0),
7042 lsp::Position::new(7, 0),
7043 ),
7044 new_text: "".into(),
7045 },
7046 lsp::TextEdit {
7047 range: lsp::Range::new(
7048 lsp::Position::new(0, 9),
7049 lsp::Position::new(0, 9),
7050 ),
7051 new_text: "
7052 fn f() {
7053 b();
7054 c();
7055 }"
7056 .unindent(),
7057 },
7058 ],
7059 None,
7060 cx,
7061 )
7062 })
7063 .await
7064 .unwrap();
7065
7066 buffer.update(cx, |buffer, cx| {
7067 let edits = edits
7068 .into_iter()
7069 .map(|(range, text)| {
7070 (
7071 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7072 text,
7073 )
7074 })
7075 .collect::<Vec<_>>();
7076
7077 assert_eq!(
7078 edits,
7079 [
7080 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7081 (Point::new(1, 0)..Point::new(2, 0), "".into())
7082 ]
7083 );
7084
7085 for (range, new_text) in edits {
7086 buffer.edit([(range, new_text)], cx);
7087 }
7088 assert_eq!(
7089 buffer.text(),
7090 "
7091 use a::{b, c};
7092
7093 fn f() {
7094 b();
7095 c();
7096 }
7097 "
7098 .unindent()
7099 );
7100 });
7101 }
7102
7103 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7104 buffer: &Buffer,
7105 range: Range<T>,
7106 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7107 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7108 for chunk in buffer.snapshot().chunks(range, true) {
7109 if chunks.last().map_or(false, |prev_chunk| {
7110 prev_chunk.1 == chunk.diagnostic_severity
7111 }) {
7112 chunks.last_mut().unwrap().0.push_str(chunk.text);
7113 } else {
7114 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7115 }
7116 }
7117 chunks
7118 }
7119
7120 #[gpui::test]
7121 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7122 let dir = temp_tree(json!({
7123 "root": {
7124 "dir1": {},
7125 "dir2": {
7126 "dir3": {}
7127 }
7128 }
7129 }));
7130
7131 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7132 let cancel_flag = Default::default();
7133 let results = project
7134 .read_with(cx, |project, cx| {
7135 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7136 })
7137 .await;
7138
7139 assert!(results.is_empty());
7140 }
7141
7142 #[gpui::test(iterations = 10)]
7143 async fn test_definition(cx: &mut gpui::TestAppContext) {
7144 let mut language = Language::new(
7145 LanguageConfig {
7146 name: "Rust".into(),
7147 path_suffixes: vec!["rs".to_string()],
7148 ..Default::default()
7149 },
7150 Some(tree_sitter_rust::language()),
7151 );
7152 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7153
7154 let fs = FakeFs::new(cx.background());
7155 fs.insert_tree(
7156 "/dir",
7157 json!({
7158 "a.rs": "const fn a() { A }",
7159 "b.rs": "const y: i32 = crate::a()",
7160 }),
7161 )
7162 .await;
7163
7164 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7165 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7166
7167 let buffer = project
7168 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7169 .await
7170 .unwrap();
7171
7172 let fake_server = fake_servers.next().await.unwrap();
7173 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7174 let params = params.text_document_position_params;
7175 assert_eq!(
7176 params.text_document.uri.to_file_path().unwrap(),
7177 Path::new("/dir/b.rs"),
7178 );
7179 assert_eq!(params.position, lsp::Position::new(0, 22));
7180
7181 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7182 lsp::Location::new(
7183 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7184 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7185 ),
7186 )))
7187 });
7188
7189 let mut definitions = project
7190 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7191 .await
7192 .unwrap();
7193
7194 assert_eq!(definitions.len(), 1);
7195 let definition = definitions.pop().unwrap();
7196 cx.update(|cx| {
7197 let target_buffer = definition.buffer.read(cx);
7198 assert_eq!(
7199 target_buffer
7200 .file()
7201 .unwrap()
7202 .as_local()
7203 .unwrap()
7204 .abs_path(cx),
7205 Path::new("/dir/a.rs"),
7206 );
7207 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7208 assert_eq!(
7209 list_worktrees(&project, cx),
7210 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7211 );
7212
7213 drop(definition);
7214 });
7215 cx.read(|cx| {
7216 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7217 });
7218
7219 fn list_worktrees<'a>(
7220 project: &'a ModelHandle<Project>,
7221 cx: &'a AppContext,
7222 ) -> Vec<(&'a Path, bool)> {
7223 project
7224 .read(cx)
7225 .worktrees(cx)
7226 .map(|worktree| {
7227 let worktree = worktree.read(cx);
7228 (
7229 worktree.as_local().unwrap().abs_path().as_ref(),
7230 worktree.is_visible(),
7231 )
7232 })
7233 .collect::<Vec<_>>()
7234 }
7235 }
7236
7237 #[gpui::test]
7238 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7239 let mut language = Language::new(
7240 LanguageConfig {
7241 name: "TypeScript".into(),
7242 path_suffixes: vec!["ts".to_string()],
7243 ..Default::default()
7244 },
7245 Some(tree_sitter_typescript::language_typescript()),
7246 );
7247 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7248
7249 let fs = FakeFs::new(cx.background());
7250 fs.insert_tree(
7251 "/dir",
7252 json!({
7253 "a.ts": "",
7254 }),
7255 )
7256 .await;
7257
7258 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7259 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7260 let buffer = project
7261 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7262 .await
7263 .unwrap();
7264
7265 let fake_server = fake_language_servers.next().await.unwrap();
7266
7267 let text = "let a = b.fqn";
7268 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7269 let completions = project.update(cx, |project, cx| {
7270 project.completions(&buffer, text.len(), cx)
7271 });
7272
7273 fake_server
7274 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7275 Ok(Some(lsp::CompletionResponse::Array(vec![
7276 lsp::CompletionItem {
7277 label: "fullyQualifiedName?".into(),
7278 insert_text: Some("fullyQualifiedName".into()),
7279 ..Default::default()
7280 },
7281 ])))
7282 })
7283 .next()
7284 .await;
7285 let completions = completions.await.unwrap();
7286 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7287 assert_eq!(completions.len(), 1);
7288 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7289 assert_eq!(
7290 completions[0].old_range.to_offset(&snapshot),
7291 text.len() - 3..text.len()
7292 );
7293 }
7294
7295 #[gpui::test(iterations = 10)]
7296 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7297 let mut language = Language::new(
7298 LanguageConfig {
7299 name: "TypeScript".into(),
7300 path_suffixes: vec!["ts".to_string()],
7301 ..Default::default()
7302 },
7303 None,
7304 );
7305 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7306
7307 let fs = FakeFs::new(cx.background());
7308 fs.insert_tree(
7309 "/dir",
7310 json!({
7311 "a.ts": "a",
7312 }),
7313 )
7314 .await;
7315
7316 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7317 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7318 let buffer = project
7319 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7320 .await
7321 .unwrap();
7322
7323 let fake_server = fake_language_servers.next().await.unwrap();
7324
7325 // Language server returns code actions that contain commands, and not edits.
7326 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7327 fake_server
7328 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7329 Ok(Some(vec![
7330 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7331 title: "The code action".into(),
7332 command: Some(lsp::Command {
7333 title: "The command".into(),
7334 command: "_the/command".into(),
7335 arguments: Some(vec![json!("the-argument")]),
7336 }),
7337 ..Default::default()
7338 }),
7339 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7340 title: "two".into(),
7341 ..Default::default()
7342 }),
7343 ]))
7344 })
7345 .next()
7346 .await;
7347
7348 let action = actions.await.unwrap()[0].clone();
7349 let apply = project.update(cx, |project, cx| {
7350 project.apply_code_action(buffer.clone(), action, true, cx)
7351 });
7352
7353 // Resolving the code action does not populate its edits. In absence of
7354 // edits, we must execute the given command.
7355 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7356 |action, _| async move { Ok(action) },
7357 );
7358
7359 // While executing the command, the language server sends the editor
7360 // a `workspaceEdit` request.
7361 fake_server
7362 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7363 let fake = fake_server.clone();
7364 move |params, _| {
7365 assert_eq!(params.command, "_the/command");
7366 let fake = fake.clone();
7367 async move {
7368 fake.server
7369 .request::<lsp::request::ApplyWorkspaceEdit>(
7370 lsp::ApplyWorkspaceEditParams {
7371 label: None,
7372 edit: lsp::WorkspaceEdit {
7373 changes: Some(
7374 [(
7375 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7376 vec![lsp::TextEdit {
7377 range: lsp::Range::new(
7378 lsp::Position::new(0, 0),
7379 lsp::Position::new(0, 0),
7380 ),
7381 new_text: "X".into(),
7382 }],
7383 )]
7384 .into_iter()
7385 .collect(),
7386 ),
7387 ..Default::default()
7388 },
7389 },
7390 )
7391 .await
7392 .unwrap();
7393 Ok(Some(json!(null)))
7394 }
7395 }
7396 })
7397 .next()
7398 .await;
7399
7400 // Applying the code action returns a project transaction containing the edits
7401 // sent by the language server in its `workspaceEdit` request.
7402 let transaction = apply.await.unwrap();
7403 assert!(transaction.0.contains_key(&buffer));
7404 buffer.update(cx, |buffer, cx| {
7405 assert_eq!(buffer.text(), "Xa");
7406 buffer.undo(cx);
7407 assert_eq!(buffer.text(), "a");
7408 });
7409 }
7410
7411 #[gpui::test]
7412 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7413 let fs = FakeFs::new(cx.background());
7414 fs.insert_tree(
7415 "/dir",
7416 json!({
7417 "file1": "the old contents",
7418 }),
7419 )
7420 .await;
7421
7422 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7423 let buffer = project
7424 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7425 .await
7426 .unwrap();
7427 buffer
7428 .update(cx, |buffer, cx| {
7429 assert_eq!(buffer.text(), "the old contents");
7430 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7431 buffer.save(cx)
7432 })
7433 .await
7434 .unwrap();
7435
7436 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7437 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7438 }
7439
7440 #[gpui::test]
7441 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7442 let fs = FakeFs::new(cx.background());
7443 fs.insert_tree(
7444 "/dir",
7445 json!({
7446 "file1": "the old contents",
7447 }),
7448 )
7449 .await;
7450
7451 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7452 let buffer = project
7453 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7454 .await
7455 .unwrap();
7456 buffer
7457 .update(cx, |buffer, cx| {
7458 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7459 buffer.save(cx)
7460 })
7461 .await
7462 .unwrap();
7463
7464 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7465 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7466 }
7467
7468 #[gpui::test]
7469 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7470 let fs = FakeFs::new(cx.background());
7471 fs.insert_tree("/dir", json!({})).await;
7472
7473 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7474 let buffer = project.update(cx, |project, cx| {
7475 project.create_buffer("", None, cx).unwrap()
7476 });
7477 buffer.update(cx, |buffer, cx| {
7478 buffer.edit([(0..0, "abc")], cx);
7479 assert!(buffer.is_dirty());
7480 assert!(!buffer.has_conflict());
7481 });
7482 project
7483 .update(cx, |project, cx| {
7484 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7485 })
7486 .await
7487 .unwrap();
7488 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7489 buffer.read_with(cx, |buffer, cx| {
7490 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7491 assert!(!buffer.is_dirty());
7492 assert!(!buffer.has_conflict());
7493 });
7494
7495 let opened_buffer = project
7496 .update(cx, |project, cx| {
7497 project.open_local_buffer("/dir/file1", cx)
7498 })
7499 .await
7500 .unwrap();
7501 assert_eq!(opened_buffer, buffer);
7502 }
7503
7504 #[gpui::test(retries = 5)]
7505 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7506 let dir = temp_tree(json!({
7507 "a": {
7508 "file1": "",
7509 "file2": "",
7510 "file3": "",
7511 },
7512 "b": {
7513 "c": {
7514 "file4": "",
7515 "file5": "",
7516 }
7517 }
7518 }));
7519
7520 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7521 let rpc = project.read_with(cx, |p, _| p.client.clone());
7522
7523 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7524 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7525 async move { buffer.await.unwrap() }
7526 };
7527 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7528 project.read_with(cx, |project, cx| {
7529 let tree = project.worktrees(cx).next().unwrap();
7530 tree.read(cx)
7531 .entry_for_path(path)
7532 .expect(&format!("no entry for path {}", path))
7533 .id
7534 })
7535 };
7536
7537 let buffer2 = buffer_for_path("a/file2", cx).await;
7538 let buffer3 = buffer_for_path("a/file3", cx).await;
7539 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7540 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7541
7542 let file2_id = id_for_path("a/file2", &cx);
7543 let file3_id = id_for_path("a/file3", &cx);
7544 let file4_id = id_for_path("b/c/file4", &cx);
7545
7546 // Create a remote copy of this worktree.
7547 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7548 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7549 let (remote, load_task) = cx.update(|cx| {
7550 Worktree::remote(
7551 1,
7552 1,
7553 initial_snapshot.to_proto(&Default::default(), true),
7554 rpc.clone(),
7555 cx,
7556 )
7557 });
7558 // tree
7559 load_task.await;
7560
7561 cx.read(|cx| {
7562 assert!(!buffer2.read(cx).is_dirty());
7563 assert!(!buffer3.read(cx).is_dirty());
7564 assert!(!buffer4.read(cx).is_dirty());
7565 assert!(!buffer5.read(cx).is_dirty());
7566 });
7567
7568 // Rename and delete files and directories.
7569 tree.flush_fs_events(&cx).await;
7570 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7571 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7572 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7573 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7574 tree.flush_fs_events(&cx).await;
7575
7576 let expected_paths = vec![
7577 "a",
7578 "a/file1",
7579 "a/file2.new",
7580 "b",
7581 "d",
7582 "d/file3",
7583 "d/file4",
7584 ];
7585
7586 cx.read(|app| {
7587 assert_eq!(
7588 tree.read(app)
7589 .paths()
7590 .map(|p| p.to_str().unwrap())
7591 .collect::<Vec<_>>(),
7592 expected_paths
7593 );
7594
7595 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7596 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7597 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7598
7599 assert_eq!(
7600 buffer2.read(app).file().unwrap().path().as_ref(),
7601 Path::new("a/file2.new")
7602 );
7603 assert_eq!(
7604 buffer3.read(app).file().unwrap().path().as_ref(),
7605 Path::new("d/file3")
7606 );
7607 assert_eq!(
7608 buffer4.read(app).file().unwrap().path().as_ref(),
7609 Path::new("d/file4")
7610 );
7611 assert_eq!(
7612 buffer5.read(app).file().unwrap().path().as_ref(),
7613 Path::new("b/c/file5")
7614 );
7615
7616 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7617 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7618 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7619 assert!(buffer5.read(app).file().unwrap().is_deleted());
7620 });
7621
7622 // Update the remote worktree. Check that it becomes consistent with the
7623 // local worktree.
7624 remote.update(cx, |remote, cx| {
7625 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7626 &initial_snapshot,
7627 1,
7628 1,
7629 true,
7630 );
7631 remote
7632 .as_remote_mut()
7633 .unwrap()
7634 .snapshot
7635 .apply_remote_update(update_message)
7636 .unwrap();
7637
7638 assert_eq!(
7639 remote
7640 .paths()
7641 .map(|p| p.to_str().unwrap())
7642 .collect::<Vec<_>>(),
7643 expected_paths
7644 );
7645 });
7646 }
7647
7648 #[gpui::test]
7649 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7650 let fs = FakeFs::new(cx.background());
7651 fs.insert_tree(
7652 "/dir",
7653 json!({
7654 "a.txt": "a-contents",
7655 "b.txt": "b-contents",
7656 }),
7657 )
7658 .await;
7659
7660 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7661
7662 // Spawn multiple tasks to open paths, repeating some paths.
7663 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7664 (
7665 p.open_local_buffer("/dir/a.txt", cx),
7666 p.open_local_buffer("/dir/b.txt", cx),
7667 p.open_local_buffer("/dir/a.txt", cx),
7668 )
7669 });
7670
7671 let buffer_a_1 = buffer_a_1.await.unwrap();
7672 let buffer_a_2 = buffer_a_2.await.unwrap();
7673 let buffer_b = buffer_b.await.unwrap();
7674 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7675 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7676
7677 // There is only one buffer per path.
7678 let buffer_a_id = buffer_a_1.id();
7679 assert_eq!(buffer_a_2.id(), buffer_a_id);
7680
7681 // Open the same path again while it is still open.
7682 drop(buffer_a_1);
7683 let buffer_a_3 = project
7684 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7685 .await
7686 .unwrap();
7687
7688 // There's still only one buffer per path.
7689 assert_eq!(buffer_a_3.id(), buffer_a_id);
7690 }
7691
7692 #[gpui::test]
7693 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7694 let fs = FakeFs::new(cx.background());
7695 fs.insert_tree(
7696 "/dir",
7697 json!({
7698 "file1": "abc",
7699 "file2": "def",
7700 "file3": "ghi",
7701 }),
7702 )
7703 .await;
7704
7705 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7706
7707 let buffer1 = project
7708 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7709 .await
7710 .unwrap();
7711 let events = Rc::new(RefCell::new(Vec::new()));
7712
7713 // initially, the buffer isn't dirty.
7714 buffer1.update(cx, |buffer, cx| {
7715 cx.subscribe(&buffer1, {
7716 let events = events.clone();
7717 move |_, _, event, _| match event {
7718 BufferEvent::Operation(_) => {}
7719 _ => events.borrow_mut().push(event.clone()),
7720 }
7721 })
7722 .detach();
7723
7724 assert!(!buffer.is_dirty());
7725 assert!(events.borrow().is_empty());
7726
7727 buffer.edit([(1..2, "")], cx);
7728 });
7729
7730 // after the first edit, the buffer is dirty, and emits a dirtied event.
7731 buffer1.update(cx, |buffer, cx| {
7732 assert!(buffer.text() == "ac");
7733 assert!(buffer.is_dirty());
7734 assert_eq!(
7735 *events.borrow(),
7736 &[language::Event::Edited, language::Event::Dirtied]
7737 );
7738 events.borrow_mut().clear();
7739 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7740 });
7741
7742 // after saving, the buffer is not dirty, and emits a saved event.
7743 buffer1.update(cx, |buffer, cx| {
7744 assert!(!buffer.is_dirty());
7745 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7746 events.borrow_mut().clear();
7747
7748 buffer.edit([(1..1, "B")], cx);
7749 buffer.edit([(2..2, "D")], cx);
7750 });
7751
7752 // after editing again, the buffer is dirty, and emits another dirty event.
7753 buffer1.update(cx, |buffer, cx| {
7754 assert!(buffer.text() == "aBDc");
7755 assert!(buffer.is_dirty());
7756 assert_eq!(
7757 *events.borrow(),
7758 &[
7759 language::Event::Edited,
7760 language::Event::Dirtied,
7761 language::Event::Edited,
7762 ],
7763 );
7764 events.borrow_mut().clear();
7765
7766 // TODO - currently, after restoring the buffer to its
7767 // previously-saved state, the is still considered dirty.
7768 buffer.edit([(1..3, "")], cx);
7769 assert!(buffer.text() == "ac");
7770 assert!(buffer.is_dirty());
7771 });
7772
7773 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7774
7775 // When a file is deleted, the buffer is considered dirty.
7776 let events = Rc::new(RefCell::new(Vec::new()));
7777 let buffer2 = project
7778 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7779 .await
7780 .unwrap();
7781 buffer2.update(cx, |_, cx| {
7782 cx.subscribe(&buffer2, {
7783 let events = events.clone();
7784 move |_, _, event, _| events.borrow_mut().push(event.clone())
7785 })
7786 .detach();
7787 });
7788
7789 fs.remove_file("/dir/file2".as_ref(), Default::default())
7790 .await
7791 .unwrap();
7792 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7793 assert_eq!(
7794 *events.borrow(),
7795 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7796 );
7797
7798 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7799 let events = Rc::new(RefCell::new(Vec::new()));
7800 let buffer3 = project
7801 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7802 .await
7803 .unwrap();
7804 buffer3.update(cx, |_, cx| {
7805 cx.subscribe(&buffer3, {
7806 let events = events.clone();
7807 move |_, _, event, _| events.borrow_mut().push(event.clone())
7808 })
7809 .detach();
7810 });
7811
7812 buffer3.update(cx, |buffer, cx| {
7813 buffer.edit([(0..0, "x")], cx);
7814 });
7815 events.borrow_mut().clear();
7816 fs.remove_file("/dir/file3".as_ref(), Default::default())
7817 .await
7818 .unwrap();
7819 buffer3
7820 .condition(&cx, |_, _| !events.borrow().is_empty())
7821 .await;
7822 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7823 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7824 }
7825
7826 #[gpui::test]
7827 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7828 let initial_contents = "aaa\nbbbbb\nc\n";
7829 let fs = FakeFs::new(cx.background());
7830 fs.insert_tree(
7831 "/dir",
7832 json!({
7833 "the-file": initial_contents,
7834 }),
7835 )
7836 .await;
7837 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7838 let buffer = project
7839 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7840 .await
7841 .unwrap();
7842
7843 let anchors = (0..3)
7844 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7845 .collect::<Vec<_>>();
7846
7847 // Change the file on disk, adding two new lines of text, and removing
7848 // one line.
7849 buffer.read_with(cx, |buffer, _| {
7850 assert!(!buffer.is_dirty());
7851 assert!(!buffer.has_conflict());
7852 });
7853 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7854 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7855 .await
7856 .unwrap();
7857
7858 // Because the buffer was not modified, it is reloaded from disk. Its
7859 // contents are edited according to the diff between the old and new
7860 // file contents.
7861 buffer
7862 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7863 .await;
7864
7865 buffer.update(cx, |buffer, _| {
7866 assert_eq!(buffer.text(), new_contents);
7867 assert!(!buffer.is_dirty());
7868 assert!(!buffer.has_conflict());
7869
7870 let anchor_positions = anchors
7871 .iter()
7872 .map(|anchor| anchor.to_point(&*buffer))
7873 .collect::<Vec<_>>();
7874 assert_eq!(
7875 anchor_positions,
7876 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7877 );
7878 });
7879
7880 // Modify the buffer
7881 buffer.update(cx, |buffer, cx| {
7882 buffer.edit([(0..0, " ")], cx);
7883 assert!(buffer.is_dirty());
7884 assert!(!buffer.has_conflict());
7885 });
7886
7887 // Change the file on disk again, adding blank lines to the beginning.
7888 fs.save(
7889 "/dir/the-file".as_ref(),
7890 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7891 )
7892 .await
7893 .unwrap();
7894
7895 // Because the buffer is modified, it doesn't reload from disk, but is
7896 // marked as having a conflict.
7897 buffer
7898 .condition(&cx, |buffer, _| buffer.has_conflict())
7899 .await;
7900 }
7901
7902 #[gpui::test]
7903 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7904 cx.foreground().forbid_parking();
7905
7906 let fs = FakeFs::new(cx.background());
7907 fs.insert_tree(
7908 "/the-dir",
7909 json!({
7910 "a.rs": "
7911 fn foo(mut v: Vec<usize>) {
7912 for x in &v {
7913 v.push(1);
7914 }
7915 }
7916 "
7917 .unindent(),
7918 }),
7919 )
7920 .await;
7921
7922 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7923 let buffer = project
7924 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7925 .await
7926 .unwrap();
7927
7928 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7929 let message = lsp::PublishDiagnosticsParams {
7930 uri: buffer_uri.clone(),
7931 diagnostics: vec![
7932 lsp::Diagnostic {
7933 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7934 severity: Some(DiagnosticSeverity::WARNING),
7935 message: "error 1".to_string(),
7936 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7937 location: lsp::Location {
7938 uri: buffer_uri.clone(),
7939 range: lsp::Range::new(
7940 lsp::Position::new(1, 8),
7941 lsp::Position::new(1, 9),
7942 ),
7943 },
7944 message: "error 1 hint 1".to_string(),
7945 }]),
7946 ..Default::default()
7947 },
7948 lsp::Diagnostic {
7949 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7950 severity: Some(DiagnosticSeverity::HINT),
7951 message: "error 1 hint 1".to_string(),
7952 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7953 location: lsp::Location {
7954 uri: buffer_uri.clone(),
7955 range: lsp::Range::new(
7956 lsp::Position::new(1, 8),
7957 lsp::Position::new(1, 9),
7958 ),
7959 },
7960 message: "original diagnostic".to_string(),
7961 }]),
7962 ..Default::default()
7963 },
7964 lsp::Diagnostic {
7965 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7966 severity: Some(DiagnosticSeverity::ERROR),
7967 message: "error 2".to_string(),
7968 related_information: Some(vec![
7969 lsp::DiagnosticRelatedInformation {
7970 location: lsp::Location {
7971 uri: buffer_uri.clone(),
7972 range: lsp::Range::new(
7973 lsp::Position::new(1, 13),
7974 lsp::Position::new(1, 15),
7975 ),
7976 },
7977 message: "error 2 hint 1".to_string(),
7978 },
7979 lsp::DiagnosticRelatedInformation {
7980 location: lsp::Location {
7981 uri: buffer_uri.clone(),
7982 range: lsp::Range::new(
7983 lsp::Position::new(1, 13),
7984 lsp::Position::new(1, 15),
7985 ),
7986 },
7987 message: "error 2 hint 2".to_string(),
7988 },
7989 ]),
7990 ..Default::default()
7991 },
7992 lsp::Diagnostic {
7993 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7994 severity: Some(DiagnosticSeverity::HINT),
7995 message: "error 2 hint 1".to_string(),
7996 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7997 location: lsp::Location {
7998 uri: buffer_uri.clone(),
7999 range: lsp::Range::new(
8000 lsp::Position::new(2, 8),
8001 lsp::Position::new(2, 17),
8002 ),
8003 },
8004 message: "original diagnostic".to_string(),
8005 }]),
8006 ..Default::default()
8007 },
8008 lsp::Diagnostic {
8009 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8010 severity: Some(DiagnosticSeverity::HINT),
8011 message: "error 2 hint 2".to_string(),
8012 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8013 location: lsp::Location {
8014 uri: buffer_uri.clone(),
8015 range: lsp::Range::new(
8016 lsp::Position::new(2, 8),
8017 lsp::Position::new(2, 17),
8018 ),
8019 },
8020 message: "original diagnostic".to_string(),
8021 }]),
8022 ..Default::default()
8023 },
8024 ],
8025 version: None,
8026 };
8027
8028 project
8029 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
8030 .unwrap();
8031 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8032
8033 assert_eq!(
8034 buffer
8035 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8036 .collect::<Vec<_>>(),
8037 &[
8038 DiagnosticEntry {
8039 range: Point::new(1, 8)..Point::new(1, 9),
8040 diagnostic: Diagnostic {
8041 severity: DiagnosticSeverity::WARNING,
8042 message: "error 1".to_string(),
8043 group_id: 0,
8044 is_primary: true,
8045 ..Default::default()
8046 }
8047 },
8048 DiagnosticEntry {
8049 range: Point::new(1, 8)..Point::new(1, 9),
8050 diagnostic: Diagnostic {
8051 severity: DiagnosticSeverity::HINT,
8052 message: "error 1 hint 1".to_string(),
8053 group_id: 0,
8054 is_primary: false,
8055 ..Default::default()
8056 }
8057 },
8058 DiagnosticEntry {
8059 range: Point::new(1, 13)..Point::new(1, 15),
8060 diagnostic: Diagnostic {
8061 severity: DiagnosticSeverity::HINT,
8062 message: "error 2 hint 1".to_string(),
8063 group_id: 1,
8064 is_primary: false,
8065 ..Default::default()
8066 }
8067 },
8068 DiagnosticEntry {
8069 range: Point::new(1, 13)..Point::new(1, 15),
8070 diagnostic: Diagnostic {
8071 severity: DiagnosticSeverity::HINT,
8072 message: "error 2 hint 2".to_string(),
8073 group_id: 1,
8074 is_primary: false,
8075 ..Default::default()
8076 }
8077 },
8078 DiagnosticEntry {
8079 range: Point::new(2, 8)..Point::new(2, 17),
8080 diagnostic: Diagnostic {
8081 severity: DiagnosticSeverity::ERROR,
8082 message: "error 2".to_string(),
8083 group_id: 1,
8084 is_primary: true,
8085 ..Default::default()
8086 }
8087 }
8088 ]
8089 );
8090
8091 assert_eq!(
8092 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8093 &[
8094 DiagnosticEntry {
8095 range: Point::new(1, 8)..Point::new(1, 9),
8096 diagnostic: Diagnostic {
8097 severity: DiagnosticSeverity::WARNING,
8098 message: "error 1".to_string(),
8099 group_id: 0,
8100 is_primary: true,
8101 ..Default::default()
8102 }
8103 },
8104 DiagnosticEntry {
8105 range: Point::new(1, 8)..Point::new(1, 9),
8106 diagnostic: Diagnostic {
8107 severity: DiagnosticSeverity::HINT,
8108 message: "error 1 hint 1".to_string(),
8109 group_id: 0,
8110 is_primary: false,
8111 ..Default::default()
8112 }
8113 },
8114 ]
8115 );
8116 assert_eq!(
8117 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8118 &[
8119 DiagnosticEntry {
8120 range: Point::new(1, 13)..Point::new(1, 15),
8121 diagnostic: Diagnostic {
8122 severity: DiagnosticSeverity::HINT,
8123 message: "error 2 hint 1".to_string(),
8124 group_id: 1,
8125 is_primary: false,
8126 ..Default::default()
8127 }
8128 },
8129 DiagnosticEntry {
8130 range: Point::new(1, 13)..Point::new(1, 15),
8131 diagnostic: Diagnostic {
8132 severity: DiagnosticSeverity::HINT,
8133 message: "error 2 hint 2".to_string(),
8134 group_id: 1,
8135 is_primary: false,
8136 ..Default::default()
8137 }
8138 },
8139 DiagnosticEntry {
8140 range: Point::new(2, 8)..Point::new(2, 17),
8141 diagnostic: Diagnostic {
8142 severity: DiagnosticSeverity::ERROR,
8143 message: "error 2".to_string(),
8144 group_id: 1,
8145 is_primary: true,
8146 ..Default::default()
8147 }
8148 }
8149 ]
8150 );
8151 }
8152
8153 #[gpui::test]
8154 async fn test_rename(cx: &mut gpui::TestAppContext) {
8155 cx.foreground().forbid_parking();
8156
8157 let mut language = Language::new(
8158 LanguageConfig {
8159 name: "Rust".into(),
8160 path_suffixes: vec!["rs".to_string()],
8161 ..Default::default()
8162 },
8163 Some(tree_sitter_rust::language()),
8164 );
8165 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8166 capabilities: lsp::ServerCapabilities {
8167 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8168 prepare_provider: Some(true),
8169 work_done_progress_options: Default::default(),
8170 })),
8171 ..Default::default()
8172 },
8173 ..Default::default()
8174 });
8175
8176 let fs = FakeFs::new(cx.background());
8177 fs.insert_tree(
8178 "/dir",
8179 json!({
8180 "one.rs": "const ONE: usize = 1;",
8181 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8182 }),
8183 )
8184 .await;
8185
8186 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8187 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8188 let buffer = project
8189 .update(cx, |project, cx| {
8190 project.open_local_buffer("/dir/one.rs", cx)
8191 })
8192 .await
8193 .unwrap();
8194
8195 let fake_server = fake_servers.next().await.unwrap();
8196
8197 let response = project.update(cx, |project, cx| {
8198 project.prepare_rename(buffer.clone(), 7, cx)
8199 });
8200 fake_server
8201 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8202 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8203 assert_eq!(params.position, lsp::Position::new(0, 7));
8204 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8205 lsp::Position::new(0, 6),
8206 lsp::Position::new(0, 9),
8207 ))))
8208 })
8209 .next()
8210 .await
8211 .unwrap();
8212 let range = response.await.unwrap().unwrap();
8213 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8214 assert_eq!(range, 6..9);
8215
8216 let response = project.update(cx, |project, cx| {
8217 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8218 });
8219 fake_server
8220 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8221 assert_eq!(
8222 params.text_document_position.text_document.uri.as_str(),
8223 "file:///dir/one.rs"
8224 );
8225 assert_eq!(
8226 params.text_document_position.position,
8227 lsp::Position::new(0, 7)
8228 );
8229 assert_eq!(params.new_name, "THREE");
8230 Ok(Some(lsp::WorkspaceEdit {
8231 changes: Some(
8232 [
8233 (
8234 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8235 vec![lsp::TextEdit::new(
8236 lsp::Range::new(
8237 lsp::Position::new(0, 6),
8238 lsp::Position::new(0, 9),
8239 ),
8240 "THREE".to_string(),
8241 )],
8242 ),
8243 (
8244 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8245 vec![
8246 lsp::TextEdit::new(
8247 lsp::Range::new(
8248 lsp::Position::new(0, 24),
8249 lsp::Position::new(0, 27),
8250 ),
8251 "THREE".to_string(),
8252 ),
8253 lsp::TextEdit::new(
8254 lsp::Range::new(
8255 lsp::Position::new(0, 35),
8256 lsp::Position::new(0, 38),
8257 ),
8258 "THREE".to_string(),
8259 ),
8260 ],
8261 ),
8262 ]
8263 .into_iter()
8264 .collect(),
8265 ),
8266 ..Default::default()
8267 }))
8268 })
8269 .next()
8270 .await
8271 .unwrap();
8272 let mut transaction = response.await.unwrap().0;
8273 assert_eq!(transaction.len(), 2);
8274 assert_eq!(
8275 transaction
8276 .remove_entry(&buffer)
8277 .unwrap()
8278 .0
8279 .read_with(cx, |buffer, _| buffer.text()),
8280 "const THREE: usize = 1;"
8281 );
8282 assert_eq!(
8283 transaction
8284 .into_keys()
8285 .next()
8286 .unwrap()
8287 .read_with(cx, |buffer, _| buffer.text()),
8288 "const TWO: usize = one::THREE + one::THREE;"
8289 );
8290 }
8291
8292 #[gpui::test]
8293 async fn test_search(cx: &mut gpui::TestAppContext) {
8294 let fs = FakeFs::new(cx.background());
8295 fs.insert_tree(
8296 "/dir",
8297 json!({
8298 "one.rs": "const ONE: usize = 1;",
8299 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8300 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8301 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8302 }),
8303 )
8304 .await;
8305 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8306 assert_eq!(
8307 search(&project, SearchQuery::text("TWO", false, true), cx)
8308 .await
8309 .unwrap(),
8310 HashMap::from_iter([
8311 ("two.rs".to_string(), vec![6..9]),
8312 ("three.rs".to_string(), vec![37..40])
8313 ])
8314 );
8315
8316 let buffer_4 = project
8317 .update(cx, |project, cx| {
8318 project.open_local_buffer("/dir/four.rs", cx)
8319 })
8320 .await
8321 .unwrap();
8322 buffer_4.update(cx, |buffer, cx| {
8323 let text = "two::TWO";
8324 buffer.edit([(20..28, text), (31..43, text)], cx);
8325 });
8326
8327 assert_eq!(
8328 search(&project, SearchQuery::text("TWO", false, true), cx)
8329 .await
8330 .unwrap(),
8331 HashMap::from_iter([
8332 ("two.rs".to_string(), vec![6..9]),
8333 ("three.rs".to_string(), vec![37..40]),
8334 ("four.rs".to_string(), vec![25..28, 36..39])
8335 ])
8336 );
8337
8338 async fn search(
8339 project: &ModelHandle<Project>,
8340 query: SearchQuery,
8341 cx: &mut gpui::TestAppContext,
8342 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8343 let results = project
8344 .update(cx, |project, cx| project.search(query, cx))
8345 .await?;
8346
8347 Ok(results
8348 .into_iter()
8349 .map(|(buffer, ranges)| {
8350 buffer.read_with(cx, |buffer, _| {
8351 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8352 let ranges = ranges
8353 .into_iter()
8354 .map(|range| range.to_offset(buffer))
8355 .collect::<Vec<_>>();
8356 (path, ranges)
8357 })
8358 })
8359 .collect())
8360 }
8361 }
8362}