1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73// Language server state is stored across 3 collections:
74// language_servers =>
75// a mapping from unique server id to LanguageServerState which can either be a task for a
76// server in the process of starting, or a running server with adapter and language server arcs
77// language_server_ids => a mapping from worktreeId and server name to the unique server id
78// language_server_statuses => a mapping from unique server id to the current server status
79//
80// Multiple worktrees can map to the same language server for example when you jump to the definition
81// of a file in the standard library. So language_server_ids is used to look up which server is active
82// for a given worktree and language server name
83//
84// When starting a language server, first the id map is checked to make sure a server isn't already available
85// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and and
86// the Starting variant of LanguageServerState is stored in the language_servers map.
87pub struct Project {
88 worktrees: Vec<WorktreeHandle>,
89 active_entry: Option<ProjectEntryId>,
90 languages: Arc<LanguageRegistry>,
91 language_servers: HashMap<usize, LanguageServerState>,
92 language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
93 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
94 language_server_settings: Arc<Mutex<serde_json::Value>>,
95 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
96 next_language_server_id: usize,
97 client: Arc<client::Client>,
98 next_entry_id: Arc<AtomicUsize>,
99 next_diagnostic_group_id: usize,
100 user_store: ModelHandle<UserStore>,
101 project_store: ModelHandle<ProjectStore>,
102 fs: Arc<dyn Fs>,
103 client_state: ProjectClientState,
104 collaborators: HashMap<PeerId, Collaborator>,
105 client_subscriptions: Vec<client::Subscription>,
106 _subscriptions: Vec<gpui::Subscription>,
107 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
108 shared_buffers: HashMap<PeerId, HashSet<u64>>,
109 loading_buffers: HashMap<
110 ProjectPath,
111 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
112 >,
113 loading_local_worktrees:
114 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
115 opened_buffers: HashMap<u64, OpenBuffer>,
116 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
117 nonce: u128,
118 initialized_persistent_state: bool,
119}
120
121#[derive(Error, Debug)]
122pub enum JoinProjectError {
123 #[error("host declined join request")]
124 HostDeclined,
125 #[error("host closed the project")]
126 HostClosedProject,
127 #[error("host went offline")]
128 HostWentOffline,
129 #[error("{0}")]
130 Other(#[from] anyhow::Error),
131}
132
133enum OpenBuffer {
134 Strong(ModelHandle<Buffer>),
135 Weak(WeakModelHandle<Buffer>),
136 Loading(Vec<Operation>),
137}
138
139enum WorktreeHandle {
140 Strong(ModelHandle<Worktree>),
141 Weak(WeakModelHandle<Worktree>),
142}
143
144enum ProjectClientState {
145 Local {
146 is_shared: bool,
147 remote_id_tx: watch::Sender<Option<u64>>,
148 remote_id_rx: watch::Receiver<Option<u64>>,
149 online_tx: watch::Sender<bool>,
150 online_rx: watch::Receiver<bool>,
151 _maintain_remote_id: Task<Option<()>>,
152 _maintain_online_status: Task<Option<()>>,
153 },
154 Remote {
155 sharing_has_stopped: bool,
156 remote_id: u64,
157 replica_id: ReplicaId,
158 _detect_unshare: Task<Option<()>>,
159 },
160}
161
162#[derive(Clone, Debug)]
163pub struct Collaborator {
164 pub user: Arc<User>,
165 pub peer_id: PeerId,
166 pub replica_id: ReplicaId,
167}
168
169#[derive(Clone, Debug, PartialEq, Eq)]
170pub enum Event {
171 ActiveEntryChanged(Option<ProjectEntryId>),
172 WorktreeAdded,
173 WorktreeRemoved(WorktreeId),
174 DiskBasedDiagnosticsStarted {
175 language_server_id: usize,
176 },
177 DiskBasedDiagnosticsFinished {
178 language_server_id: usize,
179 },
180 DiagnosticsUpdated {
181 path: ProjectPath,
182 language_server_id: usize,
183 },
184 RemoteIdChanged(Option<u64>),
185 CollaboratorLeft(PeerId),
186 ContactRequestedJoin(Arc<User>),
187 ContactCancelledJoinRequest(Arc<User>),
188}
189
190pub enum LanguageServerState {
191 Starting(Task<Option<Arc<LanguageServer>>>),
192 Running {
193 adapter: Arc<dyn LspAdapter>,
194 server: Arc<LanguageServer>,
195 },
196}
197
198#[derive(Serialize)]
199pub struct LanguageServerStatus {
200 pub name: String,
201 pub pending_work: BTreeMap<String, LanguageServerProgress>,
202 pub has_pending_diagnostic_updates: bool,
203 progress_tokens: HashSet<String>,
204}
205
206#[derive(Clone, Debug, Serialize)]
207pub struct LanguageServerProgress {
208 pub message: Option<String>,
209 pub percentage: Option<usize>,
210 #[serde(skip_serializing)]
211 pub last_update_at: Instant,
212}
213
214#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
215pub struct ProjectPath {
216 pub worktree_id: WorktreeId,
217 pub path: Arc<Path>,
218}
219
220#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
221pub struct DiagnosticSummary {
222 pub language_server_id: usize,
223 pub error_count: usize,
224 pub warning_count: usize,
225}
226
227#[derive(Debug, Clone)]
228pub struct Location {
229 pub buffer: ModelHandle<Buffer>,
230 pub range: Range<language::Anchor>,
231}
232
233#[derive(Debug, Clone)]
234pub struct LocationLink {
235 pub origin: Option<Location>,
236 pub target: Location,
237}
238
239#[derive(Debug)]
240pub struct DocumentHighlight {
241 pub range: Range<language::Anchor>,
242 pub kind: DocumentHighlightKind,
243}
244
245#[derive(Clone, Debug)]
246pub struct Symbol {
247 pub source_worktree_id: WorktreeId,
248 pub worktree_id: WorktreeId,
249 pub language_server_name: LanguageServerName,
250 pub path: PathBuf,
251 pub label: CodeLabel,
252 pub name: String,
253 pub kind: lsp::SymbolKind,
254 pub range: Range<PointUtf16>,
255 pub signature: [u8; 32],
256}
257
258#[derive(Clone, Debug, PartialEq)]
259pub struct HoverBlock {
260 pub text: String,
261 pub language: Option<String>,
262}
263
264impl HoverBlock {
265 fn try_new(marked_string: MarkedString) -> Option<Self> {
266 let result = match marked_string {
267 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
268 text: value,
269 language: Some(language),
270 },
271 MarkedString::String(text) => HoverBlock {
272 text,
273 language: None,
274 },
275 };
276 if result.text.is_empty() {
277 None
278 } else {
279 Some(result)
280 }
281 }
282}
283
284#[derive(Debug)]
285pub struct Hover {
286 pub contents: Vec<HoverBlock>,
287 pub range: Option<Range<language::Anchor>>,
288}
289
290#[derive(Default)]
291pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
292
293impl DiagnosticSummary {
294 fn new<'a, T: 'a>(
295 language_server_id: usize,
296 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
297 ) -> Self {
298 let mut this = Self {
299 language_server_id,
300 error_count: 0,
301 warning_count: 0,
302 };
303
304 for entry in diagnostics {
305 if entry.diagnostic.is_primary {
306 match entry.diagnostic.severity {
307 DiagnosticSeverity::ERROR => this.error_count += 1,
308 DiagnosticSeverity::WARNING => this.warning_count += 1,
309 _ => {}
310 }
311 }
312 }
313
314 this
315 }
316
317 pub fn is_empty(&self) -> bool {
318 self.error_count == 0 && self.warning_count == 0
319 }
320
321 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
322 proto::DiagnosticSummary {
323 path: path.to_string_lossy().to_string(),
324 language_server_id: self.language_server_id as u64,
325 error_count: self.error_count as u32,
326 warning_count: self.warning_count as u32,
327 }
328 }
329}
330
331#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
332pub struct ProjectEntryId(usize);
333
334impl ProjectEntryId {
335 pub const MAX: Self = Self(usize::MAX);
336
337 pub fn new(counter: &AtomicUsize) -> Self {
338 Self(counter.fetch_add(1, SeqCst))
339 }
340
341 pub fn from_proto(id: u64) -> Self {
342 Self(id as usize)
343 }
344
345 pub fn to_proto(&self) -> u64 {
346 self.0 as u64
347 }
348
349 pub fn to_usize(&self) -> usize {
350 self.0
351 }
352}
353
354impl Project {
355 pub fn init(client: &Arc<Client>) {
356 client.add_model_message_handler(Self::handle_request_join_project);
357 client.add_model_message_handler(Self::handle_add_collaborator);
358 client.add_model_message_handler(Self::handle_buffer_reloaded);
359 client.add_model_message_handler(Self::handle_buffer_saved);
360 client.add_model_message_handler(Self::handle_start_language_server);
361 client.add_model_message_handler(Self::handle_update_language_server);
362 client.add_model_message_handler(Self::handle_remove_collaborator);
363 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
364 client.add_model_message_handler(Self::handle_update_project);
365 client.add_model_message_handler(Self::handle_unregister_project);
366 client.add_model_message_handler(Self::handle_project_unshared);
367 client.add_model_message_handler(Self::handle_update_buffer_file);
368 client.add_model_message_handler(Self::handle_update_buffer);
369 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
370 client.add_model_message_handler(Self::handle_update_worktree);
371 client.add_model_request_handler(Self::handle_create_project_entry);
372 client.add_model_request_handler(Self::handle_rename_project_entry);
373 client.add_model_request_handler(Self::handle_copy_project_entry);
374 client.add_model_request_handler(Self::handle_delete_project_entry);
375 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
376 client.add_model_request_handler(Self::handle_apply_code_action);
377 client.add_model_request_handler(Self::handle_reload_buffers);
378 client.add_model_request_handler(Self::handle_format_buffers);
379 client.add_model_request_handler(Self::handle_get_code_actions);
380 client.add_model_request_handler(Self::handle_get_completions);
381 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
382 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
383 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
384 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
385 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
386 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
387 client.add_model_request_handler(Self::handle_search_project);
388 client.add_model_request_handler(Self::handle_get_project_symbols);
389 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
390 client.add_model_request_handler(Self::handle_open_buffer_by_id);
391 client.add_model_request_handler(Self::handle_open_buffer_by_path);
392 client.add_model_request_handler(Self::handle_save_buffer);
393 }
394
395 pub fn local(
396 online: bool,
397 client: Arc<Client>,
398 user_store: ModelHandle<UserStore>,
399 project_store: ModelHandle<ProjectStore>,
400 languages: Arc<LanguageRegistry>,
401 fs: Arc<dyn Fs>,
402 cx: &mut MutableAppContext,
403 ) -> ModelHandle<Self> {
404 cx.add_model(|cx: &mut ModelContext<Self>| {
405 let (remote_id_tx, remote_id_rx) = watch::channel();
406 let _maintain_remote_id = cx.spawn_weak({
407 let mut status_rx = client.clone().status();
408 move |this, mut cx| async move {
409 while let Some(status) = status_rx.recv().await {
410 let this = this.upgrade(&cx)?;
411 if status.is_connected() {
412 this.update(&mut cx, |this, cx| this.register(cx))
413 .await
414 .log_err()?;
415 } else {
416 this.update(&mut cx, |this, cx| this.unregister(cx))
417 .await
418 .log_err();
419 }
420 }
421 None
422 }
423 });
424
425 let (online_tx, online_rx) = watch::channel_with(online);
426 let _maintain_online_status = cx.spawn_weak({
427 let mut online_rx = online_rx.clone();
428 move |this, mut cx| async move {
429 while let Some(online) = online_rx.recv().await {
430 let this = this.upgrade(&cx)?;
431 this.update(&mut cx, |this, cx| {
432 if !online {
433 this.unshared(cx);
434 }
435 this.metadata_changed(false, cx)
436 });
437 }
438 None
439 }
440 });
441
442 let handle = cx.weak_handle();
443 project_store.update(cx, |store, cx| store.add_project(handle, cx));
444
445 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
446 Self {
447 worktrees: Default::default(),
448 collaborators: Default::default(),
449 opened_buffers: Default::default(),
450 shared_buffers: Default::default(),
451 loading_buffers: Default::default(),
452 loading_local_worktrees: Default::default(),
453 buffer_snapshots: Default::default(),
454 client_state: ProjectClientState::Local {
455 is_shared: false,
456 remote_id_tx,
457 remote_id_rx,
458 online_tx,
459 online_rx,
460 _maintain_remote_id,
461 _maintain_online_status,
462 },
463 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
464 client_subscriptions: Vec::new(),
465 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
466 active_entry: None,
467 languages,
468 client,
469 user_store,
470 project_store,
471 fs,
472 next_entry_id: Default::default(),
473 next_diagnostic_group_id: Default::default(),
474 language_servers: Default::default(),
475 language_server_ids: Default::default(),
476 language_server_statuses: Default::default(),
477 last_workspace_edits_by_language_server: Default::default(),
478 language_server_settings: Default::default(),
479 next_language_server_id: 0,
480 nonce: StdRng::from_entropy().gen(),
481 initialized_persistent_state: false,
482 }
483 })
484 }
485
486 pub async fn remote(
487 remote_id: u64,
488 client: Arc<Client>,
489 user_store: ModelHandle<UserStore>,
490 project_store: ModelHandle<ProjectStore>,
491 languages: Arc<LanguageRegistry>,
492 fs: Arc<dyn Fs>,
493 mut cx: AsyncAppContext,
494 ) -> Result<ModelHandle<Self>, JoinProjectError> {
495 client.authenticate_and_connect(true, &cx).await?;
496
497 let response = client
498 .request(proto::JoinProject {
499 project_id: remote_id,
500 })
501 .await?;
502
503 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
504 proto::join_project_response::Variant::Accept(response) => response,
505 proto::join_project_response::Variant::Decline(decline) => {
506 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
507 Some(proto::join_project_response::decline::Reason::Declined) => {
508 Err(JoinProjectError::HostDeclined)?
509 }
510 Some(proto::join_project_response::decline::Reason::Closed) => {
511 Err(JoinProjectError::HostClosedProject)?
512 }
513 Some(proto::join_project_response::decline::Reason::WentOffline) => {
514 Err(JoinProjectError::HostWentOffline)?
515 }
516 None => Err(anyhow!("missing decline reason"))?,
517 }
518 }
519 };
520
521 let replica_id = response.replica_id as ReplicaId;
522
523 let mut worktrees = Vec::new();
524 for worktree in response.worktrees {
525 let worktree = cx
526 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
527 worktrees.push(worktree);
528 }
529
530 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
531 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
532 let handle = cx.weak_handle();
533 project_store.update(cx, |store, cx| store.add_project(handle, cx));
534
535 let mut this = Self {
536 worktrees: Vec::new(),
537 loading_buffers: Default::default(),
538 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
539 shared_buffers: Default::default(),
540 loading_local_worktrees: Default::default(),
541 active_entry: None,
542 collaborators: Default::default(),
543 languages,
544 user_store: user_store.clone(),
545 project_store,
546 fs,
547 next_entry_id: Default::default(),
548 next_diagnostic_group_id: Default::default(),
549 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
550 _subscriptions: Default::default(),
551 client: client.clone(),
552 client_state: ProjectClientState::Remote {
553 sharing_has_stopped: false,
554 remote_id,
555 replica_id,
556 _detect_unshare: cx.spawn_weak(move |this, mut cx| {
557 async move {
558 let mut status = client.status();
559 let is_connected =
560 status.next().await.map_or(false, |s| s.is_connected());
561 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
562 if !is_connected || status.next().await.is_some() {
563 if let Some(this) = this.upgrade(&cx) {
564 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
565 }
566 }
567 Ok(())
568 }
569 .log_err()
570 }),
571 },
572 language_servers: Default::default(),
573 language_server_ids: Default::default(),
574 language_server_settings: Default::default(),
575 language_server_statuses: response
576 .language_servers
577 .into_iter()
578 .map(|server| {
579 (
580 server.id as usize,
581 LanguageServerStatus {
582 name: server.name,
583 pending_work: Default::default(),
584 has_pending_diagnostic_updates: false,
585 progress_tokens: Default::default(),
586 },
587 )
588 })
589 .collect(),
590 last_workspace_edits_by_language_server: Default::default(),
591 next_language_server_id: 0,
592 opened_buffers: Default::default(),
593 buffer_snapshots: Default::default(),
594 nonce: StdRng::from_entropy().gen(),
595 initialized_persistent_state: false,
596 };
597 for worktree in worktrees {
598 this.add_worktree(&worktree, cx);
599 }
600 this
601 });
602
603 let user_ids = response
604 .collaborators
605 .iter()
606 .map(|peer| peer.user_id)
607 .collect();
608 user_store
609 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
610 .await?;
611 let mut collaborators = HashMap::default();
612 for message in response.collaborators {
613 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
614 collaborators.insert(collaborator.peer_id, collaborator);
615 }
616
617 this.update(&mut cx, |this, _| {
618 this.collaborators = collaborators;
619 });
620
621 Ok(this)
622 }
623
624 #[cfg(any(test, feature = "test-support"))]
625 pub async fn test(
626 fs: Arc<dyn Fs>,
627 root_paths: impl IntoIterator<Item = &Path>,
628 cx: &mut gpui::TestAppContext,
629 ) -> ModelHandle<Project> {
630 if !cx.read(|cx| cx.has_global::<Settings>()) {
631 cx.update(|cx| cx.set_global(Settings::test(cx)));
632 }
633
634 let languages = Arc::new(LanguageRegistry::test());
635 let http_client = client::test::FakeHttpClient::with_404_response();
636 let client = client::Client::new(http_client.clone());
637 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
638 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
639 let project = cx.update(|cx| {
640 Project::local(true, client, user_store, project_store, languages, fs, cx)
641 });
642 for path in root_paths {
643 let (tree, _) = project
644 .update(cx, |project, cx| {
645 project.find_or_create_local_worktree(path, true, cx)
646 })
647 .await
648 .unwrap();
649 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
650 .await;
651 }
652 project
653 }
654
655 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
656 if self.is_remote() {
657 return Task::ready(Ok(()));
658 }
659
660 let db = self.project_store.read(cx).db.clone();
661 let keys = self.db_keys_for_online_state(cx);
662 let online_by_default = cx.global::<Settings>().projects_online_by_default;
663 let read_online = cx.background().spawn(async move {
664 let values = db.read(keys)?;
665 anyhow::Ok(
666 values
667 .into_iter()
668 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
669 )
670 });
671 cx.spawn(|this, mut cx| async move {
672 let online = read_online.await.log_err().unwrap_or(false);
673 this.update(&mut cx, |this, cx| {
674 this.initialized_persistent_state = true;
675 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
676 let mut online_tx = online_tx.borrow_mut();
677 if *online_tx != online {
678 *online_tx = online;
679 drop(online_tx);
680 this.metadata_changed(false, cx);
681 }
682 }
683 });
684 Ok(())
685 })
686 }
687
688 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
689 if self.is_remote() || !self.initialized_persistent_state {
690 return Task::ready(Ok(()));
691 }
692
693 let db = self.project_store.read(cx).db.clone();
694 let keys = self.db_keys_for_online_state(cx);
695 let is_online = self.is_online();
696 cx.background().spawn(async move {
697 let value = &[is_online as u8];
698 db.write(keys.into_iter().map(|key| (key, value)))
699 })
700 }
701
702 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
703 let settings = cx.global::<Settings>();
704
705 let mut language_servers_to_start = Vec::new();
706 for buffer in self.opened_buffers.values() {
707 if let Some(buffer) = buffer.upgrade(cx) {
708 let buffer = buffer.read(cx);
709 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
710 {
711 if settings.enable_language_server(Some(&language.name())) {
712 let worktree = file.worktree.read(cx);
713 language_servers_to_start.push((
714 worktree.id(),
715 worktree.as_local().unwrap().abs_path().clone(),
716 language.clone(),
717 ));
718 }
719 }
720 }
721 }
722
723 let mut language_servers_to_stop = Vec::new();
724 for language in self.languages.to_vec() {
725 if let Some(lsp_adapter) = language.lsp_adapter() {
726 if !settings.enable_language_server(Some(&language.name())) {
727 let lsp_name = lsp_adapter.name();
728 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
729 if lsp_name == *started_lsp_name {
730 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
731 }
732 }
733 }
734 }
735 }
736
737 // Stop all newly-disabled language servers.
738 for (worktree_id, adapter_name) in language_servers_to_stop {
739 self.stop_language_server(worktree_id, adapter_name, cx)
740 .detach();
741 }
742
743 // Start all the newly-enabled language servers.
744 for (worktree_id, worktree_path, language) in language_servers_to_start {
745 self.start_language_server(worktree_id, worktree_path, language, cx);
746 }
747
748 cx.notify();
749 }
750
751 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
752 self.opened_buffers
753 .get(&remote_id)
754 .and_then(|buffer| buffer.upgrade(cx))
755 }
756
757 pub fn languages(&self) -> &Arc<LanguageRegistry> {
758 &self.languages
759 }
760
761 pub fn client(&self) -> Arc<Client> {
762 self.client.clone()
763 }
764
765 pub fn user_store(&self) -> ModelHandle<UserStore> {
766 self.user_store.clone()
767 }
768
769 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
770 self.project_store.clone()
771 }
772
773 #[cfg(any(test, feature = "test-support"))]
774 pub fn check_invariants(&self, cx: &AppContext) {
775 if self.is_local() {
776 let mut worktree_root_paths = HashMap::default();
777 for worktree in self.worktrees(cx) {
778 let worktree = worktree.read(cx);
779 let abs_path = worktree.as_local().unwrap().abs_path().clone();
780 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
781 assert_eq!(
782 prev_worktree_id,
783 None,
784 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
785 abs_path,
786 worktree.id(),
787 prev_worktree_id
788 )
789 }
790 } else {
791 let replica_id = self.replica_id();
792 for buffer in self.opened_buffers.values() {
793 if let Some(buffer) = buffer.upgrade(cx) {
794 let buffer = buffer.read(cx);
795 assert_eq!(
796 buffer.deferred_ops_len(),
797 0,
798 "replica {}, buffer {} has deferred operations",
799 replica_id,
800 buffer.remote_id()
801 );
802 }
803 }
804 }
805 }
806
807 #[cfg(any(test, feature = "test-support"))]
808 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
809 let path = path.into();
810 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
811 self.opened_buffers.iter().any(|(_, buffer)| {
812 if let Some(buffer) = buffer.upgrade(cx) {
813 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
814 if file.worktree == worktree && file.path() == &path.path {
815 return true;
816 }
817 }
818 }
819 false
820 })
821 } else {
822 false
823 }
824 }
825
826 pub fn fs(&self) -> &Arc<dyn Fs> {
827 &self.fs
828 }
829
830 pub fn set_online(&mut self, online: bool, _: &mut ModelContext<Self>) {
831 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
832 let mut online_tx = online_tx.borrow_mut();
833 if *online_tx != online {
834 *online_tx = online;
835 }
836 }
837 }
838
839 pub fn is_online(&self) -> bool {
840 match &self.client_state {
841 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
842 ProjectClientState::Remote { .. } => true,
843 }
844 }
845
846 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
847 self.unshared(cx);
848 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
849 if let Some(remote_id) = *remote_id_rx.borrow() {
850 let request = self.client.request(proto::UnregisterProject {
851 project_id: remote_id,
852 });
853 return cx.spawn(|this, mut cx| async move {
854 let response = request.await;
855
856 // Unregistering the project causes the server to send out a
857 // contact update removing this project from the host's list
858 // of online projects. Wait until this contact update has been
859 // processed before clearing out this project's remote id, so
860 // that there is no moment where this project appears in the
861 // contact metadata and *also* has no remote id.
862 this.update(&mut cx, |this, cx| {
863 this.user_store()
864 .update(cx, |store, _| store.contact_updates_done())
865 })
866 .await;
867
868 this.update(&mut cx, |this, cx| {
869 if let ProjectClientState::Local { remote_id_tx, .. } =
870 &mut this.client_state
871 {
872 *remote_id_tx.borrow_mut() = None;
873 }
874 this.client_subscriptions.clear();
875 this.metadata_changed(false, cx);
876 });
877 response.map(drop)
878 });
879 }
880 }
881 Task::ready(Ok(()))
882 }
883
884 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
885 if let ProjectClientState::Local {
886 remote_id_rx,
887 online_rx,
888 ..
889 } = &self.client_state
890 {
891 if remote_id_rx.borrow().is_some() {
892 return Task::ready(Ok(()));
893 }
894
895 let response = self.client.request(proto::RegisterProject {
896 online: *online_rx.borrow(),
897 });
898 cx.spawn(|this, mut cx| async move {
899 let remote_id = response.await?.project_id;
900 this.update(&mut cx, |this, cx| {
901 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
902 *remote_id_tx.borrow_mut() = Some(remote_id);
903 }
904
905 this.metadata_changed(false, cx);
906 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
907 this.client_subscriptions
908 .push(this.client.add_model_for_remote_entity(remote_id, cx));
909 Ok(())
910 })
911 })
912 } else {
913 Task::ready(Err(anyhow!("can't register a remote project")))
914 }
915 }
916
917 pub fn remote_id(&self) -> Option<u64> {
918 match &self.client_state {
919 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
920 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
921 }
922 }
923
924 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
925 let mut id = None;
926 let mut watch = None;
927 match &self.client_state {
928 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
929 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
930 }
931
932 async move {
933 if let Some(id) = id {
934 return id;
935 }
936 let mut watch = watch.unwrap();
937 loop {
938 let id = *watch.borrow();
939 if let Some(id) = id {
940 return id;
941 }
942 watch.next().await;
943 }
944 }
945 }
946
947 pub fn shared_remote_id(&self) -> Option<u64> {
948 match &self.client_state {
949 ProjectClientState::Local {
950 remote_id_rx,
951 is_shared,
952 ..
953 } => {
954 if *is_shared {
955 *remote_id_rx.borrow()
956 } else {
957 None
958 }
959 }
960 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
961 }
962 }
963
964 pub fn replica_id(&self) -> ReplicaId {
965 match &self.client_state {
966 ProjectClientState::Local { .. } => 0,
967 ProjectClientState::Remote { replica_id, .. } => *replica_id,
968 }
969 }
970
971 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
972 if let ProjectClientState::Local {
973 remote_id_rx,
974 online_rx,
975 ..
976 } = &self.client_state
977 {
978 // Broadcast worktrees only if the project is online.
979 let worktrees = if *online_rx.borrow() {
980 self.worktrees
981 .iter()
982 .filter_map(|worktree| {
983 worktree
984 .upgrade(&cx)
985 .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
986 })
987 .collect()
988 } else {
989 Default::default()
990 };
991 if let Some(project_id) = *remote_id_rx.borrow() {
992 let online = *online_rx.borrow();
993 self.client
994 .send(proto::UpdateProject {
995 project_id,
996 worktrees,
997 online,
998 })
999 .log_err();
1000
1001 if online {
1002 let worktrees = self.visible_worktrees(cx).collect::<Vec<_>>();
1003 let scans_complete =
1004 futures::future::join_all(worktrees.iter().filter_map(|worktree| {
1005 Some(worktree.read(cx).as_local()?.scan_complete())
1006 }));
1007
1008 let worktrees = worktrees.into_iter().map(|handle| handle.downgrade());
1009 cx.spawn_weak(move |_, cx| async move {
1010 scans_complete.await;
1011 cx.read(|cx| {
1012 for worktree in worktrees {
1013 if let Some(worktree) = worktree
1014 .upgrade(cx)
1015 .and_then(|worktree| worktree.read(cx).as_local())
1016 {
1017 worktree.send_extension_counts(project_id);
1018 }
1019 }
1020 })
1021 })
1022 .detach();
1023 }
1024 }
1025
1026 self.project_store.update(cx, |_, cx| cx.notify());
1027 if persist {
1028 self.persist_state(cx).detach_and_log_err(cx);
1029 }
1030 cx.notify();
1031 }
1032 }
1033
1034 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
1035 &self.collaborators
1036 }
1037
1038 pub fn worktrees<'a>(
1039 &'a self,
1040 cx: &'a AppContext,
1041 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
1042 self.worktrees
1043 .iter()
1044 .filter_map(move |worktree| worktree.upgrade(cx))
1045 }
1046
1047 pub fn visible_worktrees<'a>(
1048 &'a self,
1049 cx: &'a AppContext,
1050 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
1051 self.worktrees.iter().filter_map(|worktree| {
1052 worktree.upgrade(cx).and_then(|worktree| {
1053 if worktree.read(cx).is_visible() {
1054 Some(worktree)
1055 } else {
1056 None
1057 }
1058 })
1059 })
1060 }
1061
1062 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1063 self.visible_worktrees(cx)
1064 .map(|tree| tree.read(cx).root_name())
1065 }
1066
1067 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1068 self.worktrees
1069 .iter()
1070 .filter_map(|worktree| {
1071 let worktree = worktree.upgrade(&cx)?.read(cx);
1072 if worktree.is_visible() {
1073 Some(format!(
1074 "project-path-online:{}",
1075 worktree.as_local().unwrap().abs_path().to_string_lossy()
1076 ))
1077 } else {
1078 None
1079 }
1080 })
1081 .collect::<Vec<_>>()
1082 }
1083
1084 pub fn worktree_for_id(
1085 &self,
1086 id: WorktreeId,
1087 cx: &AppContext,
1088 ) -> Option<ModelHandle<Worktree>> {
1089 self.worktrees(cx)
1090 .find(|worktree| worktree.read(cx).id() == id)
1091 }
1092
1093 pub fn worktree_for_entry(
1094 &self,
1095 entry_id: ProjectEntryId,
1096 cx: &AppContext,
1097 ) -> Option<ModelHandle<Worktree>> {
1098 self.worktrees(cx)
1099 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1100 }
1101
1102 pub fn worktree_id_for_entry(
1103 &self,
1104 entry_id: ProjectEntryId,
1105 cx: &AppContext,
1106 ) -> Option<WorktreeId> {
1107 self.worktree_for_entry(entry_id, cx)
1108 .map(|worktree| worktree.read(cx).id())
1109 }
1110
1111 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1112 paths.iter().all(|path| self.contains_path(&path, cx))
1113 }
1114
1115 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1116 for worktree in self.worktrees(cx) {
1117 let worktree = worktree.read(cx).as_local();
1118 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1119 return true;
1120 }
1121 }
1122 false
1123 }
1124
1125 pub fn create_entry(
1126 &mut self,
1127 project_path: impl Into<ProjectPath>,
1128 is_directory: bool,
1129 cx: &mut ModelContext<Self>,
1130 ) -> Option<Task<Result<Entry>>> {
1131 let project_path = project_path.into();
1132 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1133 if self.is_local() {
1134 Some(worktree.update(cx, |worktree, cx| {
1135 worktree
1136 .as_local_mut()
1137 .unwrap()
1138 .create_entry(project_path.path, is_directory, cx)
1139 }))
1140 } else {
1141 let client = self.client.clone();
1142 let project_id = self.remote_id().unwrap();
1143 Some(cx.spawn_weak(|_, mut cx| async move {
1144 let response = client
1145 .request(proto::CreateProjectEntry {
1146 worktree_id: project_path.worktree_id.to_proto(),
1147 project_id,
1148 path: project_path.path.as_os_str().as_bytes().to_vec(),
1149 is_directory,
1150 })
1151 .await?;
1152 let entry = response
1153 .entry
1154 .ok_or_else(|| anyhow!("missing entry in response"))?;
1155 worktree
1156 .update(&mut cx, |worktree, cx| {
1157 worktree.as_remote_mut().unwrap().insert_entry(
1158 entry,
1159 response.worktree_scan_id as usize,
1160 cx,
1161 )
1162 })
1163 .await
1164 }))
1165 }
1166 }
1167
1168 pub fn copy_entry(
1169 &mut self,
1170 entry_id: ProjectEntryId,
1171 new_path: impl Into<Arc<Path>>,
1172 cx: &mut ModelContext<Self>,
1173 ) -> Option<Task<Result<Entry>>> {
1174 let worktree = self.worktree_for_entry(entry_id, cx)?;
1175 let new_path = new_path.into();
1176 if self.is_local() {
1177 worktree.update(cx, |worktree, cx| {
1178 worktree
1179 .as_local_mut()
1180 .unwrap()
1181 .copy_entry(entry_id, new_path, cx)
1182 })
1183 } else {
1184 let client = self.client.clone();
1185 let project_id = self.remote_id().unwrap();
1186
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::CopyProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 new_path: new_path.as_os_str().as_bytes().to_vec(),
1193 })
1194 .await?;
1195 let entry = response
1196 .entry
1197 .ok_or_else(|| anyhow!("missing entry in response"))?;
1198 worktree
1199 .update(&mut cx, |worktree, cx| {
1200 worktree.as_remote_mut().unwrap().insert_entry(
1201 entry,
1202 response.worktree_scan_id as usize,
1203 cx,
1204 )
1205 })
1206 .await
1207 }))
1208 }
1209 }
1210
1211 pub fn rename_entry(
1212 &mut self,
1213 entry_id: ProjectEntryId,
1214 new_path: impl Into<Arc<Path>>,
1215 cx: &mut ModelContext<Self>,
1216 ) -> Option<Task<Result<Entry>>> {
1217 let worktree = self.worktree_for_entry(entry_id, cx)?;
1218 let new_path = new_path.into();
1219 if self.is_local() {
1220 worktree.update(cx, |worktree, cx| {
1221 worktree
1222 .as_local_mut()
1223 .unwrap()
1224 .rename_entry(entry_id, new_path, cx)
1225 })
1226 } else {
1227 let client = self.client.clone();
1228 let project_id = self.remote_id().unwrap();
1229
1230 Some(cx.spawn_weak(|_, mut cx| async move {
1231 let response = client
1232 .request(proto::RenameProjectEntry {
1233 project_id,
1234 entry_id: entry_id.to_proto(),
1235 new_path: new_path.as_os_str().as_bytes().to_vec(),
1236 })
1237 .await?;
1238 let entry = response
1239 .entry
1240 .ok_or_else(|| anyhow!("missing entry in response"))?;
1241 worktree
1242 .update(&mut cx, |worktree, cx| {
1243 worktree.as_remote_mut().unwrap().insert_entry(
1244 entry,
1245 response.worktree_scan_id as usize,
1246 cx,
1247 )
1248 })
1249 .await
1250 }))
1251 }
1252 }
1253
1254 pub fn delete_entry(
1255 &mut self,
1256 entry_id: ProjectEntryId,
1257 cx: &mut ModelContext<Self>,
1258 ) -> Option<Task<Result<()>>> {
1259 let worktree = self.worktree_for_entry(entry_id, cx)?;
1260 if self.is_local() {
1261 worktree.update(cx, |worktree, cx| {
1262 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1263 })
1264 } else {
1265 let client = self.client.clone();
1266 let project_id = self.remote_id().unwrap();
1267 Some(cx.spawn_weak(|_, mut cx| async move {
1268 let response = client
1269 .request(proto::DeleteProjectEntry {
1270 project_id,
1271 entry_id: entry_id.to_proto(),
1272 })
1273 .await?;
1274 worktree
1275 .update(&mut cx, move |worktree, cx| {
1276 worktree.as_remote_mut().unwrap().delete_entry(
1277 entry_id,
1278 response.worktree_scan_id as usize,
1279 cx,
1280 )
1281 })
1282 .await
1283 }))
1284 }
1285 }
1286
1287 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1288 if !self.is_online() {
1289 return Task::ready(Err(anyhow!("can't share an offline project")));
1290 }
1291
1292 let project_id;
1293 if let ProjectClientState::Local {
1294 remote_id_rx,
1295 is_shared,
1296 ..
1297 } = &mut self.client_state
1298 {
1299 if *is_shared {
1300 return Task::ready(Ok(()));
1301 }
1302 *is_shared = true;
1303 if let Some(id) = *remote_id_rx.borrow() {
1304 project_id = id;
1305 } else {
1306 return Task::ready(Err(anyhow!("project hasn't been registered")));
1307 }
1308 } else {
1309 return Task::ready(Err(anyhow!("can't share a remote project")));
1310 };
1311
1312 for open_buffer in self.opened_buffers.values_mut() {
1313 match open_buffer {
1314 OpenBuffer::Strong(_) => {}
1315 OpenBuffer::Weak(buffer) => {
1316 if let Some(buffer) = buffer.upgrade(cx) {
1317 *open_buffer = OpenBuffer::Strong(buffer);
1318 }
1319 }
1320 OpenBuffer::Loading(_) => unreachable!(),
1321 }
1322 }
1323
1324 for worktree_handle in self.worktrees.iter_mut() {
1325 match worktree_handle {
1326 WorktreeHandle::Strong(_) => {}
1327 WorktreeHandle::Weak(worktree) => {
1328 if let Some(worktree) = worktree.upgrade(cx) {
1329 *worktree_handle = WorktreeHandle::Strong(worktree);
1330 }
1331 }
1332 }
1333 }
1334
1335 let mut tasks = Vec::new();
1336 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1337 worktree.update(cx, |worktree, cx| {
1338 let worktree = worktree.as_local_mut().unwrap();
1339 tasks.push(worktree.share(project_id, cx));
1340 });
1341 }
1342
1343 for (server_id, status) in &self.language_server_statuses {
1344 self.client
1345 .send(proto::StartLanguageServer {
1346 project_id,
1347 server: Some(proto::LanguageServer {
1348 id: *server_id as u64,
1349 name: status.name.clone(),
1350 }),
1351 })
1352 .log_err();
1353 }
1354
1355 cx.spawn(|this, mut cx| async move {
1356 for task in tasks {
1357 task.await?;
1358 }
1359 this.update(&mut cx, |_, cx| cx.notify());
1360 Ok(())
1361 })
1362 }
1363
1364 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1365 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1366 if !*is_shared {
1367 return;
1368 }
1369
1370 *is_shared = false;
1371 self.collaborators.clear();
1372 self.shared_buffers.clear();
1373 for worktree_handle in self.worktrees.iter_mut() {
1374 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1375 let is_visible = worktree.update(cx, |worktree, _| {
1376 worktree.as_local_mut().unwrap().unshare();
1377 worktree.is_visible()
1378 });
1379 if !is_visible {
1380 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1381 }
1382 }
1383 }
1384
1385 for open_buffer in self.opened_buffers.values_mut() {
1386 match open_buffer {
1387 OpenBuffer::Strong(buffer) => {
1388 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1389 }
1390 _ => {}
1391 }
1392 }
1393
1394 cx.notify();
1395 } else {
1396 log::error!("attempted to unshare a remote project");
1397 }
1398 }
1399
1400 pub fn respond_to_join_request(
1401 &mut self,
1402 requester_id: u64,
1403 allow: bool,
1404 cx: &mut ModelContext<Self>,
1405 ) {
1406 if let Some(project_id) = self.remote_id() {
1407 let share = if self.is_online() && allow {
1408 Some(self.share(cx))
1409 } else {
1410 None
1411 };
1412 let client = self.client.clone();
1413 cx.foreground()
1414 .spawn(async move {
1415 client.send(proto::RespondToJoinProjectRequest {
1416 requester_id,
1417 project_id,
1418 allow,
1419 })?;
1420 if let Some(share) = share {
1421 share.await?;
1422 }
1423 anyhow::Ok(())
1424 })
1425 .detach_and_log_err(cx);
1426 }
1427 }
1428
1429 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1430 if let ProjectClientState::Remote {
1431 sharing_has_stopped,
1432 ..
1433 } = &mut self.client_state
1434 {
1435 *sharing_has_stopped = true;
1436 self.collaborators.clear();
1437 for worktree in &self.worktrees {
1438 if let Some(worktree) = worktree.upgrade(cx) {
1439 worktree.update(cx, |worktree, _| {
1440 if let Some(worktree) = worktree.as_remote_mut() {
1441 worktree.disconnected_from_host();
1442 }
1443 });
1444 }
1445 }
1446 cx.notify();
1447 }
1448 }
1449
1450 pub fn is_read_only(&self) -> bool {
1451 match &self.client_state {
1452 ProjectClientState::Local { .. } => false,
1453 ProjectClientState::Remote {
1454 sharing_has_stopped,
1455 ..
1456 } => *sharing_has_stopped,
1457 }
1458 }
1459
1460 pub fn is_local(&self) -> bool {
1461 match &self.client_state {
1462 ProjectClientState::Local { .. } => true,
1463 ProjectClientState::Remote { .. } => false,
1464 }
1465 }
1466
1467 pub fn is_remote(&self) -> bool {
1468 !self.is_local()
1469 }
1470
1471 pub fn create_buffer(
1472 &mut self,
1473 text: &str,
1474 language: Option<Arc<Language>>,
1475 cx: &mut ModelContext<Self>,
1476 ) -> Result<ModelHandle<Buffer>> {
1477 if self.is_remote() {
1478 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1479 }
1480
1481 let buffer = cx.add_model(|cx| {
1482 Buffer::new(self.replica_id(), text, cx)
1483 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1484 });
1485 self.register_buffer(&buffer, cx)?;
1486 Ok(buffer)
1487 }
1488
1489 pub fn open_path(
1490 &mut self,
1491 path: impl Into<ProjectPath>,
1492 cx: &mut ModelContext<Self>,
1493 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1494 let task = self.open_buffer(path, cx);
1495 cx.spawn_weak(|_, cx| async move {
1496 let buffer = task.await?;
1497 let project_entry_id = buffer
1498 .read_with(&cx, |buffer, cx| {
1499 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1500 })
1501 .ok_or_else(|| anyhow!("no project entry"))?;
1502 Ok((project_entry_id, buffer.into()))
1503 })
1504 }
1505
1506 pub fn open_local_buffer(
1507 &mut self,
1508 abs_path: impl AsRef<Path>,
1509 cx: &mut ModelContext<Self>,
1510 ) -> Task<Result<ModelHandle<Buffer>>> {
1511 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1512 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1513 } else {
1514 Task::ready(Err(anyhow!("no such path")))
1515 }
1516 }
1517
1518 pub fn open_buffer(
1519 &mut self,
1520 path: impl Into<ProjectPath>,
1521 cx: &mut ModelContext<Self>,
1522 ) -> Task<Result<ModelHandle<Buffer>>> {
1523 let project_path = path.into();
1524 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1525 worktree
1526 } else {
1527 return Task::ready(Err(anyhow!("no such worktree")));
1528 };
1529
1530 // If there is already a buffer for the given path, then return it.
1531 let existing_buffer = self.get_open_buffer(&project_path, cx);
1532 if let Some(existing_buffer) = existing_buffer {
1533 return Task::ready(Ok(existing_buffer));
1534 }
1535
1536 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1537 // If the given path is already being loaded, then wait for that existing
1538 // task to complete and return the same buffer.
1539 hash_map::Entry::Occupied(e) => e.get().clone(),
1540
1541 // Otherwise, record the fact that this path is now being loaded.
1542 hash_map::Entry::Vacant(entry) => {
1543 let (mut tx, rx) = postage::watch::channel();
1544 entry.insert(rx.clone());
1545
1546 let load_buffer = if worktree.read(cx).is_local() {
1547 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1548 } else {
1549 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1550 };
1551
1552 cx.spawn(move |this, mut cx| async move {
1553 let load_result = load_buffer.await;
1554 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1555 // Record the fact that the buffer is no longer loading.
1556 this.loading_buffers.remove(&project_path);
1557 let buffer = load_result.map_err(Arc::new)?;
1558 Ok(buffer)
1559 }));
1560 })
1561 .detach();
1562 rx
1563 }
1564 };
1565
1566 cx.foreground().spawn(async move {
1567 loop {
1568 if let Some(result) = loading_watch.borrow().as_ref() {
1569 match result {
1570 Ok(buffer) => return Ok(buffer.clone()),
1571 Err(error) => return Err(anyhow!("{}", error)),
1572 }
1573 }
1574 loading_watch.next().await;
1575 }
1576 })
1577 }
1578
1579 fn open_local_buffer_internal(
1580 &mut self,
1581 path: &Arc<Path>,
1582 worktree: &ModelHandle<Worktree>,
1583 cx: &mut ModelContext<Self>,
1584 ) -> Task<Result<ModelHandle<Buffer>>> {
1585 let load_buffer = worktree.update(cx, |worktree, cx| {
1586 let worktree = worktree.as_local_mut().unwrap();
1587 worktree.load_buffer(path, cx)
1588 });
1589 cx.spawn(|this, mut cx| async move {
1590 let buffer = load_buffer.await?;
1591 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1592 Ok(buffer)
1593 })
1594 }
1595
1596 fn open_remote_buffer_internal(
1597 &mut self,
1598 path: &Arc<Path>,
1599 worktree: &ModelHandle<Worktree>,
1600 cx: &mut ModelContext<Self>,
1601 ) -> Task<Result<ModelHandle<Buffer>>> {
1602 let rpc = self.client.clone();
1603 let project_id = self.remote_id().unwrap();
1604 let remote_worktree_id = worktree.read(cx).id();
1605 let path = path.clone();
1606 let path_string = path.to_string_lossy().to_string();
1607 cx.spawn(|this, mut cx| async move {
1608 let response = rpc
1609 .request(proto::OpenBufferByPath {
1610 project_id,
1611 worktree_id: remote_worktree_id.to_proto(),
1612 path: path_string,
1613 })
1614 .await?;
1615 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1616 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1617 .await
1618 })
1619 }
1620
1621 fn open_local_buffer_via_lsp(
1622 &mut self,
1623 abs_path: lsp::Url,
1624 language_server_id: usize,
1625 language_server_name: LanguageServerName,
1626 cx: &mut ModelContext<Self>,
1627 ) -> Task<Result<ModelHandle<Buffer>>> {
1628 cx.spawn(|this, mut cx| async move {
1629 let abs_path = abs_path
1630 .to_file_path()
1631 .map_err(|_| anyhow!("can't convert URI to path"))?;
1632 let (worktree, relative_path) = if let Some(result) =
1633 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1634 {
1635 result
1636 } else {
1637 let worktree = this
1638 .update(&mut cx, |this, cx| {
1639 this.create_local_worktree(&abs_path, false, cx)
1640 })
1641 .await?;
1642 this.update(&mut cx, |this, cx| {
1643 this.language_server_ids.insert(
1644 (worktree.read(cx).id(), language_server_name),
1645 language_server_id,
1646 );
1647 });
1648 (worktree, PathBuf::new())
1649 };
1650
1651 let project_path = ProjectPath {
1652 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1653 path: relative_path.into(),
1654 };
1655 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1656 .await
1657 })
1658 }
1659
1660 pub fn open_buffer_by_id(
1661 &mut self,
1662 id: u64,
1663 cx: &mut ModelContext<Self>,
1664 ) -> Task<Result<ModelHandle<Buffer>>> {
1665 if let Some(buffer) = self.buffer_for_id(id, cx) {
1666 Task::ready(Ok(buffer))
1667 } else if self.is_local() {
1668 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1669 } else if let Some(project_id) = self.remote_id() {
1670 let request = self
1671 .client
1672 .request(proto::OpenBufferById { project_id, id });
1673 cx.spawn(|this, mut cx| async move {
1674 let buffer = request
1675 .await?
1676 .buffer
1677 .ok_or_else(|| anyhow!("invalid buffer"))?;
1678 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1679 .await
1680 })
1681 } else {
1682 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1683 }
1684 }
1685
1686 pub fn save_buffer_as(
1687 &mut self,
1688 buffer: ModelHandle<Buffer>,
1689 abs_path: PathBuf,
1690 cx: &mut ModelContext<Project>,
1691 ) -> Task<Result<()>> {
1692 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1693 let old_path =
1694 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1695 cx.spawn(|this, mut cx| async move {
1696 if let Some(old_path) = old_path {
1697 this.update(&mut cx, |this, cx| {
1698 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1699 });
1700 }
1701 let (worktree, path) = worktree_task.await?;
1702 worktree
1703 .update(&mut cx, |worktree, cx| {
1704 worktree
1705 .as_local_mut()
1706 .unwrap()
1707 .save_buffer_as(buffer.clone(), path, cx)
1708 })
1709 .await?;
1710 this.update(&mut cx, |this, cx| {
1711 this.assign_language_to_buffer(&buffer, cx);
1712 this.register_buffer_with_language_server(&buffer, cx);
1713 });
1714 Ok(())
1715 })
1716 }
1717
1718 pub fn get_open_buffer(
1719 &mut self,
1720 path: &ProjectPath,
1721 cx: &mut ModelContext<Self>,
1722 ) -> Option<ModelHandle<Buffer>> {
1723 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1724 self.opened_buffers.values().find_map(|buffer| {
1725 let buffer = buffer.upgrade(cx)?;
1726 let file = File::from_dyn(buffer.read(cx).file())?;
1727 if file.worktree == worktree && file.path() == &path.path {
1728 Some(buffer)
1729 } else {
1730 None
1731 }
1732 })
1733 }
1734
1735 fn register_buffer(
1736 &mut self,
1737 buffer: &ModelHandle<Buffer>,
1738 cx: &mut ModelContext<Self>,
1739 ) -> Result<()> {
1740 let remote_id = buffer.read(cx).remote_id();
1741 let open_buffer = if self.is_remote() || self.is_shared() {
1742 OpenBuffer::Strong(buffer.clone())
1743 } else {
1744 OpenBuffer::Weak(buffer.downgrade())
1745 };
1746
1747 match self.opened_buffers.insert(remote_id, open_buffer) {
1748 None => {}
1749 Some(OpenBuffer::Loading(operations)) => {
1750 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1751 }
1752 Some(OpenBuffer::Weak(existing_handle)) => {
1753 if existing_handle.upgrade(cx).is_some() {
1754 Err(anyhow!(
1755 "already registered buffer with remote id {}",
1756 remote_id
1757 ))?
1758 }
1759 }
1760 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1761 "already registered buffer with remote id {}",
1762 remote_id
1763 ))?,
1764 }
1765 cx.subscribe(buffer, |this, buffer, event, cx| {
1766 this.on_buffer_event(buffer, event, cx);
1767 })
1768 .detach();
1769
1770 self.assign_language_to_buffer(buffer, cx);
1771 self.register_buffer_with_language_server(buffer, cx);
1772 cx.observe_release(buffer, |this, buffer, cx| {
1773 if let Some(file) = File::from_dyn(buffer.file()) {
1774 if file.is_local() {
1775 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1776 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1777 server
1778 .notify::<lsp::notification::DidCloseTextDocument>(
1779 lsp::DidCloseTextDocumentParams {
1780 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1781 },
1782 )
1783 .log_err();
1784 }
1785 }
1786 }
1787 })
1788 .detach();
1789
1790 Ok(())
1791 }
1792
1793 fn register_buffer_with_language_server(
1794 &mut self,
1795 buffer_handle: &ModelHandle<Buffer>,
1796 cx: &mut ModelContext<Self>,
1797 ) {
1798 let buffer = buffer_handle.read(cx);
1799 let buffer_id = buffer.remote_id();
1800 if let Some(file) = File::from_dyn(buffer.file()) {
1801 if file.is_local() {
1802 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1803 let initial_snapshot = buffer.text_snapshot();
1804
1805 let mut language_server = None;
1806 let mut language_id = None;
1807 if let Some(language) = buffer.language() {
1808 let worktree_id = file.worktree_id(cx);
1809 if let Some(adapter) = language.lsp_adapter() {
1810 language_id = adapter.id_for_language(language.name().as_ref());
1811 language_server = self
1812 .language_server_ids
1813 .get(&(worktree_id, adapter.name()))
1814 .and_then(|id| self.language_servers.get(&id))
1815 .and_then(|server_state| {
1816 if let LanguageServerState::Running { server, .. } = server_state {
1817 Some(server.clone())
1818 } else {
1819 None
1820 }
1821 });
1822 }
1823 }
1824
1825 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1826 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1827 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1828 .log_err();
1829 }
1830 }
1831
1832 if let Some(server) = language_server {
1833 server
1834 .notify::<lsp::notification::DidOpenTextDocument>(
1835 lsp::DidOpenTextDocumentParams {
1836 text_document: lsp::TextDocumentItem::new(
1837 uri,
1838 language_id.unwrap_or_default(),
1839 0,
1840 initial_snapshot.text(),
1841 ),
1842 }
1843 .clone(),
1844 )
1845 .log_err();
1846 buffer_handle.update(cx, |buffer, cx| {
1847 buffer.set_completion_triggers(
1848 server
1849 .capabilities()
1850 .completion_provider
1851 .as_ref()
1852 .and_then(|provider| provider.trigger_characters.clone())
1853 .unwrap_or(Vec::new()),
1854 cx,
1855 )
1856 });
1857 self.buffer_snapshots
1858 .insert(buffer_id, vec![(0, initial_snapshot)]);
1859 }
1860 }
1861 }
1862 }
1863
1864 fn unregister_buffer_from_language_server(
1865 &mut self,
1866 buffer: &ModelHandle<Buffer>,
1867 old_path: PathBuf,
1868 cx: &mut ModelContext<Self>,
1869 ) {
1870 buffer.update(cx, |buffer, cx| {
1871 buffer.update_diagnostics(Default::default(), cx);
1872 self.buffer_snapshots.remove(&buffer.remote_id());
1873 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1874 language_server
1875 .notify::<lsp::notification::DidCloseTextDocument>(
1876 lsp::DidCloseTextDocumentParams {
1877 text_document: lsp::TextDocumentIdentifier::new(
1878 lsp::Url::from_file_path(old_path).unwrap(),
1879 ),
1880 },
1881 )
1882 .log_err();
1883 }
1884 });
1885 }
1886
1887 fn on_buffer_event(
1888 &mut self,
1889 buffer: ModelHandle<Buffer>,
1890 event: &BufferEvent,
1891 cx: &mut ModelContext<Self>,
1892 ) -> Option<()> {
1893 match event {
1894 BufferEvent::Operation(operation) => {
1895 if let Some(project_id) = self.shared_remote_id() {
1896 let request = self.client.request(proto::UpdateBuffer {
1897 project_id,
1898 buffer_id: buffer.read(cx).remote_id(),
1899 operations: vec![language::proto::serialize_operation(&operation)],
1900 });
1901 cx.background().spawn(request).detach_and_log_err(cx);
1902 } else if let Some(project_id) = self.remote_id() {
1903 let _ = self
1904 .client
1905 .send(proto::RegisterProjectActivity { project_id });
1906 }
1907 }
1908 BufferEvent::Edited { .. } => {
1909 let language_server = self
1910 .language_server_for_buffer(buffer.read(cx), cx)
1911 .map(|(_, server)| server.clone())?;
1912 let buffer = buffer.read(cx);
1913 let file = File::from_dyn(buffer.file())?;
1914 let abs_path = file.as_local()?.abs_path(cx);
1915 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1916 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1917 let (version, prev_snapshot) = buffer_snapshots.last()?;
1918 let next_snapshot = buffer.text_snapshot();
1919 let next_version = version + 1;
1920
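                // Build incremental content changes for `textDocument/didChange`. The changes are
                // applied by the server in order, so each range starts at the edit's position in
                // the new snapshot and spans the length of the replaced text, with the replacement
                // text taken from the new snapshot; the document version is bumped by one for this
                // batch of edits.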
1921 let content_changes = buffer
1922 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1923 .map(|edit| {
1924 let edit_start = edit.new.start.0;
1925 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1926 let new_text = next_snapshot
1927 .text_for_range(edit.new.start.1..edit.new.end.1)
1928 .collect();
1929 lsp::TextDocumentContentChangeEvent {
1930 range: Some(lsp::Range::new(
1931 point_to_lsp(edit_start),
1932 point_to_lsp(edit_end),
1933 )),
1934 range_length: None,
1935 text: new_text,
1936 }
1937 })
1938 .collect();
1939
1940 buffer_snapshots.push((next_version, next_snapshot));
1941
1942 language_server
1943 .notify::<lsp::notification::DidChangeTextDocument>(
1944 lsp::DidChangeTextDocumentParams {
1945 text_document: lsp::VersionedTextDocumentIdentifier::new(
1946 uri,
1947 next_version,
1948 ),
1949 content_changes,
1950 },
1951 )
1952 .log_err();
1953 }
1954 BufferEvent::Saved => {
1955 let file = File::from_dyn(buffer.read(cx).file())?;
1956 let worktree_id = file.worktree_id(cx);
1957 let abs_path = file.as_local()?.abs_path(cx);
1958 let text_document = lsp::TextDocumentIdentifier {
1959 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1960 };
1961
1962 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1963 server
1964 .notify::<lsp::notification::DidSaveTextDocument>(
1965 lsp::DidSaveTextDocumentParams {
1966 text_document: text_document.clone(),
1967 text: None,
1968 },
1969 )
1970 .log_err();
1971 }
1972
1973 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1974 // that don't support a disk-based progress token.
1975 let (lsp_adapter, language_server) =
1976 self.language_server_for_buffer(buffer.read(cx), cx)?;
1977 if lsp_adapter
1978 .disk_based_diagnostics_progress_token()
1979 .is_none()
1980 {
1981 let server_id = language_server.server_id();
1982 self.disk_based_diagnostics_finished(server_id, cx);
1983 self.broadcast_language_server_update(
1984 server_id,
1985 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1986 proto::LspDiskBasedDiagnosticsUpdated {},
1987 ),
1988 );
1989 }
1990 }
1991 _ => {}
1992 }
1993
1994 None
1995 }
1996
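    // Iterates over the running language servers whose ids are registered for the given worktree.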
1997 fn language_servers_for_worktree(
1998 &self,
1999 worktree_id: WorktreeId,
2000 ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
2001 self.language_server_ids
2002 .iter()
2003 .filter_map(move |((language_server_worktree_id, _), id)| {
2004 if *language_server_worktree_id == worktree_id {
2005 if let Some(LanguageServerState::Running { adapter, server }) =
                        self.language_servers.get(id)
2007 {
2008 return Some((adapter, server));
2009 }
2010 }
2011 None
2012 })
2013 }
2014
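    // Selects a language for the buffer based on its full path, assigns it, and starts a
    // language server for the buffer's worktree if one isn't already running.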
2015 fn assign_language_to_buffer(
2016 &mut self,
2017 buffer: &ModelHandle<Buffer>,
2018 cx: &mut ModelContext<Self>,
2019 ) -> Option<()> {
2020 // If the buffer has a language, set it and start the language server if we haven't already.
2021 let full_path = buffer.read(cx).file()?.full_path(cx);
2022 let language = self.languages.select_language(&full_path)?;
2023 buffer.update(cx, |buffer, cx| {
2024 buffer.set_language(Some(language.clone()), cx);
2025 });
2026
2027 let file = File::from_dyn(buffer.read(cx).file())?;
2028 let worktree = file.worktree.read(cx).as_local()?;
2029 let worktree_id = worktree.id();
2030 let worktree_abs_path = worktree.abs_path().clone();
2031 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
2032
2033 None
2034 }
2035
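    // Starts a language server for the given worktree and language, unless language servers are
    // disabled in the settings or one is already registered for this (worktree, adapter) key. A
    // new server id is allocated and a `Starting` entry is stored immediately; when the spawned
    // task finishes initializing the server, it installs notification and request handlers,
    // replaces the entry with a `Running` state, and opens every matching buffer with the server.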
2036 fn start_language_server(
2037 &mut self,
2038 worktree_id: WorktreeId,
2039 worktree_path: Arc<Path>,
2040 language: Arc<Language>,
2041 cx: &mut ModelContext<Self>,
2042 ) {
2043 if !cx
2044 .global::<Settings>()
2045 .enable_language_server(Some(&language.name()))
2046 {
2047 return;
2048 }
2049
2050 let adapter = if let Some(adapter) = language.lsp_adapter() {
2051 adapter
2052 } else {
2053 return;
2054 };
2055 let key = (worktree_id, adapter.name());
2056
2057 self.language_server_ids
2058 .entry(key.clone())
2059 .or_insert_with(|| {
2060 let server_id = post_inc(&mut self.next_language_server_id);
2061 let language_server = self.languages.start_language_server(
2062 server_id,
2063 language.clone(),
2064 worktree_path,
2065 self.client.http_client(),
2066 cx,
2067 );
2068 self.language_servers.insert(
2069 server_id,
2070 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2071 let language_server = language_server?.await.log_err()?;
2072 let language_server = language_server
2073 .initialize(adapter.initialization_options())
2074 .await
2075 .log_err()?;
2076 let this = this.upgrade(&cx)?;
2077 let disk_based_diagnostics_progress_token =
2078 adapter.disk_based_diagnostics_progress_token();
2079
2080 language_server
2081 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2082 let this = this.downgrade();
2083 let adapter = adapter.clone();
2084 move |params, mut cx| {
2085 if let Some(this) = this.upgrade(&cx) {
2086 this.update(&mut cx, |this, cx| {
2087 this.on_lsp_diagnostics_published(
2088 server_id, params, &adapter, cx,
2089 );
2090 });
2091 }
2092 }
2093 })
2094 .detach();
2095
2096 language_server
2097 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2098 let settings = this.read_with(&cx, |this, _| {
2099 this.language_server_settings.clone()
2100 });
2101 move |params, _| {
2102 let settings = settings.lock().clone();
2103 async move {
2104 Ok(params
2105 .items
2106 .into_iter()
2107 .map(|item| {
2108 if let Some(section) = &item.section {
2109 settings
2110 .get(section)
2111 .cloned()
2112 .unwrap_or(serde_json::Value::Null)
2113 } else {
2114 settings.clone()
2115 }
2116 })
2117 .collect())
2118 }
2119 }
2120 })
2121 .detach();
2122
                    // Even though we don't handle these requests, respond to them to avoid stalling
                    // language servers like `gopls`, which wait for a response to these requests
                    // while initializing.
2126 language_server
2127 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2128 let this = this.downgrade();
2129 move |params, mut cx| async move {
2130 if let Some(this) = this.upgrade(&cx) {
2131 this.update(&mut cx, |this, _| {
2132 if let Some(status) =
2133 this.language_server_statuses.get_mut(&server_id)
2134 {
2135 if let lsp::NumberOrString::String(token) =
2136 params.token
2137 {
2138 status.progress_tokens.insert(token);
2139 }
2140 }
2141 });
2142 }
2143 Ok(())
2144 }
2145 })
2146 .detach();
2147 language_server
2148 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2149 Ok(())
2150 })
2151 .detach();
2152
2153 language_server
2154 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2155 let this = this.downgrade();
2156 let adapter = adapter.clone();
2157 let language_server = language_server.clone();
2158 move |params, cx| {
2159 Self::on_lsp_workspace_edit(
2160 this,
2161 params,
2162 server_id,
2163 adapter.clone(),
2164 language_server.clone(),
2165 cx,
2166 )
2167 }
2168 })
2169 .detach();
2170
2171 language_server
2172 .on_notification::<lsp::notification::Progress, _>({
2173 let this = this.downgrade();
2174 move |params, mut cx| {
2175 if let Some(this) = this.upgrade(&cx) {
2176 this.update(&mut cx, |this, cx| {
2177 this.on_lsp_progress(
2178 params,
2179 server_id,
2180 disk_based_diagnostics_progress_token,
2181 cx,
2182 );
2183 });
2184 }
2185 }
2186 })
2187 .detach();
2188
2189 this.update(&mut cx, |this, cx| {
                        // If the language server for this key doesn't match the server id, don't store
                        // the server. This causes it to be dropped, killing the process.
2192 if this
2193 .language_server_ids
2194 .get(&key)
2195 .map(|id| id != &server_id)
2196 .unwrap_or(false)
2197 {
2198 return None;
2199 }
2200
                        // Update the language_servers collection with the Running variant of
                        // LanguageServerState, indicating that the server is up and running.
2203 this.language_servers.insert(
2204 server_id,
2205 LanguageServerState::Running {
2206 adapter: adapter.clone(),
2207 server: language_server.clone(),
2208 },
2209 );
2210 this.language_server_statuses.insert(
2211 server_id,
2212 LanguageServerStatus {
2213 name: language_server.name().to_string(),
2214 pending_work: Default::default(),
2215 has_pending_diagnostic_updates: false,
2216 progress_tokens: Default::default(),
2217 },
2218 );
2219 language_server
2220 .notify::<lsp::notification::DidChangeConfiguration>(
2221 lsp::DidChangeConfigurationParams {
2222 settings: this.language_server_settings.lock().clone(),
2223 },
2224 )
2225 .ok();
2226
2227 if let Some(project_id) = this.shared_remote_id() {
2228 this.client
2229 .send(proto::StartLanguageServer {
2230 project_id,
2231 server: Some(proto::LanguageServer {
2232 id: server_id as u64,
2233 name: language_server.name().to_string(),
2234 }),
2235 })
2236 .log_err();
2237 }
2238
2239 // Tell the language server about every open buffer in the worktree that matches the language.
2240 for buffer in this.opened_buffers.values() {
2241 if let Some(buffer_handle) = buffer.upgrade(cx) {
2242 let buffer = buffer_handle.read(cx);
2243 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2244 file
2245 } else {
2246 continue;
2247 };
2248 let language = if let Some(language) = buffer.language() {
2249 language
2250 } else {
2251 continue;
2252 };
2253 if file.worktree.read(cx).id() != key.0
2254 || language.lsp_adapter().map(|a| a.name())
2255 != Some(key.1.clone())
2256 {
2257 continue;
2258 }
2259
2260 let file = file.as_local()?;
2261 let versions = this
2262 .buffer_snapshots
2263 .entry(buffer.remote_id())
2264 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2265 let (version, initial_snapshot) = versions.last().unwrap();
2266 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2267 let language_id =
2268 adapter.id_for_language(language.name().as_ref());
2269 language_server
2270 .notify::<lsp::notification::DidOpenTextDocument>(
2271 lsp::DidOpenTextDocumentParams {
2272 text_document: lsp::TextDocumentItem::new(
2273 uri,
2274 language_id.unwrap_or_default(),
2275 *version,
2276 initial_snapshot.text(),
2277 ),
2278 },
2279 )
2280 .log_err()?;
2281 buffer_handle.update(cx, |buffer, cx| {
2282 buffer.set_completion_triggers(
2283 language_server
2284 .capabilities()
2285 .completion_provider
2286 .as_ref()
2287 .and_then(|provider| {
2288 provider.trigger_characters.clone()
2289 })
2290 .unwrap_or(Vec::new()),
2291 cx,
2292 )
2293 });
2294 }
2295 }
2296
2297 cx.notify();
2298 Some(language_server)
2299 })
2300 })),
2301 );
2302
2303 server_id
2304 });
2305 }
2306
    // Stops the language server for the given worktree and adapter. Returns the stopped
    // server's root path (if any) and the ids of all worktrees that no longer have a
    // language server.
2309 fn stop_language_server(
2310 &mut self,
2311 worktree_id: WorktreeId,
2312 adapter_name: LanguageServerName,
2313 cx: &mut ModelContext<Self>,
2314 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
2315 let key = (worktree_id, adapter_name);
2316 if let Some(server_id) = self.language_server_ids.remove(&key) {
2317 // Remove other entries for this language server as well
2318 let mut orphaned_worktrees = vec![worktree_id];
2319 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
2320 for other_key in other_keys {
2321 if self.language_server_ids.get(&other_key) == Some(&server_id) {
2322 self.language_server_ids.remove(&other_key);
2323 orphaned_worktrees.push(other_key.0);
2324 }
2325 }
2326
2327 self.language_server_statuses.remove(&server_id);
2328 cx.notify();
2329
2330 let server_state = self.language_servers.remove(&server_id);
2331 cx.spawn_weak(|this, mut cx| async move {
2332 let mut root_path = None;
2333
2334 let server = match server_state {
2335 Some(LanguageServerState::Starting(started_language_server)) => {
2336 started_language_server.await
2337 }
2338 Some(LanguageServerState::Running { server, .. }) => Some(server),
2339 None => None,
2340 };
2341
2342 if let Some(server) = server {
2343 root_path = Some(server.root_path().clone());
2344 if let Some(shutdown) = server.shutdown() {
2345 shutdown.await;
2346 }
2347 }
2348
2349 if let Some(this) = this.upgrade(&cx) {
2350 this.update(&mut cx, |this, cx| {
2351 this.language_server_statuses.remove(&server_id);
2352 cx.notify();
2353 });
2354 }
2355
2356 (root_path, orphaned_worktrees)
2357 })
2358 } else {
2359 Task::ready((None, Vec::new()))
2360 }
2361 }
2362
2363 pub fn restart_language_servers_for_buffers(
2364 &mut self,
2365 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2366 cx: &mut ModelContext<Self>,
2367 ) -> Option<()> {
2368 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2369 .into_iter()
2370 .filter_map(|buffer| {
2371 let file = File::from_dyn(buffer.read(cx).file())?;
2372 let worktree = file.worktree.read(cx).as_local()?;
2373 let worktree_id = worktree.id();
2374 let worktree_abs_path = worktree.abs_path().clone();
2375 let full_path = file.full_path(cx);
2376 Some((worktree_id, worktree_abs_path, full_path))
2377 })
2378 .collect();
2379 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2380 let language = self.languages.select_language(&full_path)?;
2381 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2382 }
2383
2384 None
2385 }
2386
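    // Restarts the language server for a worktree: stops the existing server, starts a new one
    // rooted at the old server's root path (falling back to `fallback_path`), and re-points any
    // worktrees that were sharing the stopped server at the new server id.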
2387 fn restart_language_server(
2388 &mut self,
2389 worktree_id: WorktreeId,
2390 fallback_path: Arc<Path>,
2391 language: Arc<Language>,
2392 cx: &mut ModelContext<Self>,
2393 ) {
2394 let adapter = if let Some(adapter) = language.lsp_adapter() {
2395 adapter
2396 } else {
2397 return;
2398 };
2399
2400 let server_name = adapter.name();
2401 let stop = self.stop_language_server(worktree_id, server_name.clone(), cx);
2402 cx.spawn_weak(|this, mut cx| async move {
2403 let (original_root_path, orphaned_worktrees) = stop.await;
2404 if let Some(this) = this.upgrade(&cx) {
2405 this.update(&mut cx, |this, cx| {
                    // Attempt to restart using the original server's root path, falling back to the
                    // passed-in path if we could not retrieve it.
2408 let root_path = original_root_path
2409 .map(|path_buf| Arc::from(path_buf.as_path()))
2410 .unwrap_or(fallback_path);
2411
2412 this.start_language_server(worktree_id, root_path, language, cx);
2413
                    // Look up the new server id and set it for each of the orphaned worktrees.
2415 if let Some(new_server_id) = this
2416 .language_server_ids
2417 .get(&(worktree_id, server_name.clone()))
2418 .cloned()
2419 {
2420 for orphaned_worktree in orphaned_worktrees {
2421 this.language_server_ids.insert(
2422 (orphaned_worktree, server_name.clone()),
2423 new_server_id.clone(),
2424 );
2425 }
2426 }
2427 });
2428 }
2429 })
2430 .detach();
2431 }
2432
2433 fn on_lsp_diagnostics_published(
2434 &mut self,
2435 server_id: usize,
2436 mut params: lsp::PublishDiagnosticsParams,
2437 adapter: &Arc<dyn LspAdapter>,
2438 cx: &mut ModelContext<Self>,
2439 ) {
2440 adapter.process_diagnostics(&mut params);
2441 self.update_diagnostics(
2442 server_id,
2443 params,
2444 adapter.disk_based_diagnostic_sources(),
2445 cx,
2446 )
2447 .log_err();
2448 }
2449
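    // Handles `$/progress` notifications. Numeric tokens and tokens that were never registered
    // via `window/workDoneProgress/create` are ignored. Progress on the adapter's disk-based
    // diagnostics token is translated into diagnostics started/finished events; all other tokens
    // are reported as generic work start/progress/end updates and broadcast to collaborators.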
2450 fn on_lsp_progress(
2451 &mut self,
2452 progress: lsp::ProgressParams,
2453 server_id: usize,
2454 disk_based_diagnostics_progress_token: Option<&str>,
2455 cx: &mut ModelContext<Self>,
2456 ) {
2457 let token = match progress.token {
2458 lsp::NumberOrString::String(token) => token,
2459 lsp::NumberOrString::Number(token) => {
2460 log::info!("skipping numeric progress token {}", token);
2461 return;
2462 }
2463 };
2464 let progress = match progress.value {
2465 lsp::ProgressParamsValue::WorkDone(value) => value,
2466 };
2467 let language_server_status =
2468 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2469 status
2470 } else {
2471 return;
2472 };
2473
2474 if !language_server_status.progress_tokens.contains(&token) {
2475 return;
2476 }
2477
2478 match progress {
2479 lsp::WorkDoneProgress::Begin(report) => {
2480 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2481 language_server_status.has_pending_diagnostic_updates = true;
2482 self.disk_based_diagnostics_started(server_id, cx);
2483 self.broadcast_language_server_update(
2484 server_id,
2485 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2486 proto::LspDiskBasedDiagnosticsUpdating {},
2487 ),
2488 );
2489 } else {
2490 self.on_lsp_work_start(
2491 server_id,
2492 token.clone(),
2493 LanguageServerProgress {
2494 message: report.message.clone(),
2495 percentage: report.percentage.map(|p| p as usize),
2496 last_update_at: Instant::now(),
2497 },
2498 cx,
2499 );
2500 self.broadcast_language_server_update(
2501 server_id,
2502 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2503 token,
2504 message: report.message,
2505 percentage: report.percentage.map(|p| p as u32),
2506 }),
2507 );
2508 }
2509 }
2510 lsp::WorkDoneProgress::Report(report) => {
2511 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2512 self.on_lsp_work_progress(
2513 server_id,
2514 token.clone(),
2515 LanguageServerProgress {
2516 message: report.message.clone(),
2517 percentage: report.percentage.map(|p| p as usize),
2518 last_update_at: Instant::now(),
2519 },
2520 cx,
2521 );
2522 self.broadcast_language_server_update(
2523 server_id,
2524 proto::update_language_server::Variant::WorkProgress(
2525 proto::LspWorkProgress {
2526 token,
2527 message: report.message,
2528 percentage: report.percentage.map(|p| p as u32),
2529 },
2530 ),
2531 );
2532 }
2533 }
2534 lsp::WorkDoneProgress::End(_) => {
2535 language_server_status.progress_tokens.remove(&token);
2536
2537 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2538 language_server_status.has_pending_diagnostic_updates = false;
2539 self.disk_based_diagnostics_finished(server_id, cx);
2540 self.broadcast_language_server_update(
2541 server_id,
2542 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2543 proto::LspDiskBasedDiagnosticsUpdated {},
2544 ),
2545 );
2546 } else {
2547 self.on_lsp_work_end(server_id, token.clone(), cx);
2548 self.broadcast_language_server_update(
2549 server_id,
2550 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2551 token,
2552 }),
2553 );
2554 }
2555 }
2556 }
2557 }
2558
2559 fn on_lsp_work_start(
2560 &mut self,
2561 language_server_id: usize,
2562 token: String,
2563 progress: LanguageServerProgress,
2564 cx: &mut ModelContext<Self>,
2565 ) {
2566 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2567 status.pending_work.insert(token, progress);
2568 cx.notify();
2569 }
2570 }
2571
2572 fn on_lsp_work_progress(
2573 &mut self,
2574 language_server_id: usize,
2575 token: String,
2576 progress: LanguageServerProgress,
2577 cx: &mut ModelContext<Self>,
2578 ) {
2579 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2580 let entry = status
2581 .pending_work
2582 .entry(token)
2583 .or_insert(LanguageServerProgress {
2584 message: Default::default(),
2585 percentage: Default::default(),
2586 last_update_at: progress.last_update_at,
2587 });
2588 if progress.message.is_some() {
2589 entry.message = progress.message;
2590 }
2591 if progress.percentage.is_some() {
2592 entry.percentage = progress.percentage;
2593 }
2594 entry.last_update_at = progress.last_update_at;
2595 cx.notify();
2596 }
2597 }
2598
2599 fn on_lsp_work_end(
2600 &mut self,
2601 language_server_id: usize,
2602 token: String,
2603 cx: &mut ModelContext<Self>,
2604 ) {
2605 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2606 status.pending_work.remove(&token);
2607 cx.notify();
2608 }
2609 }
2610
2611 async fn on_lsp_workspace_edit(
2612 this: WeakModelHandle<Self>,
2613 params: lsp::ApplyWorkspaceEditParams,
2614 server_id: usize,
2615 adapter: Arc<dyn LspAdapter>,
2616 language_server: Arc<LanguageServer>,
2617 mut cx: AsyncAppContext,
2618 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2619 let this = this
2620 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2622 let transaction = Self::deserialize_workspace_edit(
2623 this.clone(),
2624 params.edit,
2625 true,
2626 adapter.clone(),
2627 language_server.clone(),
2628 &mut cx,
2629 )
2630 .await
2631 .log_err();
2632 this.update(&mut cx, |this, _| {
2633 if let Some(transaction) = transaction {
2634 this.last_workspace_edits_by_language_server
2635 .insert(server_id, transaction);
2636 }
2637 });
2638 Ok(lsp::ApplyWorkspaceEditResponse {
2639 applied: true,
2640 failed_change: None,
2641 failure_reason: None,
2642 })
2643 }
2644
2645 fn broadcast_language_server_update(
2646 &self,
2647 language_server_id: usize,
2648 event: proto::update_language_server::Variant,
2649 ) {
2650 if let Some(project_id) = self.shared_remote_id() {
2651 self.client
2652 .send(proto::UpdateLanguageServer {
2653 project_id,
2654 language_server_id: language_server_id as u64,
2655 variant: Some(event),
2656 })
2657 .log_err();
2658 }
2659 }
2660
2661 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2662 for server_state in self.language_servers.values() {
2663 if let LanguageServerState::Running { server, .. } = server_state {
2664 server
2665 .notify::<lsp::notification::DidChangeConfiguration>(
2666 lsp::DidChangeConfigurationParams {
2667 settings: settings.clone(),
2668 },
2669 )
2670 .ok();
2671 }
2672 }
2673 *self.language_server_settings.lock() = settings;
2674 }
2675
2676 pub fn language_server_statuses(
2677 &self,
2678 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2679 self.language_server_statuses.values()
2680 }
2681
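    // Converts a `textDocument/publishDiagnostics` payload into diagnostic entries. Each primary
    // diagnostic gets a fresh group id, and its related information is added to the same group as
    // non-primary entries. Diagnostics whose related information points back at an existing
    // primary are treated as supporting diagnostics: in a second pass their severity and
    // "unnecessary" flag are copied onto the matching non-primary entries.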
2682 pub fn update_diagnostics(
2683 &mut self,
2684 language_server_id: usize,
2685 params: lsp::PublishDiagnosticsParams,
2686 disk_based_sources: &[&str],
2687 cx: &mut ModelContext<Self>,
2688 ) -> Result<()> {
2689 let abs_path = params
2690 .uri
2691 .to_file_path()
2692 .map_err(|_| anyhow!("URI is not a file"))?;
2693 let mut diagnostics = Vec::default();
2694 let mut primary_diagnostic_group_ids = HashMap::default();
2695 let mut sources_by_group_id = HashMap::default();
2696 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2698 let source = diagnostic.source.as_ref();
2699 let code = diagnostic.code.as_ref().map(|code| match code {
2700 lsp::NumberOrString::Number(code) => code.to_string(),
2701 lsp::NumberOrString::String(code) => code.clone(),
2702 });
2703 let range = range_from_lsp(diagnostic.range);
2704 let is_supporting = diagnostic
2705 .related_information
2706 .as_ref()
2707 .map_or(false, |infos| {
2708 infos.iter().any(|info| {
2709 primary_diagnostic_group_ids.contains_key(&(
2710 source,
2711 code.clone(),
2712 range_from_lsp(info.location.range),
2713 ))
2714 })
2715 });
2716
2717 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2718 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2719 });
2720
2721 if is_supporting {
2722 supporting_diagnostics.insert(
2723 (source, code.clone(), range),
2724 (diagnostic.severity, is_unnecessary),
2725 );
2726 } else {
2727 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2728 let is_disk_based = source.map_or(false, |source| {
2729 disk_based_sources.contains(&source.as_str())
2730 });
2731
2732 sources_by_group_id.insert(group_id, source);
2733 primary_diagnostic_group_ids
2734 .insert((source, code.clone(), range.clone()), group_id);
2735
2736 diagnostics.push(DiagnosticEntry {
2737 range,
2738 diagnostic: Diagnostic {
2739 code: code.clone(),
2740 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2741 message: diagnostic.message.clone(),
2742 group_id,
2743 is_primary: true,
2744 is_valid: true,
2745 is_disk_based,
2746 is_unnecessary,
2747 },
2748 });
2749 if let Some(infos) = &diagnostic.related_information {
2750 for info in infos {
2751 if info.location.uri == params.uri && !info.message.is_empty() {
2752 let range = range_from_lsp(info.location.range);
2753 diagnostics.push(DiagnosticEntry {
2754 range,
2755 diagnostic: Diagnostic {
2756 code: code.clone(),
2757 severity: DiagnosticSeverity::INFORMATION,
2758 message: info.message.clone(),
2759 group_id,
2760 is_primary: false,
2761 is_valid: true,
2762 is_disk_based,
2763 is_unnecessary: false,
2764 },
2765 });
2766 }
2767 }
2768 }
2769 }
2770 }
2771
2772 for entry in &mut diagnostics {
2773 let diagnostic = &mut entry.diagnostic;
2774 if !diagnostic.is_primary {
2775 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2776 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2777 source,
2778 diagnostic.code.clone(),
2779 entry.range.clone(),
2780 )) {
2781 if let Some(severity) = severity {
2782 diagnostic.severity = severity;
2783 }
2784 diagnostic.is_unnecessary = is_unnecessary;
2785 }
2786 }
2787 }
2788
2789 self.update_diagnostic_entries(
2790 language_server_id,
2791 abs_path,
2792 params.version,
2793 diagnostics,
2794 cx,
2795 )?;
2796 Ok(())
2797 }
2798
2799 pub fn update_diagnostic_entries(
2800 &mut self,
2801 language_server_id: usize,
2802 abs_path: PathBuf,
2803 version: Option<i32>,
2804 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2805 cx: &mut ModelContext<Project>,
2806 ) -> Result<(), anyhow::Error> {
2807 let (worktree, relative_path) = self
2808 .find_local_worktree(&abs_path, cx)
2809 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2810
2811 let project_path = ProjectPath {
2812 worktree_id: worktree.read(cx).id(),
2813 path: relative_path.into(),
2814 };
2815 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2816 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2817 }
2818
2819 let updated = worktree.update(cx, |worktree, cx| {
2820 worktree
2821 .as_local_mut()
2822 .ok_or_else(|| anyhow!("not a local worktree"))?
2823 .update_diagnostics(
2824 language_server_id,
2825 project_path.path.clone(),
2826 diagnostics,
2827 cx,
2828 )
2829 })?;
2830 if updated {
2831 cx.emit(Event::DiagnosticsUpdated {
2832 language_server_id,
2833 path: project_path,
2834 });
2835 }
2836 Ok(())
2837 }
2838
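    // Applies a set of diagnostics to an open buffer. The diagnostics are sorted, disk-based
    // diagnostics have their ranges mapped through any edits made since the buffer was last
    // saved, empty ranges are expanded to cover at least one character, and the resulting set is
    // stored on the buffer against the snapshot matching the reported LSP version.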
2839 fn update_buffer_diagnostics(
2840 &mut self,
2841 buffer: &ModelHandle<Buffer>,
2842 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2843 version: Option<i32>,
2844 cx: &mut ModelContext<Self>,
2845 ) -> Result<()> {
2846 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2847 Ordering::Equal
2848 .then_with(|| b.is_primary.cmp(&a.is_primary))
2849 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2850 .then_with(|| a.severity.cmp(&b.severity))
2851 .then_with(|| a.message.cmp(&b.message))
2852 }
2853
2854 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2855
2856 diagnostics.sort_unstable_by(|a, b| {
2857 Ordering::Equal
2858 .then_with(|| a.range.start.cmp(&b.range.start))
2859 .then_with(|| b.range.end.cmp(&a.range.end))
2860 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2861 });
2862
2863 let mut sanitized_diagnostics = Vec::new();
2864 let edits_since_save = Patch::new(
2865 snapshot
2866 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2867 .collect(),
2868 );
2869 for entry in diagnostics {
2870 let start;
2871 let end;
2872 if entry.diagnostic.is_disk_based {
2873 // Some diagnostics are based on files on disk instead of buffers'
2874 // current contents. Adjust these diagnostics' ranges to reflect
2875 // any unsaved edits.
2876 start = edits_since_save.old_to_new(entry.range.start);
2877 end = edits_since_save.old_to_new(entry.range.end);
2878 } else {
2879 start = entry.range.start;
2880 end = entry.range.end;
2881 }
2882
2883 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2884 ..snapshot.clip_point_utf16(end, Bias::Right);
2885
2886 // Expand empty ranges by one character
2887 if range.start == range.end {
2888 range.end.column += 1;
2889 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2890 if range.start == range.end && range.end.column > 0 {
2891 range.start.column -= 1;
2892 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2893 }
2894 }
2895
2896 sanitized_diagnostics.push(DiagnosticEntry {
2897 range,
2898 diagnostic: entry.diagnostic,
2899 });
2900 }
2901 drop(edits_since_save);
2902
2903 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2904 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2905 Ok(())
2906 }
2907
2908 pub fn reload_buffers(
2909 &self,
2910 buffers: HashSet<ModelHandle<Buffer>>,
2911 push_to_history: bool,
2912 cx: &mut ModelContext<Self>,
2913 ) -> Task<Result<ProjectTransaction>> {
2914 let mut local_buffers = Vec::new();
2915 let mut remote_buffers = None;
2916 for buffer_handle in buffers {
2917 let buffer = buffer_handle.read(cx);
2918 if buffer.is_dirty() {
2919 if let Some(file) = File::from_dyn(buffer.file()) {
2920 if file.is_local() {
2921 local_buffers.push(buffer_handle);
2922 } else {
2923 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2924 }
2925 }
2926 }
2927 }
2928
2929 let remote_buffers = self.remote_id().zip(remote_buffers);
2930 let client = self.client.clone();
2931
2932 cx.spawn(|this, mut cx| async move {
2933 let mut project_transaction = ProjectTransaction::default();
2934
2935 if let Some((project_id, remote_buffers)) = remote_buffers {
2936 let response = client
2937 .request(proto::ReloadBuffers {
2938 project_id,
2939 buffer_ids: remote_buffers
2940 .iter()
2941 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2942 .collect(),
2943 })
2944 .await?
2945 .transaction
2946 .ok_or_else(|| anyhow!("missing transaction"))?;
2947 project_transaction = this
2948 .update(&mut cx, |this, cx| {
2949 this.deserialize_project_transaction(response, push_to_history, cx)
2950 })
2951 .await?;
2952 }
2953
2954 for buffer in local_buffers {
2955 let transaction = buffer
2956 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2957 .await?;
2958 buffer.update(&mut cx, |buffer, cx| {
2959 if let Some(transaction) = transaction {
2960 if !push_to_history {
2961 buffer.forget_transaction(transaction.id);
2962 }
2963 project_transaction.0.insert(cx.handle(), transaction);
2964 }
2965 });
2966 }
2967
2968 Ok(project_transaction)
2969 })
2970 }
2971
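    // Formats a set of buffers. Remote buffers are formatted by sending a `FormatBuffers` request
    // to the host, while local buffers are formatted via their language server using
    // `textDocument/formatting` when available, or `textDocument/rangeFormatting` over the whole
    // document as a fallback. The resulting LSP edits are applied in a single transaction per
    // buffer and collected into a `ProjectTransaction`.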
2972 pub fn format(
2973 &self,
2974 buffers: HashSet<ModelHandle<Buffer>>,
2975 push_to_history: bool,
2976 cx: &mut ModelContext<Project>,
2977 ) -> Task<Result<ProjectTransaction>> {
2978 let mut local_buffers = Vec::new();
2979 let mut remote_buffers = None;
2980 for buffer_handle in buffers {
2981 let buffer = buffer_handle.read(cx);
2982 if let Some(file) = File::from_dyn(buffer.file()) {
2983 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2984 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2985 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2986 }
2987 } else {
2988 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2989 }
2990 } else {
2991 return Task::ready(Ok(Default::default()));
2992 }
2993 }
2994
2995 let remote_buffers = self.remote_id().zip(remote_buffers);
2996 let client = self.client.clone();
2997
2998 cx.spawn(|this, mut cx| async move {
2999 let mut project_transaction = ProjectTransaction::default();
3000
3001 if let Some((project_id, remote_buffers)) = remote_buffers {
3002 let response = client
3003 .request(proto::FormatBuffers {
3004 project_id,
3005 buffer_ids: remote_buffers
3006 .iter()
3007 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
3008 .collect(),
3009 })
3010 .await?
3011 .transaction
3012 .ok_or_else(|| anyhow!("missing transaction"))?;
3013 project_transaction = this
3014 .update(&mut cx, |this, cx| {
3015 this.deserialize_project_transaction(response, push_to_history, cx)
3016 })
3017 .await?;
3018 }
3019
3020 for (buffer, buffer_abs_path, language_server) in local_buffers {
3021 let text_document = lsp::TextDocumentIdentifier::new(
3022 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
3023 );
3024 let capabilities = &language_server.capabilities();
3025 let tab_size = cx.update(|cx| {
3026 let language_name = buffer.read(cx).language().map(|language| language.name());
3027 cx.global::<Settings>().tab_size(language_name.as_deref())
3028 });
3029 let lsp_edits = if capabilities
3030 .document_formatting_provider
3031 .as_ref()
3032 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
3033 {
3034 language_server
3035 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
3036 text_document,
3037 options: lsp::FormattingOptions {
3038 tab_size: tab_size.into(),
3039 insert_spaces: true,
3040 insert_final_newline: Some(true),
3041 ..Default::default()
3042 },
3043 work_done_progress_params: Default::default(),
3044 })
3045 .await?
3046 } else if capabilities
3047 .document_range_formatting_provider
3048 .as_ref()
3049 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
3050 {
3051 let buffer_start = lsp::Position::new(0, 0);
3052 let buffer_end =
3053 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
3054 language_server
3055 .request::<lsp::request::RangeFormatting>(
3056 lsp::DocumentRangeFormattingParams {
3057 text_document,
3058 range: lsp::Range::new(buffer_start, buffer_end),
3059 options: lsp::FormattingOptions {
3060 tab_size: tab_size.into(),
3061 insert_spaces: true,
3062 insert_final_newline: Some(true),
3063 ..Default::default()
3064 },
3065 work_done_progress_params: Default::default(),
3066 },
3067 )
3068 .await?
3069 } else {
3070 continue;
3071 };
3072
3073 if let Some(lsp_edits) = lsp_edits {
3074 let edits = this
3075 .update(&mut cx, |this, cx| {
3076 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
3077 })
3078 .await?;
3079 buffer.update(&mut cx, |buffer, cx| {
3080 buffer.finalize_last_transaction();
3081 buffer.start_transaction();
3082 for (range, text) in edits {
3083 buffer.edit([(range, text)], cx);
3084 }
3085 if buffer.end_transaction(cx).is_some() {
3086 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3087 if !push_to_history {
3088 buffer.forget_transaction(transaction.id);
3089 }
3090 project_transaction.0.insert(cx.handle(), transaction);
3091 }
3092 });
3093 }
3094 }
3095
3096 Ok(project_transaction)
3097 })
3098 }
3099
3100 pub fn definition<T: ToPointUtf16>(
3101 &self,
3102 buffer: &ModelHandle<Buffer>,
3103 position: T,
3104 cx: &mut ModelContext<Self>,
3105 ) -> Task<Result<Vec<LocationLink>>> {
3106 let position = position.to_point_utf16(buffer.read(cx));
3107 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3108 }
3109
3110 pub fn references<T: ToPointUtf16>(
3111 &self,
3112 buffer: &ModelHandle<Buffer>,
3113 position: T,
3114 cx: &mut ModelContext<Self>,
3115 ) -> Task<Result<Vec<Location>>> {
3116 let position = position.to_point_utf16(buffer.read(cx));
3117 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3118 }
3119
3120 pub fn document_highlights<T: ToPointUtf16>(
3121 &self,
3122 buffer: &ModelHandle<Buffer>,
3123 position: T,
3124 cx: &mut ModelContext<Self>,
3125 ) -> Task<Result<Vec<DocumentHighlight>>> {
3126 let position = position.to_point_utf16(buffer.read(cx));
3127
3128 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3129 }
3130
3131 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3132 if self.is_local() {
3133 let mut requests = Vec::new();
3134 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
3135 let worktree_id = *worktree_id;
3136 if let Some(worktree) = self
3137 .worktree_for_id(worktree_id, cx)
3138 .and_then(|worktree| worktree.read(cx).as_local())
3139 {
3140 if let Some(LanguageServerState::Running { adapter, server }) =
3141 self.language_servers.get(server_id)
3142 {
3143 let adapter = adapter.clone();
3144 let worktree_abs_path = worktree.abs_path().clone();
3145 requests.push(
3146 server
3147 .request::<lsp::request::WorkspaceSymbol>(
3148 lsp::WorkspaceSymbolParams {
3149 query: query.to_string(),
3150 ..Default::default()
3151 },
3152 )
3153 .log_err()
3154 .map(move |response| {
3155 (
3156 adapter,
3157 worktree_id,
3158 worktree_abs_path,
3159 response.unwrap_or_default(),
3160 )
3161 }),
3162 );
3163 }
3164 }
3165 }
3166
3167 cx.spawn_weak(|this, cx| async move {
3168 let responses = futures::future::join_all(requests).await;
3169 let this = if let Some(this) = this.upgrade(&cx) {
3170 this
3171 } else {
3172 return Ok(Default::default());
3173 };
3174 this.read_with(&cx, |this, cx| {
3175 let mut symbols = Vec::new();
3176 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3177 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3178 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3179 let mut worktree_id = source_worktree_id;
3180 let path;
3181 if let Some((worktree, rel_path)) =
3182 this.find_local_worktree(&abs_path, cx)
3183 {
3184 worktree_id = worktree.read(cx).id();
3185 path = rel_path;
3186 } else {
3187 path = relativize_path(&worktree_abs_path, &abs_path);
3188 }
3189
3190 let label = this
3191 .languages
3192 .select_language(&path)
3193 .and_then(|language| {
3194 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3195 })
3196 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3197 let signature = this.symbol_signature(worktree_id, &path);
3198
3199 Some(Symbol {
3200 source_worktree_id,
3201 worktree_id,
3202 language_server_name: adapter.name(),
3203 name: lsp_symbol.name,
3204 kind: lsp_symbol.kind,
3205 label,
3206 path,
3207 range: range_from_lsp(lsp_symbol.location.range),
3208 signature,
3209 })
3210 }));
3211 }
3212 Ok(symbols)
3213 })
3214 })
3215 } else if let Some(project_id) = self.remote_id() {
3216 let request = self.client.request(proto::GetProjectSymbols {
3217 project_id,
3218 query: query.to_string(),
3219 });
3220 cx.spawn_weak(|this, cx| async move {
3221 let response = request.await?;
3222 let mut symbols = Vec::new();
3223 if let Some(this) = this.upgrade(&cx) {
3224 this.read_with(&cx, |this, _| {
3225 symbols.extend(
3226 response
3227 .symbols
3228 .into_iter()
3229 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3230 );
3231 })
3232 }
3233 Ok(symbols)
3234 })
3235 } else {
3236 Task::ready(Ok(Default::default()))
3237 }
3238 }
3239
3240 pub fn open_buffer_for_symbol(
3241 &mut self,
3242 symbol: &Symbol,
3243 cx: &mut ModelContext<Self>,
3244 ) -> Task<Result<ModelHandle<Buffer>>> {
3245 if self.is_local() {
3246 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
3247 symbol.source_worktree_id,
3248 symbol.language_server_name.clone(),
3249 )) {
3250 *id
3251 } else {
3252 return Task::ready(Err(anyhow!(
3253 "language server for worktree and language not found"
3254 )));
3255 };
3256
3257 let worktree_abs_path = if let Some(worktree_abs_path) = self
3258 .worktree_for_id(symbol.worktree_id, cx)
3259 .and_then(|worktree| worktree.read(cx).as_local())
3260 .map(|local_worktree| local_worktree.abs_path())
3261 {
3262 worktree_abs_path
3263 } else {
3264 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3265 };
3266 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3267 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3268 uri
3269 } else {
3270 return Task::ready(Err(anyhow!("invalid symbol path")));
3271 };
3272
3273 self.open_local_buffer_via_lsp(
3274 symbol_uri,
3275 language_server_id,
3276 symbol.language_server_name.clone(),
3277 cx,
3278 )
3279 } else if let Some(project_id) = self.remote_id() {
3280 let request = self.client.request(proto::OpenBufferForSymbol {
3281 project_id,
3282 symbol: Some(serialize_symbol(symbol)),
3283 });
3284 cx.spawn(|this, mut cx| async move {
3285 let response = request.await?;
3286 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3287 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3288 .await
3289 })
3290 } else {
3291 Task::ready(Err(anyhow!("project does not have a remote id")))
3292 }
3293 }
3294
3295 pub fn hover<T: ToPointUtf16>(
3296 &self,
3297 buffer: &ModelHandle<Buffer>,
3298 position: T,
3299 cx: &mut ModelContext<Self>,
3300 ) -> Task<Result<Option<Hover>>> {
3301 let position = position.to_point_utf16(buffer.read(cx));
3302 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3303 }
3304
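    // Requests completions at the given position. For local buffers this issues a
    // `textDocument/completion` request and converts each LSP completion item into a
    // `Completion`, either using the range provided by the server (after validating it against
    // the snapshot) or inferring the replaced range from the word surrounding the cursor. For
    // remote buffers the request is forwarded to the host over RPC.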
3305 pub fn completions<T: ToPointUtf16>(
3306 &self,
3307 source_buffer_handle: &ModelHandle<Buffer>,
3308 position: T,
3309 cx: &mut ModelContext<Self>,
3310 ) -> Task<Result<Vec<Completion>>> {
3311 let source_buffer_handle = source_buffer_handle.clone();
3312 let source_buffer = source_buffer_handle.read(cx);
3313 let buffer_id = source_buffer.remote_id();
3314 let language = source_buffer.language().cloned();
3315 let worktree;
3316 let buffer_abs_path;
3317 if let Some(file) = File::from_dyn(source_buffer.file()) {
3318 worktree = file.worktree.clone();
3319 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3320 } else {
3321 return Task::ready(Ok(Default::default()));
3322 };
3323
3324 let position = position.to_point_utf16(source_buffer);
3325 let anchor = source_buffer.anchor_after(position);
3326
3327 if worktree.read(cx).as_local().is_some() {
3328 let buffer_abs_path = buffer_abs_path.unwrap();
3329 let lang_server =
3330 if let Some((_, server)) = self.language_server_for_buffer(source_buffer, cx) {
3331 server.clone()
3332 } else {
3333 return Task::ready(Ok(Default::default()));
3334 };
3335
3336 cx.spawn(|_, cx| async move {
3337 let completions = lang_server
3338 .request::<lsp::request::Completion>(lsp::CompletionParams {
3339 text_document_position: lsp::TextDocumentPositionParams::new(
3340 lsp::TextDocumentIdentifier::new(
3341 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3342 ),
3343 point_to_lsp(position),
3344 ),
3345 context: Default::default(),
3346 work_done_progress_params: Default::default(),
3347 partial_result_params: Default::default(),
3348 })
3349 .await
3350 .context("lsp completion request failed")?;
3351
3352 let completions = if let Some(completions) = completions {
3353 match completions {
3354 lsp::CompletionResponse::Array(completions) => completions,
3355 lsp::CompletionResponse::List(list) => list.items,
3356 }
3357 } else {
3358 Default::default()
3359 };
3360
3361 source_buffer_handle.read_with(&cx, |this, _| {
3362 let snapshot = this.snapshot();
3363 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3364 let mut range_for_token = None;
3365 Ok(completions
3366 .into_iter()
3367 .filter_map(|lsp_completion| {
3368 // For now, we can only handle additional edits if they are returned
3369 // when resolving the completion, not if they are present initially.
3370 if lsp_completion
3371 .additional_text_edits
3372 .as_ref()
3373 .map_or(false, |edits| !edits.is_empty())
3374 {
3375 return None;
3376 }
3377
3378 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3379 // If the language server provides a range to overwrite, then
3380 // check that the range is valid.
3381 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3382 let range = range_from_lsp(edit.range);
3383 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3384 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3385 if start != range.start || end != range.end {
3386 log::info!("completion out of expected range");
3387 return None;
3388 }
3389 (
3390 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3391 edit.new_text.clone(),
3392 )
3393 }
3394 // If the language server does not provide a range, then infer
3395 // the range based on the syntax tree.
3396 None => {
3397 if position != clipped_position {
3398 log::info!("completion out of expected range");
3399 return None;
3400 }
3401 let Range { start, end } = range_for_token
3402 .get_or_insert_with(|| {
3403 let offset = position.to_offset(&snapshot);
3404 let (range, kind) = snapshot.surrounding_word(offset);
3405 if kind == Some(CharKind::Word) {
3406 range
3407 } else {
3408 offset..offset
3409 }
3410 })
3411 .clone();
3412 let text = lsp_completion
3413 .insert_text
3414 .as_ref()
3415 .unwrap_or(&lsp_completion.label)
3416 .clone();
3417 (
3418 snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text,
3420 )
3421 }
3422 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3423 log::info!("unsupported insert/replace completion");
3424 return None;
3425 }
3426 };
3427
3428 Some(Completion {
3429 old_range,
3430 new_text,
3431 label: language
3432 .as_ref()
3433 .and_then(|l| l.label_for_completion(&lsp_completion))
3434 .unwrap_or_else(|| {
3435 CodeLabel::plain(
3436 lsp_completion.label.clone(),
3437 lsp_completion.filter_text.as_deref(),
3438 )
3439 }),
3440 lsp_completion,
3441 })
3442 })
3443 .collect())
3444 })
3445 })
3446 } else if let Some(project_id) = self.remote_id() {
3447 let rpc = self.client.clone();
3448 let message = proto::GetCompletions {
3449 project_id,
3450 buffer_id,
3451 position: Some(language::proto::serialize_anchor(&anchor)),
3452 version: serialize_version(&source_buffer.version()),
3453 };
3454 cx.spawn_weak(|_, mut cx| async move {
3455 let response = rpc.request(message).await?;
3456
3457 source_buffer_handle
3458 .update(&mut cx, |buffer, _| {
3459 buffer.wait_for_version(deserialize_version(response.version))
3460 })
3461 .await;
3462
3463 response
3464 .completions
3465 .into_iter()
3466 .map(|completion| {
3467 language::proto::deserialize_completion(completion, language.as_ref())
3468 })
3469 .collect()
3470 })
3471 } else {
3472 Task::ready(Ok(Default::default()))
3473 }
3474 }
3475
3476 pub fn apply_additional_edits_for_completion(
3477 &self,
3478 buffer_handle: ModelHandle<Buffer>,
3479 completion: Completion,
3480 push_to_history: bool,
3481 cx: &mut ModelContext<Self>,
3482 ) -> Task<Result<Option<Transaction>>> {
3483 let buffer = buffer_handle.read(cx);
3484 let buffer_id = buffer.remote_id();
3485
3486 if self.is_local() {
3487 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3488 {
3489 server.clone()
3490 } else {
3491 return Task::ready(Ok(Default::default()));
3492 };
3493
3494 cx.spawn(|this, mut cx| async move {
3495 let resolved_completion = lang_server
3496 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3497 .await?;
3498 if let Some(edits) = resolved_completion.additional_text_edits {
3499 let edits = this
3500 .update(&mut cx, |this, cx| {
3501 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3502 })
3503 .await?;
3504 buffer_handle.update(&mut cx, |buffer, cx| {
3505 buffer.finalize_last_transaction();
3506 buffer.start_transaction();
3507 for (range, text) in edits {
3508 buffer.edit([(range, text)], cx);
3509 }
3510 let transaction = if buffer.end_transaction(cx).is_some() {
3511 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3512 if !push_to_history {
3513 buffer.forget_transaction(transaction.id);
3514 }
3515 Some(transaction)
3516 } else {
3517 None
3518 };
3519 Ok(transaction)
3520 })
3521 } else {
3522 Ok(None)
3523 }
3524 })
3525 } else if let Some(project_id) = self.remote_id() {
3526 let client = self.client.clone();
3527 cx.spawn(|_, mut cx| async move {
3528 let response = client
3529 .request(proto::ApplyCompletionAdditionalEdits {
3530 project_id,
3531 buffer_id,
3532 completion: Some(language::proto::serialize_completion(&completion)),
3533 })
3534 .await?;
3535
3536 if let Some(transaction) = response.transaction {
3537 let transaction = language::proto::deserialize_transaction(transaction)?;
3538 buffer_handle
3539 .update(&mut cx, |buffer, _| {
3540 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3541 })
3542 .await;
3543 if push_to_history {
3544 buffer_handle.update(&mut cx, |buffer, _| {
3545 buffer.push_transaction(transaction.clone(), Instant::now());
3546 });
3547 }
3548 Ok(Some(transaction))
3549 } else {
3550 Ok(None)
3551 }
3552 })
3553 } else {
3554 Task::ready(Err(anyhow!("project does not have a remote id")))
3555 }
3556 }
3557
3558 pub fn code_actions<T: Clone + ToOffset>(
3559 &self,
3560 buffer_handle: &ModelHandle<Buffer>,
3561 range: Range<T>,
3562 cx: &mut ModelContext<Self>,
3563 ) -> Task<Result<Vec<CodeAction>>> {
3564 let buffer_handle = buffer_handle.clone();
3565 let buffer = buffer_handle.read(cx);
3566 let snapshot = buffer.snapshot();
3567 let relevant_diagnostics = snapshot
3568 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3569 .map(|entry| entry.to_lsp_diagnostic_stub())
3570 .collect();
3571 let buffer_id = buffer.remote_id();
3572 let worktree;
3573 let buffer_abs_path;
3574 if let Some(file) = File::from_dyn(buffer.file()) {
3575 worktree = file.worktree.clone();
3576 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3577 } else {
3578 return Task::ready(Ok(Default::default()));
3579 };
3580 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3581
3582 if worktree.read(cx).as_local().is_some() {
3583 let buffer_abs_path = buffer_abs_path.unwrap();
3584 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3585 {
3586 server.clone()
3587 } else {
3588 return Task::ready(Ok(Default::default()));
3589 };
3590
3591 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3592 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3594 return Ok(Default::default());
3595 }
3596
3597 Ok(lang_server
3598 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3599 text_document: lsp::TextDocumentIdentifier::new(
3600 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3601 ),
3602 range: lsp_range,
3603 work_done_progress_params: Default::default(),
3604 partial_result_params: Default::default(),
3605 context: lsp::CodeActionContext {
3606 diagnostics: relevant_diagnostics,
3607 only: Some(vec![
3608 lsp::CodeActionKind::QUICKFIX,
3609 lsp::CodeActionKind::REFACTOR,
3610 lsp::CodeActionKind::REFACTOR_EXTRACT,
3611 lsp::CodeActionKind::SOURCE,
3612 ]),
3613 },
3614 })
3615 .await?
3616 .unwrap_or_default()
3617 .into_iter()
3618 .filter_map(|entry| {
3619 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3620 Some(CodeAction {
3621 range: range.clone(),
3622 lsp_action,
3623 })
3624 } else {
3625 None
3626 }
3627 })
3628 .collect())
3629 })
3630 } else if let Some(project_id) = self.remote_id() {
3631 let rpc = self.client.clone();
3632 let version = buffer.version();
3633 cx.spawn_weak(|_, mut cx| async move {
3634 let response = rpc
3635 .request(proto::GetCodeActions {
3636 project_id,
3637 buffer_id,
3638 start: Some(language::proto::serialize_anchor(&range.start)),
3639 end: Some(language::proto::serialize_anchor(&range.end)),
3640 version: serialize_version(&version),
3641 })
3642 .await?;
3643
3644 buffer_handle
3645 .update(&mut cx, |buffer, _| {
3646 buffer.wait_for_version(deserialize_version(response.version))
3647 })
3648 .await;
3649
3650 response
3651 .actions
3652 .into_iter()
3653 .map(language::proto::deserialize_code_action)
3654 .collect()
3655 })
3656 } else {
3657 Task::ready(Ok(Default::default()))
3658 }
3659 }
3660
3661 pub fn apply_code_action(
3662 &self,
3663 buffer_handle: ModelHandle<Buffer>,
3664 mut action: CodeAction,
3665 push_to_history: bool,
3666 cx: &mut ModelContext<Self>,
3667 ) -> Task<Result<ProjectTransaction>> {
3668 if self.is_local() {
3669 let buffer = buffer_handle.read(cx);
3670 let (lsp_adapter, lang_server) =
3671 if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) {
3672 (adapter.clone(), server.clone())
3673 } else {
3674 return Task::ready(Ok(Default::default()));
3675 };
3676 let range = action.range.to_point_utf16(buffer);
3677
3678 cx.spawn(|this, mut cx| async move {
3679 if let Some(lsp_range) = action
3680 .lsp_action
3681 .data
3682 .as_mut()
3683 .and_then(|d| d.get_mut("codeActionParams"))
3684 .and_then(|d| d.get_mut("range"))
3685 {
3686 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3687 action.lsp_action = lang_server
3688 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3689 .await?;
3690 } else {
3691 let actions = this
3692 .update(&mut cx, |this, cx| {
3693 this.code_actions(&buffer_handle, action.range, cx)
3694 })
3695 .await?;
3696 action.lsp_action = actions
3697 .into_iter()
3698 .find(|a| a.lsp_action.title == action.lsp_action.title)
3699 .ok_or_else(|| anyhow!("code action is outdated"))?
3700 .lsp_action;
3701 }
3702
3703 if let Some(edit) = action.lsp_action.edit {
3704 Self::deserialize_workspace_edit(
3705 this,
3706 edit,
3707 push_to_history,
3708 lsp_adapter.clone(),
3709 lang_server.clone(),
3710 &mut cx,
3711 )
3712 .await
3713 } else if let Some(command) = action.lsp_action.command {
3714 this.update(&mut cx, |this, _| {
3715 this.last_workspace_edits_by_language_server
3716 .remove(&lang_server.server_id());
3717 });
3718 lang_server
3719 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3720 command: command.command,
3721 arguments: command.arguments.unwrap_or_default(),
3722 ..Default::default()
3723 })
3724 .await?;
3725 Ok(this.update(&mut cx, |this, _| {
3726 this.last_workspace_edits_by_language_server
3727 .remove(&lang_server.server_id())
3728 .unwrap_or_default()
3729 }))
3730 } else {
3731 Ok(ProjectTransaction::default())
3732 }
3733 })
3734 } else if let Some(project_id) = self.remote_id() {
3735 let client = self.client.clone();
3736 let request = proto::ApplyCodeAction {
3737 project_id,
3738 buffer_id: buffer_handle.read(cx).remote_id(),
3739 action: Some(language::proto::serialize_code_action(&action)),
3740 };
3741 cx.spawn(|this, mut cx| async move {
3742 let response = client
3743 .request(request)
3744 .await?
3745 .transaction
3746 .ok_or_else(|| anyhow!("missing transaction"))?;
3747 this.update(&mut cx, |this, cx| {
3748 this.deserialize_project_transaction(response, push_to_history, cx)
3749 })
3750 .await
3751 })
3752 } else {
3753 Task::ready(Err(anyhow!("project does not have a remote id")))
3754 }
3755 }
3756
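    // Applies an LSP workspace edit to the project. Resource operations (create, rename, delete)
    // are performed through the `fs` interface, and text document edits are applied to buffers
    // opened via the language server, each wrapped in a transaction and collected into the
    // returned `ProjectTransaction`.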
3757 async fn deserialize_workspace_edit(
3758 this: ModelHandle<Self>,
3759 edit: lsp::WorkspaceEdit,
3760 push_to_history: bool,
3761 lsp_adapter: Arc<dyn LspAdapter>,
3762 language_server: Arc<LanguageServer>,
3763 cx: &mut AsyncAppContext,
3764 ) -> Result<ProjectTransaction> {
3765 let fs = this.read_with(cx, |this, _| this.fs.clone());
3766 let mut operations = Vec::new();
3767 if let Some(document_changes) = edit.document_changes {
3768 match document_changes {
3769 lsp::DocumentChanges::Edits(edits) => {
3770 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3771 }
3772 lsp::DocumentChanges::Operations(ops) => operations = ops,
3773 }
3774 } else if let Some(changes) = edit.changes {
3775 operations.extend(changes.into_iter().map(|(uri, edits)| {
3776 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3777 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3778 uri,
3779 version: None,
3780 },
3781 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3782 })
3783 }));
3784 }
3785
3786 let mut project_transaction = ProjectTransaction::default();
3787 for operation in operations {
3788 match operation {
3789 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3790 let abs_path = op
3791 .uri
3792 .to_file_path()
3793 .map_err(|_| anyhow!("can't convert URI to path"))?;
3794
3795 if let Some(parent_path) = abs_path.parent() {
3796 fs.create_dir(parent_path).await?;
3797 }
3798 if abs_path.ends_with("/") {
3799 fs.create_dir(&abs_path).await?;
3800 } else {
3801 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3802 .await?;
3803 }
3804 }
3805 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3806 let source_abs_path = op
3807 .old_uri
3808 .to_file_path()
3809 .map_err(|_| anyhow!("can't convert URI to path"))?;
3810 let target_abs_path = op
3811 .new_uri
3812 .to_file_path()
3813 .map_err(|_| anyhow!("can't convert URI to path"))?;
3814 fs.rename(
3815 &source_abs_path,
3816 &target_abs_path,
3817 op.options.map(Into::into).unwrap_or_default(),
3818 )
3819 .await?;
3820 }
3821 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3822 let abs_path = op
3823 .uri
3824 .to_file_path()
3825 .map_err(|_| anyhow!("can't convert URI to path"))?;
3826 let options = op.options.map(Into::into).unwrap_or_default();
3827 if op.uri.as_str().ends_with('/') { // as above, detect directories via a trailing slash on the URI string
3828 fs.remove_dir(&abs_path, options).await?;
3829 } else {
3830 fs.remove_file(&abs_path, options).await?;
3831 }
3832 }
3833 lsp::DocumentChangeOperation::Edit(op) => {
3834 let buffer_to_edit = this
3835 .update(cx, |this, cx| {
3836 this.open_local_buffer_via_lsp(
3837 op.text_document.uri,
3838 language_server.server_id(),
3839 lsp_adapter.name(),
3840 cx,
3841 )
3842 })
3843 .await?;
3844
3845 let edits = this
3846 .update(cx, |this, cx| {
3847 let edits = op.edits.into_iter().map(|edit| match edit {
3848 lsp::OneOf::Left(edit) => edit,
3849 lsp::OneOf::Right(edit) => edit.text_edit,
3850 });
3851 this.edits_from_lsp(
3852 &buffer_to_edit,
3853 edits,
3854 op.text_document.version,
3855 cx,
3856 )
3857 })
3858 .await?;
3859
3860 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3861 buffer.finalize_last_transaction();
3862 buffer.start_transaction();
3863 for (range, text) in edits {
3864 buffer.edit([(range, text)], cx);
3865 }
3866 let transaction = if buffer.end_transaction(cx).is_some() {
3867 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3868 if !push_to_history {
3869 buffer.forget_transaction(transaction.id);
3870 }
3871 Some(transaction)
3872 } else {
3873 None
3874 };
3875
3876 transaction
3877 });
3878 if let Some(transaction) = transaction {
3879 project_transaction.0.insert(buffer_to_edit, transaction);
3880 }
3881 }
3882 }
3883 }
3884
3885 Ok(project_transaction)
3886 }
3887
3888 pub fn prepare_rename<T: ToPointUtf16>(
3889 &self,
3890 buffer: ModelHandle<Buffer>,
3891 position: T,
3892 cx: &mut ModelContext<Self>,
3893 ) -> Task<Result<Option<Range<Anchor>>>> {
3894 let position = position.to_point_utf16(buffer.read(cx));
3895 self.request_lsp(buffer, PrepareRename { position }, cx)
3896 }
3897
3898 pub fn perform_rename<T: ToPointUtf16>(
3899 &self,
3900 buffer: ModelHandle<Buffer>,
3901 position: T,
3902 new_name: String,
3903 push_to_history: bool,
3904 cx: &mut ModelContext<Self>,
3905 ) -> Task<Result<ProjectTransaction>> {
3906 let position = position.to_point_utf16(buffer.read(cx));
3907 self.request_lsp(
3908 buffer,
3909 PerformRename {
3910 position,
3911 new_name,
3912 push_to_history,
3913 },
3914 cx,
3915 )
3916 }
3917
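// Project-wide search. Locally, worker tasks scan the visible worktrees for files whose contents
// match the query, matching paths are opened as buffers, and background workers collect the
// matching ranges from each buffer snapshot. On remote projects the query is forwarded to the
// host over RPC and the returned locations are deserialized into local buffers.
//
// A rough call-site sketch (assuming a `ModelHandle<Project>` named `project`):
//     let task = project.update(cx, |project, cx| project.search(query, cx));
//     let matches = task.await?;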
3918 pub fn search(
3919 &self,
3920 query: SearchQuery,
3921 cx: &mut ModelContext<Self>,
3922 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3923 if self.is_local() {
3924 let snapshots = self
3925 .visible_worktrees(cx)
3926 .filter_map(|tree| {
3927 let tree = tree.read(cx).as_local()?;
3928 Some(tree.snapshot())
3929 })
3930 .collect::<Vec<_>>();
3931
3932 let background = cx.background().clone();
3933 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3934 if path_count == 0 {
3935 return Task::ready(Ok(Default::default()));
3936 }
3937 let workers = background.num_cpus().min(path_count);
3938 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3939 cx.background()
3940 .spawn({
3941 let fs = self.fs.clone();
3942 let background = cx.background().clone();
3943 let query = query.clone();
3944 async move {
3945 let fs = &fs;
3946 let query = &query;
3947 let matching_paths_tx = &matching_paths_tx;
3948 let paths_per_worker = (path_count + workers - 1) / workers;
3949 let snapshots = &snapshots;
3950 background
3951 .scoped(|scope| {
3952 for worker_ix in 0..workers {
3953 let worker_start_ix = worker_ix * paths_per_worker;
3954 let worker_end_ix = worker_start_ix + paths_per_worker;
3955 scope.spawn(async move {
3956 let mut snapshot_start_ix = 0;
3957 let mut abs_path = PathBuf::new();
3958 for snapshot in snapshots {
3959 let snapshot_end_ix =
3960 snapshot_start_ix + snapshot.visible_file_count();
3961 if worker_end_ix <= snapshot_start_ix {
3962 break;
3963 } else if worker_start_ix > snapshot_end_ix {
3964 snapshot_start_ix = snapshot_end_ix;
3965 continue;
3966 } else {
3967 let start_in_snapshot = worker_start_ix
3968 .saturating_sub(snapshot_start_ix);
3969 let end_in_snapshot =
3970 cmp::min(worker_end_ix, snapshot_end_ix)
3971 - snapshot_start_ix;
3972
3973 for entry in snapshot
3974 .files(false, start_in_snapshot)
3975 .take(end_in_snapshot - start_in_snapshot)
3976 {
3977 if matching_paths_tx.is_closed() {
3978 break;
3979 }
3980
3981 abs_path.clear();
3982 abs_path.push(&snapshot.abs_path());
3983 abs_path.push(&entry.path);
3984 let matches = if let Some(file) =
3985 fs.open_sync(&abs_path).await.log_err()
3986 {
3987 query.detect(file).unwrap_or(false)
3988 } else {
3989 false
3990 };
3991
3992 if matches {
3993 let project_path =
3994 (snapshot.id(), entry.path.clone());
3995 if matching_paths_tx
3996 .send(project_path)
3997 .await
3998 .is_err()
3999 {
4000 break;
4001 }
4002 }
4003 }
4004
4005 snapshot_start_ix = snapshot_end_ix;
4006 }
4007 }
4008 });
4009 }
4010 })
4011 .await;
4012 }
4013 })
4014 .detach();
4015
4016 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
4017 let open_buffers = self
4018 .opened_buffers
4019 .values()
4020 .filter_map(|b| b.upgrade(cx))
4021 .collect::<HashSet<_>>();
4022 cx.spawn(|this, cx| async move {
4023 for buffer in &open_buffers {
4024 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4025 buffers_tx.send((buffer.clone(), snapshot)).await?;
4026 }
4027
4028 let open_buffers = Rc::new(RefCell::new(open_buffers));
4029 while let Some(project_path) = matching_paths_rx.next().await {
4030 if buffers_tx.is_closed() {
4031 break;
4032 }
4033
4034 let this = this.clone();
4035 let open_buffers = open_buffers.clone();
4036 let buffers_tx = buffers_tx.clone();
4037 cx.spawn(|mut cx| async move {
4038 if let Some(buffer) = this
4039 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
4040 .await
4041 .log_err()
4042 {
4043 if open_buffers.borrow_mut().insert(buffer.clone()) {
4044 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4045 buffers_tx.send((buffer, snapshot)).await?;
4046 }
4047 }
4048
4049 Ok::<_, anyhow::Error>(())
4050 })
4051 .detach();
4052 }
4053
4054 Ok::<_, anyhow::Error>(())
4055 })
4056 .detach_and_log_err(cx);
4057
4058 let background = cx.background().clone();
4059 cx.background().spawn(async move {
4060 let query = &query;
4061 let mut matched_buffers = Vec::new();
4062 for _ in 0..workers {
4063 matched_buffers.push(HashMap::default());
4064 }
4065 background
4066 .scoped(|scope| {
4067 for worker_matched_buffers in matched_buffers.iter_mut() {
4068 let mut buffers_rx = buffers_rx.clone();
4069 scope.spawn(async move {
4070 while let Some((buffer, snapshot)) = buffers_rx.next().await {
4071 let buffer_matches = query
4072 .search(snapshot.as_rope())
4073 .await
4074 .iter()
4075 .map(|range| {
4076 snapshot.anchor_before(range.start)
4077 ..snapshot.anchor_after(range.end)
4078 })
4079 .collect::<Vec<_>>();
4080 if !buffer_matches.is_empty() {
4081 worker_matched_buffers
4082 .insert(buffer.clone(), buffer_matches);
4083 }
4084 }
4085 });
4086 }
4087 })
4088 .await;
4089 Ok(matched_buffers.into_iter().flatten().collect())
4090 })
4091 } else if let Some(project_id) = self.remote_id() {
4092 let request = self.client.request(query.to_proto(project_id));
4093 cx.spawn(|this, mut cx| async move {
4094 let response = request.await?;
4095 let mut result = HashMap::default();
4096 for location in response.locations {
4097 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
4098 let target_buffer = this
4099 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4100 .await?;
4101 let start = location
4102 .start
4103 .and_then(deserialize_anchor)
4104 .ok_or_else(|| anyhow!("missing target start"))?;
4105 let end = location
4106 .end
4107 .and_then(deserialize_anchor)
4108 .ok_or_else(|| anyhow!("missing target end"))?;
4109 result
4110 .entry(target_buffer)
4111 .or_default()
4112 .push(start..end);
4113 }
4114 Ok(result)
4115 })
4116 } else {
4117 Task::ready(Ok(Default::default()))
4118 }
4119 }
4120
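// Dispatches a typed LSP request for a buffer. Locally the request is converted to LSP
// parameters and sent to the buffer's language server (falling back to a default response when
// the server lacks the capability); on remote projects it is serialized to proto and forwarded
// to the host.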
4121 fn request_lsp<R: LspCommand>(
4122 &self,
4123 buffer_handle: ModelHandle<Buffer>,
4124 request: R,
4125 cx: &mut ModelContext<Self>,
4126 ) -> Task<Result<R::Response>>
4127 where
4128 <R::LspRequest as lsp::request::Request>::Result: Send,
4129 {
4130 let buffer = buffer_handle.read(cx);
4131 if self.is_local() {
4132 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4133 if let Some((file, language_server)) = file.zip(
4134 self.language_server_for_buffer(buffer, cx)
4135 .map(|(_, server)| server.clone()),
4136 ) {
4137 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4138 return cx.spawn(|this, cx| async move {
4139 if !request.check_capabilities(&language_server.capabilities()) {
4140 return Ok(Default::default());
4141 }
4142
4143 let response = language_server
4144 .request::<R::LspRequest>(lsp_params)
4145 .await
4146 .context("lsp request failed")?;
4147 request
4148 .response_from_lsp(response, this, buffer_handle, cx)
4149 .await
4150 });
4151 }
4152 } else if let Some(project_id) = self.remote_id() {
4153 let rpc = self.client.clone();
4154 let message = request.to_proto(project_id, buffer);
4155 return cx.spawn(|this, cx| async move {
4156 let response = rpc.request(message).await?;
4157 request
4158 .response_from_proto(response, this, buffer_handle, cx)
4159 .await
4160 });
4161 }
4162 Task::ready(Ok(Default::default()))
4163 }
4164
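// Returns the worktree containing `abs_path` together with the path relative to the worktree
// root, creating a new local worktree (whose relative path is then empty) if none contains it.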
4165 pub fn find_or_create_local_worktree(
4166 &mut self,
4167 abs_path: impl AsRef<Path>,
4168 visible: bool,
4169 cx: &mut ModelContext<Self>,
4170 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4171 let abs_path = abs_path.as_ref();
4172 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4173 Task::ready(Ok((tree.clone(), relative_path.into())))
4174 } else {
4175 let worktree = self.create_local_worktree(abs_path, visible, cx);
4176 cx.foreground()
4177 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4178 }
4179 }
4180
4181 pub fn find_local_worktree(
4182 &self,
4183 abs_path: &Path,
4184 cx: &AppContext,
4185 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4186 for tree in &self.worktrees {
4187 if let Some(tree) = tree.upgrade(cx) {
4188 if let Some(relative_path) = tree
4189 .read(cx)
4190 .as_local()
4191 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4192 {
4193 return Some((tree.clone(), relative_path.into()));
4194 }
4195 }
4196 }
4197 None
4198 }
4199
4200 pub fn is_shared(&self) -> bool {
4201 match &self.client_state {
4202 ProjectClientState::Local { is_shared, .. } => *is_shared,
4203 ProjectClientState::Remote { .. } => false,
4204 }
4205 }
4206
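// Creates a local worktree for `abs_path`, deduplicating concurrent requests for the same path
// via `loading_local_worktrees`. Once built, the worktree is added to the project and, if the
// project is currently shared, shared with collaborators as well.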
4207 fn create_local_worktree(
4208 &mut self,
4209 abs_path: impl AsRef<Path>,
4210 visible: bool,
4211 cx: &mut ModelContext<Self>,
4212 ) -> Task<Result<ModelHandle<Worktree>>> {
4213 let fs = self.fs.clone();
4214 let client = self.client.clone();
4215 let next_entry_id = self.next_entry_id.clone();
4216 let path: Arc<Path> = abs_path.as_ref().into();
4217 let task = self
4218 .loading_local_worktrees
4219 .entry(path.clone())
4220 .or_insert_with(|| {
4221 cx.spawn(|project, mut cx| {
4222 async move {
4223 let worktree = Worktree::local(
4224 client.clone(),
4225 path.clone(),
4226 visible,
4227 fs,
4228 next_entry_id,
4229 &mut cx,
4230 )
4231 .await;
4232 project.update(&mut cx, |project, _| {
4233 project.loading_local_worktrees.remove(&path);
4234 });
4235 let worktree = worktree?;
4236
4237 let project_id = project.update(&mut cx, |project, cx| {
4238 project.add_worktree(&worktree, cx);
4239 project.shared_remote_id()
4240 });
4241
4242 if let Some(project_id) = project_id {
4243 worktree
4244 .update(&mut cx, |worktree, cx| {
4245 worktree.as_local_mut().unwrap().share(project_id, cx)
4246 })
4247 .await
4248 .log_err();
4249 }
4250
4251 Ok(worktree)
4252 }
4253 .map_err(|err| Arc::new(err))
4254 })
4255 .shared()
4256 })
4257 .clone();
4258 cx.foreground().spawn(async move {
4259 match task.await {
4260 Ok(worktree) => Ok(worktree),
4261 Err(err) => Err(anyhow!("{}", err)),
4262 }
4263 })
4264 }
4265
4266 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4267 self.worktrees.retain(|worktree| {
4268 if let Some(worktree) = worktree.upgrade(cx) {
4269 let id = worktree.read(cx).id();
4270 if id == id_to_remove {
4271 cx.emit(Event::WorktreeRemoved(id));
4272 false
4273 } else {
4274 true
4275 }
4276 } else {
4277 false
4278 }
4279 });
4280 self.metadata_changed(true, cx);
4281 cx.notify();
4282 }
4283
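// Registers a worktree with the project. A strong handle is retained only while the project is
// shared or the worktree is visible or remote; otherwise a weak handle allows the worktree to be
// released once nothing else references it.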
4284 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4285 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4286 if worktree.read(cx).is_local() {
4287 cx.subscribe(&worktree, |this, worktree, _, cx| {
4288 this.update_local_worktree_buffers(worktree, cx);
4289 })
4290 .detach();
4291 }
4292
4293 let push_strong_handle = {
4294 let worktree = worktree.read(cx);
4295 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4296 };
4297 if push_strong_handle {
4298 self.worktrees
4299 .push(WorktreeHandle::Strong(worktree.clone()));
4300 } else {
4301 self.worktrees
4302 .push(WorktreeHandle::Weak(worktree.downgrade()));
4303 }
4304
4305 self.metadata_changed(true, cx);
4306 cx.observe_release(&worktree, |this, worktree, cx| {
4307 this.remove_worktree(worktree.id(), cx);
4308 cx.notify();
4309 })
4310 .detach();
4311
4312 cx.emit(Event::WorktreeAdded);
4313 cx.notify();
4314 }
4315
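// Reconciles open buffers with an updated local worktree snapshot: each buffer's `File` is
// refreshed by entry id, falling back to a path lookup, or marked as deleted (no entry id), and
// the change is broadcast to collaborators. Buffers whose on-disk paths changed are
// re-registered with the appropriate language server.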
4316 fn update_local_worktree_buffers(
4317 &mut self,
4318 worktree_handle: ModelHandle<Worktree>,
4319 cx: &mut ModelContext<Self>,
4320 ) {
4321 let snapshot = worktree_handle.read(cx).snapshot();
4322 let mut buffers_to_delete = Vec::new();
4323 let mut renamed_buffers = Vec::new();
4324 for (buffer_id, buffer) in &self.opened_buffers {
4325 if let Some(buffer) = buffer.upgrade(cx) {
4326 buffer.update(cx, |buffer, cx| {
4327 if let Some(old_file) = File::from_dyn(buffer.file()) {
4328 if old_file.worktree != worktree_handle {
4329 return;
4330 }
4331
4332 let new_file = if let Some(entry) = old_file
4333 .entry_id
4334 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4335 {
4336 File {
4337 is_local: true,
4338 entry_id: Some(entry.id),
4339 mtime: entry.mtime,
4340 path: entry.path.clone(),
4341 worktree: worktree_handle.clone(),
4342 }
4343 } else if let Some(entry) =
4344 snapshot.entry_for_path(old_file.path().as_ref())
4345 {
4346 File {
4347 is_local: true,
4348 entry_id: Some(entry.id),
4349 mtime: entry.mtime,
4350 path: entry.path.clone(),
4351 worktree: worktree_handle.clone(),
4352 }
4353 } else {
4354 File {
4355 is_local: true,
4356 entry_id: None,
4357 path: old_file.path().clone(),
4358 mtime: old_file.mtime(),
4359 worktree: worktree_handle.clone(),
4360 }
4361 };
4362
4363 let old_path = old_file.abs_path(cx);
4364 if new_file.abs_path(cx) != old_path {
4365 renamed_buffers.push((cx.handle(), old_path));
4366 }
4367
4368 if let Some(project_id) = self.shared_remote_id() {
4369 self.client
4370 .send(proto::UpdateBufferFile {
4371 project_id,
4372 buffer_id: *buffer_id as u64,
4373 file: Some(new_file.to_proto()),
4374 })
4375 .log_err();
4376 }
4377 buffer.file_updated(Arc::new(new_file), cx).detach();
4378 }
4379 });
4380 } else {
4381 buffers_to_delete.push(*buffer_id);
4382 }
4383 }
4384
4385 for buffer_id in buffers_to_delete {
4386 self.opened_buffers.remove(&buffer_id);
4387 }
4388
4389 for (buffer, old_path) in renamed_buffers {
4390 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4391 self.assign_language_to_buffer(&buffer, cx);
4392 self.register_buffer_with_language_server(&buffer, cx);
4393 }
4394 }
4395
4396 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4397 let new_active_entry = entry.and_then(|project_path| {
4398 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4399 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4400 Some(entry.id)
4401 });
4402 if new_active_entry != self.active_entry {
4403 self.active_entry = new_active_entry;
4404 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4405 }
4406 }
4407
4408 pub fn language_servers_running_disk_based_diagnostics<'a>(
4409 &'a self,
4410 ) -> impl 'a + Iterator<Item = usize> {
4411 self.language_server_statuses
4412 .iter()
4413 .filter_map(|(id, status)| {
4414 if status.has_pending_diagnostic_updates {
4415 Some(*id)
4416 } else {
4417 None
4418 }
4419 })
4420 }
4421
4422 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4423 let mut summary = DiagnosticSummary::default();
4424 for (_, path_summary) in self.diagnostic_summaries(cx) {
4425 summary.error_count += path_summary.error_count;
4426 summary.warning_count += path_summary.warning_count;
4427 }
4428 summary
4429 }
4430
4431 pub fn diagnostic_summaries<'a>(
4432 &'a self,
4433 cx: &'a AppContext,
4434 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4435 self.visible_worktrees(cx).flat_map(move |worktree| {
4436 let worktree = worktree.read(cx);
4437 let worktree_id = worktree.id();
4438 worktree
4439 .diagnostic_summaries()
4440 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4441 })
4442 }
4443
4444 pub fn disk_based_diagnostics_started(
4445 &mut self,
4446 language_server_id: usize,
4447 cx: &mut ModelContext<Self>,
4448 ) {
4449 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4450 }
4451
4452 pub fn disk_based_diagnostics_finished(
4453 &mut self,
4454 language_server_id: usize,
4455 cx: &mut ModelContext<Self>,
4456 ) {
4457 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4458 }
4459
4460 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4461 self.active_entry
4462 }
4463
4464 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4465 self.worktree_for_id(path.worktree_id, cx)?
4466 .read(cx)
4467 .entry_for_path(&path.path)
4468 .map(|entry| entry.id)
4469 }
4470
4471 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4472 let worktree = self.worktree_for_entry(entry_id, cx)?;
4473 let worktree = worktree.read(cx);
4474 let worktree_id = worktree.id();
4475 let path = worktree.entry_for_id(entry_id)?.path.clone();
4476 Some(ProjectPath { worktree_id, path })
4477 }
4478
4479 // RPC message handlers
4480
4481 async fn handle_request_join_project(
4482 this: ModelHandle<Self>,
4483 message: TypedEnvelope<proto::RequestJoinProject>,
4484 _: Arc<Client>,
4485 mut cx: AsyncAppContext,
4486 ) -> Result<()> {
4487 let user_id = message.payload.requester_id;
4488 if this.read_with(&cx, |project, _| {
4489 project.collaborators.values().any(|c| c.user.id == user_id)
4490 }) {
4491 this.update(&mut cx, |this, cx| {
4492 this.respond_to_join_request(user_id, true, cx)
4493 });
4494 } else {
4495 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4496 let user = user_store
4497 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4498 .await?;
4499 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4500 }
4501 Ok(())
4502 }
4503
4504 async fn handle_unregister_project(
4505 this: ModelHandle<Self>,
4506 _: TypedEnvelope<proto::UnregisterProject>,
4507 _: Arc<Client>,
4508 mut cx: AsyncAppContext,
4509 ) -> Result<()> {
4510 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4511 Ok(())
4512 }
4513
4514 async fn handle_project_unshared(
4515 this: ModelHandle<Self>,
4516 _: TypedEnvelope<proto::ProjectUnshared>,
4517 _: Arc<Client>,
4518 mut cx: AsyncAppContext,
4519 ) -> Result<()> {
4520 this.update(&mut cx, |this, cx| this.unshared(cx));
4521 Ok(())
4522 }
4523
4524 async fn handle_add_collaborator(
4525 this: ModelHandle<Self>,
4526 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4527 _: Arc<Client>,
4528 mut cx: AsyncAppContext,
4529 ) -> Result<()> {
4530 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4531 let collaborator = envelope
4532 .payload
4533 .collaborator
4534 .take()
4535 .ok_or_else(|| anyhow!("empty collaborator"))?;
4536
4537 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4538 this.update(&mut cx, |this, cx| {
4539 this.collaborators
4540 .insert(collaborator.peer_id, collaborator);
4541 cx.notify();
4542 });
4543
4544 Ok(())
4545 }
4546
4547 async fn handle_remove_collaborator(
4548 this: ModelHandle<Self>,
4549 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4550 _: Arc<Client>,
4551 mut cx: AsyncAppContext,
4552 ) -> Result<()> {
4553 this.update(&mut cx, |this, cx| {
4554 let peer_id = PeerId(envelope.payload.peer_id);
4555 let replica_id = this
4556 .collaborators
4557 .remove(&peer_id)
4558 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4559 .replica_id;
4560 for (_, buffer) in &this.opened_buffers {
4561 if let Some(buffer) = buffer.upgrade(cx) {
4562 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4563 }
4564 }
4565
4566 cx.emit(Event::CollaboratorLeft(peer_id));
4567 cx.notify();
4568 Ok(())
4569 })
4570 }
4571
4572 async fn handle_join_project_request_cancelled(
4573 this: ModelHandle<Self>,
4574 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4575 _: Arc<Client>,
4576 mut cx: AsyncAppContext,
4577 ) -> Result<()> {
4578 let user = this
4579 .update(&mut cx, |this, cx| {
4580 this.user_store.update(cx, |user_store, cx| {
4581 user_store.fetch_user(envelope.payload.requester_id, cx)
4582 })
4583 })
4584 .await?;
4585
4586 this.update(&mut cx, |_, cx| {
4587 cx.emit(Event::ContactCancelledJoinRequest(user));
4588 });
4589
4590 Ok(())
4591 }
4592
4593 async fn handle_update_project(
4594 this: ModelHandle<Self>,
4595 envelope: TypedEnvelope<proto::UpdateProject>,
4596 client: Arc<Client>,
4597 mut cx: AsyncAppContext,
4598 ) -> Result<()> {
4599 this.update(&mut cx, |this, cx| {
4600 let replica_id = this.replica_id();
4601 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4602
4603 let mut old_worktrees_by_id = this
4604 .worktrees
4605 .drain(..)
4606 .filter_map(|worktree| {
4607 let worktree = worktree.upgrade(cx)?;
4608 Some((worktree.read(cx).id(), worktree))
4609 })
4610 .collect::<HashMap<_, _>>();
4611
4612 for worktree in envelope.payload.worktrees {
4613 if let Some(old_worktree) =
4614 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4615 {
4616 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4617 } else {
4618 let worktree =
4619 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4620 this.add_worktree(&worktree, cx);
4621 }
4622 }
4623
4624 this.metadata_changed(true, cx);
4625 for (id, _) in old_worktrees_by_id {
4626 cx.emit(Event::WorktreeRemoved(id));
4627 }
4628
4629 Ok(())
4630 })
4631 }
4632
4633 async fn handle_update_worktree(
4634 this: ModelHandle<Self>,
4635 envelope: TypedEnvelope<proto::UpdateWorktree>,
4636 _: Arc<Client>,
4637 mut cx: AsyncAppContext,
4638 ) -> Result<()> {
4639 this.update(&mut cx, |this, cx| {
4640 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4641 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4642 worktree.update(cx, |worktree, _| {
4643 let worktree = worktree.as_remote_mut().unwrap();
4644 worktree.update_from_remote(envelope.payload);
4645 });
4646 }
4647 Ok(())
4648 })
4649 }
4650
4651 async fn handle_create_project_entry(
4652 this: ModelHandle<Self>,
4653 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4654 _: Arc<Client>,
4655 mut cx: AsyncAppContext,
4656 ) -> Result<proto::ProjectEntryResponse> {
4657 let worktree = this.update(&mut cx, |this, cx| {
4658 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4659 this.worktree_for_id(worktree_id, cx)
4660 .ok_or_else(|| anyhow!("worktree not found"))
4661 })?;
4662 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4663 let entry = worktree
4664 .update(&mut cx, |worktree, cx| {
4665 let worktree = worktree.as_local_mut().unwrap();
4666 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4667 worktree.create_entry(path, envelope.payload.is_directory, cx)
4668 })
4669 .await?;
4670 Ok(proto::ProjectEntryResponse {
4671 entry: Some((&entry).into()),
4672 worktree_scan_id: worktree_scan_id as u64,
4673 })
4674 }
4675
4676 async fn handle_rename_project_entry(
4677 this: ModelHandle<Self>,
4678 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4679 _: Arc<Client>,
4680 mut cx: AsyncAppContext,
4681 ) -> Result<proto::ProjectEntryResponse> {
4682 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4683 let worktree = this.read_with(&cx, |this, cx| {
4684 this.worktree_for_entry(entry_id, cx)
4685 .ok_or_else(|| anyhow!("worktree not found"))
4686 })?;
4687 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4688 let entry = worktree
4689 .update(&mut cx, |worktree, cx| {
4690 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4691 worktree
4692 .as_local_mut()
4693 .unwrap()
4694 .rename_entry(entry_id, new_path, cx)
4695 .ok_or_else(|| anyhow!("invalid entry"))
4696 })?
4697 .await?;
4698 Ok(proto::ProjectEntryResponse {
4699 entry: Some((&entry).into()),
4700 worktree_scan_id: worktree_scan_id as u64,
4701 })
4702 }
4703
4704 async fn handle_copy_project_entry(
4705 this: ModelHandle<Self>,
4706 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4707 _: Arc<Client>,
4708 mut cx: AsyncAppContext,
4709 ) -> Result<proto::ProjectEntryResponse> {
4710 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4711 let worktree = this.read_with(&cx, |this, cx| {
4712 this.worktree_for_entry(entry_id, cx)
4713 .ok_or_else(|| anyhow!("worktree not found"))
4714 })?;
4715 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4716 let entry = worktree
4717 .update(&mut cx, |worktree, cx| {
4718 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4719 worktree
4720 .as_local_mut()
4721 .unwrap()
4722 .copy_entry(entry_id, new_path, cx)
4723 .ok_or_else(|| anyhow!("invalid entry"))
4724 })?
4725 .await?;
4726 Ok(proto::ProjectEntryResponse {
4727 entry: Some((&entry).into()),
4728 worktree_scan_id: worktree_scan_id as u64,
4729 })
4730 }
4731
4732 async fn handle_delete_project_entry(
4733 this: ModelHandle<Self>,
4734 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4735 _: Arc<Client>,
4736 mut cx: AsyncAppContext,
4737 ) -> Result<proto::ProjectEntryResponse> {
4738 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4739 let worktree = this.read_with(&cx, |this, cx| {
4740 this.worktree_for_entry(entry_id, cx)
4741 .ok_or_else(|| anyhow!("worktree not found"))
4742 })?;
4743 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4744 worktree
4745 .update(&mut cx, |worktree, cx| {
4746 worktree
4747 .as_local_mut()
4748 .unwrap()
4749 .delete_entry(entry_id, cx)
4750 .ok_or_else(|| anyhow!("invalid entry"))
4751 })?
4752 .await?;
4753 Ok(proto::ProjectEntryResponse {
4754 entry: None,
4755 worktree_scan_id: worktree_scan_id as u64,
4756 })
4757 }
4758
4759 async fn handle_update_diagnostic_summary(
4760 this: ModelHandle<Self>,
4761 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4762 _: Arc<Client>,
4763 mut cx: AsyncAppContext,
4764 ) -> Result<()> {
4765 this.update(&mut cx, |this, cx| {
4766 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4767 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4768 if let Some(summary) = envelope.payload.summary {
4769 let project_path = ProjectPath {
4770 worktree_id,
4771 path: Path::new(&summary.path).into(),
4772 };
4773 worktree.update(cx, |worktree, _| {
4774 worktree
4775 .as_remote_mut()
4776 .unwrap()
4777 .update_diagnostic_summary(project_path.path.clone(), &summary);
4778 });
4779 cx.emit(Event::DiagnosticsUpdated {
4780 language_server_id: summary.language_server_id as usize,
4781 path: project_path,
4782 });
4783 }
4784 }
4785 Ok(())
4786 })
4787 }
4788
4789 async fn handle_start_language_server(
4790 this: ModelHandle<Self>,
4791 envelope: TypedEnvelope<proto::StartLanguageServer>,
4792 _: Arc<Client>,
4793 mut cx: AsyncAppContext,
4794 ) -> Result<()> {
4795 let server = envelope
4796 .payload
4797 .server
4798 .ok_or_else(|| anyhow!("invalid server"))?;
4799 this.update(&mut cx, |this, cx| {
4800 this.language_server_statuses.insert(
4801 server.id as usize,
4802 LanguageServerStatus {
4803 name: server.name,
4804 pending_work: Default::default(),
4805 has_pending_diagnostic_updates: false,
4806 progress_tokens: Default::default(),
4807 },
4808 );
4809 cx.notify();
4810 });
4811 Ok(())
4812 }
4813
4814 async fn handle_update_language_server(
4815 this: ModelHandle<Self>,
4816 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4817 _: Arc<Client>,
4818 mut cx: AsyncAppContext,
4819 ) -> Result<()> {
4820 let language_server_id = envelope.payload.language_server_id as usize;
4821 match envelope
4822 .payload
4823 .variant
4824 .ok_or_else(|| anyhow!("invalid variant"))?
4825 {
4826 proto::update_language_server::Variant::WorkStart(payload) => {
4827 this.update(&mut cx, |this, cx| {
4828 this.on_lsp_work_start(
4829 language_server_id,
4830 payload.token,
4831 LanguageServerProgress {
4832 message: payload.message,
4833 percentage: payload.percentage.map(|p| p as usize),
4834 last_update_at: Instant::now(),
4835 },
4836 cx,
4837 );
4838 })
4839 }
4840 proto::update_language_server::Variant::WorkProgress(payload) => {
4841 this.update(&mut cx, |this, cx| {
4842 this.on_lsp_work_progress(
4843 language_server_id,
4844 payload.token,
4845 LanguageServerProgress {
4846 message: payload.message,
4847 percentage: payload.percentage.map(|p| p as usize),
4848 last_update_at: Instant::now(),
4849 },
4850 cx,
4851 );
4852 })
4853 }
4854 proto::update_language_server::Variant::WorkEnd(payload) => {
4855 this.update(&mut cx, |this, cx| {
4856 this.on_lsp_work_end(language_server_id, payload.token, cx);
4857 })
4858 }
4859 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4860 this.update(&mut cx, |this, cx| {
4861 this.disk_based_diagnostics_started(language_server_id, cx);
4862 })
4863 }
4864 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4865 this.update(&mut cx, |this, cx| {
4866 this.disk_based_diagnostics_finished(language_server_id, cx)
4867 });
4868 }
4869 }
4870
4871 Ok(())
4872 }
4873
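// Applies buffer operations received from a peer. Operations addressed to a buffer that is still
// loading are queued in `OpenBuffer::Loading`; on guest projects, operations can even arrive
// before the buffer itself, in which case a `Loading` entry is created to hold them.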
4874 async fn handle_update_buffer(
4875 this: ModelHandle<Self>,
4876 envelope: TypedEnvelope<proto::UpdateBuffer>,
4877 _: Arc<Client>,
4878 mut cx: AsyncAppContext,
4879 ) -> Result<()> {
4880 this.update(&mut cx, |this, cx| {
4881 let payload = envelope.payload.clone();
4882 let buffer_id = payload.buffer_id;
4883 let ops = payload
4884 .operations
4885 .into_iter()
4886 .map(|op| language::proto::deserialize_operation(op))
4887 .collect::<Result<Vec<_>, _>>()?;
4888 let is_remote = this.is_remote();
4889 match this.opened_buffers.entry(buffer_id) {
4890 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4891 OpenBuffer::Strong(buffer) => {
4892 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4893 }
4894 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4895 OpenBuffer::Weak(_) => {}
4896 },
4897 hash_map::Entry::Vacant(e) => {
4898 assert!(
4899 is_remote,
4900 "received buffer update from {:?}",
4901 envelope.original_sender_id
4902 );
4903 e.insert(OpenBuffer::Loading(ops));
4904 }
4905 }
4906 Ok(())
4907 })
4908 }
4909
4910 async fn handle_update_buffer_file(
4911 this: ModelHandle<Self>,
4912 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4913 _: Arc<Client>,
4914 mut cx: AsyncAppContext,
4915 ) -> Result<()> {
4916 this.update(&mut cx, |this, cx| {
4917 let payload = envelope.payload.clone();
4918 let buffer_id = payload.buffer_id;
4919 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4920 let worktree = this
4921 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4922 .ok_or_else(|| anyhow!("no such worktree"))?;
4923 let file = File::from_proto(file, worktree.clone(), cx)?;
4924 let buffer = this
4925 .opened_buffers
4926 .get_mut(&buffer_id)
4927 .and_then(|b| b.upgrade(cx))
4928 .ok_or_else(|| anyhow!("no such buffer"))?;
4929 buffer.update(cx, |buffer, cx| {
4930 buffer.file_updated(Arc::new(file), cx).detach();
4931 });
4932 Ok(())
4933 })
4934 }
4935
4936 async fn handle_save_buffer(
4937 this: ModelHandle<Self>,
4938 envelope: TypedEnvelope<proto::SaveBuffer>,
4939 _: Arc<Client>,
4940 mut cx: AsyncAppContext,
4941 ) -> Result<proto::BufferSaved> {
4942 let buffer_id = envelope.payload.buffer_id;
4943 let requested_version = deserialize_version(envelope.payload.version);
4944
4945 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4946 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4947 let buffer = this
4948 .opened_buffers
4949 .get(&buffer_id)
4950 .and_then(|buffer| buffer.upgrade(cx))
4951 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4952 Ok::<_, anyhow::Error>((project_id, buffer))
4953 })?;
4954 buffer
4955 .update(&mut cx, |buffer, _| {
4956 buffer.wait_for_version(requested_version)
4957 })
4958 .await;
4959
4960 let (saved_version, fingerprint, mtime) =
4961 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4962 Ok(proto::BufferSaved {
4963 project_id,
4964 buffer_id,
4965 version: serialize_version(&saved_version),
4966 mtime: Some(mtime.into()),
4967 fingerprint,
4968 })
4969 }
4970
4971 async fn handle_reload_buffers(
4972 this: ModelHandle<Self>,
4973 envelope: TypedEnvelope<proto::ReloadBuffers>,
4974 _: Arc<Client>,
4975 mut cx: AsyncAppContext,
4976 ) -> Result<proto::ReloadBuffersResponse> {
4977 let sender_id = envelope.original_sender_id()?;
4978 let reload = this.update(&mut cx, |this, cx| {
4979 let mut buffers = HashSet::default();
4980 for buffer_id in &envelope.payload.buffer_ids {
4981 buffers.insert(
4982 this.opened_buffers
4983 .get(buffer_id)
4984 .and_then(|buffer| buffer.upgrade(cx))
4985 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4986 );
4987 }
4988 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4989 })?;
4990
4991 let project_transaction = reload.await?;
4992 let project_transaction = this.update(&mut cx, |this, cx| {
4993 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4994 });
4995 Ok(proto::ReloadBuffersResponse {
4996 transaction: Some(project_transaction),
4997 })
4998 }
4999
5000 async fn handle_format_buffers(
5001 this: ModelHandle<Self>,
5002 envelope: TypedEnvelope<proto::FormatBuffers>,
5003 _: Arc<Client>,
5004 mut cx: AsyncAppContext,
5005 ) -> Result<proto::FormatBuffersResponse> {
5006 let sender_id = envelope.original_sender_id()?;
5007 let format = this.update(&mut cx, |this, cx| {
5008 let mut buffers = HashSet::default();
5009 for buffer_id in &envelope.payload.buffer_ids {
5010 buffers.insert(
5011 this.opened_buffers
5012 .get(buffer_id)
5013 .and_then(|buffer| buffer.upgrade(cx))
5014 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
5015 );
5016 }
5017 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
5018 })?;
5019
5020 let project_transaction = format.await?;
5021 let project_transaction = this.update(&mut cx, |this, cx| {
5022 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5023 });
5024 Ok(proto::FormatBuffersResponse {
5025 transaction: Some(project_transaction),
5026 })
5027 }
5028
5029 async fn handle_get_completions(
5030 this: ModelHandle<Self>,
5031 envelope: TypedEnvelope<proto::GetCompletions>,
5032 _: Arc<Client>,
5033 mut cx: AsyncAppContext,
5034 ) -> Result<proto::GetCompletionsResponse> {
5035 let position = envelope
5036 .payload
5037 .position
5038 .and_then(language::proto::deserialize_anchor)
5039 .ok_or_else(|| anyhow!("invalid position"))?;
5040 let version = deserialize_version(envelope.payload.version);
5041 let buffer = this.read_with(&cx, |this, cx| {
5042 this.opened_buffers
5043 .get(&envelope.payload.buffer_id)
5044 .and_then(|buffer| buffer.upgrade(cx))
5045 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5046 })?;
5047 buffer
5048 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
5049 .await;
5050 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5051 let completions = this
5052 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
5053 .await?;
5054
5055 Ok(proto::GetCompletionsResponse {
5056 completions: completions
5057 .iter()
5058 .map(language::proto::serialize_completion)
5059 .collect(),
5060 version: serialize_version(&version),
5061 })
5062 }
5063
5064 async fn handle_apply_additional_edits_for_completion(
5065 this: ModelHandle<Self>,
5066 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
5067 _: Arc<Client>,
5068 mut cx: AsyncAppContext,
5069 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
5070 let apply_additional_edits = this.update(&mut cx, |this, cx| {
5071 let buffer = this
5072 .opened_buffers
5073 .get(&envelope.payload.buffer_id)
5074 .and_then(|buffer| buffer.upgrade(cx))
5075 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5076 let language = buffer.read(cx).language();
5077 let completion = language::proto::deserialize_completion(
5078 envelope
5079 .payload
5080 .completion
5081 .ok_or_else(|| anyhow!("invalid completion"))?,
5082 language,
5083 )?;
5084 Ok::<_, anyhow::Error>(
5085 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
5086 )
5087 })?;
5088
5089 Ok(proto::ApplyCompletionAdditionalEditsResponse {
5090 transaction: apply_additional_edits
5091 .await?
5092 .as_ref()
5093 .map(language::proto::serialize_transaction),
5094 })
5095 }
5096
5097 async fn handle_get_code_actions(
5098 this: ModelHandle<Self>,
5099 envelope: TypedEnvelope<proto::GetCodeActions>,
5100 _: Arc<Client>,
5101 mut cx: AsyncAppContext,
5102 ) -> Result<proto::GetCodeActionsResponse> {
5103 let start = envelope
5104 .payload
5105 .start
5106 .and_then(language::proto::deserialize_anchor)
5107 .ok_or_else(|| anyhow!("invalid start"))?;
5108 let end = envelope
5109 .payload
5110 .end
5111 .and_then(language::proto::deserialize_anchor)
5112 .ok_or_else(|| anyhow!("invalid end"))?;
5113 let buffer = this.update(&mut cx, |this, cx| {
5114 this.opened_buffers
5115 .get(&envelope.payload.buffer_id)
5116 .and_then(|buffer| buffer.upgrade(cx))
5117 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5118 })?;
5119 buffer
5120 .update(&mut cx, |buffer, _| {
5121 buffer.wait_for_version(deserialize_version(envelope.payload.version))
5122 })
5123 .await;
5124
5125 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5126 let code_actions = this.update(&mut cx, |this, cx| {
5127 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5128 })?;
5129
5130 Ok(proto::GetCodeActionsResponse {
5131 actions: code_actions
5132 .await?
5133 .iter()
5134 .map(language::proto::serialize_code_action)
5135 .collect(),
5136 version: serialize_version(&version),
5137 })
5138 }
5139
5140 async fn handle_apply_code_action(
5141 this: ModelHandle<Self>,
5142 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5143 _: Arc<Client>,
5144 mut cx: AsyncAppContext,
5145 ) -> Result<proto::ApplyCodeActionResponse> {
5146 let sender_id = envelope.original_sender_id()?;
5147 let action = language::proto::deserialize_code_action(
5148 envelope
5149 .payload
5150 .action
5151 .ok_or_else(|| anyhow!("invalid action"))?,
5152 )?;
5153 let apply_code_action = this.update(&mut cx, |this, cx| {
5154 let buffer = this
5155 .opened_buffers
5156 .get(&envelope.payload.buffer_id)
5157 .and_then(|buffer| buffer.upgrade(cx))
5158 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5159 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5160 })?;
5161
5162 let project_transaction = apply_code_action.await?;
5163 let project_transaction = this.update(&mut cx, |this, cx| {
5164 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5165 });
5166 Ok(proto::ApplyCodeActionResponse {
5167 transaction: Some(project_transaction),
5168 })
5169 }
5170
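// Generic handler for proto requests that correspond to an `LspCommand`: the payload is
// deserialized into the command, executed via `request_lsp`, and the response is serialized back
// along with the buffer version observed before the request was issued.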
5171 async fn handle_lsp_command<T: LspCommand>(
5172 this: ModelHandle<Self>,
5173 envelope: TypedEnvelope<T::ProtoRequest>,
5174 _: Arc<Client>,
5175 mut cx: AsyncAppContext,
5176 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5177 where
5178 <T::LspRequest as lsp::request::Request>::Result: Send,
5179 {
5180 let sender_id = envelope.original_sender_id()?;
5181 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5182 let buffer_handle = this.read_with(&cx, |this, _| {
5183 this.opened_buffers
5184 .get(&buffer_id)
5185 .and_then(|buffer| buffer.upgrade(&cx))
5186 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5187 })?;
5188 let request = T::from_proto(
5189 envelope.payload,
5190 this.clone(),
5191 buffer_handle.clone(),
5192 cx.clone(),
5193 )
5194 .await?;
5195 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5196 let response = this
5197 .update(&mut cx, |this, cx| {
5198 this.request_lsp(buffer_handle, request, cx)
5199 })
5200 .await?;
5201 this.update(&mut cx, |this, cx| {
5202 Ok(T::response_to_proto(
5203 response,
5204 this,
5205 sender_id,
5206 &buffer_version,
5207 cx,
5208 ))
5209 })
5210 }
5211
5212 async fn handle_get_project_symbols(
5213 this: ModelHandle<Self>,
5214 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5215 _: Arc<Client>,
5216 mut cx: AsyncAppContext,
5217 ) -> Result<proto::GetProjectSymbolsResponse> {
5218 let symbols = this
5219 .update(&mut cx, |this, cx| {
5220 this.symbols(&envelope.payload.query, cx)
5221 })
5222 .await?;
5223
5224 Ok(proto::GetProjectSymbolsResponse {
5225 symbols: symbols.iter().map(serialize_symbol).collect(),
5226 })
5227 }
5228
5229 async fn handle_search_project(
5230 this: ModelHandle<Self>,
5231 envelope: TypedEnvelope<proto::SearchProject>,
5232 _: Arc<Client>,
5233 mut cx: AsyncAppContext,
5234 ) -> Result<proto::SearchProjectResponse> {
5235 let peer_id = envelope.original_sender_id()?;
5236 let query = SearchQuery::from_proto(envelope.payload)?;
5237 let result = this
5238 .update(&mut cx, |this, cx| this.search(query, cx))
5239 .await?;
5240
5241 this.update(&mut cx, |this, cx| {
5242 let mut locations = Vec::new();
5243 for (buffer, ranges) in result {
5244 for range in ranges {
5245 let start = serialize_anchor(&range.start);
5246 let end = serialize_anchor(&range.end);
5247 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5248 locations.push(proto::Location {
5249 buffer: Some(buffer),
5250 start: Some(start),
5251 end: Some(end),
5252 });
5253 }
5254 }
5255 Ok(proto::SearchProjectResponse { locations })
5256 })
5257 }
5258
5259 async fn handle_open_buffer_for_symbol(
5260 this: ModelHandle<Self>,
5261 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5262 _: Arc<Client>,
5263 mut cx: AsyncAppContext,
5264 ) -> Result<proto::OpenBufferForSymbolResponse> {
5265 let peer_id = envelope.original_sender_id()?;
5266 let symbol = envelope
5267 .payload
5268 .symbol
5269 .ok_or_else(|| anyhow!("invalid symbol"))?;
5270 let symbol = this.read_with(&cx, |this, _| {
5271 let symbol = this.deserialize_symbol(symbol)?;
5272 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5273 if signature == symbol.signature {
5274 Ok(symbol)
5275 } else {
5276 Err(anyhow!("invalid symbol signature"))
5277 }
5278 })?;
5279 let buffer = this
5280 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5281 .await?;
5282
5283 Ok(proto::OpenBufferForSymbolResponse {
5284 buffer: Some(this.update(&mut cx, |this, cx| {
5285 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5286 })),
5287 })
5288 }
5289
5290 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5291 let mut hasher = Sha256::new();
5292 hasher.update(worktree_id.to_proto().to_be_bytes());
5293 hasher.update(path.to_string_lossy().as_bytes());
5294 hasher.update(self.nonce.to_be_bytes());
5295 hasher.finalize().as_slice().try_into().unwrap()
5296 }
5297
5298 async fn handle_open_buffer_by_id(
5299 this: ModelHandle<Self>,
5300 envelope: TypedEnvelope<proto::OpenBufferById>,
5301 _: Arc<Client>,
5302 mut cx: AsyncAppContext,
5303 ) -> Result<proto::OpenBufferResponse> {
5304 let peer_id = envelope.original_sender_id()?;
5305 let buffer = this
5306 .update(&mut cx, |this, cx| {
5307 this.open_buffer_by_id(envelope.payload.id, cx)
5308 })
5309 .await?;
5310 this.update(&mut cx, |this, cx| {
5311 Ok(proto::OpenBufferResponse {
5312 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5313 })
5314 })
5315 }
5316
5317 async fn handle_open_buffer_by_path(
5318 this: ModelHandle<Self>,
5319 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5320 _: Arc<Client>,
5321 mut cx: AsyncAppContext,
5322 ) -> Result<proto::OpenBufferResponse> {
5323 let peer_id = envelope.original_sender_id()?;
5324 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5325 let open_buffer = this.update(&mut cx, |this, cx| {
5326 this.open_buffer(
5327 ProjectPath {
5328 worktree_id,
5329 path: PathBuf::from(envelope.payload.path).into(),
5330 },
5331 cx,
5332 )
5333 });
5334
5335 let buffer = open_buffer.await?;
5336 this.update(&mut cx, |this, cx| {
5337 Ok(proto::OpenBufferResponse {
5338 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5339 })
5340 })
5341 }
5342
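// Serializes a `ProjectTransaction` for a peer, pairing each buffer (serialized for that peer)
// with its corresponding transaction.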
5343 fn serialize_project_transaction_for_peer(
5344 &mut self,
5345 project_transaction: ProjectTransaction,
5346 peer_id: PeerId,
5347 cx: &AppContext,
5348 ) -> proto::ProjectTransaction {
5349 let mut serialized_transaction = proto::ProjectTransaction {
5350 buffers: Default::default(),
5351 transactions: Default::default(),
5352 };
5353 for (buffer, transaction) in project_transaction.0 {
5354 serialized_transaction
5355 .buffers
5356 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5357 serialized_transaction
5358 .transactions
5359 .push(language::proto::serialize_transaction(&transaction));
5360 }
5361 serialized_transaction
5362 }
5363
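// Reconstructs a `ProjectTransaction` from a proto message: each buffer is deserialized, the
// edits referenced by its transaction are awaited, and the transaction is optionally pushed onto
// the buffer's undo history.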
5364 fn deserialize_project_transaction(
5365 &mut self,
5366 message: proto::ProjectTransaction,
5367 push_to_history: bool,
5368 cx: &mut ModelContext<Self>,
5369 ) -> Task<Result<ProjectTransaction>> {
5370 cx.spawn(|this, mut cx| async move {
5371 let mut project_transaction = ProjectTransaction::default();
5372 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5373 let buffer = this
5374 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5375 .await?;
5376 let transaction = language::proto::deserialize_transaction(transaction)?;
5377 project_transaction.0.insert(buffer, transaction);
5378 }
5379
5380 for (buffer, transaction) in &project_transaction.0 {
5381 buffer
5382 .update(&mut cx, |buffer, _| {
5383 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5384 })
5385 .await;
5386
5387 if push_to_history {
5388 buffer.update(&mut cx, |buffer, _| {
5389 buffer.push_transaction(transaction.clone(), Instant::now());
5390 });
5391 }
5392 }
5393
5394 Ok(project_transaction)
5395 })
5396 }
5397
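// Serializes a buffer for a given peer: the full buffer state is sent the first time the buffer
// is shared with that peer (tracked in `shared_buffers`), and only the buffer id thereafter.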
5398 fn serialize_buffer_for_peer(
5399 &mut self,
5400 buffer: &ModelHandle<Buffer>,
5401 peer_id: PeerId,
5402 cx: &AppContext,
5403 ) -> proto::Buffer {
5404 let buffer_id = buffer.read(cx).remote_id();
5405 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5406 if shared_buffers.insert(buffer_id) {
5407 proto::Buffer {
5408 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5409 }
5410 } else {
5411 proto::Buffer {
5412 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5413 }
5414 }
5415 }
5416
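// Resolves a proto buffer into a local model. An id-only message waits until the referenced
// buffer has been opened locally, while a full-state message reconstructs the buffer (and its
// file/worktree association) and registers it with the project.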
5417 fn deserialize_buffer(
5418 &mut self,
5419 buffer: proto::Buffer,
5420 cx: &mut ModelContext<Self>,
5421 ) -> Task<Result<ModelHandle<Buffer>>> {
5422 let replica_id = self.replica_id();
5423
5424 let opened_buffer_tx = self.opened_buffer.0.clone();
5425 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5426 cx.spawn(|this, mut cx| async move {
5427 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5428 proto::buffer::Variant::Id(id) => {
5429 let buffer = loop {
5430 let buffer = this.read_with(&cx, |this, cx| {
5431 this.opened_buffers
5432 .get(&id)
5433 .and_then(|buffer| buffer.upgrade(cx))
5434 });
5435 if let Some(buffer) = buffer {
5436 break buffer;
5437 }
5438 opened_buffer_rx
5439 .next()
5440 .await
5441 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5442 };
5443 Ok(buffer)
5444 }
5445 proto::buffer::Variant::State(mut buffer) => {
5446 let mut buffer_worktree = None;
5447 let mut buffer_file = None;
5448 if let Some(file) = buffer.file.take() {
5449 this.read_with(&cx, |this, cx| {
5450 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5451 let worktree =
5452 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5453 anyhow!("no worktree found for id {}", file.worktree_id)
5454 })?;
5455 buffer_file =
5456 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5457 as Arc<dyn language::File>);
5458 buffer_worktree = Some(worktree);
5459 Ok::<_, anyhow::Error>(())
5460 })?;
5461 }
5462
5463 let buffer = cx.add_model(|cx| {
5464 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5465 });
5466
5467 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5468
5469 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5470 Ok(buffer)
5471 }
5472 }
5473 })
5474 }
5475
5476 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5477 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5478 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5479 let start = serialized_symbol
5480 .start
5481 .ok_or_else(|| anyhow!("invalid start"))?;
5482 let end = serialized_symbol
5483 .end
5484 .ok_or_else(|| anyhow!("invalid end"))?;
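// Assumes the proto `kind` field carries the numeric value of an `lsp::SymbolKind`, so the
// transmute below just restores the original discriminant.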
5485 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5486 let path = PathBuf::from(serialized_symbol.path);
5487 let language = self.languages.select_language(&path);
5488 Ok(Symbol {
5489 source_worktree_id,
5490 worktree_id,
5491 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5492 label: language
5493 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5494 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5495 name: serialized_symbol.name,
5496 path,
5497 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5498 kind,
5499 signature: serialized_symbol
5500 .signature
5501 .try_into()
5502 .map_err(|_| anyhow!("invalid signature"))?,
5503 })
5504 }
5505
5506 async fn handle_buffer_saved(
5507 this: ModelHandle<Self>,
5508 envelope: TypedEnvelope<proto::BufferSaved>,
5509 _: Arc<Client>,
5510 mut cx: AsyncAppContext,
5511 ) -> Result<()> {
5512 let version = deserialize_version(envelope.payload.version);
5513 let mtime = envelope
5514 .payload
5515 .mtime
5516 .ok_or_else(|| anyhow!("missing mtime"))?
5517 .into();
5518
5519 this.update(&mut cx, |this, cx| {
5520 let buffer = this
5521 .opened_buffers
5522 .get(&envelope.payload.buffer_id)
5523 .and_then(|buffer| buffer.upgrade(cx));
5524 if let Some(buffer) = buffer {
5525 buffer.update(cx, |buffer, cx| {
5526 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5527 });
5528 }
5529 Ok(())
5530 })
5531 }
5532
5533 async fn handle_buffer_reloaded(
5534 this: ModelHandle<Self>,
5535 envelope: TypedEnvelope<proto::BufferReloaded>,
5536 _: Arc<Client>,
5537 mut cx: AsyncAppContext,
5538 ) -> Result<()> {
5539 let payload = envelope.payload.clone();
5540 let version = deserialize_version(payload.version);
5541 let mtime = payload
5542 .mtime
5543 .ok_or_else(|| anyhow!("missing mtime"))?
5544 .into();
5545 this.update(&mut cx, |this, cx| {
5546 let buffer = this
5547 .opened_buffers
5548 .get(&payload.buffer_id)
5549 .and_then(|buffer| buffer.upgrade(cx));
5550 if let Some(buffer) = buffer {
5551 buffer.update(cx, |buffer, cx| {
5552 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5553 });
5554 }
5555 Ok(())
5556 })
5557 }
5558
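// Fuzzy-matches `query` against the paths of all visible worktrees on the background executor;
// `cancel_flag` lets callers abort a match that has been superseded by a newer query.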
5559 pub fn match_paths<'a>(
5560 &self,
5561 query: &'a str,
5562 include_ignored: bool,
5563 smart_case: bool,
5564 max_results: usize,
5565 cancel_flag: &'a AtomicBool,
5566 cx: &AppContext,
5567 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5568 let worktrees = self
5569 .worktrees(cx)
5570 .filter(|worktree| worktree.read(cx).is_visible())
5571 .collect::<Vec<_>>();
5572 let include_root_name = worktrees.len() > 1;
5573 let candidate_sets = worktrees
5574 .into_iter()
5575 .map(|worktree| CandidateSet {
5576 snapshot: worktree.read(cx).snapshot(),
5577 include_ignored,
5578 include_root_name,
5579 })
5580 .collect::<Vec<_>>();
5581
5582 let background = cx.background().clone();
5583 async move {
5584 fuzzy::match_paths(
5585 candidate_sets.as_slice(),
5586 query,
5587 smart_case,
5588 max_results,
5589 cancel_flag,
5590 background,
5591 )
5592 .await
5593 }
5594 }
5595
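// Converts LSP text edits into anchored buffer edits, resolved against the buffer snapshot that
// matches the edits' document version. Adjacent and newline-separated edits are merged, and
// multi-line replacements are diffed against the old text so that unchanged regions (and any
// anchors within them) are preserved; see the inline comments below for details.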
5596 fn edits_from_lsp(
5597 &mut self,
5598 buffer: &ModelHandle<Buffer>,
5599 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5600 version: Option<i32>,
5601 cx: &mut ModelContext<Self>,
5602 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5603 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5604 cx.background().spawn(async move {
5605 let snapshot = snapshot?;
5606 let mut lsp_edits = lsp_edits
5607 .into_iter()
5608 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5609 .collect::<Vec<_>>();
5610 lsp_edits.sort_by_key(|(range, _)| range.start);
5611
5612 let mut lsp_edits = lsp_edits.into_iter().peekable();
5613 let mut edits = Vec::new();
5614 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5615 // Combine any LSP edits that are adjacent.
5616 //
5617 // Also, combine LSP edits that are separated from each other by only
5618 // a newline. This is important because for some code actions,
5619 // Rust-analyzer rewrites the entire buffer via a series of edits that
5620 // are separated by unchanged newline characters.
5621 //
5622 // In order for the diffing logic below to work properly, any edits that
5623 // cancel each other out must be combined into one.
5624 while let Some((next_range, next_text)) = lsp_edits.peek() {
5625 if next_range.start > range.end {
5626 if next_range.start.row > range.end.row + 1
5627 || next_range.start.column > 0
5628 || snapshot.clip_point_utf16(
5629 PointUtf16::new(range.end.row, u32::MAX),
5630 Bias::Left,
5631 ) > range.end
5632 {
5633 break;
5634 }
5635 new_text.push('\n');
5636 }
5637 range.end = next_range.end;
5638 new_text.push_str(&next_text);
5639 lsp_edits.next();
5640 }
5641
5642 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5643 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5644 {
5645 return Err(anyhow!("invalid edits received from language server"));
5646 }
5647
5648 // For multiline edits, perform a diff of the old and new text so that
5649 // we can identify the changes more precisely, preserving the locations
5650 // of any anchors positioned in the unchanged regions.
5651 if range.end.row > range.start.row {
5652 let mut offset = range.start.to_offset(&snapshot);
5653 let old_text = snapshot.text_for_range(range).collect::<String>();
5654
5655 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5656 let mut moved_since_edit = true;
5657 for change in diff.iter_all_changes() {
5658 let tag = change.tag();
5659 let value = change.value();
5660 match tag {
5661 ChangeTag::Equal => {
5662 offset += value.len();
5663 moved_since_edit = true;
5664 }
5665 ChangeTag::Delete => {
5666 let start = snapshot.anchor_after(offset);
5667 let end = snapshot.anchor_before(offset + value.len());
5668 if moved_since_edit {
5669 edits.push((start..end, String::new()));
5670 } else {
5671 edits.last_mut().unwrap().0.end = end;
5672 }
5673 offset += value.len();
5674 moved_since_edit = false;
5675 }
5676 ChangeTag::Insert => {
5677 if moved_since_edit {
5678 let anchor = snapshot.anchor_after(offset);
5679 edits.push((anchor.clone()..anchor, value.to_string()));
5680 } else {
5681 edits.last_mut().unwrap().1.push_str(value);
5682 }
5683 moved_since_edit = false;
5684 }
5685 }
5686 }
5687 } else if range.end == range.start {
5688 let anchor = snapshot.anchor_after(range.start);
5689 edits.push((anchor.clone()..anchor, new_text));
5690 } else {
5691 let edit_start = snapshot.anchor_after(range.start);
5692 let edit_end = snapshot.anchor_before(range.end);
5693 edits.push((edit_start..edit_end, new_text));
5694 }
5695 }
5696
5697 Ok(edits)
5698 })
5699 }
5700
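    /// Look up the buffer snapshot corresponding to the given LSP document
    /// `version`, pruning snapshots that are more than `OLD_VERSIONS_TO_RETAIN`
    /// versions older than the requested one. When no version is provided, the
    /// buffer's current text snapshot is returned.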
5701 fn buffer_snapshot_for_lsp_version(
5702 &mut self,
5703 buffer: &ModelHandle<Buffer>,
5704 version: Option<i32>,
5705 cx: &AppContext,
5706 ) -> Result<TextBufferSnapshot> {
5707 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5708
5709 if let Some(version) = version {
5710 let buffer_id = buffer.read(cx).remote_id();
5711 let snapshots = self
5712 .buffer_snapshots
5713 .get_mut(&buffer_id)
5714 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5715 let mut found_snapshot = None;
5716 snapshots.retain(|(snapshot_version, snapshot)| {
5717 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5718 false
5719 } else {
5720 if *snapshot_version == version {
5721 found_snapshot = Some(snapshot.clone());
5722 }
5723 true
5724 }
5725 });
5726
5727 found_snapshot.ok_or_else(|| {
5728 anyhow!(
5729 "snapshot not found for buffer {} at version {}",
5730 buffer_id,
5731 version
5732 )
5733 })
5734 } else {
5735            Ok(buffer.read(cx).text_snapshot())
5736 }
5737 }
5738
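    /// Find the running language server (and its adapter) responsible for the
    /// given buffer, keyed by the buffer's worktree and language server name.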
5739 fn language_server_for_buffer(
5740 &self,
5741 buffer: &Buffer,
5742 cx: &AppContext,
5743 ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
5744 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5745 let worktree_id = file.worktree_id(cx);
5746 let key = (worktree_id, language.lsp_adapter()?.name());
5747
5748 if let Some(server_id) = self.language_server_ids.get(&key) {
5749 if let Some(LanguageServerState::Running { adapter, server }) =
5750 self.language_servers.get(&server_id)
5751 {
5752 return Some((adapter, server));
5753 }
5754 }
5755 }
5756
5757 None
5758 }
5759}
5760
5761impl ProjectStore {
5762 pub fn new(db: Arc<Db>) -> Self {
5763 Self {
5764 db,
5765 projects: Default::default(),
5766 }
5767 }
5768
5769 pub fn projects<'a>(
5770 &'a self,
5771 cx: &'a AppContext,
5772 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5773 self.projects
5774 .iter()
5775 .filter_map(|project| project.upgrade(cx))
5776 }
5777
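    // Projects are kept sorted by handle id so that insertion can use a binary
    // search and duplicate handles are skipped.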
5778 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5779 if let Err(ix) = self
5780 .projects
5781 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5782 {
5783 self.projects.insert(ix, project);
5784 }
5785 cx.notify();
5786 }
5787
5788 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5789 let mut did_change = false;
5790 self.projects.retain(|project| {
5791 if project.is_upgradable(cx) {
5792 true
5793 } else {
5794 did_change = true;
5795 false
5796 }
5797 });
5798 if did_change {
5799 cx.notify();
5800 }
5801 }
5802}
5803
5804impl WorktreeHandle {
5805 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5806 match self {
5807 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5808 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5809 }
5810 }
5811}
5812
5813impl OpenBuffer {
5814 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5815 match self {
5816 OpenBuffer::Strong(handle) => Some(handle.clone()),
5817 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5818 OpenBuffer::Loading(_) => None,
5819 }
5820 }
5821}
5822
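/// A set of fuzzy-match candidates drawn from a single worktree snapshot.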
5823struct CandidateSet {
5824 snapshot: Snapshot,
5825 include_ignored: bool,
5826 include_root_name: bool,
5827}
5828
5829impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5830 type Candidates = CandidateSetIter<'a>;
5831
5832 fn id(&self) -> usize {
5833 self.snapshot.id().to_usize()
5834 }
5835
5836 fn len(&self) -> usize {
5837 if self.include_ignored {
5838 self.snapshot.file_count()
5839 } else {
5840 self.snapshot.visible_file_count()
5841 }
5842 }
5843
5844 fn prefix(&self) -> Arc<str> {
5845 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5846 self.snapshot.root_name().into()
5847 } else if self.include_root_name {
5848 format!("{}/", self.snapshot.root_name()).into()
5849 } else {
5850 "".into()
5851 }
5852 }
5853
5854 fn candidates(&'a self, start: usize) -> Self::Candidates {
5855 CandidateSetIter {
5856 traversal: self.snapshot.files(self.include_ignored, start),
5857 }
5858 }
5859}
5860
5861struct CandidateSetIter<'a> {
5862 traversal: Traversal<'a>,
5863}
5864
5865impl<'a> Iterator for CandidateSetIter<'a> {
5866 type Item = PathMatchCandidate<'a>;
5867
5868 fn next(&mut self) -> Option<Self::Item> {
5869 self.traversal.next().map(|entry| {
5870 if let EntryKind::File(char_bag) = entry.kind {
5871 PathMatchCandidate {
5872 path: &entry.path,
5873 char_bag,
5874 }
5875 } else {
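                // The traversal only yields file entries here, so any other
                // entry kind indicates a bug in the snapshot iteration.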
5876 unreachable!()
5877 }
5878 })
5879 }
5880}
5881
5882impl Entity for ProjectStore {
5883 type Event = ();
5884}
5885
5886impl Entity for Project {
5887 type Event = Event;
5888
5889 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5890 self.project_store.update(cx, ProjectStore::prune_projects);
5891
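        // Let the server know this project is going away: unregister a
        // locally-owned project, or leave a remote project we had joined.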
5892 match &self.client_state {
5893 ProjectClientState::Local { remote_id_rx, .. } => {
5894 if let Some(project_id) = *remote_id_rx.borrow() {
5895 self.client
5896 .send(proto::UnregisterProject { project_id })
5897 .log_err();
5898 }
5899 }
5900 ProjectClientState::Remote { remote_id, .. } => {
5901 self.client
5902 .send(proto::LeaveProject {
5903 project_id: *remote_id,
5904 })
5905 .log_err();
5906 }
5907 }
5908 }
5909
5910 fn app_will_quit(
5911 &mut self,
5912 _: &mut MutableAppContext,
5913 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
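        // Shut down every language server before the app exits, waiting for
        // servers that are still starting to finish starting first.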
5914 let shutdown_futures = self
5915 .language_servers
5916 .drain()
5917 .map(|(_, server_state)| async {
5918 match server_state {
5919 LanguageServerState::Running { server, .. } => server.shutdown()?.await,
5920 LanguageServerState::Starting(starting_server) => {
5921 starting_server.await?.shutdown()?.await
5922 }
5923 }
5924 })
5925 .collect::<Vec<_>>();
5926
5927 Some(
5928 async move {
5929 futures::future::join_all(shutdown_futures).await;
5930 }
5931 .boxed(),
5932 )
5933 }
5934}
5935
5936impl Collaborator {
5937 fn from_proto(
5938 message: proto::Collaborator,
5939 user_store: &ModelHandle<UserStore>,
5940 cx: &mut AsyncAppContext,
5941 ) -> impl Future<Output = Result<Self>> {
5942 let user = user_store.update(cx, |user_store, cx| {
5943 user_store.fetch_user(message.user_id, cx)
5944 });
5945
5946 async move {
5947 Ok(Self {
5948 peer_id: PeerId(message.peer_id),
5949 user: user.await?,
5950 replica_id: message.replica_id as ReplicaId,
5951 })
5952 }
5953 }
5954}
5955
5956impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5957 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5958 Self {
5959 worktree_id,
5960 path: path.as_ref().into(),
5961 }
5962 }
5963}
5964
5965impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5966 fn from(options: lsp::CreateFileOptions) -> Self {
5967 Self {
5968 overwrite: options.overwrite.unwrap_or(false),
5969 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5970 }
5971 }
5972}
5973
5974impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5975 fn from(options: lsp::RenameFileOptions) -> Self {
5976 Self {
5977 overwrite: options.overwrite.unwrap_or(false),
5978 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5979 }
5980 }
5981}
5982
5983impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5984 fn from(options: lsp::DeleteFileOptions) -> Self {
5985 Self {
5986 recursive: options.recursive.unwrap_or(false),
5987 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5988 }
5989 }
5990}
5991
5992fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5993 proto::Symbol {
5994 source_worktree_id: symbol.source_worktree_id.to_proto(),
5995 worktree_id: symbol.worktree_id.to_proto(),
5996 language_server_name: symbol.language_server_name.0.to_string(),
5997 name: symbol.name.clone(),
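        // The protobuf `kind` field shares its representation with
        // `lsp::SymbolKind`, so the value is reinterpreted directly; this
        // relies on the two definitions staying in sync.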
5998 kind: unsafe { mem::transmute(symbol.kind) },
5999 path: symbol.path.to_string_lossy().to_string(),
6000 start: Some(proto::Point {
6001 row: symbol.range.start.row,
6002 column: symbol.range.start.column,
6003 }),
6004 end: Some(proto::Point {
6005 row: symbol.range.end.row,
6006 column: symbol.range.end.column,
6007 }),
6008 signature: symbol.signature.to_vec(),
6009 }
6010}
6011
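/// Compute the path to `path` relative to `base`, inserting `..` components
/// where necessary. For example, relativizing `/a/c/d` against `/a/b` yields
/// `../c/d`.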
6012fn relativize_path(base: &Path, path: &Path) -> PathBuf {
6013 let mut path_components = path.components();
6014 let mut base_components = base.components();
6015 let mut components: Vec<Component> = Vec::new();
6016 loop {
6017 match (path_components.next(), base_components.next()) {
6018 (None, None) => break,
6019 (Some(a), None) => {
6020 components.push(a);
6021 components.extend(path_components.by_ref());
6022 break;
6023 }
6024 (None, _) => components.push(Component::ParentDir),
6025 (Some(a), Some(b)) if components.is_empty() && a == b => (),
6026 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
6027 (Some(a), Some(_)) => {
6028 components.push(Component::ParentDir);
6029 for _ in base_components {
6030 components.push(Component::ParentDir);
6031 }
6032 components.push(a);
6033 components.extend(path_components.by_ref());
6034 break;
6035 }
6036 }
6037 }
6038 components.iter().map(|c| c.as_os_str()).collect()
6039}
6040
6041impl Item for Buffer {
6042 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
6043 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
6044 }
6045}
6046
6047#[cfg(test)]
6048mod tests {
6049 use crate::worktree::WorktreeHandle;
6050
6051 use super::{Event, *};
6052 use fs::RealFs;
6053 use futures::{future, StreamExt};
6054 use gpui::{executor::Deterministic, test::subscribe};
6055 use language::{
6056 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
6057 OffsetRangeExt, Point, ToPoint,
6058 };
6059 use lsp::Url;
6060 use serde_json::json;
6061 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
6062 use unindent::Unindent as _;
6063 use util::{assert_set_eq, test::temp_tree};
6064
6065 #[gpui::test]
6066 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
6067 let dir = temp_tree(json!({
6068 "root": {
6069 "apple": "",
6070 "banana": {
6071 "carrot": {
6072 "date": "",
6073 "endive": "",
6074 }
6075 },
6076 "fennel": {
6077 "grape": "",
6078 }
6079 }
6080 }));
6081
6082 let root_link_path = dir.path().join("root_link");
6083 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
6084 unix::fs::symlink(
6085 &dir.path().join("root/fennel"),
6086 &dir.path().join("root/finnochio"),
6087 )
6088 .unwrap();
6089
6090 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
6091
6092 project.read_with(cx, |project, cx| {
6093 let tree = project.worktrees(cx).next().unwrap().read(cx);
6094 assert_eq!(tree.file_count(), 5);
6095 assert_eq!(
6096 tree.inode_for_path("fennel/grape"),
6097 tree.inode_for_path("finnochio/grape")
6098 );
6099 });
6100
6101 let cancel_flag = Default::default();
6102 let results = project
6103 .read_with(cx, |project, cx| {
6104 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
6105 })
6106 .await;
6107 assert_eq!(
6108 results
6109 .into_iter()
6110 .map(|result| result.path)
6111 .collect::<Vec<Arc<Path>>>(),
6112 vec![
6113 PathBuf::from("banana/carrot/date").into(),
6114 PathBuf::from("banana/carrot/endive").into(),
6115 ]
6116 );
6117 }
6118
6119 #[gpui::test]
6120 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
6121 cx.foreground().forbid_parking();
6122
6123 let mut rust_language = Language::new(
6124 LanguageConfig {
6125 name: "Rust".into(),
6126 path_suffixes: vec!["rs".to_string()],
6127 ..Default::default()
6128 },
6129 Some(tree_sitter_rust::language()),
6130 );
6131 let mut json_language = Language::new(
6132 LanguageConfig {
6133 name: "JSON".into(),
6134 path_suffixes: vec!["json".to_string()],
6135 ..Default::default()
6136 },
6137 None,
6138 );
6139 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
6140 name: "the-rust-language-server",
6141 capabilities: lsp::ServerCapabilities {
6142 completion_provider: Some(lsp::CompletionOptions {
6143 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6144 ..Default::default()
6145 }),
6146 ..Default::default()
6147 },
6148 ..Default::default()
6149 });
6150 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6151 name: "the-json-language-server",
6152 capabilities: lsp::ServerCapabilities {
6153 completion_provider: Some(lsp::CompletionOptions {
6154 trigger_characters: Some(vec![":".to_string()]),
6155 ..Default::default()
6156 }),
6157 ..Default::default()
6158 },
6159 ..Default::default()
6160 });
6161
6162 let fs = FakeFs::new(cx.background());
6163 fs.insert_tree(
6164 "/the-root",
6165 json!({
6166 "test.rs": "const A: i32 = 1;",
6167 "test2.rs": "",
6168 "Cargo.toml": "a = 1",
6169 "package.json": "{\"a\": 1}",
6170 }),
6171 )
6172 .await;
6173
6174 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6175 project.update(cx, |project, _| {
6176 project.languages.add(Arc::new(rust_language));
6177 project.languages.add(Arc::new(json_language));
6178 });
6179
6180 // Open a buffer without an associated language server.
6181 let toml_buffer = project
6182 .update(cx, |project, cx| {
6183 project.open_local_buffer("/the-root/Cargo.toml", cx)
6184 })
6185 .await
6186 .unwrap();
6187
6188 // Open a buffer with an associated language server.
6189 let rust_buffer = project
6190 .update(cx, |project, cx| {
6191 project.open_local_buffer("/the-root/test.rs", cx)
6192 })
6193 .await
6194 .unwrap();
6195
6196 // A server is started up, and it is notified about Rust files.
6197 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6198 assert_eq!(
6199 fake_rust_server
6200 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6201 .await
6202 .text_document,
6203 lsp::TextDocumentItem {
6204 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6205 version: 0,
6206 text: "const A: i32 = 1;".to_string(),
6207 language_id: Default::default()
6208 }
6209 );
6210
6211 // The buffer is configured based on the language server's capabilities.
6212 rust_buffer.read_with(cx, |buffer, _| {
6213 assert_eq!(
6214 buffer.completion_triggers(),
6215 &[".".to_string(), "::".to_string()]
6216 );
6217 });
6218 toml_buffer.read_with(cx, |buffer, _| {
6219 assert!(buffer.completion_triggers().is_empty());
6220 });
6221
6222 // Edit a buffer. The changes are reported to the language server.
6223 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6224 assert_eq!(
6225 fake_rust_server
6226 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6227 .await
6228 .text_document,
6229 lsp::VersionedTextDocumentIdentifier::new(
6230 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6231 1
6232 )
6233 );
6234
6235 // Open a third buffer with a different associated language server.
6236 let json_buffer = project
6237 .update(cx, |project, cx| {
6238 project.open_local_buffer("/the-root/package.json", cx)
6239 })
6240 .await
6241 .unwrap();
6242
6243 // A json language server is started up and is only notified about the json buffer.
6244 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6245 assert_eq!(
6246 fake_json_server
6247 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6248 .await
6249 .text_document,
6250 lsp::TextDocumentItem {
6251 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6252 version: 0,
6253 text: "{\"a\": 1}".to_string(),
6254 language_id: Default::default()
6255 }
6256 );
6257
6258 // This buffer is configured based on the second language server's
6259 // capabilities.
6260 json_buffer.read_with(cx, |buffer, _| {
6261 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6262 });
6263
6264 // When opening another buffer whose language server is already running,
6265 // it is also configured based on the existing language server's capabilities.
6266 let rust_buffer2 = project
6267 .update(cx, |project, cx| {
6268 project.open_local_buffer("/the-root/test2.rs", cx)
6269 })
6270 .await
6271 .unwrap();
6272 rust_buffer2.read_with(cx, |buffer, _| {
6273 assert_eq!(
6274 buffer.completion_triggers(),
6275 &[".".to_string(), "::".to_string()]
6276 );
6277 });
6278
6279 // Changes are reported only to servers matching the buffer's language.
6280 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6281 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6282 assert_eq!(
6283 fake_rust_server
6284 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6285 .await
6286 .text_document,
6287 lsp::VersionedTextDocumentIdentifier::new(
6288 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6289 1
6290 )
6291 );
6292
6293 // Save notifications are reported to all servers.
6294 toml_buffer
6295 .update(cx, |buffer, cx| buffer.save(cx))
6296 .await
6297 .unwrap();
6298 assert_eq!(
6299 fake_rust_server
6300 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6301 .await
6302 .text_document,
6303 lsp::TextDocumentIdentifier::new(
6304 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6305 )
6306 );
6307 assert_eq!(
6308 fake_json_server
6309 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6310 .await
6311 .text_document,
6312 lsp::TextDocumentIdentifier::new(
6313 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6314 )
6315 );
6316
6317 // Renames are reported only to servers matching the buffer's language.
6318 fs.rename(
6319 Path::new("/the-root/test2.rs"),
6320 Path::new("/the-root/test3.rs"),
6321 Default::default(),
6322 )
6323 .await
6324 .unwrap();
6325 assert_eq!(
6326 fake_rust_server
6327 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6328 .await
6329 .text_document,
6330 lsp::TextDocumentIdentifier::new(
6331 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6332 ),
6333 );
6334 assert_eq!(
6335 fake_rust_server
6336 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6337 .await
6338 .text_document,
6339 lsp::TextDocumentItem {
6340 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6341 version: 0,
6342 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6343 language_id: Default::default()
6344 },
6345 );
6346
6347 rust_buffer2.update(cx, |buffer, cx| {
6348 buffer.update_diagnostics(
6349 DiagnosticSet::from_sorted_entries(
6350 vec![DiagnosticEntry {
6351 diagnostic: Default::default(),
6352 range: Anchor::MIN..Anchor::MAX,
6353 }],
6354 &buffer.snapshot(),
6355 ),
6356 cx,
6357 );
6358 assert_eq!(
6359 buffer
6360 .snapshot()
6361 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6362 .count(),
6363 1
6364 );
6365 });
6366
6367 // When the rename changes the extension of the file, the buffer gets closed on the old
6368 // language server and gets opened on the new one.
6369 fs.rename(
6370 Path::new("/the-root/test3.rs"),
6371 Path::new("/the-root/test3.json"),
6372 Default::default(),
6373 )
6374 .await
6375 .unwrap();
6376 assert_eq!(
6377 fake_rust_server
6378 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6379 .await
6380 .text_document,
6381 lsp::TextDocumentIdentifier::new(
6382 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6383 ),
6384 );
6385 assert_eq!(
6386 fake_json_server
6387 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6388 .await
6389 .text_document,
6390 lsp::TextDocumentItem {
6391 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6392 version: 0,
6393 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6394 language_id: Default::default()
6395 },
6396 );
6397
6398 // We clear the diagnostics, since the language has changed.
6399 rust_buffer2.read_with(cx, |buffer, _| {
6400 assert_eq!(
6401 buffer
6402 .snapshot()
6403 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6404 .count(),
6405 0
6406 );
6407 });
6408
6409 // The renamed file's version resets after changing language server.
6410 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6411 assert_eq!(
6412 fake_json_server
6413 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6414 .await
6415 .text_document,
6416 lsp::VersionedTextDocumentIdentifier::new(
6417 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6418 1
6419 )
6420 );
6421
6422 // Restart language servers
6423 project.update(cx, |project, cx| {
6424 project.restart_language_servers_for_buffers(
6425 vec![rust_buffer.clone(), json_buffer.clone()],
6426 cx,
6427 );
6428 });
6429
6430 let mut rust_shutdown_requests = fake_rust_server
6431 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6432 let mut json_shutdown_requests = fake_json_server
6433 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6434 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6435
6436 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6437 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6438
6439 // Ensure rust document is reopened in new rust language server
6440 assert_eq!(
6441 fake_rust_server
6442 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6443 .await
6444 .text_document,
6445 lsp::TextDocumentItem {
6446 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6447 version: 1,
6448 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6449 language_id: Default::default()
6450 }
6451 );
6452
6453 // Ensure json documents are reopened in new json language server
6454 assert_set_eq!(
6455 [
6456 fake_json_server
6457 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6458 .await
6459 .text_document,
6460 fake_json_server
6461 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6462 .await
6463 .text_document,
6464 ],
6465 [
6466 lsp::TextDocumentItem {
6467 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6468 version: 0,
6469 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6470 language_id: Default::default()
6471 },
6472 lsp::TextDocumentItem {
6473 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6474 version: 1,
6475 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6476 language_id: Default::default()
6477 }
6478 ]
6479 );
6480
6481 // Close notifications are reported only to servers matching the buffer's language.
6482 cx.update(|_| drop(json_buffer));
6483 let close_message = lsp::DidCloseTextDocumentParams {
6484 text_document: lsp::TextDocumentIdentifier::new(
6485 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6486 ),
6487 };
6488 assert_eq!(
6489 fake_json_server
6490 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6491 .await,
6492 close_message,
6493 );
6494 }
6495
6496 #[gpui::test]
6497 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6498 cx.foreground().forbid_parking();
6499
6500 let fs = FakeFs::new(cx.background());
6501 fs.insert_tree(
6502 "/dir",
6503 json!({
6504 "a.rs": "let a = 1;",
6505 "b.rs": "let b = 2;"
6506 }),
6507 )
6508 .await;
6509
6510 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6511
6512 let buffer_a = project
6513 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6514 .await
6515 .unwrap();
6516 let buffer_b = project
6517 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6518 .await
6519 .unwrap();
6520
6521 project.update(cx, |project, cx| {
6522 project
6523 .update_diagnostics(
6524 0,
6525 lsp::PublishDiagnosticsParams {
6526 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6527 version: None,
6528 diagnostics: vec![lsp::Diagnostic {
6529 range: lsp::Range::new(
6530 lsp::Position::new(0, 4),
6531 lsp::Position::new(0, 5),
6532 ),
6533 severity: Some(lsp::DiagnosticSeverity::ERROR),
6534 message: "error 1".to_string(),
6535 ..Default::default()
6536 }],
6537 },
6538 &[],
6539 cx,
6540 )
6541 .unwrap();
6542 project
6543 .update_diagnostics(
6544 0,
6545 lsp::PublishDiagnosticsParams {
6546 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6547 version: None,
6548 diagnostics: vec![lsp::Diagnostic {
6549 range: lsp::Range::new(
6550 lsp::Position::new(0, 4),
6551 lsp::Position::new(0, 5),
6552 ),
6553 severity: Some(lsp::DiagnosticSeverity::WARNING),
6554 message: "error 2".to_string(),
6555 ..Default::default()
6556 }],
6557 },
6558 &[],
6559 cx,
6560 )
6561 .unwrap();
6562 });
6563
6564 buffer_a.read_with(cx, |buffer, _| {
6565 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6566 assert_eq!(
6567 chunks
6568 .iter()
6569 .map(|(s, d)| (s.as_str(), *d))
6570 .collect::<Vec<_>>(),
6571 &[
6572 ("let ", None),
6573 ("a", Some(DiagnosticSeverity::ERROR)),
6574 (" = 1;", None),
6575 ]
6576 );
6577 });
6578 buffer_b.read_with(cx, |buffer, _| {
6579 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6580 assert_eq!(
6581 chunks
6582 .iter()
6583 .map(|(s, d)| (s.as_str(), *d))
6584 .collect::<Vec<_>>(),
6585 &[
6586 ("let ", None),
6587 ("b", Some(DiagnosticSeverity::WARNING)),
6588 (" = 2;", None),
6589 ]
6590 );
6591 });
6592 }
6593
6594 #[gpui::test]
6595 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6596 cx.foreground().forbid_parking();
6597
6598 let fs = FakeFs::new(cx.background());
6599 fs.insert_tree(
6600 "/root",
6601 json!({
6602 "dir": {
6603 "a.rs": "let a = 1;",
6604 },
6605 "other.rs": "let b = c;"
6606 }),
6607 )
6608 .await;
6609
6610 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6611
6612 let (worktree, _) = project
6613 .update(cx, |project, cx| {
6614 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6615 })
6616 .await
6617 .unwrap();
6618 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6619
6620 project.update(cx, |project, cx| {
6621 project
6622 .update_diagnostics(
6623 0,
6624 lsp::PublishDiagnosticsParams {
6625 uri: Url::from_file_path("/root/other.rs").unwrap(),
6626 version: None,
6627 diagnostics: vec![lsp::Diagnostic {
6628 range: lsp::Range::new(
6629 lsp::Position::new(0, 8),
6630 lsp::Position::new(0, 9),
6631 ),
6632 severity: Some(lsp::DiagnosticSeverity::ERROR),
6633 message: "unknown variable 'c'".to_string(),
6634 ..Default::default()
6635 }],
6636 },
6637 &[],
6638 cx,
6639 )
6640 .unwrap();
6641 });
6642
6643 let buffer = project
6644 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6645 .await
6646 .unwrap();
6647 buffer.read_with(cx, |buffer, _| {
6648 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6649 assert_eq!(
6650 chunks
6651 .iter()
6652 .map(|(s, d)| (s.as_str(), *d))
6653 .collect::<Vec<_>>(),
6654 &[
6655 ("let b = ", None),
6656 ("c", Some(DiagnosticSeverity::ERROR)),
6657 (";", None),
6658 ]
6659 );
6660 });
6661
6662 project.read_with(cx, |project, cx| {
6663 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6664 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6665 });
6666 }
6667
6668 #[gpui::test]
6669 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6670 cx.foreground().forbid_parking();
6671
6672 let progress_token = "the-progress-token";
6673 let mut language = Language::new(
6674 LanguageConfig {
6675 name: "Rust".into(),
6676 path_suffixes: vec!["rs".to_string()],
6677 ..Default::default()
6678 },
6679 Some(tree_sitter_rust::language()),
6680 );
6681 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6682 disk_based_diagnostics_progress_token: Some(progress_token),
6683 disk_based_diagnostics_sources: &["disk"],
6684 ..Default::default()
6685 });
6686
6687 let fs = FakeFs::new(cx.background());
6688 fs.insert_tree(
6689 "/dir",
6690 json!({
6691 "a.rs": "fn a() { A }",
6692 "b.rs": "const y: i32 = 1",
6693 }),
6694 )
6695 .await;
6696
6697 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6698 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6699 let worktree_id =
6700 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6701
6702 // Cause worktree to start the fake language server
6703 let _buffer = project
6704 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6705 .await
6706 .unwrap();
6707
6708 let mut events = subscribe(&project, cx);
6709
6710 let fake_server = fake_servers.next().await.unwrap();
6711 fake_server.start_progress(progress_token).await;
6712 assert_eq!(
6713 events.next().await.unwrap(),
6714 Event::DiskBasedDiagnosticsStarted {
6715 language_server_id: 0,
6716 }
6717 );
6718
6719 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6720 lsp::PublishDiagnosticsParams {
6721 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6722 version: None,
6723 diagnostics: vec![lsp::Diagnostic {
6724 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6725 severity: Some(lsp::DiagnosticSeverity::ERROR),
6726 message: "undefined variable 'A'".to_string(),
6727 ..Default::default()
6728 }],
6729 },
6730 );
6731 assert_eq!(
6732 events.next().await.unwrap(),
6733 Event::DiagnosticsUpdated {
6734 language_server_id: 0,
6735 path: (worktree_id, Path::new("a.rs")).into()
6736 }
6737 );
6738
6739 fake_server.end_progress(progress_token);
6740 assert_eq!(
6741 events.next().await.unwrap(),
6742 Event::DiskBasedDiagnosticsFinished {
6743 language_server_id: 0
6744 }
6745 );
6746
6747 let buffer = project
6748 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6749 .await
6750 .unwrap();
6751
6752 buffer.read_with(cx, |buffer, _| {
6753 let snapshot = buffer.snapshot();
6754 let diagnostics = snapshot
6755 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6756 .collect::<Vec<_>>();
6757 assert_eq!(
6758 diagnostics,
6759 &[DiagnosticEntry {
6760 range: Point::new(0, 9)..Point::new(0, 10),
6761 diagnostic: Diagnostic {
6762 severity: lsp::DiagnosticSeverity::ERROR,
6763 message: "undefined variable 'A'".to_string(),
6764 group_id: 0,
6765 is_primary: true,
6766 ..Default::default()
6767 }
6768 }]
6769 )
6770 });
6771
6772 // Ensure publishing empty diagnostics twice only results in one update event.
6773 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6774 lsp::PublishDiagnosticsParams {
6775 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6776 version: None,
6777 diagnostics: Default::default(),
6778 },
6779 );
6780 assert_eq!(
6781 events.next().await.unwrap(),
6782 Event::DiagnosticsUpdated {
6783 language_server_id: 0,
6784 path: (worktree_id, Path::new("a.rs")).into()
6785 }
6786 );
6787
6788 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6789 lsp::PublishDiagnosticsParams {
6790 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6791 version: None,
6792 diagnostics: Default::default(),
6793 },
6794 );
6795 cx.foreground().run_until_parked();
6796 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6797 }
6798
6799 #[gpui::test]
6800 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6801 cx.foreground().forbid_parking();
6802
6803 let progress_token = "the-progress-token";
6804 let mut language = Language::new(
6805 LanguageConfig {
6806 path_suffixes: vec!["rs".to_string()],
6807 ..Default::default()
6808 },
6809 None,
6810 );
6811 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6812 disk_based_diagnostics_sources: &["disk"],
6813 disk_based_diagnostics_progress_token: Some(progress_token),
6814 ..Default::default()
6815 });
6816
6817 let fs = FakeFs::new(cx.background());
6818 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6819
6820 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6821 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6822
6823 let buffer = project
6824 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6825 .await
6826 .unwrap();
6827
6828 // Simulate diagnostics starting to update.
6829 let fake_server = fake_servers.next().await.unwrap();
6830 fake_server.start_progress(progress_token).await;
6831
6832 // Restart the server before the diagnostics finish updating.
6833 project.update(cx, |project, cx| {
6834 project.restart_language_servers_for_buffers([buffer], cx);
6835 });
6836 let mut events = subscribe(&project, cx);
6837
6838 // Simulate the newly started server sending more diagnostics.
6839 let fake_server = fake_servers.next().await.unwrap();
6840 fake_server.start_progress(progress_token).await;
6841 assert_eq!(
6842 events.next().await.unwrap(),
6843 Event::DiskBasedDiagnosticsStarted {
6844 language_server_id: 1
6845 }
6846 );
6847 project.read_with(cx, |project, _| {
6848 assert_eq!(
6849 project
6850 .language_servers_running_disk_based_diagnostics()
6851 .collect::<Vec<_>>(),
6852 [1]
6853 );
6854 });
6855
6856 // All diagnostics are considered done, despite the old server's diagnostic
6857 // task never completing.
6858 fake_server.end_progress(progress_token);
6859 assert_eq!(
6860 events.next().await.unwrap(),
6861 Event::DiskBasedDiagnosticsFinished {
6862 language_server_id: 1
6863 }
6864 );
6865 project.read_with(cx, |project, _| {
6866 assert_eq!(
6867 project
6868 .language_servers_running_disk_based_diagnostics()
6869 .collect::<Vec<_>>(),
6870 [0; 0]
6871 );
6872 });
6873 }
6874
6875 #[gpui::test]
6876 async fn test_toggling_enable_language_server(
6877 deterministic: Arc<Deterministic>,
6878 cx: &mut gpui::TestAppContext,
6879 ) {
6880 deterministic.forbid_parking();
6881
6882 let mut rust = Language::new(
6883 LanguageConfig {
6884 name: Arc::from("Rust"),
6885 path_suffixes: vec!["rs".to_string()],
6886 ..Default::default()
6887 },
6888 None,
6889 );
6890 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6891 name: "rust-lsp",
6892 ..Default::default()
6893 });
6894 let mut js = Language::new(
6895 LanguageConfig {
6896 name: Arc::from("JavaScript"),
6897 path_suffixes: vec!["js".to_string()],
6898 ..Default::default()
6899 },
6900 None,
6901 );
6902 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6903 name: "js-lsp",
6904 ..Default::default()
6905 });
6906
6907 let fs = FakeFs::new(cx.background());
6908 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6909 .await;
6910
6911 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6912 project.update(cx, |project, _| {
6913 project.languages.add(Arc::new(rust));
6914 project.languages.add(Arc::new(js));
6915 });
6916
6917 let _rs_buffer = project
6918 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6919 .await
6920 .unwrap();
6921 let _js_buffer = project
6922 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6923 .await
6924 .unwrap();
6925
6926 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6927 assert_eq!(
6928 fake_rust_server_1
6929 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6930 .await
6931 .text_document
6932 .uri
6933 .as_str(),
6934 "file:///dir/a.rs"
6935 );
6936
6937 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6938 assert_eq!(
6939 fake_js_server
6940 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6941 .await
6942 .text_document
6943 .uri
6944 .as_str(),
6945 "file:///dir/b.js"
6946 );
6947
6948 // Disable Rust language server, ensuring only that server gets stopped.
6949 cx.update(|cx| {
6950 cx.update_global(|settings: &mut Settings, _| {
6951 settings.language_overrides.insert(
6952 Arc::from("Rust"),
6953 settings::LanguageSettings {
6954 enable_language_server: Some(false),
6955 ..Default::default()
6956 },
6957 );
6958 })
6959 });
6960 fake_rust_server_1
6961 .receive_notification::<lsp::notification::Exit>()
6962 .await;
6963
6964 // Enable Rust and disable JavaScript language servers, ensuring that the
6965 // former gets started again and that the latter stops.
6966 cx.update(|cx| {
6967 cx.update_global(|settings: &mut Settings, _| {
6968 settings.language_overrides.insert(
6969 Arc::from("Rust"),
6970 settings::LanguageSettings {
6971 enable_language_server: Some(true),
6972 ..Default::default()
6973 },
6974 );
6975 settings.language_overrides.insert(
6976 Arc::from("JavaScript"),
6977 settings::LanguageSettings {
6978 enable_language_server: Some(false),
6979 ..Default::default()
6980 },
6981 );
6982 })
6983 });
6984 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6985 assert_eq!(
6986 fake_rust_server_2
6987 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6988 .await
6989 .text_document
6990 .uri
6991 .as_str(),
6992 "file:///dir/a.rs"
6993 );
6994 fake_js_server
6995 .receive_notification::<lsp::notification::Exit>()
6996 .await;
6997 }
6998
6999 #[gpui::test]
7000 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
7001 cx.foreground().forbid_parking();
7002
7003 let mut language = Language::new(
7004 LanguageConfig {
7005 name: "Rust".into(),
7006 path_suffixes: vec!["rs".to_string()],
7007 ..Default::default()
7008 },
7009 Some(tree_sitter_rust::language()),
7010 );
7011 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7012 disk_based_diagnostics_sources: &["disk"],
7013 ..Default::default()
7014 });
7015
7016 let text = "
7017 fn a() { A }
7018 fn b() { BB }
7019 fn c() { CCC }
7020 "
7021 .unindent();
7022
7023 let fs = FakeFs::new(cx.background());
7024 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7025
7026 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7027 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7028
7029 let buffer = project
7030 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7031 .await
7032 .unwrap();
7033
7034 let mut fake_server = fake_servers.next().await.unwrap();
7035 let open_notification = fake_server
7036 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7037 .await;
7038
7039 // Edit the buffer, moving the content down
7040 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
7041 let change_notification_1 = fake_server
7042 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7043 .await;
7044 assert!(
7045 change_notification_1.text_document.version > open_notification.text_document.version
7046 );
7047
7048 // Report some diagnostics for the initial version of the buffer
7049 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7050 lsp::PublishDiagnosticsParams {
7051 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7052 version: Some(open_notification.text_document.version),
7053 diagnostics: vec![
7054 lsp::Diagnostic {
7055 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7056 severity: Some(DiagnosticSeverity::ERROR),
7057 message: "undefined variable 'A'".to_string(),
7058 source: Some("disk".to_string()),
7059 ..Default::default()
7060 },
7061 lsp::Diagnostic {
7062 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7063 severity: Some(DiagnosticSeverity::ERROR),
7064 message: "undefined variable 'BB'".to_string(),
7065 source: Some("disk".to_string()),
7066 ..Default::default()
7067 },
7068 lsp::Diagnostic {
7069 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
7070 severity: Some(DiagnosticSeverity::ERROR),
7071 source: Some("disk".to_string()),
7072 message: "undefined variable 'CCC'".to_string(),
7073 ..Default::default()
7074 },
7075 ],
7076 },
7077 );
7078
7079 // The diagnostics have moved down since they were created.
7080 buffer.next_notification(cx).await;
7081 buffer.read_with(cx, |buffer, _| {
7082 assert_eq!(
7083 buffer
7084 .snapshot()
7085 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
7086 .collect::<Vec<_>>(),
7087 &[
7088 DiagnosticEntry {
7089 range: Point::new(3, 9)..Point::new(3, 11),
7090 diagnostic: Diagnostic {
7091 severity: DiagnosticSeverity::ERROR,
7092 message: "undefined variable 'BB'".to_string(),
7093 is_disk_based: true,
7094 group_id: 1,
7095 is_primary: true,
7096 ..Default::default()
7097 },
7098 },
7099 DiagnosticEntry {
7100 range: Point::new(4, 9)..Point::new(4, 12),
7101 diagnostic: Diagnostic {
7102 severity: DiagnosticSeverity::ERROR,
7103 message: "undefined variable 'CCC'".to_string(),
7104 is_disk_based: true,
7105 group_id: 2,
7106 is_primary: true,
7107 ..Default::default()
7108 }
7109 }
7110 ]
7111 );
7112 assert_eq!(
7113 chunks_with_diagnostics(buffer, 0..buffer.len()),
7114 [
7115 ("\n\nfn a() { ".to_string(), None),
7116 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7117 (" }\nfn b() { ".to_string(), None),
7118 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
7119 (" }\nfn c() { ".to_string(), None),
7120 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
7121 (" }\n".to_string(), None),
7122 ]
7123 );
7124 assert_eq!(
7125 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
7126 [
7127 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
7128 (" }\nfn c() { ".to_string(), None),
7129 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
7130 ]
7131 );
7132 });
7133
7134 // Ensure overlapping diagnostics are highlighted correctly.
7135 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7136 lsp::PublishDiagnosticsParams {
7137 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7138 version: Some(open_notification.text_document.version),
7139 diagnostics: vec![
7140 lsp::Diagnostic {
7141 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7142 severity: Some(DiagnosticSeverity::ERROR),
7143 message: "undefined variable 'A'".to_string(),
7144 source: Some("disk".to_string()),
7145 ..Default::default()
7146 },
7147 lsp::Diagnostic {
7148 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7149 severity: Some(DiagnosticSeverity::WARNING),
7150 message: "unreachable statement".to_string(),
7151 source: Some("disk".to_string()),
7152 ..Default::default()
7153 },
7154 ],
7155 },
7156 );
7157
7158 buffer.next_notification(cx).await;
7159 buffer.read_with(cx, |buffer, _| {
7160 assert_eq!(
7161 buffer
7162 .snapshot()
7163 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7164 .collect::<Vec<_>>(),
7165 &[
7166 DiagnosticEntry {
7167 range: Point::new(2, 9)..Point::new(2, 12),
7168 diagnostic: Diagnostic {
7169 severity: DiagnosticSeverity::WARNING,
7170 message: "unreachable statement".to_string(),
7171 is_disk_based: true,
7172 group_id: 4,
7173 is_primary: true,
7174 ..Default::default()
7175 }
7176 },
7177 DiagnosticEntry {
7178 range: Point::new(2, 9)..Point::new(2, 10),
7179 diagnostic: Diagnostic {
7180 severity: DiagnosticSeverity::ERROR,
7181 message: "undefined variable 'A'".to_string(),
7182 is_disk_based: true,
7183 group_id: 3,
7184 is_primary: true,
7185 ..Default::default()
7186 },
7187 }
7188 ]
7189 );
7190 assert_eq!(
7191 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7192 [
7193 ("fn a() { ".to_string(), None),
7194 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7195 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7196 ("\n".to_string(), None),
7197 ]
7198 );
7199 assert_eq!(
7200 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7201 [
7202 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7203 ("\n".to_string(), None),
7204 ]
7205 );
7206 });
7207
7208 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7209 // changes since the last save.
7210 buffer.update(cx, |buffer, cx| {
7211 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7212 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7213 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7214 });
7215 let change_notification_2 = fake_server
7216 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7217 .await;
7218 assert!(
7219 change_notification_2.text_document.version
7220 > change_notification_1.text_document.version
7221 );
7222
7223 // Handle out-of-order diagnostics
7224 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7225 lsp::PublishDiagnosticsParams {
7226 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7227 version: Some(change_notification_2.text_document.version),
7228 diagnostics: vec![
7229 lsp::Diagnostic {
7230 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7231 severity: Some(DiagnosticSeverity::ERROR),
7232 message: "undefined variable 'BB'".to_string(),
7233 source: Some("disk".to_string()),
7234 ..Default::default()
7235 },
7236 lsp::Diagnostic {
7237 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7238 severity: Some(DiagnosticSeverity::WARNING),
7239 message: "undefined variable 'A'".to_string(),
7240 source: Some("disk".to_string()),
7241 ..Default::default()
7242 },
7243 ],
7244 },
7245 );
7246
7247 buffer.next_notification(cx).await;
7248 buffer.read_with(cx, |buffer, _| {
7249 assert_eq!(
7250 buffer
7251 .snapshot()
7252 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7253 .collect::<Vec<_>>(),
7254 &[
7255 DiagnosticEntry {
7256 range: Point::new(2, 21)..Point::new(2, 22),
7257 diagnostic: Diagnostic {
7258 severity: DiagnosticSeverity::WARNING,
7259 message: "undefined variable 'A'".to_string(),
7260 is_disk_based: true,
7261 group_id: 6,
7262 is_primary: true,
7263 ..Default::default()
7264 }
7265 },
7266 DiagnosticEntry {
7267 range: Point::new(3, 9)..Point::new(3, 14),
7268 diagnostic: Diagnostic {
7269 severity: DiagnosticSeverity::ERROR,
7270 message: "undefined variable 'BB'".to_string(),
7271 is_disk_based: true,
7272 group_id: 5,
7273 is_primary: true,
7274 ..Default::default()
7275 },
7276 }
7277 ]
7278 );
7279 });
7280 }
7281
7282 #[gpui::test]
7283 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7284 cx.foreground().forbid_parking();
7285
7286 let text = concat!(
7287 "let one = ;\n", //
7288 "let two = \n",
7289 "let three = 3;\n",
7290 );
7291
7292 let fs = FakeFs::new(cx.background());
7293 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7294
7295 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7296 let buffer = project
7297 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7298 .await
7299 .unwrap();
7300
7301 project.update(cx, |project, cx| {
7302 project
7303 .update_buffer_diagnostics(
7304 &buffer,
7305 vec![
7306 DiagnosticEntry {
7307 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7308 diagnostic: Diagnostic {
7309 severity: DiagnosticSeverity::ERROR,
7310 message: "syntax error 1".to_string(),
7311 ..Default::default()
7312 },
7313 },
7314 DiagnosticEntry {
7315 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7316 diagnostic: Diagnostic {
7317 severity: DiagnosticSeverity::ERROR,
7318 message: "syntax error 2".to_string(),
7319 ..Default::default()
7320 },
7321 },
7322 ],
7323 None,
7324 cx,
7325 )
7326 .unwrap();
7327 });
7328
7329 // An empty range is extended forward to include the following character.
7330 // At the end of a line, an empty range is extended backward to include
7331 // the preceding character.
7332 buffer.read_with(cx, |buffer, _| {
7333 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7334 assert_eq!(
7335 chunks
7336 .iter()
7337 .map(|(s, d)| (s.as_str(), *d))
7338 .collect::<Vec<_>>(),
7339 &[
7340 ("let one = ", None),
7341 (";", Some(DiagnosticSeverity::ERROR)),
7342 ("\nlet two =", None),
7343 (" ", Some(DiagnosticSeverity::ERROR)),
7344 ("\nlet three = 3;\n", None)
7345 ]
7346 );
7347 });
7348 }
7349
7350 #[gpui::test]
7351 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7352 cx.foreground().forbid_parking();
7353
7354 let mut language = Language::new(
7355 LanguageConfig {
7356 name: "Rust".into(),
7357 path_suffixes: vec!["rs".to_string()],
7358 ..Default::default()
7359 },
7360 Some(tree_sitter_rust::language()),
7361 );
7362 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7363
7364 let text = "
7365 fn a() {
7366 f1();
7367 }
7368 fn b() {
7369 f2();
7370 }
7371 fn c() {
7372 f3();
7373 }
7374 "
7375 .unindent();
7376
7377 let fs = FakeFs::new(cx.background());
7378 fs.insert_tree(
7379 "/dir",
7380 json!({
7381 "a.rs": text.clone(),
7382 }),
7383 )
7384 .await;
7385
7386 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7387 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7388 let buffer = project
7389 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7390 .await
7391 .unwrap();
7392
7393 let mut fake_server = fake_servers.next().await.unwrap();
7394 let lsp_document_version = fake_server
7395 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7396 .await
7397 .text_document
7398 .version;
7399
7400 // Simulate editing the buffer after the language server computes some edits.
7401 buffer.update(cx, |buffer, cx| {
7402 buffer.edit(
7403 [(
7404 Point::new(0, 0)..Point::new(0, 0),
7405 "// above first function\n",
7406 )],
7407 cx,
7408 );
7409 buffer.edit(
7410 [(
7411 Point::new(2, 0)..Point::new(2, 0),
7412 " // inside first function\n",
7413 )],
7414 cx,
7415 );
7416 buffer.edit(
7417 [(
7418 Point::new(6, 4)..Point::new(6, 4),
7419 "// inside second function ",
7420 )],
7421 cx,
7422 );
7423
7424 assert_eq!(
7425 buffer.text(),
7426 "
7427 // above first function
7428 fn a() {
7429 // inside first function
7430 f1();
7431 }
7432 fn b() {
7433 // inside second function f2();
7434 }
7435 fn c() {
7436 f3();
7437 }
7438 "
7439 .unindent()
7440 );
7441 });
7442
7443 let edits = project
7444 .update(cx, |project, cx| {
7445 project.edits_from_lsp(
7446 &buffer,
7447 vec![
7448 // replace body of first function
7449 lsp::TextEdit {
7450 range: lsp::Range::new(
7451 lsp::Position::new(0, 0),
7452 lsp::Position::new(3, 0),
7453 ),
7454 new_text: "
7455 fn a() {
7456 f10();
7457 }
7458 "
7459 .unindent(),
7460 },
7461 // edit inside second function
7462 lsp::TextEdit {
7463 range: lsp::Range::new(
7464 lsp::Position::new(4, 6),
7465 lsp::Position::new(4, 6),
7466 ),
7467 new_text: "00".into(),
7468 },
7469 // edit inside third function via two distinct edits
7470 lsp::TextEdit {
7471 range: lsp::Range::new(
7472 lsp::Position::new(7, 5),
7473 lsp::Position::new(7, 5),
7474 ),
7475 new_text: "4000".into(),
7476 },
7477 lsp::TextEdit {
7478 range: lsp::Range::new(
7479 lsp::Position::new(7, 5),
7480 lsp::Position::new(7, 6),
7481 ),
7482 new_text: "".into(),
7483 },
7484 ],
7485 Some(lsp_document_version),
7486 cx,
7487 )
7488 })
7489 .await
7490 .unwrap();
7491
7492 buffer.update(cx, |buffer, cx| {
7493 for (range, new_text) in edits {
7494 buffer.edit([(range, new_text)], cx);
7495 }
7496 assert_eq!(
7497 buffer.text(),
7498 "
7499 // above first function
7500 fn a() {
7501 // inside first function
7502 f10();
7503 }
7504 fn b() {
7505 // inside second function f200();
7506 }
7507 fn c() {
7508 f4000();
7509 }
7510 "
7511 .unindent()
7512 );
7513 });
7514 }
7515
7516 #[gpui::test]
7517 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7518 cx.foreground().forbid_parking();
7519
7520 let text = "
7521 use a::b;
7522 use a::c;
7523
7524 fn f() {
7525 b();
7526 c();
7527 }
7528 "
7529 .unindent();
7530
7531 let fs = FakeFs::new(cx.background());
7532 fs.insert_tree(
7533 "/dir",
7534 json!({
7535 "a.rs": text.clone(),
7536 }),
7537 )
7538 .await;
7539
7540 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7541 let buffer = project
7542 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7543 .await
7544 .unwrap();
7545
7546 // Simulate the language server sending us a small edit in the form of a very large diff.
7547 // Rust-analyzer does this when performing a merge-imports code action.
7548 let edits = project
7549 .update(cx, |project, cx| {
7550 project.edits_from_lsp(
7551 &buffer,
7552 [
7553 // Replace the first use statement without editing the semicolon.
7554 lsp::TextEdit {
7555 range: lsp::Range::new(
7556 lsp::Position::new(0, 4),
7557 lsp::Position::new(0, 8),
7558 ),
7559 new_text: "a::{b, c}".into(),
7560 },
7561 // Reinsert the remainder of the file between the semicolon and the final
7562 // newline of the file.
7563 lsp::TextEdit {
7564 range: lsp::Range::new(
7565 lsp::Position::new(0, 9),
7566 lsp::Position::new(0, 9),
7567 ),
7568 new_text: "\n\n".into(),
7569 },
7570 lsp::TextEdit {
7571 range: lsp::Range::new(
7572 lsp::Position::new(0, 9),
7573 lsp::Position::new(0, 9),
7574 ),
7575 new_text: "
7576 fn f() {
7577 b();
7578 c();
7579 }"
7580 .unindent(),
7581 },
7582 // Delete everything after the first newline of the file.
7583 lsp::TextEdit {
7584 range: lsp::Range::new(
7585 lsp::Position::new(1, 0),
7586 lsp::Position::new(7, 0),
7587 ),
7588 new_text: "".into(),
7589 },
7590 ],
7591 None,
7592 cx,
7593 )
7594 })
7595 .await
7596 .unwrap();
7597
7598 buffer.update(cx, |buffer, cx| {
7599 let edits = edits
7600 .into_iter()
7601 .map(|(range, text)| {
7602 (
7603 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7604 text,
7605 )
7606 })
7607 .collect::<Vec<_>>();
7608
7609 assert_eq!(
7610 edits,
7611 [
7612 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7613 (Point::new(1, 0)..Point::new(2, 0), "".into())
7614 ]
7615 );
7616
7617 for (range, new_text) in edits {
7618 buffer.edit([(range, new_text)], cx);
7619 }
7620 assert_eq!(
7621 buffer.text(),
7622 "
7623 use a::{b, c};
7624
7625 fn f() {
7626 b();
7627 c();
7628 }
7629 "
7630 .unindent()
7631 );
7632 });
7633 }
7634
7635 #[gpui::test]
7636 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7637 cx.foreground().forbid_parking();
7638
7639 let text = "
7640 use a::b;
7641 use a::c;
7642
7643 fn f() {
7644 b();
7645 c();
7646 }
7647 "
7648 .unindent();
7649
7650 let fs = FakeFs::new(cx.background());
7651 fs.insert_tree(
7652 "/dir",
7653 json!({
7654 "a.rs": text.clone(),
7655 }),
7656 )
7657 .await;
7658
7659 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7660 let buffer = project
7661 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7662 .await
7663 .unwrap();
7664
7665 // Simulate the language server sending us edits in a non-ordered fashion,
7666 // with ranges sometimes being inverted.
7667 let edits = project
7668 .update(cx, |project, cx| {
7669 project.edits_from_lsp(
7670 &buffer,
7671 [
7672 lsp::TextEdit {
7673 range: lsp::Range::new(
7674 lsp::Position::new(0, 9),
7675 lsp::Position::new(0, 9),
7676 ),
7677 new_text: "\n\n".into(),
7678 },
7679 lsp::TextEdit {
7680 range: lsp::Range::new(
7681 lsp::Position::new(0, 8),
7682 lsp::Position::new(0, 4),
7683 ),
7684 new_text: "a::{b, c}".into(),
7685 },
7686 lsp::TextEdit {
7687 range: lsp::Range::new(
7688 lsp::Position::new(1, 0),
7689 lsp::Position::new(7, 0),
7690 ),
7691 new_text: "".into(),
7692 },
7693 lsp::TextEdit {
7694 range: lsp::Range::new(
7695 lsp::Position::new(0, 9),
7696 lsp::Position::new(0, 9),
7697 ),
7698 new_text: "
7699 fn f() {
7700 b();
7701 c();
7702 }"
7703 .unindent(),
7704 },
7705 ],
7706 None,
7707 cx,
7708 )
7709 })
7710 .await
7711 .unwrap();
7712
7713 buffer.update(cx, |buffer, cx| {
7714 let edits = edits
7715 .into_iter()
7716 .map(|(range, text)| {
7717 (
7718 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7719 text,
7720 )
7721 })
7722 .collect::<Vec<_>>();
7723
7724 assert_eq!(
7725 edits,
7726 [
7727 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7728 (Point::new(1, 0)..Point::new(2, 0), "".into())
7729 ]
7730 );
7731
7732 for (range, new_text) in edits {
7733 buffer.edit([(range, new_text)], cx);
7734 }
7735 assert_eq!(
7736 buffer.text(),
7737 "
7738 use a::{b, c};
7739
7740 fn f() {
7741 b();
7742 c();
7743 }
7744 "
7745 .unindent()
7746 );
7747 });
7748 }
7749
7750 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7751 buffer: &Buffer,
7752 range: Range<T>,
7753 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7754 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7755 for chunk in buffer.snapshot().chunks(range, true) {
7756 if chunks.last().map_or(false, |prev_chunk| {
7757 prev_chunk.1 == chunk.diagnostic_severity
7758 }) {
7759 chunks.last_mut().unwrap().0.push_str(chunk.text);
7760 } else {
7761 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7762 }
7763 }
7764 chunks
7765 }
7766
7767 #[gpui::test]
7768 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7769 let dir = temp_tree(json!({
7770 "root": {
7771 "dir1": {},
7772 "dir2": {
7773 "dir3": {}
7774 }
7775 }
7776 }));
7777
7778 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7779 let cancel_flag = Default::default();
7780 let results = project
7781 .read_with(cx, |project, cx| {
7782 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7783 })
7784 .await;
7785
7786 assert!(results.is_empty());
7787 }
7788
7789 #[gpui::test(iterations = 10)]
7790 async fn test_definition(cx: &mut gpui::TestAppContext) {
7791 let mut language = Language::new(
7792 LanguageConfig {
7793 name: "Rust".into(),
7794 path_suffixes: vec!["rs".to_string()],
7795 ..Default::default()
7796 },
7797 Some(tree_sitter_rust::language()),
7798 );
7799 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7800
7801 let fs = FakeFs::new(cx.background());
7802 fs.insert_tree(
7803 "/dir",
7804 json!({
7805 "a.rs": "const fn a() { A }",
7806 "b.rs": "const y: i32 = crate::a()",
7807 }),
7808 )
7809 .await;
7810
7811 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7812 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7813
7814 let buffer = project
7815 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7816 .await
7817 .unwrap();
7818
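        // Respond to the definition request with a location in `a.rs`, a file that
        // lies outside of the project's only worktree.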
7819 let fake_server = fake_servers.next().await.unwrap();
7820 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7821 let params = params.text_document_position_params;
7822 assert_eq!(
7823 params.text_document.uri.to_file_path().unwrap(),
7824 Path::new("/dir/b.rs"),
7825 );
7826 assert_eq!(params.position, lsp::Position::new(0, 22));
7827
7828 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7829 lsp::Location::new(
7830 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7831 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7832 ),
7833 )))
7834 });
7835
7836 let mut definitions = project
7837 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7838 .await
7839 .unwrap();
7840
        // Assert that no new language server was started.
7842 cx.foreground().run_until_parked();
7843 assert!(fake_servers.try_next().is_err());
7844
7845 assert_eq!(definitions.len(), 1);
7846 let definition = definitions.pop().unwrap();
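        // The definition's target buffer belongs to a newly-added, invisible
        // worktree, which is removed once the definition is dropped.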
7847 cx.update(|cx| {
7848 let target_buffer = definition.target.buffer.read(cx);
7849 assert_eq!(
7850 target_buffer
7851 .file()
7852 .unwrap()
7853 .as_local()
7854 .unwrap()
7855 .abs_path(cx),
7856 Path::new("/dir/a.rs"),
7857 );
7858 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7859 assert_eq!(
7860 list_worktrees(&project, cx),
7861 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7862 );
7863
7864 drop(definition);
7865 });
7866 cx.read(|cx| {
7867 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7868 });
7869
7870 fn list_worktrees<'a>(
7871 project: &'a ModelHandle<Project>,
7872 cx: &'a AppContext,
7873 ) -> Vec<(&'a Path, bool)> {
7874 project
7875 .read(cx)
7876 .worktrees(cx)
7877 .map(|worktree| {
7878 let worktree = worktree.read(cx);
7879 (
7880 worktree.as_local().unwrap().abs_path().as_ref(),
7881 worktree.is_visible(),
7882 )
7883 })
7884 .collect::<Vec<_>>()
7885 }
7886 }
7887
7888 #[gpui::test]
7889 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7890 let mut language = Language::new(
7891 LanguageConfig {
7892 name: "TypeScript".into(),
7893 path_suffixes: vec!["ts".to_string()],
7894 ..Default::default()
7895 },
7896 Some(tree_sitter_typescript::language_typescript()),
7897 );
7898 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7899
7900 let fs = FakeFs::new(cx.background());
7901 fs.insert_tree(
7902 "/dir",
7903 json!({
7904 "a.ts": "",
7905 }),
7906 )
7907 .await;
7908
7909 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7910 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7911 let buffer = project
7912 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7913 .await
7914 .unwrap();
7915
7916 let fake_server = fake_language_servers.next().await.unwrap();
7917
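        // Request completions at the end of `b.fqn`. The server's completion item
        // has no text edit, so the range to replace must be inferred from the word
        // preceding the cursor.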
7918 let text = "let a = b.fqn";
7919 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7920 let completions = project.update(cx, |project, cx| {
7921 project.completions(&buffer, text.len(), cx)
7922 });
7923
7924 fake_server
7925 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7926 Ok(Some(lsp::CompletionResponse::Array(vec![
7927 lsp::CompletionItem {
7928 label: "fullyQualifiedName?".into(),
7929 insert_text: Some("fullyQualifiedName".into()),
7930 ..Default::default()
7931 },
7932 ])))
7933 })
7934 .next()
7935 .await;
7936 let completions = completions.await.unwrap();
7937 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7938 assert_eq!(completions.len(), 1);
7939 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7940 assert_eq!(
7941 completions[0].old_range.to_offset(&snapshot),
7942 text.len() - 3..text.len()
7943 );
7944
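        // Request completions inside a string literal. The inferred replacement
        // range should cover only the word characters before the cursor (`cmp`),
        // not the surrounding quotes.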
7945 let text = "let a = \"atoms/cmp\"";
7946 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7947 let completions = project.update(cx, |project, cx| {
7948 project.completions(&buffer, text.len() - 1, cx)
7949 });
7950
7951 fake_server
7952 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7953 Ok(Some(lsp::CompletionResponse::Array(vec![
7954 lsp::CompletionItem {
7955 label: "component".into(),
7956 ..Default::default()
7957 },
7958 ])))
7959 })
7960 .next()
7961 .await;
7962 let completions = completions.await.unwrap();
7963 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7964 assert_eq!(completions.len(), 1);
7965 assert_eq!(completions[0].new_text, "component");
7966 assert_eq!(
7967 completions[0].old_range.to_offset(&snapshot),
7968 text.len() - 4..text.len() - 1
7969 );
7970 }
7971
7972 #[gpui::test(iterations = 10)]
7973 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7974 let mut language = Language::new(
7975 LanguageConfig {
7976 name: "TypeScript".into(),
7977 path_suffixes: vec!["ts".to_string()],
7978 ..Default::default()
7979 },
7980 None,
7981 );
7982 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7983
7984 let fs = FakeFs::new(cx.background());
7985 fs.insert_tree(
7986 "/dir",
7987 json!({
7988 "a.ts": "a",
7989 }),
7990 )
7991 .await;
7992
7993 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7994 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7995 let buffer = project
7996 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7997 .await
7998 .unwrap();
7999
8000 let fake_server = fake_language_servers.next().await.unwrap();
8001
        // The language server returns code actions that contain commands but no edits.
8003 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
8004 fake_server
8005 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8006 Ok(Some(vec![
8007 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8008 title: "The code action".into(),
8009 command: Some(lsp::Command {
8010 title: "The command".into(),
8011 command: "_the/command".into(),
8012 arguments: Some(vec![json!("the-argument")]),
8013 }),
8014 ..Default::default()
8015 }),
8016 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8017 title: "two".into(),
8018 ..Default::default()
8019 }),
8020 ]))
8021 })
8022 .next()
8023 .await;
8024
8025 let action = actions.await.unwrap()[0].clone();
8026 let apply = project.update(cx, |project, cx| {
8027 project.apply_code_action(buffer.clone(), action, true, cx)
8028 });
8029
        // Resolving the code action does not populate its edits. In the absence of
        // edits, the action's command must be executed.
8032 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
8033 |action, _| async move { Ok(action) },
8034 );
8035
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
8038 fake_server
8039 .handle_request::<lsp::request::ExecuteCommand, _, _>({
8040 let fake = fake_server.clone();
8041 move |params, _| {
8042 assert_eq!(params.command, "_the/command");
8043 let fake = fake.clone();
8044 async move {
8045 fake.server
8046 .request::<lsp::request::ApplyWorkspaceEdit>(
8047 lsp::ApplyWorkspaceEditParams {
8048 label: None,
8049 edit: lsp::WorkspaceEdit {
8050 changes: Some(
8051 [(
8052 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
8053 vec![lsp::TextEdit {
8054 range: lsp::Range::new(
8055 lsp::Position::new(0, 0),
8056 lsp::Position::new(0, 0),
8057 ),
8058 new_text: "X".into(),
8059 }],
8060 )]
8061 .into_iter()
8062 .collect(),
8063 ),
8064 ..Default::default()
8065 },
8066 },
8067 )
8068 .await
8069 .unwrap();
8070 Ok(Some(json!(null)))
8071 }
8072 }
8073 })
8074 .next()
8075 .await;
8076
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
8079 let transaction = apply.await.unwrap();
8080 assert!(transaction.0.contains_key(&buffer));
8081 buffer.update(cx, |buffer, cx| {
8082 assert_eq!(buffer.text(), "Xa");
8083 buffer.undo(cx);
8084 assert_eq!(buffer.text(), "a");
8085 });
8086 }
8087
8088 #[gpui::test]
8089 async fn test_save_file(cx: &mut gpui::TestAppContext) {
8090 let fs = FakeFs::new(cx.background());
8091 fs.insert_tree(
8092 "/dir",
8093 json!({
8094 "file1": "the old contents",
8095 }),
8096 )
8097 .await;
8098
8099 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8100 let buffer = project
8101 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8102 .await
8103 .unwrap();
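        // Make a large edit and then save. The file on disk should match the
        // buffer's contents.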
8104 buffer
8105 .update(cx, |buffer, cx| {
8106 assert_eq!(buffer.text(), "the old contents");
8107 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8108 buffer.save(cx)
8109 })
8110 .await
8111 .unwrap();
8112
8113 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8114 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8115 }
8116
8117 #[gpui::test]
8118 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8119 let fs = FakeFs::new(cx.background());
8120 fs.insert_tree(
8121 "/dir",
8122 json!({
8123 "file1": "the old contents",
8124 }),
8125 )
8126 .await;
8127
8128 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8129 let buffer = project
8130 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8131 .await
8132 .unwrap();
8133 buffer
8134 .update(cx, |buffer, cx| {
8135 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8136 buffer.save(cx)
8137 })
8138 .await
8139 .unwrap();
8140
8141 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8142 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8143 }
8144
8145 #[gpui::test]
8146 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8147 let fs = FakeFs::new(cx.background());
8148 fs.insert_tree("/dir", json!({})).await;
8149
8150 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8151 let buffer = project.update(cx, |project, cx| {
8152 project.create_buffer("", None, cx).unwrap()
8153 });
8154 buffer.update(cx, |buffer, cx| {
8155 buffer.edit([(0..0, "abc")], cx);
8156 assert!(buffer.is_dirty());
8157 assert!(!buffer.has_conflict());
8158 });
8159 project
8160 .update(cx, |project, cx| {
8161 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8162 })
8163 .await
8164 .unwrap();
8165 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8166 buffer.read_with(cx, |buffer, cx| {
8167 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8168 assert!(!buffer.is_dirty());
8169 assert!(!buffer.has_conflict());
8170 });
8171
8172 let opened_buffer = project
8173 .update(cx, |project, cx| {
8174 project.open_local_buffer("/dir/file1", cx)
8175 })
8176 .await
8177 .unwrap();
8178 assert_eq!(opened_buffer, buffer);
8179 }
8180
8181 #[gpui::test(retries = 5)]
8182 async fn test_rescan_and_remote_updates(
8183 deterministic: Arc<Deterministic>,
8184 cx: &mut gpui::TestAppContext,
8185 ) {
8186 let dir = temp_tree(json!({
8187 "a": {
8188 "file1": "",
8189 "file2": "",
8190 "file3": "",
8191 },
8192 "b": {
8193 "c": {
8194 "file4": "",
8195 "file5": "",
8196 }
8197 }
8198 }));
8199
8200 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8201 let rpc = project.read_with(cx, |p, _| p.client.clone());
8202
8203 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8204 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8205 async move { buffer.await.unwrap() }
8206 };
8207 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8208 project.read_with(cx, |project, cx| {
8209 let tree = project.worktrees(cx).next().unwrap();
8210 tree.read(cx)
8211 .entry_for_path(path)
8212 .expect(&format!("no entry for path {}", path))
8213 .id
8214 })
8215 };
8216
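        // Open buffers for several files before renaming or deleting them on disk.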
8217 let buffer2 = buffer_for_path("a/file2", cx).await;
8218 let buffer3 = buffer_for_path("a/file3", cx).await;
8219 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8220 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8221
8222 let file2_id = id_for_path("a/file2", &cx);
8223 let file3_id = id_for_path("a/file3", &cx);
8224 let file4_id = id_for_path("b/c/file4", &cx);
8225
8226 // Create a remote copy of this worktree.
8227 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8228 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8229 let remote = cx.update(|cx| {
8230 Worktree::remote(
8231 1,
8232 1,
8233 proto::WorktreeMetadata {
8234 id: initial_snapshot.id().to_proto(),
8235 root_name: initial_snapshot.root_name().into(),
8236 visible: true,
8237 },
8238 rpc.clone(),
8239 cx,
8240 )
8241 });
8242 remote.update(cx, |remote, _| {
8243 let update = initial_snapshot.build_initial_update(1);
8244 remote.as_remote_mut().unwrap().update_from_remote(update);
8245 });
8246 deterministic.run_until_parked();
8247
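        // None of the open buffers should be dirty before any files change on disk.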
8248 cx.read(|cx| {
8249 assert!(!buffer2.read(cx).is_dirty());
8250 assert!(!buffer3.read(cx).is_dirty());
8251 assert!(!buffer4.read(cx).is_dirty());
8252 assert!(!buffer5.read(cx).is_dirty());
8253 });
8254
8255 // Rename and delete files and directories.
8256 tree.flush_fs_events(&cx).await;
8257 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8258 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8259 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8260 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8261 tree.flush_fs_events(&cx).await;
8262
8263 let expected_paths = vec![
8264 "a",
8265 "a/file1",
8266 "a/file2.new",
8267 "b",
8268 "d",
8269 "d/file3",
8270 "d/file4",
8271 ];
8272
8273 cx.read(|app| {
8274 assert_eq!(
8275 tree.read(app)
8276 .paths()
8277 .map(|p| p.to_str().unwrap())
8278 .collect::<Vec<_>>(),
8279 expected_paths
8280 );
8281
8282 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8283 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8284 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8285
8286 assert_eq!(
8287 buffer2.read(app).file().unwrap().path().as_ref(),
8288 Path::new("a/file2.new")
8289 );
8290 assert_eq!(
8291 buffer3.read(app).file().unwrap().path().as_ref(),
8292 Path::new("d/file3")
8293 );
8294 assert_eq!(
8295 buffer4.read(app).file().unwrap().path().as_ref(),
8296 Path::new("d/file4")
8297 );
8298 assert_eq!(
8299 buffer5.read(app).file().unwrap().path().as_ref(),
8300 Path::new("b/c/file5")
8301 );
8302
8303 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8304 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8305 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8306 assert!(buffer5.read(app).file().unwrap().is_deleted());
8307 });
8308
8309 // Update the remote worktree. Check that it becomes consistent with the
8310 // local worktree.
8311 remote.update(cx, |remote, cx| {
8312 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
8313 &initial_snapshot,
8314 1,
8315 1,
8316 true,
8317 );
8318 remote.as_remote_mut().unwrap().update_from_remote(update);
8319 });
8320 deterministic.run_until_parked();
8321 remote.read_with(cx, |remote, _| {
8322 assert_eq!(
8323 remote
8324 .paths()
8325 .map(|p| p.to_str().unwrap())
8326 .collect::<Vec<_>>(),
8327 expected_paths
8328 );
8329 });
8330 }
8331
8332 #[gpui::test]
8333 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8334 let fs = FakeFs::new(cx.background());
8335 fs.insert_tree(
8336 "/dir",
8337 json!({
8338 "a.txt": "a-contents",
8339 "b.txt": "b-contents",
8340 }),
8341 )
8342 .await;
8343
8344 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8345
8346 // Spawn multiple tasks to open paths, repeating some paths.
8347 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8348 (
8349 p.open_local_buffer("/dir/a.txt", cx),
8350 p.open_local_buffer("/dir/b.txt", cx),
8351 p.open_local_buffer("/dir/a.txt", cx),
8352 )
8353 });
8354
8355 let buffer_a_1 = buffer_a_1.await.unwrap();
8356 let buffer_a_2 = buffer_a_2.await.unwrap();
8357 let buffer_b = buffer_b.await.unwrap();
8358 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8359 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8360
8361 // There is only one buffer per path.
8362 let buffer_a_id = buffer_a_1.id();
8363 assert_eq!(buffer_a_2.id(), buffer_a_id);
8364
8365 // Open the same path again while it is still open.
8366 drop(buffer_a_1);
8367 let buffer_a_3 = project
8368 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8369 .await
8370 .unwrap();
8371
8372 // There's still only one buffer per path.
8373 assert_eq!(buffer_a_3.id(), buffer_a_id);
8374 }
8375
8376 #[gpui::test]
8377 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8378 let fs = FakeFs::new(cx.background());
8379 fs.insert_tree(
8380 "/dir",
8381 json!({
8382 "file1": "abc",
8383 "file2": "def",
8384 "file3": "ghi",
8385 }),
8386 )
8387 .await;
8388
8389 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8390
8391 let buffer1 = project
8392 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8393 .await
8394 .unwrap();
8395 let events = Rc::new(RefCell::new(Vec::new()));
8396
        // Initially, the buffer isn't dirty.
8398 buffer1.update(cx, |buffer, cx| {
8399 cx.subscribe(&buffer1, {
8400 let events = events.clone();
8401 move |_, _, event, _| match event {
8402 BufferEvent::Operation(_) => {}
8403 _ => events.borrow_mut().push(event.clone()),
8404 }
8405 })
8406 .detach();
8407
8408 assert!(!buffer.is_dirty());
8409 assert!(events.borrow().is_empty());
8410
8411 buffer.edit([(1..2, "")], cx);
8412 });
8413
        // After the first edit, the buffer is dirty and emits a `DirtyChanged` event.
8415 buffer1.update(cx, |buffer, cx| {
8416 assert!(buffer.text() == "ac");
8417 assert!(buffer.is_dirty());
8418 assert_eq!(
8419 *events.borrow(),
8420 &[language::Event::Edited, language::Event::DirtyChanged]
8421 );
8422 events.borrow_mut().clear();
8423 buffer.did_save(
8424 buffer.version(),
8425 buffer.as_rope().fingerprint(),
8426 buffer.file().unwrap().mtime(),
8427 None,
8428 cx,
8429 );
8430 });
8431
        // After saving, the buffer is no longer dirty and emits a `Saved` event.
8433 buffer1.update(cx, |buffer, cx| {
8434 assert!(!buffer.is_dirty());
8435 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8436 events.borrow_mut().clear();
8437
8438 buffer.edit([(1..1, "B")], cx);
8439 buffer.edit([(2..2, "D")], cx);
8440 });
8441
        // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
8443 buffer1.update(cx, |buffer, cx| {
8444 assert!(buffer.text() == "aBDc");
8445 assert!(buffer.is_dirty());
8446 assert_eq!(
8447 *events.borrow(),
8448 &[
8449 language::Event::Edited,
8450 language::Event::DirtyChanged,
8451 language::Event::Edited,
8452 ],
8453 );
8454 events.borrow_mut().clear();
8455
8456 // After restoring the buffer to its previously-saved state,
8457 // the buffer is not considered dirty anymore.
8458 buffer.edit([(1..3, "")], cx);
8459 assert!(buffer.text() == "ac");
8460 assert!(!buffer.is_dirty());
8461 });
8462
8463 assert_eq!(
8464 *events.borrow(),
8465 &[language::Event::Edited, language::Event::DirtyChanged]
8466 );
8467
        // When a file is deleted, the buffer is considered dirty and emits both
        // `DirtyChanged` and `FileHandleChanged` events.
8469 let events = Rc::new(RefCell::new(Vec::new()));
8470 let buffer2 = project
8471 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8472 .await
8473 .unwrap();
8474 buffer2.update(cx, |_, cx| {
8475 cx.subscribe(&buffer2, {
8476 let events = events.clone();
8477 move |_, _, event, _| events.borrow_mut().push(event.clone())
8478 })
8479 .detach();
8480 });
8481
8482 fs.remove_file("/dir/file2".as_ref(), Default::default())
8483 .await
8484 .unwrap();
8485 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8486 assert_eq!(
8487 *events.borrow(),
8488 &[
8489 language::Event::DirtyChanged,
8490 language::Event::FileHandleChanged
8491 ]
8492 );
8493
        // If a file is already dirty when it is deleted, no additional `DirtyChanged` event is emitted.
8495 let events = Rc::new(RefCell::new(Vec::new()));
8496 let buffer3 = project
8497 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8498 .await
8499 .unwrap();
8500 buffer3.update(cx, |_, cx| {
8501 cx.subscribe(&buffer3, {
8502 let events = events.clone();
8503 move |_, _, event, _| events.borrow_mut().push(event.clone())
8504 })
8505 .detach();
8506 });
8507
8508 buffer3.update(cx, |buffer, cx| {
8509 buffer.edit([(0..0, "x")], cx);
8510 });
8511 events.borrow_mut().clear();
8512 fs.remove_file("/dir/file3".as_ref(), Default::default())
8513 .await
8514 .unwrap();
8515 buffer3
8516 .condition(&cx, |_, _| !events.borrow().is_empty())
8517 .await;
8518 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8519 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8520 }
8521
8522 #[gpui::test]
8523 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8524 let initial_contents = "aaa\nbbbbb\nc\n";
8525 let fs = FakeFs::new(cx.background());
8526 fs.insert_tree(
8527 "/dir",
8528 json!({
8529 "the-file": initial_contents,
8530 }),
8531 )
8532 .await;
8533 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8534 let buffer = project
8535 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8536 .await
8537 .unwrap();
8538
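        // Record anchors at column 1 of each of the buffer's three lines, so we can
        // verify how they move when the file is reloaded from disk.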
8539 let anchors = (0..3)
8540 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8541 .collect::<Vec<_>>();
8542
8543 // Change the file on disk, adding two new lines of text, and removing
8544 // one line.
8545 buffer.read_with(cx, |buffer, _| {
8546 assert!(!buffer.is_dirty());
8547 assert!(!buffer.has_conflict());
8548 });
8549 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8550 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8551 .await
8552 .unwrap();
8553
8554 // Because the buffer was not modified, it is reloaded from disk. Its
8555 // contents are edited according to the diff between the old and new
8556 // file contents.
8557 buffer
8558 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8559 .await;
8560
8561 buffer.update(cx, |buffer, _| {
8562 assert_eq!(buffer.text(), new_contents);
8563 assert!(!buffer.is_dirty());
8564 assert!(!buffer.has_conflict());
8565
8566 let anchor_positions = anchors
8567 .iter()
8568 .map(|anchor| anchor.to_point(&*buffer))
8569 .collect::<Vec<_>>();
8570 assert_eq!(
8571 anchor_positions,
8572 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8573 );
8574 });
8575
        // Modify the buffer, making it dirty.
8577 buffer.update(cx, |buffer, cx| {
8578 buffer.edit([(0..0, " ")], cx);
8579 assert!(buffer.is_dirty());
8580 assert!(!buffer.has_conflict());
8581 });
8582
8583 // Change the file on disk again, adding blank lines to the beginning.
8584 fs.save(
8585 "/dir/the-file".as_ref(),
8586 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8587 )
8588 .await
8589 .unwrap();
8590
8591 // Because the buffer is modified, it doesn't reload from disk, but is
8592 // marked as having a conflict.
8593 buffer
8594 .condition(&cx, |buffer, _| buffer.has_conflict())
8595 .await;
8596 }
8597
8598 #[gpui::test]
8599 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8600 cx.foreground().forbid_parking();
8601
8602 let fs = FakeFs::new(cx.background());
8603 fs.insert_tree(
8604 "/the-dir",
8605 json!({
8606 "a.rs": "
8607 fn foo(mut v: Vec<usize>) {
8608 for x in &v {
8609 v.push(1);
8610 }
8611 }
8612 "
8613 .unindent(),
8614 }),
8615 )
8616 .await;
8617
8618 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8619 let buffer = project
8620 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8621 .await
8622 .unwrap();
8623
8624 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
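        // Publish diagnostics in which the hint-severity entries and their primary
        // diagnostics reference each other via `relatedInformation`.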
8625 let message = lsp::PublishDiagnosticsParams {
8626 uri: buffer_uri.clone(),
8627 diagnostics: vec![
8628 lsp::Diagnostic {
8629 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8630 severity: Some(DiagnosticSeverity::WARNING),
8631 message: "error 1".to_string(),
8632 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8633 location: lsp::Location {
8634 uri: buffer_uri.clone(),
8635 range: lsp::Range::new(
8636 lsp::Position::new(1, 8),
8637 lsp::Position::new(1, 9),
8638 ),
8639 },
8640 message: "error 1 hint 1".to_string(),
8641 }]),
8642 ..Default::default()
8643 },
8644 lsp::Diagnostic {
8645 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8646 severity: Some(DiagnosticSeverity::HINT),
8647 message: "error 1 hint 1".to_string(),
8648 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8649 location: lsp::Location {
8650 uri: buffer_uri.clone(),
8651 range: lsp::Range::new(
8652 lsp::Position::new(1, 8),
8653 lsp::Position::new(1, 9),
8654 ),
8655 },
8656 message: "original diagnostic".to_string(),
8657 }]),
8658 ..Default::default()
8659 },
8660 lsp::Diagnostic {
8661 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8662 severity: Some(DiagnosticSeverity::ERROR),
8663 message: "error 2".to_string(),
8664 related_information: Some(vec![
8665 lsp::DiagnosticRelatedInformation {
8666 location: lsp::Location {
8667 uri: buffer_uri.clone(),
8668 range: lsp::Range::new(
8669 lsp::Position::new(1, 13),
8670 lsp::Position::new(1, 15),
8671 ),
8672 },
8673 message: "error 2 hint 1".to_string(),
8674 },
8675 lsp::DiagnosticRelatedInformation {
8676 location: lsp::Location {
8677 uri: buffer_uri.clone(),
8678 range: lsp::Range::new(
8679 lsp::Position::new(1, 13),
8680 lsp::Position::new(1, 15),
8681 ),
8682 },
8683 message: "error 2 hint 2".to_string(),
8684 },
8685 ]),
8686 ..Default::default()
8687 },
8688 lsp::Diagnostic {
8689 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8690 severity: Some(DiagnosticSeverity::HINT),
8691 message: "error 2 hint 1".to_string(),
8692 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8693 location: lsp::Location {
8694 uri: buffer_uri.clone(),
8695 range: lsp::Range::new(
8696 lsp::Position::new(2, 8),
8697 lsp::Position::new(2, 17),
8698 ),
8699 },
8700 message: "original diagnostic".to_string(),
8701 }]),
8702 ..Default::default()
8703 },
8704 lsp::Diagnostic {
8705 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8706 severity: Some(DiagnosticSeverity::HINT),
8707 message: "error 2 hint 2".to_string(),
8708 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8709 location: lsp::Location {
8710 uri: buffer_uri.clone(),
8711 range: lsp::Range::new(
8712 lsp::Position::new(2, 8),
8713 lsp::Position::new(2, 17),
8714 ),
8715 },
8716 message: "original diagnostic".to_string(),
8717 }]),
8718 ..Default::default()
8719 },
8720 ],
8721 version: None,
8722 };
8723
8724 project
8725 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8726 .unwrap();
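        // The hints should be grouped with their primary diagnostics: group 0 for
        // "error 1" and group 1 for "error 2".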
8727 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8728
8729 assert_eq!(
8730 buffer
8731 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8732 .collect::<Vec<_>>(),
8733 &[
8734 DiagnosticEntry {
8735 range: Point::new(1, 8)..Point::new(1, 9),
8736 diagnostic: Diagnostic {
8737 severity: DiagnosticSeverity::WARNING,
8738 message: "error 1".to_string(),
8739 group_id: 0,
8740 is_primary: true,
8741 ..Default::default()
8742 }
8743 },
8744 DiagnosticEntry {
8745 range: Point::new(1, 8)..Point::new(1, 9),
8746 diagnostic: Diagnostic {
8747 severity: DiagnosticSeverity::HINT,
8748 message: "error 1 hint 1".to_string(),
8749 group_id: 0,
8750 is_primary: false,
8751 ..Default::default()
8752 }
8753 },
8754 DiagnosticEntry {
8755 range: Point::new(1, 13)..Point::new(1, 15),
8756 diagnostic: Diagnostic {
8757 severity: DiagnosticSeverity::HINT,
8758 message: "error 2 hint 1".to_string(),
8759 group_id: 1,
8760 is_primary: false,
8761 ..Default::default()
8762 }
8763 },
8764 DiagnosticEntry {
8765 range: Point::new(1, 13)..Point::new(1, 15),
8766 diagnostic: Diagnostic {
8767 severity: DiagnosticSeverity::HINT,
8768 message: "error 2 hint 2".to_string(),
8769 group_id: 1,
8770 is_primary: false,
8771 ..Default::default()
8772 }
8773 },
8774 DiagnosticEntry {
8775 range: Point::new(2, 8)..Point::new(2, 17),
8776 diagnostic: Diagnostic {
8777 severity: DiagnosticSeverity::ERROR,
8778 message: "error 2".to_string(),
8779 group_id: 1,
8780 is_primary: true,
8781 ..Default::default()
8782 }
8783 }
8784 ]
8785 );
8786
8787 assert_eq!(
8788 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8789 &[
8790 DiagnosticEntry {
8791 range: Point::new(1, 8)..Point::new(1, 9),
8792 diagnostic: Diagnostic {
8793 severity: DiagnosticSeverity::WARNING,
8794 message: "error 1".to_string(),
8795 group_id: 0,
8796 is_primary: true,
8797 ..Default::default()
8798 }
8799 },
8800 DiagnosticEntry {
8801 range: Point::new(1, 8)..Point::new(1, 9),
8802 diagnostic: Diagnostic {
8803 severity: DiagnosticSeverity::HINT,
8804 message: "error 1 hint 1".to_string(),
8805 group_id: 0,
8806 is_primary: false,
8807 ..Default::default()
8808 }
8809 },
8810 ]
8811 );
8812 assert_eq!(
8813 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8814 &[
8815 DiagnosticEntry {
8816 range: Point::new(1, 13)..Point::new(1, 15),
8817 diagnostic: Diagnostic {
8818 severity: DiagnosticSeverity::HINT,
8819 message: "error 2 hint 1".to_string(),
8820 group_id: 1,
8821 is_primary: false,
8822 ..Default::default()
8823 }
8824 },
8825 DiagnosticEntry {
8826 range: Point::new(1, 13)..Point::new(1, 15),
8827 diagnostic: Diagnostic {
8828 severity: DiagnosticSeverity::HINT,
8829 message: "error 2 hint 2".to_string(),
8830 group_id: 1,
8831 is_primary: false,
8832 ..Default::default()
8833 }
8834 },
8835 DiagnosticEntry {
8836 range: Point::new(2, 8)..Point::new(2, 17),
8837 diagnostic: Diagnostic {
8838 severity: DiagnosticSeverity::ERROR,
8839 message: "error 2".to_string(),
8840 group_id: 1,
8841 is_primary: true,
8842 ..Default::default()
8843 }
8844 }
8845 ]
8846 );
8847 }
8848
8849 #[gpui::test]
8850 async fn test_rename(cx: &mut gpui::TestAppContext) {
8851 cx.foreground().forbid_parking();
8852
8853 let mut language = Language::new(
8854 LanguageConfig {
8855 name: "Rust".into(),
8856 path_suffixes: vec!["rs".to_string()],
8857 ..Default::default()
8858 },
8859 Some(tree_sitter_rust::language()),
8860 );
8861 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8862 capabilities: lsp::ServerCapabilities {
8863 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8864 prepare_provider: Some(true),
8865 work_done_progress_options: Default::default(),
8866 })),
8867 ..Default::default()
8868 },
8869 ..Default::default()
8870 });
8871
8872 let fs = FakeFs::new(cx.background());
8873 fs.insert_tree(
8874 "/dir",
8875 json!({
8876 "one.rs": "const ONE: usize = 1;",
8877 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8878 }),
8879 )
8880 .await;
8881
8882 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8883 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8884 let buffer = project
8885 .update(cx, |project, cx| {
8886 project.open_local_buffer("/dir/one.rs", cx)
8887 })
8888 .await
8889 .unwrap();
8890
8891 let fake_server = fake_servers.next().await.unwrap();
8892
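        // Prepare the rename. The server responds with the range of the symbol
        // that can be renamed.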
8893 let response = project.update(cx, |project, cx| {
8894 project.prepare_rename(buffer.clone(), 7, cx)
8895 });
8896 fake_server
8897 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8898 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8899 assert_eq!(params.position, lsp::Position::new(0, 7));
8900 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8901 lsp::Position::new(0, 6),
8902 lsp::Position::new(0, 9),
8903 ))))
8904 })
8905 .next()
8906 .await
8907 .unwrap();
8908 let range = response.await.unwrap().unwrap();
8909 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8910 assert_eq!(range, 6..9);
8911
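        // Perform the rename. The server's workspace edit spans both `one.rs` and
        // `two.rs`.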
8912 let response = project.update(cx, |project, cx| {
8913 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8914 });
8915 fake_server
8916 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8917 assert_eq!(
8918 params.text_document_position.text_document.uri.as_str(),
8919 "file:///dir/one.rs"
8920 );
8921 assert_eq!(
8922 params.text_document_position.position,
8923 lsp::Position::new(0, 7)
8924 );
8925 assert_eq!(params.new_name, "THREE");
8926 Ok(Some(lsp::WorkspaceEdit {
8927 changes: Some(
8928 [
8929 (
8930 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8931 vec![lsp::TextEdit::new(
8932 lsp::Range::new(
8933 lsp::Position::new(0, 6),
8934 lsp::Position::new(0, 9),
8935 ),
8936 "THREE".to_string(),
8937 )],
8938 ),
8939 (
8940 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8941 vec![
8942 lsp::TextEdit::new(
8943 lsp::Range::new(
8944 lsp::Position::new(0, 24),
8945 lsp::Position::new(0, 27),
8946 ),
8947 "THREE".to_string(),
8948 ),
8949 lsp::TextEdit::new(
8950 lsp::Range::new(
8951 lsp::Position::new(0, 35),
8952 lsp::Position::new(0, 38),
8953 ),
8954 "THREE".to_string(),
8955 ),
8956 ],
8957 ),
8958 ]
8959 .into_iter()
8960 .collect(),
8961 ),
8962 ..Default::default()
8963 }))
8964 })
8965 .next()
8966 .await
8967 .unwrap();
8968 let mut transaction = response.await.unwrap().0;
8969 assert_eq!(transaction.len(), 2);
8970 assert_eq!(
8971 transaction
8972 .remove_entry(&buffer)
8973 .unwrap()
8974 .0
8975 .read_with(cx, |buffer, _| buffer.text()),
8976 "const THREE: usize = 1;"
8977 );
8978 assert_eq!(
8979 transaction
8980 .into_keys()
8981 .next()
8982 .unwrap()
8983 .read_with(cx, |buffer, _| buffer.text()),
8984 "const TWO: usize = one::THREE + one::THREE;"
8985 );
8986 }
8987
8988 #[gpui::test]
8989 async fn test_search(cx: &mut gpui::TestAppContext) {
8990 let fs = FakeFs::new(cx.background());
8991 fs.insert_tree(
8992 "/dir",
8993 json!({
8994 "one.rs": "const ONE: usize = 1;",
8995 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8996 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8997 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8998 }),
8999 )
9000 .await;
9001 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
9002 assert_eq!(
9003 search(&project, SearchQuery::text("TWO", false, true), cx)
9004 .await
9005 .unwrap(),
9006 HashMap::from_iter([
9007 ("two.rs".to_string(), vec![6..9]),
9008 ("three.rs".to_string(), vec![37..40])
9009 ])
9010 );
9011
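        // Edit an open buffer so that its in-memory contents differ from the file
        // on disk, introducing new matches in `four.rs`.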
9012 let buffer_4 = project
9013 .update(cx, |project, cx| {
9014 project.open_local_buffer("/dir/four.rs", cx)
9015 })
9016 .await
9017 .unwrap();
9018 buffer_4.update(cx, |buffer, cx| {
9019 let text = "two::TWO";
9020 buffer.edit([(20..28, text), (31..43, text)], cx);
9021 });
9022
9023 assert_eq!(
9024 search(&project, SearchQuery::text("TWO", false, true), cx)
9025 .await
9026 .unwrap(),
9027 HashMap::from_iter([
9028 ("two.rs".to_string(), vec![6..9]),
9029 ("three.rs".to_string(), vec![37..40]),
9030 ("four.rs".to_string(), vec![25..28, 36..39])
9031 ])
9032 );
9033
9034 async fn search(
9035 project: &ModelHandle<Project>,
9036 query: SearchQuery,
9037 cx: &mut gpui::TestAppContext,
9038 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
9039 let results = project
9040 .update(cx, |project, cx| project.search(query, cx))
9041 .await?;
9042
9043 Ok(results
9044 .into_iter()
9045 .map(|(buffer, ranges)| {
9046 buffer.read_with(cx, |buffer, _| {
9047 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
9048 let ranges = ranges
9049 .into_iter()
9050 .map(|range| range.to_offset(buffer))
9051 .collect::<Vec<_>>();
9052 (path, ranges)
9053 })
9054 })
9055 .collect())
9056 }
9057 }
9058}