mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

// Language server state is stored across three collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState, which can either be a task for a
//         server in the process of starting, or a running server with its adapter and language server arcs
//     language_server_ids => a mapping from worktree id and server name to the unique server id
//     language_server_statuses => a mapping from unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, the id map is checked first to make sure a server isn't already
// available for that worktree. If there is one, the call finishes early. Otherwise, a new id is
// allocated and the Starting variant of LanguageServerState is stored in the language_servers map.
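//
// As a rough illustration of that lookup (a simplified sketch only; the real logic lives in
// start_language_server below, and `start_task` here is just a placeholder for the spawned
// startup future):
//
//     let key = (worktree_id, adapter.name());
//     match self.language_server_ids.get(&key) {
//         Some(server_id) => {
//             // Reuse the starting or running server tracked in self.language_servers[server_id].
//         }
//         None => {
//             let server_id = post_inc(&mut self.next_language_server_id);
//             self.language_server_ids.insert(key, server_id);
//             self.language_servers
//                 .insert(server_id, LanguageServerState::Starting(start_task));
//         }
//     }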
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<usize, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),
    Running {
        adapter: Arc<dyn LspAdapter>,
        server: Arc<LanguageServer>,
    },
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}

impl HoverBlock {
    fn try_new(marked_string: MarkedString) -> Option<Self> {
        let result = match marked_string {
            MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
                text: value,
                language: Some(language),
            },
            MarkedString::String(text) => HoverBlock {
                text,
                language: None,
            },
        };
        if result.text.is_empty() {
            None
        } else {
            Some(result)
        }
    }
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(
        language_server_id: usize,
        diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
    ) -> Self {
        let mut this = Self {
            language_server_id,
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: self.language_server_id as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        if !cx.read(|cx| cx.has_global::<Settings>()) {
            cx.update(|cx| cx.set_global(Settings::test(cx)));
        }

        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }

    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }

    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let settings = cx.global::<Settings>();

        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
                {
                    if settings.enable_language_server(Some(&language.name())) {
                        let worktree = file.worktree.read(cx);
                        language_servers_to_start.push((
                            worktree.id(),
                            worktree.as_local().unwrap().abs_path().clone(),
                            language.clone(),
                        ));
                    }
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        for language in self.languages.to_vec() {
            if let Some(lsp_adapter) = language.lsp_adapter() {
                if !settings.enable_language_server(Some(&language.name())) {
                    let lsp_name = lsp_adapter.name();
                    for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
                        if lsp_name == *started_lsp_name {
                            language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                        }
                    }
                }
            }
        }

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree_id, worktree_path, language) in language_servers_to_start {
            self.start_language_server(worktree_id, worktree_path, language, cx);
        }

        cx.notify();
    }

737 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
738 self.opened_buffers
739 .get(&remote_id)
740 .and_then(|buffer| buffer.upgrade(cx))
741 }
742
743 pub fn languages(&self) -> &Arc<LanguageRegistry> {
744 &self.languages
745 }
746
747 pub fn client(&self) -> Arc<Client> {
748 self.client.clone()
749 }
750
751 pub fn user_store(&self) -> ModelHandle<UserStore> {
752 self.user_store.clone()
753 }
754
755 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
756 self.project_store.clone()
757 }
758
759 #[cfg(any(test, feature = "test-support"))]
760 pub fn check_invariants(&self, cx: &AppContext) {
761 if self.is_local() {
762 let mut worktree_root_paths = HashMap::default();
763 for worktree in self.worktrees(cx) {
764 let worktree = worktree.read(cx);
765 let abs_path = worktree.as_local().unwrap().abs_path().clone();
766 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
767 assert_eq!(
768 prev_worktree_id,
769 None,
770 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
771 abs_path,
772 worktree.id(),
773 prev_worktree_id
774 )
775 }
776 } else {
777 let replica_id = self.replica_id();
778 for buffer in self.opened_buffers.values() {
779 if let Some(buffer) = buffer.upgrade(cx) {
780 let buffer = buffer.read(cx);
781 assert_eq!(
782 buffer.deferred_ops_len(),
783 0,
784 "replica {}, buffer {} has deferred operations",
785 replica_id,
786 buffer.remote_id()
787 );
788 }
789 }
790 }
791 }
792
793 #[cfg(any(test, feature = "test-support"))]
794 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
795 let path = path.into();
796 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
797 self.opened_buffers.iter().any(|(_, buffer)| {
798 if let Some(buffer) = buffer.upgrade(cx) {
799 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
800 if file.worktree == worktree && file.path() == &path.path {
801 return true;
802 }
803 }
804 }
805 false
806 })
807 } else {
808 false
809 }
810 }
811
812 pub fn fs(&self) -> &Arc<dyn Fs> {
813 &self.fs
814 }
815
816 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
817 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
818 let mut online_tx = online_tx.borrow_mut();
819 if *online_tx != online {
820 *online_tx = online;
821 drop(online_tx);
822 self.metadata_changed(true, cx);
823 }
824 }
825 }
826
827 pub fn is_online(&self) -> bool {
828 match &self.client_state {
829 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
830 ProjectClientState::Remote { .. } => true,
831 }
832 }
833
834 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
835 self.unshared(cx);
836 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
837 if let Some(remote_id) = *remote_id_rx.borrow() {
838 let request = self.client.request(proto::UnregisterProject {
839 project_id: remote_id,
840 });
841 return cx.spawn(|this, mut cx| async move {
842 let response = request.await;
843
844 // Unregistering the project causes the server to send out a
845 // contact update removing this project from the host's list
846 // of online projects. Wait until this contact update has been
847 // processed before clearing out this project's remote id, so
848 // that there is no moment where this project appears in the
849 // contact metadata and *also* has no remote id.
850 this.update(&mut cx, |this, cx| {
851 this.user_store()
852 .update(cx, |store, _| store.contact_updates_done())
853 })
854 .await;
855
856 this.update(&mut cx, |this, cx| {
857 if let ProjectClientState::Local { remote_id_tx, .. } =
858 &mut this.client_state
859 {
860 *remote_id_tx.borrow_mut() = None;
861 }
862 this.client_subscriptions.clear();
863 this.metadata_changed(false, cx);
864 });
865 response.map(drop)
866 });
867 }
868 }
869 Task::ready(Ok(()))
870 }
871
872 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
873 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
874 if remote_id_rx.borrow().is_some() {
875 return Task::ready(Ok(()));
876 }
877 }
878
879 let response = self.client.request(proto::RegisterProject {});
880 cx.spawn(|this, mut cx| async move {
881 let remote_id = response.await?.project_id;
882 this.update(&mut cx, |this, cx| {
883 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
884 *remote_id_tx.borrow_mut() = Some(remote_id);
885 }
886
887 this.metadata_changed(false, cx);
888 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
889 this.client_subscriptions
890 .push(this.client.add_model_for_remote_entity(remote_id, cx));
891 Ok(())
892 })
893 })
894 }
895
896 pub fn remote_id(&self) -> Option<u64> {
897 match &self.client_state {
898 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
899 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
900 }
901 }
902
903 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
904 let mut id = None;
905 let mut watch = None;
906 match &self.client_state {
907 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
908 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
909 }
910
911 async move {
912 if let Some(id) = id {
913 return id;
914 }
915 let mut watch = watch.unwrap();
916 loop {
917 let id = *watch.borrow();
918 if let Some(id) = id {
919 return id;
920 }
921 watch.next().await;
922 }
923 }
924 }
925
926 pub fn shared_remote_id(&self) -> Option<u64> {
927 match &self.client_state {
928 ProjectClientState::Local {
929 remote_id_rx,
930 is_shared,
931 ..
932 } => {
933 if *is_shared {
934 *remote_id_rx.borrow()
935 } else {
936 None
937 }
938 }
939 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
940 }
941 }
942
943 pub fn replica_id(&self) -> ReplicaId {
944 match &self.client_state {
945 ProjectClientState::Local { .. } => 0,
946 ProjectClientState::Remote { replica_id, .. } => *replica_id,
947 }
948 }
949
950 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
951 if let ProjectClientState::Local {
952 remote_id_rx,
953 online_rx,
954 ..
955 } = &self.client_state
956 {
957 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
958 self.client
959 .send(proto::UpdateProject {
960 project_id,
961 worktrees: self
962 .worktrees
963 .iter()
964 .filter_map(|worktree| {
965 worktree.upgrade(&cx).map(|worktree| {
966 worktree.read(cx).as_local().unwrap().metadata_proto()
967 })
968 })
969 .collect(),
970 })
971 .log_err();
972 }
973
974 self.project_store.update(cx, |_, cx| cx.notify());
975 if persist {
976 self.persist_state(cx).detach_and_log_err(cx);
977 }
978 cx.notify();
979 }
980 }
981
982 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
983 &self.collaborators
984 }
985
986 pub fn worktrees<'a>(
987 &'a self,
988 cx: &'a AppContext,
989 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
990 self.worktrees
991 .iter()
992 .filter_map(move |worktree| worktree.upgrade(cx))
993 }
994
995 pub fn visible_worktrees<'a>(
996 &'a self,
997 cx: &'a AppContext,
998 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
999 self.worktrees.iter().filter_map(|worktree| {
1000 worktree.upgrade(cx).and_then(|worktree| {
1001 if worktree.read(cx).is_visible() {
1002 Some(worktree)
1003 } else {
1004 None
1005 }
1006 })
1007 })
1008 }
1009
1010 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1011 self.visible_worktrees(cx)
1012 .map(|tree| tree.read(cx).root_name())
1013 }
1014
1015 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1016 self.worktrees
1017 .iter()
1018 .filter_map(|worktree| {
1019 let worktree = worktree.upgrade(&cx)?.read(cx);
1020 if worktree.is_visible() {
1021 Some(format!(
1022 "project-path-online:{}",
1023 worktree.as_local().unwrap().abs_path().to_string_lossy()
1024 ))
1025 } else {
1026 None
1027 }
1028 })
1029 .collect::<Vec<_>>()
1030 }
1031
1032 pub fn worktree_for_id(
1033 &self,
1034 id: WorktreeId,
1035 cx: &AppContext,
1036 ) -> Option<ModelHandle<Worktree>> {
1037 self.worktrees(cx)
1038 .find(|worktree| worktree.read(cx).id() == id)
1039 }
1040
1041 pub fn worktree_for_entry(
1042 &self,
1043 entry_id: ProjectEntryId,
1044 cx: &AppContext,
1045 ) -> Option<ModelHandle<Worktree>> {
1046 self.worktrees(cx)
1047 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1048 }
1049
1050 pub fn worktree_id_for_entry(
1051 &self,
1052 entry_id: ProjectEntryId,
1053 cx: &AppContext,
1054 ) -> Option<WorktreeId> {
1055 self.worktree_for_entry(entry_id, cx)
1056 .map(|worktree| worktree.read(cx).id())
1057 }
1058
1059 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1060 paths.iter().all(|path| self.contains_path(&path, cx))
1061 }
1062
1063 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1064 for worktree in self.worktrees(cx) {
1065 let worktree = worktree.read(cx).as_local();
1066 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1067 return true;
1068 }
1069 }
1070 false
1071 }
1072
1073 pub fn create_entry(
1074 &mut self,
1075 project_path: impl Into<ProjectPath>,
1076 is_directory: bool,
1077 cx: &mut ModelContext<Self>,
1078 ) -> Option<Task<Result<Entry>>> {
1079 let project_path = project_path.into();
1080 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1081 if self.is_local() {
1082 Some(worktree.update(cx, |worktree, cx| {
1083 worktree
1084 .as_local_mut()
1085 .unwrap()
1086 .create_entry(project_path.path, is_directory, cx)
1087 }))
1088 } else {
1089 let client = self.client.clone();
1090 let project_id = self.remote_id().unwrap();
1091 Some(cx.spawn_weak(|_, mut cx| async move {
1092 let response = client
1093 .request(proto::CreateProjectEntry {
1094 worktree_id: project_path.worktree_id.to_proto(),
1095 project_id,
1096 path: project_path.path.as_os_str().as_bytes().to_vec(),
1097 is_directory,
1098 })
1099 .await?;
1100 let entry = response
1101 .entry
1102 .ok_or_else(|| anyhow!("missing entry in response"))?;
1103 worktree
1104 .update(&mut cx, |worktree, cx| {
1105 worktree.as_remote().unwrap().insert_entry(
1106 entry,
1107 response.worktree_scan_id as usize,
1108 cx,
1109 )
1110 })
1111 .await
1112 }))
1113 }
1114 }
1115
1116 pub fn copy_entry(
1117 &mut self,
1118 entry_id: ProjectEntryId,
1119 new_path: impl Into<Arc<Path>>,
1120 cx: &mut ModelContext<Self>,
1121 ) -> Option<Task<Result<Entry>>> {
1122 let worktree = self.worktree_for_entry(entry_id, cx)?;
1123 let new_path = new_path.into();
1124 if self.is_local() {
1125 worktree.update(cx, |worktree, cx| {
1126 worktree
1127 .as_local_mut()
1128 .unwrap()
1129 .copy_entry(entry_id, new_path, cx)
1130 })
1131 } else {
1132 let client = self.client.clone();
1133 let project_id = self.remote_id().unwrap();
1134
1135 Some(cx.spawn_weak(|_, mut cx| async move {
1136 let response = client
1137 .request(proto::CopyProjectEntry {
1138 project_id,
1139 entry_id: entry_id.to_proto(),
1140 new_path: new_path.as_os_str().as_bytes().to_vec(),
1141 })
1142 .await?;
1143 let entry = response
1144 .entry
1145 .ok_or_else(|| anyhow!("missing entry in response"))?;
1146 worktree
1147 .update(&mut cx, |worktree, cx| {
1148 worktree.as_remote().unwrap().insert_entry(
1149 entry,
1150 response.worktree_scan_id as usize,
1151 cx,
1152 )
1153 })
1154 .await
1155 }))
1156 }
1157 }
1158
1159 pub fn rename_entry(
1160 &mut self,
1161 entry_id: ProjectEntryId,
1162 new_path: impl Into<Arc<Path>>,
1163 cx: &mut ModelContext<Self>,
1164 ) -> Option<Task<Result<Entry>>> {
1165 let worktree = self.worktree_for_entry(entry_id, cx)?;
1166 let new_path = new_path.into();
1167 if self.is_local() {
1168 worktree.update(cx, |worktree, cx| {
1169 worktree
1170 .as_local_mut()
1171 .unwrap()
1172 .rename_entry(entry_id, new_path, cx)
1173 })
1174 } else {
1175 let client = self.client.clone();
1176 let project_id = self.remote_id().unwrap();
1177
1178 Some(cx.spawn_weak(|_, mut cx| async move {
1179 let response = client
1180 .request(proto::RenameProjectEntry {
1181 project_id,
1182 entry_id: entry_id.to_proto(),
1183 new_path: new_path.as_os_str().as_bytes().to_vec(),
1184 })
1185 .await?;
1186 let entry = response
1187 .entry
1188 .ok_or_else(|| anyhow!("missing entry in response"))?;
1189 worktree
1190 .update(&mut cx, |worktree, cx| {
1191 worktree.as_remote().unwrap().insert_entry(
1192 entry,
1193 response.worktree_scan_id as usize,
1194 cx,
1195 )
1196 })
1197 .await
1198 }))
1199 }
1200 }
1201
1202 pub fn delete_entry(
1203 &mut self,
1204 entry_id: ProjectEntryId,
1205 cx: &mut ModelContext<Self>,
1206 ) -> Option<Task<Result<()>>> {
1207 let worktree = self.worktree_for_entry(entry_id, cx)?;
1208 if self.is_local() {
1209 worktree.update(cx, |worktree, cx| {
1210 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1211 })
1212 } else {
1213 let client = self.client.clone();
1214 let project_id = self.remote_id().unwrap();
1215 Some(cx.spawn_weak(|_, mut cx| async move {
1216 let response = client
1217 .request(proto::DeleteProjectEntry {
1218 project_id,
1219 entry_id: entry_id.to_proto(),
1220 })
1221 .await?;
1222 worktree
1223 .update(&mut cx, move |worktree, cx| {
1224 worktree.as_remote().unwrap().delete_entry(
1225 entry_id,
1226 response.worktree_scan_id as usize,
1227 cx,
1228 )
1229 })
1230 .await
1231 }))
1232 }
1233 }
1234
1235 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1236 let project_id;
1237 if let ProjectClientState::Local {
1238 remote_id_rx,
1239 is_shared,
1240 ..
1241 } = &mut self.client_state
1242 {
1243 if *is_shared {
1244 return Task::ready(Ok(()));
1245 }
1246 *is_shared = true;
1247 if let Some(id) = *remote_id_rx.borrow() {
1248 project_id = id;
1249 } else {
1250 return Task::ready(Err(anyhow!("project hasn't been registered")));
1251 }
1252 } else {
1253 return Task::ready(Err(anyhow!("can't share a remote project")));
1254 };
1255
1256 for open_buffer in self.opened_buffers.values_mut() {
1257 match open_buffer {
1258 OpenBuffer::Strong(_) => {}
1259 OpenBuffer::Weak(buffer) => {
1260 if let Some(buffer) = buffer.upgrade(cx) {
1261 *open_buffer = OpenBuffer::Strong(buffer);
1262 }
1263 }
1264 OpenBuffer::Loading(_) => unreachable!(),
1265 }
1266 }
1267
1268 for worktree_handle in self.worktrees.iter_mut() {
1269 match worktree_handle {
1270 WorktreeHandle::Strong(_) => {}
1271 WorktreeHandle::Weak(worktree) => {
1272 if let Some(worktree) = worktree.upgrade(cx) {
1273 *worktree_handle = WorktreeHandle::Strong(worktree);
1274 }
1275 }
1276 }
1277 }
1278
1279 let mut tasks = Vec::new();
1280 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1281 worktree.update(cx, |worktree, cx| {
1282 let worktree = worktree.as_local_mut().unwrap();
1283 tasks.push(worktree.share(project_id, cx));
1284 });
1285 }
1286
1287 for (server_id, status) in &self.language_server_statuses {
1288 self.client
1289 .send(proto::StartLanguageServer {
1290 project_id,
1291 server: Some(proto::LanguageServer {
1292 id: *server_id as u64,
1293 name: status.name.clone(),
1294 }),
1295 })
1296 .log_err();
1297 }
1298
1299 cx.spawn(|this, mut cx| async move {
1300 for task in tasks {
1301 task.await?;
1302 }
1303 this.update(&mut cx, |_, cx| cx.notify());
1304 Ok(())
1305 })
1306 }
1307
1308 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1309 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1310 if !*is_shared {
1311 return;
1312 }
1313
1314 *is_shared = false;
1315 self.collaborators.clear();
1316 self.shared_buffers.clear();
1317 for worktree_handle in self.worktrees.iter_mut() {
1318 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1319 let is_visible = worktree.update(cx, |worktree, _| {
1320 worktree.as_local_mut().unwrap().unshare();
1321 worktree.is_visible()
1322 });
1323 if !is_visible {
1324 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1325 }
1326 }
1327 }
1328
1329 for open_buffer in self.opened_buffers.values_mut() {
1330 match open_buffer {
1331 OpenBuffer::Strong(buffer) => {
1332 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1333 }
1334 _ => {}
1335 }
1336 }
1337
1338 cx.notify();
1339 } else {
1340 log::error!("attempted to unshare a remote project");
1341 }
1342 }
1343
1344 pub fn respond_to_join_request(
1345 &mut self,
1346 requester_id: u64,
1347 allow: bool,
1348 cx: &mut ModelContext<Self>,
1349 ) {
1350 if let Some(project_id) = self.remote_id() {
1351 let share = self.share(cx);
1352 let client = self.client.clone();
1353 cx.foreground()
1354 .spawn(async move {
1355 share.await?;
1356 client.send(proto::RespondToJoinProjectRequest {
1357 requester_id,
1358 project_id,
1359 allow,
1360 })
1361 })
1362 .detach_and_log_err(cx);
1363 }
1364 }
1365
1366 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1367 if let ProjectClientState::Remote {
1368 sharing_has_stopped,
1369 ..
1370 } = &mut self.client_state
1371 {
1372 *sharing_has_stopped = true;
1373 self.collaborators.clear();
1374 for worktree in &self.worktrees {
1375 if let Some(worktree) = worktree.upgrade(cx) {
1376 worktree.update(cx, |worktree, _| {
1377 if let Some(worktree) = worktree.as_remote_mut() {
1378 worktree.disconnected_from_host();
1379 }
1380 });
1381 }
1382 }
1383 cx.notify();
1384 }
1385 }
1386
1387 pub fn is_read_only(&self) -> bool {
1388 match &self.client_state {
1389 ProjectClientState::Local { .. } => false,
1390 ProjectClientState::Remote {
1391 sharing_has_stopped,
1392 ..
1393 } => *sharing_has_stopped,
1394 }
1395 }
1396
1397 pub fn is_local(&self) -> bool {
1398 match &self.client_state {
1399 ProjectClientState::Local { .. } => true,
1400 ProjectClientState::Remote { .. } => false,
1401 }
1402 }
1403
1404 pub fn is_remote(&self) -> bool {
1405 !self.is_local()
1406 }
1407
1408 pub fn create_buffer(
1409 &mut self,
1410 text: &str,
1411 language: Option<Arc<Language>>,
1412 cx: &mut ModelContext<Self>,
1413 ) -> Result<ModelHandle<Buffer>> {
1414 if self.is_remote() {
1415 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1416 }
1417
1418 let buffer = cx.add_model(|cx| {
1419 Buffer::new(self.replica_id(), text, cx)
1420 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1421 });
1422 self.register_buffer(&buffer, cx)?;
1423 Ok(buffer)
1424 }
1425
1426 pub fn open_path(
1427 &mut self,
1428 path: impl Into<ProjectPath>,
1429 cx: &mut ModelContext<Self>,
1430 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1431 let task = self.open_buffer(path, cx);
1432 cx.spawn_weak(|_, cx| async move {
1433 let buffer = task.await?;
1434 let project_entry_id = buffer
1435 .read_with(&cx, |buffer, cx| {
1436 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1437 })
1438 .ok_or_else(|| anyhow!("no project entry"))?;
1439 Ok((project_entry_id, buffer.into()))
1440 })
1441 }
1442
1443 pub fn open_local_buffer(
1444 &mut self,
1445 abs_path: impl AsRef<Path>,
1446 cx: &mut ModelContext<Self>,
1447 ) -> Task<Result<ModelHandle<Buffer>>> {
1448 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1449 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1450 } else {
1451 Task::ready(Err(anyhow!("no such path")))
1452 }
1453 }
1454
1455 pub fn open_buffer(
1456 &mut self,
1457 path: impl Into<ProjectPath>,
1458 cx: &mut ModelContext<Self>,
1459 ) -> Task<Result<ModelHandle<Buffer>>> {
1460 let project_path = path.into();
1461 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1462 worktree
1463 } else {
1464 return Task::ready(Err(anyhow!("no such worktree")));
1465 };
1466
1467 // If there is already a buffer for the given path, then return it.
1468 let existing_buffer = self.get_open_buffer(&project_path, cx);
1469 if let Some(existing_buffer) = existing_buffer {
1470 return Task::ready(Ok(existing_buffer));
1471 }
1472
1473 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1474 // If the given path is already being loaded, then wait for that existing
1475 // task to complete and return the same buffer.
1476 hash_map::Entry::Occupied(e) => e.get().clone(),
1477
1478 // Otherwise, record the fact that this path is now being loaded.
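            // The watch channel starts out holding `None`; once the load finishes, the
            // result is published into it below, so every caller waiting on this path
            // observes the same outcome.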
1479 hash_map::Entry::Vacant(entry) => {
1480 let (mut tx, rx) = postage::watch::channel();
1481 entry.insert(rx.clone());
1482
1483 let load_buffer = if worktree.read(cx).is_local() {
1484 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1485 } else {
1486 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1487 };
1488
1489 cx.spawn(move |this, mut cx| async move {
1490 let load_result = load_buffer.await;
1491 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1492 // Record the fact that the buffer is no longer loading.
1493 this.loading_buffers.remove(&project_path);
1494 let buffer = load_result.map_err(Arc::new)?;
1495 Ok(buffer)
1496 }));
1497 })
1498 .detach();
1499 rx
1500 }
1501 };
1502
1503 cx.foreground().spawn(async move {
1504 loop {
1505 if let Some(result) = loading_watch.borrow().as_ref() {
1506 match result {
1507 Ok(buffer) => return Ok(buffer.clone()),
1508 Err(error) => return Err(anyhow!("{}", error)),
1509 }
1510 }
1511 loading_watch.next().await;
1512 }
1513 })
1514 }
1515
1516 fn open_local_buffer_internal(
1517 &mut self,
1518 path: &Arc<Path>,
1519 worktree: &ModelHandle<Worktree>,
1520 cx: &mut ModelContext<Self>,
1521 ) -> Task<Result<ModelHandle<Buffer>>> {
1522 let load_buffer = worktree.update(cx, |worktree, cx| {
1523 let worktree = worktree.as_local_mut().unwrap();
1524 worktree.load_buffer(path, cx)
1525 });
1526 cx.spawn(|this, mut cx| async move {
1527 let buffer = load_buffer.await?;
1528 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1529 Ok(buffer)
1530 })
1531 }
1532
1533 fn open_remote_buffer_internal(
1534 &mut self,
1535 path: &Arc<Path>,
1536 worktree: &ModelHandle<Worktree>,
1537 cx: &mut ModelContext<Self>,
1538 ) -> Task<Result<ModelHandle<Buffer>>> {
1539 let rpc = self.client.clone();
1540 let project_id = self.remote_id().unwrap();
1541 let remote_worktree_id = worktree.read(cx).id();
1542 let path = path.clone();
1543 let path_string = path.to_string_lossy().to_string();
1544 cx.spawn(|this, mut cx| async move {
1545 let response = rpc
1546 .request(proto::OpenBufferByPath {
1547 project_id,
1548 worktree_id: remote_worktree_id.to_proto(),
1549 path: path_string,
1550 })
1551 .await?;
1552 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1553 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1554 .await
1555 })
1556 }
1557
1558 fn open_local_buffer_via_lsp(
1559 &mut self,
1560 abs_path: lsp::Url,
1561 language_server_id: usize,
1562 language_server_name: LanguageServerName,
1563 cx: &mut ModelContext<Self>,
1564 ) -> Task<Result<ModelHandle<Buffer>>> {
1565 cx.spawn(|this, mut cx| async move {
1566 let abs_path = abs_path
1567 .to_file_path()
1568 .map_err(|_| anyhow!("can't convert URI to path"))?;
1569 let (worktree, relative_path) = if let Some(result) =
1570 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1571 {
1572 result
1573 } else {
1574 let worktree = this
1575 .update(&mut cx, |this, cx| {
1576 this.create_local_worktree(&abs_path, false, cx)
1577 })
1578 .await?;
1579 this.update(&mut cx, |this, cx| {
1580 this.language_server_ids.insert(
1581 (worktree.read(cx).id(), language_server_name),
1582 language_server_id,
1583 );
1584 });
1585 (worktree, PathBuf::new())
1586 };
1587
1588 let project_path = ProjectPath {
1589 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1590 path: relative_path.into(),
1591 };
1592 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1593 .await
1594 })
1595 }
1596
1597 pub fn open_buffer_by_id(
1598 &mut self,
1599 id: u64,
1600 cx: &mut ModelContext<Self>,
1601 ) -> Task<Result<ModelHandle<Buffer>>> {
1602 if let Some(buffer) = self.buffer_for_id(id, cx) {
1603 Task::ready(Ok(buffer))
1604 } else if self.is_local() {
1605 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1606 } else if let Some(project_id) = self.remote_id() {
1607 let request = self
1608 .client
1609 .request(proto::OpenBufferById { project_id, id });
1610 cx.spawn(|this, mut cx| async move {
1611 let buffer = request
1612 .await?
1613 .buffer
1614 .ok_or_else(|| anyhow!("invalid buffer"))?;
1615 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1616 .await
1617 })
1618 } else {
1619 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1620 }
1621 }
1622
1623 pub fn save_buffer_as(
1624 &mut self,
1625 buffer: ModelHandle<Buffer>,
1626 abs_path: PathBuf,
1627 cx: &mut ModelContext<Project>,
1628 ) -> Task<Result<()>> {
1629 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1630 let old_path =
1631 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1632 cx.spawn(|this, mut cx| async move {
1633 if let Some(old_path) = old_path {
1634 this.update(&mut cx, |this, cx| {
1635 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1636 });
1637 }
1638 let (worktree, path) = worktree_task.await?;
1639 worktree
1640 .update(&mut cx, |worktree, cx| {
1641 worktree
1642 .as_local_mut()
1643 .unwrap()
1644 .save_buffer_as(buffer.clone(), path, cx)
1645 })
1646 .await?;
1647 this.update(&mut cx, |this, cx| {
1648 this.assign_language_to_buffer(&buffer, cx);
1649 this.register_buffer_with_language_server(&buffer, cx);
1650 });
1651 Ok(())
1652 })
1653 }
1654
1655 pub fn get_open_buffer(
1656 &mut self,
1657 path: &ProjectPath,
1658 cx: &mut ModelContext<Self>,
1659 ) -> Option<ModelHandle<Buffer>> {
1660 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1661 self.opened_buffers.values().find_map(|buffer| {
1662 let buffer = buffer.upgrade(cx)?;
1663 let file = File::from_dyn(buffer.read(cx).file())?;
1664 if file.worktree == worktree && file.path() == &path.path {
1665 Some(buffer)
1666 } else {
1667 None
1668 }
1669 })
1670 }
1671
1672 fn register_buffer(
1673 &mut self,
1674 buffer: &ModelHandle<Buffer>,
1675 cx: &mut ModelContext<Self>,
1676 ) -> Result<()> {
1677 let remote_id = buffer.read(cx).remote_id();
1678 let open_buffer = if self.is_remote() || self.is_shared() {
1679 OpenBuffer::Strong(buffer.clone())
1680 } else {
1681 OpenBuffer::Weak(buffer.downgrade())
1682 };
1683
1684 match self.opened_buffers.insert(remote_id, open_buffer) {
1685 None => {}
1686 Some(OpenBuffer::Loading(operations)) => {
1687 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1688 }
1689 Some(OpenBuffer::Weak(existing_handle)) => {
1690 if existing_handle.upgrade(cx).is_some() {
1691 Err(anyhow!(
1692 "already registered buffer with remote id {}",
1693 remote_id
1694 ))?
1695 }
1696 }
1697 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1698 "already registered buffer with remote id {}",
1699 remote_id
1700 ))?,
1701 }
1702 cx.subscribe(buffer, |this, buffer, event, cx| {
1703 this.on_buffer_event(buffer, event, cx);
1704 })
1705 .detach();
1706
1707 self.assign_language_to_buffer(buffer, cx);
1708 self.register_buffer_with_language_server(buffer, cx);
1709 cx.observe_release(buffer, |this, buffer, cx| {
1710 if let Some(file) = File::from_dyn(buffer.file()) {
1711 if file.is_local() {
1712 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1713 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1714 server
1715 .notify::<lsp::notification::DidCloseTextDocument>(
1716 lsp::DidCloseTextDocumentParams {
1717 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1718 },
1719 )
1720 .log_err();
1721 }
1722 }
1723 }
1724 })
1725 .detach();
1726
1727 Ok(())
1728 }
1729
1730 fn register_buffer_with_language_server(
1731 &mut self,
1732 buffer_handle: &ModelHandle<Buffer>,
1733 cx: &mut ModelContext<Self>,
1734 ) {
1735 let buffer = buffer_handle.read(cx);
1736 let buffer_id = buffer.remote_id();
1737 if let Some(file) = File::from_dyn(buffer.file()) {
1738 if file.is_local() {
1739 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1740 let initial_snapshot = buffer.text_snapshot();
1741
1742 let mut language_server = None;
1743 let mut language_id = None;
1744 if let Some(language) = buffer.language() {
1745 let worktree_id = file.worktree_id(cx);
1746 if let Some(adapter) = language.lsp_adapter() {
1747 language_id = adapter.id_for_language(language.name().as_ref());
1748 language_server = self
1749 .language_server_ids
1750 .get(&(worktree_id, adapter.name()))
1751 .and_then(|id| self.language_servers.get(&id))
1752 .and_then(|server_state| {
1753 if let LanguageServerState::Running { server, .. } = server_state {
1754 Some(server.clone())
1755 } else {
1756 None
1757 }
1758 });
1759 }
1760 }
1761
1762 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1763 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1764 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1765 .log_err();
1766 }
1767 }
1768
1769 if let Some(server) = language_server {
1770 server
1771 .notify::<lsp::notification::DidOpenTextDocument>(
1772 lsp::DidOpenTextDocumentParams {
1773 text_document: lsp::TextDocumentItem::new(
1774 uri,
1775 language_id.unwrap_or_default(),
1776 0,
1777 initial_snapshot.text(),
1778 ),
1779 }
1780 .clone(),
1781 )
1782 .log_err();
1783 buffer_handle.update(cx, |buffer, cx| {
1784 buffer.set_completion_triggers(
1785 server
1786 .capabilities()
1787 .completion_provider
1788 .as_ref()
1789 .and_then(|provider| provider.trigger_characters.clone())
1790 .unwrap_or(Vec::new()),
1791 cx,
1792 )
1793 });
1794 self.buffer_snapshots
1795 .insert(buffer_id, vec![(0, initial_snapshot)]);
1796 }
1797 }
1798 }
1799 }
1800
1801 fn unregister_buffer_from_language_server(
1802 &mut self,
1803 buffer: &ModelHandle<Buffer>,
1804 old_path: PathBuf,
1805 cx: &mut ModelContext<Self>,
1806 ) {
1807 buffer.update(cx, |buffer, cx| {
1808 buffer.update_diagnostics(Default::default(), cx);
1809 self.buffer_snapshots.remove(&buffer.remote_id());
1810 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1811 language_server
1812 .notify::<lsp::notification::DidCloseTextDocument>(
1813 lsp::DidCloseTextDocumentParams {
1814 text_document: lsp::TextDocumentIdentifier::new(
1815 lsp::Url::from_file_path(old_path).unwrap(),
1816 ),
1817 },
1818 )
1819 .log_err();
1820 }
1821 });
1822 }
1823
1824 fn on_buffer_event(
1825 &mut self,
1826 buffer: ModelHandle<Buffer>,
1827 event: &BufferEvent,
1828 cx: &mut ModelContext<Self>,
1829 ) -> Option<()> {
1830 match event {
1831 BufferEvent::Operation(operation) => {
1832 if let Some(project_id) = self.shared_remote_id() {
1833 let request = self.client.request(proto::UpdateBuffer {
1834 project_id,
1835 buffer_id: buffer.read(cx).remote_id(),
1836 operations: vec![language::proto::serialize_operation(&operation)],
1837 });
1838 cx.background().spawn(request).detach_and_log_err(cx);
1839 } else if let Some(project_id) = self.remote_id() {
1840 let _ = self
1841 .client
1842 .send(proto::RegisterProjectActivity { project_id });
1843 }
1844 }
1845 BufferEvent::Edited { .. } => {
1846 let language_server = self
1847 .language_server_for_buffer(buffer.read(cx), cx)
1848 .map(|(_, server)| server.clone())?;
1849 let buffer = buffer.read(cx);
1850 let file = File::from_dyn(buffer.file())?;
1851 let abs_path = file.as_local()?.abs_path(cx);
1852 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1853 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1854 let (version, prev_snapshot) = buffer_snapshots.last()?;
1855 let next_snapshot = buffer.text_snapshot();
1856 let next_version = version + 1;
1857
1858 let content_changes = buffer
1859 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1860 .map(|edit| {
1861 let edit_start = edit.new.start.0;
1862 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1863 let new_text = next_snapshot
1864 .text_for_range(edit.new.start.1..edit.new.end.1)
1865 .collect();
1866 lsp::TextDocumentContentChangeEvent {
1867 range: Some(lsp::Range::new(
1868 point_to_lsp(edit_start),
1869 point_to_lsp(edit_end),
1870 )),
1871 range_length: None,
1872 text: new_text,
1873 }
1874 })
1875 .collect();
1876
1877 buffer_snapshots.push((next_version, next_snapshot));
1878
1879 language_server
1880 .notify::<lsp::notification::DidChangeTextDocument>(
1881 lsp::DidChangeTextDocumentParams {
1882 text_document: lsp::VersionedTextDocumentIdentifier::new(
1883 uri,
1884 next_version,
1885 ),
1886 content_changes,
1887 },
1888 )
1889 .log_err();
1890 }
1891 BufferEvent::Saved => {
1892 let file = File::from_dyn(buffer.read(cx).file())?;
1893 let worktree_id = file.worktree_id(cx);
1894 let abs_path = file.as_local()?.abs_path(cx);
1895 let text_document = lsp::TextDocumentIdentifier {
1896 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1897 };
1898
1899 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1900 server
1901 .notify::<lsp::notification::DidSaveTextDocument>(
1902 lsp::DidSaveTextDocumentParams {
1903 text_document: text_document.clone(),
1904 text: None,
1905 },
1906 )
1907 .log_err();
1908 }
1909
1910 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1911 // that don't support a disk-based progress token.
1912 let (lsp_adapter, language_server) =
1913 self.language_server_for_buffer(buffer.read(cx), cx)?;
1914 if lsp_adapter
1915 .disk_based_diagnostics_progress_token()
1916 .is_none()
1917 {
1918 let server_id = language_server.server_id();
1919 self.disk_based_diagnostics_finished(server_id, cx);
1920 self.broadcast_language_server_update(
1921 server_id,
1922 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1923 proto::LspDiskBasedDiagnosticsUpdated {},
1924 ),
1925 );
1926 }
1927 }
1928 _ => {}
1929 }
1930
1931 None
1932 }
1933
1934 fn language_servers_for_worktree(
1935 &self,
1936 worktree_id: WorktreeId,
1937 ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
1938 self.language_server_ids
1939 .iter()
1940 .filter_map(move |((language_server_worktree_id, _), id)| {
1941 if *language_server_worktree_id == worktree_id {
1942 if let Some(LanguageServerState::Running { adapter, server }) =
1943                        self.language_servers.get(id)
1944 {
1945 return Some((adapter, server));
1946 }
1947 }
1948 None
1949 })
1950 }
1951
1952 fn assign_language_to_buffer(
1953 &mut self,
1954 buffer: &ModelHandle<Buffer>,
1955 cx: &mut ModelContext<Self>,
1956 ) -> Option<()> {
1957 // If the buffer has a language, set it and start the language server if we haven't already.
1958 let full_path = buffer.read(cx).file()?.full_path(cx);
1959 let language = self.languages.select_language(&full_path)?;
1960 buffer.update(cx, |buffer, cx| {
1961 buffer.set_language(Some(language.clone()), cx);
1962 });
1963
1964 let file = File::from_dyn(buffer.read(cx).file())?;
1965 let worktree = file.worktree.read(cx).as_local()?;
1966 let worktree_id = worktree.id();
1967 let worktree_abs_path = worktree.abs_path().clone();
1968 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1969
1970 None
1971 }
1972
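    // Starts a language server for `language` in this worktree if the settings allow it and
    // the language has an adapter. The spawned startup task initializes the server, wires up
    // handlers for diagnostics, progress, configuration, and workspace-edit messages, and then
    // registers every matching open buffer with the server.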
1973 fn start_language_server(
1974 &mut self,
1975 worktree_id: WorktreeId,
1976 worktree_path: Arc<Path>,
1977 language: Arc<Language>,
1978 cx: &mut ModelContext<Self>,
1979 ) {
1980 if !cx
1981 .global::<Settings>()
1982 .enable_language_server(Some(&language.name()))
1983 {
1984 return;
1985 }
1986
1987 let adapter = if let Some(adapter) = language.lsp_adapter() {
1988 adapter
1989 } else {
1990 return;
1991 };
1992 let key = (worktree_id, adapter.name());
1993
1994 self.language_server_ids
1995 .entry(key.clone())
1996 .or_insert_with(|| {
1997 let server_id = post_inc(&mut self.next_language_server_id);
1998 let language_server = self.languages.start_language_server(
1999 server_id,
2000 language.clone(),
2001 worktree_path,
2002 self.client.http_client(),
2003 cx,
2004 );
2005 self.language_servers.insert(
2006 server_id,
2007 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2008 let language_server = language_server?.await.log_err()?;
2009 let language_server = language_server
2010 .initialize(adapter.initialization_options())
2011 .await
2012 .log_err()?;
2013 let this = this.upgrade(&cx)?;
2014 let disk_based_diagnostics_progress_token =
2015 adapter.disk_based_diagnostics_progress_token();
2016
2017 language_server
2018 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2019 let this = this.downgrade();
2020 let adapter = adapter.clone();
2021 move |params, mut cx| {
2022 if let Some(this) = this.upgrade(&cx) {
2023 this.update(&mut cx, |this, cx| {
2024 this.on_lsp_diagnostics_published(
2025 server_id, params, &adapter, cx,
2026 );
2027 });
2028 }
2029 }
2030 })
2031 .detach();
2032
2033 language_server
2034 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2035 let settings = this.read_with(&cx, |this, _| {
2036 this.language_server_settings.clone()
2037 });
2038 move |params, _| {
2039 let settings = settings.lock().clone();
2040 async move {
2041 Ok(params
2042 .items
2043 .into_iter()
2044 .map(|item| {
2045 if let Some(section) = &item.section {
2046 settings
2047 .get(section)
2048 .cloned()
2049 .unwrap_or(serde_json::Value::Null)
2050 } else {
2051 settings.clone()
2052 }
2053 })
2054 .collect())
2055 }
2056 }
2057 })
2058 .detach();
2059
2060                        // Even though we don't handle these requests, respond to them to avoid
2061                        // stalling language servers like `gopls`, which wait for a response to
2062                        // these requests when initializing.
2063 language_server
2064 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2065 let this = this.downgrade();
2066 move |params, mut cx| async move {
2067 if let Some(this) = this.upgrade(&cx) {
2068 this.update(&mut cx, |this, _| {
2069 if let Some(status) =
2070 this.language_server_statuses.get_mut(&server_id)
2071 {
2072 if let lsp::NumberOrString::String(token) =
2073 params.token
2074 {
2075 status.progress_tokens.insert(token);
2076 }
2077 }
2078 });
2079 }
2080 Ok(())
2081 }
2082 })
2083 .detach();
2084 language_server
2085 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2086 Ok(())
2087 })
2088 .detach();
2089
2090 language_server
2091 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2092 let this = this.downgrade();
2093 let adapter = adapter.clone();
2094 let language_server = language_server.clone();
2095 move |params, cx| {
2096 Self::on_lsp_workspace_edit(
2097 this,
2098 params,
2099 server_id,
2100 adapter.clone(),
2101 language_server.clone(),
2102 cx,
2103 )
2104 }
2105 })
2106 .detach();
2107
2108 language_server
2109 .on_notification::<lsp::notification::Progress, _>({
2110 let this = this.downgrade();
2111 move |params, mut cx| {
2112 if let Some(this) = this.upgrade(&cx) {
2113 this.update(&mut cx, |this, cx| {
2114 this.on_lsp_progress(
2115 params,
2116 server_id,
2117 disk_based_diagnostics_progress_token,
2118 cx,
2119 );
2120 });
2121 }
2122 }
2123 })
2124 .detach();
2125
2126 this.update(&mut cx, |this, cx| {
2127                            // If the language server registered for this key no longer matches this
2128                            // server's id, don't store the server; dropping it here kills its process.
2129 if this
2130 .language_server_ids
2131 .get(&key)
2132 .map(|id| id != &server_id)
2133 .unwrap_or(false)
2134 {
2135 return None;
2136 }
2137
2138                            // Store the Running variant of LanguageServerState in the language_servers
2139                            // collection, indicating that the server has finished starting and is ready.
2140 this.language_servers.insert(
2141 server_id,
2142 LanguageServerState::Running {
2143 adapter: adapter.clone(),
2144 server: language_server.clone(),
2145 },
2146 );
2147 this.language_server_statuses.insert(
2148 server_id,
2149 LanguageServerStatus {
2150 name: language_server.name().to_string(),
2151 pending_work: Default::default(),
2152 has_pending_diagnostic_updates: false,
2153 progress_tokens: Default::default(),
2154 },
2155 );
2156 language_server
2157 .notify::<lsp::notification::DidChangeConfiguration>(
2158 lsp::DidChangeConfigurationParams {
2159 settings: this.language_server_settings.lock().clone(),
2160 },
2161 )
2162 .ok();
2163
2164 if let Some(project_id) = this.shared_remote_id() {
2165 this.client
2166 .send(proto::StartLanguageServer {
2167 project_id,
2168 server: Some(proto::LanguageServer {
2169 id: server_id as u64,
2170 name: language_server.name().to_string(),
2171 }),
2172 })
2173 .log_err();
2174 }
2175
2176 // Tell the language server about every open buffer in the worktree that matches the language.
2177 for buffer in this.opened_buffers.values() {
2178 if let Some(buffer_handle) = buffer.upgrade(cx) {
2179 let buffer = buffer_handle.read(cx);
2180 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2181 file
2182 } else {
2183 continue;
2184 };
2185 let language = if let Some(language) = buffer.language() {
2186 language
2187 } else {
2188 continue;
2189 };
2190 if file.worktree.read(cx).id() != key.0
2191 || language.lsp_adapter().map(|a| a.name())
2192 != Some(key.1.clone())
2193 {
2194 continue;
2195 }
2196
2197 let file = file.as_local()?;
2198 let versions = this
2199 .buffer_snapshots
2200 .entry(buffer.remote_id())
2201 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2202 let (version, initial_snapshot) = versions.last().unwrap();
2203 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2204 let language_id =
2205 adapter.id_for_language(language.name().as_ref());
2206 language_server
2207 .notify::<lsp::notification::DidOpenTextDocument>(
2208 lsp::DidOpenTextDocumentParams {
2209 text_document: lsp::TextDocumentItem::new(
2210 uri,
2211 language_id.unwrap_or_default(),
2212 *version,
2213 initial_snapshot.text(),
2214 ),
2215 },
2216 )
2217 .log_err()?;
2218 buffer_handle.update(cx, |buffer, cx| {
2219 buffer.set_completion_triggers(
2220 language_server
2221 .capabilities()
2222 .completion_provider
2223 .as_ref()
2224 .and_then(|provider| {
2225 provider.trigger_characters.clone()
2226 })
2227 .unwrap_or(Vec::new()),
2228 cx,
2229 )
2230 });
2231 }
2232 }
2233
2234 cx.notify();
2235 Some(language_server)
2236 })
2237 })),
2238 );
2239
2240 server_id
2241 });
2242 }
2243
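    // Removes the server registered for this worktree and adapter and shuts it down in the
    // background, waiting for startup to finish first if the server was still starting.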
2244 fn stop_language_server(
2245 &mut self,
2246 worktree_id: WorktreeId,
2247 adapter_name: LanguageServerName,
2248 cx: &mut ModelContext<Self>,
2249 ) -> Task<()> {
2250 let key = (worktree_id, adapter_name);
2251 if let Some(server_id) = self.language_server_ids.remove(&key) {
2252 let server_state = self.language_servers.remove(&server_id);
2253 cx.spawn_weak(|this, mut cx| async move {
2254 let server = match server_state {
2255 Some(LanguageServerState::Starting(started_language_server)) => {
2256 started_language_server.await
2257 }
2258 Some(LanguageServerState::Running { server, .. }) => Some(server),
2259 None => None,
2260 };
2261
2262 if let Some(server) = server {
2263 if let Some(shutdown) = server.shutdown() {
2264 shutdown.await;
2265 }
2266 }
2267
2268 if let Some(this) = this.upgrade(&cx) {
2269 this.update(&mut cx, |this, cx| {
2270 this.language_server_statuses.remove(&server_id);
2271 cx.notify();
2272 });
2273 }
2274 })
2275 } else {
2276 Task::ready(())
2277 }
2278 }
2279
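    // Restarts the language servers backing the given buffers by stopping and then starting
    // the server for each affected worktree and language.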
2280 pub fn restart_language_servers_for_buffers(
2281 &mut self,
2282 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2283 cx: &mut ModelContext<Self>,
2284 ) -> Option<()> {
2285 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2286 .into_iter()
2287 .filter_map(|buffer| {
2288 let file = File::from_dyn(buffer.read(cx).file())?;
2289 let worktree = file.worktree.read(cx).as_local()?;
2290 let worktree_id = worktree.id();
2291 let worktree_abs_path = worktree.abs_path().clone();
2292 let full_path = file.full_path(cx);
2293 Some((worktree_id, worktree_abs_path, full_path))
2294 })
2295 .collect();
2296 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2297 let language = self.languages.select_language(&full_path)?;
2298 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2299 }
2300
2301 None
2302 }
2303
2304 fn restart_language_server(
2305 &mut self,
2306 worktree_id: WorktreeId,
2307 worktree_path: Arc<Path>,
2308 language: Arc<Language>,
2309 cx: &mut ModelContext<Self>,
2310 ) {
2311 let adapter = if let Some(adapter) = language.lsp_adapter() {
2312 adapter
2313 } else {
2314 return;
2315 };
2316
2317 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2318 cx.spawn_weak(|this, mut cx| async move {
2319 stop.await;
2320 if let Some(this) = this.upgrade(&cx) {
2321 this.update(&mut cx, |this, cx| {
2322 this.start_language_server(worktree_id, worktree_path, language, cx);
2323 });
2324 }
2325 })
2326 .detach();
2327 }
2328
2329 fn on_lsp_diagnostics_published(
2330 &mut self,
2331 server_id: usize,
2332 mut params: lsp::PublishDiagnosticsParams,
2333 adapter: &Arc<dyn LspAdapter>,
2334 cx: &mut ModelContext<Self>,
2335 ) {
2336 adapter.process_diagnostics(&mut params);
2337 self.update_diagnostics(
2338 server_id,
2339 params,
2340 adapter.disk_based_diagnostic_sources(),
2341 cx,
2342 )
2343 .log_err();
2344 }
2345
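    // Handles `$/progress` notifications. Only string tokens previously registered through
    // `window/workDoneProgress/create` are honored; the adapter's disk-based diagnostics token
    // is turned into started/finished events, while other tokens become work progress updates
    // that are also broadcast to collaborators.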
2346 fn on_lsp_progress(
2347 &mut self,
2348 progress: lsp::ProgressParams,
2349 server_id: usize,
2350 disk_based_diagnostics_progress_token: Option<&str>,
2351 cx: &mut ModelContext<Self>,
2352 ) {
2353 let token = match progress.token {
2354 lsp::NumberOrString::String(token) => token,
2355 lsp::NumberOrString::Number(token) => {
2356 log::info!("skipping numeric progress token {}", token);
2357 return;
2358 }
2359 };
2360 let progress = match progress.value {
2361 lsp::ProgressParamsValue::WorkDone(value) => value,
2362 };
2363 let language_server_status =
2364 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2365 status
2366 } else {
2367 return;
2368 };
2369
2370 if !language_server_status.progress_tokens.contains(&token) {
2371 return;
2372 }
2373
2374 match progress {
2375 lsp::WorkDoneProgress::Begin(report) => {
2376 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2377 language_server_status.has_pending_diagnostic_updates = true;
2378 self.disk_based_diagnostics_started(server_id, cx);
2379 self.broadcast_language_server_update(
2380 server_id,
2381 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2382 proto::LspDiskBasedDiagnosticsUpdating {},
2383 ),
2384 );
2385 } else {
2386 self.on_lsp_work_start(
2387 server_id,
2388 token.clone(),
2389 LanguageServerProgress {
2390 message: report.message.clone(),
2391 percentage: report.percentage.map(|p| p as usize),
2392 last_update_at: Instant::now(),
2393 },
2394 cx,
2395 );
2396 self.broadcast_language_server_update(
2397 server_id,
2398 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2399 token,
2400 message: report.message,
2401 percentage: report.percentage.map(|p| p as u32),
2402 }),
2403 );
2404 }
2405 }
2406 lsp::WorkDoneProgress::Report(report) => {
2407 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2408 self.on_lsp_work_progress(
2409 server_id,
2410 token.clone(),
2411 LanguageServerProgress {
2412 message: report.message.clone(),
2413 percentage: report.percentage.map(|p| p as usize),
2414 last_update_at: Instant::now(),
2415 },
2416 cx,
2417 );
2418 self.broadcast_language_server_update(
2419 server_id,
2420 proto::update_language_server::Variant::WorkProgress(
2421 proto::LspWorkProgress {
2422 token,
2423 message: report.message,
2424 percentage: report.percentage.map(|p| p as u32),
2425 },
2426 ),
2427 );
2428 }
2429 }
2430 lsp::WorkDoneProgress::End(_) => {
2431 language_server_status.progress_tokens.remove(&token);
2432
2433 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2434 language_server_status.has_pending_diagnostic_updates = false;
2435 self.disk_based_diagnostics_finished(server_id, cx);
2436 self.broadcast_language_server_update(
2437 server_id,
2438 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2439 proto::LspDiskBasedDiagnosticsUpdated {},
2440 ),
2441 );
2442 } else {
2443 self.on_lsp_work_end(server_id, token.clone(), cx);
2444 self.broadcast_language_server_update(
2445 server_id,
2446 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2447 token,
2448 }),
2449 );
2450 }
2451 }
2452 }
2453 }
2454
2455 fn on_lsp_work_start(
2456 &mut self,
2457 language_server_id: usize,
2458 token: String,
2459 progress: LanguageServerProgress,
2460 cx: &mut ModelContext<Self>,
2461 ) {
2462 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2463 status.pending_work.insert(token, progress);
2464 cx.notify();
2465 }
2466 }
2467
2468 fn on_lsp_work_progress(
2469 &mut self,
2470 language_server_id: usize,
2471 token: String,
2472 progress: LanguageServerProgress,
2473 cx: &mut ModelContext<Self>,
2474 ) {
2475 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2476 let entry = status
2477 .pending_work
2478 .entry(token)
2479 .or_insert(LanguageServerProgress {
2480 message: Default::default(),
2481 percentage: Default::default(),
2482 last_update_at: progress.last_update_at,
2483 });
2484 if progress.message.is_some() {
2485 entry.message = progress.message;
2486 }
2487 if progress.percentage.is_some() {
2488 entry.percentage = progress.percentage;
2489 }
2490 entry.last_update_at = progress.last_update_at;
2491 cx.notify();
2492 }
2493 }
2494
2495 fn on_lsp_work_end(
2496 &mut self,
2497 language_server_id: usize,
2498 token: String,
2499 cx: &mut ModelContext<Self>,
2500 ) {
2501 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2502 status.pending_work.remove(&token);
2503 cx.notify();
2504 }
2505 }
2506
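    // Handles a server-initiated `workspace/applyEdit` request by applying the edit locally
    // and recording the resulting transaction, so that a later `workspace/executeCommand`
    // response can hand it back to the caller.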
2507 async fn on_lsp_workspace_edit(
2508 this: WeakModelHandle<Self>,
2509 params: lsp::ApplyWorkspaceEditParams,
2510 server_id: usize,
2511 adapter: Arc<dyn LspAdapter>,
2512 language_server: Arc<LanguageServer>,
2513 mut cx: AsyncAppContext,
2514 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2515 let this = this
2516 .upgrade(&cx)
2517            .ok_or_else(|| anyhow!("project closed"))?;
2518 let transaction = Self::deserialize_workspace_edit(
2519 this.clone(),
2520 params.edit,
2521 true,
2522 adapter.clone(),
2523 language_server.clone(),
2524 &mut cx,
2525 )
2526 .await
2527 .log_err();
2528 this.update(&mut cx, |this, _| {
2529 if let Some(transaction) = transaction {
2530 this.last_workspace_edits_by_language_server
2531 .insert(server_id, transaction);
2532 }
2533 });
2534 Ok(lsp::ApplyWorkspaceEditResponse {
2535 applied: true,
2536 failed_change: None,
2537 failure_reason: None,
2538 })
2539 }
2540
2541 fn broadcast_language_server_update(
2542 &self,
2543 language_server_id: usize,
2544 event: proto::update_language_server::Variant,
2545 ) {
2546 if let Some(project_id) = self.shared_remote_id() {
2547 self.client
2548 .send(proto::UpdateLanguageServer {
2549 project_id,
2550 language_server_id: language_server_id as u64,
2551 variant: Some(event),
2552 })
2553 .log_err();
2554 }
2555 }
2556
2557 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2558 for server_state in self.language_servers.values() {
2559 if let LanguageServerState::Running { server, .. } = server_state {
2560 server
2561 .notify::<lsp::notification::DidChangeConfiguration>(
2562 lsp::DidChangeConfigurationParams {
2563 settings: settings.clone(),
2564 },
2565 )
2566 .ok();
2567 }
2568 }
2569 *self.language_server_settings.lock() = settings;
2570 }
2571
2572 pub fn language_server_statuses(
2573 &self,
2574 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2575 self.language_server_statuses.values()
2576 }
2577
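    // Converts an LSP `publishDiagnostics` payload into diagnostic entries, grouping each
    // primary diagnostic with the related-information entries that refer back to it and
    // marking diagnostics that come from the adapter's disk-based sources.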
2578 pub fn update_diagnostics(
2579 &mut self,
2580 language_server_id: usize,
2581 params: lsp::PublishDiagnosticsParams,
2582 disk_based_sources: &[&str],
2583 cx: &mut ModelContext<Self>,
2584 ) -> Result<()> {
2585 let abs_path = params
2586 .uri
2587 .to_file_path()
2588 .map_err(|_| anyhow!("URI is not a file"))?;
2589 let mut diagnostics = Vec::default();
2590 let mut primary_diagnostic_group_ids = HashMap::default();
2591 let mut sources_by_group_id = HashMap::default();
2592 let mut supporting_diagnostics = HashMap::default();
2593        for diagnostic in &params.diagnostics {
2594 let source = diagnostic.source.as_ref();
2595 let code = diagnostic.code.as_ref().map(|code| match code {
2596 lsp::NumberOrString::Number(code) => code.to_string(),
2597 lsp::NumberOrString::String(code) => code.clone(),
2598 });
2599 let range = range_from_lsp(diagnostic.range);
2600 let is_supporting = diagnostic
2601 .related_information
2602 .as_ref()
2603 .map_or(false, |infos| {
2604 infos.iter().any(|info| {
2605 primary_diagnostic_group_ids.contains_key(&(
2606 source,
2607 code.clone(),
2608 range_from_lsp(info.location.range),
2609 ))
2610 })
2611 });
2612
2613 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2614 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2615 });
2616
2617 if is_supporting {
2618 supporting_diagnostics.insert(
2619 (source, code.clone(), range),
2620 (diagnostic.severity, is_unnecessary),
2621 );
2622 } else {
2623 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2624 let is_disk_based = source.map_or(false, |source| {
2625 disk_based_sources.contains(&source.as_str())
2626 });
2627
2628 sources_by_group_id.insert(group_id, source);
2629 primary_diagnostic_group_ids
2630 .insert((source, code.clone(), range.clone()), group_id);
2631
2632 diagnostics.push(DiagnosticEntry {
2633 range,
2634 diagnostic: Diagnostic {
2635 code: code.clone(),
2636 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2637 message: diagnostic.message.clone(),
2638 group_id,
2639 is_primary: true,
2640 is_valid: true,
2641 is_disk_based,
2642 is_unnecessary,
2643 },
2644 });
2645 if let Some(infos) = &diagnostic.related_information {
2646 for info in infos {
2647 if info.location.uri == params.uri && !info.message.is_empty() {
2648 let range = range_from_lsp(info.location.range);
2649 diagnostics.push(DiagnosticEntry {
2650 range,
2651 diagnostic: Diagnostic {
2652 code: code.clone(),
2653 severity: DiagnosticSeverity::INFORMATION,
2654 message: info.message.clone(),
2655 group_id,
2656 is_primary: false,
2657 is_valid: true,
2658 is_disk_based,
2659 is_unnecessary: false,
2660 },
2661 });
2662 }
2663 }
2664 }
2665 }
2666 }
2667
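        // If a related-information entry was also published as its own diagnostic, prefer that
        // diagnostic's severity and "unnecessary" flag over the defaults assigned above.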
2668 for entry in &mut diagnostics {
2669 let diagnostic = &mut entry.diagnostic;
2670 if !diagnostic.is_primary {
2671 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2672 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2673 source,
2674 diagnostic.code.clone(),
2675 entry.range.clone(),
2676 )) {
2677 if let Some(severity) = severity {
2678 diagnostic.severity = severity;
2679 }
2680 diagnostic.is_unnecessary = is_unnecessary;
2681 }
2682 }
2683 }
2684
2685 self.update_diagnostic_entries(
2686 language_server_id,
2687 abs_path,
2688 params.version,
2689 diagnostics,
2690 cx,
2691 )?;
2692 Ok(())
2693 }
2694
2695 pub fn update_diagnostic_entries(
2696 &mut self,
2697 language_server_id: usize,
2698 abs_path: PathBuf,
2699 version: Option<i32>,
2700 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2701 cx: &mut ModelContext<Project>,
2702 ) -> Result<(), anyhow::Error> {
2703 let (worktree, relative_path) = self
2704 .find_local_worktree(&abs_path, cx)
2705 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2706
2707 let project_path = ProjectPath {
2708 worktree_id: worktree.read(cx).id(),
2709 path: relative_path.into(),
2710 };
2711 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2712 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2713 }
2714
2715 let updated = worktree.update(cx, |worktree, cx| {
2716 worktree
2717 .as_local_mut()
2718 .ok_or_else(|| anyhow!("not a local worktree"))?
2719 .update_diagnostics(
2720 language_server_id,
2721 project_path.path.clone(),
2722 diagnostics,
2723 cx,
2724 )
2725 })?;
2726 if updated {
2727 cx.emit(Event::DiagnosticsUpdated {
2728 language_server_id,
2729 path: project_path,
2730 });
2731 }
2732 Ok(())
2733 }
2734
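    // Applies diagnostics to an open buffer: entries are sorted, disk-based ranges are mapped
    // through any unsaved edits, positions are clipped to the current snapshot, and empty
    // ranges are widened by one character so they stay visible.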
2735 fn update_buffer_diagnostics(
2736 &mut self,
2737 buffer: &ModelHandle<Buffer>,
2738 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2739 version: Option<i32>,
2740 cx: &mut ModelContext<Self>,
2741 ) -> Result<()> {
2742 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2743 Ordering::Equal
2744 .then_with(|| b.is_primary.cmp(&a.is_primary))
2745 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2746 .then_with(|| a.severity.cmp(&b.severity))
2747 .then_with(|| a.message.cmp(&b.message))
2748 }
2749
2750 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2751
2752 diagnostics.sort_unstable_by(|a, b| {
2753 Ordering::Equal
2754 .then_with(|| a.range.start.cmp(&b.range.start))
2755 .then_with(|| b.range.end.cmp(&a.range.end))
2756 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2757 });
2758
2759 let mut sanitized_diagnostics = Vec::new();
2760 let edits_since_save = Patch::new(
2761 snapshot
2762 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2763 .collect(),
2764 );
2765 for entry in diagnostics {
2766 let start;
2767 let end;
2768 if entry.diagnostic.is_disk_based {
2769 // Some diagnostics are based on files on disk instead of buffers'
2770 // current contents. Adjust these diagnostics' ranges to reflect
2771 // any unsaved edits.
2772 start = edits_since_save.old_to_new(entry.range.start);
2773 end = edits_since_save.old_to_new(entry.range.end);
2774 } else {
2775 start = entry.range.start;
2776 end = entry.range.end;
2777 }
2778
2779 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2780 ..snapshot.clip_point_utf16(end, Bias::Right);
2781
2782 // Expand empty ranges by one character
2783 if range.start == range.end {
2784 range.end.column += 1;
2785 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2786 if range.start == range.end && range.end.column > 0 {
2787 range.start.column -= 1;
2788 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2789 }
2790 }
2791
2792 sanitized_diagnostics.push(DiagnosticEntry {
2793 range,
2794 diagnostic: entry.diagnostic,
2795 });
2796 }
2797 drop(edits_since_save);
2798
2799 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2800 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2801 Ok(())
2802 }
2803
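    // Reloads dirty buffers from disk. Remote buffers are reloaded by the host over RPC, local
    // buffers are reloaded directly, and all resulting transactions are gathered into a single
    // ProjectTransaction.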
2804 pub fn reload_buffers(
2805 &self,
2806 buffers: HashSet<ModelHandle<Buffer>>,
2807 push_to_history: bool,
2808 cx: &mut ModelContext<Self>,
2809 ) -> Task<Result<ProjectTransaction>> {
2810 let mut local_buffers = Vec::new();
2811 let mut remote_buffers = None;
2812 for buffer_handle in buffers {
2813 let buffer = buffer_handle.read(cx);
2814 if buffer.is_dirty() {
2815 if let Some(file) = File::from_dyn(buffer.file()) {
2816 if file.is_local() {
2817 local_buffers.push(buffer_handle);
2818 } else {
2819 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2820 }
2821 }
2822 }
2823 }
2824
2825 let remote_buffers = self.remote_id().zip(remote_buffers);
2826 let client = self.client.clone();
2827
2828 cx.spawn(|this, mut cx| async move {
2829 let mut project_transaction = ProjectTransaction::default();
2830
2831 if let Some((project_id, remote_buffers)) = remote_buffers {
2832 let response = client
2833 .request(proto::ReloadBuffers {
2834 project_id,
2835 buffer_ids: remote_buffers
2836 .iter()
2837 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2838 .collect(),
2839 })
2840 .await?
2841 .transaction
2842 .ok_or_else(|| anyhow!("missing transaction"))?;
2843 project_transaction = this
2844 .update(&mut cx, |this, cx| {
2845 this.deserialize_project_transaction(response, push_to_history, cx)
2846 })
2847 .await?;
2848 }
2849
2850 for buffer in local_buffers {
2851 let transaction = buffer
2852 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2853 .await?;
2854 buffer.update(&mut cx, |buffer, cx| {
2855 if let Some(transaction) = transaction {
2856 if !push_to_history {
2857 buffer.forget_transaction(transaction.id);
2858 }
2859 project_transaction.0.insert(cx.handle(), transaction);
2860 }
2861 });
2862 }
2863
2864 Ok(project_transaction)
2865 })
2866 }
2867
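    // Formats the given buffers. Remote buffers are formatted by the host over RPC; local
    // buffers use the language server's document formatting, falling back to range formatting
    // over the whole file when only that capability is advertised.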
2868 pub fn format(
2869 &self,
2870 buffers: HashSet<ModelHandle<Buffer>>,
2871 push_to_history: bool,
2872 cx: &mut ModelContext<Project>,
2873 ) -> Task<Result<ProjectTransaction>> {
2874 let mut local_buffers = Vec::new();
2875 let mut remote_buffers = None;
2876 for buffer_handle in buffers {
2877 let buffer = buffer_handle.read(cx);
2878 if let Some(file) = File::from_dyn(buffer.file()) {
2879 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2880 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2881 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2882 }
2883 } else {
2884 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2885 }
2886 } else {
2887 return Task::ready(Ok(Default::default()));
2888 }
2889 }
2890
2891 let remote_buffers = self.remote_id().zip(remote_buffers);
2892 let client = self.client.clone();
2893
2894 cx.spawn(|this, mut cx| async move {
2895 let mut project_transaction = ProjectTransaction::default();
2896
2897 if let Some((project_id, remote_buffers)) = remote_buffers {
2898 let response = client
2899 .request(proto::FormatBuffers {
2900 project_id,
2901 buffer_ids: remote_buffers
2902 .iter()
2903 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2904 .collect(),
2905 })
2906 .await?
2907 .transaction
2908 .ok_or_else(|| anyhow!("missing transaction"))?;
2909 project_transaction = this
2910 .update(&mut cx, |this, cx| {
2911 this.deserialize_project_transaction(response, push_to_history, cx)
2912 })
2913 .await?;
2914 }
2915
2916 for (buffer, buffer_abs_path, language_server) in local_buffers {
2917 let text_document = lsp::TextDocumentIdentifier::new(
2918 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2919 );
2920 let capabilities = &language_server.capabilities();
2921 let tab_size = cx.update(|cx| {
2922 let language_name = buffer.read(cx).language().map(|language| language.name());
2923 cx.global::<Settings>().tab_size(language_name.as_deref())
2924 });
2925 let lsp_edits = if capabilities
2926 .document_formatting_provider
2927 .as_ref()
2928 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2929 {
2930 language_server
2931 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2932 text_document,
2933 options: lsp::FormattingOptions {
2934 tab_size: tab_size.into(),
2935 insert_spaces: true,
2936 insert_final_newline: Some(true),
2937 ..Default::default()
2938 },
2939 work_done_progress_params: Default::default(),
2940 })
2941 .await?
2942 } else if capabilities
2943 .document_range_formatting_provider
2944 .as_ref()
2945 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2946 {
2947 let buffer_start = lsp::Position::new(0, 0);
2948 let buffer_end =
2949 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2950 language_server
2951 .request::<lsp::request::RangeFormatting>(
2952 lsp::DocumentRangeFormattingParams {
2953 text_document,
2954 range: lsp::Range::new(buffer_start, buffer_end),
2955 options: lsp::FormattingOptions {
2956 tab_size: tab_size.into(),
2957 insert_spaces: true,
2958 insert_final_newline: Some(true),
2959 ..Default::default()
2960 },
2961 work_done_progress_params: Default::default(),
2962 },
2963 )
2964 .await?
2965 } else {
2966 continue;
2967 };
2968
2969 if let Some(lsp_edits) = lsp_edits {
2970 let edits = this
2971 .update(&mut cx, |this, cx| {
2972 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2973 })
2974 .await?;
2975 buffer.update(&mut cx, |buffer, cx| {
2976 buffer.finalize_last_transaction();
2977 buffer.start_transaction();
2978 for (range, text) in edits {
2979 buffer.edit([(range, text)], cx);
2980 }
2981 if buffer.end_transaction(cx).is_some() {
2982 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2983 if !push_to_history {
2984 buffer.forget_transaction(transaction.id);
2985 }
2986 project_transaction.0.insert(cx.handle(), transaction);
2987 }
2988 });
2989 }
2990 }
2991
2992 Ok(project_transaction)
2993 })
2994 }
2995
2996 pub fn definition<T: ToPointUtf16>(
2997 &self,
2998 buffer: &ModelHandle<Buffer>,
2999 position: T,
3000 cx: &mut ModelContext<Self>,
3001 ) -> Task<Result<Vec<LocationLink>>> {
3002 let position = position.to_point_utf16(buffer.read(cx));
3003 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3004 }
3005
3006 pub fn references<T: ToPointUtf16>(
3007 &self,
3008 buffer: &ModelHandle<Buffer>,
3009 position: T,
3010 cx: &mut ModelContext<Self>,
3011 ) -> Task<Result<Vec<Location>>> {
3012 let position = position.to_point_utf16(buffer.read(cx));
3013 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3014 }
3015
3016 pub fn document_highlights<T: ToPointUtf16>(
3017 &self,
3018 buffer: &ModelHandle<Buffer>,
3019 position: T,
3020 cx: &mut ModelContext<Self>,
3021 ) -> Task<Result<Vec<DocumentHighlight>>> {
3022 let position = position.to_point_utf16(buffer.read(cx));
3023
3024 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3025 }
3026
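    // Queries workspace symbols from every running language server and resolves the results to
    // project-relative paths, or forwards the query to the host when the project is remote.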
3027 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3028 if self.is_local() {
3029 let mut requests = Vec::new();
3030 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
3031 let worktree_id = *worktree_id;
3032 if let Some(worktree) = self
3033 .worktree_for_id(worktree_id, cx)
3034 .and_then(|worktree| worktree.read(cx).as_local())
3035 {
3036 if let Some(LanguageServerState::Running { adapter, server }) =
3037 self.language_servers.get(server_id)
3038 {
3039 let adapter = adapter.clone();
3040 let worktree_abs_path = worktree.abs_path().clone();
3041 requests.push(
3042 server
3043 .request::<lsp::request::WorkspaceSymbol>(
3044 lsp::WorkspaceSymbolParams {
3045 query: query.to_string(),
3046 ..Default::default()
3047 },
3048 )
3049 .log_err()
3050 .map(move |response| {
3051 (
3052 adapter,
3053 worktree_id,
3054 worktree_abs_path,
3055 response.unwrap_or_default(),
3056 )
3057 }),
3058 );
3059 }
3060 }
3061 }
3062
3063 cx.spawn_weak(|this, cx| async move {
3064 let responses = futures::future::join_all(requests).await;
3065 let this = if let Some(this) = this.upgrade(&cx) {
3066 this
3067 } else {
3068 return Ok(Default::default());
3069 };
3070 this.read_with(&cx, |this, cx| {
3071 let mut symbols = Vec::new();
3072 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3073 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3074 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3075 let mut worktree_id = source_worktree_id;
3076 let path;
3077 if let Some((worktree, rel_path)) =
3078 this.find_local_worktree(&abs_path, cx)
3079 {
3080 worktree_id = worktree.read(cx).id();
3081 path = rel_path;
3082 } else {
3083 path = relativize_path(&worktree_abs_path, &abs_path);
3084 }
3085
3086 let label = this
3087 .languages
3088 .select_language(&path)
3089 .and_then(|language| {
3090 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3091 })
3092 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3093 let signature = this.symbol_signature(worktree_id, &path);
3094
3095 Some(Symbol {
3096 source_worktree_id,
3097 worktree_id,
3098 language_server_name: adapter.name(),
3099 name: lsp_symbol.name,
3100 kind: lsp_symbol.kind,
3101 label,
3102 path,
3103 range: range_from_lsp(lsp_symbol.location.range),
3104 signature,
3105 })
3106 }));
3107 }
3108 Ok(symbols)
3109 })
3110 })
3111 } else if let Some(project_id) = self.remote_id() {
3112 let request = self.client.request(proto::GetProjectSymbols {
3113 project_id,
3114 query: query.to_string(),
3115 });
3116 cx.spawn_weak(|this, cx| async move {
3117 let response = request.await?;
3118 let mut symbols = Vec::new();
3119 if let Some(this) = this.upgrade(&cx) {
3120 this.read_with(&cx, |this, _| {
3121 symbols.extend(
3122 response
3123 .symbols
3124 .into_iter()
3125 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3126 );
3127 })
3128 }
3129 Ok(symbols)
3130 })
3131 } else {
3132 Task::ready(Ok(Default::default()))
3133 }
3134 }
3135
3136 pub fn open_buffer_for_symbol(
3137 &mut self,
3138 symbol: &Symbol,
3139 cx: &mut ModelContext<Self>,
3140 ) -> Task<Result<ModelHandle<Buffer>>> {
3141 if self.is_local() {
3142 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
3143 symbol.source_worktree_id,
3144 symbol.language_server_name.clone(),
3145 )) {
3146 *id
3147 } else {
3148 return Task::ready(Err(anyhow!(
3149 "language server for worktree and language not found"
3150 )));
3151 };
3152
3153 let worktree_abs_path = if let Some(worktree_abs_path) = self
3154 .worktree_for_id(symbol.worktree_id, cx)
3155 .and_then(|worktree| worktree.read(cx).as_local())
3156 .map(|local_worktree| local_worktree.abs_path())
3157 {
3158 worktree_abs_path
3159 } else {
3160 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3161 };
3162 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3163 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3164 uri
3165 } else {
3166 return Task::ready(Err(anyhow!("invalid symbol path")));
3167 };
3168
3169 self.open_local_buffer_via_lsp(
3170 symbol_uri,
3171 language_server_id,
3172 symbol.language_server_name.clone(),
3173 cx,
3174 )
3175 } else if let Some(project_id) = self.remote_id() {
3176 let request = self.client.request(proto::OpenBufferForSymbol {
3177 project_id,
3178 symbol: Some(serialize_symbol(symbol)),
3179 });
3180 cx.spawn(|this, mut cx| async move {
3181 let response = request.await?;
3182 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3183 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3184 .await
3185 })
3186 } else {
3187 Task::ready(Err(anyhow!("project does not have a remote id")))
3188 }
3189 }
3190
3191 pub fn hover<T: ToPointUtf16>(
3192 &self,
3193 buffer: &ModelHandle<Buffer>,
3194 position: T,
3195 cx: &mut ModelContext<Self>,
3196 ) -> Task<Result<Option<Hover>>> {
3197 let position = position.to_point_utf16(buffer.read(cx));
3198 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3199 }
3200
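    // Requests completions at the given position. For local buffers the request goes straight
    // to the language server and each completion's replacement range is validated against the
    // buffer (or inferred from the word under the cursor); for remote buffers the request is
    // proxied to the host.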
3201 pub fn completions<T: ToPointUtf16>(
3202 &self,
3203 source_buffer_handle: &ModelHandle<Buffer>,
3204 position: T,
3205 cx: &mut ModelContext<Self>,
3206 ) -> Task<Result<Vec<Completion>>> {
3207 let source_buffer_handle = source_buffer_handle.clone();
3208 let source_buffer = source_buffer_handle.read(cx);
3209 let buffer_id = source_buffer.remote_id();
3210 let language = source_buffer.language().cloned();
3211 let worktree;
3212 let buffer_abs_path;
3213 if let Some(file) = File::from_dyn(source_buffer.file()) {
3214 worktree = file.worktree.clone();
3215 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3216 } else {
3217 return Task::ready(Ok(Default::default()));
3218 };
3219
3220 let position = position.to_point_utf16(source_buffer);
3221 let anchor = source_buffer.anchor_after(position);
3222
3223 if worktree.read(cx).as_local().is_some() {
3224 let buffer_abs_path = buffer_abs_path.unwrap();
3225 let lang_server =
3226 if let Some((_, server)) = self.language_server_for_buffer(source_buffer, cx) {
3227 server.clone()
3228 } else {
3229 return Task::ready(Ok(Default::default()));
3230 };
3231
3232 cx.spawn(|_, cx| async move {
3233 let completions = lang_server
3234 .request::<lsp::request::Completion>(lsp::CompletionParams {
3235 text_document_position: lsp::TextDocumentPositionParams::new(
3236 lsp::TextDocumentIdentifier::new(
3237 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3238 ),
3239 point_to_lsp(position),
3240 ),
3241 context: Default::default(),
3242 work_done_progress_params: Default::default(),
3243 partial_result_params: Default::default(),
3244 })
3245 .await
3246 .context("lsp completion request failed")?;
3247
3248 let completions = if let Some(completions) = completions {
3249 match completions {
3250 lsp::CompletionResponse::Array(completions) => completions,
3251 lsp::CompletionResponse::List(list) => list.items,
3252 }
3253 } else {
3254 Default::default()
3255 };
3256
3257 source_buffer_handle.read_with(&cx, |this, _| {
3258 let snapshot = this.snapshot();
3259 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3260 let mut range_for_token = None;
3261 Ok(completions
3262 .into_iter()
3263 .filter_map(|lsp_completion| {
3264 // For now, we can only handle additional edits if they are returned
3265 // when resolving the completion, not if they are present initially.
3266 if lsp_completion
3267 .additional_text_edits
3268 .as_ref()
3269 .map_or(false, |edits| !edits.is_empty())
3270 {
3271 return None;
3272 }
3273
3274 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3275 // If the language server provides a range to overwrite, then
3276 // check that the range is valid.
3277 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3278 let range = range_from_lsp(edit.range);
3279 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3280 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3281 if start != range.start || end != range.end {
3282 log::info!("completion out of expected range");
3283 return None;
3284 }
3285 (
3286 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3287 edit.new_text.clone(),
3288 )
3289 }
3290 // If the language server does not provide a range, then infer
3291 // the range based on the syntax tree.
3292 None => {
3293 if position != clipped_position {
3294 log::info!("completion out of expected range");
3295 return None;
3296 }
3297 let Range { start, end } = range_for_token
3298 .get_or_insert_with(|| {
3299 let offset = position.to_offset(&snapshot);
3300 let (range, kind) = snapshot.surrounding_word(offset);
3301 if kind == Some(CharKind::Word) {
3302 range
3303 } else {
3304 offset..offset
3305 }
3306 })
3307 .clone();
3308 let text = lsp_completion
3309 .insert_text
3310 .as_ref()
3311 .unwrap_or(&lsp_completion.label)
3312 .clone();
3313 (
3314 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3315 text.clone(),
3316 )
3317 }
3318 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3319 log::info!("unsupported insert/replace completion");
3320 return None;
3321 }
3322 };
3323
3324 Some(Completion {
3325 old_range,
3326 new_text,
3327 label: language
3328 .as_ref()
3329 .and_then(|l| l.label_for_completion(&lsp_completion))
3330 .unwrap_or_else(|| {
3331 CodeLabel::plain(
3332 lsp_completion.label.clone(),
3333 lsp_completion.filter_text.as_deref(),
3334 )
3335 }),
3336 lsp_completion,
3337 })
3338 })
3339 .collect())
3340 })
3341 })
3342 } else if let Some(project_id) = self.remote_id() {
3343 let rpc = self.client.clone();
3344 let message = proto::GetCompletions {
3345 project_id,
3346 buffer_id,
3347 position: Some(language::proto::serialize_anchor(&anchor)),
3348 version: serialize_version(&source_buffer.version()),
3349 };
3350 cx.spawn_weak(|_, mut cx| async move {
3351 let response = rpc.request(message).await?;
3352
3353 source_buffer_handle
3354 .update(&mut cx, |buffer, _| {
3355 buffer.wait_for_version(deserialize_version(response.version))
3356 })
3357 .await;
3358
3359 response
3360 .completions
3361 .into_iter()
3362 .map(|completion| {
3363 language::proto::deserialize_completion(completion, language.as_ref())
3364 })
3365 .collect()
3366 })
3367 } else {
3368 Task::ready(Ok(Default::default()))
3369 }
3370 }
3371
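    // Resolves the completion with the language server and applies any additional text edits
    // it returns (such as auto-imports) in a single transaction, optionally pushing that
    // transaction to the buffer's history.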
3372 pub fn apply_additional_edits_for_completion(
3373 &self,
3374 buffer_handle: ModelHandle<Buffer>,
3375 completion: Completion,
3376 push_to_history: bool,
3377 cx: &mut ModelContext<Self>,
3378 ) -> Task<Result<Option<Transaction>>> {
3379 let buffer = buffer_handle.read(cx);
3380 let buffer_id = buffer.remote_id();
3381
3382 if self.is_local() {
3383 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3384 {
3385 server.clone()
3386 } else {
3387 return Task::ready(Ok(Default::default()));
3388 };
3389
3390 cx.spawn(|this, mut cx| async move {
3391 let resolved_completion = lang_server
3392 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3393 .await?;
3394 if let Some(edits) = resolved_completion.additional_text_edits {
3395 let edits = this
3396 .update(&mut cx, |this, cx| {
3397 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3398 })
3399 .await?;
3400 buffer_handle.update(&mut cx, |buffer, cx| {
3401 buffer.finalize_last_transaction();
3402 buffer.start_transaction();
3403 for (range, text) in edits {
3404 buffer.edit([(range, text)], cx);
3405 }
3406 let transaction = if buffer.end_transaction(cx).is_some() {
3407 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3408 if !push_to_history {
3409 buffer.forget_transaction(transaction.id);
3410 }
3411 Some(transaction)
3412 } else {
3413 None
3414 };
3415 Ok(transaction)
3416 })
3417 } else {
3418 Ok(None)
3419 }
3420 })
3421 } else if let Some(project_id) = self.remote_id() {
3422 let client = self.client.clone();
3423 cx.spawn(|_, mut cx| async move {
3424 let response = client
3425 .request(proto::ApplyCompletionAdditionalEdits {
3426 project_id,
3427 buffer_id,
3428 completion: Some(language::proto::serialize_completion(&completion)),
3429 })
3430 .await?;
3431
3432 if let Some(transaction) = response.transaction {
3433 let transaction = language::proto::deserialize_transaction(transaction)?;
3434 buffer_handle
3435 .update(&mut cx, |buffer, _| {
3436 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3437 })
3438 .await;
3439 if push_to_history {
3440 buffer_handle.update(&mut cx, |buffer, _| {
3441 buffer.push_transaction(transaction.clone(), Instant::now());
3442 });
3443 }
3444 Ok(Some(transaction))
3445 } else {
3446 Ok(None)
3447 }
3448 })
3449 } else {
3450 Task::ready(Err(anyhow!("project does not have a remote id")))
3451 }
3452 }
3453
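    // Fetches code actions for the given range, passing along the overlapping diagnostics as
    // context and restricting the results to quick fixes, refactorings, and source actions.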
3454 pub fn code_actions<T: Clone + ToOffset>(
3455 &self,
3456 buffer_handle: &ModelHandle<Buffer>,
3457 range: Range<T>,
3458 cx: &mut ModelContext<Self>,
3459 ) -> Task<Result<Vec<CodeAction>>> {
3460 let buffer_handle = buffer_handle.clone();
3461 let buffer = buffer_handle.read(cx);
3462 let snapshot = buffer.snapshot();
3463 let relevant_diagnostics = snapshot
3464 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3465 .map(|entry| entry.to_lsp_diagnostic_stub())
3466 .collect();
3467 let buffer_id = buffer.remote_id();
3468 let worktree;
3469 let buffer_abs_path;
3470 if let Some(file) = File::from_dyn(buffer.file()) {
3471 worktree = file.worktree.clone();
3472 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3473 } else {
3474 return Task::ready(Ok(Default::default()));
3475 };
3476 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3477
3478 if worktree.read(cx).as_local().is_some() {
3479 let buffer_abs_path = buffer_abs_path.unwrap();
3480 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3481 {
3482 server.clone()
3483 } else {
3484 return Task::ready(Ok(Default::default()));
3485 };
3486
3487 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3488 cx.foreground().spawn(async move {
3489                if lang_server.capabilities().code_action_provider.is_none() {
3490 return Ok(Default::default());
3491 }
3492
3493 Ok(lang_server
3494 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3495 text_document: lsp::TextDocumentIdentifier::new(
3496 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3497 ),
3498 range: lsp_range,
3499 work_done_progress_params: Default::default(),
3500 partial_result_params: Default::default(),
3501 context: lsp::CodeActionContext {
3502 diagnostics: relevant_diagnostics,
3503 only: Some(vec![
3504 lsp::CodeActionKind::QUICKFIX,
3505 lsp::CodeActionKind::REFACTOR,
3506 lsp::CodeActionKind::REFACTOR_EXTRACT,
3507 lsp::CodeActionKind::SOURCE,
3508 ]),
3509 },
3510 })
3511 .await?
3512 .unwrap_or_default()
3513 .into_iter()
3514 .filter_map(|entry| {
3515 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3516 Some(CodeAction {
3517 range: range.clone(),
3518 lsp_action,
3519 })
3520 } else {
3521 None
3522 }
3523 })
3524 .collect())
3525 })
3526 } else if let Some(project_id) = self.remote_id() {
3527 let rpc = self.client.clone();
3528 let version = buffer.version();
3529 cx.spawn_weak(|_, mut cx| async move {
3530 let response = rpc
3531 .request(proto::GetCodeActions {
3532 project_id,
3533 buffer_id,
3534 start: Some(language::proto::serialize_anchor(&range.start)),
3535 end: Some(language::proto::serialize_anchor(&range.end)),
3536 version: serialize_version(&version),
3537 })
3538 .await?;
3539
3540 buffer_handle
3541 .update(&mut cx, |buffer, _| {
3542 buffer.wait_for_version(deserialize_version(response.version))
3543 })
3544 .await;
3545
3546 response
3547 .actions
3548 .into_iter()
3549 .map(language::proto::deserialize_code_action)
3550 .collect()
3551 })
3552 } else {
3553 Task::ready(Ok(Default::default()))
3554 }
3555 }
3556
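    // Applies a code action: the action is resolved (or re-requested when it can't be), then
    // either its workspace edit is applied directly or its command is executed and whatever
    // workspace edits the server sent back during execution are returned.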
3557 pub fn apply_code_action(
3558 &self,
3559 buffer_handle: ModelHandle<Buffer>,
3560 mut action: CodeAction,
3561 push_to_history: bool,
3562 cx: &mut ModelContext<Self>,
3563 ) -> Task<Result<ProjectTransaction>> {
3564 if self.is_local() {
3565 let buffer = buffer_handle.read(cx);
3566 let (lsp_adapter, lang_server) =
3567 if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) {
3568 (adapter.clone(), server.clone())
3569 } else {
3570 return Task::ready(Ok(Default::default()));
3571 };
3572 let range = action.range.to_point_utf16(buffer);
3573
3574 cx.spawn(|this, mut cx| async move {
3575 if let Some(lsp_range) = action
3576 .lsp_action
3577 .data
3578 .as_mut()
3579 .and_then(|d| d.get_mut("codeActionParams"))
3580 .and_then(|d| d.get_mut("range"))
3581 {
3582 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3583 action.lsp_action = lang_server
3584 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3585 .await?;
3586 } else {
3587 let actions = this
3588 .update(&mut cx, |this, cx| {
3589 this.code_actions(&buffer_handle, action.range, cx)
3590 })
3591 .await?;
3592 action.lsp_action = actions
3593 .into_iter()
3594 .find(|a| a.lsp_action.title == action.lsp_action.title)
3595 .ok_or_else(|| anyhow!("code action is outdated"))?
3596 .lsp_action;
3597 }
3598
3599 if let Some(edit) = action.lsp_action.edit {
3600 Self::deserialize_workspace_edit(
3601 this,
3602 edit,
3603 push_to_history,
3604 lsp_adapter.clone(),
3605 lang_server.clone(),
3606 &mut cx,
3607 )
3608 .await
3609 } else if let Some(command) = action.lsp_action.command {
3610 this.update(&mut cx, |this, _| {
3611 this.last_workspace_edits_by_language_server
3612 .remove(&lang_server.server_id());
3613 });
3614 lang_server
3615 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3616 command: command.command,
3617 arguments: command.arguments.unwrap_or_default(),
3618 ..Default::default()
3619 })
3620 .await?;
3621 Ok(this.update(&mut cx, |this, _| {
3622 this.last_workspace_edits_by_language_server
3623 .remove(&lang_server.server_id())
3624 .unwrap_or_default()
3625 }))
3626 } else {
3627 Ok(ProjectTransaction::default())
3628 }
3629 })
3630 } else if let Some(project_id) = self.remote_id() {
3631 let client = self.client.clone();
3632 let request = proto::ApplyCodeAction {
3633 project_id,
3634 buffer_id: buffer_handle.read(cx).remote_id(),
3635 action: Some(language::proto::serialize_code_action(&action)),
3636 };
3637 cx.spawn(|this, mut cx| async move {
3638 let response = client
3639 .request(request)
3640 .await?
3641 .transaction
3642 .ok_or_else(|| anyhow!("missing transaction"))?;
3643 this.update(&mut cx, |this, cx| {
3644 this.deserialize_project_transaction(response, push_to_history, cx)
3645 })
3646 .await
3647 })
3648 } else {
3649 Task::ready(Err(anyhow!("project does not have a remote id")))
3650 }
3651 }
3652
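    // Applies an LSP workspace edit to the project: create, rename, and delete operations go
    // through the filesystem, text edits are applied to buffers opened via the language
    // server, and the resulting transactions are collected into a ProjectTransaction.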
3653 async fn deserialize_workspace_edit(
3654 this: ModelHandle<Self>,
3655 edit: lsp::WorkspaceEdit,
3656 push_to_history: bool,
3657 lsp_adapter: Arc<dyn LspAdapter>,
3658 language_server: Arc<LanguageServer>,
3659 cx: &mut AsyncAppContext,
3660 ) -> Result<ProjectTransaction> {
3661 let fs = this.read_with(cx, |this, _| this.fs.clone());
3662 let mut operations = Vec::new();
3663 if let Some(document_changes) = edit.document_changes {
3664 match document_changes {
3665 lsp::DocumentChanges::Edits(edits) => {
3666 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3667 }
3668 lsp::DocumentChanges::Operations(ops) => operations = ops,
3669 }
3670 } else if let Some(changes) = edit.changes {
3671 operations.extend(changes.into_iter().map(|(uri, edits)| {
3672 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3673 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3674 uri,
3675 version: None,
3676 },
3677 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3678 })
3679 }));
3680 }
3681
3682 let mut project_transaction = ProjectTransaction::default();
3683 for operation in operations {
3684 match operation {
3685 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3686 let abs_path = op
3687 .uri
3688 .to_file_path()
3689 .map_err(|_| anyhow!("can't convert URI to path"))?;
3690
3691 if let Some(parent_path) = abs_path.parent() {
3692 fs.create_dir(parent_path).await?;
3693 }
3694 if abs_path.ends_with("/") {
3695 fs.create_dir(&abs_path).await?;
3696 } else {
3697 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3698 .await?;
3699 }
3700 }
3701 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3702 let source_abs_path = op
3703 .old_uri
3704 .to_file_path()
3705 .map_err(|_| anyhow!("can't convert URI to path"))?;
3706 let target_abs_path = op
3707 .new_uri
3708 .to_file_path()
3709 .map_err(|_| anyhow!("can't convert URI to path"))?;
3710 fs.rename(
3711 &source_abs_path,
3712 &target_abs_path,
3713 op.options.map(Into::into).unwrap_or_default(),
3714 )
3715 .await?;
3716 }
3717 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3718 let abs_path = op
3719 .uri
3720 .to_file_path()
3721 .map_err(|_| anyhow!("can't convert URI to path"))?;
3722 let options = op.options.map(Into::into).unwrap_or_default();
3723 if abs_path.ends_with("/") {
3724 fs.remove_dir(&abs_path, options).await?;
3725 } else {
3726 fs.remove_file(&abs_path, options).await?;
3727 }
3728 }
3729 lsp::DocumentChangeOperation::Edit(op) => {
3730 let buffer_to_edit = this
3731 .update(cx, |this, cx| {
3732 this.open_local_buffer_via_lsp(
3733 op.text_document.uri,
3734 language_server.server_id(),
3735 lsp_adapter.name(),
3736 cx,
3737 )
3738 })
3739 .await?;
3740
3741 let edits = this
3742 .update(cx, |this, cx| {
3743 let edits = op.edits.into_iter().map(|edit| match edit {
3744 lsp::OneOf::Left(edit) => edit,
3745 lsp::OneOf::Right(edit) => edit.text_edit,
3746 });
3747 this.edits_from_lsp(
3748 &buffer_to_edit,
3749 edits,
3750 op.text_document.version,
3751 cx,
3752 )
3753 })
3754 .await?;
3755
3756 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3757 buffer.finalize_last_transaction();
3758 buffer.start_transaction();
3759 for (range, text) in edits {
3760 buffer.edit([(range, text)], cx);
3761 }
3762 let transaction = if buffer.end_transaction(cx).is_some() {
3763 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3764 if !push_to_history {
3765 buffer.forget_transaction(transaction.id);
3766 }
3767 Some(transaction)
3768 } else {
3769 None
3770 };
3771
3772 transaction
3773 });
3774 if let Some(transaction) = transaction {
3775 project_transaction.0.insert(buffer_to_edit, transaction);
3776 }
3777 }
3778 }
3779 }
3780
3781 Ok(project_transaction)
3782 }
3783
3784 pub fn prepare_rename<T: ToPointUtf16>(
3785 &self,
3786 buffer: ModelHandle<Buffer>,
3787 position: T,
3788 cx: &mut ModelContext<Self>,
3789 ) -> Task<Result<Option<Range<Anchor>>>> {
3790 let position = position.to_point_utf16(buffer.read(cx));
3791 self.request_lsp(buffer, PrepareRename { position }, cx)
3792 }
3793
3794 pub fn perform_rename<T: ToPointUtf16>(
3795 &self,
3796 buffer: ModelHandle<Buffer>,
3797 position: T,
3798 new_name: String,
3799 push_to_history: bool,
3800 cx: &mut ModelContext<Self>,
3801 ) -> Task<Result<ProjectTransaction>> {
3802 let position = position.to_point_utf16(buffer.read(cx));
3803 self.request_lsp(
3804 buffer,
3805 PerformRename {
3806 position,
3807 new_name,
3808 push_to_history,
3809 },
3810 cx,
3811 )
3812 }
3813
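    // Searches the project for `query`, returning the matching anchor ranges grouped by buffer.
    //
    // Locally, background workers scan the visible worktrees for candidate paths, the
    // candidates are opened as buffers, and each buffer's contents are searched on the
    // background thread pool. For remote projects, the query is forwarded to the host.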
3814 pub fn search(
3815 &self,
3816 query: SearchQuery,
3817 cx: &mut ModelContext<Self>,
3818 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3819 if self.is_local() {
3820 let snapshots = self
3821 .visible_worktrees(cx)
3822 .filter_map(|tree| {
3823 let tree = tree.read(cx).as_local()?;
3824 Some(tree.snapshot())
3825 })
3826 .collect::<Vec<_>>();
3827
3828 let background = cx.background().clone();
3829 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3830 if path_count == 0 {
3831 return Task::ready(Ok(Default::default()));
3832 }
3833 let workers = background.num_cpus().min(path_count);
3834 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
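            // Divide the candidate paths among up to one worker per CPU. Each worker walks
            // its slice of every worktree snapshot and reports paths whose file contents may
            // contain a match.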
3835 cx.background()
3836 .spawn({
3837 let fs = self.fs.clone();
3838 let background = cx.background().clone();
3839 let query = query.clone();
3840 async move {
3841 let fs = &fs;
3842 let query = &query;
3843 let matching_paths_tx = &matching_paths_tx;
3844 let paths_per_worker = (path_count + workers - 1) / workers;
3845 let snapshots = &snapshots;
3846 background
3847 .scoped(|scope| {
3848 for worker_ix in 0..workers {
3849 let worker_start_ix = worker_ix * paths_per_worker;
3850 let worker_end_ix = worker_start_ix + paths_per_worker;
3851 scope.spawn(async move {
3852 let mut snapshot_start_ix = 0;
3853 let mut abs_path = PathBuf::new();
3854 for snapshot in snapshots {
3855 let snapshot_end_ix =
3856 snapshot_start_ix + snapshot.visible_file_count();
3857 if worker_end_ix <= snapshot_start_ix {
3858 break;
3859 } else if worker_start_ix > snapshot_end_ix {
3860 snapshot_start_ix = snapshot_end_ix;
3861 continue;
3862 } else {
3863 let start_in_snapshot = worker_start_ix
3864 .saturating_sub(snapshot_start_ix);
3865 let end_in_snapshot =
3866 cmp::min(worker_end_ix, snapshot_end_ix)
3867 - snapshot_start_ix;
3868
3869 for entry in snapshot
3870 .files(false, start_in_snapshot)
3871 .take(end_in_snapshot - start_in_snapshot)
3872 {
3873 if matching_paths_tx.is_closed() {
3874 break;
3875 }
3876
3877 abs_path.clear();
3878 abs_path.push(&snapshot.abs_path());
3879 abs_path.push(&entry.path);
3880 let matches = if let Some(file) =
3881 fs.open_sync(&abs_path).await.log_err()
3882 {
3883 query.detect(file).unwrap_or(false)
3884 } else {
3885 false
3886 };
3887
3888 if matches {
3889 let project_path =
3890 (snapshot.id(), entry.path.clone());
3891 if matching_paths_tx
3892 .send(project_path)
3893 .await
3894 .is_err()
3895 {
3896 break;
3897 }
3898 }
3899 }
3900
3901 snapshot_start_ix = snapshot_end_ix;
3902 }
3903 }
3904 });
3905 }
3906 })
3907 .await;
3908 }
3909 })
3910 .detach();
3911
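            // Seed the content search with the buffers that are already open, then open a
            // buffer for every matching path reported by the workers above, skipping any
            // buffer that has already been queued.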
3912 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3913 let open_buffers = self
3914 .opened_buffers
3915 .values()
3916 .filter_map(|b| b.upgrade(cx))
3917 .collect::<HashSet<_>>();
3918 cx.spawn(|this, cx| async move {
3919 for buffer in &open_buffers {
3920 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3921 buffers_tx.send((buffer.clone(), snapshot)).await?;
3922 }
3923
3924 let open_buffers = Rc::new(RefCell::new(open_buffers));
3925 while let Some(project_path) = matching_paths_rx.next().await {
3926 if buffers_tx.is_closed() {
3927 break;
3928 }
3929
3930 let this = this.clone();
3931 let open_buffers = open_buffers.clone();
3932 let buffers_tx = buffers_tx.clone();
3933 cx.spawn(|mut cx| async move {
3934 if let Some(buffer) = this
3935 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3936 .await
3937 .log_err()
3938 {
3939 if open_buffers.borrow_mut().insert(buffer.clone()) {
3940 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3941 buffers_tx.send((buffer, snapshot)).await?;
3942 }
3943 }
3944
3945 Ok::<_, anyhow::Error>(())
3946 })
3947 .detach();
3948 }
3949
3950 Ok::<_, anyhow::Error>(())
3951 })
3952 .detach_and_log_err(cx);
3953
3954 let background = cx.background().clone();
3955 cx.background().spawn(async move {
3956 let query = &query;
3957 let mut matched_buffers = Vec::new();
3958 for _ in 0..workers {
3959 matched_buffers.push(HashMap::default());
3960 }
3961 background
3962 .scoped(|scope| {
3963 for worker_matched_buffers in matched_buffers.iter_mut() {
3964 let mut buffers_rx = buffers_rx.clone();
3965 scope.spawn(async move {
3966 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3967 let buffer_matches = query
3968 .search(snapshot.as_rope())
3969 .await
3970 .iter()
3971 .map(|range| {
3972 snapshot.anchor_before(range.start)
3973 ..snapshot.anchor_after(range.end)
3974 })
3975 .collect::<Vec<_>>();
3976 if !buffer_matches.is_empty() {
3977 worker_matched_buffers
3978 .insert(buffer.clone(), buffer_matches);
3979 }
3980 }
3981 });
3982 }
3983 })
3984 .await;
3985 Ok(matched_buffers.into_iter().flatten().collect())
3986 })
3987 } else if let Some(project_id) = self.remote_id() {
3988 let request = self.client.request(query.to_proto(project_id));
3989 cx.spawn(|this, mut cx| async move {
3990 let response = request.await?;
3991 let mut result = HashMap::default();
3992 for location in response.locations {
3993 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3994 let target_buffer = this
3995 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3996 .await?;
3997 let start = location
3998 .start
3999 .and_then(deserialize_anchor)
4000 .ok_or_else(|| anyhow!("missing target start"))?;
4001 let end = location
4002 .end
4003 .and_then(deserialize_anchor)
4004 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
4009 }
4010 Ok(result)
4011 })
4012 } else {
4013 Task::ready(Ok(Default::default()))
4014 }
4015 }
4016
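    // Routes an LSP-backed request either to the buffer's local language server or, for
    // remote projects, to the host over RPC. Falls back to a default response when no
    // server is available or the server lacks the required capability.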
4017 fn request_lsp<R: LspCommand>(
4018 &self,
4019 buffer_handle: ModelHandle<Buffer>,
4020 request: R,
4021 cx: &mut ModelContext<Self>,
4022 ) -> Task<Result<R::Response>>
4023 where
4024 <R::LspRequest as lsp::request::Request>::Result: Send,
4025 {
4026 let buffer = buffer_handle.read(cx);
4027 if self.is_local() {
4028 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4029 if let Some((file, language_server)) = file.zip(
4030 self.language_server_for_buffer(buffer, cx)
4031 .map(|(_, server)| server.clone()),
4032 ) {
4033 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4034 return cx.spawn(|this, cx| async move {
4035 if !request.check_capabilities(&language_server.capabilities()) {
4036 return Ok(Default::default());
4037 }
4038
4039 let response = language_server
4040 .request::<R::LspRequest>(lsp_params)
4041 .await
4042 .context("lsp request failed")?;
4043 request
4044 .response_from_lsp(response, this, buffer_handle, cx)
4045 .await
4046 });
4047 }
4048 } else if let Some(project_id) = self.remote_id() {
4049 let rpc = self.client.clone();
4050 let message = request.to_proto(project_id, buffer);
4051 return cx.spawn(|this, cx| async move {
4052 let response = rpc.request(message).await?;
4053 request
4054 .response_from_proto(response, this, buffer_handle, cx)
4055 .await
4056 });
4057 }
4058 Task::ready(Ok(Default::default()))
4059 }
4060
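    // Returns the worktree containing `abs_path` along with the path relative to the
    // worktree root, creating and adding a new local worktree if none contains it.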
4061 pub fn find_or_create_local_worktree(
4062 &mut self,
4063 abs_path: impl AsRef<Path>,
4064 visible: bool,
4065 cx: &mut ModelContext<Self>,
4066 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4067 let abs_path = abs_path.as_ref();
4068 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4069 Task::ready(Ok((tree.clone(), relative_path.into())))
4070 } else {
4071 let worktree = self.create_local_worktree(abs_path, visible, cx);
4072 cx.foreground()
4073 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4074 }
4075 }
4076
4077 pub fn find_local_worktree(
4078 &self,
4079 abs_path: &Path,
4080 cx: &AppContext,
4081 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4082 for tree in &self.worktrees {
4083 if let Some(tree) = tree.upgrade(cx) {
4084 if let Some(relative_path) = tree
4085 .read(cx)
4086 .as_local()
4087 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4088 {
4089 return Some((tree.clone(), relative_path.into()));
4090 }
4091 }
4092 }
4093 None
4094 }
4095
4096 pub fn is_shared(&self) -> bool {
4097 match &self.client_state {
4098 ProjectClientState::Local { is_shared, .. } => *is_shared,
4099 ProjectClientState::Remote { .. } => false,
4100 }
4101 }
4102
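    // Starts loading a local worktree rooted at `abs_path`, memoizing in-flight loads so
    // that concurrent callers share a single task. Once loaded, the worktree is added to
    // the project and, if the project is currently shared, shared with collaborators.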
4103 fn create_local_worktree(
4104 &mut self,
4105 abs_path: impl AsRef<Path>,
4106 visible: bool,
4107 cx: &mut ModelContext<Self>,
4108 ) -> Task<Result<ModelHandle<Worktree>>> {
4109 let fs = self.fs.clone();
4110 let client = self.client.clone();
4111 let next_entry_id = self.next_entry_id.clone();
4112 let path: Arc<Path> = abs_path.as_ref().into();
4113 let task = self
4114 .loading_local_worktrees
4115 .entry(path.clone())
4116 .or_insert_with(|| {
4117 cx.spawn(|project, mut cx| {
4118 async move {
4119 let worktree = Worktree::local(
4120 client.clone(),
4121 path.clone(),
4122 visible,
4123 fs,
4124 next_entry_id,
4125 &mut cx,
4126 )
4127 .await;
4128 project.update(&mut cx, |project, _| {
4129 project.loading_local_worktrees.remove(&path);
4130 });
4131 let worktree = worktree?;
4132
4133 let project_id = project.update(&mut cx, |project, cx| {
4134 project.add_worktree(&worktree, cx);
4135 project.shared_remote_id()
4136 });
4137
4138 if let Some(project_id) = project_id {
4139 worktree
4140 .update(&mut cx, |worktree, cx| {
4141 worktree.as_local_mut().unwrap().share(project_id, cx)
4142 })
4143 .await
4144 .log_err();
4145 }
4146
4147 Ok(worktree)
4148 }
                    .map_err(Arc::new)
4150 })
4151 .shared()
4152 })
4153 .clone();
4154 cx.foreground().spawn(async move {
4155 match task.await {
4156 Ok(worktree) => Ok(worktree),
4157 Err(err) => Err(anyhow!("{}", err)),
4158 }
4159 })
4160 }
4161
4162 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4163 self.worktrees.retain(|worktree| {
4164 if let Some(worktree) = worktree.upgrade(cx) {
4165 let id = worktree.read(cx).id();
4166 if id == id_to_remove {
4167 cx.emit(Event::WorktreeRemoved(id));
4168 false
4169 } else {
4170 true
4171 }
4172 } else {
4173 false
4174 }
4175 });
4176 self.metadata_changed(true, cx);
4177 cx.notify();
4178 }
4179
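    // Registers a worktree with the project. Visible or remote worktrees (and every
    // worktree of a shared project) are held strongly; other local worktrees are held
    // weakly so they can be released when no longer in use.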
4180 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4181 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4182 if worktree.read(cx).is_local() {
4183 cx.subscribe(&worktree, |this, worktree, _, cx| {
4184 this.update_local_worktree_buffers(worktree, cx);
4185 })
4186 .detach();
4187 }
4188
4189 let push_strong_handle = {
4190 let worktree = worktree.read(cx);
4191 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4192 };
4193 if push_strong_handle {
4194 self.worktrees
4195 .push(WorktreeHandle::Strong(worktree.clone()));
4196 } else {
4197 self.worktrees
4198 .push(WorktreeHandle::Weak(worktree.downgrade()));
4199 }
4200
4201 self.metadata_changed(true, cx);
4202 cx.observe_release(&worktree, |this, worktree, cx| {
4203 this.remove_worktree(worktree.id(), cx);
4204 cx.notify();
4205 })
4206 .detach();
4207
4208 cx.emit(Event::WorktreeAdded);
4209 cx.notify();
4210 }
4211
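    // After a local worktree rescan, refreshes the `File` metadata of every buffer open in
    // that worktree, notifies collaborators of the changes, and re-registers buffers whose
    // paths changed with their language servers.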
4212 fn update_local_worktree_buffers(
4213 &mut self,
4214 worktree_handle: ModelHandle<Worktree>,
4215 cx: &mut ModelContext<Self>,
4216 ) {
4217 let snapshot = worktree_handle.read(cx).snapshot();
4218 let mut buffers_to_delete = Vec::new();
4219 let mut renamed_buffers = Vec::new();
4220 for (buffer_id, buffer) in &self.opened_buffers {
4221 if let Some(buffer) = buffer.upgrade(cx) {
4222 buffer.update(cx, |buffer, cx| {
4223 if let Some(old_file) = File::from_dyn(buffer.file()) {
4224 if old_file.worktree != worktree_handle {
4225 return;
4226 }
4227
4228 let new_file = if let Some(entry) = old_file
4229 .entry_id
4230 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4231 {
4232 File {
4233 is_local: true,
4234 entry_id: Some(entry.id),
4235 mtime: entry.mtime,
4236 path: entry.path.clone(),
4237 worktree: worktree_handle.clone(),
4238 }
4239 } else if let Some(entry) =
4240 snapshot.entry_for_path(old_file.path().as_ref())
4241 {
4242 File {
4243 is_local: true,
4244 entry_id: Some(entry.id),
4245 mtime: entry.mtime,
4246 path: entry.path.clone(),
4247 worktree: worktree_handle.clone(),
4248 }
4249 } else {
4250 File {
4251 is_local: true,
4252 entry_id: None,
4253 path: old_file.path().clone(),
4254 mtime: old_file.mtime(),
4255 worktree: worktree_handle.clone(),
4256 }
4257 };
4258
4259 let old_path = old_file.abs_path(cx);
4260 if new_file.abs_path(cx) != old_path {
4261 renamed_buffers.push((cx.handle(), old_path));
4262 }
4263
4264 if let Some(project_id) = self.shared_remote_id() {
4265 self.client
4266 .send(proto::UpdateBufferFile {
4267 project_id,
4268 buffer_id: *buffer_id as u64,
4269 file: Some(new_file.to_proto()),
4270 })
4271 .log_err();
4272 }
4273 buffer.file_updated(Arc::new(new_file), cx).detach();
4274 }
4275 });
4276 } else {
4277 buffers_to_delete.push(*buffer_id);
4278 }
4279 }
4280
4281 for buffer_id in buffers_to_delete {
4282 self.opened_buffers.remove(&buffer_id);
4283 }
4284
4285 for (buffer, old_path) in renamed_buffers {
4286 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4287 self.assign_language_to_buffer(&buffer, cx);
4288 self.register_buffer_with_language_server(&buffer, cx);
4289 }
4290 }
4291
4292 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4293 let new_active_entry = entry.and_then(|project_path| {
4294 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4295 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4296 Some(entry.id)
4297 });
4298 if new_active_entry != self.active_entry {
4299 self.active_entry = new_active_entry;
4300 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4301 }
4302 }
4303
4304 pub fn language_servers_running_disk_based_diagnostics<'a>(
4305 &'a self,
4306 ) -> impl 'a + Iterator<Item = usize> {
4307 self.language_server_statuses
4308 .iter()
4309 .filter_map(|(id, status)| {
4310 if status.has_pending_diagnostic_updates {
4311 Some(*id)
4312 } else {
4313 None
4314 }
4315 })
4316 }
4317
4318 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4319 let mut summary = DiagnosticSummary::default();
4320 for (_, path_summary) in self.diagnostic_summaries(cx) {
4321 summary.error_count += path_summary.error_count;
4322 summary.warning_count += path_summary.warning_count;
4323 }
4324 summary
4325 }
4326
4327 pub fn diagnostic_summaries<'a>(
4328 &'a self,
4329 cx: &'a AppContext,
4330 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4331 self.visible_worktrees(cx).flat_map(move |worktree| {
4332 let worktree = worktree.read(cx);
4333 let worktree_id = worktree.id();
4334 worktree
4335 .diagnostic_summaries()
4336 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4337 })
4338 }
4339
4340 pub fn disk_based_diagnostics_started(
4341 &mut self,
4342 language_server_id: usize,
4343 cx: &mut ModelContext<Self>,
4344 ) {
4345 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4346 }
4347
4348 pub fn disk_based_diagnostics_finished(
4349 &mut self,
4350 language_server_id: usize,
4351 cx: &mut ModelContext<Self>,
4352 ) {
4353 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4354 }
4355
4356 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4357 self.active_entry
4358 }
4359
4360 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4361 self.worktree_for_id(path.worktree_id, cx)?
4362 .read(cx)
4363 .entry_for_path(&path.path)
4364 .map(|entry| entry.id)
4365 }
4366
4367 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4368 let worktree = self.worktree_for_entry(entry_id, cx)?;
4369 let worktree = worktree.read(cx);
4370 let worktree_id = worktree.id();
4371 let path = worktree.entry_for_id(entry_id)?.path.clone();
4372 Some(ProjectPath { worktree_id, path })
4373 }
4374
4375 // RPC message handlers
4376
4377 async fn handle_request_join_project(
4378 this: ModelHandle<Self>,
4379 message: TypedEnvelope<proto::RequestJoinProject>,
4380 _: Arc<Client>,
4381 mut cx: AsyncAppContext,
4382 ) -> Result<()> {
4383 let user_id = message.payload.requester_id;
4384 if this.read_with(&cx, |project, _| {
4385 project.collaborators.values().any(|c| c.user.id == user_id)
4386 }) {
4387 this.update(&mut cx, |this, cx| {
4388 this.respond_to_join_request(user_id, true, cx)
4389 });
4390 } else {
4391 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4392 let user = user_store
4393 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4394 .await?;
4395 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4396 }
4397 Ok(())
4398 }
4399
4400 async fn handle_unregister_project(
4401 this: ModelHandle<Self>,
4402 _: TypedEnvelope<proto::UnregisterProject>,
4403 _: Arc<Client>,
4404 mut cx: AsyncAppContext,
4405 ) -> Result<()> {
4406 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4407 Ok(())
4408 }
4409
4410 async fn handle_project_unshared(
4411 this: ModelHandle<Self>,
4412 _: TypedEnvelope<proto::ProjectUnshared>,
4413 _: Arc<Client>,
4414 mut cx: AsyncAppContext,
4415 ) -> Result<()> {
4416 this.update(&mut cx, |this, cx| this.unshared(cx));
4417 Ok(())
4418 }
4419
4420 async fn handle_add_collaborator(
4421 this: ModelHandle<Self>,
4422 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4423 _: Arc<Client>,
4424 mut cx: AsyncAppContext,
4425 ) -> Result<()> {
4426 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4427 let collaborator = envelope
4428 .payload
4429 .collaborator
4430 .take()
4431 .ok_or_else(|| anyhow!("empty collaborator"))?;
4432
4433 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4434 this.update(&mut cx, |this, cx| {
4435 this.collaborators
4436 .insert(collaborator.peer_id, collaborator);
4437 cx.notify();
4438 });
4439
4440 Ok(())
4441 }
4442
4443 async fn handle_remove_collaborator(
4444 this: ModelHandle<Self>,
4445 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4446 _: Arc<Client>,
4447 mut cx: AsyncAppContext,
4448 ) -> Result<()> {
4449 this.update(&mut cx, |this, cx| {
4450 let peer_id = PeerId(envelope.payload.peer_id);
4451 let replica_id = this
4452 .collaborators
4453 .remove(&peer_id)
4454 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4455 .replica_id;
4456 for (_, buffer) in &this.opened_buffers {
4457 if let Some(buffer) = buffer.upgrade(cx) {
4458 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4459 }
4460 }
4461
4462 cx.emit(Event::CollaboratorLeft(peer_id));
4463 cx.notify();
4464 Ok(())
4465 })
4466 }
4467
4468 async fn handle_join_project_request_cancelled(
4469 this: ModelHandle<Self>,
4470 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4471 _: Arc<Client>,
4472 mut cx: AsyncAppContext,
4473 ) -> Result<()> {
4474 let user = this
4475 .update(&mut cx, |this, cx| {
4476 this.user_store.update(cx, |user_store, cx| {
4477 user_store.fetch_user(envelope.payload.requester_id, cx)
4478 })
4479 })
4480 .await?;
4481
4482 this.update(&mut cx, |_, cx| {
4483 cx.emit(Event::ContactCancelledJoinRequest(user));
4484 });
4485
4486 Ok(())
4487 }
4488
4489 async fn handle_update_project(
4490 this: ModelHandle<Self>,
4491 envelope: TypedEnvelope<proto::UpdateProject>,
4492 client: Arc<Client>,
4493 mut cx: AsyncAppContext,
4494 ) -> Result<()> {
4495 this.update(&mut cx, |this, cx| {
4496 let replica_id = this.replica_id();
4497 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4498
4499 let mut old_worktrees_by_id = this
4500 .worktrees
4501 .drain(..)
4502 .filter_map(|worktree| {
4503 let worktree = worktree.upgrade(cx)?;
4504 Some((worktree.read(cx).id(), worktree))
4505 })
4506 .collect::<HashMap<_, _>>();
4507
4508 for worktree in envelope.payload.worktrees {
4509 if let Some(old_worktree) =
4510 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4511 {
4512 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4513 } else {
4514 let worktree = proto::Worktree {
4515 id: worktree.id,
4516 root_name: worktree.root_name,
4517 entries: Default::default(),
4518 diagnostic_summaries: Default::default(),
4519 visible: worktree.visible,
4520 scan_id: 0,
4521 };
4522 let (worktree, load_task) =
4523 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4524 this.add_worktree(&worktree, cx);
4525 load_task.detach();
4526 }
4527 }
4528
4529 this.metadata_changed(true, cx);
4530 for (id, _) in old_worktrees_by_id {
4531 cx.emit(Event::WorktreeRemoved(id));
4532 }
4533
4534 Ok(())
4535 })
4536 }
4537
4538 async fn handle_update_worktree(
4539 this: ModelHandle<Self>,
4540 envelope: TypedEnvelope<proto::UpdateWorktree>,
4541 _: Arc<Client>,
4542 mut cx: AsyncAppContext,
4543 ) -> Result<()> {
4544 this.update(&mut cx, |this, cx| {
4545 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4546 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4547 worktree.update(cx, |worktree, _| {
4548 let worktree = worktree.as_remote_mut().unwrap();
4549 worktree.update_from_remote(envelope)
4550 })?;
4551 }
4552 Ok(())
4553 })
4554 }
4555
4556 async fn handle_create_project_entry(
4557 this: ModelHandle<Self>,
4558 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4559 _: Arc<Client>,
4560 mut cx: AsyncAppContext,
4561 ) -> Result<proto::ProjectEntryResponse> {
4562 let worktree = this.update(&mut cx, |this, cx| {
4563 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4564 this.worktree_for_id(worktree_id, cx)
4565 .ok_or_else(|| anyhow!("worktree not found"))
4566 })?;
4567 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4568 let entry = worktree
4569 .update(&mut cx, |worktree, cx| {
4570 let worktree = worktree.as_local_mut().unwrap();
4571 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4572 worktree.create_entry(path, envelope.payload.is_directory, cx)
4573 })
4574 .await?;
4575 Ok(proto::ProjectEntryResponse {
4576 entry: Some((&entry).into()),
4577 worktree_scan_id: worktree_scan_id as u64,
4578 })
4579 }
4580
4581 async fn handle_rename_project_entry(
4582 this: ModelHandle<Self>,
4583 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4584 _: Arc<Client>,
4585 mut cx: AsyncAppContext,
4586 ) -> Result<proto::ProjectEntryResponse> {
4587 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4588 let worktree = this.read_with(&cx, |this, cx| {
4589 this.worktree_for_entry(entry_id, cx)
4590 .ok_or_else(|| anyhow!("worktree not found"))
4591 })?;
4592 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4593 let entry = worktree
4594 .update(&mut cx, |worktree, cx| {
4595 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4596 worktree
4597 .as_local_mut()
4598 .unwrap()
4599 .rename_entry(entry_id, new_path, cx)
4600 .ok_or_else(|| anyhow!("invalid entry"))
4601 })?
4602 .await?;
4603 Ok(proto::ProjectEntryResponse {
4604 entry: Some((&entry).into()),
4605 worktree_scan_id: worktree_scan_id as u64,
4606 })
4607 }
4608
4609 async fn handle_copy_project_entry(
4610 this: ModelHandle<Self>,
4611 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4612 _: Arc<Client>,
4613 mut cx: AsyncAppContext,
4614 ) -> Result<proto::ProjectEntryResponse> {
4615 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4616 let worktree = this.read_with(&cx, |this, cx| {
4617 this.worktree_for_entry(entry_id, cx)
4618 .ok_or_else(|| anyhow!("worktree not found"))
4619 })?;
4620 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4621 let entry = worktree
4622 .update(&mut cx, |worktree, cx| {
4623 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4624 worktree
4625 .as_local_mut()
4626 .unwrap()
4627 .copy_entry(entry_id, new_path, cx)
4628 .ok_or_else(|| anyhow!("invalid entry"))
4629 })?
4630 .await?;
4631 Ok(proto::ProjectEntryResponse {
4632 entry: Some((&entry).into()),
4633 worktree_scan_id: worktree_scan_id as u64,
4634 })
4635 }
4636
4637 async fn handle_delete_project_entry(
4638 this: ModelHandle<Self>,
4639 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4640 _: Arc<Client>,
4641 mut cx: AsyncAppContext,
4642 ) -> Result<proto::ProjectEntryResponse> {
4643 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4644 let worktree = this.read_with(&cx, |this, cx| {
4645 this.worktree_for_entry(entry_id, cx)
4646 .ok_or_else(|| anyhow!("worktree not found"))
4647 })?;
4648 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4649 worktree
4650 .update(&mut cx, |worktree, cx| {
4651 worktree
4652 .as_local_mut()
4653 .unwrap()
4654 .delete_entry(entry_id, cx)
4655 .ok_or_else(|| anyhow!("invalid entry"))
4656 })?
4657 .await?;
4658 Ok(proto::ProjectEntryResponse {
4659 entry: None,
4660 worktree_scan_id: worktree_scan_id as u64,
4661 })
4662 }
4663
4664 async fn handle_update_diagnostic_summary(
4665 this: ModelHandle<Self>,
4666 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4667 _: Arc<Client>,
4668 mut cx: AsyncAppContext,
4669 ) -> Result<()> {
4670 this.update(&mut cx, |this, cx| {
4671 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4672 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4673 if let Some(summary) = envelope.payload.summary {
4674 let project_path = ProjectPath {
4675 worktree_id,
4676 path: Path::new(&summary.path).into(),
4677 };
4678 worktree.update(cx, |worktree, _| {
4679 worktree
4680 .as_remote_mut()
4681 .unwrap()
4682 .update_diagnostic_summary(project_path.path.clone(), &summary);
4683 });
4684 cx.emit(Event::DiagnosticsUpdated {
4685 language_server_id: summary.language_server_id as usize,
4686 path: project_path,
4687 });
4688 }
4689 }
4690 Ok(())
4691 })
4692 }
4693
4694 async fn handle_start_language_server(
4695 this: ModelHandle<Self>,
4696 envelope: TypedEnvelope<proto::StartLanguageServer>,
4697 _: Arc<Client>,
4698 mut cx: AsyncAppContext,
4699 ) -> Result<()> {
4700 let server = envelope
4701 .payload
4702 .server
4703 .ok_or_else(|| anyhow!("invalid server"))?;
4704 this.update(&mut cx, |this, cx| {
4705 this.language_server_statuses.insert(
4706 server.id as usize,
4707 LanguageServerStatus {
4708 name: server.name,
4709 pending_work: Default::default(),
4710 has_pending_diagnostic_updates: false,
4711 progress_tokens: Default::default(),
4712 },
4713 );
4714 cx.notify();
4715 });
4716 Ok(())
4717 }
4718
4719 async fn handle_update_language_server(
4720 this: ModelHandle<Self>,
4721 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4722 _: Arc<Client>,
4723 mut cx: AsyncAppContext,
4724 ) -> Result<()> {
4725 let language_server_id = envelope.payload.language_server_id as usize;
4726 match envelope
4727 .payload
4728 .variant
4729 .ok_or_else(|| anyhow!("invalid variant"))?
4730 {
4731 proto::update_language_server::Variant::WorkStart(payload) => {
4732 this.update(&mut cx, |this, cx| {
4733 this.on_lsp_work_start(
4734 language_server_id,
4735 payload.token,
4736 LanguageServerProgress {
4737 message: payload.message,
4738 percentage: payload.percentage.map(|p| p as usize),
4739 last_update_at: Instant::now(),
4740 },
4741 cx,
4742 );
4743 })
4744 }
4745 proto::update_language_server::Variant::WorkProgress(payload) => {
4746 this.update(&mut cx, |this, cx| {
4747 this.on_lsp_work_progress(
4748 language_server_id,
4749 payload.token,
4750 LanguageServerProgress {
4751 message: payload.message,
4752 percentage: payload.percentage.map(|p| p as usize),
4753 last_update_at: Instant::now(),
4754 },
4755 cx,
4756 );
4757 })
4758 }
4759 proto::update_language_server::Variant::WorkEnd(payload) => {
4760 this.update(&mut cx, |this, cx| {
4761 this.on_lsp_work_end(language_server_id, payload.token, cx);
4762 })
4763 }
4764 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4765 this.update(&mut cx, |this, cx| {
4766 this.disk_based_diagnostics_started(language_server_id, cx);
4767 })
4768 }
4769 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4770 this.update(&mut cx, |this, cx| {
4771 this.disk_based_diagnostics_finished(language_server_id, cx)
4772 });
4773 }
4774 }
4775
4776 Ok(())
4777 }
4778
4779 async fn handle_update_buffer(
4780 this: ModelHandle<Self>,
4781 envelope: TypedEnvelope<proto::UpdateBuffer>,
4782 _: Arc<Client>,
4783 mut cx: AsyncAppContext,
4784 ) -> Result<()> {
4785 this.update(&mut cx, |this, cx| {
4786 let payload = envelope.payload.clone();
4787 let buffer_id = payload.buffer_id;
4788 let ops = payload
4789 .operations
4790 .into_iter()
4791 .map(|op| language::proto::deserialize_operation(op))
4792 .collect::<Result<Vec<_>, _>>()?;
4793 let is_remote = this.is_remote();
4794 match this.opened_buffers.entry(buffer_id) {
4795 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4796 OpenBuffer::Strong(buffer) => {
4797 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4798 }
4799 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4800 OpenBuffer::Weak(_) => {}
4801 },
4802 hash_map::Entry::Vacant(e) => {
4803 assert!(
4804 is_remote,
4805 "received buffer update from {:?}",
4806 envelope.original_sender_id
4807 );
4808 e.insert(OpenBuffer::Loading(ops));
4809 }
4810 }
4811 Ok(())
4812 })
4813 }
4814
4815 async fn handle_update_buffer_file(
4816 this: ModelHandle<Self>,
4817 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4818 _: Arc<Client>,
4819 mut cx: AsyncAppContext,
4820 ) -> Result<()> {
4821 this.update(&mut cx, |this, cx| {
4822 let payload = envelope.payload.clone();
4823 let buffer_id = payload.buffer_id;
4824 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4825 let worktree = this
4826 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4827 .ok_or_else(|| anyhow!("no such worktree"))?;
4828 let file = File::from_proto(file, worktree.clone(), cx)?;
4829 let buffer = this
4830 .opened_buffers
4831 .get_mut(&buffer_id)
4832 .and_then(|b| b.upgrade(cx))
4833 .ok_or_else(|| anyhow!("no such buffer"))?;
4834 buffer.update(cx, |buffer, cx| {
4835 buffer.file_updated(Arc::new(file), cx).detach();
4836 });
4837 Ok(())
4838 })
4839 }
4840
4841 async fn handle_save_buffer(
4842 this: ModelHandle<Self>,
4843 envelope: TypedEnvelope<proto::SaveBuffer>,
4844 _: Arc<Client>,
4845 mut cx: AsyncAppContext,
4846 ) -> Result<proto::BufferSaved> {
4847 let buffer_id = envelope.payload.buffer_id;
4848 let requested_version = deserialize_version(envelope.payload.version);
4849
4850 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4851 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4852 let buffer = this
4853 .opened_buffers
4854 .get(&buffer_id)
4855 .and_then(|buffer| buffer.upgrade(cx))
4856 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4857 Ok::<_, anyhow::Error>((project_id, buffer))
4858 })?;
4859 buffer
4860 .update(&mut cx, |buffer, _| {
4861 buffer.wait_for_version(requested_version)
4862 })
4863 .await;
4864
4865 let (saved_version, fingerprint, mtime) =
4866 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4867 Ok(proto::BufferSaved {
4868 project_id,
4869 buffer_id,
4870 version: serialize_version(&saved_version),
4871 mtime: Some(mtime.into()),
4872 fingerprint,
4873 })
4874 }
4875
4876 async fn handle_reload_buffers(
4877 this: ModelHandle<Self>,
4878 envelope: TypedEnvelope<proto::ReloadBuffers>,
4879 _: Arc<Client>,
4880 mut cx: AsyncAppContext,
4881 ) -> Result<proto::ReloadBuffersResponse> {
4882 let sender_id = envelope.original_sender_id()?;
4883 let reload = this.update(&mut cx, |this, cx| {
4884 let mut buffers = HashSet::default();
4885 for buffer_id in &envelope.payload.buffer_ids {
4886 buffers.insert(
4887 this.opened_buffers
4888 .get(buffer_id)
4889 .and_then(|buffer| buffer.upgrade(cx))
4890 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4891 );
4892 }
4893 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4894 })?;
4895
4896 let project_transaction = reload.await?;
4897 let project_transaction = this.update(&mut cx, |this, cx| {
4898 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4899 });
4900 Ok(proto::ReloadBuffersResponse {
4901 transaction: Some(project_transaction),
4902 })
4903 }
4904
4905 async fn handle_format_buffers(
4906 this: ModelHandle<Self>,
4907 envelope: TypedEnvelope<proto::FormatBuffers>,
4908 _: Arc<Client>,
4909 mut cx: AsyncAppContext,
4910 ) -> Result<proto::FormatBuffersResponse> {
4911 let sender_id = envelope.original_sender_id()?;
4912 let format = this.update(&mut cx, |this, cx| {
4913 let mut buffers = HashSet::default();
4914 for buffer_id in &envelope.payload.buffer_ids {
4915 buffers.insert(
4916 this.opened_buffers
4917 .get(buffer_id)
4918 .and_then(|buffer| buffer.upgrade(cx))
4919 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4920 );
4921 }
4922 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4923 })?;
4924
4925 let project_transaction = format.await?;
4926 let project_transaction = this.update(&mut cx, |this, cx| {
4927 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4928 });
4929 Ok(proto::FormatBuffersResponse {
4930 transaction: Some(project_transaction),
4931 })
4932 }
4933
4934 async fn handle_get_completions(
4935 this: ModelHandle<Self>,
4936 envelope: TypedEnvelope<proto::GetCompletions>,
4937 _: Arc<Client>,
4938 mut cx: AsyncAppContext,
4939 ) -> Result<proto::GetCompletionsResponse> {
4940 let position = envelope
4941 .payload
4942 .position
4943 .and_then(language::proto::deserialize_anchor)
4944 .ok_or_else(|| anyhow!("invalid position"))?;
4945 let version = deserialize_version(envelope.payload.version);
4946 let buffer = this.read_with(&cx, |this, cx| {
4947 this.opened_buffers
4948 .get(&envelope.payload.buffer_id)
4949 .and_then(|buffer| buffer.upgrade(cx))
4950 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4951 })?;
4952 buffer
4953 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4954 .await;
4955 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4956 let completions = this
4957 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4958 .await?;
4959
4960 Ok(proto::GetCompletionsResponse {
4961 completions: completions
4962 .iter()
4963 .map(language::proto::serialize_completion)
4964 .collect(),
4965 version: serialize_version(&version),
4966 })
4967 }
4968
4969 async fn handle_apply_additional_edits_for_completion(
4970 this: ModelHandle<Self>,
4971 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4972 _: Arc<Client>,
4973 mut cx: AsyncAppContext,
4974 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4975 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4976 let buffer = this
4977 .opened_buffers
4978 .get(&envelope.payload.buffer_id)
4979 .and_then(|buffer| buffer.upgrade(cx))
4980 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4981 let language = buffer.read(cx).language();
4982 let completion = language::proto::deserialize_completion(
4983 envelope
4984 .payload
4985 .completion
4986 .ok_or_else(|| anyhow!("invalid completion"))?,
4987 language,
4988 )?;
4989 Ok::<_, anyhow::Error>(
4990 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4991 )
4992 })?;
4993
4994 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4995 transaction: apply_additional_edits
4996 .await?
4997 .as_ref()
4998 .map(language::proto::serialize_transaction),
4999 })
5000 }
5001
5002 async fn handle_get_code_actions(
5003 this: ModelHandle<Self>,
5004 envelope: TypedEnvelope<proto::GetCodeActions>,
5005 _: Arc<Client>,
5006 mut cx: AsyncAppContext,
5007 ) -> Result<proto::GetCodeActionsResponse> {
5008 let start = envelope
5009 .payload
5010 .start
5011 .and_then(language::proto::deserialize_anchor)
5012 .ok_or_else(|| anyhow!("invalid start"))?;
5013 let end = envelope
5014 .payload
5015 .end
5016 .and_then(language::proto::deserialize_anchor)
5017 .ok_or_else(|| anyhow!("invalid end"))?;
5018 let buffer = this.update(&mut cx, |this, cx| {
5019 this.opened_buffers
5020 .get(&envelope.payload.buffer_id)
5021 .and_then(|buffer| buffer.upgrade(cx))
5022 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5023 })?;
5024 buffer
5025 .update(&mut cx, |buffer, _| {
5026 buffer.wait_for_version(deserialize_version(envelope.payload.version))
5027 })
5028 .await;
5029
5030 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5031 let code_actions = this.update(&mut cx, |this, cx| {
5032 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5033 })?;
5034
5035 Ok(proto::GetCodeActionsResponse {
5036 actions: code_actions
5037 .await?
5038 .iter()
5039 .map(language::proto::serialize_code_action)
5040 .collect(),
5041 version: serialize_version(&version),
5042 })
5043 }
5044
5045 async fn handle_apply_code_action(
5046 this: ModelHandle<Self>,
5047 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5048 _: Arc<Client>,
5049 mut cx: AsyncAppContext,
5050 ) -> Result<proto::ApplyCodeActionResponse> {
5051 let sender_id = envelope.original_sender_id()?;
5052 let action = language::proto::deserialize_code_action(
5053 envelope
5054 .payload
5055 .action
5056 .ok_or_else(|| anyhow!("invalid action"))?,
5057 )?;
5058 let apply_code_action = this.update(&mut cx, |this, cx| {
5059 let buffer = this
5060 .opened_buffers
5061 .get(&envelope.payload.buffer_id)
5062 .and_then(|buffer| buffer.upgrade(cx))
5063 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5064 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5065 })?;
5066
5067 let project_transaction = apply_code_action.await?;
5068 let project_transaction = this.update(&mut cx, |this, cx| {
5069 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5070 });
5071 Ok(proto::ApplyCodeActionResponse {
5072 transaction: Some(project_transaction),
5073 })
5074 }
5075
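    // Generic handler for LSP-backed requests forwarded by peers: deserializes the
    // request, runs it through `request_lsp`, and serializes the response along with the
    // buffer version it was computed against.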
5076 async fn handle_lsp_command<T: LspCommand>(
5077 this: ModelHandle<Self>,
5078 envelope: TypedEnvelope<T::ProtoRequest>,
5079 _: Arc<Client>,
5080 mut cx: AsyncAppContext,
5081 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5082 where
5083 <T::LspRequest as lsp::request::Request>::Result: Send,
5084 {
5085 let sender_id = envelope.original_sender_id()?;
5086 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5087 let buffer_handle = this.read_with(&cx, |this, _| {
5088 this.opened_buffers
5089 .get(&buffer_id)
5090 .and_then(|buffer| buffer.upgrade(&cx))
5091 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5092 })?;
5093 let request = T::from_proto(
5094 envelope.payload,
5095 this.clone(),
5096 buffer_handle.clone(),
5097 cx.clone(),
5098 )
5099 .await?;
5100 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5101 let response = this
5102 .update(&mut cx, |this, cx| {
5103 this.request_lsp(buffer_handle, request, cx)
5104 })
5105 .await?;
5106 this.update(&mut cx, |this, cx| {
5107 Ok(T::response_to_proto(
5108 response,
5109 this,
5110 sender_id,
5111 &buffer_version,
5112 cx,
5113 ))
5114 })
5115 }
5116
5117 async fn handle_get_project_symbols(
5118 this: ModelHandle<Self>,
5119 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5120 _: Arc<Client>,
5121 mut cx: AsyncAppContext,
5122 ) -> Result<proto::GetProjectSymbolsResponse> {
5123 let symbols = this
5124 .update(&mut cx, |this, cx| {
5125 this.symbols(&envelope.payload.query, cx)
5126 })
5127 .await?;
5128
5129 Ok(proto::GetProjectSymbolsResponse {
5130 symbols: symbols.iter().map(serialize_symbol).collect(),
5131 })
5132 }
5133
5134 async fn handle_search_project(
5135 this: ModelHandle<Self>,
5136 envelope: TypedEnvelope<proto::SearchProject>,
5137 _: Arc<Client>,
5138 mut cx: AsyncAppContext,
5139 ) -> Result<proto::SearchProjectResponse> {
5140 let peer_id = envelope.original_sender_id()?;
5141 let query = SearchQuery::from_proto(envelope.payload)?;
5142 let result = this
5143 .update(&mut cx, |this, cx| this.search(query, cx))
5144 .await?;
5145
5146 this.update(&mut cx, |this, cx| {
5147 let mut locations = Vec::new();
5148 for (buffer, ranges) in result {
5149 for range in ranges {
5150 let start = serialize_anchor(&range.start);
5151 let end = serialize_anchor(&range.end);
5152 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5153 locations.push(proto::Location {
5154 buffer: Some(buffer),
5155 start: Some(start),
5156 end: Some(end),
5157 });
5158 }
5159 }
5160 Ok(proto::SearchProjectResponse { locations })
5161 })
5162 }
5163
5164 async fn handle_open_buffer_for_symbol(
5165 this: ModelHandle<Self>,
5166 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5167 _: Arc<Client>,
5168 mut cx: AsyncAppContext,
5169 ) -> Result<proto::OpenBufferForSymbolResponse> {
5170 let peer_id = envelope.original_sender_id()?;
5171 let symbol = envelope
5172 .payload
5173 .symbol
5174 .ok_or_else(|| anyhow!("invalid symbol"))?;
5175 let symbol = this.read_with(&cx, |this, _| {
5176 let symbol = this.deserialize_symbol(symbol)?;
5177 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5178 if signature == symbol.signature {
5179 Ok(symbol)
5180 } else {
5181 Err(anyhow!("invalid symbol signature"))
5182 }
5183 })?;
5184 let buffer = this
5185 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5186 .await?;
5187
5188 Ok(proto::OpenBufferForSymbolResponse {
5189 buffer: Some(this.update(&mut cx, |this, cx| {
5190 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5191 })),
5192 })
5193 }
5194
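    // Computes an opaque signature for a symbol's path by hashing the worktree id, the
    // path, and a per-project nonce. Guests must echo the signature back, which prevents
    // them from opening arbitrary paths by fabricating symbols.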
5195 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5196 let mut hasher = Sha256::new();
5197 hasher.update(worktree_id.to_proto().to_be_bytes());
5198 hasher.update(path.to_string_lossy().as_bytes());
5199 hasher.update(self.nonce.to_be_bytes());
5200 hasher.finalize().as_slice().try_into().unwrap()
5201 }
5202
5203 async fn handle_open_buffer_by_id(
5204 this: ModelHandle<Self>,
5205 envelope: TypedEnvelope<proto::OpenBufferById>,
5206 _: Arc<Client>,
5207 mut cx: AsyncAppContext,
5208 ) -> Result<proto::OpenBufferResponse> {
5209 let peer_id = envelope.original_sender_id()?;
5210 let buffer = this
5211 .update(&mut cx, |this, cx| {
5212 this.open_buffer_by_id(envelope.payload.id, cx)
5213 })
5214 .await?;
5215 this.update(&mut cx, |this, cx| {
5216 Ok(proto::OpenBufferResponse {
5217 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5218 })
5219 })
5220 }
5221
5222 async fn handle_open_buffer_by_path(
5223 this: ModelHandle<Self>,
5224 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5225 _: Arc<Client>,
5226 mut cx: AsyncAppContext,
5227 ) -> Result<proto::OpenBufferResponse> {
5228 let peer_id = envelope.original_sender_id()?;
5229 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5230 let open_buffer = this.update(&mut cx, |this, cx| {
5231 this.open_buffer(
5232 ProjectPath {
5233 worktree_id,
5234 path: PathBuf::from(envelope.payload.path).into(),
5235 },
5236 cx,
5237 )
5238 });
5239
5240 let buffer = open_buffer.await?;
5241 this.update(&mut cx, |this, cx| {
5242 Ok(proto::OpenBufferResponse {
5243 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5244 })
5245 })
5246 }
5247
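    // Converts a `ProjectTransaction` into its protobuf form, making sure each affected
    // buffer has been sent to `peer_id` at least once.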
5248 fn serialize_project_transaction_for_peer(
5249 &mut self,
5250 project_transaction: ProjectTransaction,
5251 peer_id: PeerId,
5252 cx: &AppContext,
5253 ) -> proto::ProjectTransaction {
5254 let mut serialized_transaction = proto::ProjectTransaction {
5255 buffers: Default::default(),
5256 transactions: Default::default(),
5257 };
5258 for (buffer, transaction) in project_transaction.0 {
5259 serialized_transaction
5260 .buffers
5261 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5262 serialized_transaction
5263 .transactions
5264 .push(language::proto::serialize_transaction(&transaction));
5265 }
5266 serialized_transaction
5267 }
5268
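    // Reconstructs a `ProjectTransaction` received from the host, waiting for each buffer
    // to apply the edits it refers to and optionally pushing the transactions onto the
    // buffers' undo histories.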
5269 fn deserialize_project_transaction(
5270 &mut self,
5271 message: proto::ProjectTransaction,
5272 push_to_history: bool,
5273 cx: &mut ModelContext<Self>,
5274 ) -> Task<Result<ProjectTransaction>> {
5275 cx.spawn(|this, mut cx| async move {
5276 let mut project_transaction = ProjectTransaction::default();
5277 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5278 let buffer = this
5279 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5280 .await?;
5281 let transaction = language::proto::deserialize_transaction(transaction)?;
5282 project_transaction.0.insert(buffer, transaction);
5283 }
5284
5285 for (buffer, transaction) in &project_transaction.0 {
5286 buffer
5287 .update(&mut cx, |buffer, _| {
5288 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5289 })
5290 .await;
5291
5292 if push_to_history {
5293 buffer.update(&mut cx, |buffer, _| {
5294 buffer.push_transaction(transaction.clone(), Instant::now());
5295 });
5296 }
5297 }
5298
5299 Ok(project_transaction)
5300 })
5301 }
5302
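    // The first time a buffer is sent to a given peer its full state is serialized;
    // afterwards only the buffer id is sent.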
5303 fn serialize_buffer_for_peer(
5304 &mut self,
5305 buffer: &ModelHandle<Buffer>,
5306 peer_id: PeerId,
5307 cx: &AppContext,
5308 ) -> proto::Buffer {
5309 let buffer_id = buffer.read(cx).remote_id();
5310 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5311 if shared_buffers.insert(buffer_id) {
5312 proto::Buffer {
5313 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5314 }
5315 } else {
5316 proto::Buffer {
5317 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5318 }
5319 }
5320 }
5321
5322 fn deserialize_buffer(
5323 &mut self,
5324 buffer: proto::Buffer,
5325 cx: &mut ModelContext<Self>,
5326 ) -> Task<Result<ModelHandle<Buffer>>> {
5327 let replica_id = self.replica_id();
5328
5329 let opened_buffer_tx = self.opened_buffer.0.clone();
5330 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5331 cx.spawn(|this, mut cx| async move {
5332 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5333 proto::buffer::Variant::Id(id) => {
5334 let buffer = loop {
5335 let buffer = this.read_with(&cx, |this, cx| {
5336 this.opened_buffers
5337 .get(&id)
5338 .and_then(|buffer| buffer.upgrade(cx))
5339 });
5340 if let Some(buffer) = buffer {
5341 break buffer;
5342 }
5343 opened_buffer_rx
5344 .next()
5345 .await
5346 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5347 };
5348 Ok(buffer)
5349 }
5350 proto::buffer::Variant::State(mut buffer) => {
5351 let mut buffer_worktree = None;
5352 let mut buffer_file = None;
5353 if let Some(file) = buffer.file.take() {
5354 this.read_with(&cx, |this, cx| {
5355 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5356 let worktree =
5357 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5358 anyhow!("no worktree found for id {}", file.worktree_id)
5359 })?;
5360 buffer_file =
5361 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5362 as Arc<dyn language::File>);
5363 buffer_worktree = Some(worktree);
5364 Ok::<_, anyhow::Error>(())
5365 })?;
5366 }
5367
5368 let buffer = cx.add_model(|cx| {
5369 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5370 });
5371
5372 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5373
5374 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5375 Ok(buffer)
5376 }
5377 }
5378 })
5379 }
5380
5381 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5382 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5383 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5384 let start = serialized_symbol
5385 .start
5386 .ok_or_else(|| anyhow!("invalid start"))?;
5387 let end = serialized_symbol
5388 .end
5389 .ok_or_else(|| anyhow!("invalid end"))?;
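        // NOTE: this assumes the sender provided a valid `SymbolKind` value; transmuting
        // an out-of-range discriminant would be undefined behavior.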
5390 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5391 let path = PathBuf::from(serialized_symbol.path);
5392 let language = self.languages.select_language(&path);
5393 Ok(Symbol {
5394 source_worktree_id,
5395 worktree_id,
5396 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5397 label: language
5398 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5399 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5400 name: serialized_symbol.name,
5401 path,
5402 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5403 kind,
5404 signature: serialized_symbol
5405 .signature
5406 .try_into()
5407 .map_err(|_| anyhow!("invalid signature"))?,
5408 })
5409 }
5410
5411 async fn handle_buffer_saved(
5412 this: ModelHandle<Self>,
5413 envelope: TypedEnvelope<proto::BufferSaved>,
5414 _: Arc<Client>,
5415 mut cx: AsyncAppContext,
5416 ) -> Result<()> {
5417 let version = deserialize_version(envelope.payload.version);
5418 let mtime = envelope
5419 .payload
5420 .mtime
5421 .ok_or_else(|| anyhow!("missing mtime"))?
5422 .into();
5423
5424 this.update(&mut cx, |this, cx| {
5425 let buffer = this
5426 .opened_buffers
5427 .get(&envelope.payload.buffer_id)
5428 .and_then(|buffer| buffer.upgrade(cx));
5429 if let Some(buffer) = buffer {
5430 buffer.update(cx, |buffer, cx| {
5431 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5432 });
5433 }
5434 Ok(())
5435 })
5436 }
5437
5438 async fn handle_buffer_reloaded(
5439 this: ModelHandle<Self>,
5440 envelope: TypedEnvelope<proto::BufferReloaded>,
5441 _: Arc<Client>,
5442 mut cx: AsyncAppContext,
5443 ) -> Result<()> {
5444 let payload = envelope.payload.clone();
5445 let version = deserialize_version(payload.version);
5446 let mtime = payload
5447 .mtime
5448 .ok_or_else(|| anyhow!("missing mtime"))?
5449 .into();
5450 this.update(&mut cx, |this, cx| {
5451 let buffer = this
5452 .opened_buffers
5453 .get(&payload.buffer_id)
5454 .and_then(|buffer| buffer.upgrade(cx));
5455 if let Some(buffer) = buffer {
5456 buffer.update(cx, |buffer, cx| {
5457 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5458 });
5459 }
5460 Ok(())
5461 })
5462 }
5463
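    // Fuzzy-matches `query` against the paths of all visible worktrees on the background
    // executor.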
5464 pub fn match_paths<'a>(
5465 &self,
5466 query: &'a str,
5467 include_ignored: bool,
5468 smart_case: bool,
5469 max_results: usize,
5470 cancel_flag: &'a AtomicBool,
5471 cx: &AppContext,
5472 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5473 let worktrees = self
5474 .worktrees(cx)
5475 .filter(|worktree| worktree.read(cx).is_visible())
5476 .collect::<Vec<_>>();
5477 let include_root_name = worktrees.len() > 1;
5478 let candidate_sets = worktrees
5479 .into_iter()
5480 .map(|worktree| CandidateSet {
5481 snapshot: worktree.read(cx).snapshot(),
5482 include_ignored,
5483 include_root_name,
5484 })
5485 .collect::<Vec<_>>();
5486
5487 let background = cx.background().clone();
5488 async move {
5489 fuzzy::match_paths(
5490 candidate_sets.as_slice(),
5491 query,
5492 smart_case,
5493 max_results,
5494 cancel_flag,
5495 background,
5496 )
5497 .await
5498 }
5499 }
5500
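    // Converts a batch of LSP `TextEdit`s into anchored edits against the buffer snapshot
    // corresponding to `version`, merging adjacent edits and diffing multi-line
    // replacements so that anchors in unchanged regions are preserved.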
5501 fn edits_from_lsp(
5502 &mut self,
5503 buffer: &ModelHandle<Buffer>,
5504 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5505 version: Option<i32>,
5506 cx: &mut ModelContext<Self>,
5507 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5508 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5509 cx.background().spawn(async move {
5510 let snapshot = snapshot?;
5511 let mut lsp_edits = lsp_edits
5512 .into_iter()
5513 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5514 .collect::<Vec<_>>();
5515 lsp_edits.sort_by_key(|(range, _)| range.start);
5516
5517 let mut lsp_edits = lsp_edits.into_iter().peekable();
5518 let mut edits = Vec::new();
5519 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5520 // Combine any LSP edits that are adjacent.
5521 //
5522 // Also, combine LSP edits that are separated from each other by only
5523 // a newline. This is important because for some code actions,
5524 // Rust-analyzer rewrites the entire buffer via a series of edits that
5525 // are separated by unchanged newline characters.
5526 //
5527 // In order for the diffing logic below to work properly, any edits that
5528 // cancel each other out must be combined into one.
5529 while let Some((next_range, next_text)) = lsp_edits.peek() {
5530 if next_range.start > range.end {
5531 if next_range.start.row > range.end.row + 1
5532 || next_range.start.column > 0
5533 || snapshot.clip_point_utf16(
5534 PointUtf16::new(range.end.row, u32::MAX),
5535 Bias::Left,
5536 ) > range.end
5537 {
5538 break;
5539 }
5540 new_text.push('\n');
5541 }
5542 range.end = next_range.end;
5543 new_text.push_str(&next_text);
5544 lsp_edits.next();
5545 }
5546
5547 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5548 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5549 {
5550 return Err(anyhow!("invalid edits received from language server"));
5551 }
5552
5553 // For multiline edits, perform a diff of the old and new text so that
5554 // we can identify the changes more precisely, preserving the locations
5555 // of any anchors positioned in the unchanged regions.
5556 if range.end.row > range.start.row {
5557 let mut offset = range.start.to_offset(&snapshot);
5558 let old_text = snapshot.text_for_range(range).collect::<String>();
5559
5560 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5561 let mut moved_since_edit = true;
5562 for change in diff.iter_all_changes() {
5563 let tag = change.tag();
5564 let value = change.value();
5565 match tag {
5566 ChangeTag::Equal => {
5567 offset += value.len();
5568 moved_since_edit = true;
5569 }
5570 ChangeTag::Delete => {
5571 let start = snapshot.anchor_after(offset);
5572 let end = snapshot.anchor_before(offset + value.len());
5573 if moved_since_edit {
5574 edits.push((start..end, String::new()));
5575 } else {
5576 edits.last_mut().unwrap().0.end = end;
5577 }
5578 offset += value.len();
5579 moved_since_edit = false;
5580 }
5581 ChangeTag::Insert => {
5582 if moved_since_edit {
5583 let anchor = snapshot.anchor_after(offset);
5584 edits.push((anchor.clone()..anchor, value.to_string()));
5585 } else {
5586 edits.last_mut().unwrap().1.push_str(value);
5587 }
5588 moved_since_edit = false;
5589 }
5590 }
5591 }
5592 } else if range.end == range.start {
5593 let anchor = snapshot.anchor_after(range.start);
5594 edits.push((anchor.clone()..anchor, new_text));
5595 } else {
5596 let edit_start = snapshot.anchor_after(range.start);
5597 let edit_end = snapshot.anchor_before(range.end);
5598 edits.push((edit_start..edit_end, new_text));
5599 }
5600 }
5601
5602 Ok(edits)
5603 })
5604 }
5605
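    // Looks up the buffer snapshot that was current at the given LSP document `version`,
    // pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old. With no version,
    // the buffer's current snapshot is returned.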
5606 fn buffer_snapshot_for_lsp_version(
5607 &mut self,
5608 buffer: &ModelHandle<Buffer>,
5609 version: Option<i32>,
5610 cx: &AppContext,
5611 ) -> Result<TextBufferSnapshot> {
5612 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5613
5614 if let Some(version) = version {
5615 let buffer_id = buffer.read(cx).remote_id();
5616 let snapshots = self
5617 .buffer_snapshots
5618 .get_mut(&buffer_id)
5619 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5620 let mut found_snapshot = None;
            snapshots.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    return false;
                }
                if *snapshot_version == version {
                    found_snapshot = Some(snapshot.clone());
                }
                true
            });
5631
5632 found_snapshot.ok_or_else(|| {
5633 anyhow!(
5634 "snapshot not found for buffer {} at version {}",
5635 buffer_id,
5636 version
5637 )
5638 })
5639 } else {
            Ok(buffer.read(cx).text_snapshot())
5641 }
5642 }
5643
5644 fn language_server_for_buffer(
5645 &self,
5646 buffer: &Buffer,
5647 cx: &AppContext,
5648 ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
5649 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5650 let worktree_id = file.worktree_id(cx);
5651 let key = (worktree_id, language.lsp_adapter()?.name());
5652
5653 if let Some(server_id) = self.language_server_ids.get(&key) {
                if let Some(LanguageServerState::Running { adapter, server }) =
                    self.language_servers.get(server_id)
5656 {
5657 return Some((adapter, server));
5658 }
5659 }
5660 }
5661
5662 None
5663 }
5664}
5665
5666impl ProjectStore {
5667 pub fn new(db: Arc<Db>) -> Self {
5668 Self {
5669 db,
5670 projects: Default::default(),
5671 }
5672 }
5673
5674 pub fn projects<'a>(
5675 &'a self,
5676 cx: &'a AppContext,
5677 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5678 self.projects
5679 .iter()
5680 .filter_map(|project| project.upgrade(cx))
5681 }
5682
5683 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5684 if let Err(ix) = self
5685 .projects
5686 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5687 {
5688 self.projects.insert(ix, project);
5689 }
5690 cx.notify();
5691 }
5692
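    // Drops handles to projects that have been released, notifying observers
    // if the set of projects changed.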
5693 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5694 let mut did_change = false;
5695 self.projects.retain(|project| {
5696 if project.is_upgradable(cx) {
5697 true
5698 } else {
5699 did_change = true;
5700 false
5701 }
5702 });
5703 if did_change {
5704 cx.notify();
5705 }
5706 }
5707}
5708
5709impl WorktreeHandle {
5710 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5711 match self {
5712 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5713 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5714 }
5715 }
5716}
5717
5718impl OpenBuffer {
5719 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5720 match self {
5721 OpenBuffer::Strong(handle) => Some(handle.clone()),
5722 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5723 OpenBuffer::Loading(_) => None,
5724 }
5725 }
5726}
5727
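// Adapts a worktree snapshot so that its file paths can be searched by the
// fuzzy path matcher.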
5728struct CandidateSet {
5729 snapshot: Snapshot,
5730 include_ignored: bool,
5731 include_root_name: bool,
5732}
5733
5734impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5735 type Candidates = CandidateSetIter<'a>;
5736
5737 fn id(&self) -> usize {
5738 self.snapshot.id().to_usize()
5739 }
5740
5741 fn len(&self) -> usize {
5742 if self.include_ignored {
5743 self.snapshot.file_count()
5744 } else {
5745 self.snapshot.visible_file_count()
5746 }
5747 }
5748
5749 fn prefix(&self) -> Arc<str> {
5750 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5751 self.snapshot.root_name().into()
5752 } else if self.include_root_name {
5753 format!("{}/", self.snapshot.root_name()).into()
5754 } else {
5755 "".into()
5756 }
5757 }
5758
5759 fn candidates(&'a self, start: usize) -> Self::Candidates {
5760 CandidateSetIter {
5761 traversal: self.snapshot.files(self.include_ignored, start),
5762 }
5763 }
5764}
5765
5766struct CandidateSetIter<'a> {
5767 traversal: Traversal<'a>,
5768}
5769
5770impl<'a> Iterator for CandidateSetIter<'a> {
5771 type Item = PathMatchCandidate<'a>;
5772
5773 fn next(&mut self) -> Option<Self::Item> {
5774 self.traversal.next().map(|entry| {
5775 if let EntryKind::File(char_bag) = entry.kind {
5776 PathMatchCandidate {
5777 path: &entry.path,
5778 char_bag,
5779 }
5780 } else {
5781 unreachable!()
5782 }
5783 })
5784 }
5785}
5786
5787impl Entity for ProjectStore {
5788 type Event = ();
5789}
5790
5791impl Entity for Project {
5792 type Event = Event;
5793
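    // When the project is released, tell the server that this client has
    // unregistered the project (if local) or left it (if remote).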
5794 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5795 self.project_store.update(cx, ProjectStore::prune_projects);
5796
5797 match &self.client_state {
5798 ProjectClientState::Local { remote_id_rx, .. } => {
5799 if let Some(project_id) = *remote_id_rx.borrow() {
5800 self.client
5801 .send(proto::UnregisterProject { project_id })
5802 .log_err();
5803 }
5804 }
5805 ProjectClientState::Remote { remote_id, .. } => {
5806 self.client
5807 .send(proto::LeaveProject {
5808 project_id: *remote_id,
5809 })
5810 .log_err();
5811 }
5812 }
5813 }
5814
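    // Shut down all language servers, whether running or still starting,
    // before the application quits.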
5815 fn app_will_quit(
5816 &mut self,
5817 _: &mut MutableAppContext,
5818 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5819 let shutdown_futures = self
5820 .language_servers
5821 .drain()
5822 .map(|(_, server_state)| async {
5823 match server_state {
5824 LanguageServerState::Running { server, .. } => server.shutdown()?.await,
5825 LanguageServerState::Starting(starting_server) => {
5826 starting_server.await?.shutdown()?.await
5827 }
5828 }
5829 })
5830 .collect::<Vec<_>>();
5831
5832 Some(
5833 async move {
5834 futures::future::join_all(shutdown_futures).await;
5835 }
5836 .boxed(),
5837 )
5838 }
5839}
5840
5841impl Collaborator {
5842 fn from_proto(
5843 message: proto::Collaborator,
5844 user_store: &ModelHandle<UserStore>,
5845 cx: &mut AsyncAppContext,
5846 ) -> impl Future<Output = Result<Self>> {
5847 let user = user_store.update(cx, |user_store, cx| {
5848 user_store.fetch_user(message.user_id, cx)
5849 });
5850
5851 async move {
5852 Ok(Self {
5853 peer_id: PeerId(message.peer_id),
5854 user: user.await?,
5855 replica_id: message.replica_id as ReplicaId,
5856 })
5857 }
5858 }
5859}
5860
5861impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5862 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5863 Self {
5864 worktree_id,
5865 path: path.as_ref().into(),
5866 }
5867 }
5868}
5869
5870impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5871 fn from(options: lsp::CreateFileOptions) -> Self {
5872 Self {
5873 overwrite: options.overwrite.unwrap_or(false),
5874 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5875 }
5876 }
5877}
5878
5879impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5880 fn from(options: lsp::RenameFileOptions) -> Self {
5881 Self {
5882 overwrite: options.overwrite.unwrap_or(false),
5883 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5884 }
5885 }
5886}
5887
5888impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5889 fn from(options: lsp::DeleteFileOptions) -> Self {
5890 Self {
5891 recursive: options.recursive.unwrap_or(false),
5892 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5893 }
5894 }
5895}
5896
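// Converts a symbol into its protobuf representation. The `kind` field relies
// on `lsp::SymbolKind` sharing the same underlying representation as the proto
// `kind` field, hence the transmute below.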
5897fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5898 proto::Symbol {
5899 source_worktree_id: symbol.source_worktree_id.to_proto(),
5900 worktree_id: symbol.worktree_id.to_proto(),
5901 language_server_name: symbol.language_server_name.0.to_string(),
5902 name: symbol.name.clone(),
5903 kind: unsafe { mem::transmute(symbol.kind) },
5904 path: symbol.path.to_string_lossy().to_string(),
5905 start: Some(proto::Point {
5906 row: symbol.range.start.row,
5907 column: symbol.range.start.column,
5908 }),
5909 end: Some(proto::Point {
5910 row: symbol.range.end.row,
5911 column: symbol.range.end.column,
5912 }),
5913 signature: symbol.signature.to_vec(),
5914 }
5915}
5916
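// Computes `path` relative to `base`, inserting `..` components where the two
// paths diverge (e.g. relativizing "/a/c/d" against a base of "/a/b" yields
// "../c/d").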
5917fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5918 let mut path_components = path.components();
5919 let mut base_components = base.components();
5920 let mut components: Vec<Component> = Vec::new();
5921 loop {
5922 match (path_components.next(), base_components.next()) {
5923 (None, None) => break,
5924 (Some(a), None) => {
5925 components.push(a);
5926 components.extend(path_components.by_ref());
5927 break;
5928 }
5929 (None, _) => components.push(Component::ParentDir),
5930 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5931 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5932 (Some(a), Some(_)) => {
5933 components.push(Component::ParentDir);
5934 for _ in base_components {
5935 components.push(Component::ParentDir);
5936 }
5937 components.push(a);
5938 components.extend(path_components.by_ref());
5939 break;
5940 }
5941 }
5942 }
5943 components.iter().map(|c| c.as_os_str()).collect()
5944}
5945
5946impl Item for Buffer {
5947 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5948 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5949 }
5950}
5951
5952#[cfg(test)]
5953mod tests {
5954 use crate::worktree::WorktreeHandle;
5955
5956 use super::{Event, *};
5957 use fs::RealFs;
5958 use futures::{future, StreamExt};
5959 use gpui::{executor::Deterministic, test::subscribe};
5960 use language::{
5961 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5962 OffsetRangeExt, Point, ToPoint,
5963 };
5964 use lsp::Url;
5965 use serde_json::json;
5966 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5967 use unindent::Unindent as _;
5968 use util::{assert_set_eq, test::temp_tree};
5969
5970 #[gpui::test]
5971 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5972 let dir = temp_tree(json!({
5973 "root": {
5974 "apple": "",
5975 "banana": {
5976 "carrot": {
5977 "date": "",
5978 "endive": "",
5979 }
5980 },
5981 "fennel": {
5982 "grape": "",
5983 }
5984 }
5985 }));
5986
5987 let root_link_path = dir.path().join("root_link");
5988 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5989 unix::fs::symlink(
5990 &dir.path().join("root/fennel"),
5991 &dir.path().join("root/finnochio"),
5992 )
5993 .unwrap();
5994
5995 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5996
5997 project.read_with(cx, |project, cx| {
5998 let tree = project.worktrees(cx).next().unwrap().read(cx);
5999 assert_eq!(tree.file_count(), 5);
6000 assert_eq!(
6001 tree.inode_for_path("fennel/grape"),
6002 tree.inode_for_path("finnochio/grape")
6003 );
6004 });
6005
6006 let cancel_flag = Default::default();
6007 let results = project
6008 .read_with(cx, |project, cx| {
6009 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
6010 })
6011 .await;
6012 assert_eq!(
6013 results
6014 .into_iter()
6015 .map(|result| result.path)
6016 .collect::<Vec<Arc<Path>>>(),
6017 vec![
6018 PathBuf::from("banana/carrot/date").into(),
6019 PathBuf::from("banana/carrot/endive").into(),
6020 ]
6021 );
6022 }
6023
6024 #[gpui::test]
6025 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
6026 cx.foreground().forbid_parking();
6027
6028 let mut rust_language = Language::new(
6029 LanguageConfig {
6030 name: "Rust".into(),
6031 path_suffixes: vec!["rs".to_string()],
6032 ..Default::default()
6033 },
6034 Some(tree_sitter_rust::language()),
6035 );
6036 let mut json_language = Language::new(
6037 LanguageConfig {
6038 name: "JSON".into(),
6039 path_suffixes: vec!["json".to_string()],
6040 ..Default::default()
6041 },
6042 None,
6043 );
6044 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
6045 name: "the-rust-language-server",
6046 capabilities: lsp::ServerCapabilities {
6047 completion_provider: Some(lsp::CompletionOptions {
6048 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6049 ..Default::default()
6050 }),
6051 ..Default::default()
6052 },
6053 ..Default::default()
6054 });
6055 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6056 name: "the-json-language-server",
6057 capabilities: lsp::ServerCapabilities {
6058 completion_provider: Some(lsp::CompletionOptions {
6059 trigger_characters: Some(vec![":".to_string()]),
6060 ..Default::default()
6061 }),
6062 ..Default::default()
6063 },
6064 ..Default::default()
6065 });
6066
6067 let fs = FakeFs::new(cx.background());
6068 fs.insert_tree(
6069 "/the-root",
6070 json!({
6071 "test.rs": "const A: i32 = 1;",
6072 "test2.rs": "",
6073 "Cargo.toml": "a = 1",
6074 "package.json": "{\"a\": 1}",
6075 }),
6076 )
6077 .await;
6078
6079 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6080 project.update(cx, |project, _| {
6081 project.languages.add(Arc::new(rust_language));
6082 project.languages.add(Arc::new(json_language));
6083 });
6084
6085 // Open a buffer without an associated language server.
6086 let toml_buffer = project
6087 .update(cx, |project, cx| {
6088 project.open_local_buffer("/the-root/Cargo.toml", cx)
6089 })
6090 .await
6091 .unwrap();
6092
6093 // Open a buffer with an associated language server.
6094 let rust_buffer = project
6095 .update(cx, |project, cx| {
6096 project.open_local_buffer("/the-root/test.rs", cx)
6097 })
6098 .await
6099 .unwrap();
6100
6101 // A server is started up, and it is notified about Rust files.
6102 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6103 assert_eq!(
6104 fake_rust_server
6105 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6106 .await
6107 .text_document,
6108 lsp::TextDocumentItem {
6109 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6110 version: 0,
6111 text: "const A: i32 = 1;".to_string(),
6112 language_id: Default::default()
6113 }
6114 );
6115
6116 // The buffer is configured based on the language server's capabilities.
6117 rust_buffer.read_with(cx, |buffer, _| {
6118 assert_eq!(
6119 buffer.completion_triggers(),
6120 &[".".to_string(), "::".to_string()]
6121 );
6122 });
6123 toml_buffer.read_with(cx, |buffer, _| {
6124 assert!(buffer.completion_triggers().is_empty());
6125 });
6126
6127 // Edit a buffer. The changes are reported to the language server.
6128 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6129 assert_eq!(
6130 fake_rust_server
6131 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6132 .await
6133 .text_document,
6134 lsp::VersionedTextDocumentIdentifier::new(
6135 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6136 1
6137 )
6138 );
6139
6140 // Open a third buffer with a different associated language server.
6141 let json_buffer = project
6142 .update(cx, |project, cx| {
6143 project.open_local_buffer("/the-root/package.json", cx)
6144 })
6145 .await
6146 .unwrap();
6147
        // A JSON language server is started up, and it is notified only about the JSON buffer.
6149 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6150 assert_eq!(
6151 fake_json_server
6152 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6153 .await
6154 .text_document,
6155 lsp::TextDocumentItem {
6156 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6157 version: 0,
6158 text: "{\"a\": 1}".to_string(),
6159 language_id: Default::default()
6160 }
6161 );
6162
6163 // This buffer is configured based on the second language server's
6164 // capabilities.
6165 json_buffer.read_with(cx, |buffer, _| {
6166 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6167 });
6168
6169 // When opening another buffer whose language server is already running,
6170 // it is also configured based on the existing language server's capabilities.
6171 let rust_buffer2 = project
6172 .update(cx, |project, cx| {
6173 project.open_local_buffer("/the-root/test2.rs", cx)
6174 })
6175 .await
6176 .unwrap();
6177 rust_buffer2.read_with(cx, |buffer, _| {
6178 assert_eq!(
6179 buffer.completion_triggers(),
6180 &[".".to_string(), "::".to_string()]
6181 );
6182 });
6183
6184 // Changes are reported only to servers matching the buffer's language.
6185 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6186 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6187 assert_eq!(
6188 fake_rust_server
6189 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6190 .await
6191 .text_document,
6192 lsp::VersionedTextDocumentIdentifier::new(
6193 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6194 1
6195 )
6196 );
6197
6198 // Save notifications are reported to all servers.
6199 toml_buffer
6200 .update(cx, |buffer, cx| buffer.save(cx))
6201 .await
6202 .unwrap();
6203 assert_eq!(
6204 fake_rust_server
6205 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6206 .await
6207 .text_document,
6208 lsp::TextDocumentIdentifier::new(
6209 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6210 )
6211 );
6212 assert_eq!(
6213 fake_json_server
6214 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6215 .await
6216 .text_document,
6217 lsp::TextDocumentIdentifier::new(
6218 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6219 )
6220 );
6221
6222 // Renames are reported only to servers matching the buffer's language.
6223 fs.rename(
6224 Path::new("/the-root/test2.rs"),
6225 Path::new("/the-root/test3.rs"),
6226 Default::default(),
6227 )
6228 .await
6229 .unwrap();
6230 assert_eq!(
6231 fake_rust_server
6232 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6233 .await
6234 .text_document,
6235 lsp::TextDocumentIdentifier::new(
6236 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6237 ),
6238 );
6239 assert_eq!(
6240 fake_rust_server
6241 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6242 .await
6243 .text_document,
6244 lsp::TextDocumentItem {
6245 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6246 version: 0,
6247 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6248 language_id: Default::default()
6249 },
6250 );
6251
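        // Attach a synthetic diagnostic to the buffer so that we can verify
        // below that it is cleared when the buffer's language changes.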
6252 rust_buffer2.update(cx, |buffer, cx| {
6253 buffer.update_diagnostics(
6254 DiagnosticSet::from_sorted_entries(
6255 vec![DiagnosticEntry {
6256 diagnostic: Default::default(),
6257 range: Anchor::MIN..Anchor::MAX,
6258 }],
6259 &buffer.snapshot(),
6260 ),
6261 cx,
6262 );
6263 assert_eq!(
6264 buffer
6265 .snapshot()
6266 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6267 .count(),
6268 1
6269 );
6270 });
6271
6272 // When the rename changes the extension of the file, the buffer gets closed on the old
6273 // language server and gets opened on the new one.
6274 fs.rename(
6275 Path::new("/the-root/test3.rs"),
6276 Path::new("/the-root/test3.json"),
6277 Default::default(),
6278 )
6279 .await
6280 .unwrap();
6281 assert_eq!(
6282 fake_rust_server
6283 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6284 .await
6285 .text_document,
6286 lsp::TextDocumentIdentifier::new(
6287 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6288 ),
6289 );
6290 assert_eq!(
6291 fake_json_server
6292 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6293 .await
6294 .text_document,
6295 lsp::TextDocumentItem {
6296 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6297 version: 0,
6298 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6299 language_id: Default::default()
6300 },
6301 );
6302
6303 // We clear the diagnostics, since the language has changed.
6304 rust_buffer2.read_with(cx, |buffer, _| {
6305 assert_eq!(
6306 buffer
6307 .snapshot()
6308 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6309 .count(),
6310 0
6311 );
6312 });
6313
6314 // The renamed file's version resets after changing language server.
6315 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6316 assert_eq!(
6317 fake_json_server
6318 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6319 .await
6320 .text_document,
6321 lsp::VersionedTextDocumentIdentifier::new(
6322 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6323 1
6324 )
6325 );
6326
6327 // Restart language servers
6328 project.update(cx, |project, cx| {
6329 project.restart_language_servers_for_buffers(
6330 vec![rust_buffer.clone(), json_buffer.clone()],
6331 cx,
6332 );
6333 });
6334
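        // Restarting issues a shutdown request to each running server.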
6335 let mut rust_shutdown_requests = fake_rust_server
6336 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6337 let mut json_shutdown_requests = fake_json_server
6338 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6339 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6340
6341 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6342 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6343
        // Ensure the Rust document is reopened in the new Rust language server.
6345 assert_eq!(
6346 fake_rust_server
6347 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6348 .await
6349 .text_document,
6350 lsp::TextDocumentItem {
6351 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6352 version: 1,
6353 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6354 language_id: Default::default()
6355 }
6356 );
6357
        // Ensure the JSON documents are reopened in the new JSON language server.
6359 assert_set_eq!(
6360 [
6361 fake_json_server
6362 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6363 .await
6364 .text_document,
6365 fake_json_server
6366 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6367 .await
6368 .text_document,
6369 ],
6370 [
6371 lsp::TextDocumentItem {
6372 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6373 version: 0,
6374 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6375 language_id: Default::default()
6376 },
6377 lsp::TextDocumentItem {
6378 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6379 version: 1,
6380 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6381 language_id: Default::default()
6382 }
6383 ]
6384 );
6385
6386 // Close notifications are reported only to servers matching the buffer's language.
6387 cx.update(|_| drop(json_buffer));
6388 let close_message = lsp::DidCloseTextDocumentParams {
6389 text_document: lsp::TextDocumentIdentifier::new(
6390 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6391 ),
6392 };
6393 assert_eq!(
6394 fake_json_server
6395 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6396 .await,
6397 close_message,
6398 );
6399 }
6400
6401 #[gpui::test]
6402 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6403 cx.foreground().forbid_parking();
6404
6405 let fs = FakeFs::new(cx.background());
6406 fs.insert_tree(
6407 "/dir",
6408 json!({
6409 "a.rs": "let a = 1;",
6410 "b.rs": "let b = 2;"
6411 }),
6412 )
6413 .await;
6414
6415 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6416
6417 let buffer_a = project
6418 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6419 .await
6420 .unwrap();
6421 let buffer_b = project
6422 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6423 .await
6424 .unwrap();
6425
6426 project.update(cx, |project, cx| {
6427 project
6428 .update_diagnostics(
6429 0,
6430 lsp::PublishDiagnosticsParams {
6431 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6432 version: None,
6433 diagnostics: vec![lsp::Diagnostic {
6434 range: lsp::Range::new(
6435 lsp::Position::new(0, 4),
6436 lsp::Position::new(0, 5),
6437 ),
6438 severity: Some(lsp::DiagnosticSeverity::ERROR),
6439 message: "error 1".to_string(),
6440 ..Default::default()
6441 }],
6442 },
6443 &[],
6444 cx,
6445 )
6446 .unwrap();
6447 project
6448 .update_diagnostics(
6449 0,
6450 lsp::PublishDiagnosticsParams {
6451 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6452 version: None,
6453 diagnostics: vec![lsp::Diagnostic {
6454 range: lsp::Range::new(
6455 lsp::Position::new(0, 4),
6456 lsp::Position::new(0, 5),
6457 ),
6458 severity: Some(lsp::DiagnosticSeverity::WARNING),
6459 message: "error 2".to_string(),
6460 ..Default::default()
6461 }],
6462 },
6463 &[],
6464 cx,
6465 )
6466 .unwrap();
6467 });
6468
6469 buffer_a.read_with(cx, |buffer, _| {
6470 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6471 assert_eq!(
6472 chunks
6473 .iter()
6474 .map(|(s, d)| (s.as_str(), *d))
6475 .collect::<Vec<_>>(),
6476 &[
6477 ("let ", None),
6478 ("a", Some(DiagnosticSeverity::ERROR)),
6479 (" = 1;", None),
6480 ]
6481 );
6482 });
6483 buffer_b.read_with(cx, |buffer, _| {
6484 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6485 assert_eq!(
6486 chunks
6487 .iter()
6488 .map(|(s, d)| (s.as_str(), *d))
6489 .collect::<Vec<_>>(),
6490 &[
6491 ("let ", None),
6492 ("b", Some(DiagnosticSeverity::WARNING)),
6493 (" = 2;", None),
6494 ]
6495 );
6496 });
6497 }
6498
6499 #[gpui::test]
6500 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6501 cx.foreground().forbid_parking();
6502
6503 let fs = FakeFs::new(cx.background());
6504 fs.insert_tree(
6505 "/root",
6506 json!({
6507 "dir": {
6508 "a.rs": "let a = 1;",
6509 },
6510 "other.rs": "let b = c;"
6511 }),
6512 )
6513 .await;
6514
6515 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6516
6517 let (worktree, _) = project
6518 .update(cx, |project, cx| {
6519 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6520 })
6521 .await
6522 .unwrap();
6523 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6524
6525 project.update(cx, |project, cx| {
6526 project
6527 .update_diagnostics(
6528 0,
6529 lsp::PublishDiagnosticsParams {
6530 uri: Url::from_file_path("/root/other.rs").unwrap(),
6531 version: None,
6532 diagnostics: vec![lsp::Diagnostic {
6533 range: lsp::Range::new(
6534 lsp::Position::new(0, 8),
6535 lsp::Position::new(0, 9),
6536 ),
6537 severity: Some(lsp::DiagnosticSeverity::ERROR),
6538 message: "unknown variable 'c'".to_string(),
6539 ..Default::default()
6540 }],
6541 },
6542 &[],
6543 cx,
6544 )
6545 .unwrap();
6546 });
6547
6548 let buffer = project
6549 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6550 .await
6551 .unwrap();
6552 buffer.read_with(cx, |buffer, _| {
6553 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6554 assert_eq!(
6555 chunks
6556 .iter()
6557 .map(|(s, d)| (s.as_str(), *d))
6558 .collect::<Vec<_>>(),
6559 &[
6560 ("let b = ", None),
6561 ("c", Some(DiagnosticSeverity::ERROR)),
6562 (";", None),
6563 ]
6564 );
6565 });
6566
6567 project.read_with(cx, |project, cx| {
6568 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6569 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6570 });
6571 }
6572
6573 #[gpui::test]
6574 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6575 cx.foreground().forbid_parking();
6576
6577 let progress_token = "the-progress-token";
6578 let mut language = Language::new(
6579 LanguageConfig {
6580 name: "Rust".into(),
6581 path_suffixes: vec!["rs".to_string()],
6582 ..Default::default()
6583 },
6584 Some(tree_sitter_rust::language()),
6585 );
6586 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6587 disk_based_diagnostics_progress_token: Some(progress_token),
6588 disk_based_diagnostics_sources: &["disk"],
6589 ..Default::default()
6590 });
6591
6592 let fs = FakeFs::new(cx.background());
6593 fs.insert_tree(
6594 "/dir",
6595 json!({
6596 "a.rs": "fn a() { A }",
6597 "b.rs": "const y: i32 = 1",
6598 }),
6599 )
6600 .await;
6601
6602 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6603 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6604 let worktree_id =
6605 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6606
        // Cause the worktree to start the fake language server.
6608 let _buffer = project
6609 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6610 .await
6611 .unwrap();
6612
6613 let mut events = subscribe(&project, cx);
6614
6615 let fake_server = fake_servers.next().await.unwrap();
6616 fake_server.start_progress(progress_token).await;
6617 assert_eq!(
6618 events.next().await.unwrap(),
6619 Event::DiskBasedDiagnosticsStarted {
6620 language_server_id: 0,
6621 }
6622 );
6623
6624 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6625 lsp::PublishDiagnosticsParams {
6626 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6627 version: None,
6628 diagnostics: vec![lsp::Diagnostic {
6629 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6630 severity: Some(lsp::DiagnosticSeverity::ERROR),
6631 message: "undefined variable 'A'".to_string(),
6632 ..Default::default()
6633 }],
6634 },
6635 );
6636 assert_eq!(
6637 events.next().await.unwrap(),
6638 Event::DiagnosticsUpdated {
6639 language_server_id: 0,
6640 path: (worktree_id, Path::new("a.rs")).into()
6641 }
6642 );
6643
6644 fake_server.end_progress(progress_token);
6645 assert_eq!(
6646 events.next().await.unwrap(),
6647 Event::DiskBasedDiagnosticsFinished {
6648 language_server_id: 0
6649 }
6650 );
6651
6652 let buffer = project
6653 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6654 .await
6655 .unwrap();
6656
6657 buffer.read_with(cx, |buffer, _| {
6658 let snapshot = buffer.snapshot();
6659 let diagnostics = snapshot
6660 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6661 .collect::<Vec<_>>();
6662 assert_eq!(
6663 diagnostics,
6664 &[DiagnosticEntry {
6665 range: Point::new(0, 9)..Point::new(0, 10),
6666 diagnostic: Diagnostic {
6667 severity: lsp::DiagnosticSeverity::ERROR,
6668 message: "undefined variable 'A'".to_string(),
6669 group_id: 0,
6670 is_primary: true,
6671 ..Default::default()
6672 }
6673 }]
6674 )
6675 });
6676
6677 // Ensure publishing empty diagnostics twice only results in one update event.
6678 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6679 lsp::PublishDiagnosticsParams {
6680 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6681 version: None,
6682 diagnostics: Default::default(),
6683 },
6684 );
6685 assert_eq!(
6686 events.next().await.unwrap(),
6687 Event::DiagnosticsUpdated {
6688 language_server_id: 0,
6689 path: (worktree_id, Path::new("a.rs")).into()
6690 }
6691 );
6692
6693 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6694 lsp::PublishDiagnosticsParams {
6695 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6696 version: None,
6697 diagnostics: Default::default(),
6698 },
6699 );
6700 cx.foreground().run_until_parked();
6701 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6702 }
6703
6704 #[gpui::test]
6705 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6706 cx.foreground().forbid_parking();
6707
6708 let progress_token = "the-progress-token";
6709 let mut language = Language::new(
6710 LanguageConfig {
6711 path_suffixes: vec!["rs".to_string()],
6712 ..Default::default()
6713 },
6714 None,
6715 );
6716 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6717 disk_based_diagnostics_sources: &["disk"],
6718 disk_based_diagnostics_progress_token: Some(progress_token),
6719 ..Default::default()
6720 });
6721
6722 let fs = FakeFs::new(cx.background());
6723 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6724
6725 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6726 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6727
6728 let buffer = project
6729 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6730 .await
6731 .unwrap();
6732
6733 // Simulate diagnostics starting to update.
6734 let fake_server = fake_servers.next().await.unwrap();
6735 fake_server.start_progress(progress_token).await;
6736
6737 // Restart the server before the diagnostics finish updating.
6738 project.update(cx, |project, cx| {
6739 project.restart_language_servers_for_buffers([buffer], cx);
6740 });
6741 let mut events = subscribe(&project, cx);
6742
6743 // Simulate the newly started server sending more diagnostics.
6744 let fake_server = fake_servers.next().await.unwrap();
6745 fake_server.start_progress(progress_token).await;
6746 assert_eq!(
6747 events.next().await.unwrap(),
6748 Event::DiskBasedDiagnosticsStarted {
6749 language_server_id: 1
6750 }
6751 );
6752 project.read_with(cx, |project, _| {
6753 assert_eq!(
6754 project
6755 .language_servers_running_disk_based_diagnostics()
6756 .collect::<Vec<_>>(),
6757 [1]
6758 );
6759 });
6760
6761 // All diagnostics are considered done, despite the old server's diagnostic
6762 // task never completing.
6763 fake_server.end_progress(progress_token);
6764 assert_eq!(
6765 events.next().await.unwrap(),
6766 Event::DiskBasedDiagnosticsFinished {
6767 language_server_id: 1
6768 }
6769 );
6770 project.read_with(cx, |project, _| {
6771 assert_eq!(
6772 project
6773 .language_servers_running_disk_based_diagnostics()
6774 .collect::<Vec<_>>(),
6775 [0; 0]
6776 );
6777 });
6778 }
6779
6780 #[gpui::test]
6781 async fn test_toggling_enable_language_server(
6782 deterministic: Arc<Deterministic>,
6783 cx: &mut gpui::TestAppContext,
6784 ) {
6785 deterministic.forbid_parking();
6786
6787 let mut rust = Language::new(
6788 LanguageConfig {
6789 name: Arc::from("Rust"),
6790 path_suffixes: vec!["rs".to_string()],
6791 ..Default::default()
6792 },
6793 None,
6794 );
6795 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6796 name: "rust-lsp",
6797 ..Default::default()
6798 });
6799 let mut js = Language::new(
6800 LanguageConfig {
6801 name: Arc::from("JavaScript"),
6802 path_suffixes: vec!["js".to_string()],
6803 ..Default::default()
6804 },
6805 None,
6806 );
6807 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6808 name: "js-lsp",
6809 ..Default::default()
6810 });
6811
6812 let fs = FakeFs::new(cx.background());
6813 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6814 .await;
6815
6816 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6817 project.update(cx, |project, _| {
6818 project.languages.add(Arc::new(rust));
6819 project.languages.add(Arc::new(js));
6820 });
6821
6822 let _rs_buffer = project
6823 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6824 .await
6825 .unwrap();
6826 let _js_buffer = project
6827 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6828 .await
6829 .unwrap();
6830
6831 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6832 assert_eq!(
6833 fake_rust_server_1
6834 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6835 .await
6836 .text_document
6837 .uri
6838 .as_str(),
6839 "file:///dir/a.rs"
6840 );
6841
6842 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6843 assert_eq!(
6844 fake_js_server
6845 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6846 .await
6847 .text_document
6848 .uri
6849 .as_str(),
6850 "file:///dir/b.js"
6851 );
6852
        // Disable the Rust language server, ensuring that only that server is stopped.
6854 cx.update(|cx| {
6855 cx.update_global(|settings: &mut Settings, _| {
6856 settings.language_overrides.insert(
6857 Arc::from("Rust"),
6858 settings::LanguageSettings {
6859 enable_language_server: Some(false),
6860 ..Default::default()
6861 },
6862 );
6863 })
6864 });
6865 fake_rust_server_1
6866 .receive_notification::<lsp::notification::Exit>()
6867 .await;
6868
6869 // Enable Rust and disable JavaScript language servers, ensuring that the
6870 // former gets started again and that the latter stops.
6871 cx.update(|cx| {
6872 cx.update_global(|settings: &mut Settings, _| {
6873 settings.language_overrides.insert(
6874 Arc::from("Rust"),
6875 settings::LanguageSettings {
6876 enable_language_server: Some(true),
6877 ..Default::default()
6878 },
6879 );
6880 settings.language_overrides.insert(
6881 Arc::from("JavaScript"),
6882 settings::LanguageSettings {
6883 enable_language_server: Some(false),
6884 ..Default::default()
6885 },
6886 );
6887 })
6888 });
6889 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6890 assert_eq!(
6891 fake_rust_server_2
6892 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6893 .await
6894 .text_document
6895 .uri
6896 .as_str(),
6897 "file:///dir/a.rs"
6898 );
6899 fake_js_server
6900 .receive_notification::<lsp::notification::Exit>()
6901 .await;
6902 }
6903
6904 #[gpui::test]
6905 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6906 cx.foreground().forbid_parking();
6907
6908 let mut language = Language::new(
6909 LanguageConfig {
6910 name: "Rust".into(),
6911 path_suffixes: vec!["rs".to_string()],
6912 ..Default::default()
6913 },
6914 Some(tree_sitter_rust::language()),
6915 );
6916 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6917 disk_based_diagnostics_sources: &["disk"],
6918 ..Default::default()
6919 });
6920
6921 let text = "
6922 fn a() { A }
6923 fn b() { BB }
6924 fn c() { CCC }
6925 "
6926 .unindent();
6927
6928 let fs = FakeFs::new(cx.background());
6929 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6930
6931 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6932 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6933
6934 let buffer = project
6935 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6936 .await
6937 .unwrap();
6938
6939 let mut fake_server = fake_servers.next().await.unwrap();
6940 let open_notification = fake_server
6941 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6942 .await;
6943
6944 // Edit the buffer, moving the content down
6945 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6946 let change_notification_1 = fake_server
6947 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6948 .await;
6949 assert!(
6950 change_notification_1.text_document.version > open_notification.text_document.version
6951 );
6952
6953 // Report some diagnostics for the initial version of the buffer
6954 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6955 lsp::PublishDiagnosticsParams {
6956 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6957 version: Some(open_notification.text_document.version),
6958 diagnostics: vec![
6959 lsp::Diagnostic {
6960 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6961 severity: Some(DiagnosticSeverity::ERROR),
6962 message: "undefined variable 'A'".to_string(),
6963 source: Some("disk".to_string()),
6964 ..Default::default()
6965 },
6966 lsp::Diagnostic {
6967 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6968 severity: Some(DiagnosticSeverity::ERROR),
6969 message: "undefined variable 'BB'".to_string(),
6970 source: Some("disk".to_string()),
6971 ..Default::default()
6972 },
6973 lsp::Diagnostic {
6974 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6975 severity: Some(DiagnosticSeverity::ERROR),
6976 source: Some("disk".to_string()),
6977 message: "undefined variable 'CCC'".to_string(),
6978 ..Default::default()
6979 },
6980 ],
6981 },
6982 );
6983
6984 // The diagnostics have moved down since they were created.
6985 buffer.next_notification(cx).await;
6986 buffer.read_with(cx, |buffer, _| {
6987 assert_eq!(
6988 buffer
6989 .snapshot()
6990 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6991 .collect::<Vec<_>>(),
6992 &[
6993 DiagnosticEntry {
6994 range: Point::new(3, 9)..Point::new(3, 11),
6995 diagnostic: Diagnostic {
6996 severity: DiagnosticSeverity::ERROR,
6997 message: "undefined variable 'BB'".to_string(),
6998 is_disk_based: true,
6999 group_id: 1,
7000 is_primary: true,
7001 ..Default::default()
7002 },
7003 },
7004 DiagnosticEntry {
7005 range: Point::new(4, 9)..Point::new(4, 12),
7006 diagnostic: Diagnostic {
7007 severity: DiagnosticSeverity::ERROR,
7008 message: "undefined variable 'CCC'".to_string(),
7009 is_disk_based: true,
7010 group_id: 2,
7011 is_primary: true,
7012 ..Default::default()
7013 }
7014 }
7015 ]
7016 );
7017 assert_eq!(
7018 chunks_with_diagnostics(buffer, 0..buffer.len()),
7019 [
7020 ("\n\nfn a() { ".to_string(), None),
7021 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7022 (" }\nfn b() { ".to_string(), None),
7023 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
7024 (" }\nfn c() { ".to_string(), None),
7025 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
7026 (" }\n".to_string(), None),
7027 ]
7028 );
7029 assert_eq!(
7030 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
7031 [
7032 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
7033 (" }\nfn c() { ".to_string(), None),
7034 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
7035 ]
7036 );
7037 });
7038
7039 // Ensure overlapping diagnostics are highlighted correctly.
7040 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7041 lsp::PublishDiagnosticsParams {
7042 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7043 version: Some(open_notification.text_document.version),
7044 diagnostics: vec![
7045 lsp::Diagnostic {
7046 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7047 severity: Some(DiagnosticSeverity::ERROR),
7048 message: "undefined variable 'A'".to_string(),
7049 source: Some("disk".to_string()),
7050 ..Default::default()
7051 },
7052 lsp::Diagnostic {
7053 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7054 severity: Some(DiagnosticSeverity::WARNING),
7055 message: "unreachable statement".to_string(),
7056 source: Some("disk".to_string()),
7057 ..Default::default()
7058 },
7059 ],
7060 },
7061 );
7062
7063 buffer.next_notification(cx).await;
7064 buffer.read_with(cx, |buffer, _| {
7065 assert_eq!(
7066 buffer
7067 .snapshot()
7068 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7069 .collect::<Vec<_>>(),
7070 &[
7071 DiagnosticEntry {
7072 range: Point::new(2, 9)..Point::new(2, 12),
7073 diagnostic: Diagnostic {
7074 severity: DiagnosticSeverity::WARNING,
7075 message: "unreachable statement".to_string(),
7076 is_disk_based: true,
7077 group_id: 4,
7078 is_primary: true,
7079 ..Default::default()
7080 }
7081 },
7082 DiagnosticEntry {
7083 range: Point::new(2, 9)..Point::new(2, 10),
7084 diagnostic: Diagnostic {
7085 severity: DiagnosticSeverity::ERROR,
7086 message: "undefined variable 'A'".to_string(),
7087 is_disk_based: true,
7088 group_id: 3,
7089 is_primary: true,
7090 ..Default::default()
7091 },
7092 }
7093 ]
7094 );
7095 assert_eq!(
7096 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7097 [
7098 ("fn a() { ".to_string(), None),
7099 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7100 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7101 ("\n".to_string(), None),
7102 ]
7103 );
7104 assert_eq!(
7105 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7106 [
7107 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7108 ("\n".to_string(), None),
7109 ]
7110 );
7111 });
7112
7113 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7114 // changes since the last save.
7115 buffer.update(cx, |buffer, cx| {
7116 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7117 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7118 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7119 });
7120 let change_notification_2 = fake_server
7121 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7122 .await;
7123 assert!(
7124 change_notification_2.text_document.version
7125 > change_notification_1.text_document.version
7126 );
7127
7128 // Handle out-of-order diagnostics
7129 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7130 lsp::PublishDiagnosticsParams {
7131 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7132 version: Some(change_notification_2.text_document.version),
7133 diagnostics: vec![
7134 lsp::Diagnostic {
7135 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7136 severity: Some(DiagnosticSeverity::ERROR),
7137 message: "undefined variable 'BB'".to_string(),
7138 source: Some("disk".to_string()),
7139 ..Default::default()
7140 },
7141 lsp::Diagnostic {
7142 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7143 severity: Some(DiagnosticSeverity::WARNING),
7144 message: "undefined variable 'A'".to_string(),
7145 source: Some("disk".to_string()),
7146 ..Default::default()
7147 },
7148 ],
7149 },
7150 );
7151
7152 buffer.next_notification(cx).await;
7153 buffer.read_with(cx, |buffer, _| {
7154 assert_eq!(
7155 buffer
7156 .snapshot()
7157 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7158 .collect::<Vec<_>>(),
7159 &[
7160 DiagnosticEntry {
7161 range: Point::new(2, 21)..Point::new(2, 22),
7162 diagnostic: Diagnostic {
7163 severity: DiagnosticSeverity::WARNING,
7164 message: "undefined variable 'A'".to_string(),
7165 is_disk_based: true,
7166 group_id: 6,
7167 is_primary: true,
7168 ..Default::default()
7169 }
7170 },
7171 DiagnosticEntry {
7172 range: Point::new(3, 9)..Point::new(3, 14),
7173 diagnostic: Diagnostic {
7174 severity: DiagnosticSeverity::ERROR,
7175 message: "undefined variable 'BB'".to_string(),
7176 is_disk_based: true,
7177 group_id: 5,
7178 is_primary: true,
7179 ..Default::default()
7180 },
7181 }
7182 ]
7183 );
7184 });
7185 }
7186
7187 #[gpui::test]
7188 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7189 cx.foreground().forbid_parking();
7190
7191 let text = concat!(
7192 "let one = ;\n", //
7193 "let two = \n",
7194 "let three = 3;\n",
7195 );
7196
7197 let fs = FakeFs::new(cx.background());
7198 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7199
7200 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7201 let buffer = project
7202 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7203 .await
7204 .unwrap();
7205
7206 project.update(cx, |project, cx| {
7207 project
7208 .update_buffer_diagnostics(
7209 &buffer,
7210 vec![
7211 DiagnosticEntry {
7212 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7213 diagnostic: Diagnostic {
7214 severity: DiagnosticSeverity::ERROR,
7215 message: "syntax error 1".to_string(),
7216 ..Default::default()
7217 },
7218 },
7219 DiagnosticEntry {
7220 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7221 diagnostic: Diagnostic {
7222 severity: DiagnosticSeverity::ERROR,
7223 message: "syntax error 2".to_string(),
7224 ..Default::default()
7225 },
7226 },
7227 ],
7228 None,
7229 cx,
7230 )
7231 .unwrap();
7232 });
7233
7234 // An empty range is extended forward to include the following character.
7235 // At the end of a line, an empty range is extended backward to include
7236 // the preceding character.
7237 buffer.read_with(cx, |buffer, _| {
7238 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7239 assert_eq!(
7240 chunks
7241 .iter()
7242 .map(|(s, d)| (s.as_str(), *d))
7243 .collect::<Vec<_>>(),
7244 &[
7245 ("let one = ", None),
7246 (";", Some(DiagnosticSeverity::ERROR)),
7247 ("\nlet two =", None),
7248 (" ", Some(DiagnosticSeverity::ERROR)),
7249 ("\nlet three = 3;\n", None)
7250 ]
7251 );
7252 });
7253 }
7254
7255 #[gpui::test]
7256 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7257 cx.foreground().forbid_parking();
7258
7259 let mut language = Language::new(
7260 LanguageConfig {
7261 name: "Rust".into(),
7262 path_suffixes: vec!["rs".to_string()],
7263 ..Default::default()
7264 },
7265 Some(tree_sitter_rust::language()),
7266 );
7267 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7268
7269 let text = "
7270 fn a() {
7271 f1();
7272 }
7273 fn b() {
7274 f2();
7275 }
7276 fn c() {
7277 f3();
7278 }
7279 "
7280 .unindent();
7281
7282 let fs = FakeFs::new(cx.background());
7283 fs.insert_tree(
7284 "/dir",
7285 json!({
7286 "a.rs": text.clone(),
7287 }),
7288 )
7289 .await;
7290
7291 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7292 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7293 let buffer = project
7294 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7295 .await
7296 .unwrap();
7297
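        // Capture the document version that the language server saw when the
        // buffer was opened.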
7298 let mut fake_server = fake_servers.next().await.unwrap();
7299 let lsp_document_version = fake_server
7300 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7301 .await
7302 .text_document
7303 .version;
7304
7305 // Simulate editing the buffer after the language server computes some edits.
7306 buffer.update(cx, |buffer, cx| {
7307 buffer.edit(
7308 [(
7309 Point::new(0, 0)..Point::new(0, 0),
7310 "// above first function\n",
7311 )],
7312 cx,
7313 );
7314 buffer.edit(
7315 [(
7316 Point::new(2, 0)..Point::new(2, 0),
7317 " // inside first function\n",
7318 )],
7319 cx,
7320 );
7321 buffer.edit(
7322 [(
7323 Point::new(6, 4)..Point::new(6, 4),
7324 "// inside second function ",
7325 )],
7326 cx,
7327 );
7328
7329 assert_eq!(
7330 buffer.text(),
7331 "
7332 // above first function
7333 fn a() {
7334 // inside first function
7335 f1();
7336 }
7337 fn b() {
7338 // inside second function f2();
7339 }
7340 fn c() {
7341 f3();
7342 }
7343 "
7344 .unindent()
7345 );
7346 });
7347
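        // Request edits that the server computed against the earlier version
        // of the buffer; they should be adjusted to the current content.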
7348 let edits = project
7349 .update(cx, |project, cx| {
7350 project.edits_from_lsp(
7351 &buffer,
7352 vec![
7353 // replace body of first function
7354 lsp::TextEdit {
7355 range: lsp::Range::new(
7356 lsp::Position::new(0, 0),
7357 lsp::Position::new(3, 0),
7358 ),
7359 new_text: "
7360 fn a() {
7361 f10();
7362 }
7363 "
7364 .unindent(),
7365 },
7366 // edit inside second function
7367 lsp::TextEdit {
7368 range: lsp::Range::new(
7369 lsp::Position::new(4, 6),
7370 lsp::Position::new(4, 6),
7371 ),
7372 new_text: "00".into(),
7373 },
7374 // edit inside third function via two distinct edits
7375 lsp::TextEdit {
7376 range: lsp::Range::new(
7377 lsp::Position::new(7, 5),
7378 lsp::Position::new(7, 5),
7379 ),
7380 new_text: "4000".into(),
7381 },
7382 lsp::TextEdit {
7383 range: lsp::Range::new(
7384 lsp::Position::new(7, 5),
7385 lsp::Position::new(7, 6),
7386 ),
7387 new_text: "".into(),
7388 },
7389 ],
7390 Some(lsp_document_version),
7391 cx,
7392 )
7393 })
7394 .await
7395 .unwrap();
7396
7397 buffer.update(cx, |buffer, cx| {
7398 for (range, new_text) in edits {
7399 buffer.edit([(range, new_text)], cx);
7400 }
7401 assert_eq!(
7402 buffer.text(),
7403 "
7404 // above first function
7405 fn a() {
7406 // inside first function
7407 f10();
7408 }
7409 fn b() {
7410 // inside second function f200();
7411 }
7412 fn c() {
7413 f4000();
7414 }
7415 "
7416 .unindent()
7417 );
7418 });
7419 }
7420
7421 #[gpui::test]
7422 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7423 cx.foreground().forbid_parking();
7424
7425 let text = "
7426 use a::b;
7427 use a::c;
7428
7429 fn f() {
7430 b();
7431 c();
7432 }
7433 "
7434 .unindent();
7435
7436 let fs = FakeFs::new(cx.background());
7437 fs.insert_tree(
7438 "/dir",
7439 json!({
7440 "a.rs": text.clone(),
7441 }),
7442 )
7443 .await;
7444
7445 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7446 let buffer = project
7447 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7448 .await
7449 .unwrap();
7450
7451 // Simulate the language server sending us a small edit in the form of a very large diff.
7452 // Rust-analyzer does this when performing a merge-imports code action.
7453 let edits = project
7454 .update(cx, |project, cx| {
7455 project.edits_from_lsp(
7456 &buffer,
7457 [
7458 // Replace the first use statement without editing the semicolon.
7459 lsp::TextEdit {
7460 range: lsp::Range::new(
7461 lsp::Position::new(0, 4),
7462 lsp::Position::new(0, 8),
7463 ),
7464 new_text: "a::{b, c}".into(),
7465 },
7466 // Reinsert the remainder of the file between the semicolon and the final
7467 // newline of the file.
7468 lsp::TextEdit {
7469 range: lsp::Range::new(
7470 lsp::Position::new(0, 9),
7471 lsp::Position::new(0, 9),
7472 ),
7473 new_text: "\n\n".into(),
7474 },
7475 lsp::TextEdit {
7476 range: lsp::Range::new(
7477 lsp::Position::new(0, 9),
7478 lsp::Position::new(0, 9),
7479 ),
7480 new_text: "
7481 fn f() {
7482 b();
7483 c();
7484 }"
7485 .unindent(),
7486 },
7487 // Delete everything after the first newline of the file.
7488 lsp::TextEdit {
7489 range: lsp::Range::new(
7490 lsp::Position::new(1, 0),
7491 lsp::Position::new(7, 0),
7492 ),
7493 new_text: "".into(),
7494 },
7495 ],
7496 None,
7497 cx,
7498 )
7499 })
7500 .await
7501 .unwrap();
7502
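        // The diff-based post-processing collapses the four LSP edits into
        // two minimal edits.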
7503 buffer.update(cx, |buffer, cx| {
7504 let edits = edits
7505 .into_iter()
7506 .map(|(range, text)| {
7507 (
7508 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7509 text,
7510 )
7511 })
7512 .collect::<Vec<_>>();
7513
7514 assert_eq!(
7515 edits,
7516 [
7517 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7518 (Point::new(1, 0)..Point::new(2, 0), "".into())
7519 ]
7520 );
7521
7522 for (range, new_text) in edits {
7523 buffer.edit([(range, new_text)], cx);
7524 }
7525 assert_eq!(
7526 buffer.text(),
7527 "
7528 use a::{b, c};
7529
7530 fn f() {
7531 b();
7532 c();
7533 }
7534 "
7535 .unindent()
7536 );
7537 });
7538 }
7539
7540 #[gpui::test]
7541 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7542 cx.foreground().forbid_parking();
7543
7544 let text = "
7545 use a::b;
7546 use a::c;
7547
7548 fn f() {
7549 b();
7550 c();
7551 }
7552 "
7553 .unindent();
7554
7555 let fs = FakeFs::new(cx.background());
7556 fs.insert_tree(
7557 "/dir",
7558 json!({
7559 "a.rs": text.clone(),
7560 }),
7561 )
7562 .await;
7563
7564 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7565 let buffer = project
7566 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7567 .await
7568 .unwrap();
7569
7570 // Simulate the language server sending us edits in a non-ordered fashion,
7571 // with ranges sometimes being inverted.
7572 let edits = project
7573 .update(cx, |project, cx| {
7574 project.edits_from_lsp(
7575 &buffer,
7576 [
7577 lsp::TextEdit {
7578 range: lsp::Range::new(
7579 lsp::Position::new(0, 9),
7580 lsp::Position::new(0, 9),
7581 ),
7582 new_text: "\n\n".into(),
7583 },
7584 lsp::TextEdit {
7585 range: lsp::Range::new(
7586 lsp::Position::new(0, 8),
7587 lsp::Position::new(0, 4),
7588 ),
7589 new_text: "a::{b, c}".into(),
7590 },
7591 lsp::TextEdit {
7592 range: lsp::Range::new(
7593 lsp::Position::new(1, 0),
7594 lsp::Position::new(7, 0),
7595 ),
7596 new_text: "".into(),
7597 },
7598 lsp::TextEdit {
7599 range: lsp::Range::new(
7600 lsp::Position::new(0, 9),
7601 lsp::Position::new(0, 9),
7602 ),
7603 new_text: "
7604 fn f() {
7605 b();
7606 c();
7607 }"
7608 .unindent(),
7609 },
7610 ],
7611 None,
7612 cx,
7613 )
7614 })
7615 .await
7616 .unwrap();
7617
7618 buffer.update(cx, |buffer, cx| {
7619 let edits = edits
7620 .into_iter()
7621 .map(|(range, text)| {
7622 (
7623 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7624 text,
7625 )
7626 })
7627 .collect::<Vec<_>>();
7628
7629 assert_eq!(
7630 edits,
7631 [
7632 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7633 (Point::new(1, 0)..Point::new(2, 0), "".into())
7634 ]
7635 );
7636
7637 for (range, new_text) in edits {
7638 buffer.edit([(range, new_text)], cx);
7639 }
7640 assert_eq!(
7641 buffer.text(),
7642 "
7643 use a::{b, c};
7644
7645 fn f() {
7646 b();
7647 c();
7648 }
7649 "
7650 .unindent()
7651 );
7652 });
7653 }
7654
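    // Collects the buffer's chunks for `range`, merging adjacent chunks that
    // share the same diagnostic severity, so tests can assert on
    // (text, severity) pairs.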
7655 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7656 buffer: &Buffer,
7657 range: Range<T>,
7658 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7659 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7660 for chunk in buffer.snapshot().chunks(range, true) {
7661 if chunks.last().map_or(false, |prev_chunk| {
7662 prev_chunk.1 == chunk.diagnostic_severity
7663 }) {
7664 chunks.last_mut().unwrap().0.push_str(chunk.text);
7665 } else {
7666 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7667 }
7668 }
7669 chunks
7670 }
7671
7672 #[gpui::test]
7673 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7674 let dir = temp_tree(json!({
7675 "root": {
7676 "dir1": {},
7677 "dir2": {
7678 "dir3": {}
7679 }
7680 }
7681 }));
7682
7683 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7684 let cancel_flag = Default::default();
7685 let results = project
7686 .read_with(cx, |project, cx| {
7687 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7688 })
7689 .await;
7690
7691 assert!(results.is_empty());
7692 }
7693
7694 #[gpui::test(iterations = 10)]
7695 async fn test_definition(cx: &mut gpui::TestAppContext) {
7696 let mut language = Language::new(
7697 LanguageConfig {
7698 name: "Rust".into(),
7699 path_suffixes: vec!["rs".to_string()],
7700 ..Default::default()
7701 },
7702 Some(tree_sitter_rust::language()),
7703 );
7704 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7705
7706 let fs = FakeFs::new(cx.background());
7707 fs.insert_tree(
7708 "/dir",
7709 json!({
7710 "a.rs": "const fn a() { A }",
7711 "b.rs": "const y: i32 = crate::a()",
7712 }),
7713 )
7714 .await;
7715
7716 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7717 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7718
7719 let buffer = project
7720 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7721 .await
7722 .unwrap();
7723
7724 let fake_server = fake_servers.next().await.unwrap();
7725 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7726 let params = params.text_document_position_params;
7727 assert_eq!(
7728 params.text_document.uri.to_file_path().unwrap(),
7729 Path::new("/dir/b.rs"),
7730 );
7731 assert_eq!(params.position, lsp::Position::new(0, 22));
7732
7733 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7734 lsp::Location::new(
7735 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7736 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7737 ),
7738 )))
7739 });
7740
7741 let mut definitions = project
7742 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7743 .await
7744 .unwrap();
7745
        // Assert that no new language server was started for the definition target's worktree.
7747 cx.foreground().run_until_parked();
7748 assert!(fake_servers.try_next().is_err());
7749
7750 assert_eq!(definitions.len(), 1);
7751 let definition = definitions.pop().unwrap();
7752 cx.update(|cx| {
7753 let target_buffer = definition.target.buffer.read(cx);
7754 assert_eq!(
7755 target_buffer
7756 .file()
7757 .unwrap()
7758 .as_local()
7759 .unwrap()
7760 .abs_path(cx),
7761 Path::new("/dir/a.rs"),
7762 );
7763 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7764 assert_eq!(
7765 list_worktrees(&project, cx),
7766 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7767 );
7768
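            // Dropping the definition releases the target buffer, so the invisible
            // worktree that was created for "/dir/a.rs" should go away (asserted below).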
7769 drop(definition);
7770 });
7771 cx.read(|cx| {
7772 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7773 });
7774
7775 fn list_worktrees<'a>(
7776 project: &'a ModelHandle<Project>,
7777 cx: &'a AppContext,
7778 ) -> Vec<(&'a Path, bool)> {
7779 project
7780 .read(cx)
7781 .worktrees(cx)
7782 .map(|worktree| {
7783 let worktree = worktree.read(cx);
7784 (
7785 worktree.as_local().unwrap().abs_path().as_ref(),
7786 worktree.is_visible(),
7787 )
7788 })
7789 .collect::<Vec<_>>()
7790 }
7791 }
7792
7793 #[gpui::test]
7794 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7795 let mut language = Language::new(
7796 LanguageConfig {
7797 name: "TypeScript".into(),
7798 path_suffixes: vec!["ts".to_string()],
7799 ..Default::default()
7800 },
7801 Some(tree_sitter_typescript::language_typescript()),
7802 );
7803 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7804
7805 let fs = FakeFs::new(cx.background());
7806 fs.insert_tree(
7807 "/dir",
7808 json!({
7809 "a.ts": "",
7810 }),
7811 )
7812 .await;
7813
7814 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7815 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7816 let buffer = project
7817 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7818 .await
7819 .unwrap();
7820
7821 let fake_server = fake_language_servers.next().await.unwrap();
7822
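        // The completion item below has no explicit edit range, so the replaced range
        // must be inferred from the word preceding the cursor.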
7823 let text = "let a = b.fqn";
7824 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7825 let completions = project.update(cx, |project, cx| {
7826 project.completions(&buffer, text.len(), cx)
7827 });
7828
7829 fake_server
7830 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7831 Ok(Some(lsp::CompletionResponse::Array(vec![
7832 lsp::CompletionItem {
7833 label: "fullyQualifiedName?".into(),
7834 insert_text: Some("fullyQualifiedName".into()),
7835 ..Default::default()
7836 },
7837 ])))
7838 })
7839 .next()
7840 .await;
7841 let completions = completions.await.unwrap();
7842 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7843 assert_eq!(completions.len(), 1);
7844 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7845 assert_eq!(
7846 completions[0].old_range.to_offset(&snapshot),
7847 text.len() - 3..text.len()
7848 );
7849
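        // When completing inside a string literal, the inferred range should cover only
        // the partial word and stop before the closing quote.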
7850 let text = "let a = \"atoms/cmp\"";
7851 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7852 let completions = project.update(cx, |project, cx| {
7853 project.completions(&buffer, text.len() - 1, cx)
7854 });
7855
7856 fake_server
7857 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7858 Ok(Some(lsp::CompletionResponse::Array(vec![
7859 lsp::CompletionItem {
7860 label: "component".into(),
7861 ..Default::default()
7862 },
7863 ])))
7864 })
7865 .next()
7866 .await;
7867 let completions = completions.await.unwrap();
7868 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7869 assert_eq!(completions.len(), 1);
7870 assert_eq!(completions[0].new_text, "component");
7871 assert_eq!(
7872 completions[0].old_range.to_offset(&snapshot),
7873 text.len() - 4..text.len() - 1
7874 );
7875 }
7876
7877 #[gpui::test(iterations = 10)]
7878 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7879 let mut language = Language::new(
7880 LanguageConfig {
7881 name: "TypeScript".into(),
7882 path_suffixes: vec!["ts".to_string()],
7883 ..Default::default()
7884 },
7885 None,
7886 );
7887 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7888
7889 let fs = FakeFs::new(cx.background());
7890 fs.insert_tree(
7891 "/dir",
7892 json!({
7893 "a.ts": "a",
7894 }),
7895 )
7896 .await;
7897
7898 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7899 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7900 let buffer = project
7901 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7902 .await
7903 .unwrap();
7904
7905 let fake_server = fake_language_servers.next().await.unwrap();
7906
        // The language server returns code actions that contain commands but no edits.
7908 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7909 fake_server
7910 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7911 Ok(Some(vec![
7912 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7913 title: "The code action".into(),
7914 command: Some(lsp::Command {
7915 title: "The command".into(),
7916 command: "_the/command".into(),
7917 arguments: Some(vec![json!("the-argument")]),
7918 }),
7919 ..Default::default()
7920 }),
7921 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7922 title: "two".into(),
7923 ..Default::default()
7924 }),
7925 ]))
7926 })
7927 .next()
7928 .await;
7929
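        // Apply the first code action, which carries only a command.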
7930 let action = actions.await.unwrap()[0].clone();
7931 let apply = project.update(cx, |project, cx| {
7932 project.apply_code_action(buffer.clone(), action, true, cx)
7933 });
7934
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the action's command instead.
7937 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7938 |action, _| async move { Ok(action) },
7939 );
7940
7941 // While executing the command, the language server sends the editor
7942 // a `workspaceEdit` request.
7943 fake_server
7944 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7945 let fake = fake_server.clone();
7946 move |params, _| {
7947 assert_eq!(params.command, "_the/command");
7948 let fake = fake.clone();
7949 async move {
7950 fake.server
7951 .request::<lsp::request::ApplyWorkspaceEdit>(
7952 lsp::ApplyWorkspaceEditParams {
7953 label: None,
7954 edit: lsp::WorkspaceEdit {
7955 changes: Some(
7956 [(
7957 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7958 vec![lsp::TextEdit {
7959 range: lsp::Range::new(
7960 lsp::Position::new(0, 0),
7961 lsp::Position::new(0, 0),
7962 ),
7963 new_text: "X".into(),
7964 }],
7965 )]
7966 .into_iter()
7967 .collect(),
7968 ),
7969 ..Default::default()
7970 },
7971 },
7972 )
7973 .await
7974 .unwrap();
7975 Ok(Some(json!(null)))
7976 }
7977 }
7978 })
7979 .next()
7980 .await;
7981
7982 // Applying the code action returns a project transaction containing the edits
7983 // sent by the language server in its `workspaceEdit` request.
7984 let transaction = apply.await.unwrap();
7985 assert!(transaction.0.contains_key(&buffer));
7986 buffer.update(cx, |buffer, cx| {
7987 assert_eq!(buffer.text(), "Xa");
7988 buffer.undo(cx);
7989 assert_eq!(buffer.text(), "a");
7990 });
7991 }
7992
7993 #[gpui::test]
7994 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7995 let fs = FakeFs::new(cx.background());
7996 fs.insert_tree(
7997 "/dir",
7998 json!({
7999 "file1": "the old contents",
8000 }),
8001 )
8002 .await;
8003
8004 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8005 let buffer = project
8006 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8007 .await
8008 .unwrap();
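        // Edit the buffer and save it; the file on disk should then match the buffer.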
8009 buffer
8010 .update(cx, |buffer, cx| {
8011 assert_eq!(buffer.text(), "the old contents");
8012 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8013 buffer.save(cx)
8014 })
8015 .await
8016 .unwrap();
8017
8018 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8019 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8020 }
8021
8022 #[gpui::test]
8023 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8024 let fs = FakeFs::new(cx.background());
8025 fs.insert_tree(
8026 "/dir",
8027 json!({
8028 "file1": "the old contents",
8029 }),
8030 )
8031 .await;
8032
8033 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8034 let buffer = project
8035 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8036 .await
8037 .unwrap();
8038 buffer
8039 .update(cx, |buffer, cx| {
8040 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8041 buffer.save(cx)
8042 })
8043 .await
8044 .unwrap();
8045
8046 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8047 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8048 }
8049
8050 #[gpui::test]
8051 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8052 let fs = FakeFs::new(cx.background());
8053 fs.insert_tree("/dir", json!({})).await;
8054
8055 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8056 let buffer = project.update(cx, |project, cx| {
8057 project.create_buffer("", None, cx).unwrap()
8058 });
8059 buffer.update(cx, |buffer, cx| {
8060 buffer.edit([(0..0, "abc")], cx);
8061 assert!(buffer.is_dirty());
8062 assert!(!buffer.has_conflict());
8063 });
8064 project
8065 .update(cx, |project, cx| {
8066 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8067 })
8068 .await
8069 .unwrap();
8070 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8071 buffer.read_with(cx, |buffer, cx| {
8072 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8073 assert!(!buffer.is_dirty());
8074 assert!(!buffer.has_conflict());
8075 });
8076
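        // Opening the path the buffer was saved to should return the same buffer.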
8077 let opened_buffer = project
8078 .update(cx, |project, cx| {
8079 project.open_local_buffer("/dir/file1", cx)
8080 })
8081 .await
8082 .unwrap();
8083 assert_eq!(opened_buffer, buffer);
8084 }
8085
8086 #[gpui::test(retries = 5)]
8087 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8088 let dir = temp_tree(json!({
8089 "a": {
8090 "file1": "",
8091 "file2": "",
8092 "file3": "",
8093 },
8094 "b": {
8095 "c": {
8096 "file4": "",
8097 "file5": "",
8098 }
8099 }
8100 }));
8101
8102 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8103 let rpc = project.read_with(cx, |p, _| p.client.clone());
8104
8105 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8106 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8107 async move { buffer.await.unwrap() }
8108 };
8109 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8110 project.read_with(cx, |project, cx| {
8111 let tree = project.worktrees(cx).next().unwrap();
8112 tree.read(cx)
8113 .entry_for_path(path)
8114 .expect(&format!("no entry for path {}", path))
8115 .id
8116 })
8117 };
8118
8119 let buffer2 = buffer_for_path("a/file2", cx).await;
8120 let buffer3 = buffer_for_path("a/file3", cx).await;
8121 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8122 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8123
8124 let file2_id = id_for_path("a/file2", &cx);
8125 let file3_id = id_for_path("a/file3", &cx);
8126 let file4_id = id_for_path("b/c/file4", &cx);
8127
8128 // Create a remote copy of this worktree.
8129 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8130 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8131 let (remote, load_task) = cx.update(|cx| {
8132 Worktree::remote(
8133 1,
8134 1,
8135 initial_snapshot.to_proto(&Default::default(), true),
8136 rpc.clone(),
8137 cx,
8138 )
8139 });
8141 load_task.await;
8142
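        // None of the buffers have been modified, so none of them should be dirty.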
8143 cx.read(|cx| {
8144 assert!(!buffer2.read(cx).is_dirty());
8145 assert!(!buffer3.read(cx).is_dirty());
8146 assert!(!buffer4.read(cx).is_dirty());
8147 assert!(!buffer5.read(cx).is_dirty());
8148 });
8149
8150 // Rename and delete files and directories.
8151 tree.flush_fs_events(&cx).await;
8152 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8153 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8154 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8155 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8156 tree.flush_fs_events(&cx).await;
8157
8158 let expected_paths = vec![
8159 "a",
8160 "a/file1",
8161 "a/file2.new",
8162 "b",
8163 "d",
8164 "d/file3",
8165 "d/file4",
8166 ];
8167
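        // The local worktree reflects the renames and deletions: entry ids are preserved
        // across renames, open buffers track their files' new paths, and the buffer whose
        // file was removed is marked as deleted.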
8168 cx.read(|app| {
8169 assert_eq!(
8170 tree.read(app)
8171 .paths()
8172 .map(|p| p.to_str().unwrap())
8173 .collect::<Vec<_>>(),
8174 expected_paths
8175 );
8176
8177 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8178 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8179 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8180
8181 assert_eq!(
8182 buffer2.read(app).file().unwrap().path().as_ref(),
8183 Path::new("a/file2.new")
8184 );
8185 assert_eq!(
8186 buffer3.read(app).file().unwrap().path().as_ref(),
8187 Path::new("d/file3")
8188 );
8189 assert_eq!(
8190 buffer4.read(app).file().unwrap().path().as_ref(),
8191 Path::new("d/file4")
8192 );
8193 assert_eq!(
8194 buffer5.read(app).file().unwrap().path().as_ref(),
8195 Path::new("b/c/file5")
8196 );
8197
8198 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8199 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8200 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8201 assert!(buffer5.read(app).file().unwrap().is_deleted());
8202 });
8203
8204 // Update the remote worktree. Check that it becomes consistent with the
8205 // local worktree.
8206 remote.update(cx, |remote, cx| {
8207 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8208 &initial_snapshot,
8209 1,
8210 1,
8211 true,
8212 );
8213 remote
8214 .as_remote_mut()
8215 .unwrap()
8216 .snapshot
8217 .apply_remote_update(update_message)
8218 .unwrap();
8219
8220 assert_eq!(
8221 remote
8222 .paths()
8223 .map(|p| p.to_str().unwrap())
8224 .collect::<Vec<_>>(),
8225 expected_paths
8226 );
8227 });
8228 }
8229
8230 #[gpui::test]
8231 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8232 let fs = FakeFs::new(cx.background());
8233 fs.insert_tree(
8234 "/dir",
8235 json!({
8236 "a.txt": "a-contents",
8237 "b.txt": "b-contents",
8238 }),
8239 )
8240 .await;
8241
8242 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8243
8244 // Spawn multiple tasks to open paths, repeating some paths.
8245 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8246 (
8247 p.open_local_buffer("/dir/a.txt", cx),
8248 p.open_local_buffer("/dir/b.txt", cx),
8249 p.open_local_buffer("/dir/a.txt", cx),
8250 )
8251 });
8252
8253 let buffer_a_1 = buffer_a_1.await.unwrap();
8254 let buffer_a_2 = buffer_a_2.await.unwrap();
8255 let buffer_b = buffer_b.await.unwrap();
8256 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8257 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8258
8259 // There is only one buffer per path.
8260 let buffer_a_id = buffer_a_1.id();
8261 assert_eq!(buffer_a_2.id(), buffer_a_id);
8262
8263 // Open the same path again while it is still open.
8264 drop(buffer_a_1);
8265 let buffer_a_3 = project
8266 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8267 .await
8268 .unwrap();
8269
8270 // There's still only one buffer per path.
8271 assert_eq!(buffer_a_3.id(), buffer_a_id);
8272 }
8273
8274 #[gpui::test]
8275 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8276 let fs = FakeFs::new(cx.background());
8277 fs.insert_tree(
8278 "/dir",
8279 json!({
8280 "file1": "abc",
8281 "file2": "def",
8282 "file3": "ghi",
8283 }),
8284 )
8285 .await;
8286
8287 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8288
8289 let buffer1 = project
8290 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8291 .await
8292 .unwrap();
8293 let events = Rc::new(RefCell::new(Vec::new()));
8294
8295 // initially, the buffer isn't dirty.
8296 buffer1.update(cx, |buffer, cx| {
8297 cx.subscribe(&buffer1, {
8298 let events = events.clone();
8299 move |_, _, event, _| match event {
8300 BufferEvent::Operation(_) => {}
8301 _ => events.borrow_mut().push(event.clone()),
8302 }
8303 })
8304 .detach();
8305
8306 assert!(!buffer.is_dirty());
8307 assert!(events.borrow().is_empty());
8308
8309 buffer.edit([(1..2, "")], cx);
8310 });
8311
8312 // after the first edit, the buffer is dirty, and emits a dirtied event.
8313 buffer1.update(cx, |buffer, cx| {
8314 assert!(buffer.text() == "ac");
8315 assert!(buffer.is_dirty());
8316 assert_eq!(
8317 *events.borrow(),
8318 &[language::Event::Edited, language::Event::DirtyChanged]
8319 );
8320 events.borrow_mut().clear();
8321 buffer.did_save(
8322 buffer.version(),
8323 buffer.as_rope().fingerprint(),
8324 buffer.file().unwrap().mtime(),
8325 None,
8326 cx,
8327 );
8328 });
8329
8330 // after saving, the buffer is not dirty, and emits a saved event.
8331 buffer1.update(cx, |buffer, cx| {
8332 assert!(!buffer.is_dirty());
8333 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8334 events.borrow_mut().clear();
8335
8336 buffer.edit([(1..1, "B")], cx);
8337 buffer.edit([(2..2, "D")], cx);
8338 });
8339
8340 // after editing again, the buffer is dirty, and emits another dirty event.
8341 buffer1.update(cx, |buffer, cx| {
8342 assert!(buffer.text() == "aBDc");
8343 assert!(buffer.is_dirty());
8344 assert_eq!(
8345 *events.borrow(),
8346 &[
8347 language::Event::Edited,
8348 language::Event::DirtyChanged,
8349 language::Event::Edited,
8350 ],
8351 );
8352 events.borrow_mut().clear();
8353
8354 // After restoring the buffer to its previously-saved state,
8355 // the buffer is not considered dirty anymore.
8356 buffer.edit([(1..3, "")], cx);
8357 assert!(buffer.text() == "ac");
8358 assert!(!buffer.is_dirty());
8359 });
8360
8361 assert_eq!(
8362 *events.borrow(),
8363 &[language::Event::Edited, language::Event::DirtyChanged]
8364 );
8365
8366 // When a file is deleted, the buffer is considered dirty.
8367 let events = Rc::new(RefCell::new(Vec::new()));
8368 let buffer2 = project
8369 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8370 .await
8371 .unwrap();
8372 buffer2.update(cx, |_, cx| {
8373 cx.subscribe(&buffer2, {
8374 let events = events.clone();
8375 move |_, _, event, _| events.borrow_mut().push(event.clone())
8376 })
8377 .detach();
8378 });
8379
8380 fs.remove_file("/dir/file2".as_ref(), Default::default())
8381 .await
8382 .unwrap();
8383 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8384 assert_eq!(
8385 *events.borrow(),
8386 &[
8387 language::Event::DirtyChanged,
8388 language::Event::FileHandleChanged
8389 ]
8390 );
8391
8392 // When a file is already dirty when deleted, we don't emit a Dirtied event.
8393 let events = Rc::new(RefCell::new(Vec::new()));
8394 let buffer3 = project
8395 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8396 .await
8397 .unwrap();
8398 buffer3.update(cx, |_, cx| {
8399 cx.subscribe(&buffer3, {
8400 let events = events.clone();
8401 move |_, _, event, _| events.borrow_mut().push(event.clone())
8402 })
8403 .detach();
8404 });
8405
8406 buffer3.update(cx, |buffer, cx| {
8407 buffer.edit([(0..0, "x")], cx);
8408 });
8409 events.borrow_mut().clear();
8410 fs.remove_file("/dir/file3".as_ref(), Default::default())
8411 .await
8412 .unwrap();
8413 buffer3
8414 .condition(&cx, |_, _| !events.borrow().is_empty())
8415 .await;
8416 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8417 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8418 }
8419
8420 #[gpui::test]
8421 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8422 let initial_contents = "aaa\nbbbbb\nc\n";
8423 let fs = FakeFs::new(cx.background());
8424 fs.insert_tree(
8425 "/dir",
8426 json!({
8427 "the-file": initial_contents,
8428 }),
8429 )
8430 .await;
8431 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8432 let buffer = project
8433 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8434 .await
8435 .unwrap();
8436
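        // Place an anchor on each of the first three lines so we can verify that they
        // keep up with the edits applied when the buffer reloads from disk.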
8437 let anchors = (0..3)
8438 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8439 .collect::<Vec<_>>();
8440
8441 // Change the file on disk, adding two new lines of text, and removing
8442 // one line.
8443 buffer.read_with(cx, |buffer, _| {
8444 assert!(!buffer.is_dirty());
8445 assert!(!buffer.has_conflict());
8446 });
8447 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8448 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8449 .await
8450 .unwrap();
8451
8452 // Because the buffer was not modified, it is reloaded from disk. Its
8453 // contents are edited according to the diff between the old and new
8454 // file contents.
8455 buffer
8456 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8457 .await;
8458
8459 buffer.update(cx, |buffer, _| {
8460 assert_eq!(buffer.text(), new_contents);
8461 assert!(!buffer.is_dirty());
8462 assert!(!buffer.has_conflict());
8463
8464 let anchor_positions = anchors
8465 .iter()
8466 .map(|anchor| anchor.to_point(&*buffer))
8467 .collect::<Vec<_>>();
8468 assert_eq!(
8469 anchor_positions,
8470 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8471 );
8472 });
8473
8474 // Modify the buffer
8475 buffer.update(cx, |buffer, cx| {
8476 buffer.edit([(0..0, " ")], cx);
8477 assert!(buffer.is_dirty());
8478 assert!(!buffer.has_conflict());
8479 });
8480
8481 // Change the file on disk again, adding blank lines to the beginning.
8482 fs.save(
8483 "/dir/the-file".as_ref(),
8484 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8485 )
8486 .await
8487 .unwrap();
8488
8489 // Because the buffer is modified, it doesn't reload from disk, but is
8490 // marked as having a conflict.
8491 buffer
8492 .condition(&cx, |buffer, _| buffer.has_conflict())
8493 .await;
8494 }
8495
8496 #[gpui::test]
8497 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8498 cx.foreground().forbid_parking();
8499
8500 let fs = FakeFs::new(cx.background());
8501 fs.insert_tree(
8502 "/the-dir",
8503 json!({
8504 "a.rs": "
8505 fn foo(mut v: Vec<usize>) {
8506 for x in &v {
8507 v.push(1);
8508 }
8509 }
8510 "
8511 .unindent(),
8512 }),
8513 )
8514 .await;
8515
8516 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8517 let buffer = project
8518 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8519 .await
8520 .unwrap();
8521
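        // Publish diagnostics in which hint-level entries refer back to their primary
        // diagnostics through `related_information`.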
8522 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8523 let message = lsp::PublishDiagnosticsParams {
8524 uri: buffer_uri.clone(),
8525 diagnostics: vec![
8526 lsp::Diagnostic {
8527 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8528 severity: Some(DiagnosticSeverity::WARNING),
8529 message: "error 1".to_string(),
8530 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8531 location: lsp::Location {
8532 uri: buffer_uri.clone(),
8533 range: lsp::Range::new(
8534 lsp::Position::new(1, 8),
8535 lsp::Position::new(1, 9),
8536 ),
8537 },
8538 message: "error 1 hint 1".to_string(),
8539 }]),
8540 ..Default::default()
8541 },
8542 lsp::Diagnostic {
8543 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8544 severity: Some(DiagnosticSeverity::HINT),
8545 message: "error 1 hint 1".to_string(),
8546 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8547 location: lsp::Location {
8548 uri: buffer_uri.clone(),
8549 range: lsp::Range::new(
8550 lsp::Position::new(1, 8),
8551 lsp::Position::new(1, 9),
8552 ),
8553 },
8554 message: "original diagnostic".to_string(),
8555 }]),
8556 ..Default::default()
8557 },
8558 lsp::Diagnostic {
8559 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8560 severity: Some(DiagnosticSeverity::ERROR),
8561 message: "error 2".to_string(),
8562 related_information: Some(vec![
8563 lsp::DiagnosticRelatedInformation {
8564 location: lsp::Location {
8565 uri: buffer_uri.clone(),
8566 range: lsp::Range::new(
8567 lsp::Position::new(1, 13),
8568 lsp::Position::new(1, 15),
8569 ),
8570 },
8571 message: "error 2 hint 1".to_string(),
8572 },
8573 lsp::DiagnosticRelatedInformation {
8574 location: lsp::Location {
8575 uri: buffer_uri.clone(),
8576 range: lsp::Range::new(
8577 lsp::Position::new(1, 13),
8578 lsp::Position::new(1, 15),
8579 ),
8580 },
8581 message: "error 2 hint 2".to_string(),
8582 },
8583 ]),
8584 ..Default::default()
8585 },
8586 lsp::Diagnostic {
8587 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8588 severity: Some(DiagnosticSeverity::HINT),
8589 message: "error 2 hint 1".to_string(),
8590 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8591 location: lsp::Location {
8592 uri: buffer_uri.clone(),
8593 range: lsp::Range::new(
8594 lsp::Position::new(2, 8),
8595 lsp::Position::new(2, 17),
8596 ),
8597 },
8598 message: "original diagnostic".to_string(),
8599 }]),
8600 ..Default::default()
8601 },
8602 lsp::Diagnostic {
8603 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8604 severity: Some(DiagnosticSeverity::HINT),
8605 message: "error 2 hint 2".to_string(),
8606 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8607 location: lsp::Location {
8608 uri: buffer_uri.clone(),
8609 range: lsp::Range::new(
8610 lsp::Position::new(2, 8),
8611 lsp::Position::new(2, 17),
8612 ),
8613 },
8614 message: "original diagnostic".to_string(),
8615 }]),
8616 ..Default::default()
8617 },
8618 ],
8619 version: None,
8620 };
8621
8622 project
8623 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8624 .unwrap();
8625 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8626
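        // Related diagnostics are grouped together: the warning and its hint form group 0,
        // while the error and its two hints form group 1.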
8627 assert_eq!(
8628 buffer
8629 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8630 .collect::<Vec<_>>(),
8631 &[
8632 DiagnosticEntry {
8633 range: Point::new(1, 8)..Point::new(1, 9),
8634 diagnostic: Diagnostic {
8635 severity: DiagnosticSeverity::WARNING,
8636 message: "error 1".to_string(),
8637 group_id: 0,
8638 is_primary: true,
8639 ..Default::default()
8640 }
8641 },
8642 DiagnosticEntry {
8643 range: Point::new(1, 8)..Point::new(1, 9),
8644 diagnostic: Diagnostic {
8645 severity: DiagnosticSeverity::HINT,
8646 message: "error 1 hint 1".to_string(),
8647 group_id: 0,
8648 is_primary: false,
8649 ..Default::default()
8650 }
8651 },
8652 DiagnosticEntry {
8653 range: Point::new(1, 13)..Point::new(1, 15),
8654 diagnostic: Diagnostic {
8655 severity: DiagnosticSeverity::HINT,
8656 message: "error 2 hint 1".to_string(),
8657 group_id: 1,
8658 is_primary: false,
8659 ..Default::default()
8660 }
8661 },
8662 DiagnosticEntry {
8663 range: Point::new(1, 13)..Point::new(1, 15),
8664 diagnostic: Diagnostic {
8665 severity: DiagnosticSeverity::HINT,
8666 message: "error 2 hint 2".to_string(),
8667 group_id: 1,
8668 is_primary: false,
8669 ..Default::default()
8670 }
8671 },
8672 DiagnosticEntry {
8673 range: Point::new(2, 8)..Point::new(2, 17),
8674 diagnostic: Diagnostic {
8675 severity: DiagnosticSeverity::ERROR,
8676 message: "error 2".to_string(),
8677 group_id: 1,
8678 is_primary: true,
8679 ..Default::default()
8680 }
8681 }
8682 ]
8683 );
8684
8685 assert_eq!(
8686 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8687 &[
8688 DiagnosticEntry {
8689 range: Point::new(1, 8)..Point::new(1, 9),
8690 diagnostic: Diagnostic {
8691 severity: DiagnosticSeverity::WARNING,
8692 message: "error 1".to_string(),
8693 group_id: 0,
8694 is_primary: true,
8695 ..Default::default()
8696 }
8697 },
8698 DiagnosticEntry {
8699 range: Point::new(1, 8)..Point::new(1, 9),
8700 diagnostic: Diagnostic {
8701 severity: DiagnosticSeverity::HINT,
8702 message: "error 1 hint 1".to_string(),
8703 group_id: 0,
8704 is_primary: false,
8705 ..Default::default()
8706 }
8707 },
8708 ]
8709 );
8710 assert_eq!(
8711 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8712 &[
8713 DiagnosticEntry {
8714 range: Point::new(1, 13)..Point::new(1, 15),
8715 diagnostic: Diagnostic {
8716 severity: DiagnosticSeverity::HINT,
8717 message: "error 2 hint 1".to_string(),
8718 group_id: 1,
8719 is_primary: false,
8720 ..Default::default()
8721 }
8722 },
8723 DiagnosticEntry {
8724 range: Point::new(1, 13)..Point::new(1, 15),
8725 diagnostic: Diagnostic {
8726 severity: DiagnosticSeverity::HINT,
8727 message: "error 2 hint 2".to_string(),
8728 group_id: 1,
8729 is_primary: false,
8730 ..Default::default()
8731 }
8732 },
8733 DiagnosticEntry {
8734 range: Point::new(2, 8)..Point::new(2, 17),
8735 diagnostic: Diagnostic {
8736 severity: DiagnosticSeverity::ERROR,
8737 message: "error 2".to_string(),
8738 group_id: 1,
8739 is_primary: true,
8740 ..Default::default()
8741 }
8742 }
8743 ]
8744 );
8745 }
8746
8747 #[gpui::test]
8748 async fn test_rename(cx: &mut gpui::TestAppContext) {
8749 cx.foreground().forbid_parking();
8750
8751 let mut language = Language::new(
8752 LanguageConfig {
8753 name: "Rust".into(),
8754 path_suffixes: vec!["rs".to_string()],
8755 ..Default::default()
8756 },
8757 Some(tree_sitter_rust::language()),
8758 );
8759 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8760 capabilities: lsp::ServerCapabilities {
8761 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8762 prepare_provider: Some(true),
8763 work_done_progress_options: Default::default(),
8764 })),
8765 ..Default::default()
8766 },
8767 ..Default::default()
8768 });
8769
8770 let fs = FakeFs::new(cx.background());
8771 fs.insert_tree(
8772 "/dir",
8773 json!({
8774 "one.rs": "const ONE: usize = 1;",
8775 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8776 }),
8777 )
8778 .await;
8779
8780 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8781 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8782 let buffer = project
8783 .update(cx, |project, cx| {
8784 project.open_local_buffer("/dir/one.rs", cx)
8785 })
8786 .await
8787 .unwrap();
8788
8789 let fake_server = fake_servers.next().await.unwrap();
8790
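        // Prepare the rename: the server responds with the range of the symbol to rename.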
8791 let response = project.update(cx, |project, cx| {
8792 project.prepare_rename(buffer.clone(), 7, cx)
8793 });
8794 fake_server
8795 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8796 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8797 assert_eq!(params.position, lsp::Position::new(0, 7));
8798 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8799 lsp::Position::new(0, 6),
8800 lsp::Position::new(0, 9),
8801 ))))
8802 })
8803 .next()
8804 .await
8805 .unwrap();
8806 let range = response.await.unwrap().unwrap();
8807 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8808 assert_eq!(range, 6..9);
8809
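        // Perform the rename. The resulting project transaction should contain an edited
        // buffer for each of the two files.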
8810 let response = project.update(cx, |project, cx| {
8811 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8812 });
8813 fake_server
8814 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8815 assert_eq!(
8816 params.text_document_position.text_document.uri.as_str(),
8817 "file:///dir/one.rs"
8818 );
8819 assert_eq!(
8820 params.text_document_position.position,
8821 lsp::Position::new(0, 7)
8822 );
8823 assert_eq!(params.new_name, "THREE");
8824 Ok(Some(lsp::WorkspaceEdit {
8825 changes: Some(
8826 [
8827 (
8828 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8829 vec![lsp::TextEdit::new(
8830 lsp::Range::new(
8831 lsp::Position::new(0, 6),
8832 lsp::Position::new(0, 9),
8833 ),
8834 "THREE".to_string(),
8835 )],
8836 ),
8837 (
8838 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8839 vec![
8840 lsp::TextEdit::new(
8841 lsp::Range::new(
8842 lsp::Position::new(0, 24),
8843 lsp::Position::new(0, 27),
8844 ),
8845 "THREE".to_string(),
8846 ),
8847 lsp::TextEdit::new(
8848 lsp::Range::new(
8849 lsp::Position::new(0, 35),
8850 lsp::Position::new(0, 38),
8851 ),
8852 "THREE".to_string(),
8853 ),
8854 ],
8855 ),
8856 ]
8857 .into_iter()
8858 .collect(),
8859 ),
8860 ..Default::default()
8861 }))
8862 })
8863 .next()
8864 .await
8865 .unwrap();
8866 let mut transaction = response.await.unwrap().0;
8867 assert_eq!(transaction.len(), 2);
8868 assert_eq!(
8869 transaction
8870 .remove_entry(&buffer)
8871 .unwrap()
8872 .0
8873 .read_with(cx, |buffer, _| buffer.text()),
8874 "const THREE: usize = 1;"
8875 );
8876 assert_eq!(
8877 transaction
8878 .into_keys()
8879 .next()
8880 .unwrap()
8881 .read_with(cx, |buffer, _| buffer.text()),
8882 "const TWO: usize = one::THREE + one::THREE;"
8883 );
8884 }
8885
8886 #[gpui::test]
8887 async fn test_search(cx: &mut gpui::TestAppContext) {
8888 let fs = FakeFs::new(cx.background());
8889 fs.insert_tree(
8890 "/dir",
8891 json!({
8892 "one.rs": "const ONE: usize = 1;",
8893 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8894 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8895 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8896 }),
8897 )
8898 .await;
8899 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8900 assert_eq!(
8901 search(&project, SearchQuery::text("TWO", false, true), cx)
8902 .await
8903 .unwrap(),
8904 HashMap::from_iter([
8905 ("two.rs".to_string(), vec![6..9]),
8906 ("three.rs".to_string(), vec![37..40])
8907 ])
8908 );
8909
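        // Edit an open buffer without saving it. Subsequent searches should reflect the
        // buffer's in-memory contents rather than the file on disk.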
8910 let buffer_4 = project
8911 .update(cx, |project, cx| {
8912 project.open_local_buffer("/dir/four.rs", cx)
8913 })
8914 .await
8915 .unwrap();
8916 buffer_4.update(cx, |buffer, cx| {
8917 let text = "two::TWO";
8918 buffer.edit([(20..28, text), (31..43, text)], cx);
8919 });
8920
8921 assert_eq!(
8922 search(&project, SearchQuery::text("TWO", false, true), cx)
8923 .await
8924 .unwrap(),
8925 HashMap::from_iter([
8926 ("two.rs".to_string(), vec![6..9]),
8927 ("three.rs".to_string(), vec![37..40]),
8928 ("four.rs".to_string(), vec![25..28, 36..39])
8929 ])
8930 );
8931
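        // Runs a project-wide search and flattens the results into a map from file path
        // to the offset ranges of the matches within that file.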
8932 async fn search(
8933 project: &ModelHandle<Project>,
8934 query: SearchQuery,
8935 cx: &mut gpui::TestAppContext,
8936 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8937 let results = project
8938 .update(cx, |project, cx| project.search(query, cx))
8939 .await?;
8940
8941 Ok(results
8942 .into_iter()
8943 .map(|(buffer, ranges)| {
8944 buffer.read_with(cx, |buffer, _| {
8945 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8946 let ranges = ranges
8947 .into_iter()
8948 .map(|range| range.to_offset(buffer))
8949 .collect::<Vec<_>>();
8950 (path, ranges)
8951 })
8952 })
8953 .collect())
8954 }
8955 }
8956}