mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

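/// Tracks every open `Project` in the app, together with the database handle
/// used to persist lightweight per-project state such as the online flag.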
pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

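/// The in-memory state of a single open project: its worktrees, open buffers,
/// collaborators, running language servers, and the client state that
/// distinguishes a locally hosted project from one joined as a guest.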
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

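/// Distinguishes a project hosted on this machine (which may be registered,
/// online, and shared with collaborators) from one joined remotely as a guest.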
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id: Task<Option<()>>,
        _maintain_online_status: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}

impl HoverBlock {
    fn try_new(marked_string: MarkedString) -> Option<Self> {
        let result = match marked_string {
            MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
                text: value,
                language: Some(language),
            },
            MarkedString::String(text) => HoverBlock {
                text,
                language: None,
            },
        };
        if result.text.is_empty() {
            None
        } else {
            Some(result)
        }
    }
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

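/// The buffer transactions produced by a single logical operation (for
/// example, a language server's workspace edit), keyed by the buffers they
/// apply to.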
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(
        language_server_id: usize,
        diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
    ) -> Self {
        let mut this = Self {
            language_server_id,
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: self.language_server_id as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

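/// Identifies a file-system entry within a project's worktrees. Ids are
/// allocated from the project-wide `next_entry_id` counter.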
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
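    /// Registers the RPC message and request handlers through which a project
    /// responds to collaboration traffic from the server.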
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

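    /// Creates a project hosted on this machine. Two background tasks keep the
    /// project registered with the server while the client is connected and
    /// react to changes of its online status.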
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id = cx.spawn_weak({
                let mut status_rx = client.clone().status();
                move |this, mut cx| async move {
                    while let Some(status) = status_rx.recv().await {
                        let this = this.upgrade(&cx)?;
                        if status.is_connected() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let (online_tx, online_rx) = watch::channel_with(online);
            let _maintain_online_status = cx.spawn_weak({
                let mut online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    while online_rx.recv().await.is_some() {
                        let this = this.upgrade(&cx)?;
                        this.update(&mut cx, |this, cx| {
                            if !this.is_online() {
                                this.unshared(cx);
                            }
                            this.metadata_changed(false, cx)
                        });
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id,
                    _maintain_online_status,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }

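    /// Joins a project hosted by another peer, replicating its worktrees,
    /// collaborators, and language server statuses from the `JoinProject`
    /// response.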
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        if !cx.read(|cx| cx.has_global::<Settings>()) {
            cx.update(|cx| cx.set_global(Settings::test(cx)));
        }

        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

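    /// Restores the persisted online state of a local project, falling back to
    /// the `projects_online_by_default` setting when nothing has been stored
    /// for its worktrees.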
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }

    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }

    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let settings = cx.global::<Settings>();

        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
                {
                    if settings.enable_language_server(Some(&language.name())) {
                        let worktree = file.worktree.read(cx);
                        language_servers_to_start.push((
                            worktree.id(),
                            worktree.as_local().unwrap().abs_path().clone(),
                            language.clone(),
                        ));
                    }
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        for language in self.languages.to_vec() {
            if let Some(lsp_adapter) = language.lsp_adapter() {
                if !settings.enable_language_server(Some(&language.name())) {
                    let lsp_name = lsp_adapter.name();
                    for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
                        if lsp_name == *started_lsp_name {
                            language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                        }
                    }
                }
            }
        }

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree_id, worktree_path, language) in language_servers_to_start {
            self.start_language_server(worktree_id, worktree_path, language, cx);
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }

    pub fn is_online(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
            ProjectClientState::Remote { .. } => true,
        }
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.client_subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }

    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }

            let response = self.client.request(proto::RegisterProject {
                online: *online_rx.borrow(),
            });
            cx.spawn(|this, mut cx| async move {
                let remote_id = response.await?.project_id;
                this.update(&mut cx, |this, cx| {
                    if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                        *remote_id_tx.borrow_mut() = Some(remote_id);
                    }

                    this.metadata_changed(false, cx);
                    cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                    this.client_subscriptions
                        .push(this.client.add_model_for_remote_entity(remote_id, cx));
                    Ok(())
                })
            })
        } else {
            Task::ready(Err(anyhow!("can't register a remote project")))
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn shared_remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local {
                remote_id_rx,
                is_shared,
                ..
            } => {
                if *is_shared {
                    *remote_id_rx.borrow()
                } else {
                    None
                }
            }
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            // Broadcast worktrees only if the project is public.
            let worktrees = if *online_rx.borrow() {
                self.worktrees
                    .iter()
                    .filter_map(|worktree| {
                        worktree
                            .upgrade(&cx)
                            .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
                    })
                    .collect()
            } else {
                Default::default()
            };
            if let Some(project_id) = *remote_id_rx.borrow() {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees,
                        online: *online_rx.borrow(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
        self.worktrees
            .iter()
            .filter_map(|worktree| {
                let worktree = worktree.upgrade(&cx)?.read(cx);
                if worktree.is_visible() {
                    Some(format!(
                        "project-path-online:{}",
                        worktree.as_local().unwrap().abs_path().to_string_lossy()
                    ))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(&path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees(cx) {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

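    /// Shares a registered, online local project with collaborators: buffer and
    /// worktree handles are upgraded to strong references, worktrees are shared,
    /// and the currently running language servers are announced.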
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if !self.is_online() {
            return Task::ready(Err(anyhow!("can't share an offline project")));
        }

        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: *server_id as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = if self.is_online() && allow {
                Some(self.share(cx))
            } else {
                None
            };
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    if let Some(share) = share {
                        share.await?;
                    }
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }

    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

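    /// Tracks a newly opened buffer, applying any operations that arrived while
    /// it was loading, and wires it up to its language and language server.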
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_servers
                            .get(&(worktree_id, adapter.name()))
                            .cloned();
                    }
                }

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }

    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                } else if let Some(project_id) = self.remote_id() {
                    let _ = self
                        .client
                        .send(proto::RegisterProjectActivity { project_id });
                }
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }

                // After saving a buffer, simulate disk-based diagnostics being finished for languages
                // that don't support a disk-based progress token.
                let (lsp_adapter, language_server) =
                    self.language_server_for_buffer(buffer.read(cx), cx)?;
                if lsp_adapter
                    .disk_based_diagnostics_progress_token()
                    .is_none()
                {
                    let server_id = language_server.server_id();
                    self.disk_based_diagnostics_finished(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
            _ => {}
        }

        None
    }

1947 fn language_servers_for_worktree(
1948 &self,
1949 worktree_id: WorktreeId,
1950 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1951 self.language_servers.iter().filter_map(
1952 move |((language_server_worktree_id, _), server)| {
1953 if *language_server_worktree_id == worktree_id {
1954 Some(server)
1955 } else {
1956 None
1957 }
1958 },
1959 )
1960 }
1961
1962 fn assign_language_to_buffer(
1963 &mut self,
1964 buffer: &ModelHandle<Buffer>,
1965 cx: &mut ModelContext<Self>,
1966 ) -> Option<()> {
1967 // If the buffer has a language, set it and start the language server if we haven't already.
1968 let full_path = buffer.read(cx).file()?.full_path(cx);
1969 let language = self.languages.select_language(&full_path)?;
1970 buffer.update(cx, |buffer, cx| {
1971 buffer.set_language(Some(language.clone()), cx);
1972 });
1973
1974 let file = File::from_dyn(buffer.read(cx).file())?;
1975 let worktree = file.worktree.read(cx).as_local()?;
1976 let worktree_id = worktree.id();
1977 let worktree_abs_path = worktree.abs_path().clone();
1978 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1979
1980 None
1981 }
1982
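// Starts a language server for the given worktree and language unless one has already
// been started for that pair. Once the server is initialized, this wires up its
// notification and request handlers, records its status, forwards the current
// settings, and opens every matching buffer with it.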
1983 fn start_language_server(
1984 &mut self,
1985 worktree_id: WorktreeId,
1986 worktree_path: Arc<Path>,
1987 language: Arc<Language>,
1988 cx: &mut ModelContext<Self>,
1989 ) {
1990 if !cx
1991 .global::<Settings>()
1992 .enable_language_server(Some(&language.name()))
1993 {
1994 return;
1995 }
1996
1997 let adapter = if let Some(adapter) = language.lsp_adapter() {
1998 adapter
1999 } else {
2000 return;
2001 };
2002 let key = (worktree_id, adapter.name());
2003 self.started_language_servers
2004 .entry(key.clone())
2005 .or_insert_with(|| {
2006 let server_id = post_inc(&mut self.next_language_server_id);
2007 let language_server = self.languages.start_language_server(
2008 server_id,
2009 language.clone(),
2010 worktree_path,
2011 self.client.http_client(),
2012 cx,
2013 );
2014 cx.spawn_weak(|this, mut cx| async move {
2015 let language_server = language_server?.await.log_err()?;
2016 let language_server = language_server
2017 .initialize(adapter.initialization_options())
2018 .await
2019 .log_err()?;
2020 let this = this.upgrade(&cx)?;
2021 let disk_based_diagnostics_progress_token =
2022 adapter.disk_based_diagnostics_progress_token();
2023
2024 language_server
2025 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2026 let this = this.downgrade();
2027 let adapter = adapter.clone();
2028 move |params, mut cx| {
2029 if let Some(this) = this.upgrade(&cx) {
2030 this.update(&mut cx, |this, cx| {
2031 this.on_lsp_diagnostics_published(
2032 server_id, params, &adapter, cx,
2033 );
2034 });
2035 }
2036 }
2037 })
2038 .detach();
2039
2040 language_server
2041 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2042 let settings = this
2043 .read_with(&cx, |this, _| this.language_server_settings.clone());
2044 move |params, _| {
2045 let settings = settings.lock().clone();
2046 async move {
2047 Ok(params
2048 .items
2049 .into_iter()
2050 .map(|item| {
2051 if let Some(section) = &item.section {
2052 settings
2053 .get(section)
2054 .cloned()
2055 .unwrap_or(serde_json::Value::Null)
2056 } else {
2057 settings.clone()
2058 }
2059 })
2060 .collect())
2061 }
2062 }
2063 })
2064 .detach();
2065
2066 // Even though we don't have handling for these requests, respond to them to
2067 // avoid stalling language servers such as `gopls`, which wait for a response
2068 // to these requests when initializing.
2069 language_server
2070 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2071 let this = this.downgrade();
2072 move |params, mut cx| async move {
2073 if let Some(this) = this.upgrade(&cx) {
2074 this.update(&mut cx, |this, _| {
2075 if let Some(status) =
2076 this.language_server_statuses.get_mut(&server_id)
2077 {
2078 if let lsp::NumberOrString::String(token) = params.token
2079 {
2080 status.progress_tokens.insert(token);
2081 }
2082 }
2083 });
2084 }
2085 Ok(())
2086 }
2087 })
2088 .detach();
2089 language_server
2090 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2091 Ok(())
2092 })
2093 .detach();
2094
2095 language_server
2096 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2097 let this = this.downgrade();
2098 let adapter = adapter.clone();
2099 let language_server = language_server.clone();
2100 move |params, cx| {
2101 Self::on_lsp_workspace_edit(
2102 this,
2103 params,
2104 server_id,
2105 adapter.clone(),
2106 language_server.clone(),
2107 cx,
2108 )
2109 }
2110 })
2111 .detach();
2112
2113 language_server
2114 .on_notification::<lsp::notification::Progress, _>({
2115 let this = this.downgrade();
2116 move |params, mut cx| {
2117 if let Some(this) = this.upgrade(&cx) {
2118 this.update(&mut cx, |this, cx| {
2119 this.on_lsp_progress(
2120 params,
2121 server_id,
2122 disk_based_diagnostics_progress_token,
2123 cx,
2124 );
2125 });
2126 }
2127 }
2128 })
2129 .detach();
2130
2131 this.update(&mut cx, |this, cx| {
2132 this.language_servers
2133 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2134 this.language_server_statuses.insert(
2135 server_id,
2136 LanguageServerStatus {
2137 name: language_server.name().to_string(),
2138 pending_work: Default::default(),
2139 has_pending_diagnostic_updates: false,
2140 progress_tokens: Default::default(),
2141 },
2142 );
2143 language_server
2144 .notify::<lsp::notification::DidChangeConfiguration>(
2145 lsp::DidChangeConfigurationParams {
2146 settings: this.language_server_settings.lock().clone(),
2147 },
2148 )
2149 .ok();
2150
2151 if let Some(project_id) = this.shared_remote_id() {
2152 this.client
2153 .send(proto::StartLanguageServer {
2154 project_id,
2155 server: Some(proto::LanguageServer {
2156 id: server_id as u64,
2157 name: language_server.name().to_string(),
2158 }),
2159 })
2160 .log_err();
2161 }
2162
2163 // Tell the language server about every open buffer in the worktree that matches the language.
2164 for buffer in this.opened_buffers.values() {
2165 if let Some(buffer_handle) = buffer.upgrade(cx) {
2166 let buffer = buffer_handle.read(cx);
2167 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2168 file
2169 } else {
2170 continue;
2171 };
2172 let language = if let Some(language) = buffer.language() {
2173 language
2174 } else {
2175 continue;
2176 };
2177 if file.worktree.read(cx).id() != key.0
2178 || language.lsp_adapter().map(|a| a.name())
2179 != Some(key.1.clone())
2180 {
2181 continue;
2182 }
2183
2184 let file = file.as_local()?;
2185 let versions = this
2186 .buffer_snapshots
2187 .entry(buffer.remote_id())
2188 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2189 let (version, initial_snapshot) = versions.last().unwrap();
2190 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2191 let language_id = adapter.id_for_language(language.name().as_ref());
2192 language_server
2193 .notify::<lsp::notification::DidOpenTextDocument>(
2194 lsp::DidOpenTextDocumentParams {
2195 text_document: lsp::TextDocumentItem::new(
2196 uri,
2197 language_id.unwrap_or_default(),
2198 *version,
2199 initial_snapshot.text(),
2200 ),
2201 },
2202 )
2203 .log_err()?;
2204 buffer_handle.update(cx, |buffer, cx| {
2205 buffer.set_completion_triggers(
2206 language_server
2207 .capabilities()
2208 .completion_provider
2209 .as_ref()
2210 .and_then(|provider| {
2211 provider.trigger_characters.clone()
2212 })
2213 .unwrap_or_default(),
2214 cx,
2215 )
2216 });
2217 }
2218 }
2219
2220 cx.notify();
2221 Some(())
2222 });
2223
2224 Some(language_server)
2225 })
2226 });
2227 }
2228
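// Removes the language server for the given worktree and adapter and shuts it down in
// the background, returning a task that resolves once shutdown has completed.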
2229 fn stop_language_server(
2230 &mut self,
2231 worktree_id: WorktreeId,
2232 adapter_name: LanguageServerName,
2233 cx: &mut ModelContext<Self>,
2234 ) -> Task<()> {
2235 let key = (worktree_id, adapter_name);
2236 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2237 self.language_server_statuses
2238 .remove(&language_server.server_id());
2239 cx.notify();
2240 }
2241
2242 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2243 cx.spawn_weak(|this, mut cx| async move {
2244 if let Some(language_server) = started_language_server.await {
2245 if let Some(shutdown) = language_server.shutdown() {
2246 shutdown.await;
2247 }
2248
2249 if let Some(this) = this.upgrade(&cx) {
2250 this.update(&mut cx, |this, cx| {
2251 this.language_server_statuses
2252 .remove(&language_server.server_id());
2253 cx.notify();
2254 });
2255 }
2256 }
2257 })
2258 } else {
2259 Task::ready(())
2260 }
2261 }
2262
2263 pub fn restart_language_servers_for_buffers(
2264 &mut self,
2265 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2266 cx: &mut ModelContext<Self>,
2267 ) -> Option<()> {
2268 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2269 .into_iter()
2270 .filter_map(|buffer| {
2271 let file = File::from_dyn(buffer.read(cx).file())?;
2272 let worktree = file.worktree.read(cx).as_local()?;
2273 let worktree_id = worktree.id();
2274 let worktree_abs_path = worktree.abs_path().clone();
2275 let full_path = file.full_path(cx);
2276 Some((worktree_id, worktree_abs_path, full_path))
2277 })
2278 .collect();
2279 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2280 let language = self.languages.select_language(&full_path)?;
2281 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2282 }
2283
2284 None
2285 }
2286
2287 fn restart_language_server(
2288 &mut self,
2289 worktree_id: WorktreeId,
2290 worktree_path: Arc<Path>,
2291 language: Arc<Language>,
2292 cx: &mut ModelContext<Self>,
2293 ) {
2294 let adapter = if let Some(adapter) = language.lsp_adapter() {
2295 adapter
2296 } else {
2297 return;
2298 };
2299
2300 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2301 cx.spawn_weak(|this, mut cx| async move {
2302 stop.await;
2303 if let Some(this) = this.upgrade(&cx) {
2304 this.update(&mut cx, |this, cx| {
2305 this.start_language_server(worktree_id, worktree_path, language, cx);
2306 });
2307 }
2308 })
2309 .detach();
2310 }
2311
2312 fn on_lsp_diagnostics_published(
2313 &mut self,
2314 server_id: usize,
2315 mut params: lsp::PublishDiagnosticsParams,
2316 adapter: &Arc<dyn LspAdapter>,
2317 cx: &mut ModelContext<Self>,
2318 ) {
2319 adapter.process_diagnostics(&mut params);
2320 self.update_diagnostics(
2321 server_id,
2322 params,
2323 adapter.disk_based_diagnostic_sources(),
2324 cx,
2325 )
2326 .log_err();
2327 }
2328
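// Translates `$/progress` notifications into status updates. The adapter's disk-based
// diagnostics token is handled specially so the start and end of disk-based diagnostics
// can be broadcast to collaborators; all other tokens update the server's pending work.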
2329 fn on_lsp_progress(
2330 &mut self,
2331 progress: lsp::ProgressParams,
2332 server_id: usize,
2333 disk_based_diagnostics_progress_token: Option<&str>,
2334 cx: &mut ModelContext<Self>,
2335 ) {
2336 let token = match progress.token {
2337 lsp::NumberOrString::String(token) => token,
2338 lsp::NumberOrString::Number(token) => {
2339 log::info!("skipping numeric progress token {}", token);
2340 return;
2341 }
2342 };
2343 let progress = match progress.value {
2344 lsp::ProgressParamsValue::WorkDone(value) => value,
2345 };
2346 let language_server_status =
2347 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2348 status
2349 } else {
2350 return;
2351 };
2352
2353 if !language_server_status.progress_tokens.contains(&token) {
2354 return;
2355 }
2356
2357 match progress {
2358 lsp::WorkDoneProgress::Begin(report) => {
2359 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2360 language_server_status.has_pending_diagnostic_updates = true;
2361 self.disk_based_diagnostics_started(server_id, cx);
2362 self.broadcast_language_server_update(
2363 server_id,
2364 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2365 proto::LspDiskBasedDiagnosticsUpdating {},
2366 ),
2367 );
2368 } else {
2369 self.on_lsp_work_start(
2370 server_id,
2371 token.clone(),
2372 LanguageServerProgress {
2373 message: report.message.clone(),
2374 percentage: report.percentage.map(|p| p as usize),
2375 last_update_at: Instant::now(),
2376 },
2377 cx,
2378 );
2379 self.broadcast_language_server_update(
2380 server_id,
2381 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2382 token,
2383 message: report.message,
2384 percentage: report.percentage.map(|p| p as u32),
2385 }),
2386 );
2387 }
2388 }
2389 lsp::WorkDoneProgress::Report(report) => {
2390 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2391 self.on_lsp_work_progress(
2392 server_id,
2393 token.clone(),
2394 LanguageServerProgress {
2395 message: report.message.clone(),
2396 percentage: report.percentage.map(|p| p as usize),
2397 last_update_at: Instant::now(),
2398 },
2399 cx,
2400 );
2401 self.broadcast_language_server_update(
2402 server_id,
2403 proto::update_language_server::Variant::WorkProgress(
2404 proto::LspWorkProgress {
2405 token,
2406 message: report.message,
2407 percentage: report.percentage.map(|p| p as u32),
2408 },
2409 ),
2410 );
2411 }
2412 }
2413 lsp::WorkDoneProgress::End(_) => {
2414 language_server_status.progress_tokens.remove(&token);
2415
2416 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2417 language_server_status.has_pending_diagnostic_updates = false;
2418 self.disk_based_diagnostics_finished(server_id, cx);
2419 self.broadcast_language_server_update(
2420 server_id,
2421 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2422 proto::LspDiskBasedDiagnosticsUpdated {},
2423 ),
2424 );
2425 } else {
2426 self.on_lsp_work_end(server_id, token.clone(), cx);
2427 self.broadcast_language_server_update(
2428 server_id,
2429 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2430 token,
2431 }),
2432 );
2433 }
2434 }
2435 }
2436 }
2437
2438 fn on_lsp_work_start(
2439 &mut self,
2440 language_server_id: usize,
2441 token: String,
2442 progress: LanguageServerProgress,
2443 cx: &mut ModelContext<Self>,
2444 ) {
2445 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2446 status.pending_work.insert(token, progress);
2447 cx.notify();
2448 }
2449 }
2450
2451 fn on_lsp_work_progress(
2452 &mut self,
2453 language_server_id: usize,
2454 token: String,
2455 progress: LanguageServerProgress,
2456 cx: &mut ModelContext<Self>,
2457 ) {
2458 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2459 let entry = status
2460 .pending_work
2461 .entry(token)
2462 .or_insert(LanguageServerProgress {
2463 message: Default::default(),
2464 percentage: Default::default(),
2465 last_update_at: progress.last_update_at,
2466 });
2467 if progress.message.is_some() {
2468 entry.message = progress.message;
2469 }
2470 if progress.percentage.is_some() {
2471 entry.percentage = progress.percentage;
2472 }
2473 entry.last_update_at = progress.last_update_at;
2474 cx.notify();
2475 }
2476 }
2477
2478 fn on_lsp_work_end(
2479 &mut self,
2480 language_server_id: usize,
2481 token: String,
2482 cx: &mut ModelContext<Self>,
2483 ) {
2484 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2485 status.pending_work.remove(&token);
2486 cx.notify();
2487 }
2488 }
2489
2490 async fn on_lsp_workspace_edit(
2491 this: WeakModelHandle<Self>,
2492 params: lsp::ApplyWorkspaceEditParams,
2493 server_id: usize,
2494 adapter: Arc<dyn LspAdapter>,
2495 language_server: Arc<LanguageServer>,
2496 mut cx: AsyncAppContext,
2497 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2498 let this = this
2499 .upgrade(&cx)
2500 .ok_or_else(|| anyhow!("project closed"))?;
2501 let transaction = Self::deserialize_workspace_edit(
2502 this.clone(),
2503 params.edit,
2504 true,
2505 adapter.clone(),
2506 language_server.clone(),
2507 &mut cx,
2508 )
2509 .await
2510 .log_err();
2511 this.update(&mut cx, |this, _| {
2512 if let Some(transaction) = transaction {
2513 this.last_workspace_edits_by_language_server
2514 .insert(server_id, transaction);
2515 }
2516 });
2517 Ok(lsp::ApplyWorkspaceEditResponse {
2518 applied: true,
2519 failed_change: None,
2520 failure_reason: None,
2521 })
2522 }
2523
2524 fn broadcast_language_server_update(
2525 &self,
2526 language_server_id: usize,
2527 event: proto::update_language_server::Variant,
2528 ) {
2529 if let Some(project_id) = self.shared_remote_id() {
2530 self.client
2531 .send(proto::UpdateLanguageServer {
2532 project_id,
2533 language_server_id: language_server_id as u64,
2534 variant: Some(event),
2535 })
2536 .log_err();
2537 }
2538 }
2539
2540 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2541 for (_, server) in self.language_servers.values() {
2542 server
2543 .notify::<lsp::notification::DidChangeConfiguration>(
2544 lsp::DidChangeConfigurationParams {
2545 settings: settings.clone(),
2546 },
2547 )
2548 .ok();
2549 }
2550 *self.language_server_settings.lock() = settings;
2551 }
2552
2553 pub fn language_server_statuses(
2554 &self,
2555 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2556 self.language_server_statuses.values()
2557 }
2558
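// Converts an LSP `publishDiagnostics` payload into diagnostic entries: each primary
// diagnostic gets a fresh group id, its related information is attached to that group,
// and severity/unnecessary flags reported for supporting diagnostics are copied back
// onto the corresponding related entries.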
2559 pub fn update_diagnostics(
2560 &mut self,
2561 language_server_id: usize,
2562 params: lsp::PublishDiagnosticsParams,
2563 disk_based_sources: &[&str],
2564 cx: &mut ModelContext<Self>,
2565 ) -> Result<()> {
2566 let abs_path = params
2567 .uri
2568 .to_file_path()
2569 .map_err(|_| anyhow!("URI is not a file"))?;
2570 let mut diagnostics = Vec::default();
2571 let mut primary_diagnostic_group_ids = HashMap::default();
2572 let mut sources_by_group_id = HashMap::default();
2573 let mut supporting_diagnostics = HashMap::default();
2574 for diagnostic in &params.diagnostics {
2575 let source = diagnostic.source.as_ref();
2576 let code = diagnostic.code.as_ref().map(|code| match code {
2577 lsp::NumberOrString::Number(code) => code.to_string(),
2578 lsp::NumberOrString::String(code) => code.clone(),
2579 });
2580 let range = range_from_lsp(diagnostic.range);
2581 let is_supporting = diagnostic
2582 .related_information
2583 .as_ref()
2584 .map_or(false, |infos| {
2585 infos.iter().any(|info| {
2586 primary_diagnostic_group_ids.contains_key(&(
2587 source,
2588 code.clone(),
2589 range_from_lsp(info.location.range),
2590 ))
2591 })
2592 });
2593
2594 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2595 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2596 });
2597
2598 if is_supporting {
2599 supporting_diagnostics.insert(
2600 (source, code.clone(), range),
2601 (diagnostic.severity, is_unnecessary),
2602 );
2603 } else {
2604 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2605 let is_disk_based = source.map_or(false, |source| {
2606 disk_based_sources.contains(&source.as_str())
2607 });
2608
2609 sources_by_group_id.insert(group_id, source);
2610 primary_diagnostic_group_ids
2611 .insert((source, code.clone(), range.clone()), group_id);
2612
2613 diagnostics.push(DiagnosticEntry {
2614 range,
2615 diagnostic: Diagnostic {
2616 code: code.clone(),
2617 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2618 message: diagnostic.message.clone(),
2619 group_id,
2620 is_primary: true,
2621 is_valid: true,
2622 is_disk_based,
2623 is_unnecessary,
2624 },
2625 });
2626 if let Some(infos) = &diagnostic.related_information {
2627 for info in infos {
2628 if info.location.uri == params.uri && !info.message.is_empty() {
2629 let range = range_from_lsp(info.location.range);
2630 diagnostics.push(DiagnosticEntry {
2631 range,
2632 diagnostic: Diagnostic {
2633 code: code.clone(),
2634 severity: DiagnosticSeverity::INFORMATION,
2635 message: info.message.clone(),
2636 group_id,
2637 is_primary: false,
2638 is_valid: true,
2639 is_disk_based,
2640 is_unnecessary: false,
2641 },
2642 });
2643 }
2644 }
2645 }
2646 }
2647 }
2648
2649 for entry in &mut diagnostics {
2650 let diagnostic = &mut entry.diagnostic;
2651 if !diagnostic.is_primary {
2652 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2653 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2654 source,
2655 diagnostic.code.clone(),
2656 entry.range.clone(),
2657 )) {
2658 if let Some(severity) = severity {
2659 diagnostic.severity = severity;
2660 }
2661 diagnostic.is_unnecessary = is_unnecessary;
2662 }
2663 }
2664 }
2665
2666 self.update_diagnostic_entries(
2667 language_server_id,
2668 abs_path,
2669 params.version,
2670 diagnostics,
2671 cx,
2672 )?;
2673 Ok(())
2674 }
2675
2676 pub fn update_diagnostic_entries(
2677 &mut self,
2678 language_server_id: usize,
2679 abs_path: PathBuf,
2680 version: Option<i32>,
2681 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2682 cx: &mut ModelContext<Project>,
2683 ) -> Result<(), anyhow::Error> {
2684 let (worktree, relative_path) = self
2685 .find_local_worktree(&abs_path, cx)
2686 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2687
2688 let project_path = ProjectPath {
2689 worktree_id: worktree.read(cx).id(),
2690 path: relative_path.into(),
2691 };
2692 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2693 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2694 }
2695
2696 let updated = worktree.update(cx, |worktree, cx| {
2697 worktree
2698 .as_local_mut()
2699 .ok_or_else(|| anyhow!("not a local worktree"))?
2700 .update_diagnostics(
2701 language_server_id,
2702 project_path.path.clone(),
2703 diagnostics,
2704 cx,
2705 )
2706 })?;
2707 if updated {
2708 cx.emit(Event::DiagnosticsUpdated {
2709 language_server_id,
2710 path: project_path,
2711 });
2712 }
2713 Ok(())
2714 }
2715
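// Anchors the diagnostics in the buffer snapshot corresponding to the reported LSP
// version, translating disk-based diagnostics through any unsaved edits and clipping
// (or minimally expanding) their ranges before installing them on the buffer.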
2716 fn update_buffer_diagnostics(
2717 &mut self,
2718 buffer: &ModelHandle<Buffer>,
2719 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2720 version: Option<i32>,
2721 cx: &mut ModelContext<Self>,
2722 ) -> Result<()> {
2723 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2724 Ordering::Equal
2725 .then_with(|| b.is_primary.cmp(&a.is_primary))
2726 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2727 .then_with(|| a.severity.cmp(&b.severity))
2728 .then_with(|| a.message.cmp(&b.message))
2729 }
2730
2731 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2732
2733 diagnostics.sort_unstable_by(|a, b| {
2734 Ordering::Equal
2735 .then_with(|| a.range.start.cmp(&b.range.start))
2736 .then_with(|| b.range.end.cmp(&a.range.end))
2737 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2738 });
2739
2740 let mut sanitized_diagnostics = Vec::new();
2741 let edits_since_save = Patch::new(
2742 snapshot
2743 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2744 .collect(),
2745 );
2746 for entry in diagnostics {
2747 let start;
2748 let end;
2749 if entry.diagnostic.is_disk_based {
2750 // Some diagnostics are based on files on disk instead of buffers'
2751 // current contents. Adjust these diagnostics' ranges to reflect
2752 // any unsaved edits.
2753 start = edits_since_save.old_to_new(entry.range.start);
2754 end = edits_since_save.old_to_new(entry.range.end);
2755 } else {
2756 start = entry.range.start;
2757 end = entry.range.end;
2758 }
2759
2760 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2761 ..snapshot.clip_point_utf16(end, Bias::Right);
2762
2763 // Expand empty ranges by one character
2764 if range.start == range.end {
2765 range.end.column += 1;
2766 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2767 if range.start == range.end && range.end.column > 0 {
2768 range.start.column -= 1;
2769 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2770 }
2771 }
2772
2773 sanitized_diagnostics.push(DiagnosticEntry {
2774 range,
2775 diagnostic: entry.diagnostic,
2776 });
2777 }
2778 drop(edits_since_save);
2779
2780 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2781 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2782 Ok(())
2783 }
2784
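// Reloads the given buffers from disk when they are dirty: remote buffers are reloaded
// via a request to the host, local buffers directly, and the resulting undo
// transactions are gathered into a ProjectTransaction.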
2785 pub fn reload_buffers(
2786 &self,
2787 buffers: HashSet<ModelHandle<Buffer>>,
2788 push_to_history: bool,
2789 cx: &mut ModelContext<Self>,
2790 ) -> Task<Result<ProjectTransaction>> {
2791 let mut local_buffers = Vec::new();
2792 let mut remote_buffers = None;
2793 for buffer_handle in buffers {
2794 let buffer = buffer_handle.read(cx);
2795 if buffer.is_dirty() {
2796 if let Some(file) = File::from_dyn(buffer.file()) {
2797 if file.is_local() {
2798 local_buffers.push(buffer_handle);
2799 } else {
2800 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2801 }
2802 }
2803 }
2804 }
2805
2806 let remote_buffers = self.remote_id().zip(remote_buffers);
2807 let client = self.client.clone();
2808
2809 cx.spawn(|this, mut cx| async move {
2810 let mut project_transaction = ProjectTransaction::default();
2811
2812 if let Some((project_id, remote_buffers)) = remote_buffers {
2813 let response = client
2814 .request(proto::ReloadBuffers {
2815 project_id,
2816 buffer_ids: remote_buffers
2817 .iter()
2818 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2819 .collect(),
2820 })
2821 .await?
2822 .transaction
2823 .ok_or_else(|| anyhow!("missing transaction"))?;
2824 project_transaction = this
2825 .update(&mut cx, |this, cx| {
2826 this.deserialize_project_transaction(response, push_to_history, cx)
2827 })
2828 .await?;
2829 }
2830
2831 for buffer in local_buffers {
2832 let transaction = buffer
2833 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2834 .await?;
2835 buffer.update(&mut cx, |buffer, cx| {
2836 if let Some(transaction) = transaction {
2837 if !push_to_history {
2838 buffer.forget_transaction(transaction.id);
2839 }
2840 project_transaction.0.insert(cx.handle(), transaction);
2841 }
2842 });
2843 }
2844
2845 Ok(project_transaction)
2846 })
2847 }
2848
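// Formats the given buffers. Remote buffers are formatted via the host; local buffers
// use their language server, preferring whole-document formatting and falling back to
// range formatting over the entire buffer when only that is supported.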
2849 pub fn format(
2850 &self,
2851 buffers: HashSet<ModelHandle<Buffer>>,
2852 push_to_history: bool,
2853 cx: &mut ModelContext<Project>,
2854 ) -> Task<Result<ProjectTransaction>> {
2855 let mut local_buffers = Vec::new();
2856 let mut remote_buffers = None;
2857 for buffer_handle in buffers {
2858 let buffer = buffer_handle.read(cx);
2859 if let Some(file) = File::from_dyn(buffer.file()) {
2860 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2861 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2862 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2863 }
2864 } else {
2865 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2866 }
2867 } else {
2868 return Task::ready(Ok(Default::default()));
2869 }
2870 }
2871
2872 let remote_buffers = self.remote_id().zip(remote_buffers);
2873 let client = self.client.clone();
2874
2875 cx.spawn(|this, mut cx| async move {
2876 let mut project_transaction = ProjectTransaction::default();
2877
2878 if let Some((project_id, remote_buffers)) = remote_buffers {
2879 let response = client
2880 .request(proto::FormatBuffers {
2881 project_id,
2882 buffer_ids: remote_buffers
2883 .iter()
2884 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2885 .collect(),
2886 })
2887 .await?
2888 .transaction
2889 .ok_or_else(|| anyhow!("missing transaction"))?;
2890 project_transaction = this
2891 .update(&mut cx, |this, cx| {
2892 this.deserialize_project_transaction(response, push_to_history, cx)
2893 })
2894 .await?;
2895 }
2896
2897 for (buffer, buffer_abs_path, language_server) in local_buffers {
2898 let text_document = lsp::TextDocumentIdentifier::new(
2899 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2900 );
2901 let capabilities = &language_server.capabilities();
2902 let tab_size = cx.update(|cx| {
2903 let language_name = buffer.read(cx).language().map(|language| language.name());
2904 cx.global::<Settings>().tab_size(language_name.as_deref())
2905 });
2906 let lsp_edits = if capabilities
2907 .document_formatting_provider
2908 .as_ref()
2909 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2910 {
2911 language_server
2912 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2913 text_document,
2914 options: lsp::FormattingOptions {
2915 tab_size: tab_size.into(),
2916 insert_spaces: true,
2917 insert_final_newline: Some(true),
2918 ..Default::default()
2919 },
2920 work_done_progress_params: Default::default(),
2921 })
2922 .await?
2923 } else if capabilities
2924 .document_range_formatting_provider
2925 .as_ref()
2926 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2927 {
2928 let buffer_start = lsp::Position::new(0, 0);
2929 let buffer_end =
2930 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2931 language_server
2932 .request::<lsp::request::RangeFormatting>(
2933 lsp::DocumentRangeFormattingParams {
2934 text_document,
2935 range: lsp::Range::new(buffer_start, buffer_end),
2936 options: lsp::FormattingOptions {
2937 tab_size: tab_size.into(),
2938 insert_spaces: true,
2939 insert_final_newline: Some(true),
2940 ..Default::default()
2941 },
2942 work_done_progress_params: Default::default(),
2943 },
2944 )
2945 .await?
2946 } else {
2947 continue;
2948 };
2949
2950 if let Some(lsp_edits) = lsp_edits {
2951 let edits = this
2952 .update(&mut cx, |this, cx| {
2953 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2954 })
2955 .await?;
2956 buffer.update(&mut cx, |buffer, cx| {
2957 buffer.finalize_last_transaction();
2958 buffer.start_transaction();
2959 for (range, text) in edits {
2960 buffer.edit([(range, text)], cx);
2961 }
2962 if buffer.end_transaction(cx).is_some() {
2963 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2964 if !push_to_history {
2965 buffer.forget_transaction(transaction.id);
2966 }
2967 project_transaction.0.insert(cx.handle(), transaction);
2968 }
2969 });
2970 }
2971 }
2972
2973 Ok(project_transaction)
2974 })
2975 }
2976
2977 pub fn definition<T: ToPointUtf16>(
2978 &self,
2979 buffer: &ModelHandle<Buffer>,
2980 position: T,
2981 cx: &mut ModelContext<Self>,
2982 ) -> Task<Result<Vec<LocationLink>>> {
2983 let position = position.to_point_utf16(buffer.read(cx));
2984 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2985 }
2986
2987 pub fn references<T: ToPointUtf16>(
2988 &self,
2989 buffer: &ModelHandle<Buffer>,
2990 position: T,
2991 cx: &mut ModelContext<Self>,
2992 ) -> Task<Result<Vec<Location>>> {
2993 let position = position.to_point_utf16(buffer.read(cx));
2994 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2995 }
2996
2997 pub fn document_highlights<T: ToPointUtf16>(
2998 &self,
2999 buffer: &ModelHandle<Buffer>,
3000 position: T,
3001 cx: &mut ModelContext<Self>,
3002 ) -> Task<Result<Vec<DocumentHighlight>>> {
3003 let position = position.to_point_utf16(buffer.read(cx));
3004
3005 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3006 }
3007
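// Fetches workspace symbols matching the query from every local language server (or
// from the host when remote), resolving each result to a worktree-relative path and a
// display label.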
3008 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3009 if self.is_local() {
3010 let mut requests = Vec::new();
3011 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
3012 let worktree_id = *worktree_id;
3013 if let Some(worktree) = self
3014 .worktree_for_id(worktree_id, cx)
3015 .and_then(|worktree| worktree.read(cx).as_local())
3016 {
3017 let lsp_adapter = lsp_adapter.clone();
3018 let worktree_abs_path = worktree.abs_path().clone();
3019 requests.push(
3020 language_server
3021 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
3022 query: query.to_string(),
3023 ..Default::default()
3024 })
3025 .log_err()
3026 .map(move |response| {
3027 (
3028 lsp_adapter,
3029 worktree_id,
3030 worktree_abs_path,
3031 response.unwrap_or_default(),
3032 )
3033 }),
3034 );
3035 }
3036 }
3037
3038 cx.spawn_weak(|this, cx| async move {
3039 let responses = futures::future::join_all(requests).await;
3040 let this = if let Some(this) = this.upgrade(&cx) {
3041 this
3042 } else {
3043 return Ok(Default::default());
3044 };
3045 this.read_with(&cx, |this, cx| {
3046 let mut symbols = Vec::new();
3047 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3048 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3049 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3050 let mut worktree_id = source_worktree_id;
3051 let path;
3052 if let Some((worktree, rel_path)) =
3053 this.find_local_worktree(&abs_path, cx)
3054 {
3055 worktree_id = worktree.read(cx).id();
3056 path = rel_path;
3057 } else {
3058 path = relativize_path(&worktree_abs_path, &abs_path);
3059 }
3060
3061 let label = this
3062 .languages
3063 .select_language(&path)
3064 .and_then(|language| {
3065 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3066 })
3067 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3068 let signature = this.symbol_signature(worktree_id, &path);
3069
3070 Some(Symbol {
3071 source_worktree_id,
3072 worktree_id,
3073 language_server_name: adapter.name(),
3074 name: lsp_symbol.name,
3075 kind: lsp_symbol.kind,
3076 label,
3077 path,
3078 range: range_from_lsp(lsp_symbol.location.range),
3079 signature,
3080 })
3081 }));
3082 }
3083 Ok(symbols)
3084 })
3085 })
3086 } else if let Some(project_id) = self.remote_id() {
3087 let request = self.client.request(proto::GetProjectSymbols {
3088 project_id,
3089 query: query.to_string(),
3090 });
3091 cx.spawn_weak(|this, cx| async move {
3092 let response = request.await?;
3093 let mut symbols = Vec::new();
3094 if let Some(this) = this.upgrade(&cx) {
3095 this.read_with(&cx, |this, _| {
3096 symbols.extend(
3097 response
3098 .symbols
3099 .into_iter()
3100 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3101 );
3102 })
3103 }
3104 Ok(symbols)
3105 })
3106 } else {
3107 Task::ready(Ok(Default::default()))
3108 }
3109 }
3110
3111 pub fn open_buffer_for_symbol(
3112 &mut self,
3113 symbol: &Symbol,
3114 cx: &mut ModelContext<Self>,
3115 ) -> Task<Result<ModelHandle<Buffer>>> {
3116 if self.is_local() {
3117 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3118 symbol.source_worktree_id,
3119 symbol.language_server_name.clone(),
3120 )) {
3121 server.clone()
3122 } else {
3123 return Task::ready(Err(anyhow!(
3124 "language server for worktree and language not found"
3125 )));
3126 };
3127
3128 let worktree_abs_path = if let Some(worktree_abs_path) = self
3129 .worktree_for_id(symbol.worktree_id, cx)
3130 .and_then(|worktree| worktree.read(cx).as_local())
3131 .map(|local_worktree| local_worktree.abs_path())
3132 {
3133 worktree_abs_path
3134 } else {
3135 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3136 };
3137 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3138 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3139 uri
3140 } else {
3141 return Task::ready(Err(anyhow!("invalid symbol path")));
3142 };
3143
3144 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3145 } else if let Some(project_id) = self.remote_id() {
3146 let request = self.client.request(proto::OpenBufferForSymbol {
3147 project_id,
3148 symbol: Some(serialize_symbol(symbol)),
3149 });
3150 cx.spawn(|this, mut cx| async move {
3151 let response = request.await?;
3152 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3153 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3154 .await
3155 })
3156 } else {
3157 Task::ready(Err(anyhow!("project does not have a remote id")))
3158 }
3159 }
3160
3161 pub fn hover<T: ToPointUtf16>(
3162 &self,
3163 buffer: &ModelHandle<Buffer>,
3164 position: T,
3165 cx: &mut ModelContext<Self>,
3166 ) -> Task<Result<Option<Hover>>> {
3167 let position = position.to_point_utf16(buffer.read(cx));
3168 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3169 }
3170
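// Requests completions at the given position from the buffer's language server (or the
// host when remote) and converts them into `Completion` values whose replacement ranges
// are anchored in the buffer.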
3171 pub fn completions<T: ToPointUtf16>(
3172 &self,
3173 source_buffer_handle: &ModelHandle<Buffer>,
3174 position: T,
3175 cx: &mut ModelContext<Self>,
3176 ) -> Task<Result<Vec<Completion>>> {
3177 let source_buffer_handle = source_buffer_handle.clone();
3178 let source_buffer = source_buffer_handle.read(cx);
3179 let buffer_id = source_buffer.remote_id();
3180 let language = source_buffer.language().cloned();
3181 let worktree;
3182 let buffer_abs_path;
3183 if let Some(file) = File::from_dyn(source_buffer.file()) {
3184 worktree = file.worktree.clone();
3185 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3186 } else {
3187 return Task::ready(Ok(Default::default()));
3188 };
3189
3190 let position = position.to_point_utf16(source_buffer);
3191 let anchor = source_buffer.anchor_after(position);
3192
3193 if worktree.read(cx).as_local().is_some() {
3194 let buffer_abs_path = buffer_abs_path.unwrap();
3195 let (_, lang_server) =
3196 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3197 server.clone()
3198 } else {
3199 return Task::ready(Ok(Default::default()));
3200 };
3201
3202 cx.spawn(|_, cx| async move {
3203 let completions = lang_server
3204 .request::<lsp::request::Completion>(lsp::CompletionParams {
3205 text_document_position: lsp::TextDocumentPositionParams::new(
3206 lsp::TextDocumentIdentifier::new(
3207 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3208 ),
3209 point_to_lsp(position),
3210 ),
3211 context: Default::default(),
3212 work_done_progress_params: Default::default(),
3213 partial_result_params: Default::default(),
3214 })
3215 .await
3216 .context("lsp completion request failed")?;
3217
3218 let completions = if let Some(completions) = completions {
3219 match completions {
3220 lsp::CompletionResponse::Array(completions) => completions,
3221 lsp::CompletionResponse::List(list) => list.items,
3222 }
3223 } else {
3224 Default::default()
3225 };
3226
3227 source_buffer_handle.read_with(&cx, |this, _| {
3228 let snapshot = this.snapshot();
3229 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3230 let mut range_for_token = None;
3231 Ok(completions
3232 .into_iter()
3233 .filter_map(|lsp_completion| {
3234 // For now, we can only handle additional edits if they are returned
3235 // when resolving the completion, not if they are present initially.
3236 if lsp_completion
3237 .additional_text_edits
3238 .as_ref()
3239 .map_or(false, |edits| !edits.is_empty())
3240 {
3241 return None;
3242 }
3243
3244 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3245 // If the language server provides a range to overwrite, then
3246 // check that the range is valid.
3247 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3248 let range = range_from_lsp(edit.range);
3249 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3250 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3251 if start != range.start || end != range.end {
3252 log::info!("completion out of expected range");
3253 return None;
3254 }
3255 (
3256 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3257 edit.new_text.clone(),
3258 )
3259 }
3260 // If the language server does not provide a range, then infer
3261 // the range based on the syntax tree.
3262 None => {
3263 if position != clipped_position {
3264 log::info!("completion out of expected range");
3265 return None;
3266 }
3267 let Range { start, end } = range_for_token
3268 .get_or_insert_with(|| {
3269 let offset = position.to_offset(&snapshot);
3270 let (range, kind) = snapshot.surrounding_word(offset);
3271 if kind == Some(CharKind::Word) {
3272 range
3273 } else {
3274 offset..offset
3275 }
3276 })
3277 .clone();
3278 let text = lsp_completion
3279 .insert_text
3280 .as_ref()
3281 .unwrap_or(&lsp_completion.label)
3282 .clone();
3283 (
3284 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3285 text.clone(),
3286 )
3287 }
3288 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3289 log::info!("unsupported insert/replace completion");
3290 return None;
3291 }
3292 };
3293
3294 Some(Completion {
3295 old_range,
3296 new_text,
3297 label: language
3298 .as_ref()
3299 .and_then(|l| l.label_for_completion(&lsp_completion))
3300 .unwrap_or_else(|| {
3301 CodeLabel::plain(
3302 lsp_completion.label.clone(),
3303 lsp_completion.filter_text.as_deref(),
3304 )
3305 }),
3306 lsp_completion,
3307 })
3308 })
3309 .collect())
3310 })
3311 })
3312 } else if let Some(project_id) = self.remote_id() {
3313 let rpc = self.client.clone();
3314 let message = proto::GetCompletions {
3315 project_id,
3316 buffer_id,
3317 position: Some(language::proto::serialize_anchor(&anchor)),
3318 version: serialize_version(&source_buffer.version()),
3319 };
3320 cx.spawn_weak(|_, mut cx| async move {
3321 let response = rpc.request(message).await?;
3322
3323 source_buffer_handle
3324 .update(&mut cx, |buffer, _| {
3325 buffer.wait_for_version(deserialize_version(response.version))
3326 })
3327 .await;
3328
3329 response
3330 .completions
3331 .into_iter()
3332 .map(|completion| {
3333 language::proto::deserialize_completion(completion, language.as_ref())
3334 })
3335 .collect()
3336 })
3337 } else {
3338 Task::ready(Ok(Default::default()))
3339 }
3340 }
3341
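// Resolves the completion with the language server and applies any additional text
// edits it returns (such as auto-imports) in a single transaction, which is optionally
// pushed to the buffer's undo history.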
3342 pub fn apply_additional_edits_for_completion(
3343 &self,
3344 buffer_handle: ModelHandle<Buffer>,
3345 completion: Completion,
3346 push_to_history: bool,
3347 cx: &mut ModelContext<Self>,
3348 ) -> Task<Result<Option<Transaction>>> {
3349 let buffer = buffer_handle.read(cx);
3350 let buffer_id = buffer.remote_id();
3351
3352 if self.is_local() {
3353 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3354 {
3355 server.clone()
3356 } else {
3357 return Task::ready(Ok(Default::default()));
3358 };
3359
3360 cx.spawn(|this, mut cx| async move {
3361 let resolved_completion = lang_server
3362 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3363 .await?;
3364 if let Some(edits) = resolved_completion.additional_text_edits {
3365 let edits = this
3366 .update(&mut cx, |this, cx| {
3367 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3368 })
3369 .await?;
3370 buffer_handle.update(&mut cx, |buffer, cx| {
3371 buffer.finalize_last_transaction();
3372 buffer.start_transaction();
3373 for (range, text) in edits {
3374 buffer.edit([(range, text)], cx);
3375 }
3376 let transaction = if buffer.end_transaction(cx).is_some() {
3377 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3378 if !push_to_history {
3379 buffer.forget_transaction(transaction.id);
3380 }
3381 Some(transaction)
3382 } else {
3383 None
3384 };
3385 Ok(transaction)
3386 })
3387 } else {
3388 Ok(None)
3389 }
3390 })
3391 } else if let Some(project_id) = self.remote_id() {
3392 let client = self.client.clone();
3393 cx.spawn(|_, mut cx| async move {
3394 let response = client
3395 .request(proto::ApplyCompletionAdditionalEdits {
3396 project_id,
3397 buffer_id,
3398 completion: Some(language::proto::serialize_completion(&completion)),
3399 })
3400 .await?;
3401
3402 if let Some(transaction) = response.transaction {
3403 let transaction = language::proto::deserialize_transaction(transaction)?;
3404 buffer_handle
3405 .update(&mut cx, |buffer, _| {
3406 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3407 })
3408 .await;
3409 if push_to_history {
3410 buffer_handle.update(&mut cx, |buffer, _| {
3411 buffer.push_transaction(transaction.clone(), Instant::now());
3412 });
3413 }
3414 Ok(Some(transaction))
3415 } else {
3416 Ok(None)
3417 }
3418 })
3419 } else {
3420 Task::ready(Err(anyhow!("project does not have a remote id")))
3421 }
3422 }
3423
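// Requests code actions for the given range, passing the diagnostics that overlap the
// range as context and keeping only responses that are full code actions rather than
// bare commands.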
3424 pub fn code_actions<T: Clone + ToOffset>(
3425 &self,
3426 buffer_handle: &ModelHandle<Buffer>,
3427 range: Range<T>,
3428 cx: &mut ModelContext<Self>,
3429 ) -> Task<Result<Vec<CodeAction>>> {
3430 let buffer_handle = buffer_handle.clone();
3431 let buffer = buffer_handle.read(cx);
3432 let snapshot = buffer.snapshot();
3433 let relevant_diagnostics = snapshot
3434 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3435 .map(|entry| entry.to_lsp_diagnostic_stub())
3436 .collect();
3437 let buffer_id = buffer.remote_id();
3438 let worktree;
3439 let buffer_abs_path;
3440 if let Some(file) = File::from_dyn(buffer.file()) {
3441 worktree = file.worktree.clone();
3442 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3443 } else {
3444 return Task::ready(Ok(Default::default()));
3445 };
3446 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3447
3448 if worktree.read(cx).as_local().is_some() {
3449 let buffer_abs_path = buffer_abs_path.unwrap();
3450 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3451 {
3452 server.clone()
3453 } else {
3454 return Task::ready(Ok(Default::default()));
3455 };
3456
3457 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3458 cx.foreground().spawn(async move {
3459 if lang_server.capabilities().code_action_provider.is_none() {
3460 return Ok(Default::default());
3461 }
3462
3463 Ok(lang_server
3464 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3465 text_document: lsp::TextDocumentIdentifier::new(
3466 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3467 ),
3468 range: lsp_range,
3469 work_done_progress_params: Default::default(),
3470 partial_result_params: Default::default(),
3471 context: lsp::CodeActionContext {
3472 diagnostics: relevant_diagnostics,
3473 only: Some(vec![
3474 lsp::CodeActionKind::QUICKFIX,
3475 lsp::CodeActionKind::REFACTOR,
3476 lsp::CodeActionKind::REFACTOR_EXTRACT,
3477 lsp::CodeActionKind::SOURCE,
3478 ]),
3479 },
3480 })
3481 .await?
3482 .unwrap_or_default()
3483 .into_iter()
3484 .filter_map(|entry| {
3485 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3486 Some(CodeAction {
3487 range: range.clone(),
3488 lsp_action,
3489 })
3490 } else {
3491 None
3492 }
3493 })
3494 .collect())
3495 })
3496 } else if let Some(project_id) = self.remote_id() {
3497 let rpc = self.client.clone();
3498 let version = buffer.version();
3499 cx.spawn_weak(|_, mut cx| async move {
3500 let response = rpc
3501 .request(proto::GetCodeActions {
3502 project_id,
3503 buffer_id,
3504 start: Some(language::proto::serialize_anchor(&range.start)),
3505 end: Some(language::proto::serialize_anchor(&range.end)),
3506 version: serialize_version(&version),
3507 })
3508 .await?;
3509
3510 buffer_handle
3511 .update(&mut cx, |buffer, _| {
3512 buffer.wait_for_version(deserialize_version(response.version))
3513 })
3514 .await;
3515
3516 response
3517 .actions
3518 .into_iter()
3519 .map(language::proto::deserialize_code_action)
3520 .collect()
3521 })
3522 } else {
3523 Task::ready(Ok(Default::default()))
3524 }
3525 }
3526
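// Applies a code action: if the action carries resolve data, its range is updated and
// it is resolved with the server; otherwise the actions for the range are re-requested
// and matched by title. Its workspace edit is then applied, or its command executed and
// the resulting workspace edits captured, and everything is returned as a
// ProjectTransaction.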
3527 pub fn apply_code_action(
3528 &self,
3529 buffer_handle: ModelHandle<Buffer>,
3530 mut action: CodeAction,
3531 push_to_history: bool,
3532 cx: &mut ModelContext<Self>,
3533 ) -> Task<Result<ProjectTransaction>> {
3534 if self.is_local() {
3535 let buffer = buffer_handle.read(cx);
3536 let (lsp_adapter, lang_server) =
3537 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3538 server.clone()
3539 } else {
3540 return Task::ready(Ok(Default::default()));
3541 };
3542 let range = action.range.to_point_utf16(buffer);
3543
3544 cx.spawn(|this, mut cx| async move {
3545 if let Some(lsp_range) = action
3546 .lsp_action
3547 .data
3548 .as_mut()
3549 .and_then(|d| d.get_mut("codeActionParams"))
3550 .and_then(|d| d.get_mut("range"))
3551 {
3552 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3553 action.lsp_action = lang_server
3554 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3555 .await?;
3556 } else {
3557 let actions = this
3558 .update(&mut cx, |this, cx| {
3559 this.code_actions(&buffer_handle, action.range, cx)
3560 })
3561 .await?;
3562 action.lsp_action = actions
3563 .into_iter()
3564 .find(|a| a.lsp_action.title == action.lsp_action.title)
3565 .ok_or_else(|| anyhow!("code action is outdated"))?
3566 .lsp_action;
3567 }
3568
3569 if let Some(edit) = action.lsp_action.edit {
3570 Self::deserialize_workspace_edit(
3571 this,
3572 edit,
3573 push_to_history,
3574 lsp_adapter,
3575 lang_server,
3576 &mut cx,
3577 )
3578 .await
3579 } else if let Some(command) = action.lsp_action.command {
3580 this.update(&mut cx, |this, _| {
3581 this.last_workspace_edits_by_language_server
3582 .remove(&lang_server.server_id());
3583 });
3584 lang_server
3585 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3586 command: command.command,
3587 arguments: command.arguments.unwrap_or_default(),
3588 ..Default::default()
3589 })
3590 .await?;
3591 Ok(this.update(&mut cx, |this, _| {
3592 this.last_workspace_edits_by_language_server
3593 .remove(&lang_server.server_id())
3594 .unwrap_or_default()
3595 }))
3596 } else {
3597 Ok(ProjectTransaction::default())
3598 }
3599 })
3600 } else if let Some(project_id) = self.remote_id() {
3601 let client = self.client.clone();
3602 let request = proto::ApplyCodeAction {
3603 project_id,
3604 buffer_id: buffer_handle.read(cx).remote_id(),
3605 action: Some(language::proto::serialize_code_action(&action)),
3606 };
3607 cx.spawn(|this, mut cx| async move {
3608 let response = client
3609 .request(request)
3610 .await?
3611 .transaction
3612 .ok_or_else(|| anyhow!("missing transaction"))?;
3613 this.update(&mut cx, |this, cx| {
3614 this.deserialize_project_transaction(response, push_to_history, cx)
3615 })
3616 .await
3617 })
3618 } else {
3619 Task::ready(Err(anyhow!("project does not have a remote id")))
3620 }
3621 }
3622
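// Applies an LSP workspace edit: resource operations (create, rename, delete) run
// against the file system, text edits are applied buffer by buffer, and the per-buffer
// transactions are collected into a ProjectTransaction.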
3623 async fn deserialize_workspace_edit(
3624 this: ModelHandle<Self>,
3625 edit: lsp::WorkspaceEdit,
3626 push_to_history: bool,
3627 lsp_adapter: Arc<dyn LspAdapter>,
3628 language_server: Arc<LanguageServer>,
3629 cx: &mut AsyncAppContext,
3630 ) -> Result<ProjectTransaction> {
3631 let fs = this.read_with(cx, |this, _| this.fs.clone());
3632 let mut operations = Vec::new();
3633 if let Some(document_changes) = edit.document_changes {
3634 match document_changes {
3635 lsp::DocumentChanges::Edits(edits) => {
3636 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3637 }
3638 lsp::DocumentChanges::Operations(ops) => operations = ops,
3639 }
3640 } else if let Some(changes) = edit.changes {
3641 operations.extend(changes.into_iter().map(|(uri, edits)| {
3642 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3643 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3644 uri,
3645 version: None,
3646 },
3647 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3648 })
3649 }));
3650 }
3651
3652 let mut project_transaction = ProjectTransaction::default();
3653 for operation in operations {
3654 match operation {
3655 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3656 let abs_path = op
3657 .uri
3658 .to_file_path()
3659 .map_err(|_| anyhow!("can't convert URI to path"))?;
3660
3661 if let Some(parent_path) = abs_path.parent() {
3662 fs.create_dir(parent_path).await?;
3663 }
3664 if abs_path.ends_with("/") {
3665 fs.create_dir(&abs_path).await?;
3666 } else {
3667 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3668 .await?;
3669 }
3670 }
3671 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3672 let source_abs_path = op
3673 .old_uri
3674 .to_file_path()
3675 .map_err(|_| anyhow!("can't convert URI to path"))?;
3676 let target_abs_path = op
3677 .new_uri
3678 .to_file_path()
3679 .map_err(|_| anyhow!("can't convert URI to path"))?;
3680 fs.rename(
3681 &source_abs_path,
3682 &target_abs_path,
3683 op.options.map(Into::into).unwrap_or_default(),
3684 )
3685 .await?;
3686 }
3687 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3688 let abs_path = op
3689 .uri
3690 .to_file_path()
3691 .map_err(|_| anyhow!("can't convert URI to path"))?;
3692 let options = op.options.map(Into::into).unwrap_or_default();
3693 if abs_path.ends_with("/") {
3694 fs.remove_dir(&abs_path, options).await?;
3695 } else {
3696 fs.remove_file(&abs_path, options).await?;
3697 }
3698 }
3699 lsp::DocumentChangeOperation::Edit(op) => {
3700 let buffer_to_edit = this
3701 .update(cx, |this, cx| {
3702 this.open_local_buffer_via_lsp(
3703 op.text_document.uri,
3704 lsp_adapter.clone(),
3705 language_server.clone(),
3706 cx,
3707 )
3708 })
3709 .await?;
3710
3711 let edits = this
3712 .update(cx, |this, cx| {
3713 let edits = op.edits.into_iter().map(|edit| match edit {
3714 lsp::OneOf::Left(edit) => edit,
3715 lsp::OneOf::Right(edit) => edit.text_edit,
3716 });
3717 this.edits_from_lsp(
3718 &buffer_to_edit,
3719 edits,
3720 op.text_document.version,
3721 cx,
3722 )
3723 })
3724 .await?;
3725
3726 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3727 buffer.finalize_last_transaction();
3728 buffer.start_transaction();
3729 for (range, text) in edits {
3730 buffer.edit([(range, text)], cx);
3731 }
3732 let transaction = if buffer.end_transaction(cx).is_some() {
3733 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3734 if !push_to_history {
3735 buffer.forget_transaction(transaction.id);
3736 }
3737 Some(transaction)
3738 } else {
3739 None
3740 };
3741
3742 transaction
3743 });
3744 if let Some(transaction) = transaction {
3745 project_transaction.0.insert(buffer_to_edit, transaction);
3746 }
3747 }
3748 }
3749 }
3750
3751 Ok(project_transaction)
3752 }
3753
3754 pub fn prepare_rename<T: ToPointUtf16>(
3755 &self,
3756 buffer: ModelHandle<Buffer>,
3757 position: T,
3758 cx: &mut ModelContext<Self>,
3759 ) -> Task<Result<Option<Range<Anchor>>>> {
3760 let position = position.to_point_utf16(buffer.read(cx));
3761 self.request_lsp(buffer, PrepareRename { position }, cx)
3762 }
3763
3764 pub fn perform_rename<T: ToPointUtf16>(
3765 &self,
3766 buffer: ModelHandle<Buffer>,
3767 position: T,
3768 new_name: String,
3769 push_to_history: bool,
3770 cx: &mut ModelContext<Self>,
3771 ) -> Task<Result<ProjectTransaction>> {
3772 let position = position.to_point_utf16(buffer.read(cx));
3773 self.request_lsp(
3774 buffer,
3775 PerformRename {
3776 position,
3777 new_name,
3778 push_to_history,
3779 },
3780 cx,
3781 )
3782 }
3783
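// Project-wide search. Locally, worktree paths are scanned in parallel on background
// threads to find files whose contents match the query, matching files are opened as
// buffers, and every open buffer is then searched to produce anchor ranges; remotely,
// the query is forwarded to the host.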
3784 pub fn search(
3785 &self,
3786 query: SearchQuery,
3787 cx: &mut ModelContext<Self>,
3788 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3789 if self.is_local() {
3790 let snapshots = self
3791 .visible_worktrees(cx)
3792 .filter_map(|tree| {
3793 let tree = tree.read(cx).as_local()?;
3794 Some(tree.snapshot())
3795 })
3796 .collect::<Vec<_>>();
3797
3798 let background = cx.background().clone();
3799 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3800 if path_count == 0 {
3801 return Task::ready(Ok(Default::default()));
3802 }
3803 let workers = background.num_cpus().min(path_count);
3804 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3805 cx.background()
3806 .spawn({
3807 let fs = self.fs.clone();
3808 let background = cx.background().clone();
3809 let query = query.clone();
3810 async move {
3811 let fs = &fs;
3812 let query = &query;
3813 let matching_paths_tx = &matching_paths_tx;
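// Split the candidate paths evenly across the workers (ceiling division), so each
// worker scans a contiguous slice of the worktrees' visible files.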
3814 let paths_per_worker = (path_count + workers - 1) / workers;
3815 let snapshots = &snapshots;
3816 background
3817 .scoped(|scope| {
3818 for worker_ix in 0..workers {
3819 let worker_start_ix = worker_ix * paths_per_worker;
3820 let worker_end_ix = worker_start_ix + paths_per_worker;
3821 scope.spawn(async move {
3822 let mut snapshot_start_ix = 0;
3823 let mut abs_path = PathBuf::new();
3824 for snapshot in snapshots {
3825 let snapshot_end_ix =
3826 snapshot_start_ix + snapshot.visible_file_count();
3827 if worker_end_ix <= snapshot_start_ix {
3828 break;
3829 } else if worker_start_ix > snapshot_end_ix {
3830 snapshot_start_ix = snapshot_end_ix;
3831 continue;
3832 } else {
3833 let start_in_snapshot = worker_start_ix
3834 .saturating_sub(snapshot_start_ix);
3835 let end_in_snapshot =
3836 cmp::min(worker_end_ix, snapshot_end_ix)
3837 - snapshot_start_ix;
3838
3839 for entry in snapshot
3840 .files(false, start_in_snapshot)
3841 .take(end_in_snapshot - start_in_snapshot)
3842 {
3843 if matching_paths_tx.is_closed() {
3844 break;
3845 }
3846
3847 abs_path.clear();
3848 abs_path.push(&snapshot.abs_path());
3849 abs_path.push(&entry.path);
3850 let matches = if let Some(file) =
3851 fs.open_sync(&abs_path).await.log_err()
3852 {
3853 query.detect(file).unwrap_or(false)
3854 } else {
3855 false
3856 };
3857
3858 if matches {
3859 let project_path =
3860 (snapshot.id(), entry.path.clone());
3861 if matching_paths_tx
3862 .send(project_path)
3863 .await
3864 .is_err()
3865 {
3866 break;
3867 }
3868 }
3869 }
3870
3871 snapshot_start_ix = snapshot_end_ix;
3872 }
3873 }
3874 });
3875 }
3876 })
3877 .await;
3878 }
3879 })
3880 .detach();
3881
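            // Feed buffers to the searchers: start with the buffers that are already
            // open, then open each path reported by the scan above, skipping any
            // buffer that has already been sent.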
3882 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3883 let open_buffers = self
3884 .opened_buffers
3885 .values()
3886 .filter_map(|b| b.upgrade(cx))
3887 .collect::<HashSet<_>>();
3888 cx.spawn(|this, cx| async move {
3889 for buffer in &open_buffers {
3890 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3891 buffers_tx.send((buffer.clone(), snapshot)).await?;
3892 }
3893
3894 let open_buffers = Rc::new(RefCell::new(open_buffers));
3895 while let Some(project_path) = matching_paths_rx.next().await {
3896 if buffers_tx.is_closed() {
3897 break;
3898 }
3899
3900 let this = this.clone();
3901 let open_buffers = open_buffers.clone();
3902 let buffers_tx = buffers_tx.clone();
3903 cx.spawn(|mut cx| async move {
3904 if let Some(buffer) = this
3905 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3906 .await
3907 .log_err()
3908 {
3909 if open_buffers.borrow_mut().insert(buffer.clone()) {
3910 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3911 buffers_tx.send((buffer, snapshot)).await?;
3912 }
3913 }
3914
3915 Ok::<_, anyhow::Error>(())
3916 })
3917 .detach();
3918 }
3919
3920 Ok::<_, anyhow::Error>(())
3921 })
3922 .detach_and_log_err(cx);
3923
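            // Run the text search over buffer snapshots on background threads,
            // collecting one result map per worker and merging them at the end.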
3924 let background = cx.background().clone();
3925 cx.background().spawn(async move {
3926 let query = &query;
3927 let mut matched_buffers = Vec::new();
3928 for _ in 0..workers {
3929 matched_buffers.push(HashMap::default());
3930 }
3931 background
3932 .scoped(|scope| {
3933 for worker_matched_buffers in matched_buffers.iter_mut() {
3934 let mut buffers_rx = buffers_rx.clone();
3935 scope.spawn(async move {
3936 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3937 let buffer_matches = query
3938 .search(snapshot.as_rope())
3939 .await
3940 .iter()
3941 .map(|range| {
3942 snapshot.anchor_before(range.start)
3943 ..snapshot.anchor_after(range.end)
3944 })
3945 .collect::<Vec<_>>();
3946 if !buffer_matches.is_empty() {
3947 worker_matched_buffers
3948 .insert(buffer.clone(), buffer_matches);
3949 }
3950 }
3951 });
3952 }
3953 })
3954 .await;
3955 Ok(matched_buffers.into_iter().flatten().collect())
3956 })
3957 } else if let Some(project_id) = self.remote_id() {
3958 let request = self.client.request(query.to_proto(project_id));
3959 cx.spawn(|this, mut cx| async move {
3960 let response = request.await?;
3961 let mut result = HashMap::default();
3962 for location in response.locations {
3963 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3964 let target_buffer = this
3965 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3966 .await?;
3967 let start = location
3968 .start
3969 .and_then(deserialize_anchor)
3970 .ok_or_else(|| anyhow!("missing target start"))?;
3971 let end = location
3972 .end
3973 .and_then(deserialize_anchor)
3974 .ok_or_else(|| anyhow!("missing target end"))?;
3975                     result
3976                         .entry(target_buffer)
3977                         .or_default()
3978                         .push(start..end);
3979 }
3980 Ok(result)
3981 })
3982 } else {
3983 Task::ready(Ok(Default::default()))
3984 }
3985 }
3986
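    /// Issues an LSP request for the given buffer, either against the locally running
    /// language server or, on guests, by forwarding it to the host over RPC. Resolves
    /// to a default response when no server is available or it lacks the capability.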
3987 fn request_lsp<R: LspCommand>(
3988 &self,
3989 buffer_handle: ModelHandle<Buffer>,
3990 request: R,
3991 cx: &mut ModelContext<Self>,
3992 ) -> Task<Result<R::Response>>
3993 where
3994 <R::LspRequest as lsp::request::Request>::Result: Send,
3995 {
3996 let buffer = buffer_handle.read(cx);
3997 if self.is_local() {
3998 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3999 if let Some((file, (_, language_server))) =
4000 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
4001 {
4002 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4003 return cx.spawn(|this, cx| async move {
4004 if !request.check_capabilities(&language_server.capabilities()) {
4005 return Ok(Default::default());
4006 }
4007
4008 let response = language_server
4009 .request::<R::LspRequest>(lsp_params)
4010 .await
4011 .context("lsp request failed")?;
4012 request
4013 .response_from_lsp(response, this, buffer_handle, cx)
4014 .await
4015 });
4016 }
4017 } else if let Some(project_id) = self.remote_id() {
4018 let rpc = self.client.clone();
4019 let message = request.to_proto(project_id, buffer);
4020 return cx.spawn(|this, cx| async move {
4021 let response = rpc.request(message).await?;
4022 request
4023 .response_from_proto(response, this, buffer_handle, cx)
4024 .await
4025 });
4026 }
4027 Task::ready(Ok(Default::default()))
4028 }
4029
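    /// Returns the worktree containing `abs_path` together with the path relative to
    /// the worktree root, creating a new local worktree when no existing one contains
    /// the path.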
4030 pub fn find_or_create_local_worktree(
4031 &mut self,
4032 abs_path: impl AsRef<Path>,
4033 visible: bool,
4034 cx: &mut ModelContext<Self>,
4035 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4036 let abs_path = abs_path.as_ref();
4037 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4038             Task::ready(Ok((tree, relative_path)))
4039 } else {
4040 let worktree = self.create_local_worktree(abs_path, visible, cx);
4041 cx.foreground()
4042 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4043 }
4044 }
4045
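    /// Finds the local worktree whose root is a prefix of `abs_path`, returning it
    /// along with the remainder of the path.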
4046 pub fn find_local_worktree(
4047 &self,
4048 abs_path: &Path,
4049 cx: &AppContext,
4050 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4051 for tree in &self.worktrees {
4052 if let Some(tree) = tree.upgrade(cx) {
4053 if let Some(relative_path) = tree
4054 .read(cx)
4055 .as_local()
4056 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4057 {
4058 return Some((tree.clone(), relative_path.into()));
4059 }
4060 }
4061 }
4062 None
4063 }
4064
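    /// Whether this project is currently shared with collaborators. Always false for
    /// remote (guest) projects.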
4065 pub fn is_shared(&self) -> bool {
4066 match &self.client_state {
4067 ProjectClientState::Local { is_shared, .. } => *is_shared,
4068 ProjectClientState::Remote { .. } => false,
4069 }
4070 }
4071
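    /// Starts loading a local worktree for `abs_path`, deduplicating concurrent
    /// requests for the same path by sharing a single loading task. If the project is
    /// already shared, the new worktree is shared with collaborators once it loads.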
4072 fn create_local_worktree(
4073 &mut self,
4074 abs_path: impl AsRef<Path>,
4075 visible: bool,
4076 cx: &mut ModelContext<Self>,
4077 ) -> Task<Result<ModelHandle<Worktree>>> {
4078 let fs = self.fs.clone();
4079 let client = self.client.clone();
4080 let next_entry_id = self.next_entry_id.clone();
4081 let path: Arc<Path> = abs_path.as_ref().into();
4082 let task = self
4083 .loading_local_worktrees
4084 .entry(path.clone())
4085 .or_insert_with(|| {
4086 cx.spawn(|project, mut cx| {
4087 async move {
4088 let worktree = Worktree::local(
4089 client.clone(),
4090 path.clone(),
4091 visible,
4092 fs,
4093 next_entry_id,
4094 &mut cx,
4095 )
4096 .await;
4097 project.update(&mut cx, |project, _| {
4098 project.loading_local_worktrees.remove(&path);
4099 });
4100 let worktree = worktree?;
4101
4102 let project_id = project.update(&mut cx, |project, cx| {
4103 project.add_worktree(&worktree, cx);
4104 project.shared_remote_id()
4105 });
4106
4107 if let Some(project_id) = project_id {
4108 worktree
4109 .update(&mut cx, |worktree, cx| {
4110 worktree.as_local_mut().unwrap().share(project_id, cx)
4111 })
4112 .await
4113 .log_err();
4114 }
4115
4116 Ok(worktree)
4117 }
4118                     .map_err(Arc::new)
4119 })
4120 .shared()
4121 })
4122 .clone();
4123 cx.foreground().spawn(async move {
4124 match task.await {
4125 Ok(worktree) => Ok(worktree),
4126 Err(err) => Err(anyhow!("{}", err)),
4127 }
4128 })
4129 }
4130
4131 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4132 self.worktrees.retain(|worktree| {
4133 if let Some(worktree) = worktree.upgrade(cx) {
4134 let id = worktree.read(cx).id();
4135 if id == id_to_remove {
4136 cx.emit(Event::WorktreeRemoved(id));
4137 false
4138 } else {
4139 true
4140 }
4141 } else {
4142 false
4143 }
4144 });
4145 self.metadata_changed(true, cx);
4146 cx.notify();
4147 }
4148
4149 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4150 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4151 if worktree.read(cx).is_local() {
4152 cx.subscribe(&worktree, |this, worktree, _, cx| {
4153 this.update_local_worktree_buffers(worktree, cx);
4154 })
4155 .detach();
4156 }
4157
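        // Hold invisible, unshared local worktrees weakly so they can be released when
        // nothing else uses them; visible, shared, or remote worktrees are kept alive.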
4158 let push_strong_handle = {
4159 let worktree = worktree.read(cx);
4160 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4161 };
4162 if push_strong_handle {
4163 self.worktrees
4164 .push(WorktreeHandle::Strong(worktree.clone()));
4165 } else {
4166 self.worktrees
4167 .push(WorktreeHandle::Weak(worktree.downgrade()));
4168 }
4169
4170 self.metadata_changed(true, cx);
4171 cx.observe_release(&worktree, |this, worktree, cx| {
4172 this.remove_worktree(worktree.id(), cx);
4173 cx.notify();
4174 })
4175 .detach();
4176
4177 cx.emit(Event::WorktreeAdded);
4178 cx.notify();
4179 }
4180
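    /// Reconciles open buffers with an updated local worktree snapshot: refreshes each
    /// buffer's file metadata, notifies collaborators of the change, and re-registers
    /// renamed buffers with their language server.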
4181 fn update_local_worktree_buffers(
4182 &mut self,
4183 worktree_handle: ModelHandle<Worktree>,
4184 cx: &mut ModelContext<Self>,
4185 ) {
4186 let snapshot = worktree_handle.read(cx).snapshot();
4187 let mut buffers_to_delete = Vec::new();
4188 let mut renamed_buffers = Vec::new();
4189 for (buffer_id, buffer) in &self.opened_buffers {
4190 if let Some(buffer) = buffer.upgrade(cx) {
4191 buffer.update(cx, |buffer, cx| {
4192 if let Some(old_file) = File::from_dyn(buffer.file()) {
4193 if old_file.worktree != worktree_handle {
4194 return;
4195 }
4196
4197 let new_file = if let Some(entry) = old_file
4198 .entry_id
4199 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4200 {
4201 File {
4202 is_local: true,
4203 entry_id: Some(entry.id),
4204 mtime: entry.mtime,
4205 path: entry.path.clone(),
4206 worktree: worktree_handle.clone(),
4207 }
4208 } else if let Some(entry) =
4209 snapshot.entry_for_path(old_file.path().as_ref())
4210 {
4211 File {
4212 is_local: true,
4213 entry_id: Some(entry.id),
4214 mtime: entry.mtime,
4215 path: entry.path.clone(),
4216 worktree: worktree_handle.clone(),
4217 }
4218 } else {
4219 File {
4220 is_local: true,
4221 entry_id: None,
4222 path: old_file.path().clone(),
4223 mtime: old_file.mtime(),
4224 worktree: worktree_handle.clone(),
4225 }
4226 };
4227
4228 let old_path = old_file.abs_path(cx);
4229 if new_file.abs_path(cx) != old_path {
4230 renamed_buffers.push((cx.handle(), old_path));
4231 }
4232
4233 if let Some(project_id) = self.shared_remote_id() {
4234 self.client
4235 .send(proto::UpdateBufferFile {
4236 project_id,
4237 buffer_id: *buffer_id as u64,
4238 file: Some(new_file.to_proto()),
4239 })
4240 .log_err();
4241 }
4242 buffer.file_updated(Arc::new(new_file), cx).detach();
4243 }
4244 });
4245 } else {
4246 buffers_to_delete.push(*buffer_id);
4247 }
4248 }
4249
4250 for buffer_id in buffers_to_delete {
4251 self.opened_buffers.remove(&buffer_id);
4252 }
4253
4254 for (buffer, old_path) in renamed_buffers {
4255 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4256 self.assign_language_to_buffer(&buffer, cx);
4257 self.register_buffer_with_language_server(&buffer, cx);
4258 }
4259 }
4260
4261 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4262 let new_active_entry = entry.and_then(|project_path| {
4263 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4264 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4265 Some(entry.id)
4266 });
4267 if new_active_entry != self.active_entry {
4268 self.active_entry = new_active_entry;
4269 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4270 }
4271 }
4272
4273 pub fn language_servers_running_disk_based_diagnostics<'a>(
4274 &'a self,
4275 ) -> impl 'a + Iterator<Item = usize> {
4276 self.language_server_statuses
4277 .iter()
4278 .filter_map(|(id, status)| {
4279 if status.has_pending_diagnostic_updates {
4280 Some(*id)
4281 } else {
4282 None
4283 }
4284 })
4285 }
4286
4287 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4288 let mut summary = DiagnosticSummary::default();
4289 for (_, path_summary) in self.diagnostic_summaries(cx) {
4290 summary.error_count += path_summary.error_count;
4291 summary.warning_count += path_summary.warning_count;
4292 }
4293 summary
4294 }
4295
4296 pub fn diagnostic_summaries<'a>(
4297 &'a self,
4298 cx: &'a AppContext,
4299 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4300 self.visible_worktrees(cx).flat_map(move |worktree| {
4301 let worktree = worktree.read(cx);
4302 let worktree_id = worktree.id();
4303 worktree
4304 .diagnostic_summaries()
4305 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4306 })
4307 }
4308
4309 pub fn disk_based_diagnostics_started(
4310 &mut self,
4311 language_server_id: usize,
4312 cx: &mut ModelContext<Self>,
4313 ) {
4314 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4315 }
4316
4317 pub fn disk_based_diagnostics_finished(
4318 &mut self,
4319 language_server_id: usize,
4320 cx: &mut ModelContext<Self>,
4321 ) {
4322 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4323 }
4324
4325 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4326 self.active_entry
4327 }
4328
4329 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4330 self.worktree_for_id(path.worktree_id, cx)?
4331 .read(cx)
4332 .entry_for_path(&path.path)
4333 .map(|entry| entry.id)
4334 }
4335
4336 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4337 let worktree = self.worktree_for_entry(entry_id, cx)?;
4338 let worktree = worktree.read(cx);
4339 let worktree_id = worktree.id();
4340 let path = worktree.entry_for_id(entry_id)?.path.clone();
4341 Some(ProjectPath { worktree_id, path })
4342 }
4343
4344 // RPC message handlers
4345
4346 async fn handle_request_join_project(
4347 this: ModelHandle<Self>,
4348 message: TypedEnvelope<proto::RequestJoinProject>,
4349 _: Arc<Client>,
4350 mut cx: AsyncAppContext,
4351 ) -> Result<()> {
4352 let user_id = message.payload.requester_id;
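        // Requests from existing collaborators are accepted immediately; otherwise the
        // requesting user is fetched and surfaced to the UI via an event.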
4353 if this.read_with(&cx, |project, _| {
4354 project.collaborators.values().any(|c| c.user.id == user_id)
4355 }) {
4356 this.update(&mut cx, |this, cx| {
4357 this.respond_to_join_request(user_id, true, cx)
4358 });
4359 } else {
4360 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4361 let user = user_store
4362 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4363 .await?;
4364 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4365 }
4366 Ok(())
4367 }
4368
4369 async fn handle_unregister_project(
4370 this: ModelHandle<Self>,
4371 _: TypedEnvelope<proto::UnregisterProject>,
4372 _: Arc<Client>,
4373 mut cx: AsyncAppContext,
4374 ) -> Result<()> {
4375 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4376 Ok(())
4377 }
4378
4379 async fn handle_project_unshared(
4380 this: ModelHandle<Self>,
4381 _: TypedEnvelope<proto::ProjectUnshared>,
4382 _: Arc<Client>,
4383 mut cx: AsyncAppContext,
4384 ) -> Result<()> {
4385 this.update(&mut cx, |this, cx| this.unshared(cx));
4386 Ok(())
4387 }
4388
4389 async fn handle_add_collaborator(
4390 this: ModelHandle<Self>,
4391 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4392 _: Arc<Client>,
4393 mut cx: AsyncAppContext,
4394 ) -> Result<()> {
4395 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4396 let collaborator = envelope
4397 .payload
4398 .collaborator
4399 .take()
4400 .ok_or_else(|| anyhow!("empty collaborator"))?;
4401
4402 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4403 this.update(&mut cx, |this, cx| {
4404 this.collaborators
4405 .insert(collaborator.peer_id, collaborator);
4406 cx.notify();
4407 });
4408
4409 Ok(())
4410 }
4411
4412 async fn handle_remove_collaborator(
4413 this: ModelHandle<Self>,
4414 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4415 _: Arc<Client>,
4416 mut cx: AsyncAppContext,
4417 ) -> Result<()> {
4418 this.update(&mut cx, |this, cx| {
4419 let peer_id = PeerId(envelope.payload.peer_id);
4420 let replica_id = this
4421 .collaborators
4422 .remove(&peer_id)
4423 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4424 .replica_id;
4425 for (_, buffer) in &this.opened_buffers {
4426 if let Some(buffer) = buffer.upgrade(cx) {
4427 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4428 }
4429 }
4430
4431 cx.emit(Event::CollaboratorLeft(peer_id));
4432 cx.notify();
4433 Ok(())
4434 })
4435 }
4436
4437 async fn handle_join_project_request_cancelled(
4438 this: ModelHandle<Self>,
4439 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4440 _: Arc<Client>,
4441 mut cx: AsyncAppContext,
4442 ) -> Result<()> {
4443 let user = this
4444 .update(&mut cx, |this, cx| {
4445 this.user_store.update(cx, |user_store, cx| {
4446 user_store.fetch_user(envelope.payload.requester_id, cx)
4447 })
4448 })
4449 .await?;
4450
4451 this.update(&mut cx, |_, cx| {
4452 cx.emit(Event::ContactCancelledJoinRequest(user));
4453 });
4454
4455 Ok(())
4456 }
4457
4458 async fn handle_update_project(
4459 this: ModelHandle<Self>,
4460 envelope: TypedEnvelope<proto::UpdateProject>,
4461 client: Arc<Client>,
4462 mut cx: AsyncAppContext,
4463 ) -> Result<()> {
4464 this.update(&mut cx, |this, cx| {
4465 let replica_id = this.replica_id();
4466 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4467
4468 let mut old_worktrees_by_id = this
4469 .worktrees
4470 .drain(..)
4471 .filter_map(|worktree| {
4472 let worktree = worktree.upgrade(cx)?;
4473 Some((worktree.read(cx).id(), worktree))
4474 })
4475 .collect::<HashMap<_, _>>();
4476
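            // Keep the worktrees we already know about; anything missing from the
            // update is reported as removed below.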
4477 for worktree in envelope.payload.worktrees {
4478 if let Some(old_worktree) =
4479 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4480 {
4481 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4482 } else {
4483 let worktree = proto::Worktree {
4484 id: worktree.id,
4485 root_name: worktree.root_name,
4486 entries: Default::default(),
4487 diagnostic_summaries: Default::default(),
4488 visible: worktree.visible,
4489 scan_id: 0,
4490 };
4491 let (worktree, load_task) =
4492 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4493 this.add_worktree(&worktree, cx);
4494 load_task.detach();
4495 }
4496 }
4497
4498 this.metadata_changed(true, cx);
4499 for (id, _) in old_worktrees_by_id {
4500 cx.emit(Event::WorktreeRemoved(id));
4501 }
4502
4503 Ok(())
4504 })
4505 }
4506
4507 async fn handle_update_worktree(
4508 this: ModelHandle<Self>,
4509 envelope: TypedEnvelope<proto::UpdateWorktree>,
4510 _: Arc<Client>,
4511 mut cx: AsyncAppContext,
4512 ) -> Result<()> {
4513 this.update(&mut cx, |this, cx| {
4514 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4515 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4516 worktree.update(cx, |worktree, _| {
4517 let worktree = worktree.as_remote_mut().unwrap();
4518 worktree.update_from_remote(envelope)
4519 })?;
4520 }
4521 Ok(())
4522 })
4523 }
4524
4525 async fn handle_create_project_entry(
4526 this: ModelHandle<Self>,
4527 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4528 _: Arc<Client>,
4529 mut cx: AsyncAppContext,
4530 ) -> Result<proto::ProjectEntryResponse> {
4531 let worktree = this.update(&mut cx, |this, cx| {
4532 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4533 this.worktree_for_id(worktree_id, cx)
4534 .ok_or_else(|| anyhow!("worktree not found"))
4535 })?;
4536 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4537 let entry = worktree
4538 .update(&mut cx, |worktree, cx| {
4539 let worktree = worktree.as_local_mut().unwrap();
4540 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4541 worktree.create_entry(path, envelope.payload.is_directory, cx)
4542 })
4543 .await?;
4544 Ok(proto::ProjectEntryResponse {
4545 entry: Some((&entry).into()),
4546 worktree_scan_id: worktree_scan_id as u64,
4547 })
4548 }
4549
4550 async fn handle_rename_project_entry(
4551 this: ModelHandle<Self>,
4552 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4553 _: Arc<Client>,
4554 mut cx: AsyncAppContext,
4555 ) -> Result<proto::ProjectEntryResponse> {
4556 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4557 let worktree = this.read_with(&cx, |this, cx| {
4558 this.worktree_for_entry(entry_id, cx)
4559 .ok_or_else(|| anyhow!("worktree not found"))
4560 })?;
4561 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4562 let entry = worktree
4563 .update(&mut cx, |worktree, cx| {
4564 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4565 worktree
4566 .as_local_mut()
4567 .unwrap()
4568 .rename_entry(entry_id, new_path, cx)
4569 .ok_or_else(|| anyhow!("invalid entry"))
4570 })?
4571 .await?;
4572 Ok(proto::ProjectEntryResponse {
4573 entry: Some((&entry).into()),
4574 worktree_scan_id: worktree_scan_id as u64,
4575 })
4576 }
4577
4578 async fn handle_copy_project_entry(
4579 this: ModelHandle<Self>,
4580 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4581 _: Arc<Client>,
4582 mut cx: AsyncAppContext,
4583 ) -> Result<proto::ProjectEntryResponse> {
4584 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4585 let worktree = this.read_with(&cx, |this, cx| {
4586 this.worktree_for_entry(entry_id, cx)
4587 .ok_or_else(|| anyhow!("worktree not found"))
4588 })?;
4589 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4590 let entry = worktree
4591 .update(&mut cx, |worktree, cx| {
4592 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4593 worktree
4594 .as_local_mut()
4595 .unwrap()
4596 .copy_entry(entry_id, new_path, cx)
4597 .ok_or_else(|| anyhow!("invalid entry"))
4598 })?
4599 .await?;
4600 Ok(proto::ProjectEntryResponse {
4601 entry: Some((&entry).into()),
4602 worktree_scan_id: worktree_scan_id as u64,
4603 })
4604 }
4605
4606 async fn handle_delete_project_entry(
4607 this: ModelHandle<Self>,
4608 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4609 _: Arc<Client>,
4610 mut cx: AsyncAppContext,
4611 ) -> Result<proto::ProjectEntryResponse> {
4612 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4613 let worktree = this.read_with(&cx, |this, cx| {
4614 this.worktree_for_entry(entry_id, cx)
4615 .ok_or_else(|| anyhow!("worktree not found"))
4616 })?;
4617 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4618 worktree
4619 .update(&mut cx, |worktree, cx| {
4620 worktree
4621 .as_local_mut()
4622 .unwrap()
4623 .delete_entry(entry_id, cx)
4624 .ok_or_else(|| anyhow!("invalid entry"))
4625 })?
4626 .await?;
4627 Ok(proto::ProjectEntryResponse {
4628 entry: None,
4629 worktree_scan_id: worktree_scan_id as u64,
4630 })
4631 }
4632
4633 async fn handle_update_diagnostic_summary(
4634 this: ModelHandle<Self>,
4635 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4636 _: Arc<Client>,
4637 mut cx: AsyncAppContext,
4638 ) -> Result<()> {
4639 this.update(&mut cx, |this, cx| {
4640 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4641 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4642 if let Some(summary) = envelope.payload.summary {
4643 let project_path = ProjectPath {
4644 worktree_id,
4645 path: Path::new(&summary.path).into(),
4646 };
4647 worktree.update(cx, |worktree, _| {
4648 worktree
4649 .as_remote_mut()
4650 .unwrap()
4651 .update_diagnostic_summary(project_path.path.clone(), &summary);
4652 });
4653 cx.emit(Event::DiagnosticsUpdated {
4654 language_server_id: summary.language_server_id as usize,
4655 path: project_path,
4656 });
4657 }
4658 }
4659 Ok(())
4660 })
4661 }
4662
4663 async fn handle_start_language_server(
4664 this: ModelHandle<Self>,
4665 envelope: TypedEnvelope<proto::StartLanguageServer>,
4666 _: Arc<Client>,
4667 mut cx: AsyncAppContext,
4668 ) -> Result<()> {
4669 let server = envelope
4670 .payload
4671 .server
4672 .ok_or_else(|| anyhow!("invalid server"))?;
4673 this.update(&mut cx, |this, cx| {
4674 this.language_server_statuses.insert(
4675 server.id as usize,
4676 LanguageServerStatus {
4677 name: server.name,
4678 pending_work: Default::default(),
4679 has_pending_diagnostic_updates: false,
4680 progress_tokens: Default::default(),
4681 },
4682 );
4683 cx.notify();
4684 });
4685 Ok(())
4686 }
4687
4688 async fn handle_update_language_server(
4689 this: ModelHandle<Self>,
4690 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4691 _: Arc<Client>,
4692 mut cx: AsyncAppContext,
4693 ) -> Result<()> {
4694 let language_server_id = envelope.payload.language_server_id as usize;
4695 match envelope
4696 .payload
4697 .variant
4698 .ok_or_else(|| anyhow!("invalid variant"))?
4699 {
4700 proto::update_language_server::Variant::WorkStart(payload) => {
4701 this.update(&mut cx, |this, cx| {
4702 this.on_lsp_work_start(
4703 language_server_id,
4704 payload.token,
4705 LanguageServerProgress {
4706 message: payload.message,
4707 percentage: payload.percentage.map(|p| p as usize),
4708 last_update_at: Instant::now(),
4709 },
4710 cx,
4711 );
4712 })
4713 }
4714 proto::update_language_server::Variant::WorkProgress(payload) => {
4715 this.update(&mut cx, |this, cx| {
4716 this.on_lsp_work_progress(
4717 language_server_id,
4718 payload.token,
4719 LanguageServerProgress {
4720 message: payload.message,
4721 percentage: payload.percentage.map(|p| p as usize),
4722 last_update_at: Instant::now(),
4723 },
4724 cx,
4725 );
4726 })
4727 }
4728 proto::update_language_server::Variant::WorkEnd(payload) => {
4729 this.update(&mut cx, |this, cx| {
4730 this.on_lsp_work_end(language_server_id, payload.token, cx);
4731 })
4732 }
4733 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4734 this.update(&mut cx, |this, cx| {
4735 this.disk_based_diagnostics_started(language_server_id, cx);
4736 })
4737 }
4738 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4739 this.update(&mut cx, |this, cx| {
4740 this.disk_based_diagnostics_finished(language_server_id, cx)
4741 });
4742 }
4743 }
4744
4745 Ok(())
4746 }
4747
4748 async fn handle_update_buffer(
4749 this: ModelHandle<Self>,
4750 envelope: TypedEnvelope<proto::UpdateBuffer>,
4751 _: Arc<Client>,
4752 mut cx: AsyncAppContext,
4753 ) -> Result<()> {
4754 this.update(&mut cx, |this, cx| {
4755 let payload = envelope.payload.clone();
4756 let buffer_id = payload.buffer_id;
4757 let ops = payload
4758 .operations
4759 .into_iter()
4760                 .map(language::proto::deserialize_operation)
4761 .collect::<Result<Vec<_>, _>>()?;
4762 let is_remote = this.is_remote();
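            // Guests may receive operations for a buffer before the buffer itself
            // arrives; stash them in a `Loading` entry so they can be applied once
            // the buffer is opened.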
4763 match this.opened_buffers.entry(buffer_id) {
4764 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4765 OpenBuffer::Strong(buffer) => {
4766 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4767 }
4768 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4769 OpenBuffer::Weak(_) => {}
4770 },
4771 hash_map::Entry::Vacant(e) => {
4772 assert!(
4773 is_remote,
4774 "received buffer update from {:?}",
4775 envelope.original_sender_id
4776 );
4777 e.insert(OpenBuffer::Loading(ops));
4778 }
4779 }
4780 Ok(())
4781 })
4782 }
4783
4784 async fn handle_update_buffer_file(
4785 this: ModelHandle<Self>,
4786 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4787 _: Arc<Client>,
4788 mut cx: AsyncAppContext,
4789 ) -> Result<()> {
4790 this.update(&mut cx, |this, cx| {
4791 let payload = envelope.payload.clone();
4792 let buffer_id = payload.buffer_id;
4793 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4794 let worktree = this
4795 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4796 .ok_or_else(|| anyhow!("no such worktree"))?;
4797 let file = File::from_proto(file, worktree.clone(), cx)?;
4798 let buffer = this
4799 .opened_buffers
4800 .get_mut(&buffer_id)
4801 .and_then(|b| b.upgrade(cx))
4802 .ok_or_else(|| anyhow!("no such buffer"))?;
4803 buffer.update(cx, |buffer, cx| {
4804 buffer.file_updated(Arc::new(file), cx).detach();
4805 });
4806 Ok(())
4807 })
4808 }
4809
4810 async fn handle_save_buffer(
4811 this: ModelHandle<Self>,
4812 envelope: TypedEnvelope<proto::SaveBuffer>,
4813 _: Arc<Client>,
4814 mut cx: AsyncAppContext,
4815 ) -> Result<proto::BufferSaved> {
4816 let buffer_id = envelope.payload.buffer_id;
4817 let requested_version = deserialize_version(envelope.payload.version);
4818
4819 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4820 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4821 let buffer = this
4822 .opened_buffers
4823 .get(&buffer_id)
4824 .and_then(|buffer| buffer.upgrade(cx))
4825 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4826 Ok::<_, anyhow::Error>((project_id, buffer))
4827 })?;
4828 buffer
4829 .update(&mut cx, |buffer, _| {
4830 buffer.wait_for_version(requested_version)
4831 })
4832 .await;
4833
4834 let (saved_version, fingerprint, mtime) =
4835 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4836 Ok(proto::BufferSaved {
4837 project_id,
4838 buffer_id,
4839 version: serialize_version(&saved_version),
4840 mtime: Some(mtime.into()),
4841 fingerprint,
4842 })
4843 }
4844
4845 async fn handle_reload_buffers(
4846 this: ModelHandle<Self>,
4847 envelope: TypedEnvelope<proto::ReloadBuffers>,
4848 _: Arc<Client>,
4849 mut cx: AsyncAppContext,
4850 ) -> Result<proto::ReloadBuffersResponse> {
4851 let sender_id = envelope.original_sender_id()?;
4852 let reload = this.update(&mut cx, |this, cx| {
4853 let mut buffers = HashSet::default();
4854 for buffer_id in &envelope.payload.buffer_ids {
4855 buffers.insert(
4856 this.opened_buffers
4857 .get(buffer_id)
4858 .and_then(|buffer| buffer.upgrade(cx))
4859 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4860 );
4861 }
4862 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4863 })?;
4864
4865 let project_transaction = reload.await?;
4866 let project_transaction = this.update(&mut cx, |this, cx| {
4867 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4868 });
4869 Ok(proto::ReloadBuffersResponse {
4870 transaction: Some(project_transaction),
4871 })
4872 }
4873
4874 async fn handle_format_buffers(
4875 this: ModelHandle<Self>,
4876 envelope: TypedEnvelope<proto::FormatBuffers>,
4877 _: Arc<Client>,
4878 mut cx: AsyncAppContext,
4879 ) -> Result<proto::FormatBuffersResponse> {
4880 let sender_id = envelope.original_sender_id()?;
4881 let format = this.update(&mut cx, |this, cx| {
4882 let mut buffers = HashSet::default();
4883 for buffer_id in &envelope.payload.buffer_ids {
4884 buffers.insert(
4885 this.opened_buffers
4886 .get(buffer_id)
4887 .and_then(|buffer| buffer.upgrade(cx))
4888 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4889 );
4890 }
4891 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4892 })?;
4893
4894 let project_transaction = format.await?;
4895 let project_transaction = this.update(&mut cx, |this, cx| {
4896 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4897 });
4898 Ok(proto::FormatBuffersResponse {
4899 transaction: Some(project_transaction),
4900 })
4901 }
4902
4903 async fn handle_get_completions(
4904 this: ModelHandle<Self>,
4905 envelope: TypedEnvelope<proto::GetCompletions>,
4906 _: Arc<Client>,
4907 mut cx: AsyncAppContext,
4908 ) -> Result<proto::GetCompletionsResponse> {
4909 let position = envelope
4910 .payload
4911 .position
4912 .and_then(language::proto::deserialize_anchor)
4913 .ok_or_else(|| anyhow!("invalid position"))?;
4914 let version = deserialize_version(envelope.payload.version);
4915 let buffer = this.read_with(&cx, |this, cx| {
4916 this.opened_buffers
4917 .get(&envelope.payload.buffer_id)
4918 .and_then(|buffer| buffer.upgrade(cx))
4919 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4920 })?;
4921 buffer
4922 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4923 .await;
4924 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4925 let completions = this
4926 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4927 .await?;
4928
4929 Ok(proto::GetCompletionsResponse {
4930 completions: completions
4931 .iter()
4932 .map(language::proto::serialize_completion)
4933 .collect(),
4934 version: serialize_version(&version),
4935 })
4936 }
4937
4938 async fn handle_apply_additional_edits_for_completion(
4939 this: ModelHandle<Self>,
4940 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4941 _: Arc<Client>,
4942 mut cx: AsyncAppContext,
4943 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4944 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4945 let buffer = this
4946 .opened_buffers
4947 .get(&envelope.payload.buffer_id)
4948 .and_then(|buffer| buffer.upgrade(cx))
4949 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4950 let language = buffer.read(cx).language();
4951 let completion = language::proto::deserialize_completion(
4952 envelope
4953 .payload
4954 .completion
4955 .ok_or_else(|| anyhow!("invalid completion"))?,
4956 language,
4957 )?;
4958 Ok::<_, anyhow::Error>(
4959 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4960 )
4961 })?;
4962
4963 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4964 transaction: apply_additional_edits
4965 .await?
4966 .as_ref()
4967 .map(language::proto::serialize_transaction),
4968 })
4969 }
4970
4971 async fn handle_get_code_actions(
4972 this: ModelHandle<Self>,
4973 envelope: TypedEnvelope<proto::GetCodeActions>,
4974 _: Arc<Client>,
4975 mut cx: AsyncAppContext,
4976 ) -> Result<proto::GetCodeActionsResponse> {
4977 let start = envelope
4978 .payload
4979 .start
4980 .and_then(language::proto::deserialize_anchor)
4981 .ok_or_else(|| anyhow!("invalid start"))?;
4982 let end = envelope
4983 .payload
4984 .end
4985 .and_then(language::proto::deserialize_anchor)
4986 .ok_or_else(|| anyhow!("invalid end"))?;
4987 let buffer = this.update(&mut cx, |this, cx| {
4988 this.opened_buffers
4989 .get(&envelope.payload.buffer_id)
4990 .and_then(|buffer| buffer.upgrade(cx))
4991 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4992 })?;
4993 buffer
4994 .update(&mut cx, |buffer, _| {
4995 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4996 })
4997 .await;
4998
4999 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5000 let code_actions = this.update(&mut cx, |this, cx| {
5001 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5002 })?;
5003
5004 Ok(proto::GetCodeActionsResponse {
5005 actions: code_actions
5006 .await?
5007 .iter()
5008 .map(language::proto::serialize_code_action)
5009 .collect(),
5010 version: serialize_version(&version),
5011 })
5012 }
5013
5014 async fn handle_apply_code_action(
5015 this: ModelHandle<Self>,
5016 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5017 _: Arc<Client>,
5018 mut cx: AsyncAppContext,
5019 ) -> Result<proto::ApplyCodeActionResponse> {
5020 let sender_id = envelope.original_sender_id()?;
5021 let action = language::proto::deserialize_code_action(
5022 envelope
5023 .payload
5024 .action
5025 .ok_or_else(|| anyhow!("invalid action"))?,
5026 )?;
5027 let apply_code_action = this.update(&mut cx, |this, cx| {
5028 let buffer = this
5029 .opened_buffers
5030 .get(&envelope.payload.buffer_id)
5031 .and_then(|buffer| buffer.upgrade(cx))
5032 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5033 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5034 })?;
5035
5036 let project_transaction = apply_code_action.await?;
5037 let project_transaction = this.update(&mut cx, |this, cx| {
5038 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5039 });
5040 Ok(proto::ApplyCodeActionResponse {
5041 transaction: Some(project_transaction),
5042 })
5043 }
5044
5045 async fn handle_lsp_command<T: LspCommand>(
5046 this: ModelHandle<Self>,
5047 envelope: TypedEnvelope<T::ProtoRequest>,
5048 _: Arc<Client>,
5049 mut cx: AsyncAppContext,
5050 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5051 where
5052 <T::LspRequest as lsp::request::Request>::Result: Send,
5053 {
5054 let sender_id = envelope.original_sender_id()?;
5055 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5056 let buffer_handle = this.read_with(&cx, |this, _| {
5057 this.opened_buffers
5058 .get(&buffer_id)
5059 .and_then(|buffer| buffer.upgrade(&cx))
5060 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5061 })?;
5062 let request = T::from_proto(
5063 envelope.payload,
5064 this.clone(),
5065 buffer_handle.clone(),
5066 cx.clone(),
5067 )
5068 .await?;
5069 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5070 let response = this
5071 .update(&mut cx, |this, cx| {
5072 this.request_lsp(buffer_handle, request, cx)
5073 })
5074 .await?;
5075 this.update(&mut cx, |this, cx| {
5076 Ok(T::response_to_proto(
5077 response,
5078 this,
5079 sender_id,
5080 &buffer_version,
5081 cx,
5082 ))
5083 })
5084 }
5085
5086 async fn handle_get_project_symbols(
5087 this: ModelHandle<Self>,
5088 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5089 _: Arc<Client>,
5090 mut cx: AsyncAppContext,
5091 ) -> Result<proto::GetProjectSymbolsResponse> {
5092 let symbols = this
5093 .update(&mut cx, |this, cx| {
5094 this.symbols(&envelope.payload.query, cx)
5095 })
5096 .await?;
5097
5098 Ok(proto::GetProjectSymbolsResponse {
5099 symbols: symbols.iter().map(serialize_symbol).collect(),
5100 })
5101 }
5102
5103 async fn handle_search_project(
5104 this: ModelHandle<Self>,
5105 envelope: TypedEnvelope<proto::SearchProject>,
5106 _: Arc<Client>,
5107 mut cx: AsyncAppContext,
5108 ) -> Result<proto::SearchProjectResponse> {
5109 let peer_id = envelope.original_sender_id()?;
5110 let query = SearchQuery::from_proto(envelope.payload)?;
5111 let result = this
5112 .update(&mut cx, |this, cx| this.search(query, cx))
5113 .await?;
5114
5115 this.update(&mut cx, |this, cx| {
5116 let mut locations = Vec::new();
5117 for (buffer, ranges) in result {
5118 for range in ranges {
5119 let start = serialize_anchor(&range.start);
5120 let end = serialize_anchor(&range.end);
5121 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5122 locations.push(proto::Location {
5123 buffer: Some(buffer),
5124 start: Some(start),
5125 end: Some(end),
5126 });
5127 }
5128 }
5129 Ok(proto::SearchProjectResponse { locations })
5130 })
5131 }
5132
5133 async fn handle_open_buffer_for_symbol(
5134 this: ModelHandle<Self>,
5135 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5136 _: Arc<Client>,
5137 mut cx: AsyncAppContext,
5138 ) -> Result<proto::OpenBufferForSymbolResponse> {
5139 let peer_id = envelope.original_sender_id()?;
5140 let symbol = envelope
5141 .payload
5142 .symbol
5143 .ok_or_else(|| anyhow!("invalid symbol"))?;
5144 let symbol = this.read_with(&cx, |this, _| {
5145 let symbol = this.deserialize_symbol(symbol)?;
5146 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5147 if signature == symbol.signature {
5148 Ok(symbol)
5149 } else {
5150 Err(anyhow!("invalid symbol signature"))
5151 }
5152 })?;
5153 let buffer = this
5154 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5155 .await?;
5156
5157 Ok(proto::OpenBufferForSymbolResponse {
5158 buffer: Some(this.update(&mut cx, |this, cx| {
5159 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5160 })),
5161 })
5162 }
5163
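    /// Hashes the worktree id and path together with this project's nonce. The
    /// signature is echoed back by guests when opening a buffer for a symbol, letting
    /// the host verify that the symbol was produced by this project.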
5164 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5165 let mut hasher = Sha256::new();
5166 hasher.update(worktree_id.to_proto().to_be_bytes());
5167 hasher.update(path.to_string_lossy().as_bytes());
5168 hasher.update(self.nonce.to_be_bytes());
5169 hasher.finalize().as_slice().try_into().unwrap()
5170 }
5171
5172 async fn handle_open_buffer_by_id(
5173 this: ModelHandle<Self>,
5174 envelope: TypedEnvelope<proto::OpenBufferById>,
5175 _: Arc<Client>,
5176 mut cx: AsyncAppContext,
5177 ) -> Result<proto::OpenBufferResponse> {
5178 let peer_id = envelope.original_sender_id()?;
5179 let buffer = this
5180 .update(&mut cx, |this, cx| {
5181 this.open_buffer_by_id(envelope.payload.id, cx)
5182 })
5183 .await?;
5184 this.update(&mut cx, |this, cx| {
5185 Ok(proto::OpenBufferResponse {
5186 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5187 })
5188 })
5189 }
5190
5191 async fn handle_open_buffer_by_path(
5192 this: ModelHandle<Self>,
5193 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5194 _: Arc<Client>,
5195 mut cx: AsyncAppContext,
5196 ) -> Result<proto::OpenBufferResponse> {
5197 let peer_id = envelope.original_sender_id()?;
5198 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5199 let open_buffer = this.update(&mut cx, |this, cx| {
5200 this.open_buffer(
5201 ProjectPath {
5202 worktree_id,
5203 path: PathBuf::from(envelope.payload.path).into(),
5204 },
5205 cx,
5206 )
5207 });
5208
5209 let buffer = open_buffer.await?;
5210 this.update(&mut cx, |this, cx| {
5211 Ok(proto::OpenBufferResponse {
5212 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5213 })
5214 })
5215 }
5216
5217 fn serialize_project_transaction_for_peer(
5218 &mut self,
5219 project_transaction: ProjectTransaction,
5220 peer_id: PeerId,
5221 cx: &AppContext,
5222 ) -> proto::ProjectTransaction {
5223 let mut serialized_transaction = proto::ProjectTransaction {
5224 buffers: Default::default(),
5225 transactions: Default::default(),
5226 };
5227 for (buffer, transaction) in project_transaction.0 {
5228 serialized_transaction
5229 .buffers
5230 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5231 serialized_transaction
5232 .transactions
5233 .push(language::proto::serialize_transaction(&transaction));
5234 }
5235 serialized_transaction
5236 }
5237
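    /// Rebuilds a `ProjectTransaction` from its wire representation, waiting for each
    /// buffer to receive the edits referenced by its transaction and optionally
    /// pushing the transactions onto the buffers' undo histories.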
5238 fn deserialize_project_transaction(
5239 &mut self,
5240 message: proto::ProjectTransaction,
5241 push_to_history: bool,
5242 cx: &mut ModelContext<Self>,
5243 ) -> Task<Result<ProjectTransaction>> {
5244 cx.spawn(|this, mut cx| async move {
5245 let mut project_transaction = ProjectTransaction::default();
5246 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5247 let buffer = this
5248 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5249 .await?;
5250 let transaction = language::proto::deserialize_transaction(transaction)?;
5251 project_transaction.0.insert(buffer, transaction);
5252 }
5253
5254 for (buffer, transaction) in &project_transaction.0 {
5255 buffer
5256 .update(&mut cx, |buffer, _| {
5257 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5258 })
5259 .await;
5260
5261 if push_to_history {
5262 buffer.update(&mut cx, |buffer, _| {
5263 buffer.push_transaction(transaction.clone(), Instant::now());
5264 });
5265 }
5266 }
5267
5268 Ok(project_transaction)
5269 })
5270 }
5271
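    /// Serializes a buffer for a peer: the full buffer state is sent the first time a
    /// given buffer is shared with that peer, and only the buffer id thereafter.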
5272 fn serialize_buffer_for_peer(
5273 &mut self,
5274 buffer: &ModelHandle<Buffer>,
5275 peer_id: PeerId,
5276 cx: &AppContext,
5277 ) -> proto::Buffer {
5278 let buffer_id = buffer.read(cx).remote_id();
5279 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5280 if shared_buffers.insert(buffer_id) {
5281 proto::Buffer {
5282 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5283 }
5284 } else {
5285 proto::Buffer {
5286 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5287 }
5288 }
5289 }
5290
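    /// Resolves a `proto::Buffer` into a local buffer handle. An `Id` variant waits
    /// until the referenced buffer has been opened locally, while a `State` variant
    /// constructs the buffer, attaches its file, and registers it with the project.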
5291 fn deserialize_buffer(
5292 &mut self,
5293 buffer: proto::Buffer,
5294 cx: &mut ModelContext<Self>,
5295 ) -> Task<Result<ModelHandle<Buffer>>> {
5296 let replica_id = self.replica_id();
5297
5298 let opened_buffer_tx = self.opened_buffer.0.clone();
5299 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5300 cx.spawn(|this, mut cx| async move {
5301 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5302 proto::buffer::Variant::Id(id) => {
5303 let buffer = loop {
5304 let buffer = this.read_with(&cx, |this, cx| {
5305 this.opened_buffers
5306 .get(&id)
5307 .and_then(|buffer| buffer.upgrade(cx))
5308 });
5309 if let Some(buffer) = buffer {
5310 break buffer;
5311 }
5312 opened_buffer_rx
5313 .next()
5314 .await
5315 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5316 };
5317 Ok(buffer)
5318 }
5319 proto::buffer::Variant::State(mut buffer) => {
5320 let mut buffer_worktree = None;
5321 let mut buffer_file = None;
5322 if let Some(file) = buffer.file.take() {
5323 this.read_with(&cx, |this, cx| {
5324 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5325 let worktree =
5326 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5327 anyhow!("no worktree found for id {}", file.worktree_id)
5328 })?;
5329 buffer_file =
5330 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5331 as Arc<dyn language::File>);
5332 buffer_worktree = Some(worktree);
5333 Ok::<_, anyhow::Error>(())
5334 })?;
5335 }
5336
5337 let buffer = cx.add_model(|cx| {
5338 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5339 });
5340
5341 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5342
5343 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5344 Ok(buffer)
5345 }
5346 }
5347 })
5348 }
5349
5350 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5351 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5352 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5353 let start = serialized_symbol
5354 .start
5355 .ok_or_else(|| anyhow!("invalid start"))?;
5356 let end = serialized_symbol
5357 .end
5358 .ok_or_else(|| anyhow!("invalid end"))?;
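        // `kind` is an `lsp::SymbolKind` that was serialized as its integer
        // representation; transmuting it back assumes the peer sent a valid
        // discriminant.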
5359 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5360 let path = PathBuf::from(serialized_symbol.path);
5361 let language = self.languages.select_language(&path);
5362 Ok(Symbol {
5363 source_worktree_id,
5364 worktree_id,
5365 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5366 label: language
5367 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5368 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5369 name: serialized_symbol.name,
5370 path,
5371 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5372 kind,
5373 signature: serialized_symbol
5374 .signature
5375 .try_into()
5376 .map_err(|_| anyhow!("invalid signature"))?,
5377 })
5378 }
5379
5380 async fn handle_buffer_saved(
5381 this: ModelHandle<Self>,
5382 envelope: TypedEnvelope<proto::BufferSaved>,
5383 _: Arc<Client>,
5384 mut cx: AsyncAppContext,
5385 ) -> Result<()> {
5386 let version = deserialize_version(envelope.payload.version);
5387 let mtime = envelope
5388 .payload
5389 .mtime
5390 .ok_or_else(|| anyhow!("missing mtime"))?
5391 .into();
5392
5393 this.update(&mut cx, |this, cx| {
5394 let buffer = this
5395 .opened_buffers
5396 .get(&envelope.payload.buffer_id)
5397 .and_then(|buffer| buffer.upgrade(cx));
5398 if let Some(buffer) = buffer {
5399 buffer.update(cx, |buffer, cx| {
5400 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5401 });
5402 }
5403 Ok(())
5404 })
5405 }
5406
5407 async fn handle_buffer_reloaded(
5408 this: ModelHandle<Self>,
5409 envelope: TypedEnvelope<proto::BufferReloaded>,
5410 _: Arc<Client>,
5411 mut cx: AsyncAppContext,
5412 ) -> Result<()> {
5413 let payload = envelope.payload.clone();
5414 let version = deserialize_version(payload.version);
5415 let mtime = payload
5416 .mtime
5417 .ok_or_else(|| anyhow!("missing mtime"))?
5418 .into();
5419 this.update(&mut cx, |this, cx| {
5420 let buffer = this
5421 .opened_buffers
5422 .get(&payload.buffer_id)
5423 .and_then(|buffer| buffer.upgrade(cx));
5424 if let Some(buffer) = buffer {
5425 buffer.update(cx, |buffer, cx| {
5426 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5427 });
5428 }
5429 Ok(())
5430 })
5431 }
5432
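    /// Fuzzy-matches `query` against the paths of all visible worktrees, including the
    /// worktree root name in each candidate when more than one worktree is open.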
5433 pub fn match_paths<'a>(
5434 &self,
5435 query: &'a str,
5436 include_ignored: bool,
5437 smart_case: bool,
5438 max_results: usize,
5439 cancel_flag: &'a AtomicBool,
5440 cx: &AppContext,
5441 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5442 let worktrees = self
5443 .worktrees(cx)
5444 .filter(|worktree| worktree.read(cx).is_visible())
5445 .collect::<Vec<_>>();
5446 let include_root_name = worktrees.len() > 1;
5447 let candidate_sets = worktrees
5448 .into_iter()
5449 .map(|worktree| CandidateSet {
5450 snapshot: worktree.read(cx).snapshot(),
5451 include_ignored,
5452 include_root_name,
5453 })
5454 .collect::<Vec<_>>();
5455
5456 let background = cx.background().clone();
5457 async move {
5458 fuzzy::match_paths(
5459 candidate_sets.as_slice(),
5460 query,
5461 smart_case,
5462 max_results,
5463 cancel_flag,
5464 background,
5465 )
5466 .await
5467 }
5468 }
5469
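    /// Converts LSP text edits into anchor-based edits against the buffer snapshot
    /// matching the server's document `version`. Adjacent or newline-separated edits
    /// are merged, and multi-line edits are diffed so that anchors in unchanged
    /// regions are preserved.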
5470 fn edits_from_lsp(
5471 &mut self,
5472 buffer: &ModelHandle<Buffer>,
5473 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5474 version: Option<i32>,
5475 cx: &mut ModelContext<Self>,
5476 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5477 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5478 cx.background().spawn(async move {
5479 let snapshot = snapshot?;
5480 let mut lsp_edits = lsp_edits
5481 .into_iter()
5482 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5483 .collect::<Vec<_>>();
5484 lsp_edits.sort_by_key(|(range, _)| range.start);
5485
5486 let mut lsp_edits = lsp_edits.into_iter().peekable();
5487 let mut edits = Vec::new();
5488 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5489 // Combine any LSP edits that are adjacent.
5490 //
5491 // Also, combine LSP edits that are separated from each other by only
5492 // a newline. This is important because for some code actions,
5493 // Rust-analyzer rewrites the entire buffer via a series of edits that
5494 // are separated by unchanged newline characters.
5495 //
5496 // In order for the diffing logic below to work properly, any edits that
5497 // cancel each other out must be combined into one.
5498 while let Some((next_range, next_text)) = lsp_edits.peek() {
5499 if next_range.start > range.end {
5500 if next_range.start.row > range.end.row + 1
5501 || next_range.start.column > 0
5502 || snapshot.clip_point_utf16(
5503 PointUtf16::new(range.end.row, u32::MAX),
5504 Bias::Left,
5505 ) > range.end
5506 {
5507 break;
5508 }
5509 new_text.push('\n');
5510 }
5511 range.end = next_range.end;
5512 new_text.push_str(&next_text);
5513 lsp_edits.next();
5514 }
5515
5516 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5517 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5518 {
5519 return Err(anyhow!("invalid edits received from language server"));
5520 }
5521
5522 // For multiline edits, perform a diff of the old and new text so that
5523 // we can identify the changes more precisely, preserving the locations
5524 // of any anchors positioned in the unchanged regions.
5525 if range.end.row > range.start.row {
5526 let mut offset = range.start.to_offset(&snapshot);
5527 let old_text = snapshot.text_for_range(range).collect::<String>();
5528
5529 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5530 let mut moved_since_edit = true;
5531 for change in diff.iter_all_changes() {
5532 let tag = change.tag();
5533 let value = change.value();
5534 match tag {
5535 ChangeTag::Equal => {
5536 offset += value.len();
5537 moved_since_edit = true;
5538 }
5539 ChangeTag::Delete => {
5540 let start = snapshot.anchor_after(offset);
5541 let end = snapshot.anchor_before(offset + value.len());
5542 if moved_since_edit {
5543 edits.push((start..end, String::new()));
5544 } else {
5545 edits.last_mut().unwrap().0.end = end;
5546 }
5547 offset += value.len();
5548 moved_since_edit = false;
5549 }
5550 ChangeTag::Insert => {
5551 if moved_since_edit {
5552 let anchor = snapshot.anchor_after(offset);
5553 edits.push((anchor.clone()..anchor, value.to_string()));
5554 } else {
5555 edits.last_mut().unwrap().1.push_str(value);
5556 }
5557 moved_since_edit = false;
5558 }
5559 }
5560 }
5561 } else if range.end == range.start {
5562 let anchor = snapshot.anchor_after(range.start);
5563 edits.push((anchor.clone()..anchor, new_text));
5564 } else {
5565 let edit_start = snapshot.anchor_after(range.start);
5566 let edit_end = snapshot.anchor_before(range.end);
5567 edits.push((edit_start..edit_end, new_text));
5568 }
5569 }
5570
5571 Ok(edits)
5572 })
5573 }
5574
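    /// Returns the buffer snapshot that was current at the given LSP document
    /// `version`, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old.
    /// When no version is given, the buffer's current text snapshot is returned.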
5575 fn buffer_snapshot_for_lsp_version(
5576 &mut self,
5577 buffer: &ModelHandle<Buffer>,
5578 version: Option<i32>,
5579 cx: &AppContext,
5580 ) -> Result<TextBufferSnapshot> {
5581 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5582
5583 if let Some(version) = version {
5584 let buffer_id = buffer.read(cx).remote_id();
5585 let snapshots = self
5586 .buffer_snapshots
5587 .get_mut(&buffer_id)
5588 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5589 let mut found_snapshot = None;
5590 snapshots.retain(|(snapshot_version, snapshot)| {
5591 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5592 false
5593 } else {
5594 if *snapshot_version == version {
5595 found_snapshot = Some(snapshot.clone());
5596 }
5597 true
5598 }
5599 });
5600
5601 found_snapshot.ok_or_else(|| {
5602 anyhow!(
5603 "snapshot not found for buffer {} at version {}",
5604 buffer_id,
5605 version
5606 )
5607 })
5608 } else {
5609             Ok(buffer.read(cx).text_snapshot())
5610 }
5611 }
5612
5613 fn language_server_for_buffer(
5614 &self,
5615 buffer: &Buffer,
5616 cx: &AppContext,
5617 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5618 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5619 let worktree_id = file.worktree_id(cx);
5620 self.language_servers
5621 .get(&(worktree_id, language.lsp_adapter()?.name()))
5622 } else {
5623 None
5624 }
5625 }
5626}
5627
5628impl ProjectStore {
5629 pub fn new(db: Arc<Db>) -> Self {
5630 Self {
5631 db,
5632 projects: Default::default(),
5633 }
5634 }
5635
5636 pub fn projects<'a>(
5637 &'a self,
5638 cx: &'a AppContext,
5639 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5640 self.projects
5641 .iter()
5642 .filter_map(|project| project.upgrade(cx))
5643 }
5644
5645 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5646 if let Err(ix) = self
5647 .projects
5648 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5649 {
5650 self.projects.insert(ix, project);
5651 }
5652 cx.notify();
5653 }
5654
5655 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5656 let mut did_change = false;
5657 self.projects.retain(|project| {
5658 if project.is_upgradable(cx) {
5659 true
5660 } else {
5661 did_change = true;
5662 false
5663 }
5664 });
5665 if did_change {
5666 cx.notify();
5667 }
5668 }
5669}
5670
5671impl WorktreeHandle {
5672 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5673 match self {
5674 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5675 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5676 }
5677 }
5678}
5679
5680impl OpenBuffer {
5681 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5682 match self {
5683 OpenBuffer::Strong(handle) => Some(handle.clone()),
5684 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5685 OpenBuffer::Loading(_) => None,
5686 }
5687 }
5688}
5689
5690struct CandidateSet {
5691 snapshot: Snapshot,
5692 include_ignored: bool,
5693 include_root_name: bool,
5694}
5695
5696impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5697 type Candidates = CandidateSetIter<'a>;
5698
5699 fn id(&self) -> usize {
5700 self.snapshot.id().to_usize()
5701 }
5702
5703 fn len(&self) -> usize {
5704 if self.include_ignored {
5705 self.snapshot.file_count()
5706 } else {
5707 self.snapshot.visible_file_count()
5708 }
5709 }
5710
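    // For single-file worktrees the root name is the file itself; otherwise
    // the root name is prepended as a directory prefix only when requested.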
5711 fn prefix(&self) -> Arc<str> {
5712 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5713 self.snapshot.root_name().into()
5714 } else if self.include_root_name {
5715 format!("{}/", self.snapshot.root_name()).into()
5716 } else {
5717 "".into()
5718 }
5719 }
5720
5721 fn candidates(&'a self, start: usize) -> Self::Candidates {
5722 CandidateSetIter {
5723 traversal: self.snapshot.files(self.include_ignored, start),
5724 }
5725 }
5726}
5727
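/// Iterates over the file entries of a worktree snapshot, yielding one fuzzy
/// match candidate per file. The traversal only visits files, so non-file
/// entries are unreachable here.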
5728struct CandidateSetIter<'a> {
5729 traversal: Traversal<'a>,
5730}
5731
5732impl<'a> Iterator for CandidateSetIter<'a> {
5733 type Item = PathMatchCandidate<'a>;
5734
5735 fn next(&mut self) -> Option<Self::Item> {
5736 self.traversal.next().map(|entry| {
5737 if let EntryKind::File(char_bag) = entry.kind {
5738 PathMatchCandidate {
5739 path: &entry.path,
5740 char_bag,
5741 }
5742 } else {
5743 unreachable!()
5744 }
5745 })
5746 }
5747}
5748
5749impl Entity for ProjectStore {
5750 type Event = ();
5751}
5752
5753impl Entity for Project {
5754 type Event = Event;
5755
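    // When the project model is released, prune it from the project store and
    // tell the server either to unregister it (if it is a local project that
    // was registered) or that this client is leaving it (if it is remote).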
5756 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5757 self.project_store.update(cx, ProjectStore::prune_projects);
5758
5759 match &self.client_state {
5760 ProjectClientState::Local { remote_id_rx, .. } => {
5761 if let Some(project_id) = *remote_id_rx.borrow() {
5762 self.client
5763 .send(proto::UnregisterProject { project_id })
5764 .log_err();
5765 }
5766 }
5767 ProjectClientState::Remote { remote_id, .. } => {
5768 self.client
5769 .send(proto::LeaveProject {
5770 project_id: *remote_id,
5771 })
5772 .log_err();
5773 }
5774 }
5775 }
5776
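    // Before the app quits, shut down all running language servers and wait
    // for their shutdown requests to complete.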
5777 fn app_will_quit(
5778 &mut self,
5779 _: &mut MutableAppContext,
5780 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5781 let shutdown_futures = self
5782 .language_servers
5783 .drain()
5784 .filter_map(|(_, (_, server))| server.shutdown())
5785 .collect::<Vec<_>>();
5786 Some(
5787 async move {
5788 futures::future::join_all(shutdown_futures).await;
5789 }
5790 .boxed(),
5791 )
5792 }
5793}
5794
5795impl Collaborator {
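    /// Builds a `Collaborator` from its protobuf representation, fetching the
    /// collaborating user from the `UserStore` asynchronously.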
5796 fn from_proto(
5797 message: proto::Collaborator,
5798 user_store: &ModelHandle<UserStore>,
5799 cx: &mut AsyncAppContext,
5800 ) -> impl Future<Output = Result<Self>> {
5801 let user = user_store.update(cx, |user_store, cx| {
5802 user_store.fetch_user(message.user_id, cx)
5803 });
5804
5805 async move {
5806 Ok(Self {
5807 peer_id: PeerId(message.peer_id),
5808 user: user.await?,
5809 replica_id: message.replica_id as ReplicaId,
5810 })
5811 }
5812 }
5813}
5814
5815impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5816 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5817 Self {
5818 worktree_id,
5819 path: path.as_ref().into(),
5820 }
5821 }
5822}
5823
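// The LSP workspace-edit file-operation options carry optional flags. The
// conversions below map them onto the corresponding filesystem options,
// treating a missing flag as `false`.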
5824impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5825 fn from(options: lsp::CreateFileOptions) -> Self {
5826 Self {
5827 overwrite: options.overwrite.unwrap_or(false),
5828 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5829 }
5830 }
5831}
5832
5833impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5834 fn from(options: lsp::RenameFileOptions) -> Self {
5835 Self {
5836 overwrite: options.overwrite.unwrap_or(false),
5837 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5838 }
5839 }
5840}
5841
5842impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5843 fn from(options: lsp::DeleteFileOptions) -> Self {
5844 Self {
5845 recursive: options.recursive.unwrap_or(false),
5846 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5847 }
5848 }
5849}
5850
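/// Converts a project `Symbol` into its protobuf representation so it can be
/// sent over the wire in RPC messages.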
5851fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5852 proto::Symbol {
5853 source_worktree_id: symbol.source_worktree_id.to_proto(),
5854 worktree_id: symbol.worktree_id.to_proto(),
5855 language_server_name: symbol.language_server_name.0.to_string(),
5856 name: symbol.name.clone(),
5857 kind: unsafe { mem::transmute(symbol.kind) },
5858 path: symbol.path.to_string_lossy().to_string(),
5859 start: Some(proto::Point {
5860 row: symbol.range.start.row,
5861 column: symbol.range.start.column,
5862 }),
5863 end: Some(proto::Point {
5864 row: symbol.range.end.row,
5865 column: symbol.range.end.column,
5866 }),
5867 signature: symbol.signature.to_vec(),
5868 }
5869}
5870
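/// Computes the path to `path` relative to `base` by stripping their common
/// prefix and emitting a `..` component for each remaining component of
/// `base`. For example, relativizing `/a/b/c` against `/a/d` yields `../b/c`.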
5871fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5872 let mut path_components = path.components();
5873 let mut base_components = base.components();
5874 let mut components: Vec<Component> = Vec::new();
5875 loop {
5876 match (path_components.next(), base_components.next()) {
5877 (None, None) => break,
5878 (Some(a), None) => {
5879 components.push(a);
5880 components.extend(path_components.by_ref());
5881 break;
5882 }
5883 (None, _) => components.push(Component::ParentDir),
5884 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5885 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5886 (Some(a), Some(_)) => {
5887 components.push(Component::ParentDir);
5888 for _ in base_components {
5889 components.push(Component::ParentDir);
5890 }
5891 components.push(a);
5892 components.extend(path_components.by_ref());
5893 break;
5894 }
5895 }
5896 }
5897 components.iter().map(|c| c.as_os_str()).collect()
5898}
5899
5900impl Item for Buffer {
5901 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5902 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5903 }
5904}
5905
5906#[cfg(test)]
5907mod tests {
5908 use crate::worktree::WorktreeHandle;
5909
5910 use super::{Event, *};
5911 use fs::RealFs;
5912 use futures::{future, StreamExt};
5913 use gpui::{executor::Deterministic, test::subscribe};
5914 use language::{
5915 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5916 OffsetRangeExt, Point, ToPoint,
5917 };
5918 use lsp::Url;
5919 use serde_json::json;
5920 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5921 use unindent::Unindent as _;
5922 use util::{assert_set_eq, test::temp_tree};
5923
5924 #[gpui::test]
5925 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5926 let dir = temp_tree(json!({
5927 "root": {
5928 "apple": "",
5929 "banana": {
5930 "carrot": {
5931 "date": "",
5932 "endive": "",
5933 }
5934 },
5935 "fennel": {
5936 "grape": "",
5937 }
5938 }
5939 }));
5940
5941 let root_link_path = dir.path().join("root_link");
5942 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5943 unix::fs::symlink(
5944 &dir.path().join("root/fennel"),
5945 &dir.path().join("root/finnochio"),
5946 )
5947 .unwrap();
5948
5949 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5950
5951 project.read_with(cx, |project, cx| {
5952 let tree = project.worktrees(cx).next().unwrap().read(cx);
5953 assert_eq!(tree.file_count(), 5);
5954 assert_eq!(
5955 tree.inode_for_path("fennel/grape"),
5956 tree.inode_for_path("finnochio/grape")
5957 );
5958 });
5959
5960 let cancel_flag = Default::default();
5961 let results = project
5962 .read_with(cx, |project, cx| {
5963 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5964 })
5965 .await;
5966 assert_eq!(
5967 results
5968 .into_iter()
5969 .map(|result| result.path)
5970 .collect::<Vec<Arc<Path>>>(),
5971 vec![
5972 PathBuf::from("banana/carrot/date").into(),
5973 PathBuf::from("banana/carrot/endive").into(),
5974 ]
5975 );
5976 }
5977
5978 #[gpui::test]
5979 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5980 cx.foreground().forbid_parking();
5981
5982 let mut rust_language = Language::new(
5983 LanguageConfig {
5984 name: "Rust".into(),
5985 path_suffixes: vec!["rs".to_string()],
5986 ..Default::default()
5987 },
5988 Some(tree_sitter_rust::language()),
5989 );
5990 let mut json_language = Language::new(
5991 LanguageConfig {
5992 name: "JSON".into(),
5993 path_suffixes: vec!["json".to_string()],
5994 ..Default::default()
5995 },
5996 None,
5997 );
5998 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5999 name: "the-rust-language-server",
6000 capabilities: lsp::ServerCapabilities {
6001 completion_provider: Some(lsp::CompletionOptions {
6002 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6003 ..Default::default()
6004 }),
6005 ..Default::default()
6006 },
6007 ..Default::default()
6008 });
6009 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6010 name: "the-json-language-server",
6011 capabilities: lsp::ServerCapabilities {
6012 completion_provider: Some(lsp::CompletionOptions {
6013 trigger_characters: Some(vec![":".to_string()]),
6014 ..Default::default()
6015 }),
6016 ..Default::default()
6017 },
6018 ..Default::default()
6019 });
6020
6021 let fs = FakeFs::new(cx.background());
6022 fs.insert_tree(
6023 "/the-root",
6024 json!({
6025 "test.rs": "const A: i32 = 1;",
6026 "test2.rs": "",
6027 "Cargo.toml": "a = 1",
6028 "package.json": "{\"a\": 1}",
6029 }),
6030 )
6031 .await;
6032
6033 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6034 project.update(cx, |project, _| {
6035 project.languages.add(Arc::new(rust_language));
6036 project.languages.add(Arc::new(json_language));
6037 });
6038
6039 // Open a buffer without an associated language server.
6040 let toml_buffer = project
6041 .update(cx, |project, cx| {
6042 project.open_local_buffer("/the-root/Cargo.toml", cx)
6043 })
6044 .await
6045 .unwrap();
6046
6047 // Open a buffer with an associated language server.
6048 let rust_buffer = project
6049 .update(cx, |project, cx| {
6050 project.open_local_buffer("/the-root/test.rs", cx)
6051 })
6052 .await
6053 .unwrap();
6054
6055 // A server is started up, and it is notified about Rust files.
6056 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6057 assert_eq!(
6058 fake_rust_server
6059 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6060 .await
6061 .text_document,
6062 lsp::TextDocumentItem {
6063 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6064 version: 0,
6065 text: "const A: i32 = 1;".to_string(),
6066 language_id: Default::default()
6067 }
6068 );
6069
6070 // The buffer is configured based on the language server's capabilities.
6071 rust_buffer.read_with(cx, |buffer, _| {
6072 assert_eq!(
6073 buffer.completion_triggers(),
6074 &[".".to_string(), "::".to_string()]
6075 );
6076 });
6077 toml_buffer.read_with(cx, |buffer, _| {
6078 assert!(buffer.completion_triggers().is_empty());
6079 });
6080
6081 // Edit a buffer. The changes are reported to the language server.
6082 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6083 assert_eq!(
6084 fake_rust_server
6085 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6086 .await
6087 .text_document,
6088 lsp::VersionedTextDocumentIdentifier::new(
6089 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6090 1
6091 )
6092 );
6093
6094 // Open a third buffer with a different associated language server.
6095 let json_buffer = project
6096 .update(cx, |project, cx| {
6097 project.open_local_buffer("/the-root/package.json", cx)
6098 })
6099 .await
6100 .unwrap();
6101
6102        // A JSON language server is started up and is notified only about the JSON buffer.
6103 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6104 assert_eq!(
6105 fake_json_server
6106 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6107 .await
6108 .text_document,
6109 lsp::TextDocumentItem {
6110 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6111 version: 0,
6112 text: "{\"a\": 1}".to_string(),
6113 language_id: Default::default()
6114 }
6115 );
6116
6117 // This buffer is configured based on the second language server's
6118 // capabilities.
6119 json_buffer.read_with(cx, |buffer, _| {
6120 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6121 });
6122
6123 // When opening another buffer whose language server is already running,
6124 // it is also configured based on the existing language server's capabilities.
6125 let rust_buffer2 = project
6126 .update(cx, |project, cx| {
6127 project.open_local_buffer("/the-root/test2.rs", cx)
6128 })
6129 .await
6130 .unwrap();
6131 rust_buffer2.read_with(cx, |buffer, _| {
6132 assert_eq!(
6133 buffer.completion_triggers(),
6134 &[".".to_string(), "::".to_string()]
6135 );
6136 });
6137
6138 // Changes are reported only to servers matching the buffer's language.
6139 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6140 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6141 assert_eq!(
6142 fake_rust_server
6143 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6144 .await
6145 .text_document,
6146 lsp::VersionedTextDocumentIdentifier::new(
6147 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6148 1
6149 )
6150 );
6151
6152 // Save notifications are reported to all servers.
6153 toml_buffer
6154 .update(cx, |buffer, cx| buffer.save(cx))
6155 .await
6156 .unwrap();
6157 assert_eq!(
6158 fake_rust_server
6159 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6160 .await
6161 .text_document,
6162 lsp::TextDocumentIdentifier::new(
6163 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6164 )
6165 );
6166 assert_eq!(
6167 fake_json_server
6168 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6169 .await
6170 .text_document,
6171 lsp::TextDocumentIdentifier::new(
6172 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6173 )
6174 );
6175
6176 // Renames are reported only to servers matching the buffer's language.
6177 fs.rename(
6178 Path::new("/the-root/test2.rs"),
6179 Path::new("/the-root/test3.rs"),
6180 Default::default(),
6181 )
6182 .await
6183 .unwrap();
6184 assert_eq!(
6185 fake_rust_server
6186 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6187 .await
6188 .text_document,
6189 lsp::TextDocumentIdentifier::new(
6190 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6191 ),
6192 );
6193 assert_eq!(
6194 fake_rust_server
6195 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6196 .await
6197 .text_document,
6198 lsp::TextDocumentItem {
6199 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6200 version: 0,
6201 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6202 language_id: Default::default()
6203 },
6204 );
6205
6206 rust_buffer2.update(cx, |buffer, cx| {
6207 buffer.update_diagnostics(
6208 DiagnosticSet::from_sorted_entries(
6209 vec![DiagnosticEntry {
6210 diagnostic: Default::default(),
6211 range: Anchor::MIN..Anchor::MAX,
6212 }],
6213 &buffer.snapshot(),
6214 ),
6215 cx,
6216 );
6217 assert_eq!(
6218 buffer
6219 .snapshot()
6220 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6221 .count(),
6222 1
6223 );
6224 });
6225
6226 // When the rename changes the extension of the file, the buffer gets closed on the old
6227 // language server and gets opened on the new one.
6228 fs.rename(
6229 Path::new("/the-root/test3.rs"),
6230 Path::new("/the-root/test3.json"),
6231 Default::default(),
6232 )
6233 .await
6234 .unwrap();
6235 assert_eq!(
6236 fake_rust_server
6237 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6238 .await
6239 .text_document,
6240 lsp::TextDocumentIdentifier::new(
6241 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6242 ),
6243 );
6244 assert_eq!(
6245 fake_json_server
6246 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6247 .await
6248 .text_document,
6249 lsp::TextDocumentItem {
6250 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6251 version: 0,
6252 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6253 language_id: Default::default()
6254 },
6255 );
6256
6257 // We clear the diagnostics, since the language has changed.
6258 rust_buffer2.read_with(cx, |buffer, _| {
6259 assert_eq!(
6260 buffer
6261 .snapshot()
6262 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6263 .count(),
6264 0
6265 );
6266 });
6267
6268 // The renamed file's version resets after changing language server.
6269 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6270 assert_eq!(
6271 fake_json_server
6272 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6273 .await
6274 .text_document,
6275 lsp::VersionedTextDocumentIdentifier::new(
6276 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6277 1
6278 )
6279 );
6280
6281 // Restart language servers
6282 project.update(cx, |project, cx| {
6283 project.restart_language_servers_for_buffers(
6284 vec![rust_buffer.clone(), json_buffer.clone()],
6285 cx,
6286 );
6287 });
6288
6289 let mut rust_shutdown_requests = fake_rust_server
6290 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6291 let mut json_shutdown_requests = fake_json_server
6292 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6293 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6294
6295 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6296 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6297
6298        // Ensure the Rust document is reopened in the new Rust language server
6299 assert_eq!(
6300 fake_rust_server
6301 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6302 .await
6303 .text_document,
6304 lsp::TextDocumentItem {
6305 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6306 version: 1,
6307 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6308 language_id: Default::default()
6309 }
6310 );
6311
6312        // Ensure the JSON documents are reopened in the new JSON language server
6313 assert_set_eq!(
6314 [
6315 fake_json_server
6316 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6317 .await
6318 .text_document,
6319 fake_json_server
6320 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6321 .await
6322 .text_document,
6323 ],
6324 [
6325 lsp::TextDocumentItem {
6326 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6327 version: 0,
6328 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6329 language_id: Default::default()
6330 },
6331 lsp::TextDocumentItem {
6332 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6333 version: 1,
6334 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6335 language_id: Default::default()
6336 }
6337 ]
6338 );
6339
6340 // Close notifications are reported only to servers matching the buffer's language.
6341 cx.update(|_| drop(json_buffer));
6342 let close_message = lsp::DidCloseTextDocumentParams {
6343 text_document: lsp::TextDocumentIdentifier::new(
6344 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6345 ),
6346 };
6347 assert_eq!(
6348 fake_json_server
6349 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6350 .await,
6351 close_message,
6352 );
6353 }
6354
6355 #[gpui::test]
6356 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6357 cx.foreground().forbid_parking();
6358
6359 let fs = FakeFs::new(cx.background());
6360 fs.insert_tree(
6361 "/dir",
6362 json!({
6363 "a.rs": "let a = 1;",
6364 "b.rs": "let b = 2;"
6365 }),
6366 )
6367 .await;
6368
6369 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6370
6371 let buffer_a = project
6372 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6373 .await
6374 .unwrap();
6375 let buffer_b = project
6376 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6377 .await
6378 .unwrap();
6379
6380 project.update(cx, |project, cx| {
6381 project
6382 .update_diagnostics(
6383 0,
6384 lsp::PublishDiagnosticsParams {
6385 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6386 version: None,
6387 diagnostics: vec![lsp::Diagnostic {
6388 range: lsp::Range::new(
6389 lsp::Position::new(0, 4),
6390 lsp::Position::new(0, 5),
6391 ),
6392 severity: Some(lsp::DiagnosticSeverity::ERROR),
6393 message: "error 1".to_string(),
6394 ..Default::default()
6395 }],
6396 },
6397 &[],
6398 cx,
6399 )
6400 .unwrap();
6401 project
6402 .update_diagnostics(
6403 0,
6404 lsp::PublishDiagnosticsParams {
6405 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6406 version: None,
6407 diagnostics: vec![lsp::Diagnostic {
6408 range: lsp::Range::new(
6409 lsp::Position::new(0, 4),
6410 lsp::Position::new(0, 5),
6411 ),
6412 severity: Some(lsp::DiagnosticSeverity::WARNING),
6413 message: "error 2".to_string(),
6414 ..Default::default()
6415 }],
6416 },
6417 &[],
6418 cx,
6419 )
6420 .unwrap();
6421 });
6422
6423 buffer_a.read_with(cx, |buffer, _| {
6424 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6425 assert_eq!(
6426 chunks
6427 .iter()
6428 .map(|(s, d)| (s.as_str(), *d))
6429 .collect::<Vec<_>>(),
6430 &[
6431 ("let ", None),
6432 ("a", Some(DiagnosticSeverity::ERROR)),
6433 (" = 1;", None),
6434 ]
6435 );
6436 });
6437 buffer_b.read_with(cx, |buffer, _| {
6438 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6439 assert_eq!(
6440 chunks
6441 .iter()
6442 .map(|(s, d)| (s.as_str(), *d))
6443 .collect::<Vec<_>>(),
6444 &[
6445 ("let ", None),
6446 ("b", Some(DiagnosticSeverity::WARNING)),
6447 (" = 2;", None),
6448 ]
6449 );
6450 });
6451 }
6452
6453 #[gpui::test]
6454 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6455 cx.foreground().forbid_parking();
6456
6457 let fs = FakeFs::new(cx.background());
6458 fs.insert_tree(
6459 "/root",
6460 json!({
6461 "dir": {
6462 "a.rs": "let a = 1;",
6463 },
6464 "other.rs": "let b = c;"
6465 }),
6466 )
6467 .await;
6468
6469 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6470
6471 let (worktree, _) = project
6472 .update(cx, |project, cx| {
6473 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6474 })
6475 .await
6476 .unwrap();
6477 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6478
6479 project.update(cx, |project, cx| {
6480 project
6481 .update_diagnostics(
6482 0,
6483 lsp::PublishDiagnosticsParams {
6484 uri: Url::from_file_path("/root/other.rs").unwrap(),
6485 version: None,
6486 diagnostics: vec![lsp::Diagnostic {
6487 range: lsp::Range::new(
6488 lsp::Position::new(0, 8),
6489 lsp::Position::new(0, 9),
6490 ),
6491 severity: Some(lsp::DiagnosticSeverity::ERROR),
6492 message: "unknown variable 'c'".to_string(),
6493 ..Default::default()
6494 }],
6495 },
6496 &[],
6497 cx,
6498 )
6499 .unwrap();
6500 });
6501
6502 let buffer = project
6503 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6504 .await
6505 .unwrap();
6506 buffer.read_with(cx, |buffer, _| {
6507 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6508 assert_eq!(
6509 chunks
6510 .iter()
6511 .map(|(s, d)| (s.as_str(), *d))
6512 .collect::<Vec<_>>(),
6513 &[
6514 ("let b = ", None),
6515 ("c", Some(DiagnosticSeverity::ERROR)),
6516 (";", None),
6517 ]
6518 );
6519 });
6520
6521 project.read_with(cx, |project, cx| {
6522 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6523 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6524 });
6525 }
6526
6527 #[gpui::test]
6528 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6529 cx.foreground().forbid_parking();
6530
6531 let progress_token = "the-progress-token";
6532 let mut language = Language::new(
6533 LanguageConfig {
6534 name: "Rust".into(),
6535 path_suffixes: vec!["rs".to_string()],
6536 ..Default::default()
6537 },
6538 Some(tree_sitter_rust::language()),
6539 );
6540 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6541 disk_based_diagnostics_progress_token: Some(progress_token),
6542 disk_based_diagnostics_sources: &["disk"],
6543 ..Default::default()
6544 });
6545
6546 let fs = FakeFs::new(cx.background());
6547 fs.insert_tree(
6548 "/dir",
6549 json!({
6550 "a.rs": "fn a() { A }",
6551 "b.rs": "const y: i32 = 1",
6552 }),
6553 )
6554 .await;
6555
6556 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6557 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6558 let worktree_id =
6559 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6560
6561        // Cause the worktree to start the fake language server
6562 let _buffer = project
6563 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6564 .await
6565 .unwrap();
6566
6567 let mut events = subscribe(&project, cx);
6568
6569 let fake_server = fake_servers.next().await.unwrap();
6570 fake_server.start_progress(progress_token).await;
6571 assert_eq!(
6572 events.next().await.unwrap(),
6573 Event::DiskBasedDiagnosticsStarted {
6574 language_server_id: 0,
6575 }
6576 );
6577
6578 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6579 lsp::PublishDiagnosticsParams {
6580 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6581 version: None,
6582 diagnostics: vec![lsp::Diagnostic {
6583 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6584 severity: Some(lsp::DiagnosticSeverity::ERROR),
6585 message: "undefined variable 'A'".to_string(),
6586 ..Default::default()
6587 }],
6588 },
6589 );
6590 assert_eq!(
6591 events.next().await.unwrap(),
6592 Event::DiagnosticsUpdated {
6593 language_server_id: 0,
6594 path: (worktree_id, Path::new("a.rs")).into()
6595 }
6596 );
6597
6598 fake_server.end_progress(progress_token);
6599 assert_eq!(
6600 events.next().await.unwrap(),
6601 Event::DiskBasedDiagnosticsFinished {
6602 language_server_id: 0
6603 }
6604 );
6605
6606 let buffer = project
6607 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6608 .await
6609 .unwrap();
6610
6611 buffer.read_with(cx, |buffer, _| {
6612 let snapshot = buffer.snapshot();
6613 let diagnostics = snapshot
6614 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6615 .collect::<Vec<_>>();
6616 assert_eq!(
6617 diagnostics,
6618 &[DiagnosticEntry {
6619 range: Point::new(0, 9)..Point::new(0, 10),
6620 diagnostic: Diagnostic {
6621 severity: lsp::DiagnosticSeverity::ERROR,
6622 message: "undefined variable 'A'".to_string(),
6623 group_id: 0,
6624 is_primary: true,
6625 ..Default::default()
6626 }
6627 }]
6628 )
6629 });
6630
6631 // Ensure publishing empty diagnostics twice only results in one update event.
6632 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6633 lsp::PublishDiagnosticsParams {
6634 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6635 version: None,
6636 diagnostics: Default::default(),
6637 },
6638 );
6639 assert_eq!(
6640 events.next().await.unwrap(),
6641 Event::DiagnosticsUpdated {
6642 language_server_id: 0,
6643 path: (worktree_id, Path::new("a.rs")).into()
6644 }
6645 );
6646
6647 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6648 lsp::PublishDiagnosticsParams {
6649 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6650 version: None,
6651 diagnostics: Default::default(),
6652 },
6653 );
6654 cx.foreground().run_until_parked();
6655 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6656 }
6657
6658 #[gpui::test]
6659 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6660 cx.foreground().forbid_parking();
6661
6662 let progress_token = "the-progress-token";
6663 let mut language = Language::new(
6664 LanguageConfig {
6665 path_suffixes: vec!["rs".to_string()],
6666 ..Default::default()
6667 },
6668 None,
6669 );
6670 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6671 disk_based_diagnostics_sources: &["disk"],
6672 disk_based_diagnostics_progress_token: Some(progress_token),
6673 ..Default::default()
6674 });
6675
6676 let fs = FakeFs::new(cx.background());
6677 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6678
6679 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6680 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6681
6682 let buffer = project
6683 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6684 .await
6685 .unwrap();
6686
6687 // Simulate diagnostics starting to update.
6688 let fake_server = fake_servers.next().await.unwrap();
6689 fake_server.start_progress(progress_token).await;
6690
6691 // Restart the server before the diagnostics finish updating.
6692 project.update(cx, |project, cx| {
6693 project.restart_language_servers_for_buffers([buffer], cx);
6694 });
6695 let mut events = subscribe(&project, cx);
6696
6697 // Simulate the newly started server sending more diagnostics.
6698 let fake_server = fake_servers.next().await.unwrap();
6699 fake_server.start_progress(progress_token).await;
6700 assert_eq!(
6701 events.next().await.unwrap(),
6702 Event::DiskBasedDiagnosticsStarted {
6703 language_server_id: 1
6704 }
6705 );
6706 project.read_with(cx, |project, _| {
6707 assert_eq!(
6708 project
6709 .language_servers_running_disk_based_diagnostics()
6710 .collect::<Vec<_>>(),
6711 [1]
6712 );
6713 });
6714
6715 // All diagnostics are considered done, despite the old server's diagnostic
6716 // task never completing.
6717 fake_server.end_progress(progress_token);
6718 assert_eq!(
6719 events.next().await.unwrap(),
6720 Event::DiskBasedDiagnosticsFinished {
6721 language_server_id: 1
6722 }
6723 );
6724 project.read_with(cx, |project, _| {
6725 assert_eq!(
6726 project
6727 .language_servers_running_disk_based_diagnostics()
6728 .collect::<Vec<_>>(),
6729 [0; 0]
6730 );
6731 });
6732 }
6733
6734 #[gpui::test]
6735 async fn test_toggling_enable_language_server(
6736 deterministic: Arc<Deterministic>,
6737 cx: &mut gpui::TestAppContext,
6738 ) {
6739 deterministic.forbid_parking();
6740
6741 let mut rust = Language::new(
6742 LanguageConfig {
6743 name: Arc::from("Rust"),
6744 path_suffixes: vec!["rs".to_string()],
6745 ..Default::default()
6746 },
6747 None,
6748 );
6749 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6750 name: "rust-lsp",
6751 ..Default::default()
6752 });
6753 let mut js = Language::new(
6754 LanguageConfig {
6755 name: Arc::from("JavaScript"),
6756 path_suffixes: vec!["js".to_string()],
6757 ..Default::default()
6758 },
6759 None,
6760 );
6761 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6762 name: "js-lsp",
6763 ..Default::default()
6764 });
6765
6766 let fs = FakeFs::new(cx.background());
6767 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6768 .await;
6769
6770 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6771 project.update(cx, |project, _| {
6772 project.languages.add(Arc::new(rust));
6773 project.languages.add(Arc::new(js));
6774 });
6775
6776 let _rs_buffer = project
6777 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6778 .await
6779 .unwrap();
6780 let _js_buffer = project
6781 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6782 .await
6783 .unwrap();
6784
6785 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6786 assert_eq!(
6787 fake_rust_server_1
6788 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6789 .await
6790 .text_document
6791 .uri
6792 .as_str(),
6793 "file:///dir/a.rs"
6794 );
6795
6796 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6797 assert_eq!(
6798 fake_js_server
6799 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6800 .await
6801 .text_document
6802 .uri
6803 .as_str(),
6804 "file:///dir/b.js"
6805 );
6806
6807 // Disable Rust language server, ensuring only that server gets stopped.
6808 cx.update(|cx| {
6809 cx.update_global(|settings: &mut Settings, _| {
6810 settings.language_overrides.insert(
6811 Arc::from("Rust"),
6812 settings::LanguageSettings {
6813 enable_language_server: Some(false),
6814 ..Default::default()
6815 },
6816 );
6817 })
6818 });
6819 fake_rust_server_1
6820 .receive_notification::<lsp::notification::Exit>()
6821 .await;
6822
6823 // Enable Rust and disable JavaScript language servers, ensuring that the
6824 // former gets started again and that the latter stops.
6825 cx.update(|cx| {
6826 cx.update_global(|settings: &mut Settings, _| {
6827 settings.language_overrides.insert(
6828 Arc::from("Rust"),
6829 settings::LanguageSettings {
6830 enable_language_server: Some(true),
6831 ..Default::default()
6832 },
6833 );
6834 settings.language_overrides.insert(
6835 Arc::from("JavaScript"),
6836 settings::LanguageSettings {
6837 enable_language_server: Some(false),
6838 ..Default::default()
6839 },
6840 );
6841 })
6842 });
6843 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6844 assert_eq!(
6845 fake_rust_server_2
6846 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6847 .await
6848 .text_document
6849 .uri
6850 .as_str(),
6851 "file:///dir/a.rs"
6852 );
6853 fake_js_server
6854 .receive_notification::<lsp::notification::Exit>()
6855 .await;
6856 }
6857
6858 #[gpui::test]
6859 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6860 cx.foreground().forbid_parking();
6861
6862 let mut language = Language::new(
6863 LanguageConfig {
6864 name: "Rust".into(),
6865 path_suffixes: vec!["rs".to_string()],
6866 ..Default::default()
6867 },
6868 Some(tree_sitter_rust::language()),
6869 );
6870 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6871 disk_based_diagnostics_sources: &["disk"],
6872 ..Default::default()
6873 });
6874
6875 let text = "
6876 fn a() { A }
6877 fn b() { BB }
6878 fn c() { CCC }
6879 "
6880 .unindent();
6881
6882 let fs = FakeFs::new(cx.background());
6883 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6884
6885 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6886 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6887
6888 let buffer = project
6889 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6890 .await
6891 .unwrap();
6892
6893 let mut fake_server = fake_servers.next().await.unwrap();
6894 let open_notification = fake_server
6895 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6896 .await;
6897
6898 // Edit the buffer, moving the content down
6899 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6900 let change_notification_1 = fake_server
6901 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6902 .await;
6903 assert!(
6904 change_notification_1.text_document.version > open_notification.text_document.version
6905 );
6906
6907 // Report some diagnostics for the initial version of the buffer
6908 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6909 lsp::PublishDiagnosticsParams {
6910 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6911 version: Some(open_notification.text_document.version),
6912 diagnostics: vec![
6913 lsp::Diagnostic {
6914 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6915 severity: Some(DiagnosticSeverity::ERROR),
6916 message: "undefined variable 'A'".to_string(),
6917 source: Some("disk".to_string()),
6918 ..Default::default()
6919 },
6920 lsp::Diagnostic {
6921 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6922 severity: Some(DiagnosticSeverity::ERROR),
6923 message: "undefined variable 'BB'".to_string(),
6924 source: Some("disk".to_string()),
6925 ..Default::default()
6926 },
6927 lsp::Diagnostic {
6928 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6929 severity: Some(DiagnosticSeverity::ERROR),
6930 source: Some("disk".to_string()),
6931 message: "undefined variable 'CCC'".to_string(),
6932 ..Default::default()
6933 },
6934 ],
6935 },
6936 );
6937
6938 // The diagnostics have moved down since they were created.
6939 buffer.next_notification(cx).await;
6940 buffer.read_with(cx, |buffer, _| {
6941 assert_eq!(
6942 buffer
6943 .snapshot()
6944 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6945 .collect::<Vec<_>>(),
6946 &[
6947 DiagnosticEntry {
6948 range: Point::new(3, 9)..Point::new(3, 11),
6949 diagnostic: Diagnostic {
6950 severity: DiagnosticSeverity::ERROR,
6951 message: "undefined variable 'BB'".to_string(),
6952 is_disk_based: true,
6953 group_id: 1,
6954 is_primary: true,
6955 ..Default::default()
6956 },
6957 },
6958 DiagnosticEntry {
6959 range: Point::new(4, 9)..Point::new(4, 12),
6960 diagnostic: Diagnostic {
6961 severity: DiagnosticSeverity::ERROR,
6962 message: "undefined variable 'CCC'".to_string(),
6963 is_disk_based: true,
6964 group_id: 2,
6965 is_primary: true,
6966 ..Default::default()
6967 }
6968 }
6969 ]
6970 );
6971 assert_eq!(
6972 chunks_with_diagnostics(buffer, 0..buffer.len()),
6973 [
6974 ("\n\nfn a() { ".to_string(), None),
6975 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6976 (" }\nfn b() { ".to_string(), None),
6977 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6978 (" }\nfn c() { ".to_string(), None),
6979 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6980 (" }\n".to_string(), None),
6981 ]
6982 );
6983 assert_eq!(
6984 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6985 [
6986 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6987 (" }\nfn c() { ".to_string(), None),
6988 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6989 ]
6990 );
6991 });
6992
6993 // Ensure overlapping diagnostics are highlighted correctly.
6994 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6995 lsp::PublishDiagnosticsParams {
6996 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6997 version: Some(open_notification.text_document.version),
6998 diagnostics: vec![
6999 lsp::Diagnostic {
7000 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7001 severity: Some(DiagnosticSeverity::ERROR),
7002 message: "undefined variable 'A'".to_string(),
7003 source: Some("disk".to_string()),
7004 ..Default::default()
7005 },
7006 lsp::Diagnostic {
7007 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7008 severity: Some(DiagnosticSeverity::WARNING),
7009 message: "unreachable statement".to_string(),
7010 source: Some("disk".to_string()),
7011 ..Default::default()
7012 },
7013 ],
7014 },
7015 );
7016
7017 buffer.next_notification(cx).await;
7018 buffer.read_with(cx, |buffer, _| {
7019 assert_eq!(
7020 buffer
7021 .snapshot()
7022 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7023 .collect::<Vec<_>>(),
7024 &[
7025 DiagnosticEntry {
7026 range: Point::new(2, 9)..Point::new(2, 12),
7027 diagnostic: Diagnostic {
7028 severity: DiagnosticSeverity::WARNING,
7029 message: "unreachable statement".to_string(),
7030 is_disk_based: true,
7031 group_id: 4,
7032 is_primary: true,
7033 ..Default::default()
7034 }
7035 },
7036 DiagnosticEntry {
7037 range: Point::new(2, 9)..Point::new(2, 10),
7038 diagnostic: Diagnostic {
7039 severity: DiagnosticSeverity::ERROR,
7040 message: "undefined variable 'A'".to_string(),
7041 is_disk_based: true,
7042 group_id: 3,
7043 is_primary: true,
7044 ..Default::default()
7045 },
7046 }
7047 ]
7048 );
7049 assert_eq!(
7050 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7051 [
7052 ("fn a() { ".to_string(), None),
7053 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7054 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7055 ("\n".to_string(), None),
7056 ]
7057 );
7058 assert_eq!(
7059 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7060 [
7061 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7062 ("\n".to_string(), None),
7063 ]
7064 );
7065 });
7066
7067 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7068 // changes since the last save.
7069 buffer.update(cx, |buffer, cx| {
7070 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7071 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7072 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7073 });
7074 let change_notification_2 = fake_server
7075 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7076 .await;
7077 assert!(
7078 change_notification_2.text_document.version
7079 > change_notification_1.text_document.version
7080 );
7081
7082 // Handle out-of-order diagnostics
7083 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7084 lsp::PublishDiagnosticsParams {
7085 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7086 version: Some(change_notification_2.text_document.version),
7087 diagnostics: vec![
7088 lsp::Diagnostic {
7089 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7090 severity: Some(DiagnosticSeverity::ERROR),
7091 message: "undefined variable 'BB'".to_string(),
7092 source: Some("disk".to_string()),
7093 ..Default::default()
7094 },
7095 lsp::Diagnostic {
7096 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7097 severity: Some(DiagnosticSeverity::WARNING),
7098 message: "undefined variable 'A'".to_string(),
7099 source: Some("disk".to_string()),
7100 ..Default::default()
7101 },
7102 ],
7103 },
7104 );
7105
7106 buffer.next_notification(cx).await;
7107 buffer.read_with(cx, |buffer, _| {
7108 assert_eq!(
7109 buffer
7110 .snapshot()
7111 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7112 .collect::<Vec<_>>(),
7113 &[
7114 DiagnosticEntry {
7115 range: Point::new(2, 21)..Point::new(2, 22),
7116 diagnostic: Diagnostic {
7117 severity: DiagnosticSeverity::WARNING,
7118 message: "undefined variable 'A'".to_string(),
7119 is_disk_based: true,
7120 group_id: 6,
7121 is_primary: true,
7122 ..Default::default()
7123 }
7124 },
7125 DiagnosticEntry {
7126 range: Point::new(3, 9)..Point::new(3, 14),
7127 diagnostic: Diagnostic {
7128 severity: DiagnosticSeverity::ERROR,
7129 message: "undefined variable 'BB'".to_string(),
7130 is_disk_based: true,
7131 group_id: 5,
7132 is_primary: true,
7133 ..Default::default()
7134 },
7135 }
7136 ]
7137 );
7138 });
7139 }
7140
7141 #[gpui::test]
7142 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7143 cx.foreground().forbid_parking();
7144
7145 let text = concat!(
7146 "let one = ;\n", //
7147 "let two = \n",
7148 "let three = 3;\n",
7149 );
7150
7151 let fs = FakeFs::new(cx.background());
7152 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7153
7154 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7155 let buffer = project
7156 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7157 .await
7158 .unwrap();
7159
7160 project.update(cx, |project, cx| {
7161 project
7162 .update_buffer_diagnostics(
7163 &buffer,
7164 vec![
7165 DiagnosticEntry {
7166 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7167 diagnostic: Diagnostic {
7168 severity: DiagnosticSeverity::ERROR,
7169 message: "syntax error 1".to_string(),
7170 ..Default::default()
7171 },
7172 },
7173 DiagnosticEntry {
7174 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7175 diagnostic: Diagnostic {
7176 severity: DiagnosticSeverity::ERROR,
7177 message: "syntax error 2".to_string(),
7178 ..Default::default()
7179 },
7180 },
7181 ],
7182 None,
7183 cx,
7184 )
7185 .unwrap();
7186 });
7187
7188 // An empty range is extended forward to include the following character.
7189 // At the end of a line, an empty range is extended backward to include
7190 // the preceding character.
7191 buffer.read_with(cx, |buffer, _| {
7192 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7193 assert_eq!(
7194 chunks
7195 .iter()
7196 .map(|(s, d)| (s.as_str(), *d))
7197 .collect::<Vec<_>>(),
7198 &[
7199 ("let one = ", None),
7200 (";", Some(DiagnosticSeverity::ERROR)),
7201 ("\nlet two =", None),
7202 (" ", Some(DiagnosticSeverity::ERROR)),
7203 ("\nlet three = 3;\n", None)
7204 ]
7205 );
7206 });
7207 }
7208
7209 #[gpui::test]
7210 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7211 cx.foreground().forbid_parking();
7212
7213 let mut language = Language::new(
7214 LanguageConfig {
7215 name: "Rust".into(),
7216 path_suffixes: vec!["rs".to_string()],
7217 ..Default::default()
7218 },
7219 Some(tree_sitter_rust::language()),
7220 );
7221 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7222
7223 let text = "
7224 fn a() {
7225 f1();
7226 }
7227 fn b() {
7228 f2();
7229 }
7230 fn c() {
7231 f3();
7232 }
7233 "
7234 .unindent();
7235
7236 let fs = FakeFs::new(cx.background());
7237 fs.insert_tree(
7238 "/dir",
7239 json!({
7240 "a.rs": text.clone(),
7241 }),
7242 )
7243 .await;
7244
7245 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7246 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7247 let buffer = project
7248 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7249 .await
7250 .unwrap();
7251
7252 let mut fake_server = fake_servers.next().await.unwrap();
7253 let lsp_document_version = fake_server
7254 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7255 .await
7256 .text_document
7257 .version;
7258
7259 // Simulate editing the buffer after the language server computes some edits.
7260 buffer.update(cx, |buffer, cx| {
7261 buffer.edit(
7262 [(
7263 Point::new(0, 0)..Point::new(0, 0),
7264 "// above first function\n",
7265 )],
7266 cx,
7267 );
7268 buffer.edit(
7269 [(
7270 Point::new(2, 0)..Point::new(2, 0),
7271 " // inside first function\n",
7272 )],
7273 cx,
7274 );
7275 buffer.edit(
7276 [(
7277 Point::new(6, 4)..Point::new(6, 4),
7278 "// inside second function ",
7279 )],
7280 cx,
7281 );
7282
7283 assert_eq!(
7284 buffer.text(),
7285 "
7286 // above first function
7287 fn a() {
7288 // inside first function
7289 f1();
7290 }
7291 fn b() {
7292 // inside second function f2();
7293 }
7294 fn c() {
7295 f3();
7296 }
7297 "
7298 .unindent()
7299 );
7300 });
7301
7302 let edits = project
7303 .update(cx, |project, cx| {
7304 project.edits_from_lsp(
7305 &buffer,
7306 vec![
7307 // replace body of first function
7308 lsp::TextEdit {
7309 range: lsp::Range::new(
7310 lsp::Position::new(0, 0),
7311 lsp::Position::new(3, 0),
7312 ),
7313 new_text: "
7314 fn a() {
7315 f10();
7316 }
7317 "
7318 .unindent(),
7319 },
7320 // edit inside second function
7321 lsp::TextEdit {
7322 range: lsp::Range::new(
7323 lsp::Position::new(4, 6),
7324 lsp::Position::new(4, 6),
7325 ),
7326 new_text: "00".into(),
7327 },
7328 // edit inside third function via two distinct edits
7329 lsp::TextEdit {
7330 range: lsp::Range::new(
7331 lsp::Position::new(7, 5),
7332 lsp::Position::new(7, 5),
7333 ),
7334 new_text: "4000".into(),
7335 },
7336 lsp::TextEdit {
7337 range: lsp::Range::new(
7338 lsp::Position::new(7, 5),
7339 lsp::Position::new(7, 6),
7340 ),
7341 new_text: "".into(),
7342 },
7343 ],
7344 Some(lsp_document_version),
7345 cx,
7346 )
7347 })
7348 .await
7349 .unwrap();
7350
7351 buffer.update(cx, |buffer, cx| {
7352 for (range, new_text) in edits {
7353 buffer.edit([(range, new_text)], cx);
7354 }
7355 assert_eq!(
7356 buffer.text(),
7357 "
7358 // above first function
7359 fn a() {
7360 // inside first function
7361 f10();
7362 }
7363 fn b() {
7364 // inside second function f200();
7365 }
7366 fn c() {
7367 f4000();
7368 }
7369 "
7370 .unindent()
7371 );
7372 });
7373 }
7374
7375 #[gpui::test]
7376 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7377 cx.foreground().forbid_parking();
7378
7379 let text = "
7380 use a::b;
7381 use a::c;
7382
7383 fn f() {
7384 b();
7385 c();
7386 }
7387 "
7388 .unindent();
7389
7390 let fs = FakeFs::new(cx.background());
7391 fs.insert_tree(
7392 "/dir",
7393 json!({
7394 "a.rs": text.clone(),
7395 }),
7396 )
7397 .await;
7398
7399 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7400 let buffer = project
7401 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7402 .await
7403 .unwrap();
7404
7405 // Simulate the language server sending us a small edit in the form of a very large diff.
7406 // Rust-analyzer does this when performing a merge-imports code action.
7407 let edits = project
7408 .update(cx, |project, cx| {
7409 project.edits_from_lsp(
7410 &buffer,
7411 [
7412 // Replace the first use statement without editing the semicolon.
7413 lsp::TextEdit {
7414 range: lsp::Range::new(
7415 lsp::Position::new(0, 4),
7416 lsp::Position::new(0, 8),
7417 ),
7418 new_text: "a::{b, c}".into(),
7419 },
7420 // Reinsert the remainder of the file between the semicolon and the final
7421 // newline of the file.
7422 lsp::TextEdit {
7423 range: lsp::Range::new(
7424 lsp::Position::new(0, 9),
7425 lsp::Position::new(0, 9),
7426 ),
7427 new_text: "\n\n".into(),
7428 },
7429 lsp::TextEdit {
7430 range: lsp::Range::new(
7431 lsp::Position::new(0, 9),
7432 lsp::Position::new(0, 9),
7433 ),
7434 new_text: "
7435 fn f() {
7436 b();
7437 c();
7438 }"
7439 .unindent(),
7440 },
7441 // Delete everything after the first newline of the file.
7442 lsp::TextEdit {
7443 range: lsp::Range::new(
7444 lsp::Position::new(1, 0),
7445 lsp::Position::new(7, 0),
7446 ),
7447 new_text: "".into(),
7448 },
7449 ],
7450 None,
7451 cx,
7452 )
7453 })
7454 .await
7455 .unwrap();
7456
7457 buffer.update(cx, |buffer, cx| {
7458 let edits = edits
7459 .into_iter()
7460 .map(|(range, text)| {
7461 (
7462 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7463 text,
7464 )
7465 })
7466 .collect::<Vec<_>>();
7467
7468 assert_eq!(
7469 edits,
7470 [
7471 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7472 (Point::new(1, 0)..Point::new(2, 0), "".into())
7473 ]
7474 );
7475
7476 for (range, new_text) in edits {
7477 buffer.edit([(range, new_text)], cx);
7478 }
7479 assert_eq!(
7480 buffer.text(),
7481 "
7482 use a::{b, c};
7483
7484 fn f() {
7485 b();
7486 c();
7487 }
7488 "
7489 .unindent()
7490 );
7491 });
7492 }
7493
7494 #[gpui::test]
7495 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7496 cx.foreground().forbid_parking();
7497
7498 let text = "
7499 use a::b;
7500 use a::c;
7501
7502 fn f() {
7503 b();
7504 c();
7505 }
7506 "
7507 .unindent();
7508
7509 let fs = FakeFs::new(cx.background());
7510 fs.insert_tree(
7511 "/dir",
7512 json!({
7513 "a.rs": text.clone(),
7514 }),
7515 )
7516 .await;
7517
7518 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7519 let buffer = project
7520 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7521 .await
7522 .unwrap();
7523
7524 // Simulate the language server sending us edits in a non-ordered fashion,
7525 // with ranges sometimes being inverted.
7526 let edits = project
7527 .update(cx, |project, cx| {
7528 project.edits_from_lsp(
7529 &buffer,
7530 [
7531 lsp::TextEdit {
7532 range: lsp::Range::new(
7533 lsp::Position::new(0, 9),
7534 lsp::Position::new(0, 9),
7535 ),
7536 new_text: "\n\n".into(),
7537 },
7538 lsp::TextEdit {
7539 range: lsp::Range::new(
7540 lsp::Position::new(0, 8),
7541 lsp::Position::new(0, 4),
7542 ),
7543 new_text: "a::{b, c}".into(),
7544 },
7545 lsp::TextEdit {
7546 range: lsp::Range::new(
7547 lsp::Position::new(1, 0),
7548 lsp::Position::new(7, 0),
7549 ),
7550 new_text: "".into(),
7551 },
7552 lsp::TextEdit {
7553 range: lsp::Range::new(
7554 lsp::Position::new(0, 9),
7555 lsp::Position::new(0, 9),
7556 ),
7557 new_text: "
7558 fn f() {
7559 b();
7560 c();
7561 }"
7562 .unindent(),
7563 },
7564 ],
7565 None,
7566 cx,
7567 )
7568 })
7569 .await
7570 .unwrap();
7571
7572 buffer.update(cx, |buffer, cx| {
7573 let edits = edits
7574 .into_iter()
7575 .map(|(range, text)| {
7576 (
7577 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7578 text,
7579 )
7580 })
7581 .collect::<Vec<_>>();
7582
7583 assert_eq!(
7584 edits,
7585 [
7586 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7587 (Point::new(1, 0)..Point::new(2, 0), "".into())
7588 ]
7589 );
7590
7591 for (range, new_text) in edits {
7592 buffer.edit([(range, new_text)], cx);
7593 }
7594 assert_eq!(
7595 buffer.text(),
7596 "
7597 use a::{b, c};
7598
7599 fn f() {
7600 b();
7601 c();
7602 }
7603 "
7604 .unindent()
7605 );
7606 });
7607 }
7608
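    // Collects the buffer's chunks within `range`, merging adjacent chunks
    // that have the same diagnostic severity so tests can assert on
    // contiguous highlighted spans.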
7609 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7610 buffer: &Buffer,
7611 range: Range<T>,
7612 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7613 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7614 for chunk in buffer.snapshot().chunks(range, true) {
7615 if chunks.last().map_or(false, |prev_chunk| {
7616 prev_chunk.1 == chunk.diagnostic_severity
7617 }) {
7618 chunks.last_mut().unwrap().0.push_str(chunk.text);
7619 } else {
7620 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7621 }
7622 }
7623 chunks
7624 }
7625
7626 #[gpui::test]
7627 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7628 let dir = temp_tree(json!({
7629 "root": {
7630 "dir1": {},
7631 "dir2": {
7632 "dir3": {}
7633 }
7634 }
7635 }));
7636
7637 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7638 let cancel_flag = Default::default();
7639 let results = project
7640 .read_with(cx, |project, cx| {
7641 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7642 })
7643 .await;
7644
7645 assert!(results.is_empty());
7646 }
7647
7648 #[gpui::test(iterations = 10)]
7649 async fn test_definition(cx: &mut gpui::TestAppContext) {
7650 let mut language = Language::new(
7651 LanguageConfig {
7652 name: "Rust".into(),
7653 path_suffixes: vec!["rs".to_string()],
7654 ..Default::default()
7655 },
7656 Some(tree_sitter_rust::language()),
7657 );
7658 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7659
7660 let fs = FakeFs::new(cx.background());
7661 fs.insert_tree(
7662 "/dir",
7663 json!({
7664 "a.rs": "const fn a() { A }",
7665 "b.rs": "const y: i32 = crate::a()",
7666 }),
7667 )
7668 .await;
7669
7670 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7671 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7672
7673 let buffer = project
7674 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7675 .await
7676 .unwrap();
7677
7678 let fake_server = fake_servers.next().await.unwrap();
7679 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7680 let params = params.text_document_position_params;
7681 assert_eq!(
7682 params.text_document.uri.to_file_path().unwrap(),
7683 Path::new("/dir/b.rs"),
7684 );
7685 assert_eq!(params.position, lsp::Position::new(0, 22));
7686
7687 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7688 lsp::Location::new(
7689 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7690 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7691 ),
7692 )))
7693 });
7694
7695 let mut definitions = project
7696 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7697 .await
7698 .unwrap();
7699
7700 assert_eq!(definitions.len(), 1);
7701 let definition = definitions.pop().unwrap();
7702 cx.update(|cx| {
7703 let target_buffer = definition.target.buffer.read(cx);
7704 assert_eq!(
7705 target_buffer
7706 .file()
7707 .unwrap()
7708 .as_local()
7709 .unwrap()
7710 .abs_path(cx),
7711 Path::new("/dir/a.rs"),
7712 );
7713 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7714 assert_eq!(
7715 list_worktrees(&project, cx),
7716 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7717 );
7718
7719 drop(definition);
7720 });
7721 cx.read(|cx| {
7722 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7723 });
7724
7725 fn list_worktrees<'a>(
7726 project: &'a ModelHandle<Project>,
7727 cx: &'a AppContext,
7728 ) -> Vec<(&'a Path, bool)> {
7729 project
7730 .read(cx)
7731 .worktrees(cx)
7732 .map(|worktree| {
7733 let worktree = worktree.read(cx);
7734 (
7735 worktree.as_local().unwrap().abs_path().as_ref(),
7736 worktree.is_visible(),
7737 )
7738 })
7739 .collect::<Vec<_>>()
7740 }
7741 }
7742
7743 #[gpui::test]
7744 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7745 let mut language = Language::new(
7746 LanguageConfig {
7747 name: "TypeScript".into(),
7748 path_suffixes: vec!["ts".to_string()],
7749 ..Default::default()
7750 },
7751 Some(tree_sitter_typescript::language_typescript()),
7752 );
7753 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7754
7755 let fs = FakeFs::new(cx.background());
7756 fs.insert_tree(
7757 "/dir",
7758 json!({
7759 "a.ts": "",
7760 }),
7761 )
7762 .await;
7763
7764 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7765 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7766 let buffer = project
7767 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7768 .await
7769 .unwrap();
7770
7771 let fake_server = fake_language_servers.next().await.unwrap();
7772
7773 let text = "let a = b.fqn";
7774 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7775 let completions = project.update(cx, |project, cx| {
7776 project.completions(&buffer, text.len(), cx)
7777 });
7778
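        // The completion item below carries no edit range, so the replaced
        // range must be inferred from the word prefix ("fqn") under the cursor.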
7779 fake_server
7780 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7781 Ok(Some(lsp::CompletionResponse::Array(vec![
7782 lsp::CompletionItem {
7783 label: "fullyQualifiedName?".into(),
7784 insert_text: Some("fullyQualifiedName".into()),
7785 ..Default::default()
7786 },
7787 ])))
7788 })
7789 .next()
7790 .await;
7791 let completions = completions.await.unwrap();
7792 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7793 assert_eq!(completions.len(), 1);
7794 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7795 assert_eq!(
7796 completions[0].old_range.to_offset(&snapshot),
7797 text.len() - 3..text.len()
7798 );
7799
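        // Request completions inside a string literal. Again no edit range is
        // provided, so the replaced range should be inferred as the partial
        // word "cmp" before the closing quote.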
7800 let text = "let a = \"atoms/cmp\"";
7801 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7802 let completions = project.update(cx, |project, cx| {
7803 project.completions(&buffer, text.len() - 1, cx)
7804 });
7805
7806 fake_server
7807 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7808 Ok(Some(lsp::CompletionResponse::Array(vec![
7809 lsp::CompletionItem {
7810 label: "component".into(),
7811 ..Default::default()
7812 },
7813 ])))
7814 })
7815 .next()
7816 .await;
7817 let completions = completions.await.unwrap();
7818 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7819 assert_eq!(completions.len(), 1);
7820 assert_eq!(completions[0].new_text, "component");
7821 assert_eq!(
7822 completions[0].old_range.to_offset(&snapshot),
7823 text.len() - 4..text.len() - 1
7824 );
7825 }
7826
7827 #[gpui::test(iterations = 10)]
7828 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7829 let mut language = Language::new(
7830 LanguageConfig {
7831 name: "TypeScript".into(),
7832 path_suffixes: vec!["ts".to_string()],
7833 ..Default::default()
7834 },
7835 None,
7836 );
7837 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7838
7839 let fs = FakeFs::new(cx.background());
7840 fs.insert_tree(
7841 "/dir",
7842 json!({
7843 "a.ts": "a",
7844 }),
7845 )
7846 .await;
7847
7848 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7849 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7850 let buffer = project
7851 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7852 .await
7853 .unwrap();
7854
7855 let fake_server = fake_language_servers.next().await.unwrap();
7856
        // The language server returns code actions that contain commands but no edits.
7858 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7859 fake_server
7860 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7861 Ok(Some(vec![
7862 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7863 title: "The code action".into(),
7864 command: Some(lsp::Command {
7865 title: "The command".into(),
7866 command: "_the/command".into(),
7867 arguments: Some(vec![json!("the-argument")]),
7868 }),
7869 ..Default::default()
7870 }),
7871 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7872 title: "two".into(),
7873 ..Default::default()
7874 }),
7875 ]))
7876 })
7877 .next()
7878 .await;
7879
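        // Apply the first code action, the one that carries a command.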
7880 let action = actions.await.unwrap()[0].clone();
7881 let apply = project.update(cx, |project, cx| {
7882 project.apply_code_action(buffer.clone(), action, true, cx)
7883 });
7884
        // Resolving the code action does not populate its edits. In the
        // absence of edits, we must execute the given command.
7887 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7888 |action, _| async move { Ok(action) },
7889 );
7890
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7893 fake_server
7894 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7895 let fake = fake_server.clone();
7896 move |params, _| {
7897 assert_eq!(params.command, "_the/command");
7898 let fake = fake.clone();
7899 async move {
7900 fake.server
7901 .request::<lsp::request::ApplyWorkspaceEdit>(
7902 lsp::ApplyWorkspaceEditParams {
7903 label: None,
7904 edit: lsp::WorkspaceEdit {
7905 changes: Some(
7906 [(
7907 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7908 vec![lsp::TextEdit {
7909 range: lsp::Range::new(
7910 lsp::Position::new(0, 0),
7911 lsp::Position::new(0, 0),
7912 ),
7913 new_text: "X".into(),
7914 }],
7915 )]
7916 .into_iter()
7917 .collect(),
7918 ),
7919 ..Default::default()
7920 },
7921 },
7922 )
7923 .await
7924 .unwrap();
7925 Ok(Some(json!(null)))
7926 }
7927 }
7928 })
7929 .next()
7930 .await;
7931
        // Applying the code action returns a project transaction containing the
        // edits sent by the language server in its `workspace/applyEdit` request.
7934 let transaction = apply.await.unwrap();
7935 assert!(transaction.0.contains_key(&buffer));
7936 buffer.update(cx, |buffer, cx| {
7937 assert_eq!(buffer.text(), "Xa");
7938 buffer.undo(cx);
7939 assert_eq!(buffer.text(), "a");
7940 });
7941 }
7942
7943 #[gpui::test]
7944 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7945 let fs = FakeFs::new(cx.background());
7946 fs.insert_tree(
7947 "/dir",
7948 json!({
7949 "file1": "the old contents",
7950 }),
7951 )
7952 .await;
7953
7954 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7955 let buffer = project
7956 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7957 .await
7958 .unwrap();
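        // Make a large edit, save, and verify that the file on disk matches the buffer.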
7959 buffer
7960 .update(cx, |buffer, cx| {
7961 assert_eq!(buffer.text(), "the old contents");
7962 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7963 buffer.save(cx)
7964 })
7965 .await
7966 .unwrap();
7967
7968 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7969 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7970 }
7971
7972 #[gpui::test]
7973 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7974 let fs = FakeFs::new(cx.background());
7975 fs.insert_tree(
7976 "/dir",
7977 json!({
7978 "file1": "the old contents",
7979 }),
7980 )
7981 .await;
7982
7983 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7984 let buffer = project
7985 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7986 .await
7987 .unwrap();
7988 buffer
7989 .update(cx, |buffer, cx| {
7990 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7991 buffer.save(cx)
7992 })
7993 .await
7994 .unwrap();
7995
7996 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7997 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7998 }
7999
8000 #[gpui::test]
8001 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8002 let fs = FakeFs::new(cx.background());
8003 fs.insert_tree("/dir", json!({})).await;
8004
8005 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8006 let buffer = project.update(cx, |project, cx| {
8007 project.create_buffer("", None, cx).unwrap()
8008 });
8009 buffer.update(cx, |buffer, cx| {
8010 buffer.edit([(0..0, "abc")], cx);
8011 assert!(buffer.is_dirty());
8012 assert!(!buffer.has_conflict());
8013 });
8014 project
8015 .update(cx, |project, cx| {
8016 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8017 })
8018 .await
8019 .unwrap();
8020 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8021 buffer.read_with(cx, |buffer, cx| {
8022 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8023 assert!(!buffer.is_dirty());
8024 assert!(!buffer.has_conflict());
8025 });
8026
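        // Opening the path the buffer was saved to should return the same buffer.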
8027 let opened_buffer = project
8028 .update(cx, |project, cx| {
8029 project.open_local_buffer("/dir/file1", cx)
8030 })
8031 .await
8032 .unwrap();
8033 assert_eq!(opened_buffer, buffer);
8034 }
8035
8036 #[gpui::test(retries = 5)]
8037 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8038 let dir = temp_tree(json!({
8039 "a": {
8040 "file1": "",
8041 "file2": "",
8042 "file3": "",
8043 },
8044 "b": {
8045 "c": {
8046 "file4": "",
8047 "file5": "",
8048 }
8049 }
8050 }));
8051
8052 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8053 let rpc = project.read_with(cx, |p, _| p.client.clone());
8054
8055 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8056 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8057 async move { buffer.await.unwrap() }
8058 };
8059 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8060 project.read_with(cx, |project, cx| {
8061 let tree = project.worktrees(cx).next().unwrap();
8062 tree.read(cx)
8063 .entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
8065 .id
8066 })
8067 };
8068
8069 let buffer2 = buffer_for_path("a/file2", cx).await;
8070 let buffer3 = buffer_for_path("a/file3", cx).await;
8071 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8072 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8073
8074 let file2_id = id_for_path("a/file2", &cx);
8075 let file3_id = id_for_path("a/file3", &cx);
8076 let file4_id = id_for_path("b/c/file4", &cx);
8077
8078 // Create a remote copy of this worktree.
8079 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8080 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8081 let (remote, load_task) = cx.update(|cx| {
8082 Worktree::remote(
8083 1,
8084 1,
8085 initial_snapshot.to_proto(&Default::default(), true),
8086 rpc.clone(),
8087 cx,
8088 )
8089 });
        // Wait for the remote worktree to finish loading its initial snapshot.
8091 load_task.await;
8092
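        // Creating the remote copy should not have dirtied any of the open buffers.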
8093 cx.read(|cx| {
8094 assert!(!buffer2.read(cx).is_dirty());
8095 assert!(!buffer3.read(cx).is_dirty());
8096 assert!(!buffer4.read(cx).is_dirty());
8097 assert!(!buffer5.read(cx).is_dirty());
8098 });
8099
8100 // Rename and delete files and directories.
8101 tree.flush_fs_events(&cx).await;
8102 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8103 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8104 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8105 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8106 tree.flush_fs_events(&cx).await;
8107
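        // The paths that both the rescanned local worktree and the updated
        // remote worktree are expected to contain.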
8108 let expected_paths = vec![
8109 "a",
8110 "a/file1",
8111 "a/file2.new",
8112 "b",
8113 "d",
8114 "d/file3",
8115 "d/file4",
8116 ];
8117
8118 cx.read(|app| {
8119 assert_eq!(
8120 tree.read(app)
8121 .paths()
8122 .map(|p| p.to_str().unwrap())
8123 .collect::<Vec<_>>(),
8124 expected_paths
8125 );
8126
8127 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8128 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8129 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8130
8131 assert_eq!(
8132 buffer2.read(app).file().unwrap().path().as_ref(),
8133 Path::new("a/file2.new")
8134 );
8135 assert_eq!(
8136 buffer3.read(app).file().unwrap().path().as_ref(),
8137 Path::new("d/file3")
8138 );
8139 assert_eq!(
8140 buffer4.read(app).file().unwrap().path().as_ref(),
8141 Path::new("d/file4")
8142 );
8143 assert_eq!(
8144 buffer5.read(app).file().unwrap().path().as_ref(),
8145 Path::new("b/c/file5")
8146 );
8147
8148 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8149 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8150 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8151 assert!(buffer5.read(app).file().unwrap().is_deleted());
8152 });
8153
8154 // Update the remote worktree. Check that it becomes consistent with the
8155 // local worktree.
8156 remote.update(cx, |remote, cx| {
8157 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8158 &initial_snapshot,
8159 1,
8160 1,
8161 true,
8162 );
8163 remote
8164 .as_remote_mut()
8165 .unwrap()
8166 .snapshot
8167 .apply_remote_update(update_message)
8168 .unwrap();
8169
8170 assert_eq!(
8171 remote
8172 .paths()
8173 .map(|p| p.to_str().unwrap())
8174 .collect::<Vec<_>>(),
8175 expected_paths
8176 );
8177 });
8178 }
8179
8180 #[gpui::test]
8181 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8182 let fs = FakeFs::new(cx.background());
8183 fs.insert_tree(
8184 "/dir",
8185 json!({
8186 "a.txt": "a-contents",
8187 "b.txt": "b-contents",
8188 }),
8189 )
8190 .await;
8191
8192 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8193
        // Spawn several tasks that open paths concurrently, requesting some paths more than once.
8195 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8196 (
8197 p.open_local_buffer("/dir/a.txt", cx),
8198 p.open_local_buffer("/dir/b.txt", cx),
8199 p.open_local_buffer("/dir/a.txt", cx),
8200 )
8201 });
8202
8203 let buffer_a_1 = buffer_a_1.await.unwrap();
8204 let buffer_a_2 = buffer_a_2.await.unwrap();
8205 let buffer_b = buffer_b.await.unwrap();
8206 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8207 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8208
8209 // There is only one buffer per path.
8210 let buffer_a_id = buffer_a_1.id();
8211 assert_eq!(buffer_a_2.id(), buffer_a_id);
8212
8213 // Open the same path again while it is still open.
8214 drop(buffer_a_1);
8215 let buffer_a_3 = project
8216 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8217 .await
8218 .unwrap();
8219
8220 // There's still only one buffer per path.
8221 assert_eq!(buffer_a_3.id(), buffer_a_id);
8222 }
8223
8224 #[gpui::test]
8225 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8226 let fs = FakeFs::new(cx.background());
8227 fs.insert_tree(
8228 "/dir",
8229 json!({
8230 "file1": "abc",
8231 "file2": "def",
8232 "file3": "ghi",
8233 }),
8234 )
8235 .await;
8236
8237 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8238
8239 let buffer1 = project
8240 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8241 .await
8242 .unwrap();
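        // Record every buffer event except operations, so they can be asserted on below.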
8243 let events = Rc::new(RefCell::new(Vec::new()));
8244
8245 // initially, the buffer isn't dirty.
8246 buffer1.update(cx, |buffer, cx| {
8247 cx.subscribe(&buffer1, {
8248 let events = events.clone();
8249 move |_, _, event, _| match event {
8250 BufferEvent::Operation(_) => {}
8251 _ => events.borrow_mut().push(event.clone()),
8252 }
8253 })
8254 .detach();
8255
8256 assert!(!buffer.is_dirty());
8257 assert!(events.borrow().is_empty());
8258
8259 buffer.edit([(1..2, "")], cx);
8260 });
8261
        // after the first edit, the buffer is dirty, and emits a dirty-changed event.
8263 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
8265 assert!(buffer.is_dirty());
8266 assert_eq!(
8267 *events.borrow(),
8268 &[language::Event::Edited, language::Event::DirtyChanged]
8269 );
8270 events.borrow_mut().clear();
8271 buffer.did_save(
8272 buffer.version(),
8273 buffer.as_rope().fingerprint(),
8274 buffer.file().unwrap().mtime(),
8275 None,
8276 cx,
8277 );
8278 });
8279
8280 // after saving, the buffer is not dirty, and emits a saved event.
8281 buffer1.update(cx, |buffer, cx| {
8282 assert!(!buffer.is_dirty());
8283 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8284 events.borrow_mut().clear();
8285
8286 buffer.edit([(1..1, "B")], cx);
8287 buffer.edit([(2..2, "D")], cx);
8288 });
8289
8290 // after editing again, the buffer is dirty, and emits another dirty event.
8291 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
8293 assert!(buffer.is_dirty());
8294 assert_eq!(
8295 *events.borrow(),
8296 &[
8297 language::Event::Edited,
8298 language::Event::DirtyChanged,
8299 language::Event::Edited,
8300 ],
8301 );
8302 events.borrow_mut().clear();
8303
8304 // After restoring the buffer to its previously-saved state,
8305 // the buffer is not considered dirty anymore.
8306 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
8308 assert!(!buffer.is_dirty());
8309 });
8310
8311 assert_eq!(
8312 *events.borrow(),
8313 &[language::Event::Edited, language::Event::DirtyChanged]
8314 );
8315
8316 // When a file is deleted, the buffer is considered dirty.
8317 let events = Rc::new(RefCell::new(Vec::new()));
8318 let buffer2 = project
8319 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8320 .await
8321 .unwrap();
8322 buffer2.update(cx, |_, cx| {
8323 cx.subscribe(&buffer2, {
8324 let events = events.clone();
8325 move |_, _, event, _| events.borrow_mut().push(event.clone())
8326 })
8327 .detach();
8328 });
8329
8330 fs.remove_file("/dir/file2".as_ref(), Default::default())
8331 .await
8332 .unwrap();
8333 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8334 assert_eq!(
8335 *events.borrow(),
8336 &[
8337 language::Event::DirtyChanged,
8338 language::Event::FileHandleChanged
8339 ]
8340 );
8341
        // When a file is deleted while the buffer is already dirty, no
        // dirty-changed event is emitted.
8343 let events = Rc::new(RefCell::new(Vec::new()));
8344 let buffer3 = project
8345 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8346 .await
8347 .unwrap();
8348 buffer3.update(cx, |_, cx| {
8349 cx.subscribe(&buffer3, {
8350 let events = events.clone();
8351 move |_, _, event, _| events.borrow_mut().push(event.clone())
8352 })
8353 .detach();
8354 });
8355
8356 buffer3.update(cx, |buffer, cx| {
8357 buffer.edit([(0..0, "x")], cx);
8358 });
8359 events.borrow_mut().clear();
8360 fs.remove_file("/dir/file3".as_ref(), Default::default())
8361 .await
8362 .unwrap();
8363 buffer3
8364 .condition(&cx, |_, _| !events.borrow().is_empty())
8365 .await;
8366 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8367 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8368 }
8369
8370 #[gpui::test]
8371 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8372 let initial_contents = "aaa\nbbbbb\nc\n";
8373 let fs = FakeFs::new(cx.background());
8374 fs.insert_tree(
8375 "/dir",
8376 json!({
8377 "the-file": initial_contents,
8378 }),
8379 )
8380 .await;
8381 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8382 let buffer = project
8383 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8384 .await
8385 .unwrap();
8386
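        // Place an anchor just after the start of each of the first three lines,
        // to verify how they move when the file is reloaded from disk.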
8387 let anchors = (0..3)
8388 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8389 .collect::<Vec<_>>();
8390
8391 // Change the file on disk, adding two new lines of text, and removing
8392 // one line.
8393 buffer.read_with(cx, |buffer, _| {
8394 assert!(!buffer.is_dirty());
8395 assert!(!buffer.has_conflict());
8396 });
8397 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8398 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8399 .await
8400 .unwrap();
8401
8402 // Because the buffer was not modified, it is reloaded from disk. Its
8403 // contents are edited according to the diff between the old and new
8404 // file contents.
8405 buffer
8406 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8407 .await;
8408
8409 buffer.update(cx, |buffer, _| {
8410 assert_eq!(buffer.text(), new_contents);
8411 assert!(!buffer.is_dirty());
8412 assert!(!buffer.has_conflict());
8413
8414 let anchor_positions = anchors
8415 .iter()
8416 .map(|anchor| anchor.to_point(&*buffer))
8417 .collect::<Vec<_>>();
8418 assert_eq!(
8419 anchor_positions,
8420 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8421 );
8422 });
8423
8424 // Modify the buffer
8425 buffer.update(cx, |buffer, cx| {
8426 buffer.edit([(0..0, " ")], cx);
8427 assert!(buffer.is_dirty());
8428 assert!(!buffer.has_conflict());
8429 });
8430
8431 // Change the file on disk again, adding blank lines to the beginning.
8432 fs.save(
8433 "/dir/the-file".as_ref(),
8434 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8435 )
8436 .await
8437 .unwrap();
8438
8439 // Because the buffer is modified, it doesn't reload from disk, but is
8440 // marked as having a conflict.
8441 buffer
8442 .condition(&cx, |buffer, _| buffer.has_conflict())
8443 .await;
8444 }
8445
8446 #[gpui::test]
8447 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8448 cx.foreground().forbid_parking();
8449
8450 let fs = FakeFs::new(cx.background());
8451 fs.insert_tree(
8452 "/the-dir",
8453 json!({
8454 "a.rs": "
8455 fn foo(mut v: Vec<usize>) {
8456 for x in &v {
8457 v.push(1);
8458 }
8459 }
8460 "
8461 .unindent(),
8462 }),
8463 )
8464 .await;
8465
8466 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8467 let buffer = project
8468 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8469 .await
8470 .unwrap();
8471
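        // Publish diagnostics whose hint-severity entries and primary errors
        // reference each other via `relatedInformation`, so they should be
        // grouped together.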
8472 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8473 let message = lsp::PublishDiagnosticsParams {
8474 uri: buffer_uri.clone(),
8475 diagnostics: vec![
8476 lsp::Diagnostic {
8477 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8478 severity: Some(DiagnosticSeverity::WARNING),
8479 message: "error 1".to_string(),
8480 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8481 location: lsp::Location {
8482 uri: buffer_uri.clone(),
8483 range: lsp::Range::new(
8484 lsp::Position::new(1, 8),
8485 lsp::Position::new(1, 9),
8486 ),
8487 },
8488 message: "error 1 hint 1".to_string(),
8489 }]),
8490 ..Default::default()
8491 },
8492 lsp::Diagnostic {
8493 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8494 severity: Some(DiagnosticSeverity::HINT),
8495 message: "error 1 hint 1".to_string(),
8496 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8497 location: lsp::Location {
8498 uri: buffer_uri.clone(),
8499 range: lsp::Range::new(
8500 lsp::Position::new(1, 8),
8501 lsp::Position::new(1, 9),
8502 ),
8503 },
8504 message: "original diagnostic".to_string(),
8505 }]),
8506 ..Default::default()
8507 },
8508 lsp::Diagnostic {
8509 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8510 severity: Some(DiagnosticSeverity::ERROR),
8511 message: "error 2".to_string(),
8512 related_information: Some(vec![
8513 lsp::DiagnosticRelatedInformation {
8514 location: lsp::Location {
8515 uri: buffer_uri.clone(),
8516 range: lsp::Range::new(
8517 lsp::Position::new(1, 13),
8518 lsp::Position::new(1, 15),
8519 ),
8520 },
8521 message: "error 2 hint 1".to_string(),
8522 },
8523 lsp::DiagnosticRelatedInformation {
8524 location: lsp::Location {
8525 uri: buffer_uri.clone(),
8526 range: lsp::Range::new(
8527 lsp::Position::new(1, 13),
8528 lsp::Position::new(1, 15),
8529 ),
8530 },
8531 message: "error 2 hint 2".to_string(),
8532 },
8533 ]),
8534 ..Default::default()
8535 },
8536 lsp::Diagnostic {
8537 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8538 severity: Some(DiagnosticSeverity::HINT),
8539 message: "error 2 hint 1".to_string(),
8540 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8541 location: lsp::Location {
8542 uri: buffer_uri.clone(),
8543 range: lsp::Range::new(
8544 lsp::Position::new(2, 8),
8545 lsp::Position::new(2, 17),
8546 ),
8547 },
8548 message: "original diagnostic".to_string(),
8549 }]),
8550 ..Default::default()
8551 },
8552 lsp::Diagnostic {
8553 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8554 severity: Some(DiagnosticSeverity::HINT),
8555 message: "error 2 hint 2".to_string(),
8556 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8557 location: lsp::Location {
8558 uri: buffer_uri.clone(),
8559 range: lsp::Range::new(
8560 lsp::Position::new(2, 8),
8561 lsp::Position::new(2, 17),
8562 ),
8563 },
8564 message: "original diagnostic".to_string(),
8565 }]),
8566 ..Default::default()
8567 },
8568 ],
8569 version: None,
8570 };
8571
8572 project
8573 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8574 .unwrap();
8575 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8576
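        // All five diagnostics should be present, organized into two groups
        // (0 and 1), each with exactly one primary entry.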
8577 assert_eq!(
8578 buffer
8579 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8580 .collect::<Vec<_>>(),
8581 &[
8582 DiagnosticEntry {
8583 range: Point::new(1, 8)..Point::new(1, 9),
8584 diagnostic: Diagnostic {
8585 severity: DiagnosticSeverity::WARNING,
8586 message: "error 1".to_string(),
8587 group_id: 0,
8588 is_primary: true,
8589 ..Default::default()
8590 }
8591 },
8592 DiagnosticEntry {
8593 range: Point::new(1, 8)..Point::new(1, 9),
8594 diagnostic: Diagnostic {
8595 severity: DiagnosticSeverity::HINT,
8596 message: "error 1 hint 1".to_string(),
8597 group_id: 0,
8598 is_primary: false,
8599 ..Default::default()
8600 }
8601 },
8602 DiagnosticEntry {
8603 range: Point::new(1, 13)..Point::new(1, 15),
8604 diagnostic: Diagnostic {
8605 severity: DiagnosticSeverity::HINT,
8606 message: "error 2 hint 1".to_string(),
8607 group_id: 1,
8608 is_primary: false,
8609 ..Default::default()
8610 }
8611 },
8612 DiagnosticEntry {
8613 range: Point::new(1, 13)..Point::new(1, 15),
8614 diagnostic: Diagnostic {
8615 severity: DiagnosticSeverity::HINT,
8616 message: "error 2 hint 2".to_string(),
8617 group_id: 1,
8618 is_primary: false,
8619 ..Default::default()
8620 }
8621 },
8622 DiagnosticEntry {
8623 range: Point::new(2, 8)..Point::new(2, 17),
8624 diagnostic: Diagnostic {
8625 severity: DiagnosticSeverity::ERROR,
8626 message: "error 2".to_string(),
8627 group_id: 1,
8628 is_primary: true,
8629 ..Default::default()
8630 }
8631 }
8632 ]
8633 );
8634
8635 assert_eq!(
8636 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8637 &[
8638 DiagnosticEntry {
8639 range: Point::new(1, 8)..Point::new(1, 9),
8640 diagnostic: Diagnostic {
8641 severity: DiagnosticSeverity::WARNING,
8642 message: "error 1".to_string(),
8643 group_id: 0,
8644 is_primary: true,
8645 ..Default::default()
8646 }
8647 },
8648 DiagnosticEntry {
8649 range: Point::new(1, 8)..Point::new(1, 9),
8650 diagnostic: Diagnostic {
8651 severity: DiagnosticSeverity::HINT,
8652 message: "error 1 hint 1".to_string(),
8653 group_id: 0,
8654 is_primary: false,
8655 ..Default::default()
8656 }
8657 },
8658 ]
8659 );
8660 assert_eq!(
8661 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8662 &[
8663 DiagnosticEntry {
8664 range: Point::new(1, 13)..Point::new(1, 15),
8665 diagnostic: Diagnostic {
8666 severity: DiagnosticSeverity::HINT,
8667 message: "error 2 hint 1".to_string(),
8668 group_id: 1,
8669 is_primary: false,
8670 ..Default::default()
8671 }
8672 },
8673 DiagnosticEntry {
8674 range: Point::new(1, 13)..Point::new(1, 15),
8675 diagnostic: Diagnostic {
8676 severity: DiagnosticSeverity::HINT,
8677 message: "error 2 hint 2".to_string(),
8678 group_id: 1,
8679 is_primary: false,
8680 ..Default::default()
8681 }
8682 },
8683 DiagnosticEntry {
8684 range: Point::new(2, 8)..Point::new(2, 17),
8685 diagnostic: Diagnostic {
8686 severity: DiagnosticSeverity::ERROR,
8687 message: "error 2".to_string(),
8688 group_id: 1,
8689 is_primary: true,
8690 ..Default::default()
8691 }
8692 }
8693 ]
8694 );
8695 }
8696
8697 #[gpui::test]
8698 async fn test_rename(cx: &mut gpui::TestAppContext) {
8699 cx.foreground().forbid_parking();
8700
8701 let mut language = Language::new(
8702 LanguageConfig {
8703 name: "Rust".into(),
8704 path_suffixes: vec!["rs".to_string()],
8705 ..Default::default()
8706 },
8707 Some(tree_sitter_rust::language()),
8708 );
8709 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8710 capabilities: lsp::ServerCapabilities {
8711 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8712 prepare_provider: Some(true),
8713 work_done_progress_options: Default::default(),
8714 })),
8715 ..Default::default()
8716 },
8717 ..Default::default()
8718 });
8719
8720 let fs = FakeFs::new(cx.background());
8721 fs.insert_tree(
8722 "/dir",
8723 json!({
8724 "one.rs": "const ONE: usize = 1;",
8725 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8726 }),
8727 )
8728 .await;
8729
8730 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8731 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8732 let buffer = project
8733 .update(cx, |project, cx| {
8734 project.open_local_buffer("/dir/one.rs", cx)
8735 })
8736 .await
8737 .unwrap();
8738
8739 let fake_server = fake_servers.next().await.unwrap();
8740
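        // Prepare a rename at the position of `ONE` in one.rs; the server
        // reports the range of the symbol that can be renamed.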
8741 let response = project.update(cx, |project, cx| {
8742 project.prepare_rename(buffer.clone(), 7, cx)
8743 });
8744 fake_server
8745 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8746 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8747 assert_eq!(params.position, lsp::Position::new(0, 7));
8748 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8749 lsp::Position::new(0, 6),
8750 lsp::Position::new(0, 9),
8751 ))))
8752 })
8753 .next()
8754 .await
8755 .unwrap();
8756 let range = response.await.unwrap().unwrap();
8757 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8758 assert_eq!(range, 6..9);
8759
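        // Perform the rename. The fake server responds with edits in both
        // one.rs and two.rs, yielding a transaction that touches two buffers.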
8760 let response = project.update(cx, |project, cx| {
8761 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8762 });
8763 fake_server
8764 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8765 assert_eq!(
8766 params.text_document_position.text_document.uri.as_str(),
8767 "file:///dir/one.rs"
8768 );
8769 assert_eq!(
8770 params.text_document_position.position,
8771 lsp::Position::new(0, 7)
8772 );
8773 assert_eq!(params.new_name, "THREE");
8774 Ok(Some(lsp::WorkspaceEdit {
8775 changes: Some(
8776 [
8777 (
8778 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8779 vec![lsp::TextEdit::new(
8780 lsp::Range::new(
8781 lsp::Position::new(0, 6),
8782 lsp::Position::new(0, 9),
8783 ),
8784 "THREE".to_string(),
8785 )],
8786 ),
8787 (
8788 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8789 vec![
8790 lsp::TextEdit::new(
8791 lsp::Range::new(
8792 lsp::Position::new(0, 24),
8793 lsp::Position::new(0, 27),
8794 ),
8795 "THREE".to_string(),
8796 ),
8797 lsp::TextEdit::new(
8798 lsp::Range::new(
8799 lsp::Position::new(0, 35),
8800 lsp::Position::new(0, 38),
8801 ),
8802 "THREE".to_string(),
8803 ),
8804 ],
8805 ),
8806 ]
8807 .into_iter()
8808 .collect(),
8809 ),
8810 ..Default::default()
8811 }))
8812 })
8813 .next()
8814 .await
8815 .unwrap();
8816 let mut transaction = response.await.unwrap().0;
8817 assert_eq!(transaction.len(), 2);
8818 assert_eq!(
8819 transaction
8820 .remove_entry(&buffer)
8821 .unwrap()
8822 .0
8823 .read_with(cx, |buffer, _| buffer.text()),
8824 "const THREE: usize = 1;"
8825 );
8826 assert_eq!(
8827 transaction
8828 .into_keys()
8829 .next()
8830 .unwrap()
8831 .read_with(cx, |buffer, _| buffer.text()),
8832 "const TWO: usize = one::THREE + one::THREE;"
8833 );
8834 }
8835
8836 #[gpui::test]
8837 async fn test_search(cx: &mut gpui::TestAppContext) {
8838 let fs = FakeFs::new(cx.background());
8839 fs.insert_tree(
8840 "/dir",
8841 json!({
8842 "one.rs": "const ONE: usize = 1;",
8843 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8844 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8845 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8846 }),
8847 )
8848 .await;
8849 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8850 assert_eq!(
8851 search(&project, SearchQuery::text("TWO", false, true), cx)
8852 .await
8853 .unwrap(),
8854 HashMap::from_iter([
8855 ("two.rs".to_string(), vec![6..9]),
8856 ("three.rs".to_string(), vec![37..40])
8857 ])
8858 );
8859
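        // Edit four.rs in memory without saving; the subsequent search should
        // reflect the unsaved buffer contents rather than the file on disk.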
8860 let buffer_4 = project
8861 .update(cx, |project, cx| {
8862 project.open_local_buffer("/dir/four.rs", cx)
8863 })
8864 .await
8865 .unwrap();
8866 buffer_4.update(cx, |buffer, cx| {
8867 let text = "two::TWO";
8868 buffer.edit([(20..28, text), (31..43, text)], cx);
8869 });
8870
8871 assert_eq!(
8872 search(&project, SearchQuery::text("TWO", false, true), cx)
8873 .await
8874 .unwrap(),
8875 HashMap::from_iter([
8876 ("two.rs".to_string(), vec![6..9]),
8877 ("three.rs".to_string(), vec![37..40]),
8878 ("four.rs".to_string(), vec![25..28, 36..39])
8879 ])
8880 );
8881
8882 async fn search(
8883 project: &ModelHandle<Project>,
8884 query: SearchQuery,
8885 cx: &mut gpui::TestAppContext,
8886 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8887 let results = project
8888 .update(cx, |project, cx| project.search(query, cx))
8889 .await?;
8890
8891 Ok(results
8892 .into_iter()
8893 .map(|(buffer, ranges)| {
8894 buffer.read_with(cx, |buffer, _| {
8895 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8896 let ranges = ranges
8897 .into_iter()
8898 .map(|range| range.to_offset(buffer))
8899 .collect::<Vec<_>>();
8900 (path, ranges)
8901 })
8902 })
8903 .collect())
8904 }
8905 }
8906}