mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

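/// Application-wide registry of open projects, holding weak handles to every
/// `Project` plus the local database handle used to persist per-project state
/// (see `Project::persist_state` and `Project::restore_state`).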
pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

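/// A project is the collection of worktrees being edited, together with the state
/// needed to collaborate on them: open buffers, collaborators, language servers,
/// and diagnostics. A project is either local (this machine owns the worktrees) or
/// remote (a guest mirroring a host's project); see `ProjectClientState` below.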
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

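/// How the project holds onto an open buffer. Buffers are retained strongly while
/// the project is shared or remote and weakly otherwise, and `Loading` queues
/// operations that arrive for a buffer whose contents haven't finished loading;
/// `register_buffer` applies the queued operations once the buffer exists.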
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

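/// Distinguishes a project hosted on this machine (`Local`) from one joined as a
/// guest (`Remote`). Local projects track their registered remote id and online
/// state through watch channels; remote projects remember their replica id and
/// whether the host has stopped sharing.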
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}

impl HoverBlock {
    fn try_new(marked_string: MarkedString) -> Option<Self> {
        let result = match marked_string {
            MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
                text: value,
                language: Some(language),
            },
            MarkedString::String(text) => HoverBlock {
                text,
                language: None,
            },
        };
        if result.text.is_empty() {
            None
        } else {
            Some(result)
        }
    }
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(
        language_server_id: usize,
        diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
    ) -> Self {
        let mut this = Self {
            language_server_id,
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: self.language_server_id as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

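    /// Creates a project hosted on this machine. The background task spawned here keeps
    /// the project's registration with the collab server in sync: whenever the client is
    /// connected while the project is marked online it re-registers, and it unregisters
    /// when the project goes offline or the connection drops.
    ///
    /// A minimal usage sketch (assuming `client`, `user_store`, `project_store`,
    /// `languages`, and `fs` are already constructed; names here are illustrative):
    ///
    /// ```ignore
    /// let project = Project::local(true, client, user_store, project_store, languages, fs, cx);
    /// // Optionally restore the persisted online state for this project's worktrees.
    /// project.update(cx, |project, cx| project.restore_state(cx)).detach();
    /// ```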
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let worktree = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        if !cx.read(|cx| cx.has_global::<Settings>()) {
            cx.update(|cx| cx.set_global(Settings::test(cx)));
        }

        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

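    /// Restores persisted per-project state from the local database. Currently this is
    /// just whether the project should start out online, keyed by each visible
    /// worktree's absolute path (see `db_keys_for_online_state`).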
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }

    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }

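    /// Reconciles running language servers with the current settings: languages whose
    /// server is enabled get a server started for the worktrees containing open buffers
    /// in that language, and servers for newly disabled languages are stopped.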
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let settings = cx.global::<Settings>();

        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
                {
                    if settings.enable_language_server(Some(&language.name())) {
                        let worktree = file.worktree.read(cx);
                        language_servers_to_start.push((
                            worktree.id(),
                            worktree.as_local().unwrap().abs_path().clone(),
                            language.clone(),
                        ));
                    }
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        for language in self.languages.to_vec() {
            if let Some(lsp_adapter) = language.lsp_adapter() {
                if !settings.enable_language_server(Some(&language.name())) {
                    let lsp_name = lsp_adapter.name();
                    for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
                        if lsp_name == *started_lsp_name {
                            language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                        }
                    }
                }
            }
        }

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree_id, worktree_path, language) in language_servers_to_start {
            self.start_language_server(worktree_id, worktree_path, language, cx);
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }

    pub fn is_online(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
            ProjectClientState::Remote { .. } => true,
        }
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.client_subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }

    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.client_subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn shared_remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local {
                remote_id_rx,
                is_shared,
                ..
            } => {
                if *is_shared {
                    *remote_id_rx.borrow()
                } else {
                    None
                }
            }
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

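    /// Called whenever project metadata (worktrees, online state, remote id) changes.
    /// If the project is local, registered, and online, the new metadata is pushed to
    /// the server via `UpdateProject`; when `persist` is true the online state is also
    /// written back to the local database.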
    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
        self.worktrees
            .iter()
            .filter_map(|worktree| {
                let worktree = worktree.upgrade(&cx)?.read(cx);
                if worktree.is_visible() {
                    Some(format!(
                        "project-path-online:{}",
                        worktree.as_local().unwrap().abs_path().to_string_lossy()
                    ))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(&path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees(cx) {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

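    /// Begins sharing the project with collaborators. Sharing upgrades the project's
    /// weak worktree and buffer handles to strong ones so they stay alive for guests,
    /// shares each worktree over the wire, and announces the already-running language
    /// servers so guests can display their status.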
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: *server_id as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })?;
                    share.await?;
                    anyhow::Ok(())
                })
                .detach_and_log_err(cx);
        }
    }

    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

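    /// Opens the buffer at the given project path, deduplicating concurrent requests:
    /// if the buffer is already open it is returned immediately, and if it is currently
    /// being loaded, all callers wait on the same `loading_buffers` watch channel rather
    /// than loading the file twice.
    ///
    /// A rough usage sketch (assumes a `project` handle and a known `worktree_id`;
    /// the path is illustrative):
    ///
    /// ```ignore
    /// let path = ProjectPath { worktree_id, path: Path::new("src/main.rs").into() };
    /// // Opening the same path twice resolves to the same buffer model.
    /// let buffer_a = project.update(cx, |p, cx| p.open_buffer(path.clone(), cx));
    /// let buffer_b = project.update(cx, |p, cx| p.open_buffer(path, cx));
    /// ```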
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

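    /// Tracks a newly opened buffer in `opened_buffers`, applies any operations that were
    /// queued for it while it was loading, rejects duplicate registrations for the same
    /// remote id, and wires the buffer up to its language and language server.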
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

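    /// Announces a local buffer to its language server: sends `textDocument/didOpen`
    /// with the buffer's current contents, applies any diagnostics the worktree already
    /// holds for the path, seeds `buffer_snapshots` with version 0 so later edits can be
    /// diffed into `didChange` notifications, and copies the server's completion trigger
    /// characters onto the buffer.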
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_servers
                            .get(&(worktree_id, adapter.name()))
                            .cloned();
                    }
                }

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }

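    /// Reacts to buffer events: edit operations are forwarded to collaborators (or, when
    /// the project is registered but unshared, reported as project activity), edits are
    /// translated into incremental LSP `didChange` notifications against the last sent
    /// snapshot, and saves trigger `didSave` plus a simulated end of disk-based
    /// diagnostics for servers that don't report a progress token.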
1797 fn on_buffer_event(
1798 &mut self,
1799 buffer: ModelHandle<Buffer>,
1800 event: &BufferEvent,
1801 cx: &mut ModelContext<Self>,
1802 ) -> Option<()> {
1803 match event {
1804 BufferEvent::Operation(operation) => {
1805 if let Some(project_id) = self.shared_remote_id() {
1806 let request = self.client.request(proto::UpdateBuffer {
1807 project_id,
1808 buffer_id: buffer.read(cx).remote_id(),
1809 operations: vec![language::proto::serialize_operation(&operation)],
1810 });
1811 cx.background().spawn(request).detach_and_log_err(cx);
1812 } else if let Some(project_id) = self.remote_id() {
1813 let _ = self
1814 .client
1815 .send(proto::RegisterProjectActivity { project_id });
1816 }
1817 }
1818 BufferEvent::Edited { .. } => {
1819 let (_, language_server) = self
1820 .language_server_for_buffer(buffer.read(cx), cx)?
1821 .clone();
1822 let buffer = buffer.read(cx);
1823 let file = File::from_dyn(buffer.file())?;
1824 let abs_path = file.as_local()?.abs_path(cx);
1825 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1826 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1827 let (version, prev_snapshot) = buffer_snapshots.last()?;
1828 let next_snapshot = buffer.text_snapshot();
1829 let next_version = version + 1;
1830
1831 let content_changes = buffer
1832 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1833 .map(|edit| {
1834 let edit_start = edit.new.start.0;
1835 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1836 let new_text = next_snapshot
1837 .text_for_range(edit.new.start.1..edit.new.end.1)
1838 .collect();
1839 lsp::TextDocumentContentChangeEvent {
1840 range: Some(lsp::Range::new(
1841 point_to_lsp(edit_start),
1842 point_to_lsp(edit_end),
1843 )),
1844 range_length: None,
1845 text: new_text,
1846 }
1847 })
1848 .collect();
1849
1850 buffer_snapshots.push((next_version, next_snapshot));
1851
1852 language_server
1853 .notify::<lsp::notification::DidChangeTextDocument>(
1854 lsp::DidChangeTextDocumentParams {
1855 text_document: lsp::VersionedTextDocumentIdentifier::new(
1856 uri,
1857 next_version,
1858 ),
1859 content_changes,
1860 },
1861 )
1862 .log_err();
1863 }
1864 BufferEvent::Saved => {
1865 let file = File::from_dyn(buffer.read(cx).file())?;
1866 let worktree_id = file.worktree_id(cx);
1867 let abs_path = file.as_local()?.abs_path(cx);
1868 let text_document = lsp::TextDocumentIdentifier {
1869 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1870 };
1871
1872 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1873 server
1874 .notify::<lsp::notification::DidSaveTextDocument>(
1875 lsp::DidSaveTextDocumentParams {
1876 text_document: text_document.clone(),
1877 text: None,
1878 },
1879 )
1880 .log_err();
1881 }
1882
1883 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1884 // that don't support a disk-based progress token.
1885 let (lsp_adapter, language_server) =
1886 self.language_server_for_buffer(buffer.read(cx), cx)?;
1887 if lsp_adapter
1888 .disk_based_diagnostics_progress_token()
1889 .is_none()
1890 {
1891 let server_id = language_server.server_id();
1892 self.disk_based_diagnostics_finished(server_id, cx);
1893 self.broadcast_language_server_update(
1894 server_id,
1895 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1896 proto::LspDiskBasedDiagnosticsUpdated {},
1897 ),
1898 );
1899 }
1900 }
1901 _ => {}
1902 }
1903
1904 None
1905 }
1906
1907 fn language_servers_for_worktree(
1908 &self,
1909 worktree_id: WorktreeId,
1910 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1911 self.language_servers.iter().filter_map(
1912 move |((language_server_worktree_id, _), server)| {
1913 if *language_server_worktree_id == worktree_id {
1914 Some(server)
1915 } else {
1916 None
1917 }
1918 },
1919 )
1920 }
1921
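    /// Detects the buffer's language from its full path, assigns it to the buffer,
    /// and, for buffers in local worktrees, ensures the matching language server
    /// has been started.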
1922 fn assign_language_to_buffer(
1923 &mut self,
1924 buffer: &ModelHandle<Buffer>,
1925 cx: &mut ModelContext<Self>,
1926 ) -> Option<()> {
1927 // If the buffer has a language, set it and start the language server if we haven't already.
1928 let full_path = buffer.read(cx).file()?.full_path(cx);
1929 let language = self.languages.select_language(&full_path)?;
1930 buffer.update(cx, |buffer, cx| {
1931 buffer.set_language(Some(language.clone()), cx);
1932 });
1933
1934 let file = File::from_dyn(buffer.read(cx).file())?;
1935 let worktree = file.worktree.read(cx).as_local()?;
1936 let worktree_id = worktree.id();
1937 let worktree_abs_path = worktree.abs_path().clone();
1938 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1939
1940 None
1941 }
1942
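    /// Starts (or reuses) the language server for the given language in the given
    /// worktree, unless language servers are disabled in the settings. Once the
    /// server is initialized, this wires up its notification and request handlers,
    /// records its status, pushes the current configuration, announces it to
    /// collaborators, and opens every already-open buffer that matches the language.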
1943 fn start_language_server(
1944 &mut self,
1945 worktree_id: WorktreeId,
1946 worktree_path: Arc<Path>,
1947 language: Arc<Language>,
1948 cx: &mut ModelContext<Self>,
1949 ) {
1950 if !cx
1951 .global::<Settings>()
1952 .enable_language_server(Some(&language.name()))
1953 {
1954 return;
1955 }
1956
1957 let adapter = if let Some(adapter) = language.lsp_adapter() {
1958 adapter
1959 } else {
1960 return;
1961 };
1962 let key = (worktree_id, adapter.name());
1963 self.started_language_servers
1964 .entry(key.clone())
1965 .or_insert_with(|| {
1966 let server_id = post_inc(&mut self.next_language_server_id);
1967 let language_server = self.languages.start_language_server(
1968 server_id,
1969 language.clone(),
1970 worktree_path,
1971 self.client.http_client(),
1972 cx,
1973 );
1974 cx.spawn_weak(|this, mut cx| async move {
1975 let language_server = language_server?.await.log_err()?;
1976 let language_server = language_server
1977 .initialize(adapter.initialization_options())
1978 .await
1979 .log_err()?;
1980 let this = this.upgrade(&cx)?;
1981 let disk_based_diagnostics_progress_token =
1982 adapter.disk_based_diagnostics_progress_token();
1983
1984 language_server
1985 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1986 let this = this.downgrade();
1987 let adapter = adapter.clone();
1988 move |params, mut cx| {
1989 if let Some(this) = this.upgrade(&cx) {
1990 this.update(&mut cx, |this, cx| {
1991 this.on_lsp_diagnostics_published(
1992 server_id, params, &adapter, cx,
1993 );
1994 });
1995 }
1996 }
1997 })
1998 .detach();
1999
2000 language_server
2001 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2002 let settings = this
2003 .read_with(&cx, |this, _| this.language_server_settings.clone());
2004 move |params, _| {
2005 let settings = settings.lock().clone();
2006 async move {
2007 Ok(params
2008 .items
2009 .into_iter()
2010 .map(|item| {
2011 if let Some(section) = &item.section {
2012 settings
2013 .get(section)
2014 .cloned()
2015 .unwrap_or(serde_json::Value::Null)
2016 } else {
2017 settings.clone()
2018 }
2019 })
2020 .collect())
2021 }
2022 }
2023 })
2024 .detach();
2025
2026                    // Even though we don't handle these requests, respond to them anyway to
2027                    // avoid stalling language servers like `gopls`, which wait for a response
2028                    // to these requests while initializing.
2029 language_server
2030 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2031 let this = this.downgrade();
2032 move |params, mut cx| async move {
2033 if let Some(this) = this.upgrade(&cx) {
2034 this.update(&mut cx, |this, _| {
2035 if let Some(status) =
2036 this.language_server_statuses.get_mut(&server_id)
2037 {
2038 if let lsp::NumberOrString::String(token) = params.token
2039 {
2040 status.progress_tokens.insert(token);
2041 }
2042 }
2043 });
2044 }
2045 Ok(())
2046 }
2047 })
2048 .detach();
2049 language_server
2050 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2051 Ok(())
2052 })
2053 .detach();
2054
2055 language_server
2056 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2057 let this = this.downgrade();
2058 let adapter = adapter.clone();
2059 let language_server = language_server.clone();
2060 move |params, cx| {
2061 Self::on_lsp_workspace_edit(
2062 this,
2063 params,
2064 server_id,
2065 adapter.clone(),
2066 language_server.clone(),
2067 cx,
2068 )
2069 }
2070 })
2071 .detach();
2072
2073 language_server
2074 .on_notification::<lsp::notification::Progress, _>({
2075 let this = this.downgrade();
2076 move |params, mut cx| {
2077 if let Some(this) = this.upgrade(&cx) {
2078 this.update(&mut cx, |this, cx| {
2079 this.on_lsp_progress(
2080 params,
2081 server_id,
2082 disk_based_diagnostics_progress_token,
2083 cx,
2084 );
2085 });
2086 }
2087 }
2088 })
2089 .detach();
2090
2091 this.update(&mut cx, |this, cx| {
2092 this.language_servers
2093 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2094 this.language_server_statuses.insert(
2095 server_id,
2096 LanguageServerStatus {
2097 name: language_server.name().to_string(),
2098 pending_work: Default::default(),
2099 has_pending_diagnostic_updates: false,
2100 progress_tokens: Default::default(),
2101 },
2102 );
2103 language_server
2104 .notify::<lsp::notification::DidChangeConfiguration>(
2105 lsp::DidChangeConfigurationParams {
2106 settings: this.language_server_settings.lock().clone(),
2107 },
2108 )
2109 .ok();
2110
2111 if let Some(project_id) = this.shared_remote_id() {
2112 this.client
2113 .send(proto::StartLanguageServer {
2114 project_id,
2115 server: Some(proto::LanguageServer {
2116 id: server_id as u64,
2117 name: language_server.name().to_string(),
2118 }),
2119 })
2120 .log_err();
2121 }
2122
2123 // Tell the language server about every open buffer in the worktree that matches the language.
2124 for buffer in this.opened_buffers.values() {
2125 if let Some(buffer_handle) = buffer.upgrade(cx) {
2126 let buffer = buffer_handle.read(cx);
2127 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2128 file
2129 } else {
2130 continue;
2131 };
2132 let language = if let Some(language) = buffer.language() {
2133 language
2134 } else {
2135 continue;
2136 };
2137 if file.worktree.read(cx).id() != key.0
2138 || language.lsp_adapter().map(|a| a.name())
2139 != Some(key.1.clone())
2140 {
2141 continue;
2142 }
2143
2144 let file = file.as_local()?;
2145 let versions = this
2146 .buffer_snapshots
2147 .entry(buffer.remote_id())
2148 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2149 let (version, initial_snapshot) = versions.last().unwrap();
2150 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2151 let language_id = adapter.id_for_language(language.name().as_ref());
2152 language_server
2153 .notify::<lsp::notification::DidOpenTextDocument>(
2154 lsp::DidOpenTextDocumentParams {
2155 text_document: lsp::TextDocumentItem::new(
2156 uri,
2157 language_id.unwrap_or_default(),
2158 *version,
2159 initial_snapshot.text(),
2160 ),
2161 },
2162 )
2163 .log_err()?;
2164 buffer_handle.update(cx, |buffer, cx| {
2165 buffer.set_completion_triggers(
2166 language_server
2167 .capabilities()
2168 .completion_provider
2169 .as_ref()
2170 .and_then(|provider| {
2171 provider.trigger_characters.clone()
2172 })
2173 .unwrap_or(Vec::new()),
2174 cx,
2175 )
2176 });
2177 }
2178 }
2179
2180 cx.notify();
2181 Some(())
2182 });
2183
2184 Some(language_server)
2185 })
2186 });
2187 }
2188
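    /// Removes the language server registered for the given worktree and adapter
    /// and shuts it down asynchronously, returning a task that resolves once the
    /// shutdown has completed.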
2189 fn stop_language_server(
2190 &mut self,
2191 worktree_id: WorktreeId,
2192 adapter_name: LanguageServerName,
2193 cx: &mut ModelContext<Self>,
2194 ) -> Task<()> {
2195 let key = (worktree_id, adapter_name);
2196 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2197 self.language_server_statuses
2198 .remove(&language_server.server_id());
2199 cx.notify();
2200 }
2201
2202 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2203 cx.spawn_weak(|this, mut cx| async move {
2204 if let Some(language_server) = started_language_server.await {
2205 if let Some(shutdown) = language_server.shutdown() {
2206 shutdown.await;
2207 }
2208
2209 if let Some(this) = this.upgrade(&cx) {
2210 this.update(&mut cx, |this, cx| {
2211 this.language_server_statuses
2212 .remove(&language_server.server_id());
2213 cx.notify();
2214 });
2215 }
2216 }
2217 })
2218 } else {
2219 Task::ready(())
2220 }
2221 }
2222
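    /// Restarts the language servers backing the given buffers, deduplicating by
    /// worktree and path so that each affected server is stopped and started again.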
2223 pub fn restart_language_servers_for_buffers(
2224 &mut self,
2225 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2226 cx: &mut ModelContext<Self>,
2227 ) -> Option<()> {
2228 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2229 .into_iter()
2230 .filter_map(|buffer| {
2231 let file = File::from_dyn(buffer.read(cx).file())?;
2232 let worktree = file.worktree.read(cx).as_local()?;
2233 let worktree_id = worktree.id();
2234 let worktree_abs_path = worktree.abs_path().clone();
2235 let full_path = file.full_path(cx);
2236 Some((worktree_id, worktree_abs_path, full_path))
2237 })
2238 .collect();
2239 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2240 let language = self.languages.select_language(&full_path)?;
2241 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2242 }
2243
2244 None
2245 }
2246
2247 fn restart_language_server(
2248 &mut self,
2249 worktree_id: WorktreeId,
2250 worktree_path: Arc<Path>,
2251 language: Arc<Language>,
2252 cx: &mut ModelContext<Self>,
2253 ) {
2254 let adapter = if let Some(adapter) = language.lsp_adapter() {
2255 adapter
2256 } else {
2257 return;
2258 };
2259
2260 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2261 cx.spawn_weak(|this, mut cx| async move {
2262 stop.await;
2263 if let Some(this) = this.upgrade(&cx) {
2264 this.update(&mut cx, |this, cx| {
2265 this.start_language_server(worktree_id, worktree_path, language, cx);
2266 });
2267 }
2268 })
2269 .detach();
2270 }
2271
2272 fn on_lsp_diagnostics_published(
2273 &mut self,
2274 server_id: usize,
2275 mut params: lsp::PublishDiagnosticsParams,
2276 adapter: &Arc<dyn LspAdapter>,
2277 cx: &mut ModelContext<Self>,
2278 ) {
2279 adapter.process_diagnostics(&mut params);
2280 self.update_diagnostics(
2281 server_id,
2282 params,
2283 adapter.disk_based_diagnostic_sources(),
2284 cx,
2285 )
2286 .log_err();
2287 }
2288
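    /// Handles `$/progress` notifications from a language server. Tokens the server
    /// has not registered are ignored; the adapter's disk-based diagnostics token is
    /// translated into diagnostics started/finished events, and all other tokens
    /// update the server's pending work. Progress is also broadcast to collaborators.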
2289 fn on_lsp_progress(
2290 &mut self,
2291 progress: lsp::ProgressParams,
2292 server_id: usize,
2293 disk_based_diagnostics_progress_token: Option<&str>,
2294 cx: &mut ModelContext<Self>,
2295 ) {
2296 let token = match progress.token {
2297 lsp::NumberOrString::String(token) => token,
2298 lsp::NumberOrString::Number(token) => {
2299 log::info!("skipping numeric progress token {}", token);
2300 return;
2301 }
2302 };
2303 let progress = match progress.value {
2304 lsp::ProgressParamsValue::WorkDone(value) => value,
2305 };
2306 let language_server_status =
2307 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2308 status
2309 } else {
2310 return;
2311 };
2312
2313 if !language_server_status.progress_tokens.contains(&token) {
2314 return;
2315 }
2316
2317 match progress {
2318 lsp::WorkDoneProgress::Begin(report) => {
2319 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2320 language_server_status.has_pending_diagnostic_updates = true;
2321 self.disk_based_diagnostics_started(server_id, cx);
2322 self.broadcast_language_server_update(
2323 server_id,
2324 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2325 proto::LspDiskBasedDiagnosticsUpdating {},
2326 ),
2327 );
2328 } else {
2329 self.on_lsp_work_start(
2330 server_id,
2331 token.clone(),
2332 LanguageServerProgress {
2333 message: report.message.clone(),
2334 percentage: report.percentage.map(|p| p as usize),
2335 last_update_at: Instant::now(),
2336 },
2337 cx,
2338 );
2339 self.broadcast_language_server_update(
2340 server_id,
2341 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2342 token,
2343 message: report.message,
2344 percentage: report.percentage.map(|p| p as u32),
2345 }),
2346 );
2347 }
2348 }
2349 lsp::WorkDoneProgress::Report(report) => {
2350 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2351 self.on_lsp_work_progress(
2352 server_id,
2353 token.clone(),
2354 LanguageServerProgress {
2355 message: report.message.clone(),
2356 percentage: report.percentage.map(|p| p as usize),
2357 last_update_at: Instant::now(),
2358 },
2359 cx,
2360 );
2361 self.broadcast_language_server_update(
2362 server_id,
2363 proto::update_language_server::Variant::WorkProgress(
2364 proto::LspWorkProgress {
2365 token,
2366 message: report.message,
2367 percentage: report.percentage.map(|p| p as u32),
2368 },
2369 ),
2370 );
2371 }
2372 }
2373 lsp::WorkDoneProgress::End(_) => {
2374 language_server_status.progress_tokens.remove(&token);
2375
2376 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2377 language_server_status.has_pending_diagnostic_updates = false;
2378 self.disk_based_diagnostics_finished(server_id, cx);
2379 self.broadcast_language_server_update(
2380 server_id,
2381 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2382 proto::LspDiskBasedDiagnosticsUpdated {},
2383 ),
2384 );
2385 } else {
2386 self.on_lsp_work_end(server_id, token.clone(), cx);
2387 self.broadcast_language_server_update(
2388 server_id,
2389 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2390 token,
2391 }),
2392 );
2393 }
2394 }
2395 }
2396 }
2397
2398 fn on_lsp_work_start(
2399 &mut self,
2400 language_server_id: usize,
2401 token: String,
2402 progress: LanguageServerProgress,
2403 cx: &mut ModelContext<Self>,
2404 ) {
2405 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2406 status.pending_work.insert(token, progress);
2407 cx.notify();
2408 }
2409 }
2410
2411 fn on_lsp_work_progress(
2412 &mut self,
2413 language_server_id: usize,
2414 token: String,
2415 progress: LanguageServerProgress,
2416 cx: &mut ModelContext<Self>,
2417 ) {
2418 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2419 let entry = status
2420 .pending_work
2421 .entry(token)
2422 .or_insert(LanguageServerProgress {
2423 message: Default::default(),
2424 percentage: Default::default(),
2425 last_update_at: progress.last_update_at,
2426 });
2427 if progress.message.is_some() {
2428 entry.message = progress.message;
2429 }
2430 if progress.percentage.is_some() {
2431 entry.percentage = progress.percentage;
2432 }
2433 entry.last_update_at = progress.last_update_at;
2434 cx.notify();
2435 }
2436 }
2437
2438 fn on_lsp_work_end(
2439 &mut self,
2440 language_server_id: usize,
2441 token: String,
2442 cx: &mut ModelContext<Self>,
2443 ) {
2444 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2445 status.pending_work.remove(&token);
2446 cx.notify();
2447 }
2448 }
2449
2450 async fn on_lsp_workspace_edit(
2451 this: WeakModelHandle<Self>,
2452 params: lsp::ApplyWorkspaceEditParams,
2453 server_id: usize,
2454 adapter: Arc<dyn LspAdapter>,
2455 language_server: Arc<LanguageServer>,
2456 mut cx: AsyncAppContext,
2457 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2458 let this = this
2459 .upgrade(&cx)
2460            .ok_or_else(|| anyhow!("project closed"))?;
2461 let transaction = Self::deserialize_workspace_edit(
2462 this.clone(),
2463 params.edit,
2464 true,
2465 adapter.clone(),
2466 language_server.clone(),
2467 &mut cx,
2468 )
2469 .await
2470 .log_err();
2471 this.update(&mut cx, |this, _| {
2472 if let Some(transaction) = transaction {
2473 this.last_workspace_edits_by_language_server
2474 .insert(server_id, transaction);
2475 }
2476 });
2477 Ok(lsp::ApplyWorkspaceEditResponse {
2478 applied: true,
2479 failed_change: None,
2480 failure_reason: None,
2481 })
2482 }
2483
2484 fn broadcast_language_server_update(
2485 &self,
2486 language_server_id: usize,
2487 event: proto::update_language_server::Variant,
2488 ) {
2489 if let Some(project_id) = self.shared_remote_id() {
2490 self.client
2491 .send(proto::UpdateLanguageServer {
2492 project_id,
2493 language_server_id: language_server_id as u64,
2494 variant: Some(event),
2495 })
2496 .log_err();
2497 }
2498 }
2499
2500 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2501 for (_, server) in self.language_servers.values() {
2502 server
2503 .notify::<lsp::notification::DidChangeConfiguration>(
2504 lsp::DidChangeConfigurationParams {
2505 settings: settings.clone(),
2506 },
2507 )
2508 .ok();
2509 }
2510 *self.language_server_settings.lock() = settings;
2511 }
2512
2513 pub fn language_server_statuses(
2514 &self,
2515 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2516 self.language_server_statuses.values()
2517 }
2518
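    /// Converts an LSP `textDocument/publishDiagnostics` payload into diagnostic
    /// entries, grouping each diagnostic's related information with its primary
    /// diagnostic, and applies the result to the affected worktree and any open
    /// buffer for that path.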
2519 pub fn update_diagnostics(
2520 &mut self,
2521 language_server_id: usize,
2522 params: lsp::PublishDiagnosticsParams,
2523 disk_based_sources: &[&str],
2524 cx: &mut ModelContext<Self>,
2525 ) -> Result<()> {
2526 let abs_path = params
2527 .uri
2528 .to_file_path()
2529 .map_err(|_| anyhow!("URI is not a file"))?;
2530 let mut diagnostics = Vec::default();
2531 let mut primary_diagnostic_group_ids = HashMap::default();
2532 let mut sources_by_group_id = HashMap::default();
2533 let mut supporting_diagnostics = HashMap::default();
2534        for diagnostic in &params.diagnostics {
2535 let source = diagnostic.source.as_ref();
2536 let code = diagnostic.code.as_ref().map(|code| match code {
2537 lsp::NumberOrString::Number(code) => code.to_string(),
2538 lsp::NumberOrString::String(code) => code.clone(),
2539 });
2540 let range = range_from_lsp(diagnostic.range);
2541 let is_supporting = diagnostic
2542 .related_information
2543 .as_ref()
2544 .map_or(false, |infos| {
2545 infos.iter().any(|info| {
2546 primary_diagnostic_group_ids.contains_key(&(
2547 source,
2548 code.clone(),
2549 range_from_lsp(info.location.range),
2550 ))
2551 })
2552 });
2553
2554 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2555 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2556 });
2557
2558 if is_supporting {
2559 supporting_diagnostics.insert(
2560 (source, code.clone(), range),
2561 (diagnostic.severity, is_unnecessary),
2562 );
2563 } else {
2564 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2565 let is_disk_based = source.map_or(false, |source| {
2566 disk_based_sources.contains(&source.as_str())
2567 });
2568
2569 sources_by_group_id.insert(group_id, source);
2570 primary_diagnostic_group_ids
2571 .insert((source, code.clone(), range.clone()), group_id);
2572
2573 diagnostics.push(DiagnosticEntry {
2574 range,
2575 diagnostic: Diagnostic {
2576 code: code.clone(),
2577 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2578 message: diagnostic.message.clone(),
2579 group_id,
2580 is_primary: true,
2581 is_valid: true,
2582 is_disk_based,
2583 is_unnecessary,
2584 },
2585 });
2586 if let Some(infos) = &diagnostic.related_information {
2587 for info in infos {
2588 if info.location.uri == params.uri && !info.message.is_empty() {
2589 let range = range_from_lsp(info.location.range);
2590 diagnostics.push(DiagnosticEntry {
2591 range,
2592 diagnostic: Diagnostic {
2593 code: code.clone(),
2594 severity: DiagnosticSeverity::INFORMATION,
2595 message: info.message.clone(),
2596 group_id,
2597 is_primary: false,
2598 is_valid: true,
2599 is_disk_based,
2600 is_unnecessary: false,
2601 },
2602 });
2603 }
2604 }
2605 }
2606 }
2607 }
2608
2609 for entry in &mut diagnostics {
2610 let diagnostic = &mut entry.diagnostic;
2611 if !diagnostic.is_primary {
2612 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2613 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2614 source,
2615 diagnostic.code.clone(),
2616 entry.range.clone(),
2617 )) {
2618 if let Some(severity) = severity {
2619 diagnostic.severity = severity;
2620 }
2621 diagnostic.is_unnecessary = is_unnecessary;
2622 }
2623 }
2624 }
2625
2626 self.update_diagnostic_entries(
2627 language_server_id,
2628 abs_path,
2629 params.version,
2630 diagnostics,
2631 cx,
2632 )?;
2633 Ok(())
2634 }
2635
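    /// Applies a set of diagnostic entries to the worktree containing `abs_path`
    /// and to the corresponding open buffer, if any, emitting
    /// `Event::DiagnosticsUpdated` when the worktree's diagnostics actually change.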
2636 pub fn update_diagnostic_entries(
2637 &mut self,
2638 language_server_id: usize,
2639 abs_path: PathBuf,
2640 version: Option<i32>,
2641 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2642 cx: &mut ModelContext<Project>,
2643 ) -> Result<(), anyhow::Error> {
2644 let (worktree, relative_path) = self
2645 .find_local_worktree(&abs_path, cx)
2646 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2647
2648 let project_path = ProjectPath {
2649 worktree_id: worktree.read(cx).id(),
2650 path: relative_path.into(),
2651 };
2652 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2653 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2654 }
2655
2656 let updated = worktree.update(cx, |worktree, cx| {
2657 worktree
2658 .as_local_mut()
2659 .ok_or_else(|| anyhow!("not a local worktree"))?
2660 .update_diagnostics(
2661 language_server_id,
2662 project_path.path.clone(),
2663 diagnostics,
2664 cx,
2665 )
2666 })?;
2667 if updated {
2668 cx.emit(Event::DiagnosticsUpdated {
2669 language_server_id,
2670 path: project_path,
2671 });
2672 }
2673 Ok(())
2674 }
2675
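    /// Sorts and sanitizes incoming diagnostics against the buffer snapshot that
    /// corresponds to the given LSP version: disk-based diagnostics are shifted to
    /// account for unsaved edits, ranges are clipped to the snapshot, and empty
    /// ranges are expanded by one character before the set is stored on the buffer.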
2676 fn update_buffer_diagnostics(
2677 &mut self,
2678 buffer: &ModelHandle<Buffer>,
2679 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2680 version: Option<i32>,
2681 cx: &mut ModelContext<Self>,
2682 ) -> Result<()> {
2683 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2684 Ordering::Equal
2685 .then_with(|| b.is_primary.cmp(&a.is_primary))
2686 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2687 .then_with(|| a.severity.cmp(&b.severity))
2688 .then_with(|| a.message.cmp(&b.message))
2689 }
2690
2691 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2692
2693 diagnostics.sort_unstable_by(|a, b| {
2694 Ordering::Equal
2695 .then_with(|| a.range.start.cmp(&b.range.start))
2696 .then_with(|| b.range.end.cmp(&a.range.end))
2697 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2698 });
2699
2700 let mut sanitized_diagnostics = Vec::new();
2701 let edits_since_save = Patch::new(
2702 snapshot
2703 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2704 .collect(),
2705 );
2706 for entry in diagnostics {
2707 let start;
2708 let end;
2709 if entry.diagnostic.is_disk_based {
2710 // Some diagnostics are based on files on disk instead of buffers'
2711 // current contents. Adjust these diagnostics' ranges to reflect
2712 // any unsaved edits.
2713 start = edits_since_save.old_to_new(entry.range.start);
2714 end = edits_since_save.old_to_new(entry.range.end);
2715 } else {
2716 start = entry.range.start;
2717 end = entry.range.end;
2718 }
2719
2720 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2721 ..snapshot.clip_point_utf16(end, Bias::Right);
2722
2723 // Expand empty ranges by one character
2724 if range.start == range.end {
2725 range.end.column += 1;
2726 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2727 if range.start == range.end && range.end.column > 0 {
2728 range.start.column -= 1;
2729 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2730 }
2731 }
2732
2733 sanitized_diagnostics.push(DiagnosticEntry {
2734 range,
2735 diagnostic: entry.diagnostic,
2736 });
2737 }
2738 drop(edits_since_save);
2739
2740 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2741 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2742 Ok(())
2743 }
2744
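    /// Reloads the given buffers from disk, skipping buffers that aren't dirty.
    /// Remote buffers are reloaded through the project's host, local buffers
    /// directly; the resulting transactions are collected into a single
    /// `ProjectTransaction`.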
2745 pub fn reload_buffers(
2746 &self,
2747 buffers: HashSet<ModelHandle<Buffer>>,
2748 push_to_history: bool,
2749 cx: &mut ModelContext<Self>,
2750 ) -> Task<Result<ProjectTransaction>> {
2751 let mut local_buffers = Vec::new();
2752 let mut remote_buffers = None;
2753 for buffer_handle in buffers {
2754 let buffer = buffer_handle.read(cx);
2755 if buffer.is_dirty() {
2756 if let Some(file) = File::from_dyn(buffer.file()) {
2757 if file.is_local() {
2758 local_buffers.push(buffer_handle);
2759 } else {
2760 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2761 }
2762 }
2763 }
2764 }
2765
2766 let remote_buffers = self.remote_id().zip(remote_buffers);
2767 let client = self.client.clone();
2768
2769 cx.spawn(|this, mut cx| async move {
2770 let mut project_transaction = ProjectTransaction::default();
2771
2772 if let Some((project_id, remote_buffers)) = remote_buffers {
2773 let response = client
2774 .request(proto::ReloadBuffers {
2775 project_id,
2776 buffer_ids: remote_buffers
2777 .iter()
2778 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2779 .collect(),
2780 })
2781 .await?
2782 .transaction
2783 .ok_or_else(|| anyhow!("missing transaction"))?;
2784 project_transaction = this
2785 .update(&mut cx, |this, cx| {
2786 this.deserialize_project_transaction(response, push_to_history, cx)
2787 })
2788 .await?;
2789 }
2790
2791 for buffer in local_buffers {
2792 let transaction = buffer
2793 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2794 .await?;
2795 buffer.update(&mut cx, |buffer, cx| {
2796 if let Some(transaction) = transaction {
2797 if !push_to_history {
2798 buffer.forget_transaction(transaction.id);
2799 }
2800 project_transaction.0.insert(cx.handle(), transaction);
2801 }
2802 });
2803 }
2804
2805 Ok(project_transaction)
2806 })
2807 }
2808
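    /// Formats the given buffers. Remote buffers are formatted by the host, while
    /// local buffers use the language server's document formatting request (falling
    /// back to range formatting over the whole buffer), applying the returned edits
    /// as one transaction per buffer.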
2809 pub fn format(
2810 &self,
2811 buffers: HashSet<ModelHandle<Buffer>>,
2812 push_to_history: bool,
2813 cx: &mut ModelContext<Project>,
2814 ) -> Task<Result<ProjectTransaction>> {
2815 let mut local_buffers = Vec::new();
2816 let mut remote_buffers = None;
2817 for buffer_handle in buffers {
2818 let buffer = buffer_handle.read(cx);
2819 if let Some(file) = File::from_dyn(buffer.file()) {
2820 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2821 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2822 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2823 }
2824 } else {
2825 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2826 }
2827 } else {
2828 return Task::ready(Ok(Default::default()));
2829 }
2830 }
2831
2832 let remote_buffers = self.remote_id().zip(remote_buffers);
2833 let client = self.client.clone();
2834
2835 cx.spawn(|this, mut cx| async move {
2836 let mut project_transaction = ProjectTransaction::default();
2837
2838 if let Some((project_id, remote_buffers)) = remote_buffers {
2839 let response = client
2840 .request(proto::FormatBuffers {
2841 project_id,
2842 buffer_ids: remote_buffers
2843 .iter()
2844 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2845 .collect(),
2846 })
2847 .await?
2848 .transaction
2849 .ok_or_else(|| anyhow!("missing transaction"))?;
2850 project_transaction = this
2851 .update(&mut cx, |this, cx| {
2852 this.deserialize_project_transaction(response, push_to_history, cx)
2853 })
2854 .await?;
2855 }
2856
2857 for (buffer, buffer_abs_path, language_server) in local_buffers {
2858 let text_document = lsp::TextDocumentIdentifier::new(
2859 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2860 );
2861 let capabilities = &language_server.capabilities();
2862 let tab_size = cx.update(|cx| {
2863 let language_name = buffer.read(cx).language().map(|language| language.name());
2864 cx.global::<Settings>().tab_size(language_name.as_deref())
2865 });
2866 let lsp_edits = if capabilities
2867 .document_formatting_provider
2868 .as_ref()
2869 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2870 {
2871 language_server
2872 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2873 text_document,
2874 options: lsp::FormattingOptions {
2875 tab_size: tab_size.into(),
2876 insert_spaces: true,
2877 insert_final_newline: Some(true),
2878 ..Default::default()
2879 },
2880 work_done_progress_params: Default::default(),
2881 })
2882 .await?
2883 } else if capabilities
2884 .document_range_formatting_provider
2885 .as_ref()
2886 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2887 {
2888 let buffer_start = lsp::Position::new(0, 0);
2889 let buffer_end =
2890 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2891 language_server
2892 .request::<lsp::request::RangeFormatting>(
2893 lsp::DocumentRangeFormattingParams {
2894 text_document,
2895 range: lsp::Range::new(buffer_start, buffer_end),
2896 options: lsp::FormattingOptions {
2897 tab_size: tab_size.into(),
2898 insert_spaces: true,
2899 insert_final_newline: Some(true),
2900 ..Default::default()
2901 },
2902 work_done_progress_params: Default::default(),
2903 },
2904 )
2905 .await?
2906 } else {
2907 continue;
2908 };
2909
2910 if let Some(lsp_edits) = lsp_edits {
2911 let edits = this
2912 .update(&mut cx, |this, cx| {
2913 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2914 })
2915 .await?;
2916 buffer.update(&mut cx, |buffer, cx| {
2917 buffer.finalize_last_transaction();
2918 buffer.start_transaction();
2919 for (range, text) in edits {
2920 buffer.edit([(range, text)], cx);
2921 }
2922 if buffer.end_transaction(cx).is_some() {
2923 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2924 if !push_to_history {
2925 buffer.forget_transaction(transaction.id);
2926 }
2927 project_transaction.0.insert(cx.handle(), transaction);
2928 }
2929 });
2930 }
2931 }
2932
2933 Ok(project_transaction)
2934 })
2935 }
2936
2937 pub fn definition<T: ToPointUtf16>(
2938 &self,
2939 buffer: &ModelHandle<Buffer>,
2940 position: T,
2941 cx: &mut ModelContext<Self>,
2942 ) -> Task<Result<Vec<LocationLink>>> {
2943 let position = position.to_point_utf16(buffer.read(cx));
2944 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2945 }
2946
2947 pub fn references<T: ToPointUtf16>(
2948 &self,
2949 buffer: &ModelHandle<Buffer>,
2950 position: T,
2951 cx: &mut ModelContext<Self>,
2952 ) -> Task<Result<Vec<Location>>> {
2953 let position = position.to_point_utf16(buffer.read(cx));
2954 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2955 }
2956
2957 pub fn document_highlights<T: ToPointUtf16>(
2958 &self,
2959 buffer: &ModelHandle<Buffer>,
2960 position: T,
2961 cx: &mut ModelContext<Self>,
2962 ) -> Task<Result<Vec<DocumentHighlight>>> {
2963 let position = position.to_point_utf16(buffer.read(cx));
2964
2965 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2966 }
2967
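    /// Queries every running language server for workspace symbols matching `query`
    /// (or forwards the request to the host for remote projects), resolving each
    /// result to a worktree-relative path and a display label.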
2968 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2969 if self.is_local() {
2970 let mut requests = Vec::new();
2971 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2972 let worktree_id = *worktree_id;
2973 if let Some(worktree) = self
2974 .worktree_for_id(worktree_id, cx)
2975 .and_then(|worktree| worktree.read(cx).as_local())
2976 {
2977 let lsp_adapter = lsp_adapter.clone();
2978 let worktree_abs_path = worktree.abs_path().clone();
2979 requests.push(
2980 language_server
2981 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2982 query: query.to_string(),
2983 ..Default::default()
2984 })
2985 .log_err()
2986 .map(move |response| {
2987 (
2988 lsp_adapter,
2989 worktree_id,
2990 worktree_abs_path,
2991 response.unwrap_or_default(),
2992 )
2993 }),
2994 );
2995 }
2996 }
2997
2998 cx.spawn_weak(|this, cx| async move {
2999 let responses = futures::future::join_all(requests).await;
3000 let this = if let Some(this) = this.upgrade(&cx) {
3001 this
3002 } else {
3003 return Ok(Default::default());
3004 };
3005 this.read_with(&cx, |this, cx| {
3006 let mut symbols = Vec::new();
3007 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3008 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3009 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3010 let mut worktree_id = source_worktree_id;
3011 let path;
3012 if let Some((worktree, rel_path)) =
3013 this.find_local_worktree(&abs_path, cx)
3014 {
3015 worktree_id = worktree.read(cx).id();
3016 path = rel_path;
3017 } else {
3018 path = relativize_path(&worktree_abs_path, &abs_path);
3019 }
3020
3021 let label = this
3022 .languages
3023 .select_language(&path)
3024 .and_then(|language| {
3025 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3026 })
3027 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3028 let signature = this.symbol_signature(worktree_id, &path);
3029
3030 Some(Symbol {
3031 source_worktree_id,
3032 worktree_id,
3033 language_server_name: adapter.name(),
3034 name: lsp_symbol.name,
3035 kind: lsp_symbol.kind,
3036 label,
3037 path,
3038 range: range_from_lsp(lsp_symbol.location.range),
3039 signature,
3040 })
3041 }));
3042 }
3043 Ok(symbols)
3044 })
3045 })
3046 } else if let Some(project_id) = self.remote_id() {
3047 let request = self.client.request(proto::GetProjectSymbols {
3048 project_id,
3049 query: query.to_string(),
3050 });
3051 cx.spawn_weak(|this, cx| async move {
3052 let response = request.await?;
3053 let mut symbols = Vec::new();
3054 if let Some(this) = this.upgrade(&cx) {
3055 this.read_with(&cx, |this, _| {
3056 symbols.extend(
3057 response
3058 .symbols
3059 .into_iter()
3060 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3061 );
3062 })
3063 }
3064 Ok(symbols)
3065 })
3066 } else {
3067 Task::ready(Ok(Default::default()))
3068 }
3069 }
3070
3071 pub fn open_buffer_for_symbol(
3072 &mut self,
3073 symbol: &Symbol,
3074 cx: &mut ModelContext<Self>,
3075 ) -> Task<Result<ModelHandle<Buffer>>> {
3076 if self.is_local() {
3077 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3078 symbol.source_worktree_id,
3079 symbol.language_server_name.clone(),
3080 )) {
3081 server.clone()
3082 } else {
3083 return Task::ready(Err(anyhow!(
3084 "language server for worktree and language not found"
3085 )));
3086 };
3087
3088 let worktree_abs_path = if let Some(worktree_abs_path) = self
3089 .worktree_for_id(symbol.worktree_id, cx)
3090 .and_then(|worktree| worktree.read(cx).as_local())
3091 .map(|local_worktree| local_worktree.abs_path())
3092 {
3093 worktree_abs_path
3094 } else {
3095 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3096 };
3097 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3098 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3099 uri
3100 } else {
3101 return Task::ready(Err(anyhow!("invalid symbol path")));
3102 };
3103
3104 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3105 } else if let Some(project_id) = self.remote_id() {
3106 let request = self.client.request(proto::OpenBufferForSymbol {
3107 project_id,
3108 symbol: Some(serialize_symbol(symbol)),
3109 });
3110 cx.spawn(|this, mut cx| async move {
3111 let response = request.await?;
3112 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3113 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3114 .await
3115 })
3116 } else {
3117 Task::ready(Err(anyhow!("project does not have a remote id")))
3118 }
3119 }
3120
3121 pub fn hover<T: ToPointUtf16>(
3122 &self,
3123 buffer: &ModelHandle<Buffer>,
3124 position: T,
3125 cx: &mut ModelContext<Self>,
3126 ) -> Task<Result<Option<Hover>>> {
3127 let position = position.to_point_utf16(buffer.read(cx));
3128 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3129 }
3130
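    /// Requests completions at the given position. Locally this issues an LSP
    /// `textDocument/completion` request and converts the results into anchored
    /// `Completion`s; on remote projects the request is forwarded to the host.
    ///
    /// A hypothetical call site (names are illustrative, not from this file):
    ///
    /// ```ignore
    /// let completions = project.update(cx, |project, cx| {
    ///     project.completions(&buffer, cursor_position, cx)
    /// });
    /// ```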
3131 pub fn completions<T: ToPointUtf16>(
3132 &self,
3133 source_buffer_handle: &ModelHandle<Buffer>,
3134 position: T,
3135 cx: &mut ModelContext<Self>,
3136 ) -> Task<Result<Vec<Completion>>> {
3137 let source_buffer_handle = source_buffer_handle.clone();
3138 let source_buffer = source_buffer_handle.read(cx);
3139 let buffer_id = source_buffer.remote_id();
3140 let language = source_buffer.language().cloned();
3141 let worktree;
3142 let buffer_abs_path;
3143 if let Some(file) = File::from_dyn(source_buffer.file()) {
3144 worktree = file.worktree.clone();
3145 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3146 } else {
3147 return Task::ready(Ok(Default::default()));
3148 };
3149
3150 let position = position.to_point_utf16(source_buffer);
3151 let anchor = source_buffer.anchor_after(position);
3152
3153 if worktree.read(cx).as_local().is_some() {
3154 let buffer_abs_path = buffer_abs_path.unwrap();
3155 let (_, lang_server) =
3156 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3157 server.clone()
3158 } else {
3159 return Task::ready(Ok(Default::default()));
3160 };
3161
3162 cx.spawn(|_, cx| async move {
3163 let completions = lang_server
3164 .request::<lsp::request::Completion>(lsp::CompletionParams {
3165 text_document_position: lsp::TextDocumentPositionParams::new(
3166 lsp::TextDocumentIdentifier::new(
3167 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3168 ),
3169 point_to_lsp(position),
3170 ),
3171 context: Default::default(),
3172 work_done_progress_params: Default::default(),
3173 partial_result_params: Default::default(),
3174 })
3175 .await
3176 .context("lsp completion request failed")?;
3177
3178 let completions = if let Some(completions) = completions {
3179 match completions {
3180 lsp::CompletionResponse::Array(completions) => completions,
3181 lsp::CompletionResponse::List(list) => list.items,
3182 }
3183 } else {
3184 Default::default()
3185 };
3186
3187 source_buffer_handle.read_with(&cx, |this, _| {
3188 let snapshot = this.snapshot();
3189 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3190 let mut range_for_token = None;
3191 Ok(completions
3192 .into_iter()
3193 .filter_map(|lsp_completion| {
3194 // For now, we can only handle additional edits if they are returned
3195 // when resolving the completion, not if they are present initially.
3196 if lsp_completion
3197 .additional_text_edits
3198 .as_ref()
3199 .map_or(false, |edits| !edits.is_empty())
3200 {
3201 return None;
3202 }
3203
3204 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3205 // If the language server provides a range to overwrite, then
3206 // check that the range is valid.
3207 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3208 let range = range_from_lsp(edit.range);
3209 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3210 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3211 if start != range.start || end != range.end {
3212 log::info!("completion out of expected range");
3213 return None;
3214 }
3215 (
3216 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3217 edit.new_text.clone(),
3218 )
3219 }
3220 // If the language server does not provide a range, then infer
3221 // the range based on the syntax tree.
3222 None => {
3223 if position != clipped_position {
3224 log::info!("completion out of expected range");
3225 return None;
3226 }
3227 let Range { start, end } = range_for_token
3228 .get_or_insert_with(|| {
3229 let offset = position.to_offset(&snapshot);
3230 let (range, kind) = snapshot.surrounding_word(offset);
3231 if kind == Some(CharKind::Word) {
3232 range
3233 } else {
3234 offset..offset
3235 }
3236 })
3237 .clone();
3238 let text = lsp_completion
3239 .insert_text
3240 .as_ref()
3241 .unwrap_or(&lsp_completion.label)
3242 .clone();
3243 (
3244 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3245 text.clone(),
3246 )
3247 }
3248 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3249 log::info!("unsupported insert/replace completion");
3250 return None;
3251 }
3252 };
3253
3254 Some(Completion {
3255 old_range,
3256 new_text,
3257 label: language
3258 .as_ref()
3259 .and_then(|l| l.label_for_completion(&lsp_completion))
3260 .unwrap_or_else(|| {
3261 CodeLabel::plain(
3262 lsp_completion.label.clone(),
3263 lsp_completion.filter_text.as_deref(),
3264 )
3265 }),
3266 lsp_completion,
3267 })
3268 })
3269 .collect())
3270 })
3271 })
3272 } else if let Some(project_id) = self.remote_id() {
3273 let rpc = self.client.clone();
3274 let message = proto::GetCompletions {
3275 project_id,
3276 buffer_id,
3277 position: Some(language::proto::serialize_anchor(&anchor)),
3278 version: serialize_version(&source_buffer.version()),
3279 };
3280 cx.spawn_weak(|_, mut cx| async move {
3281 let response = rpc.request(message).await?;
3282
3283 source_buffer_handle
3284 .update(&mut cx, |buffer, _| {
3285 buffer.wait_for_version(deserialize_version(response.version))
3286 })
3287 .await;
3288
3289 response
3290 .completions
3291 .into_iter()
3292 .map(|completion| {
3293 language::proto::deserialize_completion(completion, language.as_ref())
3294 })
3295 .collect()
3296 })
3297 } else {
3298 Task::ready(Ok(Default::default()))
3299 }
3300 }
3301
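    /// Resolves the given completion and applies any additional text edits it
    /// carries, returning the resulting transaction. The transaction is omitted
    /// from the undo history unless `push_to_history` is true.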
3302 pub fn apply_additional_edits_for_completion(
3303 &self,
3304 buffer_handle: ModelHandle<Buffer>,
3305 completion: Completion,
3306 push_to_history: bool,
3307 cx: &mut ModelContext<Self>,
3308 ) -> Task<Result<Option<Transaction>>> {
3309 let buffer = buffer_handle.read(cx);
3310 let buffer_id = buffer.remote_id();
3311
3312 if self.is_local() {
3313 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3314 {
3315 server.clone()
3316 } else {
3317 return Task::ready(Ok(Default::default()));
3318 };
3319
3320 cx.spawn(|this, mut cx| async move {
3321 let resolved_completion = lang_server
3322 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3323 .await?;
3324 if let Some(edits) = resolved_completion.additional_text_edits {
3325 let edits = this
3326 .update(&mut cx, |this, cx| {
3327 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3328 })
3329 .await?;
3330 buffer_handle.update(&mut cx, |buffer, cx| {
3331 buffer.finalize_last_transaction();
3332 buffer.start_transaction();
3333 for (range, text) in edits {
3334 buffer.edit([(range, text)], cx);
3335 }
3336 let transaction = if buffer.end_transaction(cx).is_some() {
3337 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3338 if !push_to_history {
3339 buffer.forget_transaction(transaction.id);
3340 }
3341 Some(transaction)
3342 } else {
3343 None
3344 };
3345 Ok(transaction)
3346 })
3347 } else {
3348 Ok(None)
3349 }
3350 })
3351 } else if let Some(project_id) = self.remote_id() {
3352 let client = self.client.clone();
3353 cx.spawn(|_, mut cx| async move {
3354 let response = client
3355 .request(proto::ApplyCompletionAdditionalEdits {
3356 project_id,
3357 buffer_id,
3358 completion: Some(language::proto::serialize_completion(&completion)),
3359 })
3360 .await?;
3361
3362 if let Some(transaction) = response.transaction {
3363 let transaction = language::proto::deserialize_transaction(transaction)?;
3364 buffer_handle
3365 .update(&mut cx, |buffer, _| {
3366 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3367 })
3368 .await;
3369 if push_to_history {
3370 buffer_handle.update(&mut cx, |buffer, _| {
3371 buffer.push_transaction(transaction.clone(), Instant::now());
3372 });
3373 }
3374 Ok(Some(transaction))
3375 } else {
3376 Ok(None)
3377 }
3378 })
3379 } else {
3380 Task::ready(Err(anyhow!("project does not have a remote id")))
3381 }
3382 }
3383
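    /// Requests code actions for the given range, passing along the diagnostics
    /// that overlap it and restricting the results to the quickfix, refactor,
    /// refactor-extract, and source action kinds.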
3384 pub fn code_actions<T: Clone + ToOffset>(
3385 &self,
3386 buffer_handle: &ModelHandle<Buffer>,
3387 range: Range<T>,
3388 cx: &mut ModelContext<Self>,
3389 ) -> Task<Result<Vec<CodeAction>>> {
3390 let buffer_handle = buffer_handle.clone();
3391 let buffer = buffer_handle.read(cx);
3392 let snapshot = buffer.snapshot();
3393 let relevant_diagnostics = snapshot
3394 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3395 .map(|entry| entry.to_lsp_diagnostic_stub())
3396 .collect();
3397 let buffer_id = buffer.remote_id();
3398 let worktree;
3399 let buffer_abs_path;
3400 if let Some(file) = File::from_dyn(buffer.file()) {
3401 worktree = file.worktree.clone();
3402 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3403 } else {
3404 return Task::ready(Ok(Default::default()));
3405 };
3406 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3407
3408 if worktree.read(cx).as_local().is_some() {
3409 let buffer_abs_path = buffer_abs_path.unwrap();
3410 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3411 {
3412 server.clone()
3413 } else {
3414 return Task::ready(Ok(Default::default()));
3415 };
3416
3417 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3418 cx.foreground().spawn(async move {
3419                if lang_server.capabilities().code_action_provider.is_none() {
3420 return Ok(Default::default());
3421 }
3422
3423 Ok(lang_server
3424 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3425 text_document: lsp::TextDocumentIdentifier::new(
3426 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3427 ),
3428 range: lsp_range,
3429 work_done_progress_params: Default::default(),
3430 partial_result_params: Default::default(),
3431 context: lsp::CodeActionContext {
3432 diagnostics: relevant_diagnostics,
3433 only: Some(vec![
3434 lsp::CodeActionKind::QUICKFIX,
3435 lsp::CodeActionKind::REFACTOR,
3436 lsp::CodeActionKind::REFACTOR_EXTRACT,
3437 lsp::CodeActionKind::SOURCE,
3438 ]),
3439 },
3440 })
3441 .await?
3442 .unwrap_or_default()
3443 .into_iter()
3444 .filter_map(|entry| {
3445 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3446 Some(CodeAction {
3447 range: range.clone(),
3448 lsp_action,
3449 })
3450 } else {
3451 None
3452 }
3453 })
3454 .collect())
3455 })
3456 } else if let Some(project_id) = self.remote_id() {
3457 let rpc = self.client.clone();
3458 let version = buffer.version();
3459 cx.spawn_weak(|_, mut cx| async move {
3460 let response = rpc
3461 .request(proto::GetCodeActions {
3462 project_id,
3463 buffer_id,
3464 start: Some(language::proto::serialize_anchor(&range.start)),
3465 end: Some(language::proto::serialize_anchor(&range.end)),
3466 version: serialize_version(&version),
3467 })
3468 .await?;
3469
3470 buffer_handle
3471 .update(&mut cx, |buffer, _| {
3472 buffer.wait_for_version(deserialize_version(response.version))
3473 })
3474 .await;
3475
3476 response
3477 .actions
3478 .into_iter()
3479 .map(language::proto::deserialize_code_action)
3480 .collect()
3481 })
3482 } else {
3483 Task::ready(Ok(Default::default()))
3484 }
3485 }
3486
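    /// Applies a code action: the action is first resolved (or re-requested when it
    /// can't be resolved in place), then its workspace edit is applied or its
    /// command executed, and the affected buffers and transactions are returned as
    /// a `ProjectTransaction`.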
3487 pub fn apply_code_action(
3488 &self,
3489 buffer_handle: ModelHandle<Buffer>,
3490 mut action: CodeAction,
3491 push_to_history: bool,
3492 cx: &mut ModelContext<Self>,
3493 ) -> Task<Result<ProjectTransaction>> {
3494 if self.is_local() {
3495 let buffer = buffer_handle.read(cx);
3496 let (lsp_adapter, lang_server) =
3497 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3498 server.clone()
3499 } else {
3500 return Task::ready(Ok(Default::default()));
3501 };
3502 let range = action.range.to_point_utf16(buffer);
3503
3504 cx.spawn(|this, mut cx| async move {
3505 if let Some(lsp_range) = action
3506 .lsp_action
3507 .data
3508 .as_mut()
3509 .and_then(|d| d.get_mut("codeActionParams"))
3510 .and_then(|d| d.get_mut("range"))
3511 {
3512 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3513 action.lsp_action = lang_server
3514 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3515 .await?;
3516 } else {
3517 let actions = this
3518 .update(&mut cx, |this, cx| {
3519 this.code_actions(&buffer_handle, action.range, cx)
3520 })
3521 .await?;
3522 action.lsp_action = actions
3523 .into_iter()
3524 .find(|a| a.lsp_action.title == action.lsp_action.title)
3525 .ok_or_else(|| anyhow!("code action is outdated"))?
3526 .lsp_action;
3527 }
3528
3529 if let Some(edit) = action.lsp_action.edit {
3530 Self::deserialize_workspace_edit(
3531 this,
3532 edit,
3533 push_to_history,
3534 lsp_adapter,
3535 lang_server,
3536 &mut cx,
3537 )
3538 .await
3539 } else if let Some(command) = action.lsp_action.command {
3540 this.update(&mut cx, |this, _| {
3541 this.last_workspace_edits_by_language_server
3542 .remove(&lang_server.server_id());
3543 });
3544 lang_server
3545 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3546 command: command.command,
3547 arguments: command.arguments.unwrap_or_default(),
3548 ..Default::default()
3549 })
3550 .await?;
3551 Ok(this.update(&mut cx, |this, _| {
3552 this.last_workspace_edits_by_language_server
3553 .remove(&lang_server.server_id())
3554 .unwrap_or_default()
3555 }))
3556 } else {
3557 Ok(ProjectTransaction::default())
3558 }
3559 })
3560 } else if let Some(project_id) = self.remote_id() {
3561 let client = self.client.clone();
3562 let request = proto::ApplyCodeAction {
3563 project_id,
3564 buffer_id: buffer_handle.read(cx).remote_id(),
3565 action: Some(language::proto::serialize_code_action(&action)),
3566 };
3567 cx.spawn(|this, mut cx| async move {
3568 let response = client
3569 .request(request)
3570 .await?
3571 .transaction
3572 .ok_or_else(|| anyhow!("missing transaction"))?;
3573 this.update(&mut cx, |this, cx| {
3574 this.deserialize_project_transaction(response, push_to_history, cx)
3575 })
3576 .await
3577 })
3578 } else {
3579 Task::ready(Err(anyhow!("project does not have a remote id")))
3580 }
3581 }
3582
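    /// Applies an LSP `WorkspaceEdit` to the project: performs any file create,
    /// rename, and delete operations, applies text edits to the affected buffers,
    /// and collects the resulting transactions.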
3583 async fn deserialize_workspace_edit(
3584 this: ModelHandle<Self>,
3585 edit: lsp::WorkspaceEdit,
3586 push_to_history: bool,
3587 lsp_adapter: Arc<dyn LspAdapter>,
3588 language_server: Arc<LanguageServer>,
3589 cx: &mut AsyncAppContext,
3590 ) -> Result<ProjectTransaction> {
3591 let fs = this.read_with(cx, |this, _| this.fs.clone());
3592 let mut operations = Vec::new();
3593 if let Some(document_changes) = edit.document_changes {
3594 match document_changes {
3595 lsp::DocumentChanges::Edits(edits) => {
3596 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3597 }
3598 lsp::DocumentChanges::Operations(ops) => operations = ops,
3599 }
3600 } else if let Some(changes) = edit.changes {
3601 operations.extend(changes.into_iter().map(|(uri, edits)| {
3602 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3603 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3604 uri,
3605 version: None,
3606 },
3607 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3608 })
3609 }));
3610 }
3611
3612 let mut project_transaction = ProjectTransaction::default();
3613 for operation in operations {
3614 match operation {
3615 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3616 let abs_path = op
3617 .uri
3618 .to_file_path()
3619 .map_err(|_| anyhow!("can't convert URI to path"))?;
3620
3621 if let Some(parent_path) = abs_path.parent() {
3622 fs.create_dir(parent_path).await?;
3623 }
3624 if abs_path.ends_with("/") {
3625 fs.create_dir(&abs_path).await?;
3626 } else {
3627 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3628 .await?;
3629 }
3630 }
3631 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3632 let source_abs_path = op
3633 .old_uri
3634 .to_file_path()
3635 .map_err(|_| anyhow!("can't convert URI to path"))?;
3636 let target_abs_path = op
3637 .new_uri
3638 .to_file_path()
3639 .map_err(|_| anyhow!("can't convert URI to path"))?;
3640 fs.rename(
3641 &source_abs_path,
3642 &target_abs_path,
3643 op.options.map(Into::into).unwrap_or_default(),
3644 )
3645 .await?;
3646 }
3647 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3648 let abs_path = op
3649 .uri
3650 .to_file_path()
3651 .map_err(|_| anyhow!("can't convert URI to path"))?;
3652 let options = op.options.map(Into::into).unwrap_or_default();
3653 if abs_path.ends_with("/") {
3654 fs.remove_dir(&abs_path, options).await?;
3655 } else {
3656 fs.remove_file(&abs_path, options).await?;
3657 }
3658 }
3659 lsp::DocumentChangeOperation::Edit(op) => {
3660 let buffer_to_edit = this
3661 .update(cx, |this, cx| {
3662 this.open_local_buffer_via_lsp(
3663 op.text_document.uri,
3664 lsp_adapter.clone(),
3665 language_server.clone(),
3666 cx,
3667 )
3668 })
3669 .await?;
3670
3671 let edits = this
3672 .update(cx, |this, cx| {
3673 let edits = op.edits.into_iter().map(|edit| match edit {
3674 lsp::OneOf::Left(edit) => edit,
3675 lsp::OneOf::Right(edit) => edit.text_edit,
3676 });
3677 this.edits_from_lsp(
3678 &buffer_to_edit,
3679 edits,
3680 op.text_document.version,
3681 cx,
3682 )
3683 })
3684 .await?;
3685
3686 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3687 buffer.finalize_last_transaction();
3688 buffer.start_transaction();
3689 for (range, text) in edits {
3690 buffer.edit([(range, text)], cx);
3691 }
3692 let transaction = if buffer.end_transaction(cx).is_some() {
3693 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3694 if !push_to_history {
3695 buffer.forget_transaction(transaction.id);
3696 }
3697 Some(transaction)
3698 } else {
3699 None
3700 };
3701
3702 transaction
3703 });
3704 if let Some(transaction) = transaction {
3705 project_transaction.0.insert(buffer_to_edit, transaction);
3706 }
3707 }
3708 }
3709 }
3710
3711 Ok(project_transaction)
3712 }
3713
3714 pub fn prepare_rename<T: ToPointUtf16>(
3715 &self,
3716 buffer: ModelHandle<Buffer>,
3717 position: T,
3718 cx: &mut ModelContext<Self>,
3719 ) -> Task<Result<Option<Range<Anchor>>>> {
3720 let position = position.to_point_utf16(buffer.read(cx));
3721 self.request_lsp(buffer, PrepareRename { position }, cx)
3722 }
3723
3724 pub fn perform_rename<T: ToPointUtf16>(
3725 &self,
3726 buffer: ModelHandle<Buffer>,
3727 position: T,
3728 new_name: String,
3729 push_to_history: bool,
3730 cx: &mut ModelContext<Self>,
3731 ) -> Task<Result<ProjectTransaction>> {
3732 let position = position.to_point_utf16(buffer.read(cx));
3733 self.request_lsp(
3734 buffer,
3735 PerformRename {
3736 position,
3737 new_name,
3738 push_to_history,
3739 },
3740 cx,
3741 )
3742 }
3743
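    /// Searches the project for `query`. Locally, background workers scan the
    /// visible files of each worktree for candidate paths, candidate buffers are
    /// opened, and their contents are searched in parallel; for remote projects the
    /// search is delegated to the host.
    ///
    /// A hypothetical call site (the `query` value is assumed to be built by the
    /// caller using the `search` module):
    ///
    /// ```ignore
    /// let results = project.update(cx, |project, cx| project.search(query, cx));
    /// ```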
3744 pub fn search(
3745 &self,
3746 query: SearchQuery,
3747 cx: &mut ModelContext<Self>,
3748 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3749 if self.is_local() {
3750 let snapshots = self
3751 .visible_worktrees(cx)
3752 .filter_map(|tree| {
3753 let tree = tree.read(cx).as_local()?;
3754 Some(tree.snapshot())
3755 })
3756 .collect::<Vec<_>>();
3757
3758 let background = cx.background().clone();
3759 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3760 if path_count == 0 {
3761 return Task::ready(Ok(Default::default()));
3762 }
3763 let workers = background.num_cpus().min(path_count);
3764 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3765 cx.background()
3766 .spawn({
3767 let fs = self.fs.clone();
3768 let background = cx.background().clone();
3769 let query = query.clone();
3770 async move {
3771 let fs = &fs;
3772 let query = &query;
3773 let matching_paths_tx = &matching_paths_tx;
3774 let paths_per_worker = (path_count + workers - 1) / workers;
3775 let snapshots = &snapshots;
3776 background
3777 .scoped(|scope| {
3778 for worker_ix in 0..workers {
3779 let worker_start_ix = worker_ix * paths_per_worker;
3780 let worker_end_ix = worker_start_ix + paths_per_worker;
3781 scope.spawn(async move {
3782 let mut snapshot_start_ix = 0;
3783 let mut abs_path = PathBuf::new();
3784 for snapshot in snapshots {
3785 let snapshot_end_ix =
3786 snapshot_start_ix + snapshot.visible_file_count();
3787 if worker_end_ix <= snapshot_start_ix {
3788 break;
3789 } else if worker_start_ix > snapshot_end_ix {
3790 snapshot_start_ix = snapshot_end_ix;
3791 continue;
3792 } else {
3793 let start_in_snapshot = worker_start_ix
3794 .saturating_sub(snapshot_start_ix);
3795 let end_in_snapshot =
3796 cmp::min(worker_end_ix, snapshot_end_ix)
3797 - snapshot_start_ix;
3798
3799 for entry in snapshot
3800 .files(false, start_in_snapshot)
3801 .take(end_in_snapshot - start_in_snapshot)
3802 {
3803 if matching_paths_tx.is_closed() {
3804 break;
3805 }
3806
3807 abs_path.clear();
3808 abs_path.push(&snapshot.abs_path());
3809 abs_path.push(&entry.path);
3810 let matches = if let Some(file) =
3811 fs.open_sync(&abs_path).await.log_err()
3812 {
3813 query.detect(file).unwrap_or(false)
3814 } else {
3815 false
3816 };
3817
3818 if matches {
3819 let project_path =
3820 (snapshot.id(), entry.path.clone());
3821 if matching_paths_tx
3822 .send(project_path)
3823 .await
3824 .is_err()
3825 {
3826 break;
3827 }
3828 }
3829 }
3830
3831 snapshot_start_ix = snapshot_end_ix;
3832 }
3833 }
3834 });
3835 }
3836 })
3837 .await;
3838 }
3839 })
3840 .detach();
3841
3842 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3843 let open_buffers = self
3844 .opened_buffers
3845 .values()
3846 .filter_map(|b| b.upgrade(cx))
3847 .collect::<HashSet<_>>();
3848 cx.spawn(|this, cx| async move {
3849 for buffer in &open_buffers {
3850 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3851 buffers_tx.send((buffer.clone(), snapshot)).await?;
3852 }
3853
3854 let open_buffers = Rc::new(RefCell::new(open_buffers));
3855 while let Some(project_path) = matching_paths_rx.next().await {
3856 if buffers_tx.is_closed() {
3857 break;
3858 }
3859
3860 let this = this.clone();
3861 let open_buffers = open_buffers.clone();
3862 let buffers_tx = buffers_tx.clone();
3863 cx.spawn(|mut cx| async move {
3864 if let Some(buffer) = this
3865 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3866 .await
3867 .log_err()
3868 {
3869 if open_buffers.borrow_mut().insert(buffer.clone()) {
3870 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3871 buffers_tx.send((buffer, snapshot)).await?;
3872 }
3873 }
3874
3875 Ok::<_, anyhow::Error>(())
3876 })
3877 .detach();
3878 }
3879
3880 Ok::<_, anyhow::Error>(())
3881 })
3882 .detach_and_log_err(cx);
3883
3884 let background = cx.background().clone();
3885 cx.background().spawn(async move {
3886 let query = &query;
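                // Each worker accumulates matches into its own map; the maps are merged at the end.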
3887 let mut matched_buffers = Vec::new();
3888 for _ in 0..workers {
3889 matched_buffers.push(HashMap::default());
3890 }
3891 background
3892 .scoped(|scope| {
3893 for worker_matched_buffers in matched_buffers.iter_mut() {
3894 let mut buffers_rx = buffers_rx.clone();
3895 scope.spawn(async move {
3896 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3897 let buffer_matches = query
3898 .search(snapshot.as_rope())
3899 .await
3900 .iter()
3901 .map(|range| {
3902 snapshot.anchor_before(range.start)
3903 ..snapshot.anchor_after(range.end)
3904 })
3905 .collect::<Vec<_>>();
3906 if !buffer_matches.is_empty() {
3907 worker_matched_buffers
3908 .insert(buffer.clone(), buffer_matches);
3909 }
3910 }
3911 });
3912 }
3913 })
3914 .await;
3915 Ok(matched_buffers.into_iter().flatten().collect())
3916 })
3917 } else if let Some(project_id) = self.remote_id() {
3918 let request = self.client.request(query.to_proto(project_id));
3919 cx.spawn(|this, mut cx| async move {
3920 let response = request.await?;
3921 let mut result = HashMap::default();
3922 for location in response.locations {
3923 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3924 let target_buffer = this
3925 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3926 .await?;
3927 let start = location
3928 .start
3929 .and_then(deserialize_anchor)
3930 .ok_or_else(|| anyhow!("missing target start"))?;
3931 let end = location
3932 .end
3933 .and_then(deserialize_anchor)
3934 .ok_or_else(|| anyhow!("missing target end"))?;
3935 result
3936 .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
3939 }
3940 Ok(result)
3941 })
3942 } else {
3943 Task::ready(Ok(Default::default()))
3944 }
3945 }
3946
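    /// Routes an LSP-backed request to the buffer's local language server or, for remote
    /// projects, forwards it to the host over RPC. Resolves to a default response when no
    /// server is available or the server lacks the required capability.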
3947 fn request_lsp<R: LspCommand>(
3948 &self,
3949 buffer_handle: ModelHandle<Buffer>,
3950 request: R,
3951 cx: &mut ModelContext<Self>,
3952 ) -> Task<Result<R::Response>>
3953 where
3954 <R::LspRequest as lsp::request::Request>::Result: Send,
3955 {
3956 let buffer = buffer_handle.read(cx);
3957 if self.is_local() {
3958 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3959 if let Some((file, (_, language_server))) =
3960 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3961 {
3962 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3963 return cx.spawn(|this, cx| async move {
3964 if !request.check_capabilities(&language_server.capabilities()) {
3965 return Ok(Default::default());
3966 }
3967
3968 let response = language_server
3969 .request::<R::LspRequest>(lsp_params)
3970 .await
3971 .context("lsp request failed")?;
3972 request
3973 .response_from_lsp(response, this, buffer_handle, cx)
3974 .await
3975 });
3976 }
3977 } else if let Some(project_id) = self.remote_id() {
3978 let rpc = self.client.clone();
3979 let message = request.to_proto(project_id, buffer);
3980 return cx.spawn(|this, cx| async move {
3981 let response = rpc.request(message).await?;
3982 request
3983 .response_from_proto(response, this, buffer_handle, cx)
3984 .await
3985 });
3986 }
3987 Task::ready(Ok(Default::default()))
3988 }
3989
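    /// Returns the worktree containing `abs_path` along with the path relative to that
    /// worktree, creating a new local worktree if no existing one contains it.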
3990 pub fn find_or_create_local_worktree(
3991 &mut self,
3992 abs_path: impl AsRef<Path>,
3993 visible: bool,
3994 cx: &mut ModelContext<Self>,
3995 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3996 let abs_path = abs_path.as_ref();
3997 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3998 Task::ready(Ok((tree.clone(), relative_path.into())))
3999 } else {
4000 let worktree = self.create_local_worktree(abs_path, visible, cx);
4001 cx.foreground()
4002 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4003 }
4004 }
4005
4006 pub fn find_local_worktree(
4007 &self,
4008 abs_path: &Path,
4009 cx: &AppContext,
4010 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4011 for tree in &self.worktrees {
4012 if let Some(tree) = tree.upgrade(cx) {
4013 if let Some(relative_path) = tree
4014 .read(cx)
4015 .as_local()
4016 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4017 {
4018 return Some((tree.clone(), relative_path.into()));
4019 }
4020 }
4021 }
4022 None
4023 }
4024
4025 pub fn is_shared(&self) -> bool {
4026 match &self.client_state {
4027 ProjectClientState::Local { is_shared, .. } => *is_shared,
4028 ProjectClientState::Remote { .. } => false,
4029 }
4030 }
4031
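    /// Starts loading a local worktree for `abs_path`, deduplicating concurrent requests for
    /// the same path via `loading_local_worktrees`. If the project is currently shared, the
    /// new worktree is shared with collaborators once it has been added.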
4032 fn create_local_worktree(
4033 &mut self,
4034 abs_path: impl AsRef<Path>,
4035 visible: bool,
4036 cx: &mut ModelContext<Self>,
4037 ) -> Task<Result<ModelHandle<Worktree>>> {
4038 let fs = self.fs.clone();
4039 let client = self.client.clone();
4040 let next_entry_id = self.next_entry_id.clone();
4041 let path: Arc<Path> = abs_path.as_ref().into();
4042 let task = self
4043 .loading_local_worktrees
4044 .entry(path.clone())
4045 .or_insert_with(|| {
4046 cx.spawn(|project, mut cx| {
4047 async move {
4048 let worktree = Worktree::local(
4049 client.clone(),
4050 path.clone(),
4051 visible,
4052 fs,
4053 next_entry_id,
4054 &mut cx,
4055 )
4056 .await;
4057 project.update(&mut cx, |project, _| {
4058 project.loading_local_worktrees.remove(&path);
4059 });
4060 let worktree = worktree?;
4061
4062 let project_id = project.update(&mut cx, |project, cx| {
4063 project.add_worktree(&worktree, cx);
4064 project.shared_remote_id()
4065 });
4066
4067 if let Some(project_id) = project_id {
4068 worktree
4069 .update(&mut cx, |worktree, cx| {
4070 worktree.as_local_mut().unwrap().share(project_id, cx)
4071 })
4072 .await
4073 .log_err();
4074 }
4075
4076 Ok(worktree)
4077 }
                .map_err(Arc::new)
4079 })
4080 .shared()
4081 })
4082 .clone();
4083 cx.foreground().spawn(async move {
4084 match task.await {
4085 Ok(worktree) => Ok(worktree),
4086 Err(err) => Err(anyhow!("{}", err)),
4087 }
4088 })
4089 }
4090
4091 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4092 self.worktrees.retain(|worktree| {
4093 if let Some(worktree) = worktree.upgrade(cx) {
4094 let id = worktree.read(cx).id();
4095 if id == id_to_remove {
4096 cx.emit(Event::WorktreeRemoved(id));
4097 false
4098 } else {
4099 true
4100 }
4101 } else {
4102 false
4103 }
4104 });
4105 self.metadata_changed(true, cx);
4106 cx.notify();
4107 }
4108
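    /// Registers a worktree on this project, holding it strongly when the project is shared,
    /// the worktree is visible, or the worktree is remote, and weakly otherwise. The worktree
    /// is removed from the project again when its handle is released.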
4109 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4110 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4111 if worktree.read(cx).is_local() {
4112 cx.subscribe(&worktree, |this, worktree, _, cx| {
4113 this.update_local_worktree_buffers(worktree, cx);
4114 })
4115 .detach();
4116 }
4117
4118 let push_strong_handle = {
4119 let worktree = worktree.read(cx);
4120 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4121 };
4122 if push_strong_handle {
4123 self.worktrees
4124 .push(WorktreeHandle::Strong(worktree.clone()));
4125 } else {
4126 self.worktrees
4127 .push(WorktreeHandle::Weak(worktree.downgrade()));
4128 }
4129
4130 self.metadata_changed(true, cx);
4131 cx.observe_release(&worktree, |this, worktree, cx| {
4132 this.remove_worktree(worktree.id(), cx);
4133 cx.notify();
4134 })
4135 .detach();
4136
4137 cx.emit(Event::WorktreeAdded);
4138 cx.notify();
4139 }
4140
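    /// Reconciles open buffers with a local worktree after its snapshot changes: updates each
    /// buffer's file (following renames), notifies collaborators, drops buffers whose handles
    /// have been released, and re-registers renamed buffers with their language servers.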
4141 fn update_local_worktree_buffers(
4142 &mut self,
4143 worktree_handle: ModelHandle<Worktree>,
4144 cx: &mut ModelContext<Self>,
4145 ) {
4146 let snapshot = worktree_handle.read(cx).snapshot();
4147 let mut buffers_to_delete = Vec::new();
4148 let mut renamed_buffers = Vec::new();
4149 for (buffer_id, buffer) in &self.opened_buffers {
4150 if let Some(buffer) = buffer.upgrade(cx) {
4151 buffer.update(cx, |buffer, cx| {
4152 if let Some(old_file) = File::from_dyn(buffer.file()) {
4153 if old_file.worktree != worktree_handle {
4154 return;
4155 }
4156
4157 let new_file = if let Some(entry) = old_file
4158 .entry_id
4159 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4160 {
4161 File {
4162 is_local: true,
4163 entry_id: Some(entry.id),
4164 mtime: entry.mtime,
4165 path: entry.path.clone(),
4166 worktree: worktree_handle.clone(),
4167 }
4168 } else if let Some(entry) =
4169 snapshot.entry_for_path(old_file.path().as_ref())
4170 {
4171 File {
4172 is_local: true,
4173 entry_id: Some(entry.id),
4174 mtime: entry.mtime,
4175 path: entry.path.clone(),
4176 worktree: worktree_handle.clone(),
4177 }
4178 } else {
4179 File {
4180 is_local: true,
4181 entry_id: None,
4182 path: old_file.path().clone(),
4183 mtime: old_file.mtime(),
4184 worktree: worktree_handle.clone(),
4185 }
4186 };
4187
4188 let old_path = old_file.abs_path(cx);
4189 if new_file.abs_path(cx) != old_path {
4190 renamed_buffers.push((cx.handle(), old_path));
4191 }
4192
4193 if let Some(project_id) = self.shared_remote_id() {
4194 self.client
4195 .send(proto::UpdateBufferFile {
4196 project_id,
4197 buffer_id: *buffer_id as u64,
4198 file: Some(new_file.to_proto()),
4199 })
4200 .log_err();
4201 }
4202 buffer.file_updated(Arc::new(new_file), cx).detach();
4203 }
4204 });
4205 } else {
4206 buffers_to_delete.push(*buffer_id);
4207 }
4208 }
4209
4210 for buffer_id in buffers_to_delete {
4211 self.opened_buffers.remove(&buffer_id);
4212 }
4213
4214 for (buffer, old_path) in renamed_buffers {
4215 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4216 self.assign_language_to_buffer(&buffer, cx);
4217 self.register_buffer_with_language_server(&buffer, cx);
4218 }
4219 }
4220
4221 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4222 let new_active_entry = entry.and_then(|project_path| {
4223 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4224 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4225 Some(entry.id)
4226 });
4227 if new_active_entry != self.active_entry {
4228 self.active_entry = new_active_entry;
4229 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4230 }
4231 }
4232
4233 pub fn language_servers_running_disk_based_diagnostics<'a>(
4234 &'a self,
4235 ) -> impl 'a + Iterator<Item = usize> {
4236 self.language_server_statuses
4237 .iter()
4238 .filter_map(|(id, status)| {
4239 if status.has_pending_diagnostic_updates {
4240 Some(*id)
4241 } else {
4242 None
4243 }
4244 })
4245 }
4246
4247 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4248 let mut summary = DiagnosticSummary::default();
4249 for (_, path_summary) in self.diagnostic_summaries(cx) {
4250 summary.error_count += path_summary.error_count;
4251 summary.warning_count += path_summary.warning_count;
4252 }
4253 summary
4254 }
4255
4256 pub fn diagnostic_summaries<'a>(
4257 &'a self,
4258 cx: &'a AppContext,
4259 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4260 self.visible_worktrees(cx).flat_map(move |worktree| {
4261 let worktree = worktree.read(cx);
4262 let worktree_id = worktree.id();
4263 worktree
4264 .diagnostic_summaries()
4265 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4266 })
4267 }
4268
4269 pub fn disk_based_diagnostics_started(
4270 &mut self,
4271 language_server_id: usize,
4272 cx: &mut ModelContext<Self>,
4273 ) {
4274 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4275 }
4276
4277 pub fn disk_based_diagnostics_finished(
4278 &mut self,
4279 language_server_id: usize,
4280 cx: &mut ModelContext<Self>,
4281 ) {
4282 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4283 }
4284
4285 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4286 self.active_entry
4287 }
4288
4289 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4290 self.worktree_for_id(path.worktree_id, cx)?
4291 .read(cx)
4292 .entry_for_path(&path.path)
4293 .map(|entry| entry.id)
4294 }
4295
4296 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4297 let worktree = self.worktree_for_entry(entry_id, cx)?;
4298 let worktree = worktree.read(cx);
4299 let worktree_id = worktree.id();
4300 let path = worktree.entry_for_id(entry_id)?.path.clone();
4301 Some(ProjectPath { worktree_id, path })
4302 }
4303
4304 // RPC message handlers
4305
4306 async fn handle_request_join_project(
4307 this: ModelHandle<Self>,
4308 message: TypedEnvelope<proto::RequestJoinProject>,
4309 _: Arc<Client>,
4310 mut cx: AsyncAppContext,
4311 ) -> Result<()> {
4312 let user_id = message.payload.requester_id;
4313 if this.read_with(&cx, |project, _| {
4314 project.collaborators.values().any(|c| c.user.id == user_id)
4315 }) {
4316 this.update(&mut cx, |this, cx| {
4317 this.respond_to_join_request(user_id, true, cx)
4318 });
4319 } else {
4320 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4321 let user = user_store
4322 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4323 .await?;
4324 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4325 }
4326 Ok(())
4327 }
4328
4329 async fn handle_unregister_project(
4330 this: ModelHandle<Self>,
4331 _: TypedEnvelope<proto::UnregisterProject>,
4332 _: Arc<Client>,
4333 mut cx: AsyncAppContext,
4334 ) -> Result<()> {
4335 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4336 Ok(())
4337 }
4338
4339 async fn handle_project_unshared(
4340 this: ModelHandle<Self>,
4341 _: TypedEnvelope<proto::ProjectUnshared>,
4342 _: Arc<Client>,
4343 mut cx: AsyncAppContext,
4344 ) -> Result<()> {
4345 this.update(&mut cx, |this, cx| this.unshared(cx));
4346 Ok(())
4347 }
4348
4349 async fn handle_add_collaborator(
4350 this: ModelHandle<Self>,
4351 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4352 _: Arc<Client>,
4353 mut cx: AsyncAppContext,
4354 ) -> Result<()> {
4355 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4356 let collaborator = envelope
4357 .payload
4358 .collaborator
4359 .take()
4360 .ok_or_else(|| anyhow!("empty collaborator"))?;
4361
4362 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4363 this.update(&mut cx, |this, cx| {
4364 this.collaborators
4365 .insert(collaborator.peer_id, collaborator);
4366 cx.notify();
4367 });
4368
4369 Ok(())
4370 }
4371
4372 async fn handle_remove_collaborator(
4373 this: ModelHandle<Self>,
4374 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4375 _: Arc<Client>,
4376 mut cx: AsyncAppContext,
4377 ) -> Result<()> {
4378 this.update(&mut cx, |this, cx| {
4379 let peer_id = PeerId(envelope.payload.peer_id);
4380 let replica_id = this
4381 .collaborators
4382 .remove(&peer_id)
4383 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4384 .replica_id;
4385 for (_, buffer) in &this.opened_buffers {
4386 if let Some(buffer) = buffer.upgrade(cx) {
4387 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4388 }
4389 }
4390
4391 cx.emit(Event::CollaboratorLeft(peer_id));
4392 cx.notify();
4393 Ok(())
4394 })
4395 }
4396
4397 async fn handle_join_project_request_cancelled(
4398 this: ModelHandle<Self>,
4399 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4400 _: Arc<Client>,
4401 mut cx: AsyncAppContext,
4402 ) -> Result<()> {
4403 let user = this
4404 .update(&mut cx, |this, cx| {
4405 this.user_store.update(cx, |user_store, cx| {
4406 user_store.fetch_user(envelope.payload.requester_id, cx)
4407 })
4408 })
4409 .await?;
4410
4411 this.update(&mut cx, |_, cx| {
4412 cx.emit(Event::ContactCancelledJoinRequest(user));
4413 });
4414
4415 Ok(())
4416 }
4417
4418 async fn handle_update_project(
4419 this: ModelHandle<Self>,
4420 envelope: TypedEnvelope<proto::UpdateProject>,
4421 client: Arc<Client>,
4422 mut cx: AsyncAppContext,
4423 ) -> Result<()> {
4424 this.update(&mut cx, |this, cx| {
4425 let replica_id = this.replica_id();
4426 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4427
4428 let mut old_worktrees_by_id = this
4429 .worktrees
4430 .drain(..)
4431 .filter_map(|worktree| {
4432 let worktree = worktree.upgrade(cx)?;
4433 Some((worktree.read(cx).id(), worktree))
4434 })
4435 .collect::<HashMap<_, _>>();
4436
4437 for worktree in envelope.payload.worktrees {
4438 if let Some(old_worktree) =
4439 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4440 {
4441 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4442 } else {
4443 let worktree =
4444 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4445 this.add_worktree(&worktree, cx);
4446 }
4447 }
4448
4449 this.metadata_changed(true, cx);
4450 for (id, _) in old_worktrees_by_id {
4451 cx.emit(Event::WorktreeRemoved(id));
4452 }
4453
4454 Ok(())
4455 })
4456 }
4457
4458 async fn handle_update_worktree(
4459 this: ModelHandle<Self>,
4460 envelope: TypedEnvelope<proto::UpdateWorktree>,
4461 _: Arc<Client>,
4462 mut cx: AsyncAppContext,
4463 ) -> Result<()> {
4464 this.update(&mut cx, |this, cx| {
4465 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4466 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4467 worktree.update(cx, |worktree, _| {
4468 let worktree = worktree.as_remote_mut().unwrap();
4469 worktree.update_from_remote(envelope.payload);
4470 });
4471 }
4472 Ok(())
4473 })
4474 }
4475
4476 async fn handle_create_project_entry(
4477 this: ModelHandle<Self>,
4478 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4479 _: Arc<Client>,
4480 mut cx: AsyncAppContext,
4481 ) -> Result<proto::ProjectEntryResponse> {
4482 let worktree = this.update(&mut cx, |this, cx| {
4483 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4484 this.worktree_for_id(worktree_id, cx)
4485 .ok_or_else(|| anyhow!("worktree not found"))
4486 })?;
4487 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4488 let entry = worktree
4489 .update(&mut cx, |worktree, cx| {
4490 let worktree = worktree.as_local_mut().unwrap();
4491 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4492 worktree.create_entry(path, envelope.payload.is_directory, cx)
4493 })
4494 .await?;
4495 Ok(proto::ProjectEntryResponse {
4496 entry: Some((&entry).into()),
4497 worktree_scan_id: worktree_scan_id as u64,
4498 })
4499 }
4500
4501 async fn handle_rename_project_entry(
4502 this: ModelHandle<Self>,
4503 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4504 _: Arc<Client>,
4505 mut cx: AsyncAppContext,
4506 ) -> Result<proto::ProjectEntryResponse> {
4507 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4508 let worktree = this.read_with(&cx, |this, cx| {
4509 this.worktree_for_entry(entry_id, cx)
4510 .ok_or_else(|| anyhow!("worktree not found"))
4511 })?;
4512 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4513 let entry = worktree
4514 .update(&mut cx, |worktree, cx| {
4515 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4516 worktree
4517 .as_local_mut()
4518 .unwrap()
4519 .rename_entry(entry_id, new_path, cx)
4520 .ok_or_else(|| anyhow!("invalid entry"))
4521 })?
4522 .await?;
4523 Ok(proto::ProjectEntryResponse {
4524 entry: Some((&entry).into()),
4525 worktree_scan_id: worktree_scan_id as u64,
4526 })
4527 }
4528
4529 async fn handle_copy_project_entry(
4530 this: ModelHandle<Self>,
4531 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4532 _: Arc<Client>,
4533 mut cx: AsyncAppContext,
4534 ) -> Result<proto::ProjectEntryResponse> {
4535 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4536 let worktree = this.read_with(&cx, |this, cx| {
4537 this.worktree_for_entry(entry_id, cx)
4538 .ok_or_else(|| anyhow!("worktree not found"))
4539 })?;
4540 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4541 let entry = worktree
4542 .update(&mut cx, |worktree, cx| {
4543 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4544 worktree
4545 .as_local_mut()
4546 .unwrap()
4547 .copy_entry(entry_id, new_path, cx)
4548 .ok_or_else(|| anyhow!("invalid entry"))
4549 })?
4550 .await?;
4551 Ok(proto::ProjectEntryResponse {
4552 entry: Some((&entry).into()),
4553 worktree_scan_id: worktree_scan_id as u64,
4554 })
4555 }
4556
4557 async fn handle_delete_project_entry(
4558 this: ModelHandle<Self>,
4559 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4560 _: Arc<Client>,
4561 mut cx: AsyncAppContext,
4562 ) -> Result<proto::ProjectEntryResponse> {
4563 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4564 let worktree = this.read_with(&cx, |this, cx| {
4565 this.worktree_for_entry(entry_id, cx)
4566 .ok_or_else(|| anyhow!("worktree not found"))
4567 })?;
4568 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4569 worktree
4570 .update(&mut cx, |worktree, cx| {
4571 worktree
4572 .as_local_mut()
4573 .unwrap()
4574 .delete_entry(entry_id, cx)
4575 .ok_or_else(|| anyhow!("invalid entry"))
4576 })?
4577 .await?;
4578 Ok(proto::ProjectEntryResponse {
4579 entry: None,
4580 worktree_scan_id: worktree_scan_id as u64,
4581 })
4582 }
4583
4584 async fn handle_update_diagnostic_summary(
4585 this: ModelHandle<Self>,
4586 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4587 _: Arc<Client>,
4588 mut cx: AsyncAppContext,
4589 ) -> Result<()> {
4590 this.update(&mut cx, |this, cx| {
4591 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4592 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4593 if let Some(summary) = envelope.payload.summary {
4594 let project_path = ProjectPath {
4595 worktree_id,
4596 path: Path::new(&summary.path).into(),
4597 };
4598 worktree.update(cx, |worktree, _| {
4599 worktree
4600 .as_remote_mut()
4601 .unwrap()
4602 .update_diagnostic_summary(project_path.path.clone(), &summary);
4603 });
4604 cx.emit(Event::DiagnosticsUpdated {
4605 language_server_id: summary.language_server_id as usize,
4606 path: project_path,
4607 });
4608 }
4609 }
4610 Ok(())
4611 })
4612 }
4613
4614 async fn handle_start_language_server(
4615 this: ModelHandle<Self>,
4616 envelope: TypedEnvelope<proto::StartLanguageServer>,
4617 _: Arc<Client>,
4618 mut cx: AsyncAppContext,
4619 ) -> Result<()> {
4620 let server = envelope
4621 .payload
4622 .server
4623 .ok_or_else(|| anyhow!("invalid server"))?;
4624 this.update(&mut cx, |this, cx| {
4625 this.language_server_statuses.insert(
4626 server.id as usize,
4627 LanguageServerStatus {
4628 name: server.name,
4629 pending_work: Default::default(),
4630 has_pending_diagnostic_updates: false,
4631 progress_tokens: Default::default(),
4632 },
4633 );
4634 cx.notify();
4635 });
4636 Ok(())
4637 }
4638
4639 async fn handle_update_language_server(
4640 this: ModelHandle<Self>,
4641 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4642 _: Arc<Client>,
4643 mut cx: AsyncAppContext,
4644 ) -> Result<()> {
4645 let language_server_id = envelope.payload.language_server_id as usize;
4646 match envelope
4647 .payload
4648 .variant
4649 .ok_or_else(|| anyhow!("invalid variant"))?
4650 {
4651 proto::update_language_server::Variant::WorkStart(payload) => {
4652 this.update(&mut cx, |this, cx| {
4653 this.on_lsp_work_start(
4654 language_server_id,
4655 payload.token,
4656 LanguageServerProgress {
4657 message: payload.message,
4658 percentage: payload.percentage.map(|p| p as usize),
4659 last_update_at: Instant::now(),
4660 },
4661 cx,
4662 );
4663 })
4664 }
4665 proto::update_language_server::Variant::WorkProgress(payload) => {
4666 this.update(&mut cx, |this, cx| {
4667 this.on_lsp_work_progress(
4668 language_server_id,
4669 payload.token,
4670 LanguageServerProgress {
4671 message: payload.message,
4672 percentage: payload.percentage.map(|p| p as usize),
4673 last_update_at: Instant::now(),
4674 },
4675 cx,
4676 );
4677 })
4678 }
4679 proto::update_language_server::Variant::WorkEnd(payload) => {
4680 this.update(&mut cx, |this, cx| {
4681 this.on_lsp_work_end(language_server_id, payload.token, cx);
4682 })
4683 }
4684 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4685 this.update(&mut cx, |this, cx| {
4686 this.disk_based_diagnostics_started(language_server_id, cx);
4687 })
4688 }
4689 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4690 this.update(&mut cx, |this, cx| {
4691 this.disk_based_diagnostics_finished(language_server_id, cx)
4692 });
4693 }
4694 }
4695
4696 Ok(())
4697 }
4698
4699 async fn handle_update_buffer(
4700 this: ModelHandle<Self>,
4701 envelope: TypedEnvelope<proto::UpdateBuffer>,
4702 _: Arc<Client>,
4703 mut cx: AsyncAppContext,
4704 ) -> Result<()> {
4705 this.update(&mut cx, |this, cx| {
4706 let payload = envelope.payload.clone();
4707 let buffer_id = payload.buffer_id;
4708 let ops = payload
4709 .operations
4710 .into_iter()
4711 .map(|op| language::proto::deserialize_operation(op))
4712 .collect::<Result<Vec<_>, _>>()?;
4713 let is_remote = this.is_remote();
4714 match this.opened_buffers.entry(buffer_id) {
4715 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4716 OpenBuffer::Strong(buffer) => {
4717 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4718 }
4719 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4720 OpenBuffer::Weak(_) => {}
4721 },
4722 hash_map::Entry::Vacant(e) => {
4723 assert!(
4724 is_remote,
4725 "received buffer update from {:?}",
4726 envelope.original_sender_id
4727 );
4728 e.insert(OpenBuffer::Loading(ops));
4729 }
4730 }
4731 Ok(())
4732 })
4733 }
4734
4735 async fn handle_update_buffer_file(
4736 this: ModelHandle<Self>,
4737 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4738 _: Arc<Client>,
4739 mut cx: AsyncAppContext,
4740 ) -> Result<()> {
4741 this.update(&mut cx, |this, cx| {
4742 let payload = envelope.payload.clone();
4743 let buffer_id = payload.buffer_id;
4744 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4745 let worktree = this
4746 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4747 .ok_or_else(|| anyhow!("no such worktree"))?;
4748 let file = File::from_proto(file, worktree.clone(), cx)?;
4749 let buffer = this
4750 .opened_buffers
4751 .get_mut(&buffer_id)
4752 .and_then(|b| b.upgrade(cx))
4753 .ok_or_else(|| anyhow!("no such buffer"))?;
4754 buffer.update(cx, |buffer, cx| {
4755 buffer.file_updated(Arc::new(file), cx).detach();
4756 });
4757 Ok(())
4758 })
4759 }
4760
4761 async fn handle_save_buffer(
4762 this: ModelHandle<Self>,
4763 envelope: TypedEnvelope<proto::SaveBuffer>,
4764 _: Arc<Client>,
4765 mut cx: AsyncAppContext,
4766 ) -> Result<proto::BufferSaved> {
4767 let buffer_id = envelope.payload.buffer_id;
4768 let requested_version = deserialize_version(envelope.payload.version);
4769
4770 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4771 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4772 let buffer = this
4773 .opened_buffers
4774 .get(&buffer_id)
4775 .and_then(|buffer| buffer.upgrade(cx))
4776 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4777 Ok::<_, anyhow::Error>((project_id, buffer))
4778 })?;
4779 buffer
4780 .update(&mut cx, |buffer, _| {
4781 buffer.wait_for_version(requested_version)
4782 })
4783 .await;
4784
4785 let (saved_version, fingerprint, mtime) =
4786 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4787 Ok(proto::BufferSaved {
4788 project_id,
4789 buffer_id,
4790 version: serialize_version(&saved_version),
4791 mtime: Some(mtime.into()),
4792 fingerprint,
4793 })
4794 }
4795
4796 async fn handle_reload_buffers(
4797 this: ModelHandle<Self>,
4798 envelope: TypedEnvelope<proto::ReloadBuffers>,
4799 _: Arc<Client>,
4800 mut cx: AsyncAppContext,
4801 ) -> Result<proto::ReloadBuffersResponse> {
4802 let sender_id = envelope.original_sender_id()?;
4803 let reload = this.update(&mut cx, |this, cx| {
4804 let mut buffers = HashSet::default();
4805 for buffer_id in &envelope.payload.buffer_ids {
4806 buffers.insert(
4807 this.opened_buffers
4808 .get(buffer_id)
4809 .and_then(|buffer| buffer.upgrade(cx))
4810 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4811 );
4812 }
4813 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4814 })?;
4815
4816 let project_transaction = reload.await?;
4817 let project_transaction = this.update(&mut cx, |this, cx| {
4818 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4819 });
4820 Ok(proto::ReloadBuffersResponse {
4821 transaction: Some(project_transaction),
4822 })
4823 }
4824
4825 async fn handle_format_buffers(
4826 this: ModelHandle<Self>,
4827 envelope: TypedEnvelope<proto::FormatBuffers>,
4828 _: Arc<Client>,
4829 mut cx: AsyncAppContext,
4830 ) -> Result<proto::FormatBuffersResponse> {
4831 let sender_id = envelope.original_sender_id()?;
4832 let format = this.update(&mut cx, |this, cx| {
4833 let mut buffers = HashSet::default();
4834 for buffer_id in &envelope.payload.buffer_ids {
4835 buffers.insert(
4836 this.opened_buffers
4837 .get(buffer_id)
4838 .and_then(|buffer| buffer.upgrade(cx))
4839 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4840 );
4841 }
4842 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4843 })?;
4844
4845 let project_transaction = format.await?;
4846 let project_transaction = this.update(&mut cx, |this, cx| {
4847 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4848 });
4849 Ok(proto::FormatBuffersResponse {
4850 transaction: Some(project_transaction),
4851 })
4852 }
4853
4854 async fn handle_get_completions(
4855 this: ModelHandle<Self>,
4856 envelope: TypedEnvelope<proto::GetCompletions>,
4857 _: Arc<Client>,
4858 mut cx: AsyncAppContext,
4859 ) -> Result<proto::GetCompletionsResponse> {
4860 let position = envelope
4861 .payload
4862 .position
4863 .and_then(language::proto::deserialize_anchor)
4864 .ok_or_else(|| anyhow!("invalid position"))?;
4865 let version = deserialize_version(envelope.payload.version);
4866 let buffer = this.read_with(&cx, |this, cx| {
4867 this.opened_buffers
4868 .get(&envelope.payload.buffer_id)
4869 .and_then(|buffer| buffer.upgrade(cx))
4870 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4871 })?;
4872 buffer
4873 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4874 .await;
4875 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4876 let completions = this
4877 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4878 .await?;
4879
4880 Ok(proto::GetCompletionsResponse {
4881 completions: completions
4882 .iter()
4883 .map(language::proto::serialize_completion)
4884 .collect(),
4885 version: serialize_version(&version),
4886 })
4887 }
4888
4889 async fn handle_apply_additional_edits_for_completion(
4890 this: ModelHandle<Self>,
4891 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4892 _: Arc<Client>,
4893 mut cx: AsyncAppContext,
4894 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4895 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4896 let buffer = this
4897 .opened_buffers
4898 .get(&envelope.payload.buffer_id)
4899 .and_then(|buffer| buffer.upgrade(cx))
4900 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4901 let language = buffer.read(cx).language();
4902 let completion = language::proto::deserialize_completion(
4903 envelope
4904 .payload
4905 .completion
4906 .ok_or_else(|| anyhow!("invalid completion"))?,
4907 language,
4908 )?;
4909 Ok::<_, anyhow::Error>(
4910 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4911 )
4912 })?;
4913
4914 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4915 transaction: apply_additional_edits
4916 .await?
4917 .as_ref()
4918 .map(language::proto::serialize_transaction),
4919 })
4920 }
4921
4922 async fn handle_get_code_actions(
4923 this: ModelHandle<Self>,
4924 envelope: TypedEnvelope<proto::GetCodeActions>,
4925 _: Arc<Client>,
4926 mut cx: AsyncAppContext,
4927 ) -> Result<proto::GetCodeActionsResponse> {
4928 let start = envelope
4929 .payload
4930 .start
4931 .and_then(language::proto::deserialize_anchor)
4932 .ok_or_else(|| anyhow!("invalid start"))?;
4933 let end = envelope
4934 .payload
4935 .end
4936 .and_then(language::proto::deserialize_anchor)
4937 .ok_or_else(|| anyhow!("invalid end"))?;
4938 let buffer = this.update(&mut cx, |this, cx| {
4939 this.opened_buffers
4940 .get(&envelope.payload.buffer_id)
4941 .and_then(|buffer| buffer.upgrade(cx))
4942 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4943 })?;
4944 buffer
4945 .update(&mut cx, |buffer, _| {
4946 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4947 })
4948 .await;
4949
4950 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4951 let code_actions = this.update(&mut cx, |this, cx| {
4952 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4953 })?;
4954
4955 Ok(proto::GetCodeActionsResponse {
4956 actions: code_actions
4957 .await?
4958 .iter()
4959 .map(language::proto::serialize_code_action)
4960 .collect(),
4961 version: serialize_version(&version),
4962 })
4963 }
4964
4965 async fn handle_apply_code_action(
4966 this: ModelHandle<Self>,
4967 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4968 _: Arc<Client>,
4969 mut cx: AsyncAppContext,
4970 ) -> Result<proto::ApplyCodeActionResponse> {
4971 let sender_id = envelope.original_sender_id()?;
4972 let action = language::proto::deserialize_code_action(
4973 envelope
4974 .payload
4975 .action
4976 .ok_or_else(|| anyhow!("invalid action"))?,
4977 )?;
4978 let apply_code_action = this.update(&mut cx, |this, cx| {
4979 let buffer = this
4980 .opened_buffers
4981 .get(&envelope.payload.buffer_id)
4982 .and_then(|buffer| buffer.upgrade(cx))
4983 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4984 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4985 })?;
4986
4987 let project_transaction = apply_code_action.await?;
4988 let project_transaction = this.update(&mut cx, |this, cx| {
4989 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4990 });
4991 Ok(proto::ApplyCodeActionResponse {
4992 transaction: Some(project_transaction),
4993 })
4994 }
4995
4996 async fn handle_lsp_command<T: LspCommand>(
4997 this: ModelHandle<Self>,
4998 envelope: TypedEnvelope<T::ProtoRequest>,
4999 _: Arc<Client>,
5000 mut cx: AsyncAppContext,
5001 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5002 where
5003 <T::LspRequest as lsp::request::Request>::Result: Send,
5004 {
5005 let sender_id = envelope.original_sender_id()?;
5006 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5007 let buffer_handle = this.read_with(&cx, |this, _| {
5008 this.opened_buffers
5009 .get(&buffer_id)
5010 .and_then(|buffer| buffer.upgrade(&cx))
5011 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5012 })?;
5013 let request = T::from_proto(
5014 envelope.payload,
5015 this.clone(),
5016 buffer_handle.clone(),
5017 cx.clone(),
5018 )
5019 .await?;
5020 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5021 let response = this
5022 .update(&mut cx, |this, cx| {
5023 this.request_lsp(buffer_handle, request, cx)
5024 })
5025 .await?;
5026 this.update(&mut cx, |this, cx| {
5027 Ok(T::response_to_proto(
5028 response,
5029 this,
5030 sender_id,
5031 &buffer_version,
5032 cx,
5033 ))
5034 })
5035 }
5036
5037 async fn handle_get_project_symbols(
5038 this: ModelHandle<Self>,
5039 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5040 _: Arc<Client>,
5041 mut cx: AsyncAppContext,
5042 ) -> Result<proto::GetProjectSymbolsResponse> {
5043 let symbols = this
5044 .update(&mut cx, |this, cx| {
5045 this.symbols(&envelope.payload.query, cx)
5046 })
5047 .await?;
5048
5049 Ok(proto::GetProjectSymbolsResponse {
5050 symbols: symbols.iter().map(serialize_symbol).collect(),
5051 })
5052 }
5053
5054 async fn handle_search_project(
5055 this: ModelHandle<Self>,
5056 envelope: TypedEnvelope<proto::SearchProject>,
5057 _: Arc<Client>,
5058 mut cx: AsyncAppContext,
5059 ) -> Result<proto::SearchProjectResponse> {
5060 let peer_id = envelope.original_sender_id()?;
5061 let query = SearchQuery::from_proto(envelope.payload)?;
5062 let result = this
5063 .update(&mut cx, |this, cx| this.search(query, cx))
5064 .await?;
5065
5066 this.update(&mut cx, |this, cx| {
5067 let mut locations = Vec::new();
5068 for (buffer, ranges) in result {
5069 for range in ranges {
5070 let start = serialize_anchor(&range.start);
5071 let end = serialize_anchor(&range.end);
5072 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5073 locations.push(proto::Location {
5074 buffer: Some(buffer),
5075 start: Some(start),
5076 end: Some(end),
5077 });
5078 }
5079 }
5080 Ok(proto::SearchProjectResponse { locations })
5081 })
5082 }
5083
5084 async fn handle_open_buffer_for_symbol(
5085 this: ModelHandle<Self>,
5086 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5087 _: Arc<Client>,
5088 mut cx: AsyncAppContext,
5089 ) -> Result<proto::OpenBufferForSymbolResponse> {
5090 let peer_id = envelope.original_sender_id()?;
5091 let symbol = envelope
5092 .payload
5093 .symbol
5094 .ok_or_else(|| anyhow!("invalid symbol"))?;
5095 let symbol = this.read_with(&cx, |this, _| {
5096 let symbol = this.deserialize_symbol(symbol)?;
5097 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5098 if signature == symbol.signature {
5099 Ok(symbol)
5100 } else {
5101 Err(anyhow!("invalid symbol signature"))
5102 }
5103 })?;
5104 let buffer = this
5105 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5106 .await?;
5107
5108 Ok(proto::OpenBufferForSymbolResponse {
5109 buffer: Some(this.update(&mut cx, |this, cx| {
5110 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5111 })),
5112 })
5113 }
5114
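    /// Hashes a symbol's worktree id and path together with this project's nonce, producing
    /// the signature used to validate symbols that peers send back to the host.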
5115 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5116 let mut hasher = Sha256::new();
5117 hasher.update(worktree_id.to_proto().to_be_bytes());
5118 hasher.update(path.to_string_lossy().as_bytes());
5119 hasher.update(self.nonce.to_be_bytes());
5120 hasher.finalize().as_slice().try_into().unwrap()
5121 }
5122
5123 async fn handle_open_buffer_by_id(
5124 this: ModelHandle<Self>,
5125 envelope: TypedEnvelope<proto::OpenBufferById>,
5126 _: Arc<Client>,
5127 mut cx: AsyncAppContext,
5128 ) -> Result<proto::OpenBufferResponse> {
5129 let peer_id = envelope.original_sender_id()?;
5130 let buffer = this
5131 .update(&mut cx, |this, cx| {
5132 this.open_buffer_by_id(envelope.payload.id, cx)
5133 })
5134 .await?;
5135 this.update(&mut cx, |this, cx| {
5136 Ok(proto::OpenBufferResponse {
5137 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5138 })
5139 })
5140 }
5141
5142 async fn handle_open_buffer_by_path(
5143 this: ModelHandle<Self>,
5144 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5145 _: Arc<Client>,
5146 mut cx: AsyncAppContext,
5147 ) -> Result<proto::OpenBufferResponse> {
5148 let peer_id = envelope.original_sender_id()?;
5149 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5150 let open_buffer = this.update(&mut cx, |this, cx| {
5151 this.open_buffer(
5152 ProjectPath {
5153 worktree_id,
5154 path: PathBuf::from(envelope.payload.path).into(),
5155 },
5156 cx,
5157 )
5158 });
5159
5160 let buffer = open_buffer.await?;
5161 this.update(&mut cx, |this, cx| {
5162 Ok(proto::OpenBufferResponse {
5163 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5164 })
5165 })
5166 }
5167
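    /// Converts a `ProjectTransaction` into its protobuf form for `peer_id`, serializing each
    /// affected buffer for that peer alongside its transaction.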
5168 fn serialize_project_transaction_for_peer(
5169 &mut self,
5170 project_transaction: ProjectTransaction,
5171 peer_id: PeerId,
5172 cx: &AppContext,
5173 ) -> proto::ProjectTransaction {
5174 let mut serialized_transaction = proto::ProjectTransaction {
5175 buffers: Default::default(),
5176 transactions: Default::default(),
5177 };
5178 for (buffer, transaction) in project_transaction.0 {
5179 serialized_transaction
5180 .buffers
5181 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5182 serialized_transaction
5183 .transactions
5184 .push(language::proto::serialize_transaction(&transaction));
5185 }
5186 serialized_transaction
5187 }
5188
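    /// Rebuilds a `ProjectTransaction` received over RPC: resolves each referenced buffer,
    /// waits for the transaction's edits to arrive, and optionally pushes each transaction
    /// onto the corresponding buffer's undo history.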
5189 fn deserialize_project_transaction(
5190 &mut self,
5191 message: proto::ProjectTransaction,
5192 push_to_history: bool,
5193 cx: &mut ModelContext<Self>,
5194 ) -> Task<Result<ProjectTransaction>> {
5195 cx.spawn(|this, mut cx| async move {
5196 let mut project_transaction = ProjectTransaction::default();
5197 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5198 let buffer = this
5199 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5200 .await?;
5201 let transaction = language::proto::deserialize_transaction(transaction)?;
5202 project_transaction.0.insert(buffer, transaction);
5203 }
5204
5205 for (buffer, transaction) in &project_transaction.0 {
5206 buffer
5207 .update(&mut cx, |buffer, _| {
5208 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5209 })
5210 .await;
5211
5212 if push_to_history {
5213 buffer.update(&mut cx, |buffer, _| {
5214 buffer.push_transaction(transaction.clone(), Instant::now());
5215 });
5216 }
5217 }
5218
5219 Ok(project_transaction)
5220 })
5221 }
5222
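    /// Serializes a buffer for `peer_id`, sending the full buffer state the first time the
    /// buffer is shared with that peer and only the buffer id thereafter.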
5223 fn serialize_buffer_for_peer(
5224 &mut self,
5225 buffer: &ModelHandle<Buffer>,
5226 peer_id: PeerId,
5227 cx: &AppContext,
5228 ) -> proto::Buffer {
5229 let buffer_id = buffer.read(cx).remote_id();
5230 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5231 if shared_buffers.insert(buffer_id) {
5232 proto::Buffer {
5233 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5234 }
5235 } else {
5236 proto::Buffer {
5237 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5238 }
5239 }
5240 }
5241
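    /// Resolves a `proto::Buffer` into a buffer handle: either waits for an already-shared
    /// buffer with the given id to appear in `opened_buffers`, or constructs a new buffer
    /// from the serialized state and registers it with the project.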
5242 fn deserialize_buffer(
5243 &mut self,
5244 buffer: proto::Buffer,
5245 cx: &mut ModelContext<Self>,
5246 ) -> Task<Result<ModelHandle<Buffer>>> {
5247 let replica_id = self.replica_id();
5248
5249 let opened_buffer_tx = self.opened_buffer.0.clone();
5250 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5251 cx.spawn(|this, mut cx| async move {
5252 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5253 proto::buffer::Variant::Id(id) => {
5254 let buffer = loop {
5255 let buffer = this.read_with(&cx, |this, cx| {
5256 this.opened_buffers
5257 .get(&id)
5258 .and_then(|buffer| buffer.upgrade(cx))
5259 });
5260 if let Some(buffer) = buffer {
5261 break buffer;
5262 }
5263 opened_buffer_rx
5264 .next()
5265 .await
5266 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5267 };
5268 Ok(buffer)
5269 }
5270 proto::buffer::Variant::State(mut buffer) => {
5271 let mut buffer_worktree = None;
5272 let mut buffer_file = None;
5273 if let Some(file) = buffer.file.take() {
5274 this.read_with(&cx, |this, cx| {
5275 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5276 let worktree =
5277 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5278 anyhow!("no worktree found for id {}", file.worktree_id)
5279 })?;
5280 buffer_file =
5281 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5282 as Arc<dyn language::File>);
5283 buffer_worktree = Some(worktree);
5284 Ok::<_, anyhow::Error>(())
5285 })?;
5286 }
5287
5288 let buffer = cx.add_model(|cx| {
5289 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5290 });
5291
5292 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5293
5294 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5295 Ok(buffer)
5296 }
5297 }
5298 })
5299 }
5300
5301 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5302 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5303 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5304 let start = serialized_symbol
5305 .start
5306 .ok_or_else(|| anyhow!("invalid start"))?;
5307 let end = serialized_symbol
5308 .end
5309 .ok_or_else(|| anyhow!("invalid end"))?;
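        // The symbol kind is sent over the wire as a raw integer; transmute it back.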
5310 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5311 let path = PathBuf::from(serialized_symbol.path);
5312 let language = self.languages.select_language(&path);
5313 Ok(Symbol {
5314 source_worktree_id,
5315 worktree_id,
5316 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5317 label: language
5318 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5319 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5320 name: serialized_symbol.name,
5321 path,
5322 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5323 kind,
5324 signature: serialized_symbol
5325 .signature
5326 .try_into()
5327 .map_err(|_| anyhow!("invalid signature"))?,
5328 })
5329 }
5330
5331 async fn handle_buffer_saved(
5332 this: ModelHandle<Self>,
5333 envelope: TypedEnvelope<proto::BufferSaved>,
5334 _: Arc<Client>,
5335 mut cx: AsyncAppContext,
5336 ) -> Result<()> {
5337 let version = deserialize_version(envelope.payload.version);
5338 let mtime = envelope
5339 .payload
5340 .mtime
5341 .ok_or_else(|| anyhow!("missing mtime"))?
5342 .into();
5343
5344 this.update(&mut cx, |this, cx| {
5345 let buffer = this
5346 .opened_buffers
5347 .get(&envelope.payload.buffer_id)
5348 .and_then(|buffer| buffer.upgrade(cx));
5349 if let Some(buffer) = buffer {
5350 buffer.update(cx, |buffer, cx| {
5351 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5352 });
5353 }
5354 Ok(())
5355 })
5356 }
5357
5358 async fn handle_buffer_reloaded(
5359 this: ModelHandle<Self>,
5360 envelope: TypedEnvelope<proto::BufferReloaded>,
5361 _: Arc<Client>,
5362 mut cx: AsyncAppContext,
5363 ) -> Result<()> {
5364 let payload = envelope.payload.clone();
5365 let version = deserialize_version(payload.version);
5366 let mtime = payload
5367 .mtime
5368 .ok_or_else(|| anyhow!("missing mtime"))?
5369 .into();
5370 this.update(&mut cx, |this, cx| {
5371 let buffer = this
5372 .opened_buffers
5373 .get(&payload.buffer_id)
5374 .and_then(|buffer| buffer.upgrade(cx));
5375 if let Some(buffer) = buffer {
5376 buffer.update(cx, |buffer, cx| {
5377 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5378 });
5379 }
5380 Ok(())
5381 })
5382 }
5383
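    /// Fuzzy-matches `query` against the paths of all visible worktrees, returning at most
    /// `max_results` matches. Root names are included in the candidates when more than one
    /// worktree is visible.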
5384 pub fn match_paths<'a>(
5385 &self,
5386 query: &'a str,
5387 include_ignored: bool,
5388 smart_case: bool,
5389 max_results: usize,
5390 cancel_flag: &'a AtomicBool,
5391 cx: &AppContext,
5392 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5393 let worktrees = self
5394 .worktrees(cx)
5395 .filter(|worktree| worktree.read(cx).is_visible())
5396 .collect::<Vec<_>>();
5397 let include_root_name = worktrees.len() > 1;
5398 let candidate_sets = worktrees
5399 .into_iter()
5400 .map(|worktree| CandidateSet {
5401 snapshot: worktree.read(cx).snapshot(),
5402 include_ignored,
5403 include_root_name,
5404 })
5405 .collect::<Vec<_>>();
5406
5407 let background = cx.background().clone();
5408 async move {
5409 fuzzy::match_paths(
5410 candidate_sets.as_slice(),
5411 query,
5412 smart_case,
5413 max_results,
5414 cancel_flag,
5415 background,
5416 )
5417 .await
5418 }
5419 }
5420
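    /// Converts LSP text edits into anchored buffer edits against the snapshot that was sent
    /// to the language server, coalescing adjacent edits and diffing multi-line replacements
    /// so that anchors in unchanged regions are preserved.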
5421 fn edits_from_lsp(
5422 &mut self,
5423 buffer: &ModelHandle<Buffer>,
5424 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5425 version: Option<i32>,
5426 cx: &mut ModelContext<Self>,
5427 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5428 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5429 cx.background().spawn(async move {
5430 let snapshot = snapshot?;
5431 let mut lsp_edits = lsp_edits
5432 .into_iter()
5433 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5434 .collect::<Vec<_>>();
5435 lsp_edits.sort_by_key(|(range, _)| range.start);
5436
5437 let mut lsp_edits = lsp_edits.into_iter().peekable();
5438 let mut edits = Vec::new();
5439 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5440 // Combine any LSP edits that are adjacent.
5441 //
5442 // Also, combine LSP edits that are separated from each other by only
5443 // a newline. This is important because for some code actions,
            // rust-analyzer rewrites the entire buffer via a series of edits that
5445 // are separated by unchanged newline characters.
5446 //
5447 // In order for the diffing logic below to work properly, any edits that
5448 // cancel each other out must be combined into one.
5449 while let Some((next_range, next_text)) = lsp_edits.peek() {
5450 if next_range.start > range.end {
5451 if next_range.start.row > range.end.row + 1
5452 || next_range.start.column > 0
5453 || snapshot.clip_point_utf16(
5454 PointUtf16::new(range.end.row, u32::MAX),
5455 Bias::Left,
5456 ) > range.end
5457 {
5458 break;
5459 }
5460 new_text.push('\n');
5461 }
5462 range.end = next_range.end;
5463 new_text.push_str(&next_text);
5464 lsp_edits.next();
5465 }
5466
5467 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5468 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5469 {
5470 return Err(anyhow!("invalid edits received from language server"));
5471 }
5472
5473 // For multiline edits, perform a diff of the old and new text so that
5474 // we can identify the changes more precisely, preserving the locations
5475 // of any anchors positioned in the unchanged regions.
5476 if range.end.row > range.start.row {
5477 let mut offset = range.start.to_offset(&snapshot);
5478 let old_text = snapshot.text_for_range(range).collect::<String>();
5479
5480 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5481 let mut moved_since_edit = true;
5482 for change in diff.iter_all_changes() {
5483 let tag = change.tag();
5484 let value = change.value();
5485 match tag {
5486 ChangeTag::Equal => {
5487 offset += value.len();
5488 moved_since_edit = true;
5489 }
5490 ChangeTag::Delete => {
5491 let start = snapshot.anchor_after(offset);
5492 let end = snapshot.anchor_before(offset + value.len());
5493 if moved_since_edit {
5494 edits.push((start..end, String::new()));
5495 } else {
5496 edits.last_mut().unwrap().0.end = end;
5497 }
5498 offset += value.len();
5499 moved_since_edit = false;
5500 }
5501 ChangeTag::Insert => {
5502 if moved_since_edit {
5503 let anchor = snapshot.anchor_after(offset);
5504 edits.push((anchor.clone()..anchor, value.to_string()));
5505 } else {
5506 edits.last_mut().unwrap().1.push_str(value);
5507 }
5508 moved_since_edit = false;
5509 }
5510 }
5511 }
5512 } else if range.end == range.start {
5513 let anchor = snapshot.anchor_after(range.start);
5514 edits.push((anchor.clone()..anchor, new_text));
5515 } else {
5516 let edit_start = snapshot.anchor_after(range.start);
5517 let edit_end = snapshot.anchor_before(range.end);
5518 edits.push((edit_start..edit_end, new_text));
5519 }
5520 }
5521
5522 Ok(edits)
5523 })
5524 }
5525
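    /// Returns the snapshot of `buffer` corresponding to the document version reported by the
    /// language server, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old.
    /// With no version, returns the buffer's current text snapshot.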
5526 fn buffer_snapshot_for_lsp_version(
5527 &mut self,
5528 buffer: &ModelHandle<Buffer>,
5529 version: Option<i32>,
5530 cx: &AppContext,
5531 ) -> Result<TextBufferSnapshot> {
5532 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5533
5534 if let Some(version) = version {
5535 let buffer_id = buffer.read(cx).remote_id();
5536 let snapshots = self
5537 .buffer_snapshots
5538 .get_mut(&buffer_id)
5539 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5540 let mut found_snapshot = None;
5541 snapshots.retain(|(snapshot_version, snapshot)| {
5542 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5543 false
5544 } else {
5545 if *snapshot_version == version {
5546 found_snapshot = Some(snapshot.clone());
5547 }
5548 true
5549 }
5550 });
5551
5552 found_snapshot.ok_or_else(|| {
5553 anyhow!(
5554 "snapshot not found for buffer {} at version {}",
5555 buffer_id,
5556 version
5557 )
5558 })
5559 } else {
            Ok(buffer.read(cx).text_snapshot())
5561 }
5562 }
5563
5564 fn language_server_for_buffer(
5565 &self,
5566 buffer: &Buffer,
5567 cx: &AppContext,
5568 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5569 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5570 let worktree_id = file.worktree_id(cx);
5571 self.language_servers
5572 .get(&(worktree_id, language.lsp_adapter()?.name()))
5573 } else {
5574 None
5575 }
5576 }
5577}
5578
5579impl ProjectStore {
5580 pub fn new(db: Arc<Db>) -> Self {
5581 Self {
5582 db,
5583 projects: Default::default(),
5584 }
5585 }
5586
5587 pub fn projects<'a>(
5588 &'a self,
5589 cx: &'a AppContext,
5590 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5591 self.projects
5592 .iter()
5593 .filter_map(|project| project.upgrade(cx))
5594 }
5595
5596 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5597 if let Err(ix) = self
5598 .projects
5599 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5600 {
5601 self.projects.insert(ix, project);
5602 }
5603 cx.notify();
5604 }
5605
5606 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5607 let mut did_change = false;
5608 self.projects.retain(|project| {
5609 if project.is_upgradable(cx) {
5610 true
5611 } else {
5612 did_change = true;
5613 false
5614 }
5615 });
5616 if did_change {
5617 cx.notify();
5618 }
5619 }
5620}
5621
5622impl WorktreeHandle {
5623 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5624 match self {
5625 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5626 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5627 }
5628 }
5629}
5630
5631impl OpenBuffer {
5632 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5633 match self {
5634 OpenBuffer::Strong(handle) => Some(handle.clone()),
5635 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5636 OpenBuffer::Loading(_) => None,
5637 }
5638 }
5639}
5640
5641struct CandidateSet {
5642 snapshot: Snapshot,
5643 include_ignored: bool,
5644 include_root_name: bool,
5645}
5646
5647impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5648 type Candidates = CandidateSetIter<'a>;
5649
5650 fn id(&self) -> usize {
5651 self.snapshot.id().to_usize()
5652 }
5653
5654 fn len(&self) -> usize {
5655 if self.include_ignored {
5656 self.snapshot.file_count()
5657 } else {
5658 self.snapshot.visible_file_count()
5659 }
5660 }
5661
5662 fn prefix(&self) -> Arc<str> {
5663 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5664 self.snapshot.root_name().into()
5665 } else if self.include_root_name {
5666 format!("{}/", self.snapshot.root_name()).into()
5667 } else {
5668 "".into()
5669 }
5670 }
5671
5672 fn candidates(&'a self, start: usize) -> Self::Candidates {
5673 CandidateSetIter {
5674 traversal: self.snapshot.files(self.include_ignored, start),
5675 }
5676 }
5677}
5678
5679struct CandidateSetIter<'a> {
5680 traversal: Traversal<'a>,
5681}
5682
5683impl<'a> Iterator for CandidateSetIter<'a> {
5684 type Item = PathMatchCandidate<'a>;
5685
5686 fn next(&mut self) -> Option<Self::Item> {
5687 self.traversal.next().map(|entry| {
5688 if let EntryKind::File(char_bag) = entry.kind {
5689 PathMatchCandidate {
5690 path: &entry.path,
5691 char_bag,
5692 }
5693 } else {
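                // The traversal produced by `candidates` only yields file
                // entries, so any other entry kind here is a bug.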
5694 unreachable!()
5695 }
5696 })
5697 }
5698}
5699
5700impl Entity for ProjectStore {
5701 type Event = ();
5702}
5703
5704impl Entity for Project {
5705 type Event = Event;
5706
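    // When the project model is released, prune it from the project store and,
    // depending on whether we were hosting or joining, unregister or leave the
    // remote project.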
5707 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5708 self.project_store.update(cx, ProjectStore::prune_projects);
5709
5710 match &self.client_state {
5711 ProjectClientState::Local { remote_id_rx, .. } => {
5712 if let Some(project_id) = *remote_id_rx.borrow() {
5713 self.client
5714 .send(proto::UnregisterProject { project_id })
5715 .log_err();
5716 }
5717 }
5718 ProjectClientState::Remote { remote_id, .. } => {
5719 self.client
5720 .send(proto::LeaveProject {
5721 project_id: *remote_id,
5722 })
5723 .log_err();
5724 }
5725 }
5726 }
5727
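    // Before the app quits, ask every running language server to shut down and
    // await them all concurrently.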
5728 fn app_will_quit(
5729 &mut self,
5730 _: &mut MutableAppContext,
5731 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5732 let shutdown_futures = self
5733 .language_servers
5734 .drain()
5735 .filter_map(|(_, (_, server))| server.shutdown())
5736 .collect::<Vec<_>>();
5737 Some(
5738 async move {
5739 futures::future::join_all(shutdown_futures).await;
5740 }
5741 .boxed(),
5742 )
5743 }
5744}
5745
5746impl Collaborator {
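    // Builds a collaborator from its protobuf representation, fetching the
    // associated user record through the user store.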
5747 fn from_proto(
5748 message: proto::Collaborator,
5749 user_store: &ModelHandle<UserStore>,
5750 cx: &mut AsyncAppContext,
5751 ) -> impl Future<Output = Result<Self>> {
5752 let user = user_store.update(cx, |user_store, cx| {
5753 user_store.fetch_user(message.user_id, cx)
5754 });
5755
5756 async move {
5757 Ok(Self {
5758 peer_id: PeerId(message.peer_id),
5759 user: user.await?,
5760 replica_id: message.replica_id as ReplicaId,
5761 })
5762 }
5763 }
5764}
5765
5766impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5767 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5768 Self {
5769 worktree_id,
5770 path: path.as_ref().into(),
5771 }
5772 }
5773}
5774
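// The LSP resource-operation options map directly onto our filesystem options;
// unspecified fields default to `false`.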
5775impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5776 fn from(options: lsp::CreateFileOptions) -> Self {
5777 Self {
5778 overwrite: options.overwrite.unwrap_or(false),
5779 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5780 }
5781 }
5782}
5783
5784impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5785 fn from(options: lsp::RenameFileOptions) -> Self {
5786 Self {
5787 overwrite: options.overwrite.unwrap_or(false),
5788 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5789 }
5790 }
5791}
5792
5793impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5794 fn from(options: lsp::DeleteFileOptions) -> Self {
5795 Self {
5796 recursive: options.recursive.unwrap_or(false),
5797 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5798 }
5799 }
5800}
5801
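// Converts a project symbol into its protobuf representation so it can be sent
// over RPC.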
5802fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5803 proto::Symbol {
5804 source_worktree_id: symbol.source_worktree_id.to_proto(),
5805 worktree_id: symbol.worktree_id.to_proto(),
5806 language_server_name: symbol.language_server_name.0.to_string(),
5807 name: symbol.name.clone(),
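        // This transmute assumes the proto `kind` field and `lsp::SymbolKind`
        // share the same underlying representation.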
5808 kind: unsafe { mem::transmute(symbol.kind) },
5809 path: symbol.path.to_string_lossy().to_string(),
5810 start: Some(proto::Point {
5811 row: symbol.range.start.row,
5812 column: symbol.range.start.column,
5813 }),
5814 end: Some(proto::Point {
5815 row: symbol.range.end.row,
5816 column: symbol.range.end.column,
5817 }),
5818 signature: symbol.signature.to_vec(),
5819 }
5820}
5821
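// Computes `path` relative to `base`, inserting `..` components where the two
// diverge. For example, relativizing `/a/c/d` against `/a/b` yields `../c/d`.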
5822fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5823 let mut path_components = path.components();
5824 let mut base_components = base.components();
5825 let mut components: Vec<Component> = Vec::new();
5826 loop {
5827 match (path_components.next(), base_components.next()) {
5828 (None, None) => break,
5829 (Some(a), None) => {
5830 components.push(a);
5831 components.extend(path_components.by_ref());
5832 break;
5833 }
5834 (None, _) => components.push(Component::ParentDir),
5835 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5836 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5837 (Some(a), Some(_)) => {
5838 components.push(Component::ParentDir);
5839 for _ in base_components {
5840 components.push(Component::ParentDir);
5841 }
5842 components.push(a);
5843 components.extend(path_components.by_ref());
5844 break;
5845 }
5846 }
5847 }
5848 components.iter().map(|c| c.as_os_str()).collect()
5849}
5850
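// Buffers act as project items; their entry id is that of the file they were
// opened from, if any.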
5851impl Item for Buffer {
5852 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5853 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5854 }
5855}
5856
5857#[cfg(test)]
5858mod tests {
5859 use crate::worktree::WorktreeHandle;
5860
5861 use super::{Event, *};
5862 use fs::RealFs;
5863 use futures::{future, StreamExt};
5864 use gpui::{executor::Deterministic, test::subscribe};
5865 use language::{
5866 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5867 OffsetRangeExt, Point, ToPoint,
5868 };
5869 use lsp::Url;
5870 use serde_json::json;
5871 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5872 use unindent::Unindent as _;
5873 use util::{assert_set_eq, test::temp_tree};
5874
5875 #[gpui::test]
5876 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5877 let dir = temp_tree(json!({
5878 "root": {
5879 "apple": "",
5880 "banana": {
5881 "carrot": {
5882 "date": "",
5883 "endive": "",
5884 }
5885 },
5886 "fennel": {
5887 "grape": "",
5888 }
5889 }
5890 }));
5891
5892 let root_link_path = dir.path().join("root_link");
5893 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5894 unix::fs::symlink(
5895 &dir.path().join("root/fennel"),
5896 &dir.path().join("root/finnochio"),
5897 )
5898 .unwrap();
5899
5900 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5901
5902 project.read_with(cx, |project, cx| {
5903 let tree = project.worktrees(cx).next().unwrap().read(cx);
5904 assert_eq!(tree.file_count(), 5);
5905 assert_eq!(
5906 tree.inode_for_path("fennel/grape"),
5907 tree.inode_for_path("finnochio/grape")
5908 );
5909 });
5910
5911 let cancel_flag = Default::default();
5912 let results = project
5913 .read_with(cx, |project, cx| {
5914 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5915 })
5916 .await;
5917 assert_eq!(
5918 results
5919 .into_iter()
5920 .map(|result| result.path)
5921 .collect::<Vec<Arc<Path>>>(),
5922 vec![
5923 PathBuf::from("banana/carrot/date").into(),
5924 PathBuf::from("banana/carrot/endive").into(),
5925 ]
5926 );
5927 }
5928
5929 #[gpui::test]
5930 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5931 cx.foreground().forbid_parking();
5932
5933 let mut rust_language = Language::new(
5934 LanguageConfig {
5935 name: "Rust".into(),
5936 path_suffixes: vec!["rs".to_string()],
5937 ..Default::default()
5938 },
5939 Some(tree_sitter_rust::language()),
5940 );
5941 let mut json_language = Language::new(
5942 LanguageConfig {
5943 name: "JSON".into(),
5944 path_suffixes: vec!["json".to_string()],
5945 ..Default::default()
5946 },
5947 None,
5948 );
5949 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5950 name: "the-rust-language-server",
5951 capabilities: lsp::ServerCapabilities {
5952 completion_provider: Some(lsp::CompletionOptions {
5953 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5954 ..Default::default()
5955 }),
5956 ..Default::default()
5957 },
5958 ..Default::default()
5959 });
5960 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5961 name: "the-json-language-server",
5962 capabilities: lsp::ServerCapabilities {
5963 completion_provider: Some(lsp::CompletionOptions {
5964 trigger_characters: Some(vec![":".to_string()]),
5965 ..Default::default()
5966 }),
5967 ..Default::default()
5968 },
5969 ..Default::default()
5970 });
5971
5972 let fs = FakeFs::new(cx.background());
5973 fs.insert_tree(
5974 "/the-root",
5975 json!({
5976 "test.rs": "const A: i32 = 1;",
5977 "test2.rs": "",
5978 "Cargo.toml": "a = 1",
5979 "package.json": "{\"a\": 1}",
5980 }),
5981 )
5982 .await;
5983
5984 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5985 project.update(cx, |project, _| {
5986 project.languages.add(Arc::new(rust_language));
5987 project.languages.add(Arc::new(json_language));
5988 });
5989
5990 // Open a buffer without an associated language server.
5991 let toml_buffer = project
5992 .update(cx, |project, cx| {
5993 project.open_local_buffer("/the-root/Cargo.toml", cx)
5994 })
5995 .await
5996 .unwrap();
5997
5998 // Open a buffer with an associated language server.
5999 let rust_buffer = project
6000 .update(cx, |project, cx| {
6001 project.open_local_buffer("/the-root/test.rs", cx)
6002 })
6003 .await
6004 .unwrap();
6005
6006 // A server is started up, and it is notified about Rust files.
6007 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6008 assert_eq!(
6009 fake_rust_server
6010 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6011 .await
6012 .text_document,
6013 lsp::TextDocumentItem {
6014 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6015 version: 0,
6016 text: "const A: i32 = 1;".to_string(),
6017 language_id: Default::default()
6018 }
6019 );
6020
6021 // The buffer is configured based on the language server's capabilities.
6022 rust_buffer.read_with(cx, |buffer, _| {
6023 assert_eq!(
6024 buffer.completion_triggers(),
6025 &[".".to_string(), "::".to_string()]
6026 );
6027 });
6028 toml_buffer.read_with(cx, |buffer, _| {
6029 assert!(buffer.completion_triggers().is_empty());
6030 });
6031
6032 // Edit a buffer. The changes are reported to the language server.
6033 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6034 assert_eq!(
6035 fake_rust_server
6036 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6037 .await
6038 .text_document,
6039 lsp::VersionedTextDocumentIdentifier::new(
6040 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6041 1
6042 )
6043 );
6044
6045 // Open a third buffer with a different associated language server.
6046 let json_buffer = project
6047 .update(cx, |project, cx| {
6048 project.open_local_buffer("/the-root/package.json", cx)
6049 })
6050 .await
6051 .unwrap();
6052
6053        // A JSON language server is started up and is only notified about the JSON buffer.
6054 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6055 assert_eq!(
6056 fake_json_server
6057 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6058 .await
6059 .text_document,
6060 lsp::TextDocumentItem {
6061 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6062 version: 0,
6063 text: "{\"a\": 1}".to_string(),
6064 language_id: Default::default()
6065 }
6066 );
6067
6068 // This buffer is configured based on the second language server's
6069 // capabilities.
6070 json_buffer.read_with(cx, |buffer, _| {
6071 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6072 });
6073
6074 // When opening another buffer whose language server is already running,
6075 // it is also configured based on the existing language server's capabilities.
6076 let rust_buffer2 = project
6077 .update(cx, |project, cx| {
6078 project.open_local_buffer("/the-root/test2.rs", cx)
6079 })
6080 .await
6081 .unwrap();
6082 rust_buffer2.read_with(cx, |buffer, _| {
6083 assert_eq!(
6084 buffer.completion_triggers(),
6085 &[".".to_string(), "::".to_string()]
6086 );
6087 });
6088
6089 // Changes are reported only to servers matching the buffer's language.
6090 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6091 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6092 assert_eq!(
6093 fake_rust_server
6094 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6095 .await
6096 .text_document,
6097 lsp::VersionedTextDocumentIdentifier::new(
6098 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6099 1
6100 )
6101 );
6102
6103 // Save notifications are reported to all servers.
6104 toml_buffer
6105 .update(cx, |buffer, cx| buffer.save(cx))
6106 .await
6107 .unwrap();
6108 assert_eq!(
6109 fake_rust_server
6110 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6111 .await
6112 .text_document,
6113 lsp::TextDocumentIdentifier::new(
6114 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6115 )
6116 );
6117 assert_eq!(
6118 fake_json_server
6119 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6120 .await
6121 .text_document,
6122 lsp::TextDocumentIdentifier::new(
6123 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6124 )
6125 );
6126
6127 // Renames are reported only to servers matching the buffer's language.
6128 fs.rename(
6129 Path::new("/the-root/test2.rs"),
6130 Path::new("/the-root/test3.rs"),
6131 Default::default(),
6132 )
6133 .await
6134 .unwrap();
6135 assert_eq!(
6136 fake_rust_server
6137 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6138 .await
6139 .text_document,
6140 lsp::TextDocumentIdentifier::new(
6141 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6142 ),
6143 );
6144 assert_eq!(
6145 fake_rust_server
6146 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6147 .await
6148 .text_document,
6149 lsp::TextDocumentItem {
6150 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6151 version: 0,
6152 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6153 language_id: Default::default()
6154 },
6155 );
6156
6157 rust_buffer2.update(cx, |buffer, cx| {
6158 buffer.update_diagnostics(
6159 DiagnosticSet::from_sorted_entries(
6160 vec![DiagnosticEntry {
6161 diagnostic: Default::default(),
6162 range: Anchor::MIN..Anchor::MAX,
6163 }],
6164 &buffer.snapshot(),
6165 ),
6166 cx,
6167 );
6168 assert_eq!(
6169 buffer
6170 .snapshot()
6171 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6172 .count(),
6173 1
6174 );
6175 });
6176
6177 // When the rename changes the extension of the file, the buffer gets closed on the old
6178 // language server and gets opened on the new one.
6179 fs.rename(
6180 Path::new("/the-root/test3.rs"),
6181 Path::new("/the-root/test3.json"),
6182 Default::default(),
6183 )
6184 .await
6185 .unwrap();
6186 assert_eq!(
6187 fake_rust_server
6188 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6189 .await
6190 .text_document,
6191 lsp::TextDocumentIdentifier::new(
6192 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6193 ),
6194 );
6195 assert_eq!(
6196 fake_json_server
6197 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6198 .await
6199 .text_document,
6200 lsp::TextDocumentItem {
6201 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6202 version: 0,
6203 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6204 language_id: Default::default()
6205 },
6206 );
6207
6208 // We clear the diagnostics, since the language has changed.
6209 rust_buffer2.read_with(cx, |buffer, _| {
6210 assert_eq!(
6211 buffer
6212 .snapshot()
6213 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6214 .count(),
6215 0
6216 );
6217 });
6218
6219        // The renamed file's version resets after switching language servers.
6220 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6221 assert_eq!(
6222 fake_json_server
6223 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6224 .await
6225 .text_document,
6226 lsp::VersionedTextDocumentIdentifier::new(
6227 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6228 1
6229 )
6230 );
6231
6232 // Restart language servers
6233 project.update(cx, |project, cx| {
6234 project.restart_language_servers_for_buffers(
6235 vec![rust_buffer.clone(), json_buffer.clone()],
6236 cx,
6237 );
6238 });
6239
6240 let mut rust_shutdown_requests = fake_rust_server
6241 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6242 let mut json_shutdown_requests = fake_json_server
6243 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6244 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6245
6246 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6247 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6248
6249        // Ensure the Rust document is reopened in the new Rust language server.
6250 assert_eq!(
6251 fake_rust_server
6252 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6253 .await
6254 .text_document,
6255 lsp::TextDocumentItem {
6256 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6257 version: 1,
6258 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6259 language_id: Default::default()
6260 }
6261 );
6262
6263        // Ensure the JSON documents are reopened in the new JSON language server.
6264 assert_set_eq!(
6265 [
6266 fake_json_server
6267 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6268 .await
6269 .text_document,
6270 fake_json_server
6271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6272 .await
6273 .text_document,
6274 ],
6275 [
6276 lsp::TextDocumentItem {
6277 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6278 version: 0,
6279 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6280 language_id: Default::default()
6281 },
6282 lsp::TextDocumentItem {
6283 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6284 version: 1,
6285 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6286 language_id: Default::default()
6287 }
6288 ]
6289 );
6290
6291 // Close notifications are reported only to servers matching the buffer's language.
6292 cx.update(|_| drop(json_buffer));
6293 let close_message = lsp::DidCloseTextDocumentParams {
6294 text_document: lsp::TextDocumentIdentifier::new(
6295 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6296 ),
6297 };
6298 assert_eq!(
6299 fake_json_server
6300 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6301 .await,
6302 close_message,
6303 );
6304 }
6305
6306 #[gpui::test]
6307 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6308 cx.foreground().forbid_parking();
6309
6310 let fs = FakeFs::new(cx.background());
6311 fs.insert_tree(
6312 "/dir",
6313 json!({
6314 "a.rs": "let a = 1;",
6315 "b.rs": "let b = 2;"
6316 }),
6317 )
6318 .await;
6319
6320 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6321
6322 let buffer_a = project
6323 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6324 .await
6325 .unwrap();
6326 let buffer_b = project
6327 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6328 .await
6329 .unwrap();
6330
6331 project.update(cx, |project, cx| {
6332 project
6333 .update_diagnostics(
6334 0,
6335 lsp::PublishDiagnosticsParams {
6336 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6337 version: None,
6338 diagnostics: vec![lsp::Diagnostic {
6339 range: lsp::Range::new(
6340 lsp::Position::new(0, 4),
6341 lsp::Position::new(0, 5),
6342 ),
6343 severity: Some(lsp::DiagnosticSeverity::ERROR),
6344 message: "error 1".to_string(),
6345 ..Default::default()
6346 }],
6347 },
6348 &[],
6349 cx,
6350 )
6351 .unwrap();
6352 project
6353 .update_diagnostics(
6354 0,
6355 lsp::PublishDiagnosticsParams {
6356 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6357 version: None,
6358 diagnostics: vec![lsp::Diagnostic {
6359 range: lsp::Range::new(
6360 lsp::Position::new(0, 4),
6361 lsp::Position::new(0, 5),
6362 ),
6363 severity: Some(lsp::DiagnosticSeverity::WARNING),
6364 message: "error 2".to_string(),
6365 ..Default::default()
6366 }],
6367 },
6368 &[],
6369 cx,
6370 )
6371 .unwrap();
6372 });
6373
6374 buffer_a.read_with(cx, |buffer, _| {
6375 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6376 assert_eq!(
6377 chunks
6378 .iter()
6379 .map(|(s, d)| (s.as_str(), *d))
6380 .collect::<Vec<_>>(),
6381 &[
6382 ("let ", None),
6383 ("a", Some(DiagnosticSeverity::ERROR)),
6384 (" = 1;", None),
6385 ]
6386 );
6387 });
6388 buffer_b.read_with(cx, |buffer, _| {
6389 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6390 assert_eq!(
6391 chunks
6392 .iter()
6393 .map(|(s, d)| (s.as_str(), *d))
6394 .collect::<Vec<_>>(),
6395 &[
6396 ("let ", None),
6397 ("b", Some(DiagnosticSeverity::WARNING)),
6398 (" = 2;", None),
6399 ]
6400 );
6401 });
6402 }
6403
6404 #[gpui::test]
6405 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6406 cx.foreground().forbid_parking();
6407
6408 let fs = FakeFs::new(cx.background());
6409 fs.insert_tree(
6410 "/root",
6411 json!({
6412 "dir": {
6413 "a.rs": "let a = 1;",
6414 },
6415 "other.rs": "let b = c;"
6416 }),
6417 )
6418 .await;
6419
6420 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6421
6422 let (worktree, _) = project
6423 .update(cx, |project, cx| {
6424 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6425 })
6426 .await
6427 .unwrap();
6428 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6429
6430 project.update(cx, |project, cx| {
6431 project
6432 .update_diagnostics(
6433 0,
6434 lsp::PublishDiagnosticsParams {
6435 uri: Url::from_file_path("/root/other.rs").unwrap(),
6436 version: None,
6437 diagnostics: vec![lsp::Diagnostic {
6438 range: lsp::Range::new(
6439 lsp::Position::new(0, 8),
6440 lsp::Position::new(0, 9),
6441 ),
6442 severity: Some(lsp::DiagnosticSeverity::ERROR),
6443 message: "unknown variable 'c'".to_string(),
6444 ..Default::default()
6445 }],
6446 },
6447 &[],
6448 cx,
6449 )
6450 .unwrap();
6451 });
6452
6453 let buffer = project
6454 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6455 .await
6456 .unwrap();
6457 buffer.read_with(cx, |buffer, _| {
6458 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6459 assert_eq!(
6460 chunks
6461 .iter()
6462 .map(|(s, d)| (s.as_str(), *d))
6463 .collect::<Vec<_>>(),
6464 &[
6465 ("let b = ", None),
6466 ("c", Some(DiagnosticSeverity::ERROR)),
6467 (";", None),
6468 ]
6469 );
6470 });
6471
6472 project.read_with(cx, |project, cx| {
6473 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6474 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6475 });
6476 }
6477
6478 #[gpui::test]
6479 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6480 cx.foreground().forbid_parking();
6481
6482 let progress_token = "the-progress-token";
6483 let mut language = Language::new(
6484 LanguageConfig {
6485 name: "Rust".into(),
6486 path_suffixes: vec!["rs".to_string()],
6487 ..Default::default()
6488 },
6489 Some(tree_sitter_rust::language()),
6490 );
6491 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6492 disk_based_diagnostics_progress_token: Some(progress_token),
6493 disk_based_diagnostics_sources: &["disk"],
6494 ..Default::default()
6495 });
6496
6497 let fs = FakeFs::new(cx.background());
6498 fs.insert_tree(
6499 "/dir",
6500 json!({
6501 "a.rs": "fn a() { A }",
6502 "b.rs": "const y: i32 = 1",
6503 }),
6504 )
6505 .await;
6506
6507 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6508 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6509 let worktree_id =
6510 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6511
6512        // Cause the worktree to start the fake language server.
6513 let _buffer = project
6514 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6515 .await
6516 .unwrap();
6517
6518 let mut events = subscribe(&project, cx);
6519
6520 let fake_server = fake_servers.next().await.unwrap();
6521 fake_server.start_progress(progress_token).await;
6522 assert_eq!(
6523 events.next().await.unwrap(),
6524 Event::DiskBasedDiagnosticsStarted {
6525 language_server_id: 0,
6526 }
6527 );
6528
6529 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6530 lsp::PublishDiagnosticsParams {
6531 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6532 version: None,
6533 diagnostics: vec![lsp::Diagnostic {
6534 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6535 severity: Some(lsp::DiagnosticSeverity::ERROR),
6536 message: "undefined variable 'A'".to_string(),
6537 ..Default::default()
6538 }],
6539 },
6540 );
6541 assert_eq!(
6542 events.next().await.unwrap(),
6543 Event::DiagnosticsUpdated {
6544 language_server_id: 0,
6545 path: (worktree_id, Path::new("a.rs")).into()
6546 }
6547 );
6548
6549 fake_server.end_progress(progress_token);
6550 assert_eq!(
6551 events.next().await.unwrap(),
6552 Event::DiskBasedDiagnosticsFinished {
6553 language_server_id: 0
6554 }
6555 );
6556
6557 let buffer = project
6558 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6559 .await
6560 .unwrap();
6561
6562 buffer.read_with(cx, |buffer, _| {
6563 let snapshot = buffer.snapshot();
6564 let diagnostics = snapshot
6565 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6566 .collect::<Vec<_>>();
6567 assert_eq!(
6568 diagnostics,
6569 &[DiagnosticEntry {
6570 range: Point::new(0, 9)..Point::new(0, 10),
6571 diagnostic: Diagnostic {
6572 severity: lsp::DiagnosticSeverity::ERROR,
6573 message: "undefined variable 'A'".to_string(),
6574 group_id: 0,
6575 is_primary: true,
6576 ..Default::default()
6577 }
6578 }]
6579 )
6580 });
6581
6582 // Ensure publishing empty diagnostics twice only results in one update event.
6583 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6584 lsp::PublishDiagnosticsParams {
6585 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6586 version: None,
6587 diagnostics: Default::default(),
6588 },
6589 );
6590 assert_eq!(
6591 events.next().await.unwrap(),
6592 Event::DiagnosticsUpdated {
6593 language_server_id: 0,
6594 path: (worktree_id, Path::new("a.rs")).into()
6595 }
6596 );
6597
6598 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6599 lsp::PublishDiagnosticsParams {
6600 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6601 version: None,
6602 diagnostics: Default::default(),
6603 },
6604 );
6605 cx.foreground().run_until_parked();
6606 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6607 }
6608
6609 #[gpui::test]
6610 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6611 cx.foreground().forbid_parking();
6612
6613 let progress_token = "the-progress-token";
6614 let mut language = Language::new(
6615 LanguageConfig {
6616 path_suffixes: vec!["rs".to_string()],
6617 ..Default::default()
6618 },
6619 None,
6620 );
6621 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6622 disk_based_diagnostics_sources: &["disk"],
6623 disk_based_diagnostics_progress_token: Some(progress_token),
6624 ..Default::default()
6625 });
6626
6627 let fs = FakeFs::new(cx.background());
6628 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6629
6630 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6631 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6632
6633 let buffer = project
6634 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6635 .await
6636 .unwrap();
6637
6638 // Simulate diagnostics starting to update.
6639 let fake_server = fake_servers.next().await.unwrap();
6640 fake_server.start_progress(progress_token).await;
6641
6642 // Restart the server before the diagnostics finish updating.
6643 project.update(cx, |project, cx| {
6644 project.restart_language_servers_for_buffers([buffer], cx);
6645 });
6646 let mut events = subscribe(&project, cx);
6647
6648 // Simulate the newly started server sending more diagnostics.
6649 let fake_server = fake_servers.next().await.unwrap();
6650 fake_server.start_progress(progress_token).await;
6651 assert_eq!(
6652 events.next().await.unwrap(),
6653 Event::DiskBasedDiagnosticsStarted {
6654 language_server_id: 1
6655 }
6656 );
6657 project.read_with(cx, |project, _| {
6658 assert_eq!(
6659 project
6660 .language_servers_running_disk_based_diagnostics()
6661 .collect::<Vec<_>>(),
6662 [1]
6663 );
6664 });
6665
6666 // All diagnostics are considered done, despite the old server's diagnostic
6667 // task never completing.
6668 fake_server.end_progress(progress_token);
6669 assert_eq!(
6670 events.next().await.unwrap(),
6671 Event::DiskBasedDiagnosticsFinished {
6672 language_server_id: 1
6673 }
6674 );
6675 project.read_with(cx, |project, _| {
6676 assert_eq!(
6677 project
6678 .language_servers_running_disk_based_diagnostics()
6679 .collect::<Vec<_>>(),
6680 [0; 0]
6681 );
6682 });
6683 }
6684
6685 #[gpui::test]
6686 async fn test_toggling_enable_language_server(
6687 deterministic: Arc<Deterministic>,
6688 cx: &mut gpui::TestAppContext,
6689 ) {
6690 deterministic.forbid_parking();
6691
6692 let mut rust = Language::new(
6693 LanguageConfig {
6694 name: Arc::from("Rust"),
6695 path_suffixes: vec!["rs".to_string()],
6696 ..Default::default()
6697 },
6698 None,
6699 );
6700 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6701 name: "rust-lsp",
6702 ..Default::default()
6703 });
6704 let mut js = Language::new(
6705 LanguageConfig {
6706 name: Arc::from("JavaScript"),
6707 path_suffixes: vec!["js".to_string()],
6708 ..Default::default()
6709 },
6710 None,
6711 );
6712 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6713 name: "js-lsp",
6714 ..Default::default()
6715 });
6716
6717 let fs = FakeFs::new(cx.background());
6718 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6719 .await;
6720
6721 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6722 project.update(cx, |project, _| {
6723 project.languages.add(Arc::new(rust));
6724 project.languages.add(Arc::new(js));
6725 });
6726
6727 let _rs_buffer = project
6728 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6729 .await
6730 .unwrap();
6731 let _js_buffer = project
6732 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6733 .await
6734 .unwrap();
6735
6736 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6737 assert_eq!(
6738 fake_rust_server_1
6739 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6740 .await
6741 .text_document
6742 .uri
6743 .as_str(),
6744 "file:///dir/a.rs"
6745 );
6746
6747 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6748 assert_eq!(
6749 fake_js_server
6750 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6751 .await
6752 .text_document
6753 .uri
6754 .as_str(),
6755 "file:///dir/b.js"
6756 );
6757
6758 // Disable Rust language server, ensuring only that server gets stopped.
6759 cx.update(|cx| {
6760 cx.update_global(|settings: &mut Settings, _| {
6761 settings.language_overrides.insert(
6762 Arc::from("Rust"),
6763 settings::LanguageSettings {
6764 enable_language_server: Some(false),
6765 ..Default::default()
6766 },
6767 );
6768 })
6769 });
6770 fake_rust_server_1
6771 .receive_notification::<lsp::notification::Exit>()
6772 .await;
6773
6774 // Enable Rust and disable JavaScript language servers, ensuring that the
6775 // former gets started again and that the latter stops.
6776 cx.update(|cx| {
6777 cx.update_global(|settings: &mut Settings, _| {
6778 settings.language_overrides.insert(
6779 Arc::from("Rust"),
6780 settings::LanguageSettings {
6781 enable_language_server: Some(true),
6782 ..Default::default()
6783 },
6784 );
6785 settings.language_overrides.insert(
6786 Arc::from("JavaScript"),
6787 settings::LanguageSettings {
6788 enable_language_server: Some(false),
6789 ..Default::default()
6790 },
6791 );
6792 })
6793 });
6794 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6795 assert_eq!(
6796 fake_rust_server_2
6797 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6798 .await
6799 .text_document
6800 .uri
6801 .as_str(),
6802 "file:///dir/a.rs"
6803 );
6804 fake_js_server
6805 .receive_notification::<lsp::notification::Exit>()
6806 .await;
6807 }
6808
6809 #[gpui::test]
6810 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6811 cx.foreground().forbid_parking();
6812
6813 let mut language = Language::new(
6814 LanguageConfig {
6815 name: "Rust".into(),
6816 path_suffixes: vec!["rs".to_string()],
6817 ..Default::default()
6818 },
6819 Some(tree_sitter_rust::language()),
6820 );
6821 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6822 disk_based_diagnostics_sources: &["disk"],
6823 ..Default::default()
6824 });
6825
6826 let text = "
6827 fn a() { A }
6828 fn b() { BB }
6829 fn c() { CCC }
6830 "
6831 .unindent();
6832
6833 let fs = FakeFs::new(cx.background());
6834 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6835
6836 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6837 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6838
6839 let buffer = project
6840 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6841 .await
6842 .unwrap();
6843
6844 let mut fake_server = fake_servers.next().await.unwrap();
6845 let open_notification = fake_server
6846 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6847 .await;
6848
6849 // Edit the buffer, moving the content down
6850 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6851 let change_notification_1 = fake_server
6852 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6853 .await;
6854 assert!(
6855 change_notification_1.text_document.version > open_notification.text_document.version
6856 );
6857
6858 // Report some diagnostics for the initial version of the buffer
6859 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6860 lsp::PublishDiagnosticsParams {
6861 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6862 version: Some(open_notification.text_document.version),
6863 diagnostics: vec![
6864 lsp::Diagnostic {
6865 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6866 severity: Some(DiagnosticSeverity::ERROR),
6867 message: "undefined variable 'A'".to_string(),
6868 source: Some("disk".to_string()),
6869 ..Default::default()
6870 },
6871 lsp::Diagnostic {
6872 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6873 severity: Some(DiagnosticSeverity::ERROR),
6874 message: "undefined variable 'BB'".to_string(),
6875 source: Some("disk".to_string()),
6876 ..Default::default()
6877 },
6878 lsp::Diagnostic {
6879 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6880 severity: Some(DiagnosticSeverity::ERROR),
6881 source: Some("disk".to_string()),
6882 message: "undefined variable 'CCC'".to_string(),
6883 ..Default::default()
6884 },
6885 ],
6886 },
6887 );
6888
6889 // The diagnostics have moved down since they were created.
6890 buffer.next_notification(cx).await;
6891 buffer.read_with(cx, |buffer, _| {
6892 assert_eq!(
6893 buffer
6894 .snapshot()
6895 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6896 .collect::<Vec<_>>(),
6897 &[
6898 DiagnosticEntry {
6899 range: Point::new(3, 9)..Point::new(3, 11),
6900 diagnostic: Diagnostic {
6901 severity: DiagnosticSeverity::ERROR,
6902 message: "undefined variable 'BB'".to_string(),
6903 is_disk_based: true,
6904 group_id: 1,
6905 is_primary: true,
6906 ..Default::default()
6907 },
6908 },
6909 DiagnosticEntry {
6910 range: Point::new(4, 9)..Point::new(4, 12),
6911 diagnostic: Diagnostic {
6912 severity: DiagnosticSeverity::ERROR,
6913 message: "undefined variable 'CCC'".to_string(),
6914 is_disk_based: true,
6915 group_id: 2,
6916 is_primary: true,
6917 ..Default::default()
6918 }
6919 }
6920 ]
6921 );
6922 assert_eq!(
6923 chunks_with_diagnostics(buffer, 0..buffer.len()),
6924 [
6925 ("\n\nfn a() { ".to_string(), None),
6926 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6927 (" }\nfn b() { ".to_string(), None),
6928 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6929 (" }\nfn c() { ".to_string(), None),
6930 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6931 (" }\n".to_string(), None),
6932 ]
6933 );
6934 assert_eq!(
6935 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6936 [
6937 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6938 (" }\nfn c() { ".to_string(), None),
6939 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6940 ]
6941 );
6942 });
6943
6944 // Ensure overlapping diagnostics are highlighted correctly.
6945 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6946 lsp::PublishDiagnosticsParams {
6947 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6948 version: Some(open_notification.text_document.version),
6949 diagnostics: vec![
6950 lsp::Diagnostic {
6951 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6952 severity: Some(DiagnosticSeverity::ERROR),
6953 message: "undefined variable 'A'".to_string(),
6954 source: Some("disk".to_string()),
6955 ..Default::default()
6956 },
6957 lsp::Diagnostic {
6958 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6959 severity: Some(DiagnosticSeverity::WARNING),
6960 message: "unreachable statement".to_string(),
6961 source: Some("disk".to_string()),
6962 ..Default::default()
6963 },
6964 ],
6965 },
6966 );
6967
6968 buffer.next_notification(cx).await;
6969 buffer.read_with(cx, |buffer, _| {
6970 assert_eq!(
6971 buffer
6972 .snapshot()
6973 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6974 .collect::<Vec<_>>(),
6975 &[
6976 DiagnosticEntry {
6977 range: Point::new(2, 9)..Point::new(2, 12),
6978 diagnostic: Diagnostic {
6979 severity: DiagnosticSeverity::WARNING,
6980 message: "unreachable statement".to_string(),
6981 is_disk_based: true,
6982 group_id: 4,
6983 is_primary: true,
6984 ..Default::default()
6985 }
6986 },
6987 DiagnosticEntry {
6988 range: Point::new(2, 9)..Point::new(2, 10),
6989 diagnostic: Diagnostic {
6990 severity: DiagnosticSeverity::ERROR,
6991 message: "undefined variable 'A'".to_string(),
6992 is_disk_based: true,
6993 group_id: 3,
6994 is_primary: true,
6995 ..Default::default()
6996 },
6997 }
6998 ]
6999 );
7000 assert_eq!(
7001 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7002 [
7003 ("fn a() { ".to_string(), None),
7004 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7005 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7006 ("\n".to_string(), None),
7007 ]
7008 );
7009 assert_eq!(
7010 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7011 [
7012 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7013 ("\n".to_string(), None),
7014 ]
7015 );
7016 });
7017
7018 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7019 // changes since the last save.
7020 buffer.update(cx, |buffer, cx| {
7021 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7022 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7023 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7024 });
7025 let change_notification_2 = fake_server
7026 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7027 .await;
7028 assert!(
7029 change_notification_2.text_document.version
7030 > change_notification_1.text_document.version
7031 );
7032
7033 // Handle out-of-order diagnostics
7034 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7035 lsp::PublishDiagnosticsParams {
7036 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7037 version: Some(change_notification_2.text_document.version),
7038 diagnostics: vec![
7039 lsp::Diagnostic {
7040 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7041 severity: Some(DiagnosticSeverity::ERROR),
7042 message: "undefined variable 'BB'".to_string(),
7043 source: Some("disk".to_string()),
7044 ..Default::default()
7045 },
7046 lsp::Diagnostic {
7047 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7048 severity: Some(DiagnosticSeverity::WARNING),
7049 message: "undefined variable 'A'".to_string(),
7050 source: Some("disk".to_string()),
7051 ..Default::default()
7052 },
7053 ],
7054 },
7055 );
7056
7057 buffer.next_notification(cx).await;
7058 buffer.read_with(cx, |buffer, _| {
7059 assert_eq!(
7060 buffer
7061 .snapshot()
7062 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7063 .collect::<Vec<_>>(),
7064 &[
7065 DiagnosticEntry {
7066 range: Point::new(2, 21)..Point::new(2, 22),
7067 diagnostic: Diagnostic {
7068 severity: DiagnosticSeverity::WARNING,
7069 message: "undefined variable 'A'".to_string(),
7070 is_disk_based: true,
7071 group_id: 6,
7072 is_primary: true,
7073 ..Default::default()
7074 }
7075 },
7076 DiagnosticEntry {
7077 range: Point::new(3, 9)..Point::new(3, 14),
7078 diagnostic: Diagnostic {
7079 severity: DiagnosticSeverity::ERROR,
7080 message: "undefined variable 'BB'".to_string(),
7081 is_disk_based: true,
7082 group_id: 5,
7083 is_primary: true,
7084 ..Default::default()
7085 },
7086 }
7087 ]
7088 );
7089 });
7090 }
7091
7092 #[gpui::test]
7093 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7094 cx.foreground().forbid_parking();
7095
7096 let text = concat!(
7097 "let one = ;\n", //
7098 "let two = \n",
7099 "let three = 3;\n",
7100 );
7101
7102 let fs = FakeFs::new(cx.background());
7103 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7104
7105 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7106 let buffer = project
7107 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7108 .await
7109 .unwrap();
7110
7111 project.update(cx, |project, cx| {
7112 project
7113 .update_buffer_diagnostics(
7114 &buffer,
7115 vec![
7116 DiagnosticEntry {
7117 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7118 diagnostic: Diagnostic {
7119 severity: DiagnosticSeverity::ERROR,
7120 message: "syntax error 1".to_string(),
7121 ..Default::default()
7122 },
7123 },
7124 DiagnosticEntry {
7125 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7126 diagnostic: Diagnostic {
7127 severity: DiagnosticSeverity::ERROR,
7128 message: "syntax error 2".to_string(),
7129 ..Default::default()
7130 },
7131 },
7132 ],
7133 None,
7134 cx,
7135 )
7136 .unwrap();
7137 });
7138
7139 // An empty range is extended forward to include the following character.
7140 // At the end of a line, an empty range is extended backward to include
7141 // the preceding character.
7142 buffer.read_with(cx, |buffer, _| {
7143 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7144 assert_eq!(
7145 chunks
7146 .iter()
7147 .map(|(s, d)| (s.as_str(), *d))
7148 .collect::<Vec<_>>(),
7149 &[
7150 ("let one = ", None),
7151 (";", Some(DiagnosticSeverity::ERROR)),
7152 ("\nlet two =", None),
7153 (" ", Some(DiagnosticSeverity::ERROR)),
7154 ("\nlet three = 3;\n", None)
7155 ]
7156 );
7157 });
7158 }
7159
7160 #[gpui::test]
7161 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7162 cx.foreground().forbid_parking();
7163
7164 let mut language = Language::new(
7165 LanguageConfig {
7166 name: "Rust".into(),
7167 path_suffixes: vec!["rs".to_string()],
7168 ..Default::default()
7169 },
7170 Some(tree_sitter_rust::language()),
7171 );
7172 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7173
7174 let text = "
7175 fn a() {
7176 f1();
7177 }
7178 fn b() {
7179 f2();
7180 }
7181 fn c() {
7182 f3();
7183 }
7184 "
7185 .unindent();
7186
7187 let fs = FakeFs::new(cx.background());
7188 fs.insert_tree(
7189 "/dir",
7190 json!({
7191 "a.rs": text.clone(),
7192 }),
7193 )
7194 .await;
7195
7196 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7197 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7198 let buffer = project
7199 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7200 .await
7201 .unwrap();
7202
7203 let mut fake_server = fake_servers.next().await.unwrap();
7204 let lsp_document_version = fake_server
7205 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7206 .await
7207 .text_document
7208 .version;
7209
7210 // Simulate editing the buffer after the language server computes some edits.
7211 buffer.update(cx, |buffer, cx| {
7212 buffer.edit(
7213 [(
7214 Point::new(0, 0)..Point::new(0, 0),
7215 "// above first function\n",
7216 )],
7217 cx,
7218 );
7219 buffer.edit(
7220 [(
7221 Point::new(2, 0)..Point::new(2, 0),
7222 " // inside first function\n",
7223 )],
7224 cx,
7225 );
7226 buffer.edit(
7227 [(
7228 Point::new(6, 4)..Point::new(6, 4),
7229 "// inside second function ",
7230 )],
7231 cx,
7232 );
7233
7234 assert_eq!(
7235 buffer.text(),
7236 "
7237 // above first function
7238 fn a() {
7239 // inside first function
7240 f1();
7241 }
7242 fn b() {
7243 // inside second function f2();
7244 }
7245 fn c() {
7246 f3();
7247 }
7248 "
7249 .unindent()
7250 );
7251 });
7252
7253 let edits = project
7254 .update(cx, |project, cx| {
7255 project.edits_from_lsp(
7256 &buffer,
7257 vec![
7258 // replace body of first function
7259 lsp::TextEdit {
7260 range: lsp::Range::new(
7261 lsp::Position::new(0, 0),
7262 lsp::Position::new(3, 0),
7263 ),
7264 new_text: "
7265 fn a() {
7266 f10();
7267 }
7268 "
7269 .unindent(),
7270 },
7271 // edit inside second function
7272 lsp::TextEdit {
7273 range: lsp::Range::new(
7274 lsp::Position::new(4, 6),
7275 lsp::Position::new(4, 6),
7276 ),
7277 new_text: "00".into(),
7278 },
7279 // edit inside third function via two distinct edits
7280 lsp::TextEdit {
7281 range: lsp::Range::new(
7282 lsp::Position::new(7, 5),
7283 lsp::Position::new(7, 5),
7284 ),
7285 new_text: "4000".into(),
7286 },
7287 lsp::TextEdit {
7288 range: lsp::Range::new(
7289 lsp::Position::new(7, 5),
7290 lsp::Position::new(7, 6),
7291 ),
7292 new_text: "".into(),
7293 },
7294 ],
7295 Some(lsp_document_version),
7296 cx,
7297 )
7298 })
7299 .await
7300 .unwrap();
7301
7302 buffer.update(cx, |buffer, cx| {
7303 for (range, new_text) in edits {
7304 buffer.edit([(range, new_text)], cx);
7305 }
7306 assert_eq!(
7307 buffer.text(),
7308 "
7309 // above first function
7310 fn a() {
7311 // inside first function
7312 f10();
7313 }
7314 fn b() {
7315 // inside second function f200();
7316 }
7317 fn c() {
7318 f4000();
7319 }
7320 "
7321 .unindent()
7322 );
7323 });
7324 }
7325
7326 #[gpui::test]
7327 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7328 cx.foreground().forbid_parking();
7329
7330 let text = "
7331 use a::b;
7332 use a::c;
7333
7334 fn f() {
7335 b();
7336 c();
7337 }
7338 "
7339 .unindent();
7340
7341 let fs = FakeFs::new(cx.background());
7342 fs.insert_tree(
7343 "/dir",
7344 json!({
7345 "a.rs": text.clone(),
7346 }),
7347 )
7348 .await;
7349
7350 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7351 let buffer = project
7352 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7353 .await
7354 .unwrap();
7355
7356 // Simulate the language server sending us a small edit in the form of a very large diff.
7357 // Rust-analyzer does this when performing a merge-imports code action.
7358 let edits = project
7359 .update(cx, |project, cx| {
7360 project.edits_from_lsp(
7361 &buffer,
7362 [
7363 // Replace the first use statement without editing the semicolon.
7364 lsp::TextEdit {
7365 range: lsp::Range::new(
7366 lsp::Position::new(0, 4),
7367 lsp::Position::new(0, 8),
7368 ),
7369 new_text: "a::{b, c}".into(),
7370 },
7371 // Reinsert the remainder of the file between the semicolon and the final
7372 // newline of the file.
7373 lsp::TextEdit {
7374 range: lsp::Range::new(
7375 lsp::Position::new(0, 9),
7376 lsp::Position::new(0, 9),
7377 ),
7378 new_text: "\n\n".into(),
7379 },
7380 lsp::TextEdit {
7381 range: lsp::Range::new(
7382 lsp::Position::new(0, 9),
7383 lsp::Position::new(0, 9),
7384 ),
7385 new_text: "
7386 fn f() {
7387 b();
7388 c();
7389 }"
7390 .unindent(),
7391 },
7392 // Delete everything after the first newline of the file.
7393 lsp::TextEdit {
7394 range: lsp::Range::new(
7395 lsp::Position::new(1, 0),
7396 lsp::Position::new(7, 0),
7397 ),
7398 new_text: "".into(),
7399 },
7400 ],
7401 None,
7402 cx,
7403 )
7404 })
7405 .await
7406 .unwrap();
7407
7408 buffer.update(cx, |buffer, cx| {
7409 let edits = edits
7410 .into_iter()
7411 .map(|(range, text)| {
7412 (
7413 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7414 text,
7415 )
7416 })
7417 .collect::<Vec<_>>();
7418
7419 assert_eq!(
7420 edits,
7421 [
7422 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7423 (Point::new(1, 0)..Point::new(2, 0), "".into())
7424 ]
7425 );
7426
7427 for (range, new_text) in edits {
7428 buffer.edit([(range, new_text)], cx);
7429 }
7430 assert_eq!(
7431 buffer.text(),
7432 "
7433 use a::{b, c};
7434
7435 fn f() {
7436 b();
7437 c();
7438 }
7439 "
7440 .unindent()
7441 );
7442 });
7443 }
7444
7445 #[gpui::test]
7446 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7447 cx.foreground().forbid_parking();
7448
7449 let text = "
7450 use a::b;
7451 use a::c;
7452
7453 fn f() {
7454 b();
7455 c();
7456 }
7457 "
7458 .unindent();
7459
7460 let fs = FakeFs::new(cx.background());
7461 fs.insert_tree(
7462 "/dir",
7463 json!({
7464 "a.rs": text.clone(),
7465 }),
7466 )
7467 .await;
7468
7469 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7470 let buffer = project
7471 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7472 .await
7473 .unwrap();
7474
7475        // Simulate the language server sending us edits out of order, with some
7476        // ranges inverted (the range's start comes after its end).
7477 let edits = project
7478 .update(cx, |project, cx| {
7479 project.edits_from_lsp(
7480 &buffer,
7481 [
7482 lsp::TextEdit {
7483 range: lsp::Range::new(
7484 lsp::Position::new(0, 9),
7485 lsp::Position::new(0, 9),
7486 ),
7487 new_text: "\n\n".into(),
7488 },
7489 lsp::TextEdit {
7490 range: lsp::Range::new(
7491 lsp::Position::new(0, 8),
7492 lsp::Position::new(0, 4),
7493 ),
7494 new_text: "a::{b, c}".into(),
7495 },
7496 lsp::TextEdit {
7497 range: lsp::Range::new(
7498 lsp::Position::new(1, 0),
7499 lsp::Position::new(7, 0),
7500 ),
7501 new_text: "".into(),
7502 },
7503 lsp::TextEdit {
7504 range: lsp::Range::new(
7505 lsp::Position::new(0, 9),
7506 lsp::Position::new(0, 9),
7507 ),
7508 new_text: "
7509 fn f() {
7510 b();
7511 c();
7512 }"
7513 .unindent(),
7514 },
7515 ],
7516 None,
7517 cx,
7518 )
7519 })
7520 .await
7521 .unwrap();
7522
7523 buffer.update(cx, |buffer, cx| {
7524 let edits = edits
7525 .into_iter()
7526 .map(|(range, text)| {
7527 (
7528 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7529 text,
7530 )
7531 })
7532 .collect::<Vec<_>>();
7533
7534 assert_eq!(
7535 edits,
7536 [
7537 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7538 (Point::new(1, 0)..Point::new(2, 0), "".into())
7539 ]
7540 );
7541
7542 for (range, new_text) in edits {
7543 buffer.edit([(range, new_text)], cx);
7544 }
7545 assert_eq!(
7546 buffer.text(),
7547 "
7548 use a::{b, c};
7549
7550 fn f() {
7551 b();
7552 c();
7553 }
7554 "
7555 .unindent()
7556 );
7557 });
7558 }
7559
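    // Test helper: collects the buffer's chunks in `range`, merging adjacent
    // chunks that share the same diagnostic severity.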
7560 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7561 buffer: &Buffer,
7562 range: Range<T>,
7563 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7564 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7565 for chunk in buffer.snapshot().chunks(range, true) {
7566 if chunks.last().map_or(false, |prev_chunk| {
7567 prev_chunk.1 == chunk.diagnostic_severity
7568 }) {
7569 chunks.last_mut().unwrap().0.push_str(chunk.text);
7570 } else {
7571 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7572 }
7573 }
7574 chunks
7575 }
7576
7577 #[gpui::test]
7578 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7579 let dir = temp_tree(json!({
7580 "root": {
7581 "dir1": {},
7582 "dir2": {
7583 "dir3": {}
7584 }
7585 }
7586 }));
7587
7588 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7589 let cancel_flag = Default::default();
7590 let results = project
7591 .read_with(cx, |project, cx| {
7592 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7593 })
7594 .await;
7595
7596 assert!(results.is_empty());
7597 }
7598
7599 #[gpui::test(iterations = 10)]
7600 async fn test_definition(cx: &mut gpui::TestAppContext) {
7601 let mut language = Language::new(
7602 LanguageConfig {
7603 name: "Rust".into(),
7604 path_suffixes: vec!["rs".to_string()],
7605 ..Default::default()
7606 },
7607 Some(tree_sitter_rust::language()),
7608 );
7609 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7610
7611 let fs = FakeFs::new(cx.background());
7612 fs.insert_tree(
7613 "/dir",
7614 json!({
7615 "a.rs": "const fn a() { A }",
7616 "b.rs": "const y: i32 = crate::a()",
7617 }),
7618 )
7619 .await;
7620
7621 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7622 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7623
7624 let buffer = project
7625 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7626 .await
7627 .unwrap();
7628
7629 let fake_server = fake_servers.next().await.unwrap();
7630 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7631 let params = params.text_document_position_params;
7632 assert_eq!(
7633 params.text_document.uri.to_file_path().unwrap(),
7634 Path::new("/dir/b.rs"),
7635 );
7636 assert_eq!(params.position, lsp::Position::new(0, 22));
7637
7638 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7639 lsp::Location::new(
7640 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7641 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7642 ),
7643 )))
7644 });
7645
7646 let mut definitions = project
7647 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7648 .await
7649 .unwrap();
7650
7651 assert_eq!(definitions.len(), 1);
7652 let definition = definitions.pop().unwrap();
7653 cx.update(|cx| {
7654 let target_buffer = definition.target.buffer.read(cx);
7655 assert_eq!(
7656 target_buffer
7657 .file()
7658 .unwrap()
7659 .as_local()
7660 .unwrap()
7661 .abs_path(cx),
7662 Path::new("/dir/a.rs"),
7663 );
7664 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7665 assert_eq!(
7666 list_worktrees(&project, cx),
7667 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7668 );
7669
7670 drop(definition);
7671 });
7672 cx.read(|cx| {
7673 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7674 });
7675
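        // Lists each worktree's absolute path along with whether it is user-visible.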
7676 fn list_worktrees<'a>(
7677 project: &'a ModelHandle<Project>,
7678 cx: &'a AppContext,
7679 ) -> Vec<(&'a Path, bool)> {
7680 project
7681 .read(cx)
7682 .worktrees(cx)
7683 .map(|worktree| {
7684 let worktree = worktree.read(cx);
7685 (
7686 worktree.as_local().unwrap().abs_path().as_ref(),
7687 worktree.is_visible(),
7688 )
7689 })
7690 .collect::<Vec<_>>()
7691 }
7692 }
7693
7694 #[gpui::test]
7695 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7696 let mut language = Language::new(
7697 LanguageConfig {
7698 name: "TypeScript".into(),
7699 path_suffixes: vec!["ts".to_string()],
7700 ..Default::default()
7701 },
7702 Some(tree_sitter_typescript::language_typescript()),
7703 );
7704 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7705
7706 let fs = FakeFs::new(cx.background());
7707 fs.insert_tree(
7708 "/dir",
7709 json!({
7710 "a.ts": "",
7711 }),
7712 )
7713 .await;
7714
7715 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7716 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7717 let buffer = project
7718 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7719 .await
7720 .unwrap();
7721
7722 let fake_server = fake_language_servers.next().await.unwrap();
7723
7724 let text = "let a = b.fqn";
7725 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7726 let completions = project.update(cx, |project, cx| {
7727 project.completions(&buffer, text.len(), cx)
7728 });
7729
7730 fake_server
7731 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7732 Ok(Some(lsp::CompletionResponse::Array(vec![
7733 lsp::CompletionItem {
7734 label: "fullyQualifiedName?".into(),
7735 insert_text: Some("fullyQualifiedName".into()),
7736 ..Default::default()
7737 },
7738 ])))
7739 })
7740 .next()
7741 .await;
7742 let completions = completions.await.unwrap();
7743 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7744 assert_eq!(completions.len(), 1);
7745 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7746 assert_eq!(
7747 completions[0].old_range.to_offset(&snapshot),
7748 text.len() - 3..text.len()
7749 );
7750
7751 let text = "let a = \"atoms/cmp\"";
7752 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7753 let completions = project.update(cx, |project, cx| {
7754 project.completions(&buffer, text.len() - 1, cx)
7755 });
7756
7757 fake_server
7758 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7759 Ok(Some(lsp::CompletionResponse::Array(vec![
7760 lsp::CompletionItem {
7761 label: "component".into(),
7762 ..Default::default()
7763 },
7764 ])))
7765 })
7766 .next()
7767 .await;
7768 let completions = completions.await.unwrap();
7769 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7770 assert_eq!(completions.len(), 1);
7771 assert_eq!(completions[0].new_text, "component");
7772 assert_eq!(
7773 completions[0].old_range.to_offset(&snapshot),
7774 text.len() - 4..text.len() - 1
7775 );
7776 }
7777
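// Verifies that applying a code action whose resolution contains no edits falls back to
// executing the action's command, and that the workspace edit requested by the server
// while the command runs is captured in the returned project transaction.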
7778 #[gpui::test(iterations = 10)]
7779 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7780 let mut language = Language::new(
7781 LanguageConfig {
7782 name: "TypeScript".into(),
7783 path_suffixes: vec!["ts".to_string()],
7784 ..Default::default()
7785 },
7786 None,
7787 );
7788 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7789
7790 let fs = FakeFs::new(cx.background());
7791 fs.insert_tree(
7792 "/dir",
7793 json!({
7794 "a.ts": "a",
7795 }),
7796 )
7797 .await;
7798
7799 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7800 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7801 let buffer = project
7802 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7803 .await
7804 .unwrap();
7805
7806 let fake_server = fake_language_servers.next().await.unwrap();
7807
7808 // The language server returns code actions that contain commands but no edits.
7809 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7810 fake_server
7811 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7812 Ok(Some(vec![
7813 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7814 title: "The code action".into(),
7815 command: Some(lsp::Command {
7816 title: "The command".into(),
7817 command: "_the/command".into(),
7818 arguments: Some(vec![json!("the-argument")]),
7819 }),
7820 ..Default::default()
7821 }),
7822 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7823 title: "two".into(),
7824 ..Default::default()
7825 }),
7826 ]))
7827 })
7828 .next()
7829 .await;
7830
7831 let action = actions.await.unwrap()[0].clone();
7832 let apply = project.update(cx, |project, cx| {
7833 project.apply_code_action(buffer.clone(), action, true, cx)
7834 });
7835
7836 // Resolving the code action does not populate its edits. In the absence of
7837 // edits, we must execute the given command.
7838 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7839 |action, _| async move { Ok(action) },
7840 );
7841
7842 // While executing the command, the language server sends the editor
7843 // a `workspace/applyEdit` request.
7844 fake_server
7845 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7846 let fake = fake_server.clone();
7847 move |params, _| {
7848 assert_eq!(params.command, "_the/command");
7849 let fake = fake.clone();
7850 async move {
7851 fake.server
7852 .request::<lsp::request::ApplyWorkspaceEdit>(
7853 lsp::ApplyWorkspaceEditParams {
7854 label: None,
7855 edit: lsp::WorkspaceEdit {
7856 changes: Some(
7857 [(
7858 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7859 vec![lsp::TextEdit {
7860 range: lsp::Range::new(
7861 lsp::Position::new(0, 0),
7862 lsp::Position::new(0, 0),
7863 ),
7864 new_text: "X".into(),
7865 }],
7866 )]
7867 .into_iter()
7868 .collect(),
7869 ),
7870 ..Default::default()
7871 },
7872 },
7873 )
7874 .await
7875 .unwrap();
7876 Ok(Some(json!(null)))
7877 }
7878 }
7879 })
7880 .next()
7881 .await;
7882
7883 // Applying the code action returns a project transaction containing the edits
7884 // sent by the language server in its `workspace/applyEdit` request.
7885 let transaction = apply.await.unwrap();
7886 assert!(transaction.0.contains_key(&buffer));
7887 buffer.update(cx, |buffer, cx| {
7888 assert_eq!(buffer.text(), "Xa");
7889 buffer.undo(cx);
7890 assert_eq!(buffer.text(), "a");
7891 });
7892 }
7893
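// Verifies that saving a modified buffer writes its full contents back to the file system.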
7894 #[gpui::test]
7895 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7896 let fs = FakeFs::new(cx.background());
7897 fs.insert_tree(
7898 "/dir",
7899 json!({
7900 "file1": "the old contents",
7901 }),
7902 )
7903 .await;
7904
7905 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7906 let buffer = project
7907 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7908 .await
7909 .unwrap();
7910 buffer
7911 .update(cx, |buffer, cx| {
7912 assert_eq!(buffer.text(), "the old contents");
7913 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7914 buffer.save(cx)
7915 })
7916 .await
7917 .unwrap();
7918
7919 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7920 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7921 }
7922
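// Same as test_save_file, but with a worktree rooted at a single file rather than a directory.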
7923 #[gpui::test]
7924 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7925 let fs = FakeFs::new(cx.background());
7926 fs.insert_tree(
7927 "/dir",
7928 json!({
7929 "file1": "the old contents",
7930 }),
7931 )
7932 .await;
7933
7934 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7935 let buffer = project
7936 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7937 .await
7938 .unwrap();
7939 buffer
7940 .update(cx, |buffer, cx| {
7941 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7942 buffer.save(cx)
7943 })
7944 .await
7945 .unwrap();
7946
7947 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7948 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7949 }
7950
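// Verifies that saving an untitled buffer to a path assigns it a file, clears its dirty
// state, and lets subsequent opens of that path resolve to the same buffer.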
7951 #[gpui::test]
7952 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7953 let fs = FakeFs::new(cx.background());
7954 fs.insert_tree("/dir", json!({})).await;
7955
7956 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7957 let buffer = project.update(cx, |project, cx| {
7958 project.create_buffer("", None, cx).unwrap()
7959 });
7960 buffer.update(cx, |buffer, cx| {
7961 buffer.edit([(0..0, "abc")], cx);
7962 assert!(buffer.is_dirty());
7963 assert!(!buffer.has_conflict());
7964 });
7965 project
7966 .update(cx, |project, cx| {
7967 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7968 })
7969 .await
7970 .unwrap();
7971 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7972 buffer.read_with(cx, |buffer, cx| {
7973 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7974 assert!(!buffer.is_dirty());
7975 assert!(!buffer.has_conflict());
7976 });
7977
7978 let opened_buffer = project
7979 .update(cx, |project, cx| {
7980 project.open_local_buffer("/dir/file1", cx)
7981 })
7982 .await
7983 .unwrap();
7984 assert_eq!(opened_buffer, buffer);
7985 }
7986
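// Verifies that renaming and deleting files on disk preserves project entry ids and updates
// open buffers' paths, and that a remote replica of the worktree converges to the same state
// once the corresponding updates are applied.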
7987 #[gpui::test(retries = 5)]
7988 async fn test_rescan_and_remote_updates(
7989 deterministic: Arc<Deterministic>,
7990 cx: &mut gpui::TestAppContext,
7991 ) {
7992 let dir = temp_tree(json!({
7993 "a": {
7994 "file1": "",
7995 "file2": "",
7996 "file3": "",
7997 },
7998 "b": {
7999 "c": {
8000 "file4": "",
8001 "file5": "",
8002 }
8003 }
8004 }));
8005
8006 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8007 let rpc = project.read_with(cx, |p, _| p.client.clone());
8008
8009 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8010 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8011 async move { buffer.await.unwrap() }
8012 };
8013 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8014 project.read_with(cx, |project, cx| {
8015 let tree = project.worktrees(cx).next().unwrap();
8016 tree.read(cx)
8017 .entry_for_path(path)
8018 .unwrap_or_else(|| panic!("no entry for path {}", path))
8019 .id
8020 })
8021 };
8022
8023 let buffer2 = buffer_for_path("a/file2", cx).await;
8024 let buffer3 = buffer_for_path("a/file3", cx).await;
8025 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8026 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8027
8028 let file2_id = id_for_path("a/file2", &cx);
8029 let file3_id = id_for_path("a/file3", &cx);
8030 let file4_id = id_for_path("b/c/file4", &cx);
8031
8032 // Create a remote copy of this worktree.
8033 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8034 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8035 let remote = cx.update(|cx| {
8036 Worktree::remote(
8037 1,
8038 1,
8039 proto::WorktreeMetadata {
8040 id: initial_snapshot.id().to_proto(),
8041 root_name: initial_snapshot.root_name().into(),
8042 visible: true,
8043 },
8044 rpc.clone(),
8045 cx,
8046 )
8047 });
8048 remote.update(cx, |remote, _| {
8049 let update = initial_snapshot.build_initial_update(1);
8050 remote.as_remote_mut().unwrap().update_from_remote(update);
8051 });
8052 deterministic.run_until_parked();
8053
8054 cx.read(|cx| {
8055 assert!(!buffer2.read(cx).is_dirty());
8056 assert!(!buffer3.read(cx).is_dirty());
8057 assert!(!buffer4.read(cx).is_dirty());
8058 assert!(!buffer5.read(cx).is_dirty());
8059 });
8060
8061 // Rename and delete files and directories.
8062 tree.flush_fs_events(&cx).await;
8063 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8064 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8065 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8066 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8067 tree.flush_fs_events(&cx).await;
8068
8069 let expected_paths = vec![
8070 "a",
8071 "a/file1",
8072 "a/file2.new",
8073 "b",
8074 "d",
8075 "d/file3",
8076 "d/file4",
8077 ];
8078
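// Entry ids survive the renames, open buffers now point at the new paths, and the buffer
// whose file was deleted keeps its old path but is marked as deleted.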
8079 cx.read(|app| {
8080 assert_eq!(
8081 tree.read(app)
8082 .paths()
8083 .map(|p| p.to_str().unwrap())
8084 .collect::<Vec<_>>(),
8085 expected_paths
8086 );
8087
8088 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8089 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8090 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8091
8092 assert_eq!(
8093 buffer2.read(app).file().unwrap().path().as_ref(),
8094 Path::new("a/file2.new")
8095 );
8096 assert_eq!(
8097 buffer3.read(app).file().unwrap().path().as_ref(),
8098 Path::new("d/file3")
8099 );
8100 assert_eq!(
8101 buffer4.read(app).file().unwrap().path().as_ref(),
8102 Path::new("d/file4")
8103 );
8104 assert_eq!(
8105 buffer5.read(app).file().unwrap().path().as_ref(),
8106 Path::new("b/c/file5")
8107 );
8108
8109 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8110 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8111 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8112 assert!(buffer5.read(app).file().unwrap().is_deleted());
8113 });
8114
8115 // Update the remote worktree. Check that it becomes consistent with the
8116 // local worktree.
8117 remote.update(cx, |remote, cx| {
8118 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
8119 &initial_snapshot,
8120 1,
8121 1,
8122 true,
8123 );
8124 remote.as_remote_mut().unwrap().update_from_remote(update);
8125 });
8126 deterministic.run_until_parked();
8127 remote.read_with(cx, |remote, _| {
8128 assert_eq!(
8129 remote
8130 .paths()
8131 .map(|p| p.to_str().unwrap())
8132 .collect::<Vec<_>>(),
8133 expected_paths
8134 );
8135 });
8136 }
8137
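// Verifies that opening the same path multiple times, whether concurrently or sequentially,
// always yields the same underlying buffer.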
8138 #[gpui::test]
8139 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8140 let fs = FakeFs::new(cx.background());
8141 fs.insert_tree(
8142 "/dir",
8143 json!({
8144 "a.txt": "a-contents",
8145 "b.txt": "b-contents",
8146 }),
8147 )
8148 .await;
8149
8150 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8151
8152 // Spawn multiple tasks to open paths, repeating some paths.
8153 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8154 (
8155 p.open_local_buffer("/dir/a.txt", cx),
8156 p.open_local_buffer("/dir/b.txt", cx),
8157 p.open_local_buffer("/dir/a.txt", cx),
8158 )
8159 });
8160
8161 let buffer_a_1 = buffer_a_1.await.unwrap();
8162 let buffer_a_2 = buffer_a_2.await.unwrap();
8163 let buffer_b = buffer_b.await.unwrap();
8164 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8165 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8166
8167 // There is only one buffer per path.
8168 let buffer_a_id = buffer_a_1.id();
8169 assert_eq!(buffer_a_2.id(), buffer_a_id);
8170
8171 // Drop one handle, then open the same path again while another handle still keeps the buffer alive.
8172 drop(buffer_a_1);
8173 let buffer_a_3 = project
8174 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8175 .await
8176 .unwrap();
8177
8178 // There's still only one buffer per path.
8179 assert_eq!(buffer_a_3.id(), buffer_a_id);
8180 }
8181
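// Verifies the buffer's dirty-state transitions and the events emitted across edits, saves,
// restoration to the previously saved text, and deletion of the underlying file.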
8182 #[gpui::test]
8183 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8184 let fs = FakeFs::new(cx.background());
8185 fs.insert_tree(
8186 "/dir",
8187 json!({
8188 "file1": "abc",
8189 "file2": "def",
8190 "file3": "ghi",
8191 }),
8192 )
8193 .await;
8194
8195 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8196
8197 let buffer1 = project
8198 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8199 .await
8200 .unwrap();
8201 let events = Rc::new(RefCell::new(Vec::new()));
8202
8203 // Subscribe to the buffer's events; initially, the buffer isn't dirty.
8204 buffer1.update(cx, |buffer, cx| {
8205 cx.subscribe(&buffer1, {
8206 let events = events.clone();
8207 move |_, _, event, _| match event {
8208 BufferEvent::Operation(_) => {}
8209 _ => events.borrow_mut().push(event.clone()),
8210 }
8211 })
8212 .detach();
8213
8214 assert!(!buffer.is_dirty());
8215 assert!(events.borrow().is_empty());
8216
8217 buffer.edit([(1..2, "")], cx);
8218 });
8219
8220 // After the first edit, the buffer is dirty and emits Edited and DirtyChanged events.
8221 buffer1.update(cx, |buffer, cx| {
8222 assert_eq!(buffer.text(), "ac");
8223 assert!(buffer.is_dirty());
8224 assert_eq!(
8225 *events.borrow(),
8226 &[language::Event::Edited, language::Event::DirtyChanged]
8227 );
8228 events.borrow_mut().clear();
8229 buffer.did_save(
8230 buffer.version(),
8231 buffer.as_rope().fingerprint(),
8232 buffer.file().unwrap().mtime(),
8233 None,
8234 cx,
8235 );
8236 });
8237
8238 // After saving, the buffer is no longer dirty and emits a Saved event.
8239 buffer1.update(cx, |buffer, cx| {
8240 assert!(!buffer.is_dirty());
8241 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8242 events.borrow_mut().clear();
8243
8244 buffer.edit([(1..1, "B")], cx);
8245 buffer.edit([(2..2, "D")], cx);
8246 });
8247
8248 // After editing again, the buffer is dirty and emits further Edited and DirtyChanged events.
8249 buffer1.update(cx, |buffer, cx| {
8250 assert_eq!(buffer.text(), "aBDc");
8251 assert!(buffer.is_dirty());
8252 assert_eq!(
8253 *events.borrow(),
8254 &[
8255 language::Event::Edited,
8256 language::Event::DirtyChanged,
8257 language::Event::Edited,
8258 ],
8259 );
8260 events.borrow_mut().clear();
8261
8262 // After restoring the buffer to its previously-saved state,
8263 // the buffer is not considered dirty anymore.
8264 buffer.edit([(1..3, "")], cx);
8265 assert_eq!(buffer.text(), "ac");
8266 assert!(!buffer.is_dirty());
8267 });
8268
8269 assert_eq!(
8270 *events.borrow(),
8271 &[language::Event::Edited, language::Event::DirtyChanged]
8272 );
8273
8274 // When a file is deleted, the buffer is considered dirty.
8275 let events = Rc::new(RefCell::new(Vec::new()));
8276 let buffer2 = project
8277 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8278 .await
8279 .unwrap();
8280 buffer2.update(cx, |_, cx| {
8281 cx.subscribe(&buffer2, {
8282 let events = events.clone();
8283 move |_, _, event, _| events.borrow_mut().push(event.clone())
8284 })
8285 .detach();
8286 });
8287
8288 fs.remove_file("/dir/file2".as_ref(), Default::default())
8289 .await
8290 .unwrap();
8291 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8292 assert_eq!(
8293 *events.borrow(),
8294 &[
8295 language::Event::DirtyChanged,
8296 language::Event::FileHandleChanged
8297 ]
8298 );
8299
8300 // When a file that is already dirty is deleted, no DirtyChanged event is emitted.
8301 let events = Rc::new(RefCell::new(Vec::new()));
8302 let buffer3 = project
8303 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8304 .await
8305 .unwrap();
8306 buffer3.update(cx, |_, cx| {
8307 cx.subscribe(&buffer3, {
8308 let events = events.clone();
8309 move |_, _, event, _| events.borrow_mut().push(event.clone())
8310 })
8311 .detach();
8312 });
8313
8314 buffer3.update(cx, |buffer, cx| {
8315 buffer.edit([(0..0, "x")], cx);
8316 });
8317 events.borrow_mut().clear();
8318 fs.remove_file("/dir/file3".as_ref(), Default::default())
8319 .await
8320 .unwrap();
8321 buffer3
8322 .condition(&cx, |_, _| !events.borrow().is_empty())
8323 .await;
8324 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8325 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8326 }
8327
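// Verifies that an unmodified buffer reloads when its file changes on disk, adjusting anchors
// via a diff of the old and new contents, while a modified buffer is instead flagged as conflicted.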
8328 #[gpui::test]
8329 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8330 let initial_contents = "aaa\nbbbbb\nc\n";
8331 let fs = FakeFs::new(cx.background());
8332 fs.insert_tree(
8333 "/dir",
8334 json!({
8335 "the-file": initial_contents,
8336 }),
8337 )
8338 .await;
8339 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8340 let buffer = project
8341 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8342 .await
8343 .unwrap();
8344
8345 let anchors = (0..3)
8346 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8347 .collect::<Vec<_>>();
8348
8349 // Change the file on disk, adding two new lines of text, and removing
8350 // one line.
8351 buffer.read_with(cx, |buffer, _| {
8352 assert!(!buffer.is_dirty());
8353 assert!(!buffer.has_conflict());
8354 });
8355 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8356 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8357 .await
8358 .unwrap();
8359
8360 // Because the buffer was not modified, it is reloaded from disk. Its
8361 // contents are edited according to the diff between the old and new
8362 // file contents.
8363 buffer
8364 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8365 .await;
8366
8367 buffer.update(cx, |buffer, _| {
8368 assert_eq!(buffer.text(), new_contents);
8369 assert!(!buffer.is_dirty());
8370 assert!(!buffer.has_conflict());
8371
8372 let anchor_positions = anchors
8373 .iter()
8374 .map(|anchor| anchor.to_point(&*buffer))
8375 .collect::<Vec<_>>();
8376 assert_eq!(
8377 anchor_positions,
8378 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8379 );
8380 });
8381
8382 // Modify the buffer
8383 buffer.update(cx, |buffer, cx| {
8384 buffer.edit([(0..0, " ")], cx);
8385 assert!(buffer.is_dirty());
8386 assert!(!buffer.has_conflict());
8387 });
8388
8389 // Change the file on disk again, adding blank lines to the beginning.
8390 fs.save(
8391 "/dir/the-file".as_ref(),
8392 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8393 )
8394 .await
8395 .unwrap();
8396
8397 // Because the buffer is modified, it doesn't reload from disk, but is
8398 // marked as having a conflict.
8399 buffer
8400 .condition(&cx, |buffer, _| buffer.has_conflict())
8401 .await;
8402 }
8403
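// Verifies that diagnostics published with related information are grouped: each group shares
// a group id, with one primary entry and the related locations as secondary hint entries.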
8404 #[gpui::test]
8405 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8406 cx.foreground().forbid_parking();
8407
8408 let fs = FakeFs::new(cx.background());
8409 fs.insert_tree(
8410 "/the-dir",
8411 json!({
8412 "a.rs": "
8413 fn foo(mut v: Vec<usize>) {
8414 for x in &v {
8415 v.push(1);
8416 }
8417 }
8418 "
8419 .unindent(),
8420 }),
8421 )
8422 .await;
8423
8424 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8425 let buffer = project
8426 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8427 .await
8428 .unwrap();
8429
8430 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8431 let message = lsp::PublishDiagnosticsParams {
8432 uri: buffer_uri.clone(),
8433 diagnostics: vec![
8434 lsp::Diagnostic {
8435 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8436 severity: Some(DiagnosticSeverity::WARNING),
8437 message: "error 1".to_string(),
8438 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8439 location: lsp::Location {
8440 uri: buffer_uri.clone(),
8441 range: lsp::Range::new(
8442 lsp::Position::new(1, 8),
8443 lsp::Position::new(1, 9),
8444 ),
8445 },
8446 message: "error 1 hint 1".to_string(),
8447 }]),
8448 ..Default::default()
8449 },
8450 lsp::Diagnostic {
8451 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8452 severity: Some(DiagnosticSeverity::HINT),
8453 message: "error 1 hint 1".to_string(),
8454 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8455 location: lsp::Location {
8456 uri: buffer_uri.clone(),
8457 range: lsp::Range::new(
8458 lsp::Position::new(1, 8),
8459 lsp::Position::new(1, 9),
8460 ),
8461 },
8462 message: "original diagnostic".to_string(),
8463 }]),
8464 ..Default::default()
8465 },
8466 lsp::Diagnostic {
8467 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8468 severity: Some(DiagnosticSeverity::ERROR),
8469 message: "error 2".to_string(),
8470 related_information: Some(vec![
8471 lsp::DiagnosticRelatedInformation {
8472 location: lsp::Location {
8473 uri: buffer_uri.clone(),
8474 range: lsp::Range::new(
8475 lsp::Position::new(1, 13),
8476 lsp::Position::new(1, 15),
8477 ),
8478 },
8479 message: "error 2 hint 1".to_string(),
8480 },
8481 lsp::DiagnosticRelatedInformation {
8482 location: lsp::Location {
8483 uri: buffer_uri.clone(),
8484 range: lsp::Range::new(
8485 lsp::Position::new(1, 13),
8486 lsp::Position::new(1, 15),
8487 ),
8488 },
8489 message: "error 2 hint 2".to_string(),
8490 },
8491 ]),
8492 ..Default::default()
8493 },
8494 lsp::Diagnostic {
8495 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8496 severity: Some(DiagnosticSeverity::HINT),
8497 message: "error 2 hint 1".to_string(),
8498 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8499 location: lsp::Location {
8500 uri: buffer_uri.clone(),
8501 range: lsp::Range::new(
8502 lsp::Position::new(2, 8),
8503 lsp::Position::new(2, 17),
8504 ),
8505 },
8506 message: "original diagnostic".to_string(),
8507 }]),
8508 ..Default::default()
8509 },
8510 lsp::Diagnostic {
8511 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8512 severity: Some(DiagnosticSeverity::HINT),
8513 message: "error 2 hint 2".to_string(),
8514 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8515 location: lsp::Location {
8516 uri: buffer_uri.clone(),
8517 range: lsp::Range::new(
8518 lsp::Position::new(2, 8),
8519 lsp::Position::new(2, 17),
8520 ),
8521 },
8522 message: "original diagnostic".to_string(),
8523 }]),
8524 ..Default::default()
8525 },
8526 ],
8527 version: None,
8528 };
8529
8530 project
8531 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8532 .unwrap();
8533 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8534
8535 assert_eq!(
8536 buffer
8537 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8538 .collect::<Vec<_>>(),
8539 &[
8540 DiagnosticEntry {
8541 range: Point::new(1, 8)..Point::new(1, 9),
8542 diagnostic: Diagnostic {
8543 severity: DiagnosticSeverity::WARNING,
8544 message: "error 1".to_string(),
8545 group_id: 0,
8546 is_primary: true,
8547 ..Default::default()
8548 }
8549 },
8550 DiagnosticEntry {
8551 range: Point::new(1, 8)..Point::new(1, 9),
8552 diagnostic: Diagnostic {
8553 severity: DiagnosticSeverity::HINT,
8554 message: "error 1 hint 1".to_string(),
8555 group_id: 0,
8556 is_primary: false,
8557 ..Default::default()
8558 }
8559 },
8560 DiagnosticEntry {
8561 range: Point::new(1, 13)..Point::new(1, 15),
8562 diagnostic: Diagnostic {
8563 severity: DiagnosticSeverity::HINT,
8564 message: "error 2 hint 1".to_string(),
8565 group_id: 1,
8566 is_primary: false,
8567 ..Default::default()
8568 }
8569 },
8570 DiagnosticEntry {
8571 range: Point::new(1, 13)..Point::new(1, 15),
8572 diagnostic: Diagnostic {
8573 severity: DiagnosticSeverity::HINT,
8574 message: "error 2 hint 2".to_string(),
8575 group_id: 1,
8576 is_primary: false,
8577 ..Default::default()
8578 }
8579 },
8580 DiagnosticEntry {
8581 range: Point::new(2, 8)..Point::new(2, 17),
8582 diagnostic: Diagnostic {
8583 severity: DiagnosticSeverity::ERROR,
8584 message: "error 2".to_string(),
8585 group_id: 1,
8586 is_primary: true,
8587 ..Default::default()
8588 }
8589 }
8590 ]
8591 );
8592
8593 assert_eq!(
8594 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8595 &[
8596 DiagnosticEntry {
8597 range: Point::new(1, 8)..Point::new(1, 9),
8598 diagnostic: Diagnostic {
8599 severity: DiagnosticSeverity::WARNING,
8600 message: "error 1".to_string(),
8601 group_id: 0,
8602 is_primary: true,
8603 ..Default::default()
8604 }
8605 },
8606 DiagnosticEntry {
8607 range: Point::new(1, 8)..Point::new(1, 9),
8608 diagnostic: Diagnostic {
8609 severity: DiagnosticSeverity::HINT,
8610 message: "error 1 hint 1".to_string(),
8611 group_id: 0,
8612 is_primary: false,
8613 ..Default::default()
8614 }
8615 },
8616 ]
8617 );
8618 assert_eq!(
8619 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8620 &[
8621 DiagnosticEntry {
8622 range: Point::new(1, 13)..Point::new(1, 15),
8623 diagnostic: Diagnostic {
8624 severity: DiagnosticSeverity::HINT,
8625 message: "error 2 hint 1".to_string(),
8626 group_id: 1,
8627 is_primary: false,
8628 ..Default::default()
8629 }
8630 },
8631 DiagnosticEntry {
8632 range: Point::new(1, 13)..Point::new(1, 15),
8633 diagnostic: Diagnostic {
8634 severity: DiagnosticSeverity::HINT,
8635 message: "error 2 hint 2".to_string(),
8636 group_id: 1,
8637 is_primary: false,
8638 ..Default::default()
8639 }
8640 },
8641 DiagnosticEntry {
8642 range: Point::new(2, 8)..Point::new(2, 17),
8643 diagnostic: Diagnostic {
8644 severity: DiagnosticSeverity::ERROR,
8645 message: "error 2".to_string(),
8646 group_id: 1,
8647 is_primary: true,
8648 ..Default::default()
8649 }
8650 }
8651 ]
8652 );
8653 }
8654
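// Verifies the rename flow: prepare_rename surfaces the symbol's range from the language server,
// and perform_rename applies the resulting multi-file workspace edit as a project transaction.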
8655 #[gpui::test]
8656 async fn test_rename(cx: &mut gpui::TestAppContext) {
8657 cx.foreground().forbid_parking();
8658
8659 let mut language = Language::new(
8660 LanguageConfig {
8661 name: "Rust".into(),
8662 path_suffixes: vec!["rs".to_string()],
8663 ..Default::default()
8664 },
8665 Some(tree_sitter_rust::language()),
8666 );
8667 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8668 capabilities: lsp::ServerCapabilities {
8669 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8670 prepare_provider: Some(true),
8671 work_done_progress_options: Default::default(),
8672 })),
8673 ..Default::default()
8674 },
8675 ..Default::default()
8676 });
8677
8678 let fs = FakeFs::new(cx.background());
8679 fs.insert_tree(
8680 "/dir",
8681 json!({
8682 "one.rs": "const ONE: usize = 1;",
8683 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8684 }),
8685 )
8686 .await;
8687
8688 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8689 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8690 let buffer = project
8691 .update(cx, |project, cx| {
8692 project.open_local_buffer("/dir/one.rs", cx)
8693 })
8694 .await
8695 .unwrap();
8696
8697 let fake_server = fake_servers.next().await.unwrap();
8698
8699 let response = project.update(cx, |project, cx| {
8700 project.prepare_rename(buffer.clone(), 7, cx)
8701 });
8702 fake_server
8703 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8704 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8705 assert_eq!(params.position, lsp::Position::new(0, 7));
8706 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8707 lsp::Position::new(0, 6),
8708 lsp::Position::new(0, 9),
8709 ))))
8710 })
8711 .next()
8712 .await
8713 .unwrap();
8714 let range = response.await.unwrap().unwrap();
8715 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8716 assert_eq!(range, 6..9);
8717
8718 let response = project.update(cx, |project, cx| {
8719 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8720 });
8721 fake_server
8722 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8723 assert_eq!(
8724 params.text_document_position.text_document.uri.as_str(),
8725 "file:///dir/one.rs"
8726 );
8727 assert_eq!(
8728 params.text_document_position.position,
8729 lsp::Position::new(0, 7)
8730 );
8731 assert_eq!(params.new_name, "THREE");
8732 Ok(Some(lsp::WorkspaceEdit {
8733 changes: Some(
8734 [
8735 (
8736 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8737 vec![lsp::TextEdit::new(
8738 lsp::Range::new(
8739 lsp::Position::new(0, 6),
8740 lsp::Position::new(0, 9),
8741 ),
8742 "THREE".to_string(),
8743 )],
8744 ),
8745 (
8746 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8747 vec![
8748 lsp::TextEdit::new(
8749 lsp::Range::new(
8750 lsp::Position::new(0, 24),
8751 lsp::Position::new(0, 27),
8752 ),
8753 "THREE".to_string(),
8754 ),
8755 lsp::TextEdit::new(
8756 lsp::Range::new(
8757 lsp::Position::new(0, 35),
8758 lsp::Position::new(0, 38),
8759 ),
8760 "THREE".to_string(),
8761 ),
8762 ],
8763 ),
8764 ]
8765 .into_iter()
8766 .collect(),
8767 ),
8768 ..Default::default()
8769 }))
8770 })
8771 .next()
8772 .await
8773 .unwrap();
8774 let mut transaction = response.await.unwrap().0;
8775 assert_eq!(transaction.len(), 2);
8776 assert_eq!(
8777 transaction
8778 .remove_entry(&buffer)
8779 .unwrap()
8780 .0
8781 .read_with(cx, |buffer, _| buffer.text()),
8782 "const THREE: usize = 1;"
8783 );
8784 assert_eq!(
8785 transaction
8786 .into_keys()
8787 .next()
8788 .unwrap()
8789 .read_with(cx, |buffer, _| buffer.text()),
8790 "const TWO: usize = one::THREE + one::THREE;"
8791 );
8792 }
8793
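// Verifies that project-wide search covers files on disk as well as unsaved edits held in
// open buffers.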
8794 #[gpui::test]
8795 async fn test_search(cx: &mut gpui::TestAppContext) {
8796 let fs = FakeFs::new(cx.background());
8797 fs.insert_tree(
8798 "/dir",
8799 json!({
8800 "one.rs": "const ONE: usize = 1;",
8801 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8802 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8803 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8804 }),
8805 )
8806 .await;
8807 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8808 assert_eq!(
8809 search(&project, SearchQuery::text("TWO", false, true), cx)
8810 .await
8811 .unwrap(),
8812 HashMap::from_iter([
8813 ("two.rs".to_string(), vec![6..9]),
8814 ("three.rs".to_string(), vec![37..40])
8815 ])
8816 );
8817
8818 let buffer_4 = project
8819 .update(cx, |project, cx| {
8820 project.open_local_buffer("/dir/four.rs", cx)
8821 })
8822 .await
8823 .unwrap();
8824 buffer_4.update(cx, |buffer, cx| {
8825 let text = "two::TWO";
8826 buffer.edit([(20..28, text), (31..43, text)], cx);
8827 });
8828
8829 assert_eq!(
8830 search(&project, SearchQuery::text("TWO", false, true), cx)
8831 .await
8832 .unwrap(),
8833 HashMap::from_iter([
8834 ("two.rs".to_string(), vec![6..9]),
8835 ("three.rs".to_string(), vec![37..40]),
8836 ("four.rs".to_string(), vec![25..28, 36..39])
8837 ])
8838 );
8839
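// Helper that runs a project search and collects the results as file paths mapped to matched
// offset ranges.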
8840 async fn search(
8841 project: &ModelHandle<Project>,
8842 query: SearchQuery,
8843 cx: &mut gpui::TestAppContext,
8844 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8845 let results = project
8846 .update(cx, |project, cx| project.search(query, cx))
8847 .await?;
8848
8849 Ok(results
8850 .into_iter()
8851 .map(|(buffer, ranges)| {
8852 buffer.read_with(cx, |buffer, _| {
8853 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8854 let ranges = ranges
8855 .into_iter()
8856 .map(|range| range.to_offset(buffer))
8857 .collect::<Vec<_>>();
8858 (path, ranges)
8859 })
8860 })
8861 .collect())
8862 }
8863 }
8864}