1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
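// Open buffers and worktrees are held behind either strong or weak handles.
// While the project is shared (or is a remote project), buffers are kept alive
// with strong handles so collaborators can still reach them; when sharing stops
// they are downgraded to weak handles and dropped once nothing else references
// them (see `Project::share` and `Project::unshared` below). `OpenBuffer::Loading`
// accumulates operations that arrive for a buffer that hasn't finished opening
// yet; they are applied once the buffer is finally registered.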
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
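/// A path to a file or directory, expressed relative to the root of a specific
/// worktree in the project.
///
/// A minimal construction sketch (the `worktree_id` is assumed to come from an
/// existing worktree):
///
/// ```ignore
/// let project_path = ProjectPath {
///     worktree_id,
///     path: Path::new("src/lib.rs").into(),
/// };
/// ```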
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug)]
212pub struct DocumentHighlight {
213 pub range: Range<language::Anchor>,
214 pub kind: DocumentHighlightKind,
215}
216
217#[derive(Clone, Debug)]
218pub struct Symbol {
219 pub source_worktree_id: WorktreeId,
220 pub worktree_id: WorktreeId,
221 pub language_server_name: LanguageServerName,
222 pub path: PathBuf,
223 pub label: CodeLabel,
224 pub name: String,
225 pub kind: lsp::SymbolKind,
226 pub range: Range<PointUtf16>,
227 pub signature: [u8; 32],
228}
229
230#[derive(Clone, Debug, PartialEq)]
231pub struct HoverBlock {
232 pub text: String,
233 pub language: Option<String>,
234}
235
236impl HoverBlock {
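    // Converts an LSP `MarkedString` into a `HoverBlock`, discarding blocks whose
    // text is empty. For example:
    //
    //   MarkedString::String("".into())                -> None
    //   MarkedString::String("plain text".into())      -> Some(HoverBlock { text: "plain text", language: None })
    //   MarkedString::LanguageString(LanguageString {
    //       language: "rust".into(),
    //       value: "fn f() {}".into(),
    //   })                                              -> Some(HoverBlock { text: "fn f() {}", language: Some("rust") })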
237 fn try_new(marked_string: MarkedString) -> Option<Self> {
238 let result = match marked_string {
239 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
240 text: value,
241 language: Some(language),
242 },
243 MarkedString::String(text) => HoverBlock {
244 text,
245 language: None,
246 },
247 };
248 if result.text.is_empty() {
249 None
250 } else {
251 Some(result)
252 }
253 }
254}
255
256#[derive(Debug)]
257pub struct Hover {
258 pub contents: Vec<HoverBlock>,
259 pub range: Option<Range<language::Anchor>>,
260}
261
262#[derive(Default)]
263pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
264
265impl DiagnosticSummary {
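    // Tallies diagnostic entries into error and warning counts. Only primary
    // diagnostics are counted, and only those with ERROR or WARNING severity;
    // hints, information, and non-primary entries are ignored.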
266 fn new<'a, T: 'a>(
267 language_server_id: usize,
268 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
269 ) -> Self {
270 let mut this = Self {
271 language_server_id,
272 error_count: 0,
273 warning_count: 0,
274 };
275
276 for entry in diagnostics {
277 if entry.diagnostic.is_primary {
278 match entry.diagnostic.severity {
279 DiagnosticSeverity::ERROR => this.error_count += 1,
280 DiagnosticSeverity::WARNING => this.warning_count += 1,
281 _ => {}
282 }
283 }
284 }
285
286 this
287 }
288
289 pub fn is_empty(&self) -> bool {
290 self.error_count == 0 && self.warning_count == 0
291 }
292
293 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
294 proto::DiagnosticSummary {
295 path: path.to_string_lossy().to_string(),
296 language_server_id: self.language_server_id as u64,
297 error_count: self.error_count as u32,
298 warning_count: self.warning_count as u32,
299 }
300 }
301}
302
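/// Identifies a single entry (file or directory) across all of the project's
/// worktrees. Ids are allocated from a shared atomic counter and round-trip
/// through the wire protocol unchanged:
///
/// ```ignore
/// let counter = AtomicUsize::new(0);
/// let id = ProjectEntryId::new(&counter);
/// assert_eq!(ProjectEntryId::from_proto(id.to_proto()), id);
/// ```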
303#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
304pub struct ProjectEntryId(usize);
305
306impl ProjectEntryId {
307 pub const MAX: Self = Self(usize::MAX);
308
309 pub fn new(counter: &AtomicUsize) -> Self {
310 Self(counter.fetch_add(1, SeqCst))
311 }
312
313 pub fn from_proto(id: u64) -> Self {
314 Self(id as usize)
315 }
316
317 pub fn to_proto(&self) -> u64 {
318 self.0 as u64
319 }
320
321 pub fn to_usize(&self) -> usize {
322 self.0
323 }
324}
325
326impl Project {
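    /// Registers this model's RPC handlers with the client. Roughly speaking,
    /// message handlers process one-way notifications from peers, while request
    /// handlers also produce a response that is sent back to the requesting peer.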
327 pub fn init(client: &Arc<Client>) {
328 client.add_model_message_handler(Self::handle_request_join_project);
329 client.add_model_message_handler(Self::handle_add_collaborator);
330 client.add_model_message_handler(Self::handle_buffer_reloaded);
331 client.add_model_message_handler(Self::handle_buffer_saved);
332 client.add_model_message_handler(Self::handle_start_language_server);
333 client.add_model_message_handler(Self::handle_update_language_server);
334 client.add_model_message_handler(Self::handle_remove_collaborator);
335 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
336 client.add_model_message_handler(Self::handle_update_project);
337 client.add_model_message_handler(Self::handle_unregister_project);
338 client.add_model_message_handler(Self::handle_project_unshared);
339 client.add_model_message_handler(Self::handle_update_buffer_file);
340 client.add_model_message_handler(Self::handle_update_buffer);
341 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
342 client.add_model_message_handler(Self::handle_update_worktree);
343 client.add_model_request_handler(Self::handle_create_project_entry);
344 client.add_model_request_handler(Self::handle_rename_project_entry);
345 client.add_model_request_handler(Self::handle_copy_project_entry);
346 client.add_model_request_handler(Self::handle_delete_project_entry);
347 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
348 client.add_model_request_handler(Self::handle_apply_code_action);
349 client.add_model_request_handler(Self::handle_reload_buffers);
350 client.add_model_request_handler(Self::handle_format_buffers);
351 client.add_model_request_handler(Self::handle_get_code_actions);
352 client.add_model_request_handler(Self::handle_get_completions);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
356 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
358 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
359 client.add_model_request_handler(Self::handle_search_project);
360 client.add_model_request_handler(Self::handle_get_project_symbols);
361 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
362 client.add_model_request_handler(Self::handle_open_buffer_by_id);
363 client.add_model_request_handler(Self::handle_open_buffer_by_path);
364 client.add_model_request_handler(Self::handle_save_buffer);
365 }
366
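    /// Creates a project that is hosted in this process.
    ///
    /// A rough usage sketch (the client, stores, language registry, and fs
    /// handles are assumed to already exist, and the path is hypothetical):
    ///
    /// ```ignore
    /// let project = Project::local(true, client, user_store, project_store, languages, fs, cx);
    /// let worktree_task = project.update(cx, |project, cx| {
    ///     project.find_or_create_local_worktree("/path/to/repo", true, cx)
    /// });
    /// // Awaiting `worktree_task` yields the worktree handle plus the path relative to its root.
    /// ```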
367 pub fn local(
368 online: bool,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 project_store: ModelHandle<ProjectStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut MutableAppContext,
375 ) -> ModelHandle<Self> {
376 cx.add_model(|cx: &mut ModelContext<Self>| {
377 let (online_tx, online_rx) = watch::channel_with(online);
378 let (remote_id_tx, remote_id_rx) = watch::channel();
379 let _maintain_remote_id_task = cx.spawn_weak({
380 let status_rx = client.clone().status();
381 let online_rx = online_rx.clone();
382 move |this, mut cx| async move {
383 let mut stream = Stream::map(status_rx.clone(), drop)
384 .merge(Stream::map(online_rx.clone(), drop));
385 while stream.recv().await.is_some() {
386 let this = this.upgrade(&cx)?;
387 if status_rx.borrow().is_connected() && *online_rx.borrow() {
388 this.update(&mut cx, |this, cx| this.register(cx))
389 .await
390 .log_err()?;
391 } else {
392 this.update(&mut cx, |this, cx| this.unregister(cx))
393 .await
394 .log_err();
395 }
396 }
397 None
398 }
399 });
400
401 let handle = cx.weak_handle();
402 project_store.update(cx, |store, cx| store.add_project(handle, cx));
403
404 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
405 Self {
406 worktrees: Default::default(),
407 collaborators: Default::default(),
408 opened_buffers: Default::default(),
409 shared_buffers: Default::default(),
410 loading_buffers: Default::default(),
411 loading_local_worktrees: Default::default(),
412 buffer_snapshots: Default::default(),
413 client_state: ProjectClientState::Local {
414 is_shared: false,
415 remote_id_tx,
416 remote_id_rx,
417 online_tx,
418 online_rx,
419 _maintain_remote_id_task,
420 },
421 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
422 client_subscriptions: Vec::new(),
423 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
424 active_entry: None,
425 languages,
426 client,
427 user_store,
428 project_store,
429 fs,
430 next_entry_id: Default::default(),
431 next_diagnostic_group_id: Default::default(),
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_statuses: Default::default(),
435 last_workspace_edits_by_language_server: Default::default(),
436 language_server_settings: Default::default(),
437 next_language_server_id: 0,
438 nonce: StdRng::from_entropy().gen(),
439 initialized_persistent_state: false,
440 }
441 })
442 }
443
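    /// Joins a project hosted by another peer: authenticates and connects, sends
    /// a `JoinProject` request, maps a declined response onto a `JoinProjectError`,
    /// and otherwise builds a replica of the host's project from the accepted
    /// response's worktrees, language server statuses, and collaborators.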
444 pub async fn remote(
445 remote_id: u64,
446 client: Arc<Client>,
447 user_store: ModelHandle<UserStore>,
448 project_store: ModelHandle<ProjectStore>,
449 languages: Arc<LanguageRegistry>,
450 fs: Arc<dyn Fs>,
451 mut cx: AsyncAppContext,
452 ) -> Result<ModelHandle<Self>, JoinProjectError> {
453 client.authenticate_and_connect(true, &cx).await?;
454
455 let response = client
456 .request(proto::JoinProject {
457 project_id: remote_id,
458 })
459 .await?;
460
461 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
462 proto::join_project_response::Variant::Accept(response) => response,
463 proto::join_project_response::Variant::Decline(decline) => {
464 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
465 Some(proto::join_project_response::decline::Reason::Declined) => {
466 Err(JoinProjectError::HostDeclined)?
467 }
468 Some(proto::join_project_response::decline::Reason::Closed) => {
469 Err(JoinProjectError::HostClosedProject)?
470 }
471 Some(proto::join_project_response::decline::Reason::WentOffline) => {
472 Err(JoinProjectError::HostWentOffline)?
473 }
474 None => Err(anyhow!("missing decline reason"))?,
475 }
476 }
477 };
478
479 let replica_id = response.replica_id as ReplicaId;
480
481 let mut worktrees = Vec::new();
482 for worktree in response.worktrees {
483 let (worktree, load_task) = cx
484 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
485 worktrees.push(worktree);
486 load_task.detach();
487 }
488
489 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
490 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
491 let handle = cx.weak_handle();
492 project_store.update(cx, |store, cx| store.add_project(handle, cx));
493
494 let mut this = Self {
495 worktrees: Vec::new(),
496 loading_buffers: Default::default(),
497 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
498 shared_buffers: Default::default(),
499 loading_local_worktrees: Default::default(),
500 active_entry: None,
501 collaborators: Default::default(),
502 languages,
503 user_store: user_store.clone(),
504 project_store,
505 fs,
506 next_entry_id: Default::default(),
507 next_diagnostic_group_id: Default::default(),
508 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
509 _subscriptions: Default::default(),
510 client: client.clone(),
511 client_state: ProjectClientState::Remote {
512 sharing_has_stopped: false,
513 remote_id,
514 replica_id,
515 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
516 async move {
517 let mut status = client.status();
518 let is_connected =
519 status.next().await.map_or(false, |s| s.is_connected());
520 // Even if we're initially connected, any future change of the status means we were momentarily disconnected.
521 if !is_connected || status.next().await.is_some() {
522 if let Some(this) = this.upgrade(&cx) {
523 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
524 }
525 }
526 Ok(())
527 }
528 .log_err()
529 }),
530 },
531 language_servers: Default::default(),
532 started_language_servers: Default::default(),
533 language_server_settings: Default::default(),
534 language_server_statuses: response
535 .language_servers
536 .into_iter()
537 .map(|server| {
538 (
539 server.id as usize,
540 LanguageServerStatus {
541 name: server.name,
542 pending_work: Default::default(),
543 pending_diagnostic_updates: 0,
544 },
545 )
546 })
547 .collect(),
548 last_workspace_edits_by_language_server: Default::default(),
549 next_language_server_id: 0,
550 opened_buffers: Default::default(),
551 buffer_snapshots: Default::default(),
552 nonce: StdRng::from_entropy().gen(),
553 initialized_persistent_state: false,
554 };
555 for worktree in worktrees {
556 this.add_worktree(&worktree, cx);
557 }
558 this
559 });
560
561 let user_ids = response
562 .collaborators
563 .iter()
564 .map(|peer| peer.user_id)
565 .collect();
566 user_store
567 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
568 .await?;
569 let mut collaborators = HashMap::default();
570 for message in response.collaborators {
571 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
572 collaborators.insert(collaborator.peer_id, collaborator);
573 }
574
575 this.update(&mut cx, |this, _| {
576 this.collaborators = collaborators;
577 });
578
579 Ok(this)
580 }
581
582 #[cfg(any(test, feature = "test-support"))]
583 pub async fn test(
584 fs: Arc<dyn Fs>,
585 root_paths: impl IntoIterator<Item = &Path>,
586 cx: &mut gpui::TestAppContext,
587 ) -> ModelHandle<Project> {
588 if !cx.read(|cx| cx.has_global::<Settings>()) {
589 cx.update(|cx| cx.set_global(Settings::test(cx)));
590 }
591
592 let languages = Arc::new(LanguageRegistry::test());
593 let http_client = client::test::FakeHttpClient::with_404_response();
594 let client = client::Client::new(http_client.clone());
595 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
596 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
597 let project = cx.update(|cx| {
598 Project::local(true, client, user_store, project_store, languages, fs, cx)
599 });
600 for path in root_paths {
601 let (tree, _) = project
602 .update(cx, |project, cx| {
603 project.find_or_create_local_worktree(path, true, cx)
604 })
605 .await
606 .unwrap();
607 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
608 .await;
609 }
610 project
611 }
612
613 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
614 if self.is_remote() {
615 return Task::ready(Ok(()));
616 }
617
618 let db = self.project_store.read(cx).db.clone();
619 let keys = self.db_keys_for_online_state(cx);
620 let online_by_default = cx.global::<Settings>().projects_online_by_default;
621 let read_online = cx.background().spawn(async move {
622 let values = db.read(keys)?;
623 anyhow::Ok(
624 values
625 .into_iter()
626 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
627 )
628 });
629 cx.spawn(|this, mut cx| async move {
630 let online = read_online.await.log_err().unwrap_or(false);
631 this.update(&mut cx, |this, cx| {
632 this.initialized_persistent_state = true;
633 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
634 let mut online_tx = online_tx.borrow_mut();
635 if *online_tx != online {
636 *online_tx = online;
637 drop(online_tx);
638 this.metadata_changed(false, cx);
639 }
640 }
641 });
642 Ok(())
643 })
644 }
645
646 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
647 if self.is_remote() || !self.initialized_persistent_state {
648 return Task::ready(Ok(()));
649 }
650
651 let db = self.project_store.read(cx).db.clone();
652 let keys = self.db_keys_for_online_state(cx);
653 let is_online = self.is_online();
654 cx.background().spawn(async move {
655 let value = &[is_online as u8];
656 db.write(keys.into_iter().map(|key| (key, value)))
657 })
658 }
659
660 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
661 let settings = cx.global::<Settings>();
662
663 let mut language_servers_to_start = Vec::new();
664 for buffer in self.opened_buffers.values() {
665 if let Some(buffer) = buffer.upgrade(cx) {
666 let buffer = buffer.read(cx);
667 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
668 {
669 if settings.enable_language_server(Some(&language.name())) {
670 let worktree = file.worktree.read(cx);
671 language_servers_to_start.push((
672 worktree.id(),
673 worktree.as_local().unwrap().abs_path().clone(),
674 language.clone(),
675 ));
676 }
677 }
678 }
679 }
680
681 let mut language_servers_to_stop = Vec::new();
682 for language in self.languages.to_vec() {
683 if let Some(lsp_adapter) = language.lsp_adapter() {
684 if !settings.enable_language_server(Some(&language.name())) {
685 let lsp_name = lsp_adapter.name();
686 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
687 if lsp_name == *started_lsp_name {
688 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
689 }
690 }
691 }
692 }
693 }
694
695 // Stop all newly-disabled language servers.
696 for (worktree_id, adapter_name) in language_servers_to_stop {
697 self.stop_language_server(worktree_id, adapter_name, cx)
698 .detach();
699 }
700
701 // Start all the newly-enabled language servers.
702 for (worktree_id, worktree_path, language) in language_servers_to_start {
703 self.start_language_server(worktree_id, worktree_path, language, cx);
704 }
705
706 cx.notify();
707 }
708
709 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
710 self.opened_buffers
711 .get(&remote_id)
712 .and_then(|buffer| buffer.upgrade(cx))
713 }
714
715 pub fn languages(&self) -> &Arc<LanguageRegistry> {
716 &self.languages
717 }
718
719 pub fn client(&self) -> Arc<Client> {
720 self.client.clone()
721 }
722
723 pub fn user_store(&self) -> ModelHandle<UserStore> {
724 self.user_store.clone()
725 }
726
727 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
728 self.project_store.clone()
729 }
730
731 #[cfg(any(test, feature = "test-support"))]
732 pub fn check_invariants(&self, cx: &AppContext) {
733 if self.is_local() {
734 let mut worktree_root_paths = HashMap::default();
735 for worktree in self.worktrees(cx) {
736 let worktree = worktree.read(cx);
737 let abs_path = worktree.as_local().unwrap().abs_path().clone();
738 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
739 assert_eq!(
740 prev_worktree_id,
741 None,
742 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
743 abs_path,
744 worktree.id(),
745 prev_worktree_id
746 )
747 }
748 } else {
749 let replica_id = self.replica_id();
750 for buffer in self.opened_buffers.values() {
751 if let Some(buffer) = buffer.upgrade(cx) {
752 let buffer = buffer.read(cx);
753 assert_eq!(
754 buffer.deferred_ops_len(),
755 0,
756 "replica {}, buffer {} has deferred operations",
757 replica_id,
758 buffer.remote_id()
759 );
760 }
761 }
762 }
763 }
764
765 #[cfg(any(test, feature = "test-support"))]
766 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
767 let path = path.into();
768 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
769 self.opened_buffers.iter().any(|(_, buffer)| {
770 if let Some(buffer) = buffer.upgrade(cx) {
771 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
772 if file.worktree == worktree && file.path() == &path.path {
773 return true;
774 }
775 }
776 }
777 false
778 })
779 } else {
780 false
781 }
782 }
783
784 pub fn fs(&self) -> &Arc<dyn Fs> {
785 &self.fs
786 }
787
788 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
789 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
790 let mut online_tx = online_tx.borrow_mut();
791 if *online_tx != online {
792 *online_tx = online;
793 drop(online_tx);
794 self.metadata_changed(true, cx);
795 }
796 }
797 }
798
799 pub fn is_online(&self) -> bool {
800 match &self.client_state {
801 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
802 ProjectClientState::Remote { .. } => true,
803 }
804 }
805
806 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
807 self.unshared(cx);
808 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
809 if let Some(remote_id) = *remote_id_rx.borrow() {
810 let request = self.client.request(proto::UnregisterProject {
811 project_id: remote_id,
812 });
813 return cx.spawn(|this, mut cx| async move {
814 let response = request.await;
815
816 // Unregistering the project causes the server to send out a
817 // contact update removing this project from the host's list
818 // of online projects. Wait until this contact update has been
819 // processed before clearing out this project's remote id, so
820 // that there is no moment where this project appears in the
821 // contact metadata and *also* has no remote id.
822 this.update(&mut cx, |this, cx| {
823 this.user_store()
824 .update(cx, |store, _| store.contact_updates_done())
825 })
826 .await;
827
828 this.update(&mut cx, |this, cx| {
829 if let ProjectClientState::Local { remote_id_tx, .. } =
830 &mut this.client_state
831 {
832 *remote_id_tx.borrow_mut() = None;
833 }
834 this.client_subscriptions.clear();
835 this.metadata_changed(false, cx);
836 });
837 response.map(drop)
838 });
839 }
840 }
841 Task::ready(Ok(()))
842 }
843
844 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
845 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
846 if remote_id_rx.borrow().is_some() {
847 return Task::ready(Ok(()));
848 }
849 }
850
851 let response = self.client.request(proto::RegisterProject {});
852 cx.spawn(|this, mut cx| async move {
853 let remote_id = response.await?.project_id;
854 this.update(&mut cx, |this, cx| {
855 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
856 *remote_id_tx.borrow_mut() = Some(remote_id);
857 }
858
859 this.metadata_changed(false, cx);
860 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
861 this.client_subscriptions
862 .push(this.client.add_model_for_remote_entity(remote_id, cx));
863 Ok(())
864 })
865 })
866 }
867
868 pub fn remote_id(&self) -> Option<u64> {
869 match &self.client_state {
870 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
871 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
872 }
873 }
874
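    /// Resolves to the project's remote id: immediately for remote projects, and
    /// otherwise by waiting on the registration watch until `register` has
    /// assigned one.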
875 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
876 let mut id = None;
877 let mut watch = None;
878 match &self.client_state {
879 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
880 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
881 }
882
883 async move {
884 if let Some(id) = id {
885 return id;
886 }
887 let mut watch = watch.unwrap();
888 loop {
889 let id = *watch.borrow();
890 if let Some(id) = id {
891 return id;
892 }
893 watch.next().await;
894 }
895 }
896 }
897
898 pub fn shared_remote_id(&self) -> Option<u64> {
899 match &self.client_state {
900 ProjectClientState::Local {
901 remote_id_rx,
902 is_shared,
903 ..
904 } => {
905 if *is_shared {
906 *remote_id_rx.borrow()
907 } else {
908 None
909 }
910 }
911 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
912 }
913 }
914
915 pub fn replica_id(&self) -> ReplicaId {
916 match &self.client_state {
917 ProjectClientState::Local { .. } => 0,
918 ProjectClientState::Remote { replica_id, .. } => *replica_id,
919 }
920 }
921
922 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
923 if let ProjectClientState::Local {
924 remote_id_rx,
925 online_rx,
926 ..
927 } = &self.client_state
928 {
929 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
930 self.client
931 .send(proto::UpdateProject {
932 project_id,
933 worktrees: self
934 .worktrees
935 .iter()
936 .filter_map(|worktree| {
937 worktree.upgrade(&cx).map(|worktree| {
938 worktree.read(cx).as_local().unwrap().metadata_proto()
939 })
940 })
941 .collect(),
942 })
943 .log_err();
944 }
945
946 self.project_store.update(cx, |_, cx| cx.notify());
947 if persist {
948 self.persist_state(cx).detach_and_log_err(cx);
949 }
950 cx.notify();
951 }
952 }
953
954 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
955 &self.collaborators
956 }
957
958 pub fn worktrees<'a>(
959 &'a self,
960 cx: &'a AppContext,
961 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
962 self.worktrees
963 .iter()
964 .filter_map(move |worktree| worktree.upgrade(cx))
965 }
966
967 pub fn visible_worktrees<'a>(
968 &'a self,
969 cx: &'a AppContext,
970 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
971 self.worktrees.iter().filter_map(|worktree| {
972 worktree.upgrade(cx).and_then(|worktree| {
973 if worktree.read(cx).is_visible() {
974 Some(worktree)
975 } else {
976 None
977 }
978 })
979 })
980 }
981
982 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
983 self.visible_worktrees(cx)
984 .map(|tree| tree.read(cx).root_name())
985 }
986
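    // One database key is produced per visible worktree. As a sketch, a worktree
    // rooted at the hypothetical path "/projects/zed" yields the key
    // "project-path-online:/projects/zed", whose value is a single byte: 1 when
    // the project should be online, 0 otherwise (see `persist_state` above).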
987 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
988 self.worktrees
989 .iter()
990 .filter_map(|worktree| {
991 let worktree = worktree.upgrade(&cx)?.read(cx);
992 if worktree.is_visible() {
993 Some(format!(
994 "project-path-online:{}",
995 worktree.as_local().unwrap().abs_path().to_string_lossy()
996 ))
997 } else {
998 None
999 }
1000 })
1001 .collect::<Vec<_>>()
1002 }
1003
1004 pub fn worktree_for_id(
1005 &self,
1006 id: WorktreeId,
1007 cx: &AppContext,
1008 ) -> Option<ModelHandle<Worktree>> {
1009 self.worktrees(cx)
1010 .find(|worktree| worktree.read(cx).id() == id)
1011 }
1012
1013 pub fn worktree_for_entry(
1014 &self,
1015 entry_id: ProjectEntryId,
1016 cx: &AppContext,
1017 ) -> Option<ModelHandle<Worktree>> {
1018 self.worktrees(cx)
1019 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1020 }
1021
1022 pub fn worktree_id_for_entry(
1023 &self,
1024 entry_id: ProjectEntryId,
1025 cx: &AppContext,
1026 ) -> Option<WorktreeId> {
1027 self.worktree_for_entry(entry_id, cx)
1028 .map(|worktree| worktree.read(cx).id())
1029 }
1030
1031 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1032 paths.iter().all(|path| self.contains_path(&path, cx))
1033 }
1034
1035 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1036 for worktree in self.worktrees(cx) {
1037 let worktree = worktree.read(cx).as_local();
1038 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1039 return true;
1040 }
1041 }
1042 false
1043 }
1044
1045 pub fn create_entry(
1046 &mut self,
1047 project_path: impl Into<ProjectPath>,
1048 is_directory: bool,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<Task<Result<Entry>>> {
1051 let project_path = project_path.into();
1052 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1053 if self.is_local() {
1054 Some(worktree.update(cx, |worktree, cx| {
1055 worktree
1056 .as_local_mut()
1057 .unwrap()
1058 .create_entry(project_path.path, is_directory, cx)
1059 }))
1060 } else {
1061 let client = self.client.clone();
1062 let project_id = self.remote_id().unwrap();
1063 Some(cx.spawn_weak(|_, mut cx| async move {
1064 let response = client
1065 .request(proto::CreateProjectEntry {
1066 worktree_id: project_path.worktree_id.to_proto(),
1067 project_id,
1068 path: project_path.path.as_os_str().as_bytes().to_vec(),
1069 is_directory,
1070 })
1071 .await?;
1072 let entry = response
1073 .entry
1074 .ok_or_else(|| anyhow!("missing entry in response"))?;
1075 worktree
1076 .update(&mut cx, |worktree, cx| {
1077 worktree.as_remote().unwrap().insert_entry(
1078 entry,
1079 response.worktree_scan_id as usize,
1080 cx,
1081 )
1082 })
1083 .await
1084 }))
1085 }
1086 }
1087
1088 pub fn copy_entry(
1089 &mut self,
1090 entry_id: ProjectEntryId,
1091 new_path: impl Into<Arc<Path>>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Option<Task<Result<Entry>>> {
1094 let worktree = self.worktree_for_entry(entry_id, cx)?;
1095 let new_path = new_path.into();
1096 if self.is_local() {
1097 worktree.update(cx, |worktree, cx| {
1098 worktree
1099 .as_local_mut()
1100 .unwrap()
1101 .copy_entry(entry_id, new_path, cx)
1102 })
1103 } else {
1104 let client = self.client.clone();
1105 let project_id = self.remote_id().unwrap();
1106
1107 Some(cx.spawn_weak(|_, mut cx| async move {
1108 let response = client
1109 .request(proto::CopyProjectEntry {
1110 project_id,
1111 entry_id: entry_id.to_proto(),
1112 new_path: new_path.as_os_str().as_bytes().to_vec(),
1113 })
1114 .await?;
1115 let entry = response
1116 .entry
1117 .ok_or_else(|| anyhow!("missing entry in response"))?;
1118 worktree
1119 .update(&mut cx, |worktree, cx| {
1120 worktree.as_remote().unwrap().insert_entry(
1121 entry,
1122 response.worktree_scan_id as usize,
1123 cx,
1124 )
1125 })
1126 .await
1127 }))
1128 }
1129 }
1130
1131 pub fn rename_entry(
1132 &mut self,
1133 entry_id: ProjectEntryId,
1134 new_path: impl Into<Arc<Path>>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Option<Task<Result<Entry>>> {
1137 let worktree = self.worktree_for_entry(entry_id, cx)?;
1138 let new_path = new_path.into();
1139 if self.is_local() {
1140 worktree.update(cx, |worktree, cx| {
1141 worktree
1142 .as_local_mut()
1143 .unwrap()
1144 .rename_entry(entry_id, new_path, cx)
1145 })
1146 } else {
1147 let client = self.client.clone();
1148 let project_id = self.remote_id().unwrap();
1149
1150 Some(cx.spawn_weak(|_, mut cx| async move {
1151 let response = client
1152 .request(proto::RenameProjectEntry {
1153 project_id,
1154 entry_id: entry_id.to_proto(),
1155 new_path: new_path.as_os_str().as_bytes().to_vec(),
1156 })
1157 .await?;
1158 let entry = response
1159 .entry
1160 .ok_or_else(|| anyhow!("missing entry in response"))?;
1161 worktree
1162 .update(&mut cx, |worktree, cx| {
1163 worktree.as_remote().unwrap().insert_entry(
1164 entry,
1165 response.worktree_scan_id as usize,
1166 cx,
1167 )
1168 })
1169 .await
1170 }))
1171 }
1172 }
1173
1174 pub fn delete_entry(
1175 &mut self,
1176 entry_id: ProjectEntryId,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<Task<Result<()>>> {
1179 let worktree = self.worktree_for_entry(entry_id, cx)?;
1180 if self.is_local() {
1181 worktree.update(cx, |worktree, cx| {
1182 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::DeleteProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 })
1193 .await?;
1194 worktree
1195 .update(&mut cx, move |worktree, cx| {
1196 worktree.as_remote().unwrap().delete_entry(
1197 entry_id,
1198 response.worktree_scan_id as usize,
1199 cx,
1200 )
1201 })
1202 .await
1203 }))
1204 }
1205 }
1206
1207 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1208 let project_id;
1209 if let ProjectClientState::Local {
1210 remote_id_rx,
1211 is_shared,
1212 ..
1213 } = &mut self.client_state
1214 {
1215 if *is_shared {
1216 return Task::ready(Ok(()));
1217 }
1218 *is_shared = true;
1219 if let Some(id) = *remote_id_rx.borrow() {
1220 project_id = id;
1221 } else {
1222 return Task::ready(Err(anyhow!("project hasn't been registered")));
1223 }
1224 } else {
1225 return Task::ready(Err(anyhow!("can't share a remote project")));
1226 };
1227
1228 for open_buffer in self.opened_buffers.values_mut() {
1229 match open_buffer {
1230 OpenBuffer::Strong(_) => {}
1231 OpenBuffer::Weak(buffer) => {
1232 if let Some(buffer) = buffer.upgrade(cx) {
1233 *open_buffer = OpenBuffer::Strong(buffer);
1234 }
1235 }
1236 OpenBuffer::Loading(_) => unreachable!(),
1237 }
1238 }
1239
1240 for worktree_handle in self.worktrees.iter_mut() {
1241 match worktree_handle {
1242 WorktreeHandle::Strong(_) => {}
1243 WorktreeHandle::Weak(worktree) => {
1244 if let Some(worktree) = worktree.upgrade(cx) {
1245 *worktree_handle = WorktreeHandle::Strong(worktree);
1246 }
1247 }
1248 }
1249 }
1250
1251 let mut tasks = Vec::new();
1252 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1253 worktree.update(cx, |worktree, cx| {
1254 let worktree = worktree.as_local_mut().unwrap();
1255 tasks.push(worktree.share(project_id, cx));
1256 });
1257 }
1258
1259 for (server_id, status) in &self.language_server_statuses {
1260 self.client
1261 .send(proto::StartLanguageServer {
1262 project_id,
1263 server: Some(proto::LanguageServer {
1264 id: *server_id as u64,
1265 name: status.name.clone(),
1266 }),
1267 })
1268 .log_err();
1269 }
1270
1271 cx.spawn(|this, mut cx| async move {
1272 for task in tasks {
1273 task.await?;
1274 }
1275 this.update(&mut cx, |_, cx| cx.notify());
1276 Ok(())
1277 })
1278 }
1279
1280 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1281 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1282 if !*is_shared {
1283 return;
1284 }
1285
1286 *is_shared = false;
1287 self.collaborators.clear();
1288 self.shared_buffers.clear();
1289 for worktree_handle in self.worktrees.iter_mut() {
1290 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1291 let is_visible = worktree.update(cx, |worktree, _| {
1292 worktree.as_local_mut().unwrap().unshare();
1293 worktree.is_visible()
1294 });
1295 if !is_visible {
1296 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1297 }
1298 }
1299 }
1300
1301 for open_buffer in self.opened_buffers.values_mut() {
1302 match open_buffer {
1303 OpenBuffer::Strong(buffer) => {
1304 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1305 }
1306 _ => {}
1307 }
1308 }
1309
1310 cx.notify();
1311 } else {
1312 log::error!("attempted to unshare a remote project");
1313 }
1314 }
1315
1316 pub fn respond_to_join_request(
1317 &mut self,
1318 requester_id: u64,
1319 allow: bool,
1320 cx: &mut ModelContext<Self>,
1321 ) {
1322 if let Some(project_id) = self.remote_id() {
1323 let share = self.share(cx);
1324 let client = self.client.clone();
1325 cx.foreground()
1326 .spawn(async move {
1327 share.await?;
1328 client.send(proto::RespondToJoinProjectRequest {
1329 requester_id,
1330 project_id,
1331 allow,
1332 })
1333 })
1334 .detach_and_log_err(cx);
1335 }
1336 }
1337
1338 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1339 if let ProjectClientState::Remote {
1340 sharing_has_stopped,
1341 ..
1342 } = &mut self.client_state
1343 {
1344 *sharing_has_stopped = true;
1345 self.collaborators.clear();
1346 for worktree in &self.worktrees {
1347 if let Some(worktree) = worktree.upgrade(cx) {
1348 worktree.update(cx, |worktree, _| {
1349 if let Some(worktree) = worktree.as_remote_mut() {
1350 worktree.disconnected_from_host();
1351 }
1352 });
1353 }
1354 }
1355 cx.notify();
1356 }
1357 }
1358
1359 pub fn is_read_only(&self) -> bool {
1360 match &self.client_state {
1361 ProjectClientState::Local { .. } => false,
1362 ProjectClientState::Remote {
1363 sharing_has_stopped,
1364 ..
1365 } => *sharing_has_stopped,
1366 }
1367 }
1368
1369 pub fn is_local(&self) -> bool {
1370 match &self.client_state {
1371 ProjectClientState::Local { .. } => true,
1372 ProjectClientState::Remote { .. } => false,
1373 }
1374 }
1375
1376 pub fn is_remote(&self) -> bool {
1377 !self.is_local()
1378 }
1379
1380 pub fn create_buffer(
1381 &mut self,
1382 text: &str,
1383 language: Option<Arc<Language>>,
1384 cx: &mut ModelContext<Self>,
1385 ) -> Result<ModelHandle<Buffer>> {
1386 if self.is_remote() {
1387 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1388 }
1389
1390 let buffer = cx.add_model(|cx| {
1391 Buffer::new(self.replica_id(), text, cx)
1392 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1393 });
1394 self.register_buffer(&buffer, cx)?;
1395 Ok(buffer)
1396 }
1397
1398 pub fn open_path(
1399 &mut self,
1400 path: impl Into<ProjectPath>,
1401 cx: &mut ModelContext<Self>,
1402 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1403 let task = self.open_buffer(path, cx);
1404 cx.spawn_weak(|_, cx| async move {
1405 let buffer = task.await?;
1406 let project_entry_id = buffer
1407 .read_with(&cx, |buffer, cx| {
1408 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1409 })
1410 .ok_or_else(|| anyhow!("no project entry"))?;
1411 Ok((project_entry_id, buffer.into()))
1412 })
1413 }
1414
1415 pub fn open_local_buffer(
1416 &mut self,
1417 abs_path: impl AsRef<Path>,
1418 cx: &mut ModelContext<Self>,
1419 ) -> Task<Result<ModelHandle<Buffer>>> {
1420 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1421 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1422 } else {
1423 Task::ready(Err(anyhow!("no such path")))
1424 }
1425 }
1426
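    // Opens (or reuses) the buffer for a project path. Concurrent calls for the
    // same path are de-duplicated: the first call stores a watch channel in
    // `loading_buffers` and kicks off the load, and later calls simply await that
    // watch, so every caller resolves to the same buffer handle. A rough sketch,
    // with `path` being any `ProjectPath`:
    //
    //   let a = project.update(cx, |project, cx| project.open_buffer(path.clone(), cx));
    //   let b = project.update(cx, |project, cx| project.open_buffer(path, cx));
    //   // Both tasks resolve to the same ModelHandle<Buffer>.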
1427 pub fn open_buffer(
1428 &mut self,
1429 path: impl Into<ProjectPath>,
1430 cx: &mut ModelContext<Self>,
1431 ) -> Task<Result<ModelHandle<Buffer>>> {
1432 let project_path = path.into();
1433 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1434 worktree
1435 } else {
1436 return Task::ready(Err(anyhow!("no such worktree")));
1437 };
1438
1439 // If there is already a buffer for the given path, then return it.
1440 let existing_buffer = self.get_open_buffer(&project_path, cx);
1441 if let Some(existing_buffer) = existing_buffer {
1442 return Task::ready(Ok(existing_buffer));
1443 }
1444
1445 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1446 // If the given path is already being loaded, then wait for that existing
1447 // task to complete and return the same buffer.
1448 hash_map::Entry::Occupied(e) => e.get().clone(),
1449
1450 // Otherwise, record the fact that this path is now being loaded.
1451 hash_map::Entry::Vacant(entry) => {
1452 let (mut tx, rx) = postage::watch::channel();
1453 entry.insert(rx.clone());
1454
1455 let load_buffer = if worktree.read(cx).is_local() {
1456 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1457 } else {
1458 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1459 };
1460
1461 cx.spawn(move |this, mut cx| async move {
1462 let load_result = load_buffer.await;
1463 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1464 // Record the fact that the buffer is no longer loading.
1465 this.loading_buffers.remove(&project_path);
1466 let buffer = load_result.map_err(Arc::new)?;
1467 Ok(buffer)
1468 }));
1469 })
1470 .detach();
1471 rx
1472 }
1473 };
1474
1475 cx.foreground().spawn(async move {
1476 loop {
1477 if let Some(result) = loading_watch.borrow().as_ref() {
1478 match result {
1479 Ok(buffer) => return Ok(buffer.clone()),
1480 Err(error) => return Err(anyhow!("{}", error)),
1481 }
1482 }
1483 loading_watch.next().await;
1484 }
1485 })
1486 }
1487
1488 fn open_local_buffer_internal(
1489 &mut self,
1490 path: &Arc<Path>,
1491 worktree: &ModelHandle<Worktree>,
1492 cx: &mut ModelContext<Self>,
1493 ) -> Task<Result<ModelHandle<Buffer>>> {
1494 let load_buffer = worktree.update(cx, |worktree, cx| {
1495 let worktree = worktree.as_local_mut().unwrap();
1496 worktree.load_buffer(path, cx)
1497 });
1498 cx.spawn(|this, mut cx| async move {
1499 let buffer = load_buffer.await?;
1500 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1501 Ok(buffer)
1502 })
1503 }
1504
1505 fn open_remote_buffer_internal(
1506 &mut self,
1507 path: &Arc<Path>,
1508 worktree: &ModelHandle<Worktree>,
1509 cx: &mut ModelContext<Self>,
1510 ) -> Task<Result<ModelHandle<Buffer>>> {
1511 let rpc = self.client.clone();
1512 let project_id = self.remote_id().unwrap();
1513 let remote_worktree_id = worktree.read(cx).id();
1514 let path = path.clone();
1515 let path_string = path.to_string_lossy().to_string();
1516 cx.spawn(|this, mut cx| async move {
1517 let response = rpc
1518 .request(proto::OpenBufferByPath {
1519 project_id,
1520 worktree_id: remote_worktree_id.to_proto(),
1521 path: path_string,
1522 })
1523 .await?;
1524 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1525 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1526 .await
1527 })
1528 }
1529
1530 fn open_local_buffer_via_lsp(
1531 &mut self,
1532 abs_path: lsp::Url,
1533 lsp_adapter: Arc<dyn LspAdapter>,
1534 lsp_server: Arc<LanguageServer>,
1535 cx: &mut ModelContext<Self>,
1536 ) -> Task<Result<ModelHandle<Buffer>>> {
1537 cx.spawn(|this, mut cx| async move {
1538 let abs_path = abs_path
1539 .to_file_path()
1540 .map_err(|_| anyhow!("can't convert URI to path"))?;
1541 let (worktree, relative_path) = if let Some(result) =
1542 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1543 {
1544 result
1545 } else {
1546 let worktree = this
1547 .update(&mut cx, |this, cx| {
1548 this.create_local_worktree(&abs_path, false, cx)
1549 })
1550 .await?;
1551 this.update(&mut cx, |this, cx| {
1552 this.language_servers.insert(
1553 (worktree.read(cx).id(), lsp_adapter.name()),
1554 (lsp_adapter, lsp_server),
1555 );
1556 });
1557 (worktree, PathBuf::new())
1558 };
1559
1560 let project_path = ProjectPath {
1561 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1562 path: relative_path.into(),
1563 };
1564 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1565 .await
1566 })
1567 }
1568
1569 pub fn open_buffer_by_id(
1570 &mut self,
1571 id: u64,
1572 cx: &mut ModelContext<Self>,
1573 ) -> Task<Result<ModelHandle<Buffer>>> {
1574 if let Some(buffer) = self.buffer_for_id(id, cx) {
1575 Task::ready(Ok(buffer))
1576 } else if self.is_local() {
1577 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1578 } else if let Some(project_id) = self.remote_id() {
1579 let request = self
1580 .client
1581 .request(proto::OpenBufferById { project_id, id });
1582 cx.spawn(|this, mut cx| async move {
1583 let buffer = request
1584 .await?
1585 .buffer
1586 .ok_or_else(|| anyhow!("invalid buffer"))?;
1587 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1588 .await
1589 })
1590 } else {
1591 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1592 }
1593 }
1594
1595 pub fn save_buffer_as(
1596 &mut self,
1597 buffer: ModelHandle<Buffer>,
1598 abs_path: PathBuf,
1599 cx: &mut ModelContext<Project>,
1600 ) -> Task<Result<()>> {
1601 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1602 let old_path =
1603 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1604 cx.spawn(|this, mut cx| async move {
1605 if let Some(old_path) = old_path {
1606 this.update(&mut cx, |this, cx| {
1607 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1608 });
1609 }
1610 let (worktree, path) = worktree_task.await?;
1611 worktree
1612 .update(&mut cx, |worktree, cx| {
1613 worktree
1614 .as_local_mut()
1615 .unwrap()
1616 .save_buffer_as(buffer.clone(), path, cx)
1617 })
1618 .await?;
1619 this.update(&mut cx, |this, cx| {
1620 this.assign_language_to_buffer(&buffer, cx);
1621 this.register_buffer_with_language_server(&buffer, cx);
1622 });
1623 Ok(())
1624 })
1625 }
1626
1627 pub fn get_open_buffer(
1628 &mut self,
1629 path: &ProjectPath,
1630 cx: &mut ModelContext<Self>,
1631 ) -> Option<ModelHandle<Buffer>> {
1632 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1633 self.opened_buffers.values().find_map(|buffer| {
1634 let buffer = buffer.upgrade(cx)?;
1635 let file = File::from_dyn(buffer.read(cx).file())?;
1636 if file.worktree == worktree && file.path() == &path.path {
1637 Some(buffer)
1638 } else {
1639 None
1640 }
1641 })
1642 }
1643
1644 fn register_buffer(
1645 &mut self,
1646 buffer: &ModelHandle<Buffer>,
1647 cx: &mut ModelContext<Self>,
1648 ) -> Result<()> {
1649 let remote_id = buffer.read(cx).remote_id();
1650 let open_buffer = if self.is_remote() || self.is_shared() {
1651 OpenBuffer::Strong(buffer.clone())
1652 } else {
1653 OpenBuffer::Weak(buffer.downgrade())
1654 };
1655
1656 match self.opened_buffers.insert(remote_id, open_buffer) {
1657 None => {}
1658 Some(OpenBuffer::Loading(operations)) => {
1659 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1660 }
1661 Some(OpenBuffer::Weak(existing_handle)) => {
1662 if existing_handle.upgrade(cx).is_some() {
1663 Err(anyhow!(
1664 "already registered buffer with remote id {}",
1665 remote_id
1666 ))?
1667 }
1668 }
1669 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1670 "already registered buffer with remote id {}",
1671 remote_id
1672 ))?,
1673 }
1674 cx.subscribe(buffer, |this, buffer, event, cx| {
1675 this.on_buffer_event(buffer, event, cx);
1676 })
1677 .detach();
1678
1679 self.assign_language_to_buffer(buffer, cx);
1680 self.register_buffer_with_language_server(buffer, cx);
1681 cx.observe_release(buffer, |this, buffer, cx| {
1682 if let Some(file) = File::from_dyn(buffer.file()) {
1683 if file.is_local() {
1684 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1685 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1686 server
1687 .notify::<lsp::notification::DidCloseTextDocument>(
1688 lsp::DidCloseTextDocumentParams {
1689 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1690 },
1691 )
1692 .log_err();
1693 }
1694 }
1695 }
1696 })
1697 .detach();
1698
1699 Ok(())
1700 }
1701
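    // For a local buffer, replays any diagnostics the worktree has stored for its
    // path and, if a language server is running for that worktree, sends a
    // `textDocument/didOpen` notification carrying version 0 of the buffer text,
    // copies the server's completion trigger characters onto the buffer, and seeds
    // `buffer_snapshots` so that subsequent edits can be sent incrementally.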
1702 fn register_buffer_with_language_server(
1703 &mut self,
1704 buffer_handle: &ModelHandle<Buffer>,
1705 cx: &mut ModelContext<Self>,
1706 ) {
1707 let buffer = buffer_handle.read(cx);
1708 let buffer_id = buffer.remote_id();
1709 if let Some(file) = File::from_dyn(buffer.file()) {
1710 if file.is_local() {
1711 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1712 let initial_snapshot = buffer.text_snapshot();
1713
1714 let mut language_server = None;
1715 let mut language_id = None;
1716 if let Some(language) = buffer.language() {
1717 let worktree_id = file.worktree_id(cx);
1718 if let Some(adapter) = language.lsp_adapter() {
1719 language_id = adapter.id_for_language(language.name().as_ref());
1720 language_server = self
1721 .language_servers
1722 .get(&(worktree_id, adapter.name()))
1723 .cloned();
1724 }
1725 }
1726
1727 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1728 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1729 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1730 .log_err();
1731 }
1732 }
1733
1734 if let Some((_, server)) = language_server {
1735 server
1736 .notify::<lsp::notification::DidOpenTextDocument>(
1737 lsp::DidOpenTextDocumentParams {
1738 text_document: lsp::TextDocumentItem::new(
1739 uri,
1740 language_id.unwrap_or_default(),
1741 0,
1742 initial_snapshot.text(),
1743 ),
1744 },
1746 )
1747 .log_err();
1748 buffer_handle.update(cx, |buffer, cx| {
1749 buffer.set_completion_triggers(
1750 server
1751 .capabilities()
1752 .completion_provider
1753 .as_ref()
1754 .and_then(|provider| provider.trigger_characters.clone())
1755 .unwrap_or_default(),
1756 cx,
1757 )
1758 });
1759 self.buffer_snapshots
1760 .insert(buffer_id, vec![(0, initial_snapshot)]);
1761 }
1762 }
1763 }
1764 }
1765
1766 fn unregister_buffer_from_language_server(
1767 &mut self,
1768 buffer: &ModelHandle<Buffer>,
1769 old_path: PathBuf,
1770 cx: &mut ModelContext<Self>,
1771 ) {
1772 buffer.update(cx, |buffer, cx| {
1773 buffer.update_diagnostics(Default::default(), cx);
1774 self.buffer_snapshots.remove(&buffer.remote_id());
1775 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1776 language_server
1777 .notify::<lsp::notification::DidCloseTextDocument>(
1778 lsp::DidCloseTextDocumentParams {
1779 text_document: lsp::TextDocumentIdentifier::new(
1780 lsp::Url::from_file_path(old_path).unwrap(),
1781 ),
1782 },
1783 )
1784 .log_err();
1785 }
1786 });
1787 }
1788
1789 fn on_buffer_event(
1790 &mut self,
1791 buffer: ModelHandle<Buffer>,
1792 event: &BufferEvent,
1793 cx: &mut ModelContext<Self>,
1794 ) -> Option<()> {
1795 match event {
1796 BufferEvent::Operation(operation) => {
1797 if let Some(project_id) = self.shared_remote_id() {
1798 let request = self.client.request(proto::UpdateBuffer {
1799 project_id,
1800 buffer_id: buffer.read(cx).remote_id(),
1801 operations: vec![language::proto::serialize_operation(&operation)],
1802 });
1803 cx.background().spawn(request).detach_and_log_err(cx);
1804 } else if let Some(project_id) = self.remote_id() {
1805 let _ = self
1806 .client
1807 .send(proto::RegisterProjectActivity { project_id });
1808 }
1809 }
1810 BufferEvent::Edited { .. } => {
1811 let (_, language_server) = self
1812 .language_server_for_buffer(buffer.read(cx), cx)?
1813 .clone();
1814 let buffer = buffer.read(cx);
1815 let file = File::from_dyn(buffer.file())?;
1816 let abs_path = file.as_local()?.abs_path(cx);
1817 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1818 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1819 let (version, prev_snapshot) = buffer_snapshots.last()?;
1820 let next_snapshot = buffer.text_snapshot();
1821 let next_version = version + 1;
1822
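                // Translate every edit made since the previously-sent snapshot into an
                // LSP incremental content change. Edits are reported in order, so each
                // change's start position is taken from the new snapshot (which already
                // reflects the earlier edits in the batch) and extended by the length of
                // the text that was replaced; the replacement text itself comes from the
                // new snapshot.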
1823 let content_changes = buffer
1824 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1825 .map(|edit| {
1826 let edit_start = edit.new.start.0;
1827 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1828 let new_text = next_snapshot
1829 .text_for_range(edit.new.start.1..edit.new.end.1)
1830 .collect();
1831 lsp::TextDocumentContentChangeEvent {
1832 range: Some(lsp::Range::new(
1833 point_to_lsp(edit_start),
1834 point_to_lsp(edit_end),
1835 )),
1836 range_length: None,
1837 text: new_text,
1838 }
1839 })
1840 .collect();
1841
1842 buffer_snapshots.push((next_version, next_snapshot));
1843
1844 language_server
1845 .notify::<lsp::notification::DidChangeTextDocument>(
1846 lsp::DidChangeTextDocumentParams {
1847 text_document: lsp::VersionedTextDocumentIdentifier::new(
1848 uri,
1849 next_version,
1850 ),
1851 content_changes,
1852 },
1853 )
1854 .log_err();
1855 }
1856 BufferEvent::Saved => {
1857 let file = File::from_dyn(buffer.read(cx).file())?;
1858 let worktree_id = file.worktree_id(cx);
1859 let abs_path = file.as_local()?.abs_path(cx);
1860 let text_document = lsp::TextDocumentIdentifier {
1861 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1862 };
1863
1864 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1865 server
1866 .notify::<lsp::notification::DidSaveTextDocument>(
1867 lsp::DidSaveTextDocumentParams {
1868 text_document: text_document.clone(),
1869 text: None,
1870 },
1871 )
1872 .log_err();
1873 }
1874
1875 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1876 // that don't support a disk-based progress token.
1877 let (lsp_adapter, language_server) =
1878 self.language_server_for_buffer(buffer.read(cx), cx)?;
1879 if lsp_adapter
1880 .disk_based_diagnostics_progress_token()
1881 .is_none()
1882 {
1883 let server_id = language_server.server_id();
1884 self.disk_based_diagnostics_finished(server_id, cx);
1885 self.broadcast_language_server_update(
1886 server_id,
1887 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1888 proto::LspDiskBasedDiagnosticsUpdated {},
1889 ),
1890 );
1891 }
1892 }
1893 _ => {}
1894 }
1895
1896 None
1897 }
1898
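    // Iterate over the language servers that have been started for the given worktree.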
1899 fn language_servers_for_worktree(
1900 &self,
1901 worktree_id: WorktreeId,
1902 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1903 self.language_servers.iter().filter_map(
1904 move |((language_server_worktree_id, _), server)| {
1905 if *language_server_worktree_id == worktree_id {
1906 Some(server)
1907 } else {
1908 None
1909 }
1910 },
1911 )
1912 }
1913
1914 fn assign_language_to_buffer(
1915 &mut self,
1916 buffer: &ModelHandle<Buffer>,
1917 cx: &mut ModelContext<Self>,
1918 ) -> Option<()> {
1919 // If a language can be selected for this buffer based on its path, set it and start the language server if we haven't already.
1920 let full_path = buffer.read(cx).file()?.full_path(cx);
1921 let language = self.languages.select_language(&full_path)?;
1922 buffer.update(cx, |buffer, cx| {
1923 buffer.set_language(Some(language.clone()), cx);
1924 });
1925
1926 let file = File::from_dyn(buffer.read(cx).file())?;
1927 let worktree = file.worktree.read(cx).as_local()?;
1928 let worktree_id = worktree.id();
1929 let worktree_abs_path = worktree.abs_path().clone();
1930 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1931
1932 None
1933 }
1934
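    // Launch a language server for the given worktree and language (unless one was already
    // started), register its request and notification handlers, and tell it about matching
    // buffers that are already open.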
1935 fn start_language_server(
1936 &mut self,
1937 worktree_id: WorktreeId,
1938 worktree_path: Arc<Path>,
1939 language: Arc<Language>,
1940 cx: &mut ModelContext<Self>,
1941 ) {
1942 if !cx
1943 .global::<Settings>()
1944 .enable_language_server(Some(&language.name()))
1945 {
1946 return;
1947 }
1948
1949 let adapter = if let Some(adapter) = language.lsp_adapter() {
1950 adapter
1951 } else {
1952 return;
1953 };
1954 let key = (worktree_id, adapter.name());
1955 self.started_language_servers
1956 .entry(key.clone())
1957 .or_insert_with(|| {
1958 let server_id = post_inc(&mut self.next_language_server_id);
1959 let language_server = self.languages.start_language_server(
1960 server_id,
1961 language.clone(),
1962 worktree_path,
1963 self.client.http_client(),
1964 cx,
1965 );
1966 cx.spawn_weak(|this, mut cx| async move {
1967 let language_server = language_server?.await.log_err()?;
1968 let language_server = language_server
1969 .initialize(adapter.initialization_options())
1970 .await
1971 .log_err()?;
1972 let this = this.upgrade(&cx)?;
1973 let disk_based_diagnostics_progress_token =
1974 adapter.disk_based_diagnostics_progress_token();
1975
1976 language_server
1977 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1978 let this = this.downgrade();
1979 let adapter = adapter.clone();
1980 move |params, mut cx| {
1981 if let Some(this) = this.upgrade(&cx) {
1982 this.update(&mut cx, |this, cx| {
1983 this.on_lsp_diagnostics_published(
1984 server_id, params, &adapter, cx,
1985 );
1986 });
1987 }
1988 }
1989 })
1990 .detach();
1991
1992 language_server
1993 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1994 let settings = this
1995 .read_with(&cx, |this, _| this.language_server_settings.clone());
1996 move |params, _| {
1997 let settings = settings.lock().clone();
1998 async move {
1999 Ok(params
2000 .items
2001 .into_iter()
2002 .map(|item| {
2003 if let Some(section) = &item.section {
2004 settings
2005 .get(section)
2006 .cloned()
2007 .unwrap_or(serde_json::Value::Null)
2008 } else {
2009 settings.clone()
2010 }
2011 })
2012 .collect())
2013 }
2014 }
2015 })
2016 .detach();
2017
2018 // Even though we don't handle these requests, respond to them to avoid
2019 // stalling any language server such as `gopls`, which waits for a response
2020 // to these requests when initializing.
2021 language_server
2022 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
2023 Ok(())
2024 })
2025 .detach();
2026 language_server
2027 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2028 Ok(())
2029 })
2030 .detach();
2031
2032 language_server
2033 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2034 let this = this.downgrade();
2035 let adapter = adapter.clone();
2036 let language_server = language_server.clone();
2037 move |params, cx| {
2038 Self::on_lsp_workspace_edit(
2039 this,
2040 params,
2041 server_id,
2042 adapter.clone(),
2043 language_server.clone(),
2044 cx,
2045 )
2046 }
2047 })
2048 .detach();
2049
2050 language_server
2051 .on_notification::<lsp::notification::Progress, _>({
2052 let this = this.downgrade();
2053 move |params, mut cx| {
2054 if let Some(this) = this.upgrade(&cx) {
2055 this.update(&mut cx, |this, cx| {
2056 this.on_lsp_progress(
2057 params,
2058 server_id,
2059 disk_based_diagnostics_progress_token,
2060 cx,
2061 );
2062 });
2063 }
2064 }
2065 })
2066 .detach();
2067
2068 this.update(&mut cx, |this, cx| {
2069 this.language_servers
2070 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2071 this.language_server_statuses.insert(
2072 server_id,
2073 LanguageServerStatus {
2074 name: language_server.name().to_string(),
2075 pending_work: Default::default(),
2076 pending_diagnostic_updates: 0,
2077 },
2078 );
2079 language_server
2080 .notify::<lsp::notification::DidChangeConfiguration>(
2081 lsp::DidChangeConfigurationParams {
2082 settings: this.language_server_settings.lock().clone(),
2083 },
2084 )
2085 .ok();
2086
2087 if let Some(project_id) = this.shared_remote_id() {
2088 this.client
2089 .send(proto::StartLanguageServer {
2090 project_id,
2091 server: Some(proto::LanguageServer {
2092 id: server_id as u64,
2093 name: language_server.name().to_string(),
2094 }),
2095 })
2096 .log_err();
2097 }
2098
2099 // Tell the language server about every open buffer in the worktree that matches the language.
2100 for buffer in this.opened_buffers.values() {
2101 if let Some(buffer_handle) = buffer.upgrade(cx) {
2102 let buffer = buffer_handle.read(cx);
2103 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2104 file
2105 } else {
2106 continue;
2107 };
2108 let language = if let Some(language) = buffer.language() {
2109 language
2110 } else {
2111 continue;
2112 };
2113 if file.worktree.read(cx).id() != key.0
2114 || language.lsp_adapter().map(|a| a.name())
2115 != Some(key.1.clone())
2116 {
2117 continue;
2118 }
2119
2120 let file = file.as_local()?;
2121 let versions = this
2122 .buffer_snapshots
2123 .entry(buffer.remote_id())
2124 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2125 let (version, initial_snapshot) = versions.last().unwrap();
2126 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2127 let language_id = adapter.id_for_language(language.name().as_ref());
2128 language_server
2129 .notify::<lsp::notification::DidOpenTextDocument>(
2130 lsp::DidOpenTextDocumentParams {
2131 text_document: lsp::TextDocumentItem::new(
2132 uri,
2133 language_id.unwrap_or_default(),
2134 *version,
2135 initial_snapshot.text(),
2136 ),
2137 },
2138 )
2139 .log_err()?;
2140 buffer_handle.update(cx, |buffer, cx| {
2141 buffer.set_completion_triggers(
2142 language_server
2143 .capabilities()
2144 .completion_provider
2145 .as_ref()
2146 .and_then(|provider| {
2147 provider.trigger_characters.clone()
2148 })
2149 .unwrap_or(Vec::new()),
2150 cx,
2151 )
2152 });
2153 }
2154 }
2155
2156 cx.notify();
2157 Some(())
2158 });
2159
2160 Some(language_server)
2161 })
2162 });
2163 }
2164
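    // Remove and shut down the language server registered for this worktree and adapter,
    // clearing its status entry once shutdown completes.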
2165 fn stop_language_server(
2166 &mut self,
2167 worktree_id: WorktreeId,
2168 adapter_name: LanguageServerName,
2169 cx: &mut ModelContext<Self>,
2170 ) -> Task<()> {
2171 let key = (worktree_id, adapter_name);
2172 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2173 self.language_server_statuses
2174 .remove(&language_server.server_id());
2175 cx.notify();
2176 }
2177
2178 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2179 cx.spawn_weak(|this, mut cx| async move {
2180 if let Some(language_server) = started_language_server.await {
2181 if let Some(shutdown) = language_server.shutdown() {
2182 shutdown.await;
2183 }
2184
2185 if let Some(this) = this.upgrade(&cx) {
2186 this.update(&mut cx, |this, cx| {
2187 this.language_server_statuses
2188 .remove(&language_server.server_id());
2189 cx.notify();
2190 });
2191 }
2192 }
2193 })
2194 } else {
2195 Task::ready(())
2196 }
2197 }
2198
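    // Restart the language servers backing the given buffers, grouped by worktree and language.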
2199 pub fn restart_language_servers_for_buffers(
2200 &mut self,
2201 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2202 cx: &mut ModelContext<Self>,
2203 ) -> Option<()> {
2204 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2205 .into_iter()
2206 .filter_map(|buffer| {
2207 let file = File::from_dyn(buffer.read(cx).file())?;
2208 let worktree = file.worktree.read(cx).as_local()?;
2209 let worktree_id = worktree.id();
2210 let worktree_abs_path = worktree.abs_path().clone();
2211 let full_path = file.full_path(cx);
2212 Some((worktree_id, worktree_abs_path, full_path))
2213 })
2214 .collect();
2215 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2216 let language = self.languages.select_language(&full_path)?;
2217 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2218 }
2219
2220 None
2221 }
2222
2223 fn restart_language_server(
2224 &mut self,
2225 worktree_id: WorktreeId,
2226 worktree_path: Arc<Path>,
2227 language: Arc<Language>,
2228 cx: &mut ModelContext<Self>,
2229 ) {
2230 let adapter = if let Some(adapter) = language.lsp_adapter() {
2231 adapter
2232 } else {
2233 return;
2234 };
2235
2236 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2237 cx.spawn_weak(|this, mut cx| async move {
2238 stop.await;
2239 if let Some(this) = this.upgrade(&cx) {
2240 this.update(&mut cx, |this, cx| {
2241 this.start_language_server(worktree_id, worktree_path, language, cx);
2242 });
2243 }
2244 })
2245 .detach();
2246 }
2247
2248 fn on_lsp_diagnostics_published(
2249 &mut self,
2250 server_id: usize,
2251 mut params: lsp::PublishDiagnosticsParams,
2252 adapter: &Arc<dyn LspAdapter>,
2253 cx: &mut ModelContext<Self>,
2254 ) {
2255 adapter.process_diagnostics(&mut params);
2256 self.update_diagnostics(
2257 server_id,
2258 params,
2259 adapter.disk_based_diagnostic_sources(),
2260 cx,
2261 )
2262 .log_err();
2263 }
2264
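    // Handle `$/progress` notifications from a language server, tracking disk-based
    // diagnostic updates separately from ordinary work progress.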
2265 fn on_lsp_progress(
2266 &mut self,
2267 progress: lsp::ProgressParams,
2268 server_id: usize,
2269 disk_based_diagnostics_progress_token: Option<&str>,
2270 cx: &mut ModelContext<Self>,
2271 ) {
2272 let token = match progress.token {
2273 lsp::NumberOrString::String(token) => token,
2274 lsp::NumberOrString::Number(token) => {
2275 log::info!("skipping numeric progress token {}", token);
2276 return;
2277 }
2278 };
2279 let progress = match progress.value {
2280 lsp::ProgressParamsValue::WorkDone(value) => value,
2281 };
2282 let language_server_status =
2283 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2284 status
2285 } else {
2286 return;
2287 };
2288 match progress {
2289 lsp::WorkDoneProgress::Begin(report) => {
2290 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2291 language_server_status.pending_diagnostic_updates += 1;
2292 if language_server_status.pending_diagnostic_updates == 1 {
2293 self.disk_based_diagnostics_started(server_id, cx);
2294 self.broadcast_language_server_update(
2295 server_id,
2296 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2297 proto::LspDiskBasedDiagnosticsUpdating {},
2298 ),
2299 );
2300 }
2301 } else {
2302 self.on_lsp_work_start(
2303 server_id,
2304 token.clone(),
2305 LanguageServerProgress {
2306 message: report.message.clone(),
2307 percentage: report.percentage.map(|p| p as usize),
2308 last_update_at: Instant::now(),
2309 },
2310 cx,
2311 );
2312 self.broadcast_language_server_update(
2313 server_id,
2314 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2315 token,
2316 message: report.message,
2317 percentage: report.percentage.map(|p| p as u32),
2318 }),
2319 );
2320 }
2321 }
2322 lsp::WorkDoneProgress::Report(report) => {
2323 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2324 self.on_lsp_work_progress(
2325 server_id,
2326 token.clone(),
2327 LanguageServerProgress {
2328 message: report.message.clone(),
2329 percentage: report.percentage.map(|p| p as usize),
2330 last_update_at: Instant::now(),
2331 },
2332 cx,
2333 );
2334 self.broadcast_language_server_update(
2335 server_id,
2336 proto::update_language_server::Variant::WorkProgress(
2337 proto::LspWorkProgress {
2338 token,
2339 message: report.message,
2340 percentage: report.percentage.map(|p| p as u32),
2341 },
2342 ),
2343 );
2344 }
2345 }
2346 lsp::WorkDoneProgress::End(_) => {
2347 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2348 language_server_status.pending_diagnostic_updates -= 1;
2349 if language_server_status.pending_diagnostic_updates == 0 {
2350 self.disk_based_diagnostics_finished(server_id, cx);
2351 self.broadcast_language_server_update(
2352 server_id,
2353 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2354 proto::LspDiskBasedDiagnosticsUpdated {},
2355 ),
2356 );
2357 }
2358 } else {
2359 self.on_lsp_work_end(server_id, token.clone(), cx);
2360 self.broadcast_language_server_update(
2361 server_id,
2362 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2363 token,
2364 }),
2365 );
2366 }
2367 }
2368 }
2369 }
2370
2371 fn on_lsp_work_start(
2372 &mut self,
2373 language_server_id: usize,
2374 token: String,
2375 progress: LanguageServerProgress,
2376 cx: &mut ModelContext<Self>,
2377 ) {
2378 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2379 status.pending_work.insert(token, progress);
2380 cx.notify();
2381 }
2382 }
2383
2384 fn on_lsp_work_progress(
2385 &mut self,
2386 language_server_id: usize,
2387 token: String,
2388 progress: LanguageServerProgress,
2389 cx: &mut ModelContext<Self>,
2390 ) {
2391 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2392 let entry = status
2393 .pending_work
2394 .entry(token)
2395 .or_insert(LanguageServerProgress {
2396 message: Default::default(),
2397 percentage: Default::default(),
2398 last_update_at: progress.last_update_at,
2399 });
2400 if progress.message.is_some() {
2401 entry.message = progress.message;
2402 }
2403 if progress.percentage.is_some() {
2404 entry.percentage = progress.percentage;
2405 }
2406 entry.last_update_at = progress.last_update_at;
2407 cx.notify();
2408 }
2409 }
2410
2411 fn on_lsp_work_end(
2412 &mut self,
2413 language_server_id: usize,
2414 token: String,
2415 cx: &mut ModelContext<Self>,
2416 ) {
2417 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2418 status.pending_work.remove(&token);
2419 cx.notify();
2420 }
2421 }
2422
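    // Handle a `workspace/applyEdit` request from a language server by applying the edit
    // and remembering the resulting transaction for that server.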
2423 async fn on_lsp_workspace_edit(
2424 this: WeakModelHandle<Self>,
2425 params: lsp::ApplyWorkspaceEditParams,
2426 server_id: usize,
2427 adapter: Arc<dyn LspAdapter>,
2428 language_server: Arc<LanguageServer>,
2429 mut cx: AsyncAppContext,
2430 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2431 let this = this
2432 .upgrade(&cx)
2433 .ok_or_else(|| anyhow!("project closed"))?;
2434 let transaction = Self::deserialize_workspace_edit(
2435 this.clone(),
2436 params.edit,
2437 true,
2438 adapter.clone(),
2439 language_server.clone(),
2440 &mut cx,
2441 )
2442 .await
2443 .log_err();
2444 this.update(&mut cx, |this, _| {
2445 if let Some(transaction) = transaction {
2446 this.last_workspace_edits_by_language_server
2447 .insert(server_id, transaction);
2448 }
2449 });
2450 Ok(lsp::ApplyWorkspaceEditResponse {
2451 applied: true,
2452 failed_change: None,
2453 failure_reason: None,
2454 })
2455 }
2456
2457 fn broadcast_language_server_update(
2458 &self,
2459 language_server_id: usize,
2460 event: proto::update_language_server::Variant,
2461 ) {
2462 if let Some(project_id) = self.shared_remote_id() {
2463 self.client
2464 .send(proto::UpdateLanguageServer {
2465 project_id,
2466 language_server_id: language_server_id as u64,
2467 variant: Some(event),
2468 })
2469 .log_err();
2470 }
2471 }
2472
2473 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2474 for (_, server) in self.language_servers.values() {
2475 server
2476 .notify::<lsp::notification::DidChangeConfiguration>(
2477 lsp::DidChangeConfigurationParams {
2478 settings: settings.clone(),
2479 },
2480 )
2481 .ok();
2482 }
2483 *self.language_server_settings.lock() = settings;
2484 }
2485
2486 pub fn language_server_statuses(
2487 &self,
2488 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2489 self.language_server_statuses.values()
2490 }
2491
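    // Convert an LSP `publishDiagnostics` notification into diagnostic entries, grouping
    // related information under its primary diagnostic, and apply them to the affected path.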
2492 pub fn update_diagnostics(
2493 &mut self,
2494 language_server_id: usize,
2495 params: lsp::PublishDiagnosticsParams,
2496 disk_based_sources: &[&str],
2497 cx: &mut ModelContext<Self>,
2498 ) -> Result<()> {
2499 let abs_path = params
2500 .uri
2501 .to_file_path()
2502 .map_err(|_| anyhow!("URI is not a file"))?;
2503 let mut diagnostics = Vec::default();
2504 let mut primary_diagnostic_group_ids = HashMap::default();
2505 let mut sources_by_group_id = HashMap::default();
2506 let mut supporting_diagnostics = HashMap::default();
2507 for diagnostic in &params.diagnostics {
2508 let source = diagnostic.source.as_ref();
2509 let code = diagnostic.code.as_ref().map(|code| match code {
2510 lsp::NumberOrString::Number(code) => code.to_string(),
2511 lsp::NumberOrString::String(code) => code.clone(),
2512 });
2513 let range = range_from_lsp(diagnostic.range);
2514 let is_supporting = diagnostic
2515 .related_information
2516 .as_ref()
2517 .map_or(false, |infos| {
2518 infos.iter().any(|info| {
2519 primary_diagnostic_group_ids.contains_key(&(
2520 source,
2521 code.clone(),
2522 range_from_lsp(info.location.range),
2523 ))
2524 })
2525 });
2526
2527 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2528 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2529 });
2530
2531 if is_supporting {
2532 supporting_diagnostics.insert(
2533 (source, code.clone(), range),
2534 (diagnostic.severity, is_unnecessary),
2535 );
2536 } else {
2537 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2538 let is_disk_based = source.map_or(false, |source| {
2539 disk_based_sources.contains(&source.as_str())
2540 });
2541
2542 sources_by_group_id.insert(group_id, source);
2543 primary_diagnostic_group_ids
2544 .insert((source, code.clone(), range.clone()), group_id);
2545
2546 diagnostics.push(DiagnosticEntry {
2547 range,
2548 diagnostic: Diagnostic {
2549 code: code.clone(),
2550 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2551 message: diagnostic.message.clone(),
2552 group_id,
2553 is_primary: true,
2554 is_valid: true,
2555 is_disk_based,
2556 is_unnecessary,
2557 },
2558 });
2559 if let Some(infos) = &diagnostic.related_information {
2560 for info in infos {
2561 if info.location.uri == params.uri && !info.message.is_empty() {
2562 let range = range_from_lsp(info.location.range);
2563 diagnostics.push(DiagnosticEntry {
2564 range,
2565 diagnostic: Diagnostic {
2566 code: code.clone(),
2567 severity: DiagnosticSeverity::INFORMATION,
2568 message: info.message.clone(),
2569 group_id,
2570 is_primary: false,
2571 is_valid: true,
2572 is_disk_based,
2573 is_unnecessary: false,
2574 },
2575 });
2576 }
2577 }
2578 }
2579 }
2580 }
2581
2582 for entry in &mut diagnostics {
2583 let diagnostic = &mut entry.diagnostic;
2584 if !diagnostic.is_primary {
2585 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2586 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2587 source,
2588 diagnostic.code.clone(),
2589 entry.range.clone(),
2590 )) {
2591 if let Some(severity) = severity {
2592 diagnostic.severity = severity;
2593 }
2594 diagnostic.is_unnecessary = is_unnecessary;
2595 }
2596 }
2597 }
2598
2599 self.update_diagnostic_entries(
2600 language_server_id,
2601 abs_path,
2602 params.version,
2603 diagnostics,
2604 cx,
2605 )?;
2606 Ok(())
2607 }
2608
2609 pub fn update_diagnostic_entries(
2610 &mut self,
2611 language_server_id: usize,
2612 abs_path: PathBuf,
2613 version: Option<i32>,
2614 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2615 cx: &mut ModelContext<Project>,
2616 ) -> Result<(), anyhow::Error> {
2617 let (worktree, relative_path) = self
2618 .find_local_worktree(&abs_path, cx)
2619 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2620 if !worktree.read(cx).is_visible() {
2621 return Ok(());
2622 }
2623
2624 let project_path = ProjectPath {
2625 worktree_id: worktree.read(cx).id(),
2626 path: relative_path.into(),
2627 };
2628 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2629 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2630 }
2631
2632 let updated = worktree.update(cx, |worktree, cx| {
2633 worktree
2634 .as_local_mut()
2635 .ok_or_else(|| anyhow!("not a local worktree"))?
2636 .update_diagnostics(
2637 language_server_id,
2638 project_path.path.clone(),
2639 diagnostics,
2640 cx,
2641 )
2642 })?;
2643 if updated {
2644 cx.emit(Event::DiagnosticsUpdated {
2645 language_server_id,
2646 path: project_path,
2647 });
2648 }
2649 Ok(())
2650 }
2651
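    // Sort incoming diagnostics, map disk-based ranges through unsaved edits, clip them to
    // the buffer snapshot, and store the resulting set on the buffer.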
2652 fn update_buffer_diagnostics(
2653 &mut self,
2654 buffer: &ModelHandle<Buffer>,
2655 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2656 version: Option<i32>,
2657 cx: &mut ModelContext<Self>,
2658 ) -> Result<()> {
2659 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2660 Ordering::Equal
2661 .then_with(|| b.is_primary.cmp(&a.is_primary))
2662 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2663 .then_with(|| a.severity.cmp(&b.severity))
2664 .then_with(|| a.message.cmp(&b.message))
2665 }
2666
2667 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2668
2669 diagnostics.sort_unstable_by(|a, b| {
2670 Ordering::Equal
2671 .then_with(|| a.range.start.cmp(&b.range.start))
2672 .then_with(|| b.range.end.cmp(&a.range.end))
2673 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2674 });
2675
2676 let mut sanitized_diagnostics = Vec::new();
2677 let edits_since_save = Patch::new(
2678 snapshot
2679 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2680 .collect(),
2681 );
2682 for entry in diagnostics {
2683 let start;
2684 let end;
2685 if entry.diagnostic.is_disk_based {
2686 // Some diagnostics are based on files on disk instead of buffers'
2687 // current contents. Adjust these diagnostics' ranges to reflect
2688 // any unsaved edits.
2689 start = edits_since_save.old_to_new(entry.range.start);
2690 end = edits_since_save.old_to_new(entry.range.end);
2691 } else {
2692 start = entry.range.start;
2693 end = entry.range.end;
2694 }
2695
2696 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2697 ..snapshot.clip_point_utf16(end, Bias::Right);
2698
2699 // Expand empty ranges by one character
2700 if range.start == range.end {
2701 range.end.column += 1;
2702 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2703 if range.start == range.end && range.end.column > 0 {
2704 range.start.column -= 1;
2705 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2706 }
2707 }
2708
2709 sanitized_diagnostics.push(DiagnosticEntry {
2710 range,
2711 diagnostic: entry.diagnostic,
2712 });
2713 }
2714 drop(edits_since_save);
2715
2716 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2717 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2718 Ok(())
2719 }
2720
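    // Reload dirty buffers from disk, forwarding remote buffers to the host and reloading
    // local buffers directly.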
2721 pub fn reload_buffers(
2722 &self,
2723 buffers: HashSet<ModelHandle<Buffer>>,
2724 push_to_history: bool,
2725 cx: &mut ModelContext<Self>,
2726 ) -> Task<Result<ProjectTransaction>> {
2727 let mut local_buffers = Vec::new();
2728 let mut remote_buffers = None;
2729 for buffer_handle in buffers {
2730 let buffer = buffer_handle.read(cx);
2731 if buffer.is_dirty() {
2732 if let Some(file) = File::from_dyn(buffer.file()) {
2733 if file.is_local() {
2734 local_buffers.push(buffer_handle);
2735 } else {
2736 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2737 }
2738 }
2739 }
2740 }
2741
2742 let remote_buffers = self.remote_id().zip(remote_buffers);
2743 let client = self.client.clone();
2744
2745 cx.spawn(|this, mut cx| async move {
2746 let mut project_transaction = ProjectTransaction::default();
2747
2748 if let Some((project_id, remote_buffers)) = remote_buffers {
2749 let response = client
2750 .request(proto::ReloadBuffers {
2751 project_id,
2752 buffer_ids: remote_buffers
2753 .iter()
2754 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2755 .collect(),
2756 })
2757 .await?
2758 .transaction
2759 .ok_or_else(|| anyhow!("missing transaction"))?;
2760 project_transaction = this
2761 .update(&mut cx, |this, cx| {
2762 this.deserialize_project_transaction(response, push_to_history, cx)
2763 })
2764 .await?;
2765 }
2766
2767 for buffer in local_buffers {
2768 let transaction = buffer
2769 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2770 .await?;
2771 buffer.update(&mut cx, |buffer, cx| {
2772 if let Some(transaction) = transaction {
2773 if !push_to_history {
2774 buffer.forget_transaction(transaction.id);
2775 }
2776 project_transaction.0.insert(cx.handle(), transaction);
2777 }
2778 });
2779 }
2780
2781 Ok(project_transaction)
2782 })
2783 }
2784
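    // Format the given buffers: remote buffers are formatted via the host, while local
    // buffers use their language server's document or range formatting request.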
2785 pub fn format(
2786 &self,
2787 buffers: HashSet<ModelHandle<Buffer>>,
2788 push_to_history: bool,
2789 cx: &mut ModelContext<Project>,
2790 ) -> Task<Result<ProjectTransaction>> {
2791 let mut local_buffers = Vec::new();
2792 let mut remote_buffers = None;
2793 for buffer_handle in buffers {
2794 let buffer = buffer_handle.read(cx);
2795 if let Some(file) = File::from_dyn(buffer.file()) {
2796 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2797 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2798 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2799 }
2800 } else {
2801 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2802 }
2803 } else {
2804 return Task::ready(Ok(Default::default()));
2805 }
2806 }
2807
2808 let remote_buffers = self.remote_id().zip(remote_buffers);
2809 let client = self.client.clone();
2810
2811 cx.spawn(|this, mut cx| async move {
2812 let mut project_transaction = ProjectTransaction::default();
2813
2814 if let Some((project_id, remote_buffers)) = remote_buffers {
2815 let response = client
2816 .request(proto::FormatBuffers {
2817 project_id,
2818 buffer_ids: remote_buffers
2819 .iter()
2820 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2821 .collect(),
2822 })
2823 .await?
2824 .transaction
2825 .ok_or_else(|| anyhow!("missing transaction"))?;
2826 project_transaction = this
2827 .update(&mut cx, |this, cx| {
2828 this.deserialize_project_transaction(response, push_to_history, cx)
2829 })
2830 .await?;
2831 }
2832
2833 for (buffer, buffer_abs_path, language_server) in local_buffers {
2834 let text_document = lsp::TextDocumentIdentifier::new(
2835 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2836 );
2837 let capabilities = &language_server.capabilities();
2838 let tab_size = cx.update(|cx| {
2839 let language_name = buffer.read(cx).language().map(|language| language.name());
2840 cx.global::<Settings>().tab_size(language_name.as_deref())
2841 });
2842 let lsp_edits = if capabilities
2843 .document_formatting_provider
2844 .as_ref()
2845 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2846 {
2847 language_server
2848 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2849 text_document,
2850 options: lsp::FormattingOptions {
2851 tab_size,
2852 insert_spaces: true,
2853 insert_final_newline: Some(true),
2854 ..Default::default()
2855 },
2856 work_done_progress_params: Default::default(),
2857 })
2858 .await?
2859 } else if capabilities
2860 .document_range_formatting_provider
2861 .as_ref()
2862 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2863 {
2864 let buffer_start = lsp::Position::new(0, 0);
2865 let buffer_end =
2866 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2867 language_server
2868 .request::<lsp::request::RangeFormatting>(
2869 lsp::DocumentRangeFormattingParams {
2870 text_document,
2871 range: lsp::Range::new(buffer_start, buffer_end),
2872 options: lsp::FormattingOptions {
2873 tab_size,
2874 insert_spaces: true,
2875 insert_final_newline: Some(true),
2876 ..Default::default()
2877 },
2878 work_done_progress_params: Default::default(),
2879 },
2880 )
2881 .await?
2882 } else {
2883 continue;
2884 };
2885
2886 if let Some(lsp_edits) = lsp_edits {
2887 let edits = this
2888 .update(&mut cx, |this, cx| {
2889 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2890 })
2891 .await?;
2892 buffer.update(&mut cx, |buffer, cx| {
2893 buffer.finalize_last_transaction();
2894 buffer.start_transaction();
2895 for (range, text) in edits {
2896 buffer.edit([(range, text)], cx);
2897 }
2898 if buffer.end_transaction(cx).is_some() {
2899 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2900 if !push_to_history {
2901 buffer.forget_transaction(transaction.id);
2902 }
2903 project_transaction.0.insert(cx.handle(), transaction);
2904 }
2905 });
2906 }
2907 }
2908
2909 Ok(project_transaction)
2910 })
2911 }
2912
2913 pub fn definition<T: ToPointUtf16>(
2914 &self,
2915 buffer: &ModelHandle<Buffer>,
2916 position: T,
2917 cx: &mut ModelContext<Self>,
2918 ) -> Task<Result<Vec<Location>>> {
2919 let position = position.to_point_utf16(buffer.read(cx));
2920 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2921 }
2922
2923 pub fn references<T: ToPointUtf16>(
2924 &self,
2925 buffer: &ModelHandle<Buffer>,
2926 position: T,
2927 cx: &mut ModelContext<Self>,
2928 ) -> Task<Result<Vec<Location>>> {
2929 let position = position.to_point_utf16(buffer.read(cx));
2930 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2931 }
2932
2933 pub fn document_highlights<T: ToPointUtf16>(
2934 &self,
2935 buffer: &ModelHandle<Buffer>,
2936 position: T,
2937 cx: &mut ModelContext<Self>,
2938 ) -> Task<Result<Vec<DocumentHighlight>>> {
2939 let position = position.to_point_utf16(buffer.read(cx));
2940
2941 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2942 }
2943
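    // Fetch workspace symbols matching `query` from every local language server, or from
    // the host when the project is remote.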
2944 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2945 if self.is_local() {
2946 let mut requests = Vec::new();
2947 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2948 let worktree_id = *worktree_id;
2949 if let Some(worktree) = self
2950 .worktree_for_id(worktree_id, cx)
2951 .and_then(|worktree| worktree.read(cx).as_local())
2952 {
2953 let lsp_adapter = lsp_adapter.clone();
2954 let worktree_abs_path = worktree.abs_path().clone();
2955 requests.push(
2956 language_server
2957 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2958 query: query.to_string(),
2959 ..Default::default()
2960 })
2961 .log_err()
2962 .map(move |response| {
2963 (
2964 lsp_adapter,
2965 worktree_id,
2966 worktree_abs_path,
2967 response.unwrap_or_default(),
2968 )
2969 }),
2970 );
2971 }
2972 }
2973
2974 cx.spawn_weak(|this, cx| async move {
2975 let responses = futures::future::join_all(requests).await;
2976 let this = if let Some(this) = this.upgrade(&cx) {
2977 this
2978 } else {
2979 return Ok(Default::default());
2980 };
2981 this.read_with(&cx, |this, cx| {
2982 let mut symbols = Vec::new();
2983 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2984 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2985 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2986 let mut worktree_id = source_worktree_id;
2987 let path;
2988 if let Some((worktree, rel_path)) =
2989 this.find_local_worktree(&abs_path, cx)
2990 {
2991 worktree_id = worktree.read(cx).id();
2992 path = rel_path;
2993 } else {
2994 path = relativize_path(&worktree_abs_path, &abs_path);
2995 }
2996
2997 let label = this
2998 .languages
2999 .select_language(&path)
3000 .and_then(|language| {
3001 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3002 })
3003 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3004 let signature = this.symbol_signature(worktree_id, &path);
3005
3006 Some(Symbol {
3007 source_worktree_id,
3008 worktree_id,
3009 language_server_name: adapter.name(),
3010 name: lsp_symbol.name,
3011 kind: lsp_symbol.kind,
3012 label,
3013 path,
3014 range: range_from_lsp(lsp_symbol.location.range),
3015 signature,
3016 })
3017 }));
3018 }
3019 Ok(symbols)
3020 })
3021 })
3022 } else if let Some(project_id) = self.remote_id() {
3023 let request = self.client.request(proto::GetProjectSymbols {
3024 project_id,
3025 query: query.to_string(),
3026 });
3027 cx.spawn_weak(|this, cx| async move {
3028 let response = request.await?;
3029 let mut symbols = Vec::new();
3030 if let Some(this) = this.upgrade(&cx) {
3031 this.read_with(&cx, |this, _| {
3032 symbols.extend(
3033 response
3034 .symbols
3035 .into_iter()
3036 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3037 );
3038 })
3039 }
3040 Ok(symbols)
3041 })
3042 } else {
3043 Task::ready(Ok(Default::default()))
3044 }
3045 }
3046
3047 pub fn open_buffer_for_symbol(
3048 &mut self,
3049 symbol: &Symbol,
3050 cx: &mut ModelContext<Self>,
3051 ) -> Task<Result<ModelHandle<Buffer>>> {
3052 if self.is_local() {
3053 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3054 symbol.source_worktree_id,
3055 symbol.language_server_name.clone(),
3056 )) {
3057 server.clone()
3058 } else {
3059 return Task::ready(Err(anyhow!(
3060 "language server for worktree and language not found"
3061 )));
3062 };
3063
3064 let worktree_abs_path = if let Some(worktree_abs_path) = self
3065 .worktree_for_id(symbol.worktree_id, cx)
3066 .and_then(|worktree| worktree.read(cx).as_local())
3067 .map(|local_worktree| local_worktree.abs_path())
3068 {
3069 worktree_abs_path
3070 } else {
3071 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3072 };
3073 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3074 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3075 uri
3076 } else {
3077 return Task::ready(Err(anyhow!("invalid symbol path")));
3078 };
3079
3080 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3081 } else if let Some(project_id) = self.remote_id() {
3082 let request = self.client.request(proto::OpenBufferForSymbol {
3083 project_id,
3084 symbol: Some(serialize_symbol(symbol)),
3085 });
3086 cx.spawn(|this, mut cx| async move {
3087 let response = request.await?;
3088 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3089 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3090 .await
3091 })
3092 } else {
3093 Task::ready(Err(anyhow!("project does not have a remote id")))
3094 }
3095 }
3096
3097 pub fn hover<T: ToPointUtf16>(
3098 &self,
3099 buffer: &ModelHandle<Buffer>,
3100 position: T,
3101 cx: &mut ModelContext<Self>,
3102 ) -> Task<Result<Option<Hover>>> {
3103 let position = position.to_point_utf16(buffer.read(cx));
3104 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3105 }
3106
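    // Request completions at the given position from the buffer's language server, or from
    // the host when the project is remote.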
3107 pub fn completions<T: ToPointUtf16>(
3108 &self,
3109 source_buffer_handle: &ModelHandle<Buffer>,
3110 position: T,
3111 cx: &mut ModelContext<Self>,
3112 ) -> Task<Result<Vec<Completion>>> {
3113 let source_buffer_handle = source_buffer_handle.clone();
3114 let source_buffer = source_buffer_handle.read(cx);
3115 let buffer_id = source_buffer.remote_id();
3116 let language = source_buffer.language().cloned();
3117 let worktree;
3118 let buffer_abs_path;
3119 if let Some(file) = File::from_dyn(source_buffer.file()) {
3120 worktree = file.worktree.clone();
3121 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3122 } else {
3123 return Task::ready(Ok(Default::default()));
3124 };
3125
3126 let position = position.to_point_utf16(source_buffer);
3127 let anchor = source_buffer.anchor_after(position);
3128
3129 if worktree.read(cx).as_local().is_some() {
3130 let buffer_abs_path = buffer_abs_path.unwrap();
3131 let (_, lang_server) =
3132 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3133 server.clone()
3134 } else {
3135 return Task::ready(Ok(Default::default()));
3136 };
3137
3138 cx.spawn(|_, cx| async move {
3139 let completions = lang_server
3140 .request::<lsp::request::Completion>(lsp::CompletionParams {
3141 text_document_position: lsp::TextDocumentPositionParams::new(
3142 lsp::TextDocumentIdentifier::new(
3143 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3144 ),
3145 point_to_lsp(position),
3146 ),
3147 context: Default::default(),
3148 work_done_progress_params: Default::default(),
3149 partial_result_params: Default::default(),
3150 })
3151 .await
3152 .context("lsp completion request failed")?;
3153
3154 let completions = if let Some(completions) = completions {
3155 match completions {
3156 lsp::CompletionResponse::Array(completions) => completions,
3157 lsp::CompletionResponse::List(list) => list.items,
3158 }
3159 } else {
3160 Default::default()
3161 };
3162
3163 source_buffer_handle.read_with(&cx, |this, _| {
3164 let snapshot = this.snapshot();
3165 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3166 let mut range_for_token = None;
3167 Ok(completions
3168 .into_iter()
3169 .filter_map(|lsp_completion| {
3170 // For now, we can only handle additional edits if they are returned
3171 // when resolving the completion, not if they are present initially.
3172 if lsp_completion
3173 .additional_text_edits
3174 .as_ref()
3175 .map_or(false, |edits| !edits.is_empty())
3176 {
3177 return None;
3178 }
3179
3180 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3181 // If the language server provides a range to overwrite, then
3182 // check that the range is valid.
3183 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3184 let range = range_from_lsp(edit.range);
3185 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3186 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3187 if start != range.start || end != range.end {
3188 log::info!("completion out of expected range");
3189 return None;
3190 }
3191 (
3192 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3193 edit.new_text.clone(),
3194 )
3195 }
3196 // If the language server does not provide a range, then infer
3197 // the range based on the syntax tree.
3198 None => {
3199 if position != clipped_position {
3200 log::info!("completion out of expected range");
3201 return None;
3202 }
3203 let Range { start, end } = range_for_token
3204 .get_or_insert_with(|| {
3205 let offset = position.to_offset(&snapshot);
3206 let (range, kind) = snapshot.surrounding_word(offset);
3207 if kind == Some(CharKind::Word) {
3208 range
3209 } else {
3210 offset..offset
3211 }
3212 })
3213 .clone();
3214 let text = lsp_completion
3215 .insert_text
3216 .as_ref()
3217 .unwrap_or(&lsp_completion.label)
3218 .clone();
3219 (
3220 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3221 text.clone(),
3222 )
3223 }
3224 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3225 log::info!("unsupported insert/replace completion");
3226 return None;
3227 }
3228 };
3229
3230 Some(Completion {
3231 old_range,
3232 new_text,
3233 label: language
3234 .as_ref()
3235 .and_then(|l| l.label_for_completion(&lsp_completion))
3236 .unwrap_or_else(|| {
3237 CodeLabel::plain(
3238 lsp_completion.label.clone(),
3239 lsp_completion.filter_text.as_deref(),
3240 )
3241 }),
3242 lsp_completion,
3243 })
3244 })
3245 .collect())
3246 })
3247 })
3248 } else if let Some(project_id) = self.remote_id() {
3249 let rpc = self.client.clone();
3250 let message = proto::GetCompletions {
3251 project_id,
3252 buffer_id,
3253 position: Some(language::proto::serialize_anchor(&anchor)),
3254 version: serialize_version(&source_buffer.version()),
3255 };
3256 cx.spawn_weak(|_, mut cx| async move {
3257 let response = rpc.request(message).await?;
3258
3259 source_buffer_handle
3260 .update(&mut cx, |buffer, _| {
3261 buffer.wait_for_version(deserialize_version(response.version))
3262 })
3263 .await;
3264
3265 response
3266 .completions
3267 .into_iter()
3268 .map(|completion| {
3269 language::proto::deserialize_completion(completion, language.as_ref())
3270 })
3271 .collect()
3272 })
3273 } else {
3274 Task::ready(Ok(Default::default()))
3275 }
3276 }
3277
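    // Resolve a completion item and apply any additional text edits it provides, returning
    // the resulting transaction.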
3278 pub fn apply_additional_edits_for_completion(
3279 &self,
3280 buffer_handle: ModelHandle<Buffer>,
3281 completion: Completion,
3282 push_to_history: bool,
3283 cx: &mut ModelContext<Self>,
3284 ) -> Task<Result<Option<Transaction>>> {
3285 let buffer = buffer_handle.read(cx);
3286 let buffer_id = buffer.remote_id();
3287
3288 if self.is_local() {
3289 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3290 {
3291 server.clone()
3292 } else {
3293 return Task::ready(Ok(Default::default()));
3294 };
3295
3296 cx.spawn(|this, mut cx| async move {
3297 let resolved_completion = lang_server
3298 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3299 .await?;
3300 if let Some(edits) = resolved_completion.additional_text_edits {
3301 let edits = this
3302 .update(&mut cx, |this, cx| {
3303 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3304 })
3305 .await?;
3306 buffer_handle.update(&mut cx, |buffer, cx| {
3307 buffer.finalize_last_transaction();
3308 buffer.start_transaction();
3309 for (range, text) in edits {
3310 buffer.edit([(range, text)], cx);
3311 }
3312 let transaction = if buffer.end_transaction(cx).is_some() {
3313 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3314 if !push_to_history {
3315 buffer.forget_transaction(transaction.id);
3316 }
3317 Some(transaction)
3318 } else {
3319 None
3320 };
3321 Ok(transaction)
3322 })
3323 } else {
3324 Ok(None)
3325 }
3326 })
3327 } else if let Some(project_id) = self.remote_id() {
3328 let client = self.client.clone();
3329 cx.spawn(|_, mut cx| async move {
3330 let response = client
3331 .request(proto::ApplyCompletionAdditionalEdits {
3332 project_id,
3333 buffer_id,
3334 completion: Some(language::proto::serialize_completion(&completion)),
3335 })
3336 .await?;
3337
3338 if let Some(transaction) = response.transaction {
3339 let transaction = language::proto::deserialize_transaction(transaction)?;
3340 buffer_handle
3341 .update(&mut cx, |buffer, _| {
3342 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3343 })
3344 .await;
3345 if push_to_history {
3346 buffer_handle.update(&mut cx, |buffer, _| {
3347 buffer.push_transaction(transaction.clone(), Instant::now());
3348 });
3349 }
3350 Ok(Some(transaction))
3351 } else {
3352 Ok(None)
3353 }
3354 })
3355 } else {
3356 Task::ready(Err(anyhow!("project does not have a remote id")))
3357 }
3358 }
3359
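    // Request code actions for the given range from the buffer's language server, or from
    // the host when the project is remote.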
3360 pub fn code_actions<T: Clone + ToOffset>(
3361 &self,
3362 buffer_handle: &ModelHandle<Buffer>,
3363 range: Range<T>,
3364 cx: &mut ModelContext<Self>,
3365 ) -> Task<Result<Vec<CodeAction>>> {
3366 let buffer_handle = buffer_handle.clone();
3367 let buffer = buffer_handle.read(cx);
3368 let snapshot = buffer.snapshot();
3369 let relevant_diagnostics = snapshot
3370 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3371 .map(|entry| entry.to_lsp_diagnostic_stub())
3372 .collect();
3373 let buffer_id = buffer.remote_id();
3374 let worktree;
3375 let buffer_abs_path;
3376 if let Some(file) = File::from_dyn(buffer.file()) {
3377 worktree = file.worktree.clone();
3378 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3379 } else {
3380 return Task::ready(Ok(Default::default()));
3381 };
3382 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3383
3384 if worktree.read(cx).as_local().is_some() {
3385 let buffer_abs_path = buffer_abs_path.unwrap();
3386 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3387 {
3388 server.clone()
3389 } else {
3390 return Task::ready(Ok(Default::default()));
3391 };
3392
3393 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3394 cx.foreground().spawn(async move {
3395 if lang_server.capabilities().code_action_provider.is_none() {
3396 return Ok(Default::default());
3397 }
3398
3399 Ok(lang_server
3400 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3401 text_document: lsp::TextDocumentIdentifier::new(
3402 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3403 ),
3404 range: lsp_range,
3405 work_done_progress_params: Default::default(),
3406 partial_result_params: Default::default(),
3407 context: lsp::CodeActionContext {
3408 diagnostics: relevant_diagnostics,
3409 only: Some(vec![
3410 lsp::CodeActionKind::QUICKFIX,
3411 lsp::CodeActionKind::REFACTOR,
3412 lsp::CodeActionKind::REFACTOR_EXTRACT,
3413 lsp::CodeActionKind::SOURCE,
3414 ]),
3415 },
3416 })
3417 .await?
3418 .unwrap_or_default()
3419 .into_iter()
3420 .filter_map(|entry| {
3421 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3422 Some(CodeAction {
3423 range: range.clone(),
3424 lsp_action,
3425 })
3426 } else {
3427 None
3428 }
3429 })
3430 .collect())
3431 })
3432 } else if let Some(project_id) = self.remote_id() {
3433 let rpc = self.client.clone();
3434 let version = buffer.version();
3435 cx.spawn_weak(|_, mut cx| async move {
3436 let response = rpc
3437 .request(proto::GetCodeActions {
3438 project_id,
3439 buffer_id,
3440 start: Some(language::proto::serialize_anchor(&range.start)),
3441 end: Some(language::proto::serialize_anchor(&range.end)),
3442 version: serialize_version(&version),
3443 })
3444 .await?;
3445
3446 buffer_handle
3447 .update(&mut cx, |buffer, _| {
3448 buffer.wait_for_version(deserialize_version(response.version))
3449 })
3450 .await;
3451
3452 response
3453 .actions
3454 .into_iter()
3455 .map(language::proto::deserialize_code_action)
3456 .collect()
3457 })
3458 } else {
3459 Task::ready(Ok(Default::default()))
3460 }
3461 }
3462
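    // Apply a code action, resolving it if necessary, then performing its workspace edit or
    // executing its command.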
3463 pub fn apply_code_action(
3464 &self,
3465 buffer_handle: ModelHandle<Buffer>,
3466 mut action: CodeAction,
3467 push_to_history: bool,
3468 cx: &mut ModelContext<Self>,
3469 ) -> Task<Result<ProjectTransaction>> {
3470 if self.is_local() {
3471 let buffer = buffer_handle.read(cx);
3472 let (lsp_adapter, lang_server) =
3473 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3474 server.clone()
3475 } else {
3476 return Task::ready(Ok(Default::default()));
3477 };
3478 let range = action.range.to_point_utf16(buffer);
3479
3480 cx.spawn(|this, mut cx| async move {
3481 if let Some(lsp_range) = action
3482 .lsp_action
3483 .data
3484 .as_mut()
3485 .and_then(|d| d.get_mut("codeActionParams"))
3486 .and_then(|d| d.get_mut("range"))
3487 {
3488 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3489 action.lsp_action = lang_server
3490 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3491 .await?;
3492 } else {
3493 let actions = this
3494 .update(&mut cx, |this, cx| {
3495 this.code_actions(&buffer_handle, action.range, cx)
3496 })
3497 .await?;
3498 action.lsp_action = actions
3499 .into_iter()
3500 .find(|a| a.lsp_action.title == action.lsp_action.title)
3501 .ok_or_else(|| anyhow!("code action is outdated"))?
3502 .lsp_action;
3503 }
3504
3505 if let Some(edit) = action.lsp_action.edit {
3506 Self::deserialize_workspace_edit(
3507 this,
3508 edit,
3509 push_to_history,
3510 lsp_adapter,
3511 lang_server,
3512 &mut cx,
3513 )
3514 .await
3515 } else if let Some(command) = action.lsp_action.command {
3516 this.update(&mut cx, |this, _| {
3517 this.last_workspace_edits_by_language_server
3518 .remove(&lang_server.server_id());
3519 });
3520 lang_server
3521 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3522 command: command.command,
3523 arguments: command.arguments.unwrap_or_default(),
3524 ..Default::default()
3525 })
3526 .await?;
3527 Ok(this.update(&mut cx, |this, _| {
3528 this.last_workspace_edits_by_language_server
3529 .remove(&lang_server.server_id())
3530 .unwrap_or_default()
3531 }))
3532 } else {
3533 Ok(ProjectTransaction::default())
3534 }
3535 })
3536 } else if let Some(project_id) = self.remote_id() {
3537 let client = self.client.clone();
3538 let request = proto::ApplyCodeAction {
3539 project_id,
3540 buffer_id: buffer_handle.read(cx).remote_id(),
3541 action: Some(language::proto::serialize_code_action(&action)),
3542 };
3543 cx.spawn(|this, mut cx| async move {
3544 let response = client
3545 .request(request)
3546 .await?
3547 .transaction
3548 .ok_or_else(|| anyhow!("missing transaction"))?;
3549 this.update(&mut cx, |this, cx| {
3550 this.deserialize_project_transaction(response, push_to_history, cx)
3551 })
3552 .await
3553 })
3554 } else {
3555 Task::ready(Err(anyhow!("project does not have a remote id")))
3556 }
3557 }
3558
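    // Apply an LSP workspace edit: perform resource operations (create, rename, delete) on
    // disk and apply text edits to the affected buffers, collecting one transaction per buffer.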
3559 async fn deserialize_workspace_edit(
3560 this: ModelHandle<Self>,
3561 edit: lsp::WorkspaceEdit,
3562 push_to_history: bool,
3563 lsp_adapter: Arc<dyn LspAdapter>,
3564 language_server: Arc<LanguageServer>,
3565 cx: &mut AsyncAppContext,
3566 ) -> Result<ProjectTransaction> {
3567 let fs = this.read_with(cx, |this, _| this.fs.clone());
3568 let mut operations = Vec::new();
3569 if let Some(document_changes) = edit.document_changes {
3570 match document_changes {
3571 lsp::DocumentChanges::Edits(edits) => {
3572 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3573 }
3574 lsp::DocumentChanges::Operations(ops) => operations = ops,
3575 }
3576 } else if let Some(changes) = edit.changes {
3577 operations.extend(changes.into_iter().map(|(uri, edits)| {
3578 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3579 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3580 uri,
3581 version: None,
3582 },
3583 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3584 })
3585 }));
3586 }
3587
3588 let mut project_transaction = ProjectTransaction::default();
3589 for operation in operations {
3590 match operation {
3591 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3592 let abs_path = op
3593 .uri
3594 .to_file_path()
3595 .map_err(|_| anyhow!("can't convert URI to path"))?;
3596
3597 if let Some(parent_path) = abs_path.parent() {
3598 fs.create_dir(parent_path).await?;
3599 }
3600 if abs_path.ends_with("/") {
3601 fs.create_dir(&abs_path).await?;
3602 } else {
3603 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3604 .await?;
3605 }
3606 }
3607 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3608 let source_abs_path = op
3609 .old_uri
3610 .to_file_path()
3611 .map_err(|_| anyhow!("can't convert URI to path"))?;
3612 let target_abs_path = op
3613 .new_uri
3614 .to_file_path()
3615 .map_err(|_| anyhow!("can't convert URI to path"))?;
3616 fs.rename(
3617 &source_abs_path,
3618 &target_abs_path,
3619 op.options.map(Into::into).unwrap_or_default(),
3620 )
3621 .await?;
3622 }
3623 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3624 let abs_path = op
3625 .uri
3626 .to_file_path()
3627 .map_err(|_| anyhow!("can't convert URI to path"))?;
3628 let options = op.options.map(Into::into).unwrap_or_default();
3629 if abs_path.ends_with("/") {
3630 fs.remove_dir(&abs_path, options).await?;
3631 } else {
3632 fs.remove_file(&abs_path, options).await?;
3633 }
3634 }
3635 lsp::DocumentChangeOperation::Edit(op) => {
3636 let buffer_to_edit = this
3637 .update(cx, |this, cx| {
3638 this.open_local_buffer_via_lsp(
3639 op.text_document.uri,
3640 lsp_adapter.clone(),
3641 language_server.clone(),
3642 cx,
3643 )
3644 })
3645 .await?;
3646
3647 let edits = this
3648 .update(cx, |this, cx| {
3649 let edits = op.edits.into_iter().map(|edit| match edit {
3650 lsp::OneOf::Left(edit) => edit,
3651 lsp::OneOf::Right(edit) => edit.text_edit,
3652 });
3653 this.edits_from_lsp(
3654 &buffer_to_edit,
3655 edits,
3656 op.text_document.version,
3657 cx,
3658 )
3659 })
3660 .await?;
3661
3662 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3663 buffer.finalize_last_transaction();
3664 buffer.start_transaction();
3665 for (range, text) in edits {
3666 buffer.edit([(range, text)], cx);
3667 }
3668 let transaction = if buffer.end_transaction(cx).is_some() {
3669 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3670 if !push_to_history {
3671 buffer.forget_transaction(transaction.id);
3672 }
3673 Some(transaction)
3674 } else {
3675 None
3676 };
3677
3678 transaction
3679 });
3680 if let Some(transaction) = transaction {
3681 project_transaction.0.insert(buffer_to_edit, transaction);
3682 }
3683 }
3684 }
3685 }
3686
3687 Ok(project_transaction)
3688 }
3689
3690 pub fn prepare_rename<T: ToPointUtf16>(
3691 &self,
3692 buffer: ModelHandle<Buffer>,
3693 position: T,
3694 cx: &mut ModelContext<Self>,
3695 ) -> Task<Result<Option<Range<Anchor>>>> {
3696 let position = position.to_point_utf16(buffer.read(cx));
3697 self.request_lsp(buffer, PrepareRename { position }, cx)
3698 }
3699
3700 pub fn perform_rename<T: ToPointUtf16>(
3701 &self,
3702 buffer: ModelHandle<Buffer>,
3703 position: T,
3704 new_name: String,
3705 push_to_history: bool,
3706 cx: &mut ModelContext<Self>,
3707 ) -> Task<Result<ProjectTransaction>> {
3708 let position = position.to_point_utf16(buffer.read(cx));
3709 self.request_lsp(
3710 buffer,
3711 PerformRename {
3712 position,
3713 new_name,
3714 push_to_history,
3715 },
3716 cx,
3717 )
3718 }
3719
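    // Search the project: scan visible worktree files on background threads for candidate
    // paths, open the matching buffers, and collect anchor ranges for every match.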
3720 pub fn search(
3721 &self,
3722 query: SearchQuery,
3723 cx: &mut ModelContext<Self>,
3724 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3725 if self.is_local() {
3726 let snapshots = self
3727 .visible_worktrees(cx)
3728 .filter_map(|tree| {
3729 let tree = tree.read(cx).as_local()?;
3730 Some(tree.snapshot())
3731 })
3732 .collect::<Vec<_>>();
3733
3734 let background = cx.background().clone();
3735 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3736 if path_count == 0 {
3737 return Task::ready(Ok(Default::default()));
3738 }
3739 let workers = background.num_cpus().min(path_count);
3740 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3741 cx.background()
3742 .spawn({
3743 let fs = self.fs.clone();
3744 let background = cx.background().clone();
3745 let query = query.clone();
3746 async move {
3747 let fs = &fs;
3748 let query = &query;
3749 let matching_paths_tx = &matching_paths_tx;
3750 let paths_per_worker = (path_count + workers - 1) / workers;
3751 let snapshots = &snapshots;
3752 background
3753 .scoped(|scope| {
3754 for worker_ix in 0..workers {
3755 let worker_start_ix = worker_ix * paths_per_worker;
3756 let worker_end_ix = worker_start_ix + paths_per_worker;
3757 scope.spawn(async move {
3758 let mut snapshot_start_ix = 0;
3759 let mut abs_path = PathBuf::new();
3760 for snapshot in snapshots {
3761 let snapshot_end_ix =
3762 snapshot_start_ix + snapshot.visible_file_count();
3763 if worker_end_ix <= snapshot_start_ix {
3764 break;
3765 } else if worker_start_ix > snapshot_end_ix {
3766 snapshot_start_ix = snapshot_end_ix;
3767 continue;
3768 } else {
3769 let start_in_snapshot = worker_start_ix
3770 .saturating_sub(snapshot_start_ix);
3771 let end_in_snapshot =
3772 cmp::min(worker_end_ix, snapshot_end_ix)
3773 - snapshot_start_ix;
3774
3775 for entry in snapshot
3776 .files(false, start_in_snapshot)
3777 .take(end_in_snapshot - start_in_snapshot)
3778 {
3779 if matching_paths_tx.is_closed() {
3780 break;
3781 }
3782
3783 abs_path.clear();
3784 abs_path.push(&snapshot.abs_path());
3785 abs_path.push(&entry.path);
3786 let matches = if let Some(file) =
3787 fs.open_sync(&abs_path).await.log_err()
3788 {
3789 query.detect(file).unwrap_or(false)
3790 } else {
3791 false
3792 };
3793
3794 if matches {
3795 let project_path =
3796 (snapshot.id(), entry.path.clone());
3797 if matching_paths_tx
3798 .send(project_path)
3799 .await
3800 .is_err()
3801 {
3802 break;
3803 }
3804 }
3805 }
3806
3807 snapshot_start_ix = snapshot_end_ix;
3808 }
3809 }
3810 });
3811 }
3812 })
3813 .await;
3814 }
3815 })
3816 .detach();
3817
3818 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3819 let open_buffers = self
3820 .opened_buffers
3821 .values()
3822 .filter_map(|b| b.upgrade(cx))
3823 .collect::<HashSet<_>>();
3824 cx.spawn(|this, cx| async move {
3825 for buffer in &open_buffers {
3826 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3827 buffers_tx.send((buffer.clone(), snapshot)).await?;
3828 }
3829
3830 let open_buffers = Rc::new(RefCell::new(open_buffers));
3831 while let Some(project_path) = matching_paths_rx.next().await {
3832 if buffers_tx.is_closed() {
3833 break;
3834 }
3835
3836 let this = this.clone();
3837 let open_buffers = open_buffers.clone();
3838 let buffers_tx = buffers_tx.clone();
3839 cx.spawn(|mut cx| async move {
3840 if let Some(buffer) = this
3841 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3842 .await
3843 .log_err()
3844 {
3845 if open_buffers.borrow_mut().insert(buffer.clone()) {
3846 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3847 buffers_tx.send((buffer, snapshot)).await?;
3848 }
3849 }
3850
3851 Ok::<_, anyhow::Error>(())
3852 })
3853 .detach();
3854 }
3855
3856 Ok::<_, anyhow::Error>(())
3857 })
3858 .detach_and_log_err(cx);
3859
3860 let background = cx.background().clone();
3861 cx.background().spawn(async move {
3862 let query = &query;
3863 let mut matched_buffers = Vec::new();
3864 for _ in 0..workers {
3865 matched_buffers.push(HashMap::default());
3866 }
3867 background
3868 .scoped(|scope| {
3869 for worker_matched_buffers in matched_buffers.iter_mut() {
3870 let mut buffers_rx = buffers_rx.clone();
3871 scope.spawn(async move {
3872 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3873 let buffer_matches = query
3874 .search(snapshot.as_rope())
3875 .await
3876 .iter()
3877 .map(|range| {
3878 snapshot.anchor_before(range.start)
3879 ..snapshot.anchor_after(range.end)
3880 })
3881 .collect::<Vec<_>>();
3882 if !buffer_matches.is_empty() {
3883 worker_matched_buffers
3884 .insert(buffer.clone(), buffer_matches);
3885 }
3886 }
3887 });
3888 }
3889 })
3890 .await;
3891 Ok(matched_buffers.into_iter().flatten().collect())
3892 })
3893 } else if let Some(project_id) = self.remote_id() {
3894 let request = self.client.request(query.to_proto(project_id));
3895 cx.spawn(|this, mut cx| async move {
3896 let response = request.await?;
3897 let mut result = HashMap::default();
3898 for location in response.locations {
3899 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3900 let target_buffer = this
3901 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3902 .await?;
3903 let start = location
3904 .start
3905 .and_then(deserialize_anchor)
3906 .ok_or_else(|| anyhow!("missing target start"))?;
3907 let end = location
3908 .end
3909 .and_then(deserialize_anchor)
3910 .ok_or_else(|| anyhow!("missing target end"))?;
3911 result
3912 .entry(target_buffer)
3913 .or_insert(Vec::new())
3914 .push(start..end);
3915 }
3916 Ok(result)
3917 })
3918 } else {
3919 Task::ready(Ok(Default::default()))
3920 }
3921 }
3922
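    /// Dispatches an LSP-backed request: on local projects it is sent to the buffer's
    /// language server (after checking the server's capabilities); on remote projects it is
    /// serialized and sent to the host. Falls back to a default response when the request
    /// cannot be handled.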
3923 fn request_lsp<R: LspCommand>(
3924 &self,
3925 buffer_handle: ModelHandle<Buffer>,
3926 request: R,
3927 cx: &mut ModelContext<Self>,
3928 ) -> Task<Result<R::Response>>
3929 where
3930 <R::LspRequest as lsp::request::Request>::Result: Send,
3931 {
3932 let buffer = buffer_handle.read(cx);
3933 if self.is_local() {
3934 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3935 if let Some((file, (_, language_server))) =
3936 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3937 {
3938 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3939 return cx.spawn(|this, cx| async move {
3940 if !request.check_capabilities(&language_server.capabilities()) {
3941 return Ok(Default::default());
3942 }
3943
3944 let response = language_server
3945 .request::<R::LspRequest>(lsp_params)
3946 .await
3947 .context("lsp request failed")?;
3948 request
3949 .response_from_lsp(response, this, buffer_handle, cx)
3950 .await
3951 });
3952 }
3953 } else if let Some(project_id) = self.remote_id() {
3954 let rpc = self.client.clone();
3955 let message = request.to_proto(project_id, buffer);
3956 return cx.spawn(|this, cx| async move {
3957 let response = rpc.request(message).await?;
3958 request
3959 .response_from_proto(response, this, buffer_handle, cx)
3960 .await
3961 });
3962 }
3963 Task::ready(Ok(Default::default()))
3964 }
3965
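    /// Returns the worktree containing `abs_path` along with the path relative to its root,
    /// creating a new local worktree rooted at `abs_path` when no existing worktree contains it.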
3966 pub fn find_or_create_local_worktree(
3967 &mut self,
3968 abs_path: impl AsRef<Path>,
3969 visible: bool,
3970 cx: &mut ModelContext<Self>,
3971 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3972 let abs_path = abs_path.as_ref();
3973 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3974 Task::ready(Ok((tree.clone(), relative_path.into())))
3975 } else {
3976 let worktree = self.create_local_worktree(abs_path, visible, cx);
3977 cx.foreground()
3978 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3979 }
3980 }
3981
3982 pub fn find_local_worktree(
3983 &self,
3984 abs_path: &Path,
3985 cx: &AppContext,
3986 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3987 for tree in self.worktrees(cx) {
3988 if let Some(relative_path) = tree
3989 .read(cx)
3990 .as_local()
3991 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3992 {
3993 return Some((tree.clone(), relative_path.into()));
3994 }
3995 }
3996 None
3997 }
3998
3999 pub fn is_shared(&self) -> bool {
4000 match &self.client_state {
4001 ProjectClientState::Local { is_shared, .. } => *is_shared,
4002 ProjectClientState::Remote { .. } => false,
4003 }
4004 }
4005
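    /// Creates a local worktree for `abs_path`, deduplicating concurrent requests through
    /// `loading_local_worktrees` and sharing the new worktree when the project is already shared.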
4006 fn create_local_worktree(
4007 &mut self,
4008 abs_path: impl AsRef<Path>,
4009 visible: bool,
4010 cx: &mut ModelContext<Self>,
4011 ) -> Task<Result<ModelHandle<Worktree>>> {
4012 let fs = self.fs.clone();
4013 let client = self.client.clone();
4014 let next_entry_id = self.next_entry_id.clone();
4015 let path: Arc<Path> = abs_path.as_ref().into();
4016 let task = self
4017 .loading_local_worktrees
4018 .entry(path.clone())
4019 .or_insert_with(|| {
4020 cx.spawn(|project, mut cx| {
4021 async move {
4022 let worktree = Worktree::local(
4023 client.clone(),
4024 path.clone(),
4025 visible,
4026 fs,
4027 next_entry_id,
4028 &mut cx,
4029 )
4030 .await;
4031 project.update(&mut cx, |project, _| {
4032 project.loading_local_worktrees.remove(&path);
4033 });
4034 let worktree = worktree?;
4035
4036 let project_id = project.update(&mut cx, |project, cx| {
4037 project.add_worktree(&worktree, cx);
4038 project.shared_remote_id()
4039 });
4040
4041 if let Some(project_id) = project_id {
4042 worktree
4043 .update(&mut cx, |worktree, cx| {
4044 worktree.as_local_mut().unwrap().share(project_id, cx)
4045 })
4046 .await
4047 .log_err();
4048 }
4049
4050 Ok(worktree)
4051 }
4052 .map_err(Arc::new)
4053 })
4054 .shared()
4055 })
4056 .clone();
4057 cx.foreground().spawn(async move {
4058 match task.await {
4059 Ok(worktree) => Ok(worktree),
4060 Err(err) => Err(anyhow!("{}", err)),
4061 }
4062 })
4063 }
4064
4065 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4066 self.worktrees.retain(|worktree| {
4067 if let Some(worktree) = worktree.upgrade(cx) {
4068 let id = worktree.read(cx).id();
4069 if id == id_to_remove {
4070 cx.emit(Event::WorktreeRemoved(id));
4071 false
4072 } else {
4073 true
4074 }
4075 } else {
4076 false
4077 }
4078 });
4079 self.metadata_changed(true, cx);
4080 cx.notify();
4081 }
4082
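    /// Adds a worktree to the project and subscribes to its updates. The handle is held
    /// strongly only when the project is shared or the worktree is visible or remote;
    /// otherwise a weak handle is kept so the worktree can be released when unused.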
4083 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4084 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4085 if worktree.read(cx).is_local() {
4086 cx.subscribe(&worktree, |this, worktree, _, cx| {
4087 this.update_local_worktree_buffers(worktree, cx);
4088 })
4089 .detach();
4090 }
4091
4092 let push_strong_handle = {
4093 let worktree = worktree.read(cx);
4094 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4095 };
4096 if push_strong_handle {
4097 self.worktrees
4098 .push(WorktreeHandle::Strong(worktree.clone()));
4099 } else {
4100 self.worktrees
4101 .push(WorktreeHandle::Weak(worktree.downgrade()));
4102 }
4103
4104 self.metadata_changed(true, cx);
4105 cx.observe_release(&worktree, |this, worktree, cx| {
4106 this.remove_worktree(worktree.id(), cx);
4107 cx.notify();
4108 })
4109 .detach();
4110
4111 cx.emit(Event::WorktreeAdded);
4112 cx.notify();
4113 }
4114
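    /// After a local worktree changes, refreshes the `File` associated with each open buffer
    /// (matching by entry id, then by path, falling back to a `File` with no entry when the
    /// file is gone), notifies collaborators when the project is shared, and re-runs language
    /// detection and language-server registration for buffers whose absolute path changed.
    /// Entries for dropped buffers are removed from `opened_buffers`.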
4115 fn update_local_worktree_buffers(
4116 &mut self,
4117 worktree_handle: ModelHandle<Worktree>,
4118 cx: &mut ModelContext<Self>,
4119 ) {
4120 let snapshot = worktree_handle.read(cx).snapshot();
4121 let mut buffers_to_delete = Vec::new();
4122 let mut renamed_buffers = Vec::new();
4123 for (buffer_id, buffer) in &self.opened_buffers {
4124 if let Some(buffer) = buffer.upgrade(cx) {
4125 buffer.update(cx, |buffer, cx| {
4126 if let Some(old_file) = File::from_dyn(buffer.file()) {
4127 if old_file.worktree != worktree_handle {
4128 return;
4129 }
4130
4131 let new_file = if let Some(entry) = old_file
4132 .entry_id
4133 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4134 {
4135 File {
4136 is_local: true,
4137 entry_id: Some(entry.id),
4138 mtime: entry.mtime,
4139 path: entry.path.clone(),
4140 worktree: worktree_handle.clone(),
4141 }
4142 } else if let Some(entry) =
4143 snapshot.entry_for_path(old_file.path().as_ref())
4144 {
4145 File {
4146 is_local: true,
4147 entry_id: Some(entry.id),
4148 mtime: entry.mtime,
4149 path: entry.path.clone(),
4150 worktree: worktree_handle.clone(),
4151 }
4152 } else {
4153 File {
4154 is_local: true,
4155 entry_id: None,
4156 path: old_file.path().clone(),
4157 mtime: old_file.mtime(),
4158 worktree: worktree_handle.clone(),
4159 }
4160 };
4161
4162 let old_path = old_file.abs_path(cx);
4163 if new_file.abs_path(cx) != old_path {
4164 renamed_buffers.push((cx.handle(), old_path));
4165 }
4166
4167 if let Some(project_id) = self.shared_remote_id() {
4168 self.client
4169 .send(proto::UpdateBufferFile {
4170 project_id,
4171 buffer_id: *buffer_id as u64,
4172 file: Some(new_file.to_proto()),
4173 })
4174 .log_err();
4175 }
4176 buffer.file_updated(Arc::new(new_file), cx).detach();
4177 }
4178 });
4179 } else {
4180 buffers_to_delete.push(*buffer_id);
4181 }
4182 }
4183
4184 for buffer_id in buffers_to_delete {
4185 self.opened_buffers.remove(&buffer_id);
4186 }
4187
4188 for (buffer, old_path) in renamed_buffers {
4189 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4190 self.assign_language_to_buffer(&buffer, cx);
4191 self.register_buffer_with_language_server(&buffer, cx);
4192 }
4193 }
4194
4195 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4196 let new_active_entry = entry.and_then(|project_path| {
4197 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4198 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4199 Some(entry.id)
4200 });
4201 if new_active_entry != self.active_entry {
4202 self.active_entry = new_active_entry;
4203 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4204 }
4205 }
4206
4207 pub fn language_servers_running_disk_based_diagnostics<'a>(
4208 &'a self,
4209 ) -> impl 'a + Iterator<Item = usize> {
4210 self.language_server_statuses
4211 .iter()
4212 .filter_map(|(id, status)| {
4213 if status.pending_diagnostic_updates > 0 {
4214 Some(*id)
4215 } else {
4216 None
4217 }
4218 })
4219 }
4220
4221 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4222 let mut summary = DiagnosticSummary::default();
4223 for (_, path_summary) in self.diagnostic_summaries(cx) {
4224 summary.error_count += path_summary.error_count;
4225 summary.warning_count += path_summary.warning_count;
4226 }
4227 summary
4228 }
4229
4230 pub fn diagnostic_summaries<'a>(
4231 &'a self,
4232 cx: &'a AppContext,
4233 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4234 self.worktrees(cx).flat_map(move |worktree| {
4235 let worktree = worktree.read(cx);
4236 let worktree_id = worktree.id();
4237 worktree
4238 .diagnostic_summaries()
4239 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4240 })
4241 }
4242
4243 pub fn disk_based_diagnostics_started(
4244 &mut self,
4245 language_server_id: usize,
4246 cx: &mut ModelContext<Self>,
4247 ) {
4248 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4249 }
4250
4251 pub fn disk_based_diagnostics_finished(
4252 &mut self,
4253 language_server_id: usize,
4254 cx: &mut ModelContext<Self>,
4255 ) {
4256 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4257 }
4258
4259 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4260 self.active_entry
4261 }
4262
4263 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4264 self.worktree_for_id(path.worktree_id, cx)?
4265 .read(cx)
4266 .entry_for_path(&path.path)
4267 .map(|entry| entry.id)
4268 }
4269
4270 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4271 let worktree = self.worktree_for_entry(entry_id, cx)?;
4272 let worktree = worktree.read(cx);
4273 let worktree_id = worktree.id();
4274 let path = worktree.entry_for_id(entry_id)?.path.clone();
4275 Some(ProjectPath { worktree_id, path })
4276 }
4277
4278 // RPC message handlers
4279
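    /// Accepts a join request immediately when the requester is already a collaborator;
    /// otherwise emits `Event::ContactRequestedJoin` so the user can approve or deny it.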
4280 async fn handle_request_join_project(
4281 this: ModelHandle<Self>,
4282 message: TypedEnvelope<proto::RequestJoinProject>,
4283 _: Arc<Client>,
4284 mut cx: AsyncAppContext,
4285 ) -> Result<()> {
4286 let user_id = message.payload.requester_id;
4287 if this.read_with(&cx, |project, _| {
4288 project.collaborators.values().any(|c| c.user.id == user_id)
4289 }) {
4290 this.update(&mut cx, |this, cx| {
4291 this.respond_to_join_request(user_id, true, cx)
4292 });
4293 } else {
4294 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4295 let user = user_store
4296 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4297 .await?;
4298 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4299 }
4300 Ok(())
4301 }
4302
4303 async fn handle_unregister_project(
4304 this: ModelHandle<Self>,
4305 _: TypedEnvelope<proto::UnregisterProject>,
4306 _: Arc<Client>,
4307 mut cx: AsyncAppContext,
4308 ) -> Result<()> {
4309 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4310 Ok(())
4311 }
4312
4313 async fn handle_project_unshared(
4314 this: ModelHandle<Self>,
4315 _: TypedEnvelope<proto::ProjectUnshared>,
4316 _: Arc<Client>,
4317 mut cx: AsyncAppContext,
4318 ) -> Result<()> {
4319 this.update(&mut cx, |this, cx| this.unshared(cx));
4320 Ok(())
4321 }
4322
4323 async fn handle_add_collaborator(
4324 this: ModelHandle<Self>,
4325 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4326 _: Arc<Client>,
4327 mut cx: AsyncAppContext,
4328 ) -> Result<()> {
4329 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4330 let collaborator = envelope
4331 .payload
4332 .collaborator
4333 .take()
4334 .ok_or_else(|| anyhow!("empty collaborator"))?;
4335
4336 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4337 this.update(&mut cx, |this, cx| {
4338 this.collaborators
4339 .insert(collaborator.peer_id, collaborator);
4340 cx.notify();
4341 });
4342
4343 Ok(())
4344 }
4345
4346 async fn handle_remove_collaborator(
4347 this: ModelHandle<Self>,
4348 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4349 _: Arc<Client>,
4350 mut cx: AsyncAppContext,
4351 ) -> Result<()> {
4352 this.update(&mut cx, |this, cx| {
4353 let peer_id = PeerId(envelope.payload.peer_id);
4354 let replica_id = this
4355 .collaborators
4356 .remove(&peer_id)
4357 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4358 .replica_id;
4359 for (_, buffer) in &this.opened_buffers {
4360 if let Some(buffer) = buffer.upgrade(cx) {
4361 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4362 }
4363 }
4364
4365 cx.emit(Event::CollaboratorLeft(peer_id));
4366 cx.notify();
4367 Ok(())
4368 })
4369 }
4370
4371 async fn handle_join_project_request_cancelled(
4372 this: ModelHandle<Self>,
4373 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4374 _: Arc<Client>,
4375 mut cx: AsyncAppContext,
4376 ) -> Result<()> {
4377 let user = this
4378 .update(&mut cx, |this, cx| {
4379 this.user_store.update(cx, |user_store, cx| {
4380 user_store.fetch_user(envelope.payload.requester_id, cx)
4381 })
4382 })
4383 .await?;
4384
4385 this.update(&mut cx, |_, cx| {
4386 cx.emit(Event::ContactCancelledJoinRequest(user));
4387 });
4388
4389 Ok(())
4390 }
4391
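    /// Rebuilds the worktree list from the host's project metadata, reusing worktrees that
    /// are still present and creating remote worktrees for new ones; worktrees missing from
    /// the update are reported as removed.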
4392 async fn handle_update_project(
4393 this: ModelHandle<Self>,
4394 envelope: TypedEnvelope<proto::UpdateProject>,
4395 client: Arc<Client>,
4396 mut cx: AsyncAppContext,
4397 ) -> Result<()> {
4398 this.update(&mut cx, |this, cx| {
4399 let replica_id = this.replica_id();
4400 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4401
4402 let mut old_worktrees_by_id = this
4403 .worktrees
4404 .drain(..)
4405 .filter_map(|worktree| {
4406 let worktree = worktree.upgrade(cx)?;
4407 Some((worktree.read(cx).id(), worktree))
4408 })
4409 .collect::<HashMap<_, _>>();
4410
4411 for worktree in envelope.payload.worktrees {
4412 if let Some(old_worktree) =
4413 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4414 {
4415 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4416 } else {
4417 let worktree = proto::Worktree {
4418 id: worktree.id,
4419 root_name: worktree.root_name,
4420 entries: Default::default(),
4421 diagnostic_summaries: Default::default(),
4422 visible: worktree.visible,
4423 scan_id: 0,
4424 };
4425 let (worktree, load_task) =
4426 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4427 this.add_worktree(&worktree, cx);
4428 load_task.detach();
4429 }
4430 }
4431
4432 this.metadata_changed(true, cx);
4433 for (id, _) in old_worktrees_by_id {
4434 cx.emit(Event::WorktreeRemoved(id));
4435 }
4436
4437 Ok(())
4438 })
4439 }
4440
4441 async fn handle_update_worktree(
4442 this: ModelHandle<Self>,
4443 envelope: TypedEnvelope<proto::UpdateWorktree>,
4444 _: Arc<Client>,
4445 mut cx: AsyncAppContext,
4446 ) -> Result<()> {
4447 this.update(&mut cx, |this, cx| {
4448 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4449 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4450 worktree.update(cx, |worktree, _| {
4451 let worktree = worktree.as_remote_mut().unwrap();
4452 worktree.update_from_remote(envelope)
4453 })?;
4454 }
4455 Ok(())
4456 })
4457 }
4458
4459 async fn handle_create_project_entry(
4460 this: ModelHandle<Self>,
4461 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4462 _: Arc<Client>,
4463 mut cx: AsyncAppContext,
4464 ) -> Result<proto::ProjectEntryResponse> {
4465 let worktree = this.update(&mut cx, |this, cx| {
4466 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4467 this.worktree_for_id(worktree_id, cx)
4468 .ok_or_else(|| anyhow!("worktree not found"))
4469 })?;
4470 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4471 let entry = worktree
4472 .update(&mut cx, |worktree, cx| {
4473 let worktree = worktree.as_local_mut().unwrap();
4474 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4475 worktree.create_entry(path, envelope.payload.is_directory, cx)
4476 })
4477 .await?;
4478 Ok(proto::ProjectEntryResponse {
4479 entry: Some((&entry).into()),
4480 worktree_scan_id: worktree_scan_id as u64,
4481 })
4482 }
4483
4484 async fn handle_rename_project_entry(
4485 this: ModelHandle<Self>,
4486 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4487 _: Arc<Client>,
4488 mut cx: AsyncAppContext,
4489 ) -> Result<proto::ProjectEntryResponse> {
4490 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4491 let worktree = this.read_with(&cx, |this, cx| {
4492 this.worktree_for_entry(entry_id, cx)
4493 .ok_or_else(|| anyhow!("worktree not found"))
4494 })?;
4495 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4496 let entry = worktree
4497 .update(&mut cx, |worktree, cx| {
4498 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4499 worktree
4500 .as_local_mut()
4501 .unwrap()
4502 .rename_entry(entry_id, new_path, cx)
4503 .ok_or_else(|| anyhow!("invalid entry"))
4504 })?
4505 .await?;
4506 Ok(proto::ProjectEntryResponse {
4507 entry: Some((&entry).into()),
4508 worktree_scan_id: worktree_scan_id as u64,
4509 })
4510 }
4511
4512 async fn handle_copy_project_entry(
4513 this: ModelHandle<Self>,
4514 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4515 _: Arc<Client>,
4516 mut cx: AsyncAppContext,
4517 ) -> Result<proto::ProjectEntryResponse> {
4518 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4519 let worktree = this.read_with(&cx, |this, cx| {
4520 this.worktree_for_entry(entry_id, cx)
4521 .ok_or_else(|| anyhow!("worktree not found"))
4522 })?;
4523 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4524 let entry = worktree
4525 .update(&mut cx, |worktree, cx| {
4526 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4527 worktree
4528 .as_local_mut()
4529 .unwrap()
4530 .copy_entry(entry_id, new_path, cx)
4531 .ok_or_else(|| anyhow!("invalid entry"))
4532 })?
4533 .await?;
4534 Ok(proto::ProjectEntryResponse {
4535 entry: Some((&entry).into()),
4536 worktree_scan_id: worktree_scan_id as u64,
4537 })
4538 }
4539
4540 async fn handle_delete_project_entry(
4541 this: ModelHandle<Self>,
4542 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4543 _: Arc<Client>,
4544 mut cx: AsyncAppContext,
4545 ) -> Result<proto::ProjectEntryResponse> {
4546 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4547 let worktree = this.read_with(&cx, |this, cx| {
4548 this.worktree_for_entry(entry_id, cx)
4549 .ok_or_else(|| anyhow!("worktree not found"))
4550 })?;
4551 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4552 worktree
4553 .update(&mut cx, |worktree, cx| {
4554 worktree
4555 .as_local_mut()
4556 .unwrap()
4557 .delete_entry(entry_id, cx)
4558 .ok_or_else(|| anyhow!("invalid entry"))
4559 })?
4560 .await?;
4561 Ok(proto::ProjectEntryResponse {
4562 entry: None,
4563 worktree_scan_id: worktree_scan_id as u64,
4564 })
4565 }
4566
4567 async fn handle_update_diagnostic_summary(
4568 this: ModelHandle<Self>,
4569 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4570 _: Arc<Client>,
4571 mut cx: AsyncAppContext,
4572 ) -> Result<()> {
4573 this.update(&mut cx, |this, cx| {
4574 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4575 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4576 if let Some(summary) = envelope.payload.summary {
4577 let project_path = ProjectPath {
4578 worktree_id,
4579 path: Path::new(&summary.path).into(),
4580 };
4581 worktree.update(cx, |worktree, _| {
4582 worktree
4583 .as_remote_mut()
4584 .unwrap()
4585 .update_diagnostic_summary(project_path.path.clone(), &summary);
4586 });
4587 cx.emit(Event::DiagnosticsUpdated {
4588 language_server_id: summary.language_server_id as usize,
4589 path: project_path,
4590 });
4591 }
4592 }
4593 Ok(())
4594 })
4595 }
4596
4597 async fn handle_start_language_server(
4598 this: ModelHandle<Self>,
4599 envelope: TypedEnvelope<proto::StartLanguageServer>,
4600 _: Arc<Client>,
4601 mut cx: AsyncAppContext,
4602 ) -> Result<()> {
4603 let server = envelope
4604 .payload
4605 .server
4606 .ok_or_else(|| anyhow!("invalid server"))?;
4607 this.update(&mut cx, |this, cx| {
4608 this.language_server_statuses.insert(
4609 server.id as usize,
4610 LanguageServerStatus {
4611 name: server.name,
4612 pending_work: Default::default(),
4613 pending_diagnostic_updates: 0,
4614 },
4615 );
4616 cx.notify();
4617 });
4618 Ok(())
4619 }
4620
4621 async fn handle_update_language_server(
4622 this: ModelHandle<Self>,
4623 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4624 _: Arc<Client>,
4625 mut cx: AsyncAppContext,
4626 ) -> Result<()> {
4627 let language_server_id = envelope.payload.language_server_id as usize;
4628 match envelope
4629 .payload
4630 .variant
4631 .ok_or_else(|| anyhow!("invalid variant"))?
4632 {
4633 proto::update_language_server::Variant::WorkStart(payload) => {
4634 this.update(&mut cx, |this, cx| {
4635 this.on_lsp_work_start(
4636 language_server_id,
4637 payload.token,
4638 LanguageServerProgress {
4639 message: payload.message,
4640 percentage: payload.percentage.map(|p| p as usize),
4641 last_update_at: Instant::now(),
4642 },
4643 cx,
4644 );
4645 })
4646 }
4647 proto::update_language_server::Variant::WorkProgress(payload) => {
4648 this.update(&mut cx, |this, cx| {
4649 this.on_lsp_work_progress(
4650 language_server_id,
4651 payload.token,
4652 LanguageServerProgress {
4653 message: payload.message,
4654 percentage: payload.percentage.map(|p| p as usize),
4655 last_update_at: Instant::now(),
4656 },
4657 cx,
4658 );
4659 })
4660 }
4661 proto::update_language_server::Variant::WorkEnd(payload) => {
4662 this.update(&mut cx, |this, cx| {
4663 this.on_lsp_work_end(language_server_id, payload.token, cx);
4664 })
4665 }
4666 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4667 this.update(&mut cx, |this, cx| {
4668 this.disk_based_diagnostics_started(language_server_id, cx);
4669 })
4670 }
4671 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4672 this.update(&mut cx, |this, cx| {
4673 this.disk_based_diagnostics_finished(language_server_id, cx)
4674 });
4675 }
4676 }
4677
4678 Ok(())
4679 }
4680
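    /// Applies incoming buffer operations, buffering them when the buffer is still loading
    /// and, on remote projects, creating a placeholder entry for buffers that have not been
    /// opened locally yet.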
4681 async fn handle_update_buffer(
4682 this: ModelHandle<Self>,
4683 envelope: TypedEnvelope<proto::UpdateBuffer>,
4684 _: Arc<Client>,
4685 mut cx: AsyncAppContext,
4686 ) -> Result<()> {
4687 this.update(&mut cx, |this, cx| {
4688 let payload = envelope.payload.clone();
4689 let buffer_id = payload.buffer_id;
4690 let ops = payload
4691 .operations
4692 .into_iter()
4693 .map(language::proto::deserialize_operation)
4694 .collect::<Result<Vec<_>, _>>()?;
4695 let is_remote = this.is_remote();
4696 match this.opened_buffers.entry(buffer_id) {
4697 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4698 OpenBuffer::Strong(buffer) => {
4699 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4700 }
4701 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4702 OpenBuffer::Weak(_) => {}
4703 },
4704 hash_map::Entry::Vacant(e) => {
4705 assert!(
4706 is_remote,
4707 "received buffer update from {:?}",
4708 envelope.original_sender_id
4709 );
4710 e.insert(OpenBuffer::Loading(ops));
4711 }
4712 }
4713 Ok(())
4714 })
4715 }
4716
4717 async fn handle_update_buffer_file(
4718 this: ModelHandle<Self>,
4719 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4720 _: Arc<Client>,
4721 mut cx: AsyncAppContext,
4722 ) -> Result<()> {
4723 this.update(&mut cx, |this, cx| {
4724 let payload = envelope.payload.clone();
4725 let buffer_id = payload.buffer_id;
4726 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4727 let worktree = this
4728 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4729 .ok_or_else(|| anyhow!("no such worktree"))?;
4730 let file = File::from_proto(file, worktree.clone(), cx)?;
4731 let buffer = this
4732 .opened_buffers
4733 .get_mut(&buffer_id)
4734 .and_then(|b| b.upgrade(cx))
4735 .ok_or_else(|| anyhow!("no such buffer"))?;
4736 buffer.update(cx, |buffer, cx| {
4737 buffer.file_updated(Arc::new(file), cx).detach();
4738 });
4739 Ok(())
4740 })
4741 }
4742
4743 async fn handle_save_buffer(
4744 this: ModelHandle<Self>,
4745 envelope: TypedEnvelope<proto::SaveBuffer>,
4746 _: Arc<Client>,
4747 mut cx: AsyncAppContext,
4748 ) -> Result<proto::BufferSaved> {
4749 let buffer_id = envelope.payload.buffer_id;
4750 let requested_version = deserialize_version(envelope.payload.version);
4751
4752 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4753 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4754 let buffer = this
4755 .opened_buffers
4756 .get(&buffer_id)
4757 .and_then(|buffer| buffer.upgrade(cx))
4758 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4759 Ok::<_, anyhow::Error>((project_id, buffer))
4760 })?;
4761 buffer
4762 .update(&mut cx, |buffer, _| {
4763 buffer.wait_for_version(requested_version)
4764 })
4765 .await;
4766
4767 let (saved_version, fingerprint, mtime) =
4768 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4769 Ok(proto::BufferSaved {
4770 project_id,
4771 buffer_id,
4772 version: serialize_version(&saved_version),
4773 mtime: Some(mtime.into()),
4774 fingerprint,
4775 })
4776 }
4777
4778 async fn handle_reload_buffers(
4779 this: ModelHandle<Self>,
4780 envelope: TypedEnvelope<proto::ReloadBuffers>,
4781 _: Arc<Client>,
4782 mut cx: AsyncAppContext,
4783 ) -> Result<proto::ReloadBuffersResponse> {
4784 let sender_id = envelope.original_sender_id()?;
4785 let reload = this.update(&mut cx, |this, cx| {
4786 let mut buffers = HashSet::default();
4787 for buffer_id in &envelope.payload.buffer_ids {
4788 buffers.insert(
4789 this.opened_buffers
4790 .get(buffer_id)
4791 .and_then(|buffer| buffer.upgrade(cx))
4792 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4793 );
4794 }
4795 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4796 })?;
4797
4798 let project_transaction = reload.await?;
4799 let project_transaction = this.update(&mut cx, |this, cx| {
4800 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4801 });
4802 Ok(proto::ReloadBuffersResponse {
4803 transaction: Some(project_transaction),
4804 })
4805 }
4806
4807 async fn handle_format_buffers(
4808 this: ModelHandle<Self>,
4809 envelope: TypedEnvelope<proto::FormatBuffers>,
4810 _: Arc<Client>,
4811 mut cx: AsyncAppContext,
4812 ) -> Result<proto::FormatBuffersResponse> {
4813 let sender_id = envelope.original_sender_id()?;
4814 let format = this.update(&mut cx, |this, cx| {
4815 let mut buffers = HashSet::default();
4816 for buffer_id in &envelope.payload.buffer_ids {
4817 buffers.insert(
4818 this.opened_buffers
4819 .get(buffer_id)
4820 .and_then(|buffer| buffer.upgrade(cx))
4821 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4822 );
4823 }
4824 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4825 })?;
4826
4827 let project_transaction = format.await?;
4828 let project_transaction = this.update(&mut cx, |this, cx| {
4829 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4830 });
4831 Ok(proto::FormatBuffersResponse {
4832 transaction: Some(project_transaction),
4833 })
4834 }
4835
4836 async fn handle_get_completions(
4837 this: ModelHandle<Self>,
4838 envelope: TypedEnvelope<proto::GetCompletions>,
4839 _: Arc<Client>,
4840 mut cx: AsyncAppContext,
4841 ) -> Result<proto::GetCompletionsResponse> {
4842 let position = envelope
4843 .payload
4844 .position
4845 .and_then(language::proto::deserialize_anchor)
4846 .ok_or_else(|| anyhow!("invalid position"))?;
4847 let version = deserialize_version(envelope.payload.version);
4848 let buffer = this.read_with(&cx, |this, cx| {
4849 this.opened_buffers
4850 .get(&envelope.payload.buffer_id)
4851 .and_then(|buffer| buffer.upgrade(cx))
4852 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4853 })?;
4854 buffer
4855 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4856 .await;
4857 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4858 let completions = this
4859 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4860 .await?;
4861
4862 Ok(proto::GetCompletionsResponse {
4863 completions: completions
4864 .iter()
4865 .map(language::proto::serialize_completion)
4866 .collect(),
4867 version: serialize_version(&version),
4868 })
4869 }
4870
4871 async fn handle_apply_additional_edits_for_completion(
4872 this: ModelHandle<Self>,
4873 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4874 _: Arc<Client>,
4875 mut cx: AsyncAppContext,
4876 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4877 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4878 let buffer = this
4879 .opened_buffers
4880 .get(&envelope.payload.buffer_id)
4881 .and_then(|buffer| buffer.upgrade(cx))
4882 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4883 let language = buffer.read(cx).language();
4884 let completion = language::proto::deserialize_completion(
4885 envelope
4886 .payload
4887 .completion
4888 .ok_or_else(|| anyhow!("invalid completion"))?,
4889 language,
4890 )?;
4891 Ok::<_, anyhow::Error>(
4892 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4893 )
4894 })?;
4895
4896 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4897 transaction: apply_additional_edits
4898 .await?
4899 .as_ref()
4900 .map(language::proto::serialize_transaction),
4901 })
4902 }
4903
4904 async fn handle_get_code_actions(
4905 this: ModelHandle<Self>,
4906 envelope: TypedEnvelope<proto::GetCodeActions>,
4907 _: Arc<Client>,
4908 mut cx: AsyncAppContext,
4909 ) -> Result<proto::GetCodeActionsResponse> {
4910 let start = envelope
4911 .payload
4912 .start
4913 .and_then(language::proto::deserialize_anchor)
4914 .ok_or_else(|| anyhow!("invalid start"))?;
4915 let end = envelope
4916 .payload
4917 .end
4918 .and_then(language::proto::deserialize_anchor)
4919 .ok_or_else(|| anyhow!("invalid end"))?;
4920 let buffer = this.update(&mut cx, |this, cx| {
4921 this.opened_buffers
4922 .get(&envelope.payload.buffer_id)
4923 .and_then(|buffer| buffer.upgrade(cx))
4924 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4925 })?;
4926 buffer
4927 .update(&mut cx, |buffer, _| {
4928 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4929 })
4930 .await;
4931
4932 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4933 let code_actions = this.update(&mut cx, |this, cx| {
4934 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4935 })?;
4936
4937 Ok(proto::GetCodeActionsResponse {
4938 actions: code_actions
4939 .await?
4940 .iter()
4941 .map(language::proto::serialize_code_action)
4942 .collect(),
4943 version: serialize_version(&version),
4944 })
4945 }
4946
4947 async fn handle_apply_code_action(
4948 this: ModelHandle<Self>,
4949 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4950 _: Arc<Client>,
4951 mut cx: AsyncAppContext,
4952 ) -> Result<proto::ApplyCodeActionResponse> {
4953 let sender_id = envelope.original_sender_id()?;
4954 let action = language::proto::deserialize_code_action(
4955 envelope
4956 .payload
4957 .action
4958 .ok_or_else(|| anyhow!("invalid action"))?,
4959 )?;
4960 let apply_code_action = this.update(&mut cx, |this, cx| {
4961 let buffer = this
4962 .opened_buffers
4963 .get(&envelope.payload.buffer_id)
4964 .and_then(|buffer| buffer.upgrade(cx))
4965 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4966 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4967 })?;
4968
4969 let project_transaction = apply_code_action.await?;
4970 let project_transaction = this.update(&mut cx, |this, cx| {
4971 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4972 });
4973 Ok(proto::ApplyCodeActionResponse {
4974 transaction: Some(project_transaction),
4975 })
4976 }
4977
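    /// Generic handler for LSP-backed requests received over RPC: deserializes the request,
    /// runs it through `request_lsp`, and serializes the response for the requesting peer.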
4978 async fn handle_lsp_command<T: LspCommand>(
4979 this: ModelHandle<Self>,
4980 envelope: TypedEnvelope<T::ProtoRequest>,
4981 _: Arc<Client>,
4982 mut cx: AsyncAppContext,
4983 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4984 where
4985 <T::LspRequest as lsp::request::Request>::Result: Send,
4986 {
4987 let sender_id = envelope.original_sender_id()?;
4988 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4989 let buffer_handle = this.read_with(&cx, |this, _| {
4990 this.opened_buffers
4991 .get(&buffer_id)
4992 .and_then(|buffer| buffer.upgrade(&cx))
4993 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4994 })?;
4995 let request = T::from_proto(
4996 envelope.payload,
4997 this.clone(),
4998 buffer_handle.clone(),
4999 cx.clone(),
5000 )
5001 .await?;
5002 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5003 let response = this
5004 .update(&mut cx, |this, cx| {
5005 this.request_lsp(buffer_handle, request, cx)
5006 })
5007 .await?;
5008 this.update(&mut cx, |this, cx| {
5009 Ok(T::response_to_proto(
5010 response,
5011 this,
5012 sender_id,
5013 &buffer_version,
5014 cx,
5015 ))
5016 })
5017 }
5018
5019 async fn handle_get_project_symbols(
5020 this: ModelHandle<Self>,
5021 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5022 _: Arc<Client>,
5023 mut cx: AsyncAppContext,
5024 ) -> Result<proto::GetProjectSymbolsResponse> {
5025 let symbols = this
5026 .update(&mut cx, |this, cx| {
5027 this.symbols(&envelope.payload.query, cx)
5028 })
5029 .await?;
5030
5031 Ok(proto::GetProjectSymbolsResponse {
5032 symbols: symbols.iter().map(serialize_symbol).collect(),
5033 })
5034 }
5035
5036 async fn handle_search_project(
5037 this: ModelHandle<Self>,
5038 envelope: TypedEnvelope<proto::SearchProject>,
5039 _: Arc<Client>,
5040 mut cx: AsyncAppContext,
5041 ) -> Result<proto::SearchProjectResponse> {
5042 let peer_id = envelope.original_sender_id()?;
5043 let query = SearchQuery::from_proto(envelope.payload)?;
5044 let result = this
5045 .update(&mut cx, |this, cx| this.search(query, cx))
5046 .await?;
5047
5048 this.update(&mut cx, |this, cx| {
5049 let mut locations = Vec::new();
5050 for (buffer, ranges) in result {
5051 for range in ranges {
5052 let start = serialize_anchor(&range.start);
5053 let end = serialize_anchor(&range.end);
5054 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5055 locations.push(proto::Location {
5056 buffer: Some(buffer),
5057 start: Some(start),
5058 end: Some(end),
5059 });
5060 }
5061 }
5062 Ok(proto::SearchProjectResponse { locations })
5063 })
5064 }
5065
5066 async fn handle_open_buffer_for_symbol(
5067 this: ModelHandle<Self>,
5068 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5069 _: Arc<Client>,
5070 mut cx: AsyncAppContext,
5071 ) -> Result<proto::OpenBufferForSymbolResponse> {
5072 let peer_id = envelope.original_sender_id()?;
5073 let symbol = envelope
5074 .payload
5075 .symbol
5076 .ok_or_else(|| anyhow!("invalid symbol"))?;
5077 let symbol = this.read_with(&cx, |this, _| {
5078 let symbol = this.deserialize_symbol(symbol)?;
5079 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5080 if signature == symbol.signature {
5081 Ok(symbol)
5082 } else {
5083 Err(anyhow!("invalid symbol signature"))
5084 }
5085 })?;
5086 let buffer = this
5087 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5088 .await?;
5089
5090 Ok(proto::OpenBufferForSymbolResponse {
5091 buffer: Some(this.update(&mut cx, |this, cx| {
5092 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5093 })),
5094 })
5095 }
5096
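    /// Computes a signature binding a symbol's worktree and path to this project's `nonce`,
    /// so that `handle_open_buffer_for_symbol` only serves symbols this host produced.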
5097 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5098 let mut hasher = Sha256::new();
5099 hasher.update(worktree_id.to_proto().to_be_bytes());
5100 hasher.update(path.to_string_lossy().as_bytes());
5101 hasher.update(self.nonce.to_be_bytes());
5102 hasher.finalize().as_slice().try_into().unwrap()
5103 }
5104
5105 async fn handle_open_buffer_by_id(
5106 this: ModelHandle<Self>,
5107 envelope: TypedEnvelope<proto::OpenBufferById>,
5108 _: Arc<Client>,
5109 mut cx: AsyncAppContext,
5110 ) -> Result<proto::OpenBufferResponse> {
5111 let peer_id = envelope.original_sender_id()?;
5112 let buffer = this
5113 .update(&mut cx, |this, cx| {
5114 this.open_buffer_by_id(envelope.payload.id, cx)
5115 })
5116 .await?;
5117 this.update(&mut cx, |this, cx| {
5118 Ok(proto::OpenBufferResponse {
5119 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5120 })
5121 })
5122 }
5123
5124 async fn handle_open_buffer_by_path(
5125 this: ModelHandle<Self>,
5126 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5127 _: Arc<Client>,
5128 mut cx: AsyncAppContext,
5129 ) -> Result<proto::OpenBufferResponse> {
5130 let peer_id = envelope.original_sender_id()?;
5131 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5132 let open_buffer = this.update(&mut cx, |this, cx| {
5133 this.open_buffer(
5134 ProjectPath {
5135 worktree_id,
5136 path: PathBuf::from(envelope.payload.path).into(),
5137 },
5138 cx,
5139 )
5140 });
5141
5142 let buffer = open_buffer.await?;
5143 this.update(&mut cx, |this, cx| {
5144 Ok(proto::OpenBufferResponse {
5145 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5146 })
5147 })
5148 }
5149
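    /// Serializes a project transaction for `peer_id`, sending full buffer state for any
    /// buffer that peer has not seen yet (via `serialize_buffer_for_peer`).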
5150 fn serialize_project_transaction_for_peer(
5151 &mut self,
5152 project_transaction: ProjectTransaction,
5153 peer_id: PeerId,
5154 cx: &AppContext,
5155 ) -> proto::ProjectTransaction {
5156 let mut serialized_transaction = proto::ProjectTransaction {
5157 buffers: Default::default(),
5158 transactions: Default::default(),
5159 };
5160 for (buffer, transaction) in project_transaction.0 {
5161 serialized_transaction
5162 .buffers
5163 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5164 serialized_transaction
5165 .transactions
5166 .push(language::proto::serialize_transaction(&transaction));
5167 }
5168 serialized_transaction
5169 }
5170
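    /// Reconstructs a `ProjectTransaction` received over RPC, opening each referenced
    /// buffer, waiting for the transaction's edits to arrive, and optionally pushing the
    /// transaction onto the buffer's undo history.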
5171 fn deserialize_project_transaction(
5172 &mut self,
5173 message: proto::ProjectTransaction,
5174 push_to_history: bool,
5175 cx: &mut ModelContext<Self>,
5176 ) -> Task<Result<ProjectTransaction>> {
5177 cx.spawn(|this, mut cx| async move {
5178 let mut project_transaction = ProjectTransaction::default();
5179 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5180 let buffer = this
5181 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5182 .await?;
5183 let transaction = language::proto::deserialize_transaction(transaction)?;
5184 project_transaction.0.insert(buffer, transaction);
5185 }
5186
5187 for (buffer, transaction) in &project_transaction.0 {
5188 buffer
5189 .update(&mut cx, |buffer, _| {
5190 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5191 })
5192 .await;
5193
5194 if push_to_history {
5195 buffer.update(&mut cx, |buffer, _| {
5196 buffer.push_transaction(transaction.clone(), Instant::now());
5197 });
5198 }
5199 }
5200
5201 Ok(project_transaction)
5202 })
5203 }
5204
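    /// Serializes a buffer for a peer: the full buffer state is sent the first time that
    /// peer sees the buffer; afterwards only the buffer id is sent.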
5205 fn serialize_buffer_for_peer(
5206 &mut self,
5207 buffer: &ModelHandle<Buffer>,
5208 peer_id: PeerId,
5209 cx: &AppContext,
5210 ) -> proto::Buffer {
5211 let buffer_id = buffer.read(cx).remote_id();
5212 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5213 if shared_buffers.insert(buffer_id) {
5214 proto::Buffer {
5215 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5216 }
5217 } else {
5218 proto::Buffer {
5219 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5220 }
5221 }
5222 }
5223
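    /// Resolves a `proto::Buffer` into a buffer handle: an `Id` variant waits until the
    /// corresponding buffer has been opened locally, while a `State` variant constructs and
    /// registers a new buffer, resolving its file against the matching worktree.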
5224 fn deserialize_buffer(
5225 &mut self,
5226 buffer: proto::Buffer,
5227 cx: &mut ModelContext<Self>,
5228 ) -> Task<Result<ModelHandle<Buffer>>> {
5229 let replica_id = self.replica_id();
5230
5231 let opened_buffer_tx = self.opened_buffer.0.clone();
5232 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5233 cx.spawn(|this, mut cx| async move {
5234 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5235 proto::buffer::Variant::Id(id) => {
5236 let buffer = loop {
5237 let buffer = this.read_with(&cx, |this, cx| {
5238 this.opened_buffers
5239 .get(&id)
5240 .and_then(|buffer| buffer.upgrade(cx))
5241 });
5242 if let Some(buffer) = buffer {
5243 break buffer;
5244 }
5245 opened_buffer_rx
5246 .next()
5247 .await
5248 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5249 };
5250 Ok(buffer)
5251 }
5252 proto::buffer::Variant::State(mut buffer) => {
5253 let mut buffer_worktree = None;
5254 let mut buffer_file = None;
5255 if let Some(file) = buffer.file.take() {
5256 this.read_with(&cx, |this, cx| {
5257 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5258 let worktree =
5259 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5260 anyhow!("no worktree found for id {}", file.worktree_id)
5261 })?;
5262 buffer_file =
5263 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5264 as Arc<dyn language::File>);
5265 buffer_worktree = Some(worktree);
5266 Ok::<_, anyhow::Error>(())
5267 })?;
5268 }
5269
5270 let buffer = cx.add_model(|cx| {
5271 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5272 });
5273
5274 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5275
5276 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5277 Ok(buffer)
5278 }
5279 }
5280 })
5281 }
5282
5283 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5284 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5285 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5286 let start = serialized_symbol
5287 .start
5288 .ok_or_else(|| anyhow!("invalid start"))?;
5289 let end = serialized_symbol
5290 .end
5291 .ok_or_else(|| anyhow!("invalid end"))?;
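        // Note: this `transmute` assumes the wire value of `kind` shares the representation
        // of the type stored in `Symbol::kind`; values outside that type's valid range would
        // be unsound.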
5292 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5293 let path = PathBuf::from(serialized_symbol.path);
5294 let language = self.languages.select_language(&path);
5295 Ok(Symbol {
5296 source_worktree_id,
5297 worktree_id,
5298 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5299 label: language
5300 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5301 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5302 name: serialized_symbol.name,
5303 path,
5304 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5305 kind,
5306 signature: serialized_symbol
5307 .signature
5308 .try_into()
5309 .map_err(|_| anyhow!("invalid signature"))?,
5310 })
5311 }
5312
5313 async fn handle_buffer_saved(
5314 this: ModelHandle<Self>,
5315 envelope: TypedEnvelope<proto::BufferSaved>,
5316 _: Arc<Client>,
5317 mut cx: AsyncAppContext,
5318 ) -> Result<()> {
5319 let version = deserialize_version(envelope.payload.version);
5320 let mtime = envelope
5321 .payload
5322 .mtime
5323 .ok_or_else(|| anyhow!("missing mtime"))?
5324 .into();
5325
5326 this.update(&mut cx, |this, cx| {
5327 let buffer = this
5328 .opened_buffers
5329 .get(&envelope.payload.buffer_id)
5330 .and_then(|buffer| buffer.upgrade(cx));
5331 if let Some(buffer) = buffer {
5332 buffer.update(cx, |buffer, cx| {
5333 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5334 });
5335 }
5336 Ok(())
5337 })
5338 }
5339
5340 async fn handle_buffer_reloaded(
5341 this: ModelHandle<Self>,
5342 envelope: TypedEnvelope<proto::BufferReloaded>,
5343 _: Arc<Client>,
5344 mut cx: AsyncAppContext,
5345 ) -> Result<()> {
5346 let payload = envelope.payload.clone();
5347 let version = deserialize_version(payload.version);
5348 let mtime = payload
5349 .mtime
5350 .ok_or_else(|| anyhow!("missing mtime"))?
5351 .into();
5352 this.update(&mut cx, |this, cx| {
5353 let buffer = this
5354 .opened_buffers
5355 .get(&payload.buffer_id)
5356 .and_then(|buffer| buffer.upgrade(cx));
5357 if let Some(buffer) = buffer {
5358 buffer.update(cx, |buffer, cx| {
5359 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5360 });
5361 }
5362 Ok(())
5363 })
5364 }
5365
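    /// Fuzzy-matches `query` against the paths of all visible worktrees on background
    /// threads, prefixing results with the worktree root name when more than one worktree
    /// is visible.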
5366 pub fn match_paths<'a>(
5367 &self,
5368 query: &'a str,
5369 include_ignored: bool,
5370 smart_case: bool,
5371 max_results: usize,
5372 cancel_flag: &'a AtomicBool,
5373 cx: &AppContext,
5374 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5375 let worktrees = self
5376 .worktrees(cx)
5377 .filter(|worktree| worktree.read(cx).is_visible())
5378 .collect::<Vec<_>>();
5379 let include_root_name = worktrees.len() > 1;
5380 let candidate_sets = worktrees
5381 .into_iter()
5382 .map(|worktree| CandidateSet {
5383 snapshot: worktree.read(cx).snapshot(),
5384 include_ignored,
5385 include_root_name,
5386 })
5387 .collect::<Vec<_>>();
5388
5389 let background = cx.background().clone();
5390 async move {
5391 fuzzy::match_paths(
5392 candidate_sets.as_slice(),
5393 query,
5394 smart_case,
5395 max_results,
5396 cancel_flag,
5397 background,
5398 )
5399 .await
5400 }
5401 }
5402
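    /// Converts LSP text edits into anchored buffer edits against the snapshot that was sent
    /// to the server (see `buffer_snapshot_for_lsp_version`), merging adjacent edits and
    /// diffing multiline replacements so anchors in unchanged regions are preserved.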
5403 fn edits_from_lsp(
5404 &mut self,
5405 buffer: &ModelHandle<Buffer>,
5406 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5407 version: Option<i32>,
5408 cx: &mut ModelContext<Self>,
5409 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5410 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5411 cx.background().spawn(async move {
5412 let snapshot = snapshot?;
5413 let mut lsp_edits = lsp_edits
5414 .into_iter()
5415 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5416 .collect::<Vec<_>>();
5417 lsp_edits.sort_by_key(|(range, _)| range.start);
5418
5419 let mut lsp_edits = lsp_edits.into_iter().peekable();
5420 let mut edits = Vec::new();
5421 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5422 // Combine any LSP edits that are adjacent.
5423 //
5424 // Also, combine LSP edits that are separated from each other by only
5425 // a newline. This is important because for some code actions,
5426 // Rust-analyzer rewrites the entire buffer via a series of edits that
5427 // are separated by unchanged newline characters.
5428 //
5429 // In order for the diffing logic below to work properly, any edits that
5430 // cancel each other out must be combined into one.
5431 while let Some((next_range, next_text)) = lsp_edits.peek() {
5432 if next_range.start > range.end {
5433 if next_range.start.row > range.end.row + 1
5434 || next_range.start.column > 0
5435 || snapshot.clip_point_utf16(
5436 PointUtf16::new(range.end.row, u32::MAX),
5437 Bias::Left,
5438 ) > range.end
5439 {
5440 break;
5441 }
5442 new_text.push('\n');
5443 }
5444 range.end = next_range.end;
5445 new_text.push_str(&next_text);
5446 lsp_edits.next();
5447 }
5448
5449 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5450 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5451 {
5452 return Err(anyhow!("invalid edits received from language server"));
5453 }
5454
5455 // For multiline edits, perform a diff of the old and new text so that
5456 // we can identify the changes more precisely, preserving the locations
5457 // of any anchors positioned in the unchanged regions.
5458 if range.end.row > range.start.row {
5459 let mut offset = range.start.to_offset(&snapshot);
5460 let old_text = snapshot.text_for_range(range).collect::<String>();
5461
5462 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5463 let mut moved_since_edit = true;
5464 for change in diff.iter_all_changes() {
5465 let tag = change.tag();
5466 let value = change.value();
5467 match tag {
5468 ChangeTag::Equal => {
5469 offset += value.len();
5470 moved_since_edit = true;
5471 }
5472 ChangeTag::Delete => {
5473 let start = snapshot.anchor_after(offset);
5474 let end = snapshot.anchor_before(offset + value.len());
5475 if moved_since_edit {
5476 edits.push((start..end, String::new()));
5477 } else {
5478 edits.last_mut().unwrap().0.end = end;
5479 }
5480 offset += value.len();
5481 moved_since_edit = false;
5482 }
5483 ChangeTag::Insert => {
5484 if moved_since_edit {
5485 let anchor = snapshot.anchor_after(offset);
5486 edits.push((anchor.clone()..anchor, value.to_string()));
5487 } else {
5488 edits.last_mut().unwrap().1.push_str(value);
5489 }
5490 moved_since_edit = false;
5491 }
5492 }
5493 }
5494 } else if range.end == range.start {
5495 let anchor = snapshot.anchor_after(range.start);
5496 edits.push((anchor.clone()..anchor, new_text));
5497 } else {
5498 let edit_start = snapshot.anchor_after(range.start);
5499 let edit_end = snapshot.anchor_before(range.end);
5500 edits.push((edit_start..edit_end, new_text));
5501 }
5502 }
5503
5504 Ok(edits)
5505 })
5506 }
5507
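    /// Looks up the buffer snapshot corresponding to the document version reported by the
    /// language server, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older
    /// than the requested one. With no version, the buffer's current text snapshot is used.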
5508 fn buffer_snapshot_for_lsp_version(
5509 &mut self,
5510 buffer: &ModelHandle<Buffer>,
5511 version: Option<i32>,
5512 cx: &AppContext,
5513 ) -> Result<TextBufferSnapshot> {
5514 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5515
5516 if let Some(version) = version {
5517 let buffer_id = buffer.read(cx).remote_id();
5518 let snapshots = self
5519 .buffer_snapshots
5520 .get_mut(&buffer_id)
5521 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5522 let mut found_snapshot = None;
5523 snapshots.retain(|(snapshot_version, snapshot)| {
5524 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5525 false
5526 } else {
5527 if *snapshot_version == version {
5528 found_snapshot = Some(snapshot.clone());
5529 }
5530 true
5531 }
5532 });
5533
5534 found_snapshot.ok_or_else(|| {
5535 anyhow!(
5536 "snapshot not found for buffer {} at version {}",
5537 buffer_id,
5538 version
5539 )
5540 })
5541 } else {
5542 Ok(buffer.read(cx).text_snapshot())
5543 }
5544 }
5545
5546 fn language_server_for_buffer(
5547 &self,
5548 buffer: &Buffer,
5549 cx: &AppContext,
5550 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5551 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5552 let worktree_id = file.worktree_id(cx);
5553 self.language_servers
5554 .get(&(worktree_id, language.lsp_adapter()?.name()))
5555 } else {
5556 None
5557 }
5558 }
5559}
5560
5561impl ProjectStore {
5562 pub fn new(db: Arc<Db>) -> Self {
5563 Self {
5564 db,
5565 projects: Default::default(),
5566 }
5567 }
5568
5569 pub fn projects<'a>(
5570 &'a self,
5571 cx: &'a AppContext,
5572 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5573 self.projects
5574 .iter()
5575 .filter_map(|project| project.upgrade(cx))
5576 }
5577
5578 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5579 if let Err(ix) = self
5580 .projects
5581 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5582 {
5583 self.projects.insert(ix, project);
5584 }
5585 cx.notify();
5586 }
5587
5588 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5589 let mut did_change = false;
5590 self.projects.retain(|project| {
5591 if project.is_upgradable(cx) {
5592 true
5593 } else {
5594 did_change = true;
5595 false
5596 }
5597 });
5598 if did_change {
5599 cx.notify();
5600 }
5601 }
5602}
5603
5604impl WorktreeHandle {
5605 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5606 match self {
5607 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5608 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5609 }
5610 }
5611}
5612
5613impl OpenBuffer {
5614 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5615 match self {
5616 OpenBuffer::Strong(handle) => Some(handle.clone()),
5617 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5618 OpenBuffer::Loading(_) => None,
5619 }
5620 }
5621}
5622
5623struct CandidateSet {
5624 snapshot: Snapshot,
5625 include_ignored: bool,
5626 include_root_name: bool,
5627}
5628
5629impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5630 type Candidates = CandidateSetIter<'a>;
5631
5632 fn id(&self) -> usize {
5633 self.snapshot.id().to_usize()
5634 }
5635
5636 fn len(&self) -> usize {
5637 if self.include_ignored {
5638 self.snapshot.file_count()
5639 } else {
5640 self.snapshot.visible_file_count()
5641 }
5642 }
5643
5644 fn prefix(&self) -> Arc<str> {
5645 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5646 self.snapshot.root_name().into()
5647 } else if self.include_root_name {
5648 format!("{}/", self.snapshot.root_name()).into()
5649 } else {
5650 "".into()
5651 }
5652 }
5653
5654 fn candidates(&'a self, start: usize) -> Self::Candidates {
5655 CandidateSetIter {
5656 traversal: self.snapshot.files(self.include_ignored, start),
5657 }
5658 }
5659}
5660
5661struct CandidateSetIter<'a> {
5662 traversal: Traversal<'a>,
5663}
5664
5665impl<'a> Iterator for CandidateSetIter<'a> {
5666 type Item = PathMatchCandidate<'a>;
5667
5668 fn next(&mut self) -> Option<Self::Item> {
5669 self.traversal.next().map(|entry| {
5670 if let EntryKind::File(char_bag) = entry.kind {
5671 PathMatchCandidate {
5672 path: &entry.path,
5673 char_bag,
5674 }
5675 } else {
5676 unreachable!()
5677 }
5678 })
5679 }
5680}
5681
5682impl Entity for ProjectStore {
5683 type Event = ();
5684}
5685
5686impl Entity for Project {
5687 type Event = Event;
5688
5689 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5690 self.project_store.update(cx, ProjectStore::prune_projects);
5691
5692 match &self.client_state {
5693 ProjectClientState::Local { remote_id_rx, .. } => {
5694 if let Some(project_id) = *remote_id_rx.borrow() {
5695 self.client
5696 .send(proto::UnregisterProject { project_id })
5697 .log_err();
5698 }
5699 }
5700 ProjectClientState::Remote { remote_id, .. } => {
5701 self.client
5702 .send(proto::LeaveProject {
5703 project_id: *remote_id,
5704 })
5705 .log_err();
5706 }
5707 }
5708 }
5709
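    // Before the app quits, ask every running language server to shut down and
    // wait for all of them to finish.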
5710 fn app_will_quit(
5711 &mut self,
5712 _: &mut MutableAppContext,
5713 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5714 let shutdown_futures = self
5715 .language_servers
5716 .drain()
5717 .filter_map(|(_, (_, server))| server.shutdown())
5718 .collect::<Vec<_>>();
5719 Some(
5720 async move {
5721 futures::future::join_all(shutdown_futures).await;
5722 }
5723 .boxed(),
5724 )
5725 }
5726}
5727
5728impl Collaborator {
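    /// Builds a `Collaborator` from its protobuf representation, fetching the
    /// collaborating user from the `UserStore`.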
5729 fn from_proto(
5730 message: proto::Collaborator,
5731 user_store: &ModelHandle<UserStore>,
5732 cx: &mut AsyncAppContext,
5733 ) -> impl Future<Output = Result<Self>> {
5734 let user = user_store.update(cx, |user_store, cx| {
5735 user_store.fetch_user(message.user_id, cx)
5736 });
5737
5738 async move {
5739 Ok(Self {
5740 peer_id: PeerId(message.peer_id),
5741 user: user.await?,
5742 replica_id: message.replica_id as ReplicaId,
5743 })
5744 }
5745 }
5746}
5747
5748impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5749 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5750 Self {
5751 worktree_id,
5752 path: path.as_ref().into(),
5753 }
5754 }
5755}
5756
5757impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5758 fn from(options: lsp::CreateFileOptions) -> Self {
5759 Self {
5760 overwrite: options.overwrite.unwrap_or(false),
5761 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5762 }
5763 }
5764}
5765
5766impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5767 fn from(options: lsp::RenameFileOptions) -> Self {
5768 Self {
5769 overwrite: options.overwrite.unwrap_or(false),
5770 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5771 }
5772 }
5773}
5774
5775impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5776 fn from(options: lsp::DeleteFileOptions) -> Self {
5777 Self {
5778 recursive: options.recursive.unwrap_or(false),
5779 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5780 }
5781 }
5782}
5783
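/// Converts a `Symbol` into its protobuf representation for sending over the wire.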
5784fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5785 proto::Symbol {
5786 source_worktree_id: symbol.source_worktree_id.to_proto(),
5787 worktree_id: symbol.worktree_id.to_proto(),
5788 language_server_name: symbol.language_server_name.0.to_string(),
5789 name: symbol.name.clone(),
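        // Assumes the protobuf `kind` field shares `lsp::SymbolKind`'s underlying
        // integer representation; a mismatch here would produce the wrong kind.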
5790 kind: unsafe { mem::transmute(symbol.kind) },
5791 path: symbol.path.to_string_lossy().to_string(),
5792 start: Some(proto::Point {
5793 row: symbol.range.start.row,
5794 column: symbol.range.start.column,
5795 }),
5796 end: Some(proto::Point {
5797 row: symbol.range.end.row,
5798 column: symbol.range.end.column,
5799 }),
5800 signature: symbol.signature.to_vec(),
5801 }
5802}
5803
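/// Computes `path` relative to `base`, inserting `..` components wherever the
/// two paths diverge.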
5804fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5805 let mut path_components = path.components();
5806 let mut base_components = base.components();
5807 let mut components: Vec<Component> = Vec::new();
5808 loop {
5809 match (path_components.next(), base_components.next()) {
5810 (None, None) => break,
5811 (Some(a), None) => {
5812 components.push(a);
5813 components.extend(path_components.by_ref());
5814 break;
5815 }
5816 (None, _) => components.push(Component::ParentDir),
5817 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5818 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5819 (Some(a), Some(_)) => {
5820 components.push(Component::ParentDir);
5821 for _ in base_components {
5822 components.push(Component::ParentDir);
5823 }
5824 components.push(a);
5825 components.extend(path_components.by_ref());
5826 break;
5827 }
5828 }
5829 }
5830 components.iter().map(|c| c.as_os_str()).collect()
5831}
5832
5833impl Item for Buffer {
5834 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5835 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5836 }
5837}
5838
5839#[cfg(test)]
5840mod tests {
5841 use crate::worktree::WorktreeHandle;
5842
5843 use super::{Event, *};
5844 use fs::RealFs;
5845 use futures::{future, StreamExt};
5846 use gpui::{executor::Deterministic, test::subscribe};
5847 use language::{
5848 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5849 OffsetRangeExt, Point, ToPoint,
5850 };
5851 use lsp::Url;
5852 use serde_json::json;
5853 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5854 use unindent::Unindent as _;
5855 use util::{assert_set_eq, test::temp_tree};
5856
5857 #[gpui::test]
5858 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5859 let dir = temp_tree(json!({
5860 "root": {
5861 "apple": "",
5862 "banana": {
5863 "carrot": {
5864 "date": "",
5865 "endive": "",
5866 }
5867 },
5868 "fennel": {
5869 "grape": "",
5870 }
5871 }
5872 }));
5873
5874 let root_link_path = dir.path().join("root_link");
5875 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5876 unix::fs::symlink(
5877 &dir.path().join("root/fennel"),
5878 &dir.path().join("root/finnochio"),
5879 )
5880 .unwrap();
5881
5882 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5883
5884 project.read_with(cx, |project, cx| {
5885 let tree = project.worktrees(cx).next().unwrap().read(cx);
5886 assert_eq!(tree.file_count(), 5);
5887 assert_eq!(
5888 tree.inode_for_path("fennel/grape"),
5889 tree.inode_for_path("finnochio/grape")
5890 );
5891 });
5892
5893 let cancel_flag = Default::default();
5894 let results = project
5895 .read_with(cx, |project, cx| {
5896 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5897 })
5898 .await;
5899 assert_eq!(
5900 results
5901 .into_iter()
5902 .map(|result| result.path)
5903 .collect::<Vec<Arc<Path>>>(),
5904 vec![
5905 PathBuf::from("banana/carrot/date").into(),
5906 PathBuf::from("banana/carrot/endive").into(),
5907 ]
5908 );
5909 }
5910
5911 #[gpui::test]
5912 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5913 cx.foreground().forbid_parking();
5914
5915 let mut rust_language = Language::new(
5916 LanguageConfig {
5917 name: "Rust".into(),
5918 path_suffixes: vec!["rs".to_string()],
5919 ..Default::default()
5920 },
5921 Some(tree_sitter_rust::language()),
5922 );
5923 let mut json_language = Language::new(
5924 LanguageConfig {
5925 name: "JSON".into(),
5926 path_suffixes: vec!["json".to_string()],
5927 ..Default::default()
5928 },
5929 None,
5930 );
5931 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5932 name: "the-rust-language-server",
5933 capabilities: lsp::ServerCapabilities {
5934 completion_provider: Some(lsp::CompletionOptions {
5935 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5936 ..Default::default()
5937 }),
5938 ..Default::default()
5939 },
5940 ..Default::default()
5941 });
5942 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5943 name: "the-json-language-server",
5944 capabilities: lsp::ServerCapabilities {
5945 completion_provider: Some(lsp::CompletionOptions {
5946 trigger_characters: Some(vec![":".to_string()]),
5947 ..Default::default()
5948 }),
5949 ..Default::default()
5950 },
5951 ..Default::default()
5952 });
5953
5954 let fs = FakeFs::new(cx.background());
5955 fs.insert_tree(
5956 "/the-root",
5957 json!({
5958 "test.rs": "const A: i32 = 1;",
5959 "test2.rs": "",
5960 "Cargo.toml": "a = 1",
5961 "package.json": "{\"a\": 1}",
5962 }),
5963 )
5964 .await;
5965
5966 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5967 project.update(cx, |project, _| {
5968 project.languages.add(Arc::new(rust_language));
5969 project.languages.add(Arc::new(json_language));
5970 });
5971
5972 // Open a buffer without an associated language server.
5973 let toml_buffer = project
5974 .update(cx, |project, cx| {
5975 project.open_local_buffer("/the-root/Cargo.toml", cx)
5976 })
5977 .await
5978 .unwrap();
5979
5980 // Open a buffer with an associated language server.
5981 let rust_buffer = project
5982 .update(cx, |project, cx| {
5983 project.open_local_buffer("/the-root/test.rs", cx)
5984 })
5985 .await
5986 .unwrap();
5987
5988 // A server is started up, and it is notified about Rust files.
5989 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5990 assert_eq!(
5991 fake_rust_server
5992 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5993 .await
5994 .text_document,
5995 lsp::TextDocumentItem {
5996 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5997 version: 0,
5998 text: "const A: i32 = 1;".to_string(),
5999 language_id: Default::default()
6000 }
6001 );
6002
6003 // The buffer is configured based on the language server's capabilities.
6004 rust_buffer.read_with(cx, |buffer, _| {
6005 assert_eq!(
6006 buffer.completion_triggers(),
6007 &[".".to_string(), "::".to_string()]
6008 );
6009 });
6010 toml_buffer.read_with(cx, |buffer, _| {
6011 assert!(buffer.completion_triggers().is_empty());
6012 });
6013
6014 // Edit a buffer. The changes are reported to the language server.
6015 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6016 assert_eq!(
6017 fake_rust_server
6018 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6019 .await
6020 .text_document,
6021 lsp::VersionedTextDocumentIdentifier::new(
6022 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6023 1
6024 )
6025 );
6026
6027 // Open a third buffer with a different associated language server.
6028 let json_buffer = project
6029 .update(cx, |project, cx| {
6030 project.open_local_buffer("/the-root/package.json", cx)
6031 })
6032 .await
6033 .unwrap();
6034
        // A JSON language server is started up and is only notified about the JSON buffer.
6036 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6037 assert_eq!(
6038 fake_json_server
6039 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6040 .await
6041 .text_document,
6042 lsp::TextDocumentItem {
6043 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6044 version: 0,
6045 text: "{\"a\": 1}".to_string(),
6046 language_id: Default::default()
6047 }
6048 );
6049
6050 // This buffer is configured based on the second language server's
6051 // capabilities.
6052 json_buffer.read_with(cx, |buffer, _| {
6053 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6054 });
6055
6056 // When opening another buffer whose language server is already running,
6057 // it is also configured based on the existing language server's capabilities.
6058 let rust_buffer2 = project
6059 .update(cx, |project, cx| {
6060 project.open_local_buffer("/the-root/test2.rs", cx)
6061 })
6062 .await
6063 .unwrap();
6064 rust_buffer2.read_with(cx, |buffer, _| {
6065 assert_eq!(
6066 buffer.completion_triggers(),
6067 &[".".to_string(), "::".to_string()]
6068 );
6069 });
6070
6071 // Changes are reported only to servers matching the buffer's language.
6072 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6073 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6074 assert_eq!(
6075 fake_rust_server
6076 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6077 .await
6078 .text_document,
6079 lsp::VersionedTextDocumentIdentifier::new(
6080 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6081 1
6082 )
6083 );
6084
6085 // Save notifications are reported to all servers.
6086 toml_buffer
6087 .update(cx, |buffer, cx| buffer.save(cx))
6088 .await
6089 .unwrap();
6090 assert_eq!(
6091 fake_rust_server
6092 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6093 .await
6094 .text_document,
6095 lsp::TextDocumentIdentifier::new(
6096 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6097 )
6098 );
6099 assert_eq!(
6100 fake_json_server
6101 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6102 .await
6103 .text_document,
6104 lsp::TextDocumentIdentifier::new(
6105 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6106 )
6107 );
6108
6109 // Renames are reported only to servers matching the buffer's language.
6110 fs.rename(
6111 Path::new("/the-root/test2.rs"),
6112 Path::new("/the-root/test3.rs"),
6113 Default::default(),
6114 )
6115 .await
6116 .unwrap();
6117 assert_eq!(
6118 fake_rust_server
6119 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6120 .await
6121 .text_document,
6122 lsp::TextDocumentIdentifier::new(
6123 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6124 ),
6125 );
6126 assert_eq!(
6127 fake_rust_server
6128 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6129 .await
6130 .text_document,
6131 lsp::TextDocumentItem {
6132 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6133 version: 0,
6134 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6135 language_id: Default::default()
6136 },
6137 );
6138
6139 rust_buffer2.update(cx, |buffer, cx| {
6140 buffer.update_diagnostics(
6141 DiagnosticSet::from_sorted_entries(
6142 vec![DiagnosticEntry {
6143 diagnostic: Default::default(),
6144 range: Anchor::MIN..Anchor::MAX,
6145 }],
6146 &buffer.snapshot(),
6147 ),
6148 cx,
6149 );
6150 assert_eq!(
6151 buffer
6152 .snapshot()
6153 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6154 .count(),
6155 1
6156 );
6157 });
6158
        // When the rename changes the file's extension, the buffer is closed on the old
        // language server and opened on the new one.
6161 fs.rename(
6162 Path::new("/the-root/test3.rs"),
6163 Path::new("/the-root/test3.json"),
6164 Default::default(),
6165 )
6166 .await
6167 .unwrap();
6168 assert_eq!(
6169 fake_rust_server
6170 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6171 .await
6172 .text_document,
6173 lsp::TextDocumentIdentifier::new(
6174 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6175 ),
6176 );
6177 assert_eq!(
6178 fake_json_server
6179 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6180 .await
6181 .text_document,
6182 lsp::TextDocumentItem {
6183 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6184 version: 0,
6185 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6186 language_id: Default::default()
6187 },
6188 );
6189
6190 // We clear the diagnostics, since the language has changed.
6191 rust_buffer2.read_with(cx, |buffer, _| {
6192 assert_eq!(
6193 buffer
6194 .snapshot()
6195 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6196 .count(),
6197 0
6198 );
6199 });
6200
        // The renamed file's version resets after the language server changes.
6202 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6203 assert_eq!(
6204 fake_json_server
6205 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6206 .await
6207 .text_document,
6208 lsp::VersionedTextDocumentIdentifier::new(
6209 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6210 1
6211 )
6212 );
6213
6214 // Restart language servers
6215 project.update(cx, |project, cx| {
6216 project.restart_language_servers_for_buffers(
6217 vec![rust_buffer.clone(), json_buffer.clone()],
6218 cx,
6219 );
6220 });
6221
6222 let mut rust_shutdown_requests = fake_rust_server
6223 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6224 let mut json_shutdown_requests = fake_json_server
6225 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6226 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6227
6228 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6229 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6230
        // Ensure the Rust document is reopened in the new Rust language server.
6232 assert_eq!(
6233 fake_rust_server
6234 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6235 .await
6236 .text_document,
6237 lsp::TextDocumentItem {
6238 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6239 version: 1,
6240 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6241 language_id: Default::default()
6242 }
6243 );
6244
        // Ensure the JSON documents are reopened in the new JSON language server.
6246 assert_set_eq!(
6247 [
6248 fake_json_server
6249 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6250 .await
6251 .text_document,
6252 fake_json_server
6253 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6254 .await
6255 .text_document,
6256 ],
6257 [
6258 lsp::TextDocumentItem {
6259 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6260 version: 0,
6261 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6262 language_id: Default::default()
6263 },
6264 lsp::TextDocumentItem {
6265 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6266 version: 1,
6267 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6268 language_id: Default::default()
6269 }
6270 ]
6271 );
6272
6273 // Close notifications are reported only to servers matching the buffer's language.
6274 cx.update(|_| drop(json_buffer));
6275 let close_message = lsp::DidCloseTextDocumentParams {
6276 text_document: lsp::TextDocumentIdentifier::new(
6277 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6278 ),
6279 };
6280 assert_eq!(
6281 fake_json_server
6282 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6283 .await,
6284 close_message,
6285 );
6286 }
6287
6288 #[gpui::test]
6289 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6290 cx.foreground().forbid_parking();
6291
6292 let fs = FakeFs::new(cx.background());
6293 fs.insert_tree(
6294 "/dir",
6295 json!({
6296 "a.rs": "let a = 1;",
6297 "b.rs": "let b = 2;"
6298 }),
6299 )
6300 .await;
6301
6302 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6303
6304 let buffer_a = project
6305 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6306 .await
6307 .unwrap();
6308 let buffer_b = project
6309 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6310 .await
6311 .unwrap();
6312
6313 project.update(cx, |project, cx| {
6314 project
6315 .update_diagnostics(
6316 0,
6317 lsp::PublishDiagnosticsParams {
6318 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6319 version: None,
6320 diagnostics: vec![lsp::Diagnostic {
6321 range: lsp::Range::new(
6322 lsp::Position::new(0, 4),
6323 lsp::Position::new(0, 5),
6324 ),
6325 severity: Some(lsp::DiagnosticSeverity::ERROR),
6326 message: "error 1".to_string(),
6327 ..Default::default()
6328 }],
6329 },
6330 &[],
6331 cx,
6332 )
6333 .unwrap();
6334 project
6335 .update_diagnostics(
6336 0,
6337 lsp::PublishDiagnosticsParams {
6338 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6339 version: None,
6340 diagnostics: vec![lsp::Diagnostic {
6341 range: lsp::Range::new(
6342 lsp::Position::new(0, 4),
6343 lsp::Position::new(0, 5),
6344 ),
6345 severity: Some(lsp::DiagnosticSeverity::WARNING),
6346 message: "error 2".to_string(),
6347 ..Default::default()
6348 }],
6349 },
6350 &[],
6351 cx,
6352 )
6353 .unwrap();
6354 });
6355
6356 buffer_a.read_with(cx, |buffer, _| {
6357 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6358 assert_eq!(
6359 chunks
6360 .iter()
6361 .map(|(s, d)| (s.as_str(), *d))
6362 .collect::<Vec<_>>(),
6363 &[
6364 ("let ", None),
6365 ("a", Some(DiagnosticSeverity::ERROR)),
6366 (" = 1;", None),
6367 ]
6368 );
6369 });
6370 buffer_b.read_with(cx, |buffer, _| {
6371 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6372 assert_eq!(
6373 chunks
6374 .iter()
6375 .map(|(s, d)| (s.as_str(), *d))
6376 .collect::<Vec<_>>(),
6377 &[
6378 ("let ", None),
6379 ("b", Some(DiagnosticSeverity::WARNING)),
6380 (" = 2;", None),
6381 ]
6382 );
6383 });
6384 }
6385
6386 #[gpui::test]
6387 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6388 cx.foreground().forbid_parking();
6389
6390 let progress_token = "the-progress-token";
6391 let mut language = Language::new(
6392 LanguageConfig {
6393 name: "Rust".into(),
6394 path_suffixes: vec!["rs".to_string()],
6395 ..Default::default()
6396 },
6397 Some(tree_sitter_rust::language()),
6398 );
6399 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6400 disk_based_diagnostics_progress_token: Some(progress_token),
6401 disk_based_diagnostics_sources: &["disk"],
6402 ..Default::default()
6403 });
6404
6405 let fs = FakeFs::new(cx.background());
6406 fs.insert_tree(
6407 "/dir",
6408 json!({
6409 "a.rs": "fn a() { A }",
6410 "b.rs": "const y: i32 = 1",
6411 }),
6412 )
6413 .await;
6414
6415 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6416 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6417 let worktree_id =
6418 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6419
        // Cause the worktree to start the fake language server.
6421 let _buffer = project
6422 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6423 .await
6424 .unwrap();
6425
6426 let mut events = subscribe(&project, cx);
6427
6428 let mut fake_server = fake_servers.next().await.unwrap();
6429 fake_server.start_progress(progress_token).await;
6430 assert_eq!(
6431 events.next().await.unwrap(),
6432 Event::DiskBasedDiagnosticsStarted {
6433 language_server_id: 0,
6434 }
6435 );
6436
6437 fake_server.start_progress(progress_token).await;
6438 fake_server.end_progress(progress_token).await;
6439 fake_server.start_progress(progress_token).await;
6440
6441 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6442 lsp::PublishDiagnosticsParams {
6443 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6444 version: None,
6445 diagnostics: vec![lsp::Diagnostic {
6446 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6447 severity: Some(lsp::DiagnosticSeverity::ERROR),
6448 message: "undefined variable 'A'".to_string(),
6449 ..Default::default()
6450 }],
6451 },
6452 );
6453 assert_eq!(
6454 events.next().await.unwrap(),
6455 Event::DiagnosticsUpdated {
6456 language_server_id: 0,
6457 path: (worktree_id, Path::new("a.rs")).into()
6458 }
6459 );
6460
6461 fake_server.end_progress(progress_token).await;
6462 fake_server.end_progress(progress_token).await;
6463 assert_eq!(
6464 events.next().await.unwrap(),
6465 Event::DiskBasedDiagnosticsFinished {
6466 language_server_id: 0
6467 }
6468 );
6469
6470 let buffer = project
6471 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6472 .await
6473 .unwrap();
6474
6475 buffer.read_with(cx, |buffer, _| {
6476 let snapshot = buffer.snapshot();
6477 let diagnostics = snapshot
6478 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6479 .collect::<Vec<_>>();
6480 assert_eq!(
6481 diagnostics,
6482 &[DiagnosticEntry {
6483 range: Point::new(0, 9)..Point::new(0, 10),
6484 diagnostic: Diagnostic {
6485 severity: lsp::DiagnosticSeverity::ERROR,
6486 message: "undefined variable 'A'".to_string(),
6487 group_id: 0,
6488 is_primary: true,
6489 ..Default::default()
6490 }
6491 }]
6492 )
6493 });
6494
6495 // Ensure publishing empty diagnostics twice only results in one update event.
6496 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6497 lsp::PublishDiagnosticsParams {
6498 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6499 version: None,
6500 diagnostics: Default::default(),
6501 },
6502 );
6503 assert_eq!(
6504 events.next().await.unwrap(),
6505 Event::DiagnosticsUpdated {
6506 language_server_id: 0,
6507 path: (worktree_id, Path::new("a.rs")).into()
6508 }
6509 );
6510
6511 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6512 lsp::PublishDiagnosticsParams {
6513 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6514 version: None,
6515 diagnostics: Default::default(),
6516 },
6517 );
6518 cx.foreground().run_until_parked();
6519 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6520 }
6521
6522 #[gpui::test]
6523 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6524 cx.foreground().forbid_parking();
6525
6526 let progress_token = "the-progress-token";
6527 let mut language = Language::new(
6528 LanguageConfig {
6529 path_suffixes: vec!["rs".to_string()],
6530 ..Default::default()
6531 },
6532 None,
6533 );
6534 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6535 disk_based_diagnostics_sources: &["disk"],
6536 disk_based_diagnostics_progress_token: Some(progress_token),
6537 ..Default::default()
6538 });
6539
6540 let fs = FakeFs::new(cx.background());
6541 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6542
6543 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6544 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6545
6546 let buffer = project
6547 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6548 .await
6549 .unwrap();
6550
6551 // Simulate diagnostics starting to update.
6552 let mut fake_server = fake_servers.next().await.unwrap();
6553 fake_server.start_progress(progress_token).await;
6554
6555 // Restart the server before the diagnostics finish updating.
6556 project.update(cx, |project, cx| {
6557 project.restart_language_servers_for_buffers([buffer], cx);
6558 });
6559 let mut events = subscribe(&project, cx);
6560
6561 // Simulate the newly started server sending more diagnostics.
6562 let mut fake_server = fake_servers.next().await.unwrap();
6563 fake_server.start_progress(progress_token).await;
6564 assert_eq!(
6565 events.next().await.unwrap(),
6566 Event::DiskBasedDiagnosticsStarted {
6567 language_server_id: 1
6568 }
6569 );
6570 project.read_with(cx, |project, _| {
6571 assert_eq!(
6572 project
6573 .language_servers_running_disk_based_diagnostics()
6574 .collect::<Vec<_>>(),
6575 [1]
6576 );
6577 });
6578
6579 // All diagnostics are considered done, despite the old server's diagnostic
6580 // task never completing.
6581 fake_server.end_progress(progress_token).await;
6582 assert_eq!(
6583 events.next().await.unwrap(),
6584 Event::DiskBasedDiagnosticsFinished {
6585 language_server_id: 1
6586 }
6587 );
6588 project.read_with(cx, |project, _| {
6589 assert_eq!(
6590 project
6591 .language_servers_running_disk_based_diagnostics()
6592 .collect::<Vec<_>>(),
6593 [0; 0]
6594 );
6595 });
6596 }
6597
6598 #[gpui::test]
6599 async fn test_toggling_enable_language_server(
6600 deterministic: Arc<Deterministic>,
6601 cx: &mut gpui::TestAppContext,
6602 ) {
6603 deterministic.forbid_parking();
6604
6605 let mut rust = Language::new(
6606 LanguageConfig {
6607 name: Arc::from("Rust"),
6608 path_suffixes: vec!["rs".to_string()],
6609 ..Default::default()
6610 },
6611 None,
6612 );
6613 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6614 name: "rust-lsp",
6615 ..Default::default()
6616 });
6617 let mut js = Language::new(
6618 LanguageConfig {
6619 name: Arc::from("JavaScript"),
6620 path_suffixes: vec!["js".to_string()],
6621 ..Default::default()
6622 },
6623 None,
6624 );
6625 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6626 name: "js-lsp",
6627 ..Default::default()
6628 });
6629
6630 let fs = FakeFs::new(cx.background());
6631 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6632 .await;
6633
6634 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6635 project.update(cx, |project, _| {
6636 project.languages.add(Arc::new(rust));
6637 project.languages.add(Arc::new(js));
6638 });
6639
6640 let _rs_buffer = project
6641 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6642 .await
6643 .unwrap();
6644 let _js_buffer = project
6645 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6646 .await
6647 .unwrap();
6648
6649 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6650 assert_eq!(
6651 fake_rust_server_1
6652 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6653 .await
6654 .text_document
6655 .uri
6656 .as_str(),
6657 "file:///dir/a.rs"
6658 );
6659
6660 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6661 assert_eq!(
6662 fake_js_server
6663 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6664 .await
6665 .text_document
6666 .uri
6667 .as_str(),
6668 "file:///dir/b.js"
6669 );
6670
        // Disable the Rust language server, ensuring that only that server is stopped.
6672 cx.update(|cx| {
6673 cx.update_global(|settings: &mut Settings, _| {
6674 settings.language_overrides.insert(
6675 Arc::from("Rust"),
6676 settings::LanguageSettings {
6677 enable_language_server: Some(false),
6678 ..Default::default()
6679 },
6680 );
6681 })
6682 });
6683 fake_rust_server_1
6684 .receive_notification::<lsp::notification::Exit>()
6685 .await;
6686
6687 // Enable Rust and disable JavaScript language servers, ensuring that the
6688 // former gets started again and that the latter stops.
6689 cx.update(|cx| {
6690 cx.update_global(|settings: &mut Settings, _| {
6691 settings.language_overrides.insert(
6692 Arc::from("Rust"),
6693 settings::LanguageSettings {
6694 enable_language_server: Some(true),
6695 ..Default::default()
6696 },
6697 );
6698 settings.language_overrides.insert(
6699 Arc::from("JavaScript"),
6700 settings::LanguageSettings {
6701 enable_language_server: Some(false),
6702 ..Default::default()
6703 },
6704 );
6705 })
6706 });
6707 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6708 assert_eq!(
6709 fake_rust_server_2
6710 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6711 .await
6712 .text_document
6713 .uri
6714 .as_str(),
6715 "file:///dir/a.rs"
6716 );
6717 fake_js_server
6718 .receive_notification::<lsp::notification::Exit>()
6719 .await;
6720 }
6721
6722 #[gpui::test]
6723 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6724 cx.foreground().forbid_parking();
6725
6726 let mut language = Language::new(
6727 LanguageConfig {
6728 name: "Rust".into(),
6729 path_suffixes: vec!["rs".to_string()],
6730 ..Default::default()
6731 },
6732 Some(tree_sitter_rust::language()),
6733 );
6734 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6735 disk_based_diagnostics_sources: &["disk"],
6736 ..Default::default()
6737 });
6738
6739 let text = "
6740 fn a() { A }
6741 fn b() { BB }
6742 fn c() { CCC }
6743 "
6744 .unindent();
6745
6746 let fs = FakeFs::new(cx.background());
6747 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6748
6749 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6750 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6751
6752 let buffer = project
6753 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6754 .await
6755 .unwrap();
6756
6757 let mut fake_server = fake_servers.next().await.unwrap();
6758 let open_notification = fake_server
6759 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6760 .await;
6761
6762 // Edit the buffer, moving the content down
6763 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6764 let change_notification_1 = fake_server
6765 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6766 .await;
6767 assert!(
6768 change_notification_1.text_document.version > open_notification.text_document.version
6769 );
6770
6771 // Report some diagnostics for the initial version of the buffer
6772 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6773 lsp::PublishDiagnosticsParams {
6774 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6775 version: Some(open_notification.text_document.version),
6776 diagnostics: vec![
6777 lsp::Diagnostic {
6778 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6779 severity: Some(DiagnosticSeverity::ERROR),
6780 message: "undefined variable 'A'".to_string(),
6781 source: Some("disk".to_string()),
6782 ..Default::default()
6783 },
6784 lsp::Diagnostic {
6785 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6786 severity: Some(DiagnosticSeverity::ERROR),
6787 message: "undefined variable 'BB'".to_string(),
6788 source: Some("disk".to_string()),
6789 ..Default::default()
6790 },
6791 lsp::Diagnostic {
6792 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6793 severity: Some(DiagnosticSeverity::ERROR),
6794 source: Some("disk".to_string()),
6795 message: "undefined variable 'CCC'".to_string(),
6796 ..Default::default()
6797 },
6798 ],
6799 },
6800 );
6801
6802 // The diagnostics have moved down since they were created.
6803 buffer.next_notification(cx).await;
6804 buffer.read_with(cx, |buffer, _| {
6805 assert_eq!(
6806 buffer
6807 .snapshot()
6808 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6809 .collect::<Vec<_>>(),
6810 &[
6811 DiagnosticEntry {
6812 range: Point::new(3, 9)..Point::new(3, 11),
6813 diagnostic: Diagnostic {
6814 severity: DiagnosticSeverity::ERROR,
6815 message: "undefined variable 'BB'".to_string(),
6816 is_disk_based: true,
6817 group_id: 1,
6818 is_primary: true,
6819 ..Default::default()
6820 },
6821 },
6822 DiagnosticEntry {
6823 range: Point::new(4, 9)..Point::new(4, 12),
6824 diagnostic: Diagnostic {
6825 severity: DiagnosticSeverity::ERROR,
6826 message: "undefined variable 'CCC'".to_string(),
6827 is_disk_based: true,
6828 group_id: 2,
6829 is_primary: true,
6830 ..Default::default()
6831 }
6832 }
6833 ]
6834 );
6835 assert_eq!(
6836 chunks_with_diagnostics(buffer, 0..buffer.len()),
6837 [
6838 ("\n\nfn a() { ".to_string(), None),
6839 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6840 (" }\nfn b() { ".to_string(), None),
6841 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6842 (" }\nfn c() { ".to_string(), None),
6843 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6844 (" }\n".to_string(), None),
6845 ]
6846 );
6847 assert_eq!(
6848 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6849 [
6850 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6851 (" }\nfn c() { ".to_string(), None),
6852 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6853 ]
6854 );
6855 });
6856
6857 // Ensure overlapping diagnostics are highlighted correctly.
6858 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6859 lsp::PublishDiagnosticsParams {
6860 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6861 version: Some(open_notification.text_document.version),
6862 diagnostics: vec![
6863 lsp::Diagnostic {
6864 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6865 severity: Some(DiagnosticSeverity::ERROR),
6866 message: "undefined variable 'A'".to_string(),
6867 source: Some("disk".to_string()),
6868 ..Default::default()
6869 },
6870 lsp::Diagnostic {
6871 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6872 severity: Some(DiagnosticSeverity::WARNING),
6873 message: "unreachable statement".to_string(),
6874 source: Some("disk".to_string()),
6875 ..Default::default()
6876 },
6877 ],
6878 },
6879 );
6880
6881 buffer.next_notification(cx).await;
6882 buffer.read_with(cx, |buffer, _| {
6883 assert_eq!(
6884 buffer
6885 .snapshot()
6886 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6887 .collect::<Vec<_>>(),
6888 &[
6889 DiagnosticEntry {
6890 range: Point::new(2, 9)..Point::new(2, 12),
6891 diagnostic: Diagnostic {
6892 severity: DiagnosticSeverity::WARNING,
6893 message: "unreachable statement".to_string(),
6894 is_disk_based: true,
6895 group_id: 4,
6896 is_primary: true,
6897 ..Default::default()
6898 }
6899 },
6900 DiagnosticEntry {
6901 range: Point::new(2, 9)..Point::new(2, 10),
6902 diagnostic: Diagnostic {
6903 severity: DiagnosticSeverity::ERROR,
6904 message: "undefined variable 'A'".to_string(),
6905 is_disk_based: true,
6906 group_id: 3,
6907 is_primary: true,
6908 ..Default::default()
6909 },
6910 }
6911 ]
6912 );
6913 assert_eq!(
6914 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6915 [
6916 ("fn a() { ".to_string(), None),
6917 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6918 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6919 ("\n".to_string(), None),
6920 ]
6921 );
6922 assert_eq!(
6923 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6924 [
6925 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6926 ("\n".to_string(), None),
6927 ]
6928 );
6929 });
6930
6931 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6932 // changes since the last save.
6933 buffer.update(cx, |buffer, cx| {
6934 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6935 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6936 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6937 });
6938 let change_notification_2 = fake_server
6939 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6940 .await;
6941 assert!(
6942 change_notification_2.text_document.version
6943 > change_notification_1.text_document.version
6944 );
6945
6946 // Handle out-of-order diagnostics
6947 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6948 lsp::PublishDiagnosticsParams {
6949 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6950 version: Some(change_notification_2.text_document.version),
6951 diagnostics: vec![
6952 lsp::Diagnostic {
6953 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6954 severity: Some(DiagnosticSeverity::ERROR),
6955 message: "undefined variable 'BB'".to_string(),
6956 source: Some("disk".to_string()),
6957 ..Default::default()
6958 },
6959 lsp::Diagnostic {
6960 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6961 severity: Some(DiagnosticSeverity::WARNING),
6962 message: "undefined variable 'A'".to_string(),
6963 source: Some("disk".to_string()),
6964 ..Default::default()
6965 },
6966 ],
6967 },
6968 );
6969
6970 buffer.next_notification(cx).await;
6971 buffer.read_with(cx, |buffer, _| {
6972 assert_eq!(
6973 buffer
6974 .snapshot()
6975 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6976 .collect::<Vec<_>>(),
6977 &[
6978 DiagnosticEntry {
6979 range: Point::new(2, 21)..Point::new(2, 22),
6980 diagnostic: Diagnostic {
6981 severity: DiagnosticSeverity::WARNING,
6982 message: "undefined variable 'A'".to_string(),
6983 is_disk_based: true,
6984 group_id: 6,
6985 is_primary: true,
6986 ..Default::default()
6987 }
6988 },
6989 DiagnosticEntry {
6990 range: Point::new(3, 9)..Point::new(3, 14),
6991 diagnostic: Diagnostic {
6992 severity: DiagnosticSeverity::ERROR,
6993 message: "undefined variable 'BB'".to_string(),
6994 is_disk_based: true,
6995 group_id: 5,
6996 is_primary: true,
6997 ..Default::default()
6998 },
6999 }
7000 ]
7001 );
7002 });
7003 }
7004
7005 #[gpui::test]
7006 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7007 cx.foreground().forbid_parking();
7008
7009 let text = concat!(
7010 "let one = ;\n", //
7011 "let two = \n",
7012 "let three = 3;\n",
7013 );
7014
7015 let fs = FakeFs::new(cx.background());
7016 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7017
7018 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7019 let buffer = project
7020 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7021 .await
7022 .unwrap();
7023
7024 project.update(cx, |project, cx| {
7025 project
7026 .update_buffer_diagnostics(
7027 &buffer,
7028 vec![
7029 DiagnosticEntry {
7030 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7031 diagnostic: Diagnostic {
7032 severity: DiagnosticSeverity::ERROR,
7033 message: "syntax error 1".to_string(),
7034 ..Default::default()
7035 },
7036 },
7037 DiagnosticEntry {
7038 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7039 diagnostic: Diagnostic {
7040 severity: DiagnosticSeverity::ERROR,
7041 message: "syntax error 2".to_string(),
7042 ..Default::default()
7043 },
7044 },
7045 ],
7046 None,
7047 cx,
7048 )
7049 .unwrap();
7050 });
7051
7052 // An empty range is extended forward to include the following character.
7053 // At the end of a line, an empty range is extended backward to include
7054 // the preceding character.
7055 buffer.read_with(cx, |buffer, _| {
7056 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7057 assert_eq!(
7058 chunks
7059 .iter()
7060 .map(|(s, d)| (s.as_str(), *d))
7061 .collect::<Vec<_>>(),
7062 &[
7063 ("let one = ", None),
7064 (";", Some(DiagnosticSeverity::ERROR)),
7065 ("\nlet two =", None),
7066 (" ", Some(DiagnosticSeverity::ERROR)),
7067 ("\nlet three = 3;\n", None)
7068 ]
7069 );
7070 });
7071 }
7072
7073 #[gpui::test]
7074 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7075 cx.foreground().forbid_parking();
7076
7077 let mut language = Language::new(
7078 LanguageConfig {
7079 name: "Rust".into(),
7080 path_suffixes: vec!["rs".to_string()],
7081 ..Default::default()
7082 },
7083 Some(tree_sitter_rust::language()),
7084 );
7085 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7086
7087 let text = "
7088 fn a() {
7089 f1();
7090 }
7091 fn b() {
7092 f2();
7093 }
7094 fn c() {
7095 f3();
7096 }
7097 "
7098 .unindent();
7099
7100 let fs = FakeFs::new(cx.background());
7101 fs.insert_tree(
7102 "/dir",
7103 json!({
7104 "a.rs": text.clone(),
7105 }),
7106 )
7107 .await;
7108
7109 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7110 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7111 let buffer = project
7112 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7113 .await
7114 .unwrap();
7115
7116 let mut fake_server = fake_servers.next().await.unwrap();
7117 let lsp_document_version = fake_server
7118 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7119 .await
7120 .text_document
7121 .version;
7122
7123 // Simulate editing the buffer after the language server computes some edits.
7124 buffer.update(cx, |buffer, cx| {
7125 buffer.edit(
7126 [(
7127 Point::new(0, 0)..Point::new(0, 0),
7128 "// above first function\n",
7129 )],
7130 cx,
7131 );
7132 buffer.edit(
7133 [(
7134 Point::new(2, 0)..Point::new(2, 0),
7135 " // inside first function\n",
7136 )],
7137 cx,
7138 );
7139 buffer.edit(
7140 [(
7141 Point::new(6, 4)..Point::new(6, 4),
7142 "// inside second function ",
7143 )],
7144 cx,
7145 );
7146
7147 assert_eq!(
7148 buffer.text(),
7149 "
7150 // above first function
7151 fn a() {
7152 // inside first function
7153 f1();
7154 }
7155 fn b() {
7156 // inside second function f2();
7157 }
7158 fn c() {
7159 f3();
7160 }
7161 "
7162 .unindent()
7163 );
7164 });
7165
7166 let edits = project
7167 .update(cx, |project, cx| {
7168 project.edits_from_lsp(
7169 &buffer,
7170 vec![
7171 // replace body of first function
7172 lsp::TextEdit {
7173 range: lsp::Range::new(
7174 lsp::Position::new(0, 0),
7175 lsp::Position::new(3, 0),
7176 ),
7177 new_text: "
7178 fn a() {
7179 f10();
7180 }
7181 "
7182 .unindent(),
7183 },
7184 // edit inside second function
7185 lsp::TextEdit {
7186 range: lsp::Range::new(
7187 lsp::Position::new(4, 6),
7188 lsp::Position::new(4, 6),
7189 ),
7190 new_text: "00".into(),
7191 },
7192 // edit inside third function via two distinct edits
7193 lsp::TextEdit {
7194 range: lsp::Range::new(
7195 lsp::Position::new(7, 5),
7196 lsp::Position::new(7, 5),
7197 ),
7198 new_text: "4000".into(),
7199 },
7200 lsp::TextEdit {
7201 range: lsp::Range::new(
7202 lsp::Position::new(7, 5),
7203 lsp::Position::new(7, 6),
7204 ),
7205 new_text: "".into(),
7206 },
7207 ],
7208 Some(lsp_document_version),
7209 cx,
7210 )
7211 })
7212 .await
7213 .unwrap();
7214
7215 buffer.update(cx, |buffer, cx| {
7216 for (range, new_text) in edits {
7217 buffer.edit([(range, new_text)], cx);
7218 }
7219 assert_eq!(
7220 buffer.text(),
7221 "
7222 // above first function
7223 fn a() {
7224 // inside first function
7225 f10();
7226 }
7227 fn b() {
7228 // inside second function f200();
7229 }
7230 fn c() {
7231 f4000();
7232 }
7233 "
7234 .unindent()
7235 );
7236 });
7237 }
7238
7239 #[gpui::test]
7240 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7241 cx.foreground().forbid_parking();
7242
7243 let text = "
7244 use a::b;
7245 use a::c;
7246
7247 fn f() {
7248 b();
7249 c();
7250 }
7251 "
7252 .unindent();
7253
7254 let fs = FakeFs::new(cx.background());
7255 fs.insert_tree(
7256 "/dir",
7257 json!({
7258 "a.rs": text.clone(),
7259 }),
7260 )
7261 .await;
7262
7263 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7264 let buffer = project
7265 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7266 .await
7267 .unwrap();
7268
7269 // Simulate the language server sending us a small edit in the form of a very large diff.
7270 // Rust-analyzer does this when performing a merge-imports code action.
7271 let edits = project
7272 .update(cx, |project, cx| {
7273 project.edits_from_lsp(
7274 &buffer,
7275 [
7276 // Replace the first use statement without editing the semicolon.
7277 lsp::TextEdit {
7278 range: lsp::Range::new(
7279 lsp::Position::new(0, 4),
7280 lsp::Position::new(0, 8),
7281 ),
7282 new_text: "a::{b, c}".into(),
7283 },
                        // Reinsert the remainder of the file between the semicolon and its
                        // final newline.
7286 lsp::TextEdit {
7287 range: lsp::Range::new(
7288 lsp::Position::new(0, 9),
7289 lsp::Position::new(0, 9),
7290 ),
7291 new_text: "\n\n".into(),
7292 },
7293 lsp::TextEdit {
7294 range: lsp::Range::new(
7295 lsp::Position::new(0, 9),
7296 lsp::Position::new(0, 9),
7297 ),
7298 new_text: "
7299 fn f() {
7300 b();
7301 c();
7302 }"
7303 .unindent(),
7304 },
7305 // Delete everything after the first newline of the file.
7306 lsp::TextEdit {
7307 range: lsp::Range::new(
7308 lsp::Position::new(1, 0),
7309 lsp::Position::new(7, 0),
7310 ),
7311 new_text: "".into(),
7312 },
7313 ],
7314 None,
7315 cx,
7316 )
7317 })
7318 .await
7319 .unwrap();
7320
7321 buffer.update(cx, |buffer, cx| {
7322 let edits = edits
7323 .into_iter()
7324 .map(|(range, text)| {
7325 (
7326 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7327 text,
7328 )
7329 })
7330 .collect::<Vec<_>>();
7331
7332 assert_eq!(
7333 edits,
7334 [
7335 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7336 (Point::new(1, 0)..Point::new(2, 0), "".into())
7337 ]
7338 );
7339
7340 for (range, new_text) in edits {
7341 buffer.edit([(range, new_text)], cx);
7342 }
7343 assert_eq!(
7344 buffer.text(),
7345 "
7346 use a::{b, c};
7347
7348 fn f() {
7349 b();
7350 c();
7351 }
7352 "
7353 .unindent()
7354 );
7355 });
7356 }
7357
7358 #[gpui::test]
7359 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7360 cx.foreground().forbid_parking();
7361
7362 let text = "
7363 use a::b;
7364 use a::c;
7365
7366 fn f() {
7367 b();
7368 c();
7369 }
7370 "
7371 .unindent();
7372
7373 let fs = FakeFs::new(cx.background());
7374 fs.insert_tree(
7375 "/dir",
7376 json!({
7377 "a.rs": text.clone(),
7378 }),
7379 )
7380 .await;
7381
7382 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7383 let buffer = project
7384 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7385 .await
7386 .unwrap();
7387
7388 // Simulate the language server sending us edits in a non-ordered fashion,
7389 // with ranges sometimes being inverted.
7390 let edits = project
7391 .update(cx, |project, cx| {
7392 project.edits_from_lsp(
7393 &buffer,
7394 [
7395 lsp::TextEdit {
7396 range: lsp::Range::new(
7397 lsp::Position::new(0, 9),
7398 lsp::Position::new(0, 9),
7399 ),
7400 new_text: "\n\n".into(),
7401 },
7402 lsp::TextEdit {
7403 range: lsp::Range::new(
7404 lsp::Position::new(0, 8),
7405 lsp::Position::new(0, 4),
7406 ),
7407 new_text: "a::{b, c}".into(),
7408 },
7409 lsp::TextEdit {
7410 range: lsp::Range::new(
7411 lsp::Position::new(1, 0),
7412 lsp::Position::new(7, 0),
7413 ),
7414 new_text: "".into(),
7415 },
7416 lsp::TextEdit {
7417 range: lsp::Range::new(
7418 lsp::Position::new(0, 9),
7419 lsp::Position::new(0, 9),
7420 ),
7421 new_text: "
7422 fn f() {
7423 b();
7424 c();
7425 }"
7426 .unindent(),
7427 },
7428 ],
7429 None,
7430 cx,
7431 )
7432 })
7433 .await
7434 .unwrap();
7435
7436 buffer.update(cx, |buffer, cx| {
7437 let edits = edits
7438 .into_iter()
7439 .map(|(range, text)| {
7440 (
7441 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7442 text,
7443 )
7444 })
7445 .collect::<Vec<_>>();
7446
7447 assert_eq!(
7448 edits,
7449 [
7450 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7451 (Point::new(1, 0)..Point::new(2, 0), "".into())
7452 ]
7453 );
7454
7455 for (range, new_text) in edits {
7456 buffer.edit([(range, new_text)], cx);
7457 }
7458 assert_eq!(
7459 buffer.text(),
7460 "
7461 use a::{b, c};
7462
7463 fn f() {
7464 b();
7465 c();
7466 }
7467 "
7468 .unindent()
7469 );
7470 });
7471 }
7472
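    // Collects the buffer's text in `range` as chunks, merging adjacent chunks
    // that carry the same diagnostic severity.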
7473 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7474 buffer: &Buffer,
7475 range: Range<T>,
7476 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7477 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7478 for chunk in buffer.snapshot().chunks(range, true) {
7479 if chunks.last().map_or(false, |prev_chunk| {
7480 prev_chunk.1 == chunk.diagnostic_severity
7481 }) {
7482 chunks.last_mut().unwrap().0.push_str(chunk.text);
7483 } else {
7484 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7485 }
7486 }
7487 chunks
7488 }
7489
7490 #[gpui::test]
7491 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7492 let dir = temp_tree(json!({
7493 "root": {
7494 "dir1": {},
7495 "dir2": {
7496 "dir3": {}
7497 }
7498 }
7499 }));
7500
7501 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7502 let cancel_flag = Default::default();
7503 let results = project
7504 .read_with(cx, |project, cx| {
7505 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7506 })
7507 .await;
7508
7509 assert!(results.is_empty());
7510 }
7511
7512 #[gpui::test(iterations = 10)]
7513 async fn test_definition(cx: &mut gpui::TestAppContext) {
7514 let mut language = Language::new(
7515 LanguageConfig {
7516 name: "Rust".into(),
7517 path_suffixes: vec!["rs".to_string()],
7518 ..Default::default()
7519 },
7520 Some(tree_sitter_rust::language()),
7521 );
7522 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7523
7524 let fs = FakeFs::new(cx.background());
7525 fs.insert_tree(
7526 "/dir",
7527 json!({
7528 "a.rs": "const fn a() { A }",
7529 "b.rs": "const y: i32 = crate::a()",
7530 }),
7531 )
7532 .await;
7533
7534 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7535 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7536
7537 let buffer = project
7538 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7539 .await
7540 .unwrap();
7541
7542 let fake_server = fake_servers.next().await.unwrap();
7543 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7544 let params = params.text_document_position_params;
7545 assert_eq!(
7546 params.text_document.uri.to_file_path().unwrap(),
7547 Path::new("/dir/b.rs"),
7548 );
7549 assert_eq!(params.position, lsp::Position::new(0, 22));
7550
7551 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7552 lsp::Location::new(
7553 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7554 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7555 ),
7556 )))
7557 });
7558
7559 let mut definitions = project
7560 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7561 .await
7562 .unwrap();
7563
7564 assert_eq!(definitions.len(), 1);
7565 let definition = definitions.pop().unwrap();
7566 cx.update(|cx| {
7567 let target_buffer = definition.buffer.read(cx);
7568 assert_eq!(
7569 target_buffer
7570 .file()
7571 .unwrap()
7572 .as_local()
7573 .unwrap()
7574 .abs_path(cx),
7575 Path::new("/dir/a.rs"),
7576 );
7577 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7578 assert_eq!(
7579 list_worktrees(&project, cx),
7580 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7581 );
7582
7583 drop(definition);
7584 });
7585 cx.read(|cx| {
7586 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7587 });
7588
7589 fn list_worktrees<'a>(
7590 project: &'a ModelHandle<Project>,
7591 cx: &'a AppContext,
7592 ) -> Vec<(&'a Path, bool)> {
7593 project
7594 .read(cx)
7595 .worktrees(cx)
7596 .map(|worktree| {
7597 let worktree = worktree.read(cx);
7598 (
7599 worktree.as_local().unwrap().abs_path().as_ref(),
7600 worktree.is_visible(),
7601 )
7602 })
7603 .collect::<Vec<_>>()
7604 }
7605 }
7606
7607 #[gpui::test]
7608 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7609 let mut language = Language::new(
7610 LanguageConfig {
7611 name: "TypeScript".into(),
7612 path_suffixes: vec!["ts".to_string()],
7613 ..Default::default()
7614 },
7615 Some(tree_sitter_typescript::language_typescript()),
7616 );
7617 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7618
7619 let fs = FakeFs::new(cx.background());
7620 fs.insert_tree(
7621 "/dir",
7622 json!({
7623 "a.ts": "",
7624 }),
7625 )
7626 .await;
7627
7628 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7629 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7630 let buffer = project
7631 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7632 .await
7633 .unwrap();
7634
7635 let fake_server = fake_language_servers.next().await.unwrap();
7636
7637 let text = "let a = b.fqn";
7638 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7639 let completions = project.update(cx, |project, cx| {
7640 project.completions(&buffer, text.len(), cx)
7641 });
7642
7643 fake_server
7644 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7645 Ok(Some(lsp::CompletionResponse::Array(vec![
7646 lsp::CompletionItem {
7647 label: "fullyQualifiedName?".into(),
7648 insert_text: Some("fullyQualifiedName".into()),
7649 ..Default::default()
7650 },
7651 ])))
7652 })
7653 .next()
7654 .await;
7655 let completions = completions.await.unwrap();
7656 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7657 assert_eq!(completions.len(), 1);
7658 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7659 assert_eq!(
7660 completions[0].old_range.to_offset(&snapshot),
7661 text.len() - 3..text.len()
7662 );
7663
7664 let text = "let a = \"atoms/cmp\"";
7665 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7666 let completions = project.update(cx, |project, cx| {
7667 project.completions(&buffer, text.len() - 1, cx)
7668 });
7669
7670 fake_server
7671 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7672 Ok(Some(lsp::CompletionResponse::Array(vec![
7673 lsp::CompletionItem {
7674 label: "component".into(),
7675 ..Default::default()
7676 },
7677 ])))
7678 })
7679 .next()
7680 .await;
7681 let completions = completions.await.unwrap();
7682 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7683 assert_eq!(completions.len(), 1);
7684 assert_eq!(completions[0].new_text, "component");
7685 assert_eq!(
7686 completions[0].old_range.to_offset(&snapshot),
7687 text.len() - 4..text.len() - 1
7688 );
7689 }
7690
7691 #[gpui::test(iterations = 10)]
7692 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7693 let mut language = Language::new(
7694 LanguageConfig {
7695 name: "TypeScript".into(),
7696 path_suffixes: vec!["ts".to_string()],
7697 ..Default::default()
7698 },
7699 None,
7700 );
7701 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7702
7703 let fs = FakeFs::new(cx.background());
7704 fs.insert_tree(
7705 "/dir",
7706 json!({
7707 "a.ts": "a",
7708 }),
7709 )
7710 .await;
7711
7712 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7713 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7714 let buffer = project
7715 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7716 .await
7717 .unwrap();
7718
7719 let fake_server = fake_language_servers.next().await.unwrap();
7720
        // The language server returns code actions that contain commands rather than edits.
7722 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7723 fake_server
7724 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7725 Ok(Some(vec![
7726 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7727 title: "The code action".into(),
7728 command: Some(lsp::Command {
7729 title: "The command".into(),
7730 command: "_the/command".into(),
7731 arguments: Some(vec![json!("the-argument")]),
7732 }),
7733 ..Default::default()
7734 }),
7735 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7736 title: "two".into(),
7737 ..Default::default()
7738 }),
7739 ]))
7740 })
7741 .next()
7742 .await;
7743
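        // Apply the first code action, the one that carries a command.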
7744 let action = actions.await.unwrap()[0].clone();
7745 let apply = project.update(cx, |project, cx| {
7746 project.apply_code_action(buffer.clone(), action, true, cx)
7747 });
7748
        // Resolving the code action does not populate its edits. In the
        // absence of edits, we must execute the given command.
7751 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7752 |action, _| async move { Ok(action) },
7753 );
7754
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7757 fake_server
7758 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7759 let fake = fake_server.clone();
7760 move |params, _| {
7761 assert_eq!(params.command, "_the/command");
7762 let fake = fake.clone();
7763 async move {
7764 fake.server
7765 .request::<lsp::request::ApplyWorkspaceEdit>(
7766 lsp::ApplyWorkspaceEditParams {
7767 label: None,
7768 edit: lsp::WorkspaceEdit {
7769 changes: Some(
7770 [(
7771 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7772 vec![lsp::TextEdit {
7773 range: lsp::Range::new(
7774 lsp::Position::new(0, 0),
7775 lsp::Position::new(0, 0),
7776 ),
7777 new_text: "X".into(),
7778 }],
7779 )]
7780 .into_iter()
7781 .collect(),
7782 ),
7783 ..Default::default()
7784 },
7785 },
7786 )
7787 .await
7788 .unwrap();
7789 Ok(Some(json!(null)))
7790 }
7791 }
7792 })
7793 .next()
7794 .await;
7795
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
7798 let transaction = apply.await.unwrap();
7799 assert!(transaction.0.contains_key(&buffer));
7800 buffer.update(cx, |buffer, cx| {
7801 assert_eq!(buffer.text(), "Xa");
7802 buffer.undo(cx);
7803 assert_eq!(buffer.text(), "a");
7804 });
7805 }
7806
7807 #[gpui::test]
7808 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7809 let fs = FakeFs::new(cx.background());
7810 fs.insert_tree(
7811 "/dir",
7812 json!({
7813 "file1": "the old contents",
7814 }),
7815 )
7816 .await;
7817
7818 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7819 let buffer = project
7820 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7821 .await
7822 .unwrap();
7823 buffer
7824 .update(cx, |buffer, cx| {
7825 assert_eq!(buffer.text(), "the old contents");
7826 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7827 buffer.save(cx)
7828 })
7829 .await
7830 .unwrap();
7831
7832 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7833 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7834 }
7835
7836 #[gpui::test]
7837 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7838 let fs = FakeFs::new(cx.background());
7839 fs.insert_tree(
7840 "/dir",
7841 json!({
7842 "file1": "the old contents",
7843 }),
7844 )
7845 .await;
7846
7847 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7848 let buffer = project
7849 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7850 .await
7851 .unwrap();
7852 buffer
7853 .update(cx, |buffer, cx| {
7854 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7855 buffer.save(cx)
7856 })
7857 .await
7858 .unwrap();
7859
7860 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7861 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7862 }
7863
7864 #[gpui::test]
7865 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7866 let fs = FakeFs::new(cx.background());
7867 fs.insert_tree("/dir", json!({})).await;
7868
7869 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7870 let buffer = project.update(cx, |project, cx| {
7871 project.create_buffer("", None, cx).unwrap()
7872 });
7873 buffer.update(cx, |buffer, cx| {
7874 buffer.edit([(0..0, "abc")], cx);
7875 assert!(buffer.is_dirty());
7876 assert!(!buffer.has_conflict());
7877 });
7878 project
7879 .update(cx, |project, cx| {
7880 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7881 })
7882 .await
7883 .unwrap();
7884 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7885 buffer.read_with(cx, |buffer, cx| {
7886 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7887 assert!(!buffer.is_dirty());
7888 assert!(!buffer.has_conflict());
7889 });
7890
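        // Opening the path the buffer was saved to should return the same buffer.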
7891 let opened_buffer = project
7892 .update(cx, |project, cx| {
7893 project.open_local_buffer("/dir/file1", cx)
7894 })
7895 .await
7896 .unwrap();
7897 assert_eq!(opened_buffer, buffer);
7898 }
7899
7900 #[gpui::test(retries = 5)]
7901 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7902 let dir = temp_tree(json!({
7903 "a": {
7904 "file1": "",
7905 "file2": "",
7906 "file3": "",
7907 },
7908 "b": {
7909 "c": {
7910 "file4": "",
7911 "file5": "",
7912 }
7913 }
7914 }));
7915
7916 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7917 let rpc = project.read_with(cx, |p, _| p.client.clone());
7918
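        // Helpers for opening a buffer at a path within the worktree and for looking
        // up the worktree entry id of a path.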
7919 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7920 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7921 async move { buffer.await.unwrap() }
7922 };
7923 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7924 project.read_with(cx, |project, cx| {
7925 let tree = project.worktrees(cx).next().unwrap();
7926 tree.read(cx)
7927 .entry_for_path(path)
7928 .expect(&format!("no entry for path {}", path))
7929 .id
7930 })
7931 };
7932
7933 let buffer2 = buffer_for_path("a/file2", cx).await;
7934 let buffer3 = buffer_for_path("a/file3", cx).await;
7935 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7936 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7937
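        // Record the worktree entry ids for these files so we can verify that they
        // are preserved when the files are renamed or moved below.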
7938 let file2_id = id_for_path("a/file2", &cx);
7939 let file3_id = id_for_path("a/file3", &cx);
7940 let file4_id = id_for_path("b/c/file4", &cx);
7941
7942 // Create a remote copy of this worktree.
7943 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7944 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7945 let (remote, load_task) = cx.update(|cx| {
7946 Worktree::remote(
7947 1,
7948 1,
7949 initial_snapshot.to_proto(&Default::default(), true),
7950 rpc.clone(),
7951 cx,
7952 )
7953 });
7955 load_task.await;
7956
7957 cx.read(|cx| {
7958 assert!(!buffer2.read(cx).is_dirty());
7959 assert!(!buffer3.read(cx).is_dirty());
7960 assert!(!buffer4.read(cx).is_dirty());
7961 assert!(!buffer5.read(cx).is_dirty());
7962 });
7963
7964 // Rename and delete files and directories.
7965 tree.flush_fs_events(&cx).await;
7966 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7967 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7968 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7969 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7970 tree.flush_fs_events(&cx).await;
7971
7972 let expected_paths = vec![
7973 "a",
7974 "a/file1",
7975 "a/file2.new",
7976 "b",
7977 "d",
7978 "d/file3",
7979 "d/file4",
7980 ];
7981
7982 cx.read(|app| {
7983 assert_eq!(
7984 tree.read(app)
7985 .paths()
7986 .map(|p| p.to_str().unwrap())
7987 .collect::<Vec<_>>(),
7988 expected_paths
7989 );
7990
7991 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7992 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7993 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7994
7995 assert_eq!(
7996 buffer2.read(app).file().unwrap().path().as_ref(),
7997 Path::new("a/file2.new")
7998 );
7999 assert_eq!(
8000 buffer3.read(app).file().unwrap().path().as_ref(),
8001 Path::new("d/file3")
8002 );
8003 assert_eq!(
8004 buffer4.read(app).file().unwrap().path().as_ref(),
8005 Path::new("d/file4")
8006 );
8007 assert_eq!(
8008 buffer5.read(app).file().unwrap().path().as_ref(),
8009 Path::new("b/c/file5")
8010 );
8011
8012 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8013 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8014 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8015 assert!(buffer5.read(app).file().unwrap().is_deleted());
8016 });
8017
8018 // Update the remote worktree. Check that it becomes consistent with the
8019 // local worktree.
8020 remote.update(cx, |remote, cx| {
8021 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8022 &initial_snapshot,
8023 1,
8024 1,
8025 true,
8026 );
8027 remote
8028 .as_remote_mut()
8029 .unwrap()
8030 .snapshot
8031 .apply_remote_update(update_message)
8032 .unwrap();
8033
8034 assert_eq!(
8035 remote
8036 .paths()
8037 .map(|p| p.to_str().unwrap())
8038 .collect::<Vec<_>>(),
8039 expected_paths
8040 );
8041 });
8042 }
8043
8044 #[gpui::test]
8045 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8046 let fs = FakeFs::new(cx.background());
8047 fs.insert_tree(
8048 "/dir",
8049 json!({
8050 "a.txt": "a-contents",
8051 "b.txt": "b-contents",
8052 }),
8053 )
8054 .await;
8055
8056 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8057
8058 // Spawn multiple tasks to open paths, repeating some paths.
8059 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8060 (
8061 p.open_local_buffer("/dir/a.txt", cx),
8062 p.open_local_buffer("/dir/b.txt", cx),
8063 p.open_local_buffer("/dir/a.txt", cx),
8064 )
8065 });
8066
8067 let buffer_a_1 = buffer_a_1.await.unwrap();
8068 let buffer_a_2 = buffer_a_2.await.unwrap();
8069 let buffer_b = buffer_b.await.unwrap();
8070 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8071 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8072
8073 // There is only one buffer per path.
8074 let buffer_a_id = buffer_a_1.id();
8075 assert_eq!(buffer_a_2.id(), buffer_a_id);
8076
8077 // Open the same path again while it is still open.
8078 drop(buffer_a_1);
8079 let buffer_a_3 = project
8080 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8081 .await
8082 .unwrap();
8083
8084 // There's still only one buffer per path.
8085 assert_eq!(buffer_a_3.id(), buffer_a_id);
8086 }
8087
8088 #[gpui::test]
8089 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8090 let fs = FakeFs::new(cx.background());
8091 fs.insert_tree(
8092 "/dir",
8093 json!({
8094 "file1": "abc",
8095 "file2": "def",
8096 "file3": "ghi",
8097 }),
8098 )
8099 .await;
8100
8101 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8102
8103 let buffer1 = project
8104 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8105 .await
8106 .unwrap();
8107 let events = Rc::new(RefCell::new(Vec::new()));
8108
8109 // initially, the buffer isn't dirty.
8110 buffer1.update(cx, |buffer, cx| {
8111 cx.subscribe(&buffer1, {
8112 let events = events.clone();
8113 move |_, _, event, _| match event {
8114 BufferEvent::Operation(_) => {}
8115 _ => events.borrow_mut().push(event.clone()),
8116 }
8117 })
8118 .detach();
8119
8120 assert!(!buffer.is_dirty());
8121 assert!(events.borrow().is_empty());
8122
8123 buffer.edit([(1..2, "")], cx);
8124 });
8125
        // after the first edit, the buffer is dirty, and emits both an edited
        // and a dirty-changed event.
8127 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
8129 assert!(buffer.is_dirty());
8130 assert_eq!(
8131 *events.borrow(),
8132 &[language::Event::Edited, language::Event::DirtyChanged]
8133 );
8134 events.borrow_mut().clear();
8135 buffer.did_save(
8136 buffer.version(),
8137 buffer.as_rope().fingerprint(),
8138 buffer.file().unwrap().mtime(),
8139 None,
8140 cx,
8141 );
8142 });
8143
8144 // after saving, the buffer is not dirty, and emits a saved event.
8145 buffer1.update(cx, |buffer, cx| {
8146 assert!(!buffer.is_dirty());
8147 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8148 events.borrow_mut().clear();
8149
8150 buffer.edit([(1..1, "B")], cx);
8151 buffer.edit([(2..2, "D")], cx);
8152 });
8153
        // after editing again, the buffer is dirty, and emits another dirty-changed
        // event alongside the edit events.
8155 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
8157 assert!(buffer.is_dirty());
8158 assert_eq!(
8159 *events.borrow(),
8160 &[
8161 language::Event::Edited,
8162 language::Event::DirtyChanged,
8163 language::Event::Edited,
8164 ],
8165 );
8166 events.borrow_mut().clear();
8167
8168 // After restoring the buffer to its previously-saved state,
8169 // the buffer is not considered dirty anymore.
8170 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
8172 assert!(!buffer.is_dirty());
8173 });
8174
8175 assert_eq!(
8176 *events.borrow(),
8177 &[language::Event::Edited, language::Event::DirtyChanged]
8178 );
8179
8180 // When a file is deleted, the buffer is considered dirty.
8181 let events = Rc::new(RefCell::new(Vec::new()));
8182 let buffer2 = project
8183 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8184 .await
8185 .unwrap();
8186 buffer2.update(cx, |_, cx| {
8187 cx.subscribe(&buffer2, {
8188 let events = events.clone();
8189 move |_, _, event, _| events.borrow_mut().push(event.clone())
8190 })
8191 .detach();
8192 });
8193
8194 fs.remove_file("/dir/file2".as_ref(), Default::default())
8195 .await
8196 .unwrap();
8197 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8198 assert_eq!(
8199 *events.borrow(),
8200 &[
8201 language::Event::DirtyChanged,
8202 language::Event::FileHandleChanged
8203 ]
8204 );
8205
        // When a file is already dirty when deleted, we don't emit a DirtyChanged event.
8207 let events = Rc::new(RefCell::new(Vec::new()));
8208 let buffer3 = project
8209 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8210 .await
8211 .unwrap();
8212 buffer3.update(cx, |_, cx| {
8213 cx.subscribe(&buffer3, {
8214 let events = events.clone();
8215 move |_, _, event, _| events.borrow_mut().push(event.clone())
8216 })
8217 .detach();
8218 });
8219
8220 buffer3.update(cx, |buffer, cx| {
8221 buffer.edit([(0..0, "x")], cx);
8222 });
8223 events.borrow_mut().clear();
8224 fs.remove_file("/dir/file3".as_ref(), Default::default())
8225 .await
8226 .unwrap();
8227 buffer3
8228 .condition(&cx, |_, _| !events.borrow().is_empty())
8229 .await;
8230 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8231 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8232 }
8233
8234 #[gpui::test]
8235 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8236 let initial_contents = "aaa\nbbbbb\nc\n";
8237 let fs = FakeFs::new(cx.background());
8238 fs.insert_tree(
8239 "/dir",
8240 json!({
8241 "the-file": initial_contents,
8242 }),
8243 )
8244 .await;
8245 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8246 let buffer = project
8247 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8248 .await
8249 .unwrap();
8250
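        // Place an anchor just after the first character of each of the file's three lines.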
8251 let anchors = (0..3)
8252 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8253 .collect::<Vec<_>>();
8254
8255 // Change the file on disk, adding two new lines of text, and removing
8256 // one line.
8257 buffer.read_with(cx, |buffer, _| {
8258 assert!(!buffer.is_dirty());
8259 assert!(!buffer.has_conflict());
8260 });
8261 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8262 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8263 .await
8264 .unwrap();
8265
8266 // Because the buffer was not modified, it is reloaded from disk. Its
8267 // contents are edited according to the diff between the old and new
8268 // file contents.
8269 buffer
8270 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8271 .await;
8272
8273 buffer.update(cx, |buffer, _| {
8274 assert_eq!(buffer.text(), new_contents);
8275 assert!(!buffer.is_dirty());
8276 assert!(!buffer.has_conflict());
8277
8278 let anchor_positions = anchors
8279 .iter()
8280 .map(|anchor| anchor.to_point(&*buffer))
8281 .collect::<Vec<_>>();
8282 assert_eq!(
8283 anchor_positions,
8284 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8285 );
8286 });
8287
8288 // Modify the buffer
8289 buffer.update(cx, |buffer, cx| {
8290 buffer.edit([(0..0, " ")], cx);
8291 assert!(buffer.is_dirty());
8292 assert!(!buffer.has_conflict());
8293 });
8294
8295 // Change the file on disk again, adding blank lines to the beginning.
8296 fs.save(
8297 "/dir/the-file".as_ref(),
8298 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8299 )
8300 .await
8301 .unwrap();
8302
8303 // Because the buffer is modified, it doesn't reload from disk, but is
8304 // marked as having a conflict.
8305 buffer
8306 .condition(&cx, |buffer, _| buffer.has_conflict())
8307 .await;
8308 }
8309
8310 #[gpui::test]
8311 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8312 cx.foreground().forbid_parking();
8313
8314 let fs = FakeFs::new(cx.background());
8315 fs.insert_tree(
8316 "/the-dir",
8317 json!({
8318 "a.rs": "
8319 fn foo(mut v: Vec<usize>) {
8320 for x in &v {
8321 v.push(1);
8322 }
8323 }
8324 "
8325 .unindent(),
8326 }),
8327 )
8328 .await;
8329
8330 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8331 let buffer = project
8332 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8333 .await
8334 .unwrap();
8335
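        // Publish diagnostics in which the hint entries and their primary errors
        // reference each other via `relatedInformation`, so they should be grouped together.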
8336 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8337 let message = lsp::PublishDiagnosticsParams {
8338 uri: buffer_uri.clone(),
8339 diagnostics: vec![
8340 lsp::Diagnostic {
8341 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8342 severity: Some(DiagnosticSeverity::WARNING),
8343 message: "error 1".to_string(),
8344 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8345 location: lsp::Location {
8346 uri: buffer_uri.clone(),
8347 range: lsp::Range::new(
8348 lsp::Position::new(1, 8),
8349 lsp::Position::new(1, 9),
8350 ),
8351 },
8352 message: "error 1 hint 1".to_string(),
8353 }]),
8354 ..Default::default()
8355 },
8356 lsp::Diagnostic {
8357 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8358 severity: Some(DiagnosticSeverity::HINT),
8359 message: "error 1 hint 1".to_string(),
8360 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8361 location: lsp::Location {
8362 uri: buffer_uri.clone(),
8363 range: lsp::Range::new(
8364 lsp::Position::new(1, 8),
8365 lsp::Position::new(1, 9),
8366 ),
8367 },
8368 message: "original diagnostic".to_string(),
8369 }]),
8370 ..Default::default()
8371 },
8372 lsp::Diagnostic {
8373 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8374 severity: Some(DiagnosticSeverity::ERROR),
8375 message: "error 2".to_string(),
8376 related_information: Some(vec![
8377 lsp::DiagnosticRelatedInformation {
8378 location: lsp::Location {
8379 uri: buffer_uri.clone(),
8380 range: lsp::Range::new(
8381 lsp::Position::new(1, 13),
8382 lsp::Position::new(1, 15),
8383 ),
8384 },
8385 message: "error 2 hint 1".to_string(),
8386 },
8387 lsp::DiagnosticRelatedInformation {
8388 location: lsp::Location {
8389 uri: buffer_uri.clone(),
8390 range: lsp::Range::new(
8391 lsp::Position::new(1, 13),
8392 lsp::Position::new(1, 15),
8393 ),
8394 },
8395 message: "error 2 hint 2".to_string(),
8396 },
8397 ]),
8398 ..Default::default()
8399 },
8400 lsp::Diagnostic {
8401 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8402 severity: Some(DiagnosticSeverity::HINT),
8403 message: "error 2 hint 1".to_string(),
8404 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8405 location: lsp::Location {
8406 uri: buffer_uri.clone(),
8407 range: lsp::Range::new(
8408 lsp::Position::new(2, 8),
8409 lsp::Position::new(2, 17),
8410 ),
8411 },
8412 message: "original diagnostic".to_string(),
8413 }]),
8414 ..Default::default()
8415 },
8416 lsp::Diagnostic {
8417 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8418 severity: Some(DiagnosticSeverity::HINT),
8419 message: "error 2 hint 2".to_string(),
8420 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8421 location: lsp::Location {
8422 uri: buffer_uri.clone(),
8423 range: lsp::Range::new(
8424 lsp::Position::new(2, 8),
8425 lsp::Position::new(2, 17),
8426 ),
8427 },
8428 message: "original diagnostic".to_string(),
8429 }]),
8430 ..Default::default()
8431 },
8432 ],
8433 version: None,
8434 };
8435
8436 project
8437 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8438 .unwrap();
8439 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8440
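        // All five diagnostics are present, and each hint shares a group id with its
        // primary diagnostic.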
8441 assert_eq!(
8442 buffer
8443 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8444 .collect::<Vec<_>>(),
8445 &[
8446 DiagnosticEntry {
8447 range: Point::new(1, 8)..Point::new(1, 9),
8448 diagnostic: Diagnostic {
8449 severity: DiagnosticSeverity::WARNING,
8450 message: "error 1".to_string(),
8451 group_id: 0,
8452 is_primary: true,
8453 ..Default::default()
8454 }
8455 },
8456 DiagnosticEntry {
8457 range: Point::new(1, 8)..Point::new(1, 9),
8458 diagnostic: Diagnostic {
8459 severity: DiagnosticSeverity::HINT,
8460 message: "error 1 hint 1".to_string(),
8461 group_id: 0,
8462 is_primary: false,
8463 ..Default::default()
8464 }
8465 },
8466 DiagnosticEntry {
8467 range: Point::new(1, 13)..Point::new(1, 15),
8468 diagnostic: Diagnostic {
8469 severity: DiagnosticSeverity::HINT,
8470 message: "error 2 hint 1".to_string(),
8471 group_id: 1,
8472 is_primary: false,
8473 ..Default::default()
8474 }
8475 },
8476 DiagnosticEntry {
8477 range: Point::new(1, 13)..Point::new(1, 15),
8478 diagnostic: Diagnostic {
8479 severity: DiagnosticSeverity::HINT,
8480 message: "error 2 hint 2".to_string(),
8481 group_id: 1,
8482 is_primary: false,
8483 ..Default::default()
8484 }
8485 },
8486 DiagnosticEntry {
8487 range: Point::new(2, 8)..Point::new(2, 17),
8488 diagnostic: Diagnostic {
8489 severity: DiagnosticSeverity::ERROR,
8490 message: "error 2".to_string(),
8491 group_id: 1,
8492 is_primary: true,
8493 ..Default::default()
8494 }
8495 }
8496 ]
8497 );
8498
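        // Each group can also be queried individually, with its entries ordered by position.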
8499 assert_eq!(
8500 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8501 &[
8502 DiagnosticEntry {
8503 range: Point::new(1, 8)..Point::new(1, 9),
8504 diagnostic: Diagnostic {
8505 severity: DiagnosticSeverity::WARNING,
8506 message: "error 1".to_string(),
8507 group_id: 0,
8508 is_primary: true,
8509 ..Default::default()
8510 }
8511 },
8512 DiagnosticEntry {
8513 range: Point::new(1, 8)..Point::new(1, 9),
8514 diagnostic: Diagnostic {
8515 severity: DiagnosticSeverity::HINT,
8516 message: "error 1 hint 1".to_string(),
8517 group_id: 0,
8518 is_primary: false,
8519 ..Default::default()
8520 }
8521 },
8522 ]
8523 );
8524 assert_eq!(
8525 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8526 &[
8527 DiagnosticEntry {
8528 range: Point::new(1, 13)..Point::new(1, 15),
8529 diagnostic: Diagnostic {
8530 severity: DiagnosticSeverity::HINT,
8531 message: "error 2 hint 1".to_string(),
8532 group_id: 1,
8533 is_primary: false,
8534 ..Default::default()
8535 }
8536 },
8537 DiagnosticEntry {
8538 range: Point::new(1, 13)..Point::new(1, 15),
8539 diagnostic: Diagnostic {
8540 severity: DiagnosticSeverity::HINT,
8541 message: "error 2 hint 2".to_string(),
8542 group_id: 1,
8543 is_primary: false,
8544 ..Default::default()
8545 }
8546 },
8547 DiagnosticEntry {
8548 range: Point::new(2, 8)..Point::new(2, 17),
8549 diagnostic: Diagnostic {
8550 severity: DiagnosticSeverity::ERROR,
8551 message: "error 2".to_string(),
8552 group_id: 1,
8553 is_primary: true,
8554 ..Default::default()
8555 }
8556 }
8557 ]
8558 );
8559 }
8560
8561 #[gpui::test]
8562 async fn test_rename(cx: &mut gpui::TestAppContext) {
8563 cx.foreground().forbid_parking();
8564
8565 let mut language = Language::new(
8566 LanguageConfig {
8567 name: "Rust".into(),
8568 path_suffixes: vec!["rs".to_string()],
8569 ..Default::default()
8570 },
8571 Some(tree_sitter_rust::language()),
8572 );
8573 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8574 capabilities: lsp::ServerCapabilities {
8575 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8576 prepare_provider: Some(true),
8577 work_done_progress_options: Default::default(),
8578 })),
8579 ..Default::default()
8580 },
8581 ..Default::default()
8582 });
8583
8584 let fs = FakeFs::new(cx.background());
8585 fs.insert_tree(
8586 "/dir",
8587 json!({
8588 "one.rs": "const ONE: usize = 1;",
8589 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8590 }),
8591 )
8592 .await;
8593
8594 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8595 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8596 let buffer = project
8597 .update(cx, |project, cx| {
8598 project.open_local_buffer("/dir/one.rs", cx)
8599 })
8600 .await
8601 .unwrap();
8602
8603 let fake_server = fake_servers.next().await.unwrap();
8604
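        // Prepare a rename at an offset inside the identifier `ONE`. The server
        // responds with the identifier's full range.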
8605 let response = project.update(cx, |project, cx| {
8606 project.prepare_rename(buffer.clone(), 7, cx)
8607 });
8608 fake_server
8609 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8610 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8611 assert_eq!(params.position, lsp::Position::new(0, 7));
8612 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8613 lsp::Position::new(0, 6),
8614 lsp::Position::new(0, 9),
8615 ))))
8616 })
8617 .next()
8618 .await
8619 .unwrap();
8620 let range = response.await.unwrap().unwrap();
8621 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8622 assert_eq!(range, 6..9);
8623
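        // Perform the rename. The server's workspace edit touches both files, and the
        // resulting project transaction should contain a buffer for each of them.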
8624 let response = project.update(cx, |project, cx| {
8625 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8626 });
8627 fake_server
8628 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8629 assert_eq!(
8630 params.text_document_position.text_document.uri.as_str(),
8631 "file:///dir/one.rs"
8632 );
8633 assert_eq!(
8634 params.text_document_position.position,
8635 lsp::Position::new(0, 7)
8636 );
8637 assert_eq!(params.new_name, "THREE");
8638 Ok(Some(lsp::WorkspaceEdit {
8639 changes: Some(
8640 [
8641 (
8642 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8643 vec![lsp::TextEdit::new(
8644 lsp::Range::new(
8645 lsp::Position::new(0, 6),
8646 lsp::Position::new(0, 9),
8647 ),
8648 "THREE".to_string(),
8649 )],
8650 ),
8651 (
8652 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8653 vec![
8654 lsp::TextEdit::new(
8655 lsp::Range::new(
8656 lsp::Position::new(0, 24),
8657 lsp::Position::new(0, 27),
8658 ),
8659 "THREE".to_string(),
8660 ),
8661 lsp::TextEdit::new(
8662 lsp::Range::new(
8663 lsp::Position::new(0, 35),
8664 lsp::Position::new(0, 38),
8665 ),
8666 "THREE".to_string(),
8667 ),
8668 ],
8669 ),
8670 ]
8671 .into_iter()
8672 .collect(),
8673 ),
8674 ..Default::default()
8675 }))
8676 })
8677 .next()
8678 .await
8679 .unwrap();
8680 let mut transaction = response.await.unwrap().0;
8681 assert_eq!(transaction.len(), 2);
8682 assert_eq!(
8683 transaction
8684 .remove_entry(&buffer)
8685 .unwrap()
8686 .0
8687 .read_with(cx, |buffer, _| buffer.text()),
8688 "const THREE: usize = 1;"
8689 );
8690 assert_eq!(
8691 transaction
8692 .into_keys()
8693 .next()
8694 .unwrap()
8695 .read_with(cx, |buffer, _| buffer.text()),
8696 "const TWO: usize = one::THREE + one::THREE;"
8697 );
8698 }
8699
8700 #[gpui::test]
8701 async fn test_search(cx: &mut gpui::TestAppContext) {
8702 let fs = FakeFs::new(cx.background());
8703 fs.insert_tree(
8704 "/dir",
8705 json!({
8706 "one.rs": "const ONE: usize = 1;",
8707 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8708 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8709 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8710 }),
8711 )
8712 .await;
8713 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
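        // Search the project while no buffers are open; matches come from the files on disk.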
8714 assert_eq!(
8715 search(&project, SearchQuery::text("TWO", false, true), cx)
8716 .await
8717 .unwrap(),
8718 HashMap::from_iter([
8719 ("two.rs".to_string(), vec![6..9]),
8720 ("three.rs".to_string(), vec![37..40])
8721 ])
8722 );
8723
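        // Edit an open buffer without saving. A subsequent search should reflect the
        // buffer's unsaved contents rather than the file on disk.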
8724 let buffer_4 = project
8725 .update(cx, |project, cx| {
8726 project.open_local_buffer("/dir/four.rs", cx)
8727 })
8728 .await
8729 .unwrap();
8730 buffer_4.update(cx, |buffer, cx| {
8731 let text = "two::TWO";
8732 buffer.edit([(20..28, text), (31..43, text)], cx);
8733 });
8734
8735 assert_eq!(
8736 search(&project, SearchQuery::text("TWO", false, true), cx)
8737 .await
8738 .unwrap(),
8739 HashMap::from_iter([
8740 ("two.rs".to_string(), vec![6..9]),
8741 ("three.rs".to_string(), vec![37..40]),
8742 ("four.rs".to_string(), vec![25..28, 36..39])
8743 ])
8744 );
8745
8746 async fn search(
8747 project: &ModelHandle<Project>,
8748 query: SearchQuery,
8749 cx: &mut gpui::TestAppContext,
8750 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8751 let results = project
8752 .update(cx, |project, cx| project.search(query, cx))
8753 .await?;
8754
8755 Ok(results
8756 .into_iter()
8757 .map(|(buffer, ranges)| {
8758 buffer.read_with(cx, |buffer, _| {
8759 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8760 let ranges = ranges
8761 .into_iter()
8762 .map(|range| range.to_offset(buffer))
8763 .collect::<Vec<_>>();
8764 (path, ranges)
8765 })
8766 })
8767 .collect())
8768 }
8769 }
8770}